/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2022 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "gimple-fold.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"
#include "diagnostic-core.h"

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */
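/* For example, a plain 4-byte load selects BUILT_IN_TSAN_READ4, whose
   runtime entry point is

     void __tsan_read4 (void *addr);

   and with --param tsan-distinguish-volatile enabled a volatile 8-byte
   store selects __tsan_volatile_write8.  (Illustrative only;
   instrument_expr below requests only power-of-two sizes up to 16,
   so the rounding in the size-to-index mapping never loses bytes.)  */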

static tree
get_memory_access_decl (bool is_write, unsigned size, bool volatilep)
{
  enum built_in_function fcode;
  int pos;

  if (size <= 1)
    pos = 0;
  else if (size <= 3)
    pos = 1;
  else if (size <= 7)
    pos = 2;
  else if (size <= 15)
    pos = 3;
  else
    pos = 4;

  if (param_tsan_distinguish_volatile && volatilep)
    fcode = is_write ? BUILT_IN_TSAN_VOLATILE_WRITE1
		     : BUILT_IN_TSAN_VOLATILE_READ1;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
		     : BUILT_IN_TSAN_READ1;
  fcode = (built_in_function)(fcode + pos);

  return builtin_decl_implicit (fcode);
}

/* Check whether EXPR is a store to a vptr; if so, return the stored
   value, otherwise NULL.  */
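/* E.g. in a C++ constructor the vtable-pointer initialization is a
   GIMPLE store roughly of the form

     this->_vptr.C = &_ZTV1C + 16;

   whose LHS is a COMPONENT_REF of a DECL_VIRTUAL_P field; for such
   stores instrument_expr emits __tsan_vptr_update instead of a plain
   __tsan_writeN.  */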

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
	  && DECL_VIRTUAL_P (field))
	return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}

/* Instruments EXPR if needed. If any instrumentation is inserted,
   return true.  */
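/* E.g. for a store to a global

     int x;  ...  x = 1;

   this inserts the call

     __tsan_write4 (&x);

   next to the store (before it, except for stores of call results,
   which are handled specially below).  */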

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  poly_int64 unused_bitsize, unused_bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
			      &mode, &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape,
     they can't escape to other threads then.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!may_be_aliased (base))
	return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      HOST_WIDE_INT bitpos, bitsize;
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  expr = TREE_OPERAND (expr, 1);
	  if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
	    expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
	  if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_SIZE (expr)))
	    return false;
	  bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
		   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
	  bitsize = tree_to_uhwi (DECL_SIZE (expr));
	}
      else
	{
	  if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
	      || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
	    return false;
	  bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
	  bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
	}
      if (bitpos < 0 || bitsize <= 0)
	return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
	     / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
	return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
	return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
	{
	  align = (align - 1) & bitpos;
	  align = least_bit_hwi (align);
	}
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
		     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
	return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
	return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
					    ? BUILT_IN_TSAN_WRITE_RANGE
					    : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size,
						   TREE_THIS_VOLATILE (expr)),
			   1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table describing how to map sync/atomic builtins to their
   corresponding tsan equivalents.  */
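/* E.g. a CHECK_LAST entry rewrites

     __atomic_fetch_add_4 (&x, 1, __ATOMIC_ACQUIRE)

   into

     __tsan_atomic32_fetch_add (&x, 1, __ATOMIC_ACQUIRE)

   after validating the memory-model argument, while ADD_SEQ_CST
   entries append an explicit MEMMODEL_SEQ_CST argument for the legacy
   __sync_* builtins, which take no memory model operand.  */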
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};

/* Instrument an atomic builtin.  */
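/* E.g. __atomic_add_fetch_4 (&x, n, model) has no direct tsan
   equivalent; the fetch_op action below rewrites it as

     tmp = __tsan_atomic32_fetch_add (&x, n, model);
     lhs = tmp + n;

   and NAND (BIT_NOT_EXPR in the table) becomes lhs = ~(tmp & n).  */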

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	if (fcode == BUILT_IN_ATOMIC_THREAD_FENCE)
	  warning_at (gimple_location (stmt), OPT_Wtsan,
		      "%qs is not supported with %qs", "atomic_thread_fence",
		      "-fsanitize=thread");

	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    maybe_clean_eh_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, NOP_EXPR, args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
		/* BIT_NOT_EXPR stands for NAND.  */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, BIT_AND_EXPR,
					     gimple_call_lhs (stmt), args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
		  }
		else
		  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
					   gimple_call_lhs (stmt), args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (tree_fits_uhwi_p (args[4])
		&& memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
	      return;
	    if (tree_fits_uhwi_p (args[5])
		&& memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
					 NOP_EXPR, args[1]);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		tree cond;
		stmt = gsi_stmt (*gsi);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		cond = build2 (NE_EXPR, boolean_type_node, t,
			       build_int_cst (TREE_TYPE (t), 0));
		g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
					 gimple_assign_lhs (g));
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_clear:
	  case bool_test_and_set:
	    if (BOOL_TYPE_SIZE != 8)
	      {
		decl = NULL_TREE;
		for (j = 1; j < 5; j++)
		  if (BOOL_TYPE_SIZE == (8 << j))
		    {
		      enum built_in_function tsan_fcode
			= (enum built_in_function)
			  (tsan_atomic_table[i].tsan_fcode + j);
		      decl = builtin_decl_implicit (tsan_fcode);
		      break;
		    }
		if (decl == NULL_TREE)
		  return;
	      }
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    if (tsan_atomic_table[i].action == bool_clear)
	      {
		update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				    build_int_cst (t, 0), last_arg);
		maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
		return;
	      }
	    t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				t, last_arg);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    lhs = gimple_call_lhs (stmt);
	    if (lhs == NULL_TREE)
	      return;
	    if (targetm.atomic_test_and_set_trueval != 1
		|| !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (t)))
	      {
		tree new_lhs = make_ssa_name (TREE_TYPE (t));
		gimple_call_set_lhs (stmt, new_lhs);
		if (targetm.atomic_test_and_set_trueval != 1)
		  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
					   build_int_cst (TREE_TYPE (t), 0));
		else
		  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		update_stmt (stmt);
	      }
	    return;
	  default:
	    continue;
	  }
      }
}

/* Instruments the gimple pointed to by GSI. Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will have its exit
	 instrumented, so no call may be turned into a tail call:
	 __tsan_func_exit has to run after the call returns.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented = instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */
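/* E.g. each return statement

     return ...;

   gains an immediately preceding call

     __tsan_func_exit ();

   so the runtime can pop the function from the per-thread call stack
   it uses in race reports.  */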

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	    {
	      if (fentry_exit_instrument)
		replace_func_exit (stmt);
	      else
		tsan_func_exits.safe_push (stmt);
	      func_exit_seen = true;
	    }
	  else
	    fentry_exit_instrument
	      |= (instrument_gimple (&gsi)
		  && param_tsan_instrument_func_entry_exit);
	}
      if (gimple_purge_dead_eh_edges (bb))
	*cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
	gsi = gsi_for_stmt (stmt);
	gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */
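/* The sequence emitted on the entry edge is roughly

     ret_addr_N = __builtin_return_address (0);
     __tsan_func_entry (ret_addr_N);

   letting the runtime record the caller's PC for reports.  */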

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */
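/* The effect is roughly as if the translation unit contained

     __attribute__((constructor))
     static void tsan_ctor (void) { __tsan_init (); }

   except that the generated constructor is registered with priority
   MAX_RESERVED_INIT_PRIORITY - 1, so it runs before user
   constructors.  */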

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return sanitize_flags_p (SANITIZE_THREAD);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}
