/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2018 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"

/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
		     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
		     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
		     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
		     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
		     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
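
/* Note: instrument_expr only calls this for power-of-two sizes of at
   most 16 bytes (anything else goes through the *_range callbacks), so
   the "<=" tests above effectively dispatch on sizes 1, 2, 4, 8 and 16;
   e.g. a 4-byte write selects __tsan_write4.  */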

/* Check whether EXPR refers to a store to a vptr; if so, return the
   stored value, otherwise NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
	  && DECL_VIRTUAL_P (field))
	return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
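
/* E.g. the vtable pointer store "this->_vptr.C = &vtable" emitted in a
   constructor is recognized here; instrument_expr then instruments it
   with __tsan_vptr_update instead of a plain __tsan_writeN.  */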

/* Instruments EXPR if needed.  Returns true if any instrumentation
   was inserted.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  poly_int64 unused_bitsize, unused_bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
			      &mode, &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape:
     they cannot be visible to other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!may_be_aliased (base))
	return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      HOST_WIDE_INT bitpos, bitsize;
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  expr = TREE_OPERAND (expr, 1);
	  if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
	    expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
	  if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_SIZE (expr)))
	    return false;
	  bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
		   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
	  bitsize = tree_to_uhwi (DECL_SIZE (expr));
	}
      else
	{
	  if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
	      || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
	    return false;
	  bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
	  bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
	}
      if (bitpos < 0 || bitsize <= 0)
	return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
	     / BITS_PER_UNIT;
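      /* E.g. a bit-field at bit offset 3 spanning 7 bits occupies
	 (3 % 8 + 7 + 8 - 1) / 8 == 2 bytes.  */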
      if (may_be_nonaddressable_p (base))
	return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
	return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
	{
	  align = (align - 1) & bitpos;
	  align = least_bit_hwi (align);
	}
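      /* Rewrite the access as a char-typed MEM_REF at the first byte of
	 the bit-field, so the runtime hook receives a plain byte
	 address.  */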
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
		     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
	return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
	return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
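  /* Accesses whose size is not a power of two, exceeds 16 bytes, or is
     insufficiently aligned cannot use the fixed-size hooks; e.g. a
     6-byte access becomes __tsan_read_range (addr, 6) or
     __tsan_write_range (addr, 6).  */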
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
					    ? BUILT_IN_TSAN_WRITE_RANGE
					    : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
			   1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table how to map sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)
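
/* For instance, CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD) expands to
   { BUILT_IN_ATOMIC_LOAD_1, BUILT_IN_TSAN_ATOMIC8_LOAD, check_last,
     ERROR_MARK }.  */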

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    maybe_clean_eh_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, NOP_EXPR, args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
		/* BIT_NOT_EXPR stands for NAND.  */
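		/* __atomic_nand_fetch returns ~(old & val), while the
		   tsan hook returns the old value, so recover the result
		   below as lhs = ~(call_result & val).  */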
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, BIT_AND_EXPR,
					     gimple_call_lhs (stmt), args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
		  }
		else
		  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
					   gimple_call_lhs (stmt), args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (tree_fits_uhwi_p (args[4])
		&& memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
	      return;
	    if (tree_fits_uhwi_p (args[5])
		&& memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
					 NOP_EXPR, args[1]);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
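	    /* __sync_val_compare_and_swap returns the old value, but the
	       tsan hook returns a bool and stores the old value back
	       through the "expected" temporary on failure, so reconstruct
	       the result as lhs = success ? expected : reloaded tmp.  */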
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		tree cond;
		stmt = gsi_stmt (*gsi);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		cond = build2 (NE_EXPR, boolean_type_node, t,
			       build_int_cst (TREE_TYPE (t), 0));
		g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
					 gimple_assign_lhs (g));
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_clear:
	  case bool_test_and_set:
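	    /* These tsan hooks operate on an 8-bit type; if bool is wider
	       on this target, step to the hook of matching width (the
	       TSAN_ATOMICn builtin codes are laid out consecutively).  */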
	    if (BOOL_TYPE_SIZE != 8)
	      {
		decl = NULL_TREE;
		for (j = 1; j < 5; j++)
		  if (BOOL_TYPE_SIZE == (8 << j))
		    {
		      enum built_in_function tsan_fcode
			= (enum built_in_function)
			  (tsan_atomic_table[i].tsan_fcode + j);
		      decl = builtin_decl_implicit (tsan_fcode);
		      break;
		    }
		if (decl == NULL_TREE)
		  return;
	      }
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    if (tsan_atomic_table[i].action == bool_clear)
	      {
		update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				    build_int_cst (t, 0), last_arg);
		maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
		return;
	      }
	    t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				t, last_arg);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    lhs = gimple_call_lhs (stmt);
	    if (lhs == NULL_TREE)
	      return;
	    if (targetm.atomic_test_and_set_trueval != 1
		|| !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (t)))
	      {
		tree new_lhs = make_ssa_name (TREE_TYPE (t));
		gimple_call_set_lhs (stmt, new_lhs);
		if (targetm.atomic_test_and_set_trueval != 1)
		  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
					   build_int_cst (TREE_TYPE (t), 0));
		else
		  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		update_stmt (stmt);
	      }
	    return;
	  default:
	    continue;
	  }
      }
}

/* Instruments the gimple pointed to by GSI. Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will have its exit
	 instrumented, therefore a call may not remain a tail call:
	 __tsan_func_exit must run after it.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented = instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	    {
	      if (fentry_exit_instrument)
		replace_func_exit (stmt);
	      else
		tsan_func_exits.safe_push (stmt);
	      func_exit_seen = true;
	    }
	  else
	    fentry_exit_instrument |= instrument_gimple (&gsi);
	}
      if (gimple_purge_dead_eh_edges (bb))
	*cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
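  /* Now that we know whether entry/exit instrumentation is wanted,
     either lower the queued TSAN_FUNC_EXIT internal calls to real
     __tsan_func_exit calls or remove them.  */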
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
	gsi = gsi_for_stmt (stmt);
	gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);
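
  /* At this point SEQ is effectively:
       ret_addr = __builtin_return_address (0);
       __tsan_func_entry (ret_addr);  */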

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
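  /* Priority MAX_RESERVED_INIT_PRIORITY - 1 makes this constructor run
     ahead of ordinary (default-priority) constructors, so the runtime is
     initialized before instrumented code executes.  */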
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return sanitize_flags_p (SANITIZE_THREAD);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}