// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation for compiler-based
// ThreadSanitizer. Use base/atomicops.h instead.
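//
// Example (illustrative only, using the public atomicops API declared in
// base/atomicops.h):
//
//   v8::base::Atomic32 ready = 0;
//   v8::base::Release_Store(&ready, 1);        // writer thread
//   if (v8::base::Acquire_Load(&ready) == 1) { // reader thread
//     // Data written before the Release_Store is now visible.
//   }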

#ifndef V8_BASE_ATOMICOPS_INTERNALS_TSAN_H_
#define V8_BASE_ATOMICOPS_INTERNALS_TSAN_H_

namespace v8 {
namespace base {

#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

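// The declarations below mirror the atomic interface exported by the
// ThreadSanitizer runtime (tsan_interface_atomic.h); they are repeated here
// so this header does not depend on the sanitizer headers being available.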
extern "C" {
typedef char  __tsan_atomic8;
typedef short __tsan_atomic16;  // NOLINT
typedef int   __tsan_atomic32;
typedef long  __tsan_atomic64;  // NOLINT

#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
typedef __int128 __tsan_atomic128;
#define __TSAN_HAS_INT128 1
#else
typedef char     __tsan_atomic128;
#define __TSAN_HAS_INT128 0
#endif

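// Memory-order constants; these correspond one-to-one to the C++11
// std::memory_order enumerators.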
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst,
} __tsan_memory_order;

__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8* a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16* a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32* a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64* a,
    __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128* a,
    __tsan_memory_order mo);

void __tsan_atomic8_store(volatile __tsan_atomic8* a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16* a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32* a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64* a, __tsan_atomic64 v,
    __tsan_memory_order mo);
void __tsan_atomic128_store(volatile __tsan_atomic128* a, __tsan_atomic128 v,
    __tsan_memory_order mo);

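// Read-modify-write operations.  Each returns the value of *a immediately
// before the operation.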
__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

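// Compare-and-swap, mirroring C11 atomic_compare_exchange_{weak,strong}: a
// nonzero return means *a matched *c and was replaced by v; on failure the
// value observed in *a is written back into *c.  The weak variants may fail
// spuriously.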
int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8* a,
    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16* a,
    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32* a,
    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64* a,
    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128* a,
    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8* a,
    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16* a,
    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32* a,
    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64* a,
    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128* a,
    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);

__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8* a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16* a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32* a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64* a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128* a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);

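// Fences, corresponding to std::atomic_thread_fence and
// std::atomic_signal_fence.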
void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);
}  // extern "C"

#endif  // #ifndef TSAN_INTERFACE_ATOMIC_H

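// The functions below implement the atomicops interface (see base/atomicops.h)
// in terms of the TSan primitives above, so that ThreadSanitizer observes
// properly annotated atomic operations.
//
// CompareAndSwap returns the value of *ptr before the operation: on success
// |cmp| still holds |old_value|, and on failure the compare-exchange writes
// the value it observed into |cmp|.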
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_acquire);
}

inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_release);
}

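// AtomicIncrement returns the *new* value; fetch_add returns the previous
// value, so the increment is added back before returning.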
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) {
  __tsan_atomic8_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
}

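// Acquire_Store and Release_Load have no direct C++11 equivalent; as in the
// other atomicops backends, they combine a relaxed access with a sequentially
// consistent fence.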
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) {
  return __tsan_atomic8_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

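// 64-bit versions of the operations above.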
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
}

inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

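// Full (sequentially consistent) memory barrier.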
inline void MemoryBarrier() {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

}  // namespace base
}  // namespace v8

#endif  // V8_BASE_ATOMICOPS_INTERNALS_TSAN_H_