@@ -32,6 +32,7 @@ using namespace __tsan;
 static StaticSpinMutex mutex128;
 #endif
 
+#if SANITIZER_DEBUG
 static bool IsLoadOrder(morder mo) {
   return mo == mo_relaxed || mo == mo_consume
       || mo == mo_acquire || mo == mo_seq_cst;
@@ -40,6 +41,7 @@ static bool IsLoadOrder(morder mo) {
 static bool IsStoreOrder(morder mo) {
   return mo == mo_relaxed || mo == mo_release || mo == mo_seq_cst;
 }
+#endif
 
 static bool IsReleaseOrder(morder mo) {
   return mo == mo_release || mo == mo_acq_rel || mo == mo_seq_cst;
@@ -202,7 +204,7 @@ static memory_order to_mo(morder mo) {
     case mo_acq_rel: return memory_order_acq_rel;
     case mo_seq_cst: return memory_order_seq_cst;
   }
-  CHECK(0);
+  DCHECK(0);
   return memory_order_seq_cst;
 }
 
@@ -220,7 +222,7 @@ static a128 NoTsanAtomicLoad(const volatile a128 *a, morder mo) {
 
 template <typename T>
 static T AtomicLoad(ThreadState *thr, uptr pc, const volatile T *a, morder mo) {
-  CHECK(IsLoadOrder(mo));
+  DCHECK(IsLoadOrder(mo));
   // This fast-path is critical for performance.
   // Assume the access is atomic.
   if (!IsAcquireOrder(mo)) {
@@ -258,7 +260,7 @@ static void NoTsanAtomicStore(volatile a128 *a, a128 v, morder mo) {
 template <typename T>
 static void AtomicStore(ThreadState *thr, uptr pc, volatile T *a, T v,
                         morder mo) {
-  CHECK(IsStoreOrder(mo));
+  DCHECK(IsStoreOrder(mo));
   MemoryAccess(thr, pc, (uptr)a, AccessSize<T>(), kAccessWrite | kAccessAtomic);
   // This fast-path is critical for performance.
   // Assume the access is atomic.
@@ -403,7 +405,7 @@ static bool AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T *c, T v,
   // 31.7.2.18: "The failure argument shall not be memory_order_release
   // nor memory_order_acq_rel". LLVM (2021-05) fallbacks to Monotonic
   // (mo_relaxed) when those are used.
-  CHECK(IsLoadOrder(fmo));
+  DCHECK(IsLoadOrder(fmo));
 
   MemoryAccess(thr, pc, (uptr)a, AccessSize<T>(), kAccessWrite | kAccessAtomic);
   if (LIKELY(mo == mo_relaxed && fmo == mo_relaxed)) {
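Note on the change: CHECK in sanitizer_common fires in every build mode, while DCHECK is compiled out unless SANITIZER_DEBUG is set; that is also why IsLoadOrder/IsStoreOrder move behind #if SANITIZER_DEBUG, since release builds would otherwise carry unused predicates. A minimal sketch of the pattern, with definitions modeled on sanitizer_internal_defs.h rather than copied from it:

#include <cstdio>
#include <cstdlib>

// Always-on check: active in release and debug builds alike.
#define CHECK(cond)                                           \
  do {                                                        \
    if (!(cond)) {                                            \
      std::fprintf(stderr, "CHECK failed: %s\n", #cond);      \
      std::abort();                                           \
    }                                                         \
  } while (0)

#if SANITIZER_DEBUG
// Debug build: DCHECK is a real check.
#define DCHECK(cond) CHECK(cond)
#else
// Release build: DCHECK expands to nothing, so predicates referenced
// only from DCHECKs must themselves be guarded to avoid
// unused-function warnings.
#define DCHECK(cond) ((void)0)
#endif

int main() {
  DCHECK(2 + 2 == 4);  // verified only when built with -DSANITIZER_DEBUG=1
  CHECK(2 + 2 == 4);   // verified in every build mode
  return 0;
}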
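The "31.7.2.18" comment in the last hunk cites the C++ standard's rule that the failure ordering of a compare-exchange must be a valid load ordering, which is exactly what IsLoadOrder(fmo) asserts. A small standalone illustration using plain std::atomic (not tsan's interceptors):

#include <atomic>
#include <cassert>

int main() {
  std::atomic<int> a{1};
  int expected = 1;
  // Valid: the failure order (memory_order_acquire) is a load order.
  bool ok = a.compare_exchange_strong(expected, 2,
                                      std::memory_order_acq_rel,
                                      std::memory_order_acquire);
  assert(ok && a.load(std::memory_order_relaxed) == 2);
  // Passing memory_order_release or memory_order_acq_rel as the failure
  // order violates the rule quoted above; per the comment in the diff,
  // LLVM (as of 2021-05) falls back to relaxed/Monotonic instead of
  // rejecting such code, which presumably makes a debug-only DCHECK
  // sufficient here.
  return 0;
}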