sanitizer_atomic_msvc.h revision 238901
1169695Skan//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
2169695Skan//
3169695Skan//                     The LLVM Compiler Infrastructure
4169695Skan//
5169695Skan// This file is distributed under the University of Illinois Open Source
6169695Skan// License. See LICENSE.TXT for details.
7169695Skan//
8169695Skan//===----------------------------------------------------------------------===//
9169695Skan//
10// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
11// Not intended for direct inclusion. Include sanitizer_atomic.h.
12//
13//===----------------------------------------------------------------------===//
14
15#ifndef SANITIZER_ATOMIC_MSVC_H
16#define SANITIZER_ATOMIC_MSVC_H
17
18extern "C" void _ReadWriteBarrier();
19#pragma intrinsic(_ReadWriteBarrier)
20extern "C" void _mm_mfence();
21#pragma intrinsic(_mm_mfence)
22extern "C" void _mm_pause();
23#pragma intrinsic(_mm_pause)
24extern "C" long _InterlockedExchangeAdd(  // NOLINT
25    long volatile * Addend, long Value);  // NOLINT
26#pragma intrinsic(_InterlockedExchangeAdd)
27
28namespace __sanitizer {
29
// Compiler-only fence: _ReadWriteBarrier stops the compiler from moving
// memory accesses across this point but emits no CPU instruction, so it
// orders accesses only with respect to code on the same thread (signals).
INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}
33
// Full hardware memory fence (MFENCE): orders all prior loads and stores
// before all subsequent ones as observed by other processors.
// The memory_order argument is ignored; the strongest fence is always used.
INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}
37
38INLINE void proc_yield(int cnt) {
39  for (int i = 0; i < cnt; i++)
40    _mm_pause();
41}
42
43template<typename T>
44INLINE typename T::Type atomic_load(
45    const volatile T *a, memory_order mo) {
46  DCHECK(mo & (memory_order_relaxed | memory_order_consume
47      | memory_order_acquire | memory_order_seq_cst));
48  DCHECK(!((uptr)a % sizeof(*a)));
49  typename T::Type v;
50  if (mo == memory_order_relaxed) {
51    v = a->val_dont_use;
52  } else {
53    atomic_signal_fence(memory_order_seq_cst);
54    v = a->val_dont_use;
55    atomic_signal_fence(memory_order_seq_cst);
56  }
57  return v;
58}
59
60template<typename T>
61INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
62  DCHECK(mo & (memory_order_relaxed | memory_order_release
63      | memory_order_seq_cst));
64  DCHECK(!((uptr)a % sizeof(*a)));
65  if (mo == memory_order_relaxed) {
66    a->val_dont_use = v;
67  } else {
68    atomic_signal_fence(memory_order_seq_cst);
69    a->val_dont_use = v;
70    atomic_signal_fence(memory_order_seq_cst);
71  }
72  if (mo == memory_order_seq_cst)
73    atomic_thread_fence(memory_order_seq_cst);
74}
75
// Atomically adds v to *a and returns the previous value.
// _InterlockedExchangeAdd (LOCK XADD) acts as a full barrier, so the
// requested memory order is ignored — every call gets seq_cst strength.
INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, (long)v);  // NOLINT
}
83
// Atomically swaps *a with v and returns the old value (8-bit variant).
// XCHG with a memory operand is implicitly locked on x86, so this is both
// atomic and a full barrier; the memory_order argument is unused.
// Uses inline asm — presumably because an 8-bit interlocked-exchange
// intrinsic was unavailable in the targeted MSVC; note __asm blocks are
// x86-only (they do not compile for x64) — TODO(review): confirm.
INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
    u8 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a      // eax = address of the atomic variable
    mov cl, v       // cl = new value
    xchg [eax], cl  // NOLINT
    mov v, cl       // v now holds the previous value
  }
  return v;
}
96
// Atomically swaps *a with v and returns the old value (16-bit variant).
// XCHG with a memory operand is implicitly locked on x86, so this is both
// atomic and a full barrier; the memory_order argument is unused.
// Uses inline asm — presumably because a 16-bit interlocked-exchange
// intrinsic was unavailable in the targeted MSVC; note __asm blocks are
// x86-only (they do not compile for x64) — TODO(review): confirm.
INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
    u16 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a      // eax = address of the atomic variable
    mov cx, v       // cx = new value
    xchg [eax], cx  // NOLINT
    mov v, cx       // v now holds the previous value
  }
  return v;
}
109
110}  // namespace __sanitizer
111
#endif  // SANITIZER_ATOMIC_MSVC_H
113