 #include "utils_windows_intrin.h"
 
 #pragma intrinsic(_BitScanForward64)
-#else
-#include <pthread.h>
+#else // !_WIN32
 
-#ifndef __cplusplus
+#include <pthread.h>
 #include <stdatomic.h>
-#else /* __cplusplus */
-#include <atomic>
-#define _Atomic(X) std::atomic<X>
-
-using std::memory_order_acq_rel;
-using std::memory_order_acquire;
-using std::memory_order_relaxed;
-using std::memory_order_release;
 
-#endif /* __cplusplus */
-
-#endif /* _WIN32 */
+#endif /* !_WIN32 */
 
 #include "utils_common.h"
 #include "utils_sanitizers.h"
@@ -118,14 +107,6 @@ static __inline void utils_atomic_load_acquire_ptr(void **ptr, void **out) {
     *(uintptr_t *)out = ret;
 }
 
-static __inline void utils_atomic_store_release_u64(uint64_t *ptr,
-                                                    uint64_t *val) {
-    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)val, 8);
-    utils_annotate_release(ptr);
-    InterlockedExchange64((LONG64 volatile *)ptr, *(LONG64 *)val);
-}
-
 static __inline void utils_atomic_store_release_ptr(void **ptr, void *val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
     utils_annotate_release(ptr);
@@ -146,14 +127,12 @@ static __inline uint64_t utils_atomic_decrement_u64(uint64_t *ptr) {
 
 static __inline uint64_t utils_fetch_and_add_u64(uint64_t *ptr, uint64_t val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)&val, 8);
     // return the value that had previously been in *ptr
     return InterlockedExchangeAdd64((LONG64 volatile *)(ptr), val);
 }
 
 static __inline uint64_t utils_fetch_and_sub_u64(uint64_t *ptr, uint64_t val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)&val, 8);
     // return the value that had previously been in *ptr
     // NOTE: on Windows there is no *Sub* version of InterlockedExchange
     return InterlockedExchangeAdd64((LONG64 volatile *)(ptr), -(LONG64)val);
@@ -203,14 +182,6 @@ static inline void utils_atomic_load_acquire_ptr(void **ptr, void **out) {
     utils_annotate_acquire((void *)ptr);
 }
 
-static inline void utils_atomic_store_release_u64(uint64_t *ptr,
-                                                  uint64_t *val) {
-    ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
-    ASSERT_IS_ALIGNED((uintptr_t)val, 8);
-    utils_annotate_release(ptr);
-    __atomic_store(ptr, val, memory_order_release);
-}
-
 static inline void utils_atomic_store_release_ptr(void **ptr, void *val) {
     ASSERT_IS_ALIGNED((uintptr_t)ptr, 8);
     utils_annotate_release(ptr);
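
A minimal sketch of the fetch-and-sub semantics kept by the Windows hunk above: both helpers return the value *ptr held before the update, and on Windows subtraction is expressed by negating the operand passed to InterlockedExchangeAdd64, since no InterlockedExchangeSub64 exists. The sketch below is an assumption-laden illustration for the pthread/stdatomic path (the function name fetch_and_sub_u64_sketch and the acq_rel ordering are chosen here, not taken from the source).

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for the non-Windows fetch-and-sub helper. */
static uint64_t fetch_and_sub_u64_sketch(_Atomic uint64_t *ptr, uint64_t val) {
    /* Returns the previous value, mirroring
     * InterlockedExchangeAdd64((LONG64 volatile *)ptr, -(LONG64)val). */
    return atomic_fetch_sub_explicit(ptr, val, memory_order_acq_rel);
}

int main(void) {
    _Atomic uint64_t counter = 10;
    uint64_t prev = fetch_and_sub_u64_sketch(&counter, 3);
    printf("prev=%llu now=%llu\n", (unsigned long long)prev,
           (unsigned long long)atomic_load(&counter)); /* prev=10 now=7 */
    return 0;
}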