-/*
+/*
  * Copyright (c) 2017, The Linux Foundation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
 
 #include <stdint.h>
+#include <stdbool.h>
 
 #ifndef __LH_ATOMICS_H_
 #define __LH_ATOMICS_H_
@@ -99,11 +100,11 @@ static inline void prefetch64 (unsigned long *ptr) {
 }
 
 static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;
 
@@ -130,18 +131,18 @@ static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsi
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_ACQ_REL);
 #endif
 
     return val;
 }
 
 static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;
 
@@ -168,18 +169,18 @@ static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned lon
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_ACQUIRE);
 #endif
 
     return val;
 }
 
 static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;
 
@@ -206,18 +207,18 @@ static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned lon
 #endif
     val = old;
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_RELEASE);
 #endif
 
     return val;
 }
 
 static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;
 
@@ -244,20 +245,20 @@ static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long val) {
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);
 #endif
 
     return val;
 }
 
 static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     val = (unsigned long) (-(long) val);
 
     asm volatile ("lock xaddq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;
     val = (unsigned long) (-(long) val);
@@ -285,18 +286,18 @@ static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long val) {
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_fetch_sub(ptr, val, __ATOMIC_RELAXED);
 #endif
 
     return val;
 }
 
 static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("xchgq %q0, %1\n"
         : "+r" (val), "+m" (*(ptr))
         : : "memory", "cc");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     unsigned long old;
 
@@ -322,7 +323,7 @@ static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
     val = old;
 #endif
 #else
-    /* TODO: builtin atomic call */
+    val = __atomic_exchange_n(ptr, val, __ATOMIC_ACQ_REL);
 #endif
 
     return val;
@@ -331,12 +332,12 @@ static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
 static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;
 
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile (
         " mov %[old], %[exp]\n"
@@ -360,7 +361,8 @@ static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsign
         : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
 #endif
 
     return old;
@@ -369,12 +371,12 @@ static inline unsigned long cas64 (unsigned long *ptr, unsigned long val, unsign
 static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;
 
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile (
         " mov %[old], %[exp]\n"
@@ -398,7 +400,8 @@ static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val
         : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
 #endif
 
     return old;
@@ -407,12 +410,12 @@ static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val
 static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;
 
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile (
         " mov %[old], %[exp]\n"
@@ -436,7 +439,8 @@ static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val
         : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    /* the failure order may not be __ATOMIC_RELEASE, so fall back to relaxed on failure */
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
 #endif
 
     return old;
@@ -445,12 +449,12 @@ static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val
 static inline unsigned long cas64_acquire_release (unsigned long *ptr, unsigned long val, unsigned long exp) {
     unsigned long old;
 
-#if defined(__x86_64__)
+#if defined(__x86_64__) && !defined(USE_BUILTIN)
     asm volatile ("lock cmpxchgq %2, %1\n"
         : "=a" (old), "+m" (*(ptr))
         : "r" (val), "0" (exp)
         : "memory");
-#elif defined(__aarch64__)
+#elif defined(__aarch64__) && !defined(USE_BUILTIN)
 #if defined(USE_LSE)
     asm volatile (
         " mov %[old], %[exp]\n"
@@ -474,7 +478,8 @@ static inline unsigned long cas64_acquire_release (unsigned long *ptr, unsigned
         : );
 #endif
 #else
-    /* TODO: builtin atomic call */
+    old = exp;
+    /* the failure order may not be __ATOMIC_ACQ_REL, so use acquire on failure */
+    __atomic_compare_exchange_n(ptr, &old, val, true, __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);
 #endif
 
     return old;
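
For reference, a minimal usage sketch of the new builtin fallback path (not part of the commit). Building with -DUSE_BUILTIN bypasses the x86_64/aarch64 inline assembly so the __atomic paths added above are compiled instead. The test file name and build line below are assumptions, and the header is assumed to be named lh_atomics.h as its include guard suggests.

    /* test_atomics.c - illustrative only; assumed build: gcc -O2 -DUSE_BUILTIN test_atomics.c */
    #include <stdio.h>
    #include "lh_atomics.h"

    int main (void) {
        unsigned long counter = 0;

        /* fetchadd64 returns the value *ptr held before the addition */
        unsigned long before = fetchadd64(&counter, 5);       /* counter: 0 -> 5, before == 0 */

        /* cas64_acquire returns the old value; the swap took effect iff old == exp */
        unsigned long old = cas64_acquire(&counter, 42, 5);   /* counter: 5 -> 42, old == 5 */

        printf("before=%lu old=%lu counter=%lu\n", before, old, counter);
        return 0;
    }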