#ifndef _ASM_ARC_ATOMIC64_ARCV2_H
#define _ASM_ARC_ATOMIC64_ARCV2_H

/*
 * Inline-asm output constraint used for the 64-bit atomic counter operand
 * in the llockd/scondd sequences below ("+" = read-write operand).
 *
 * NOTE(review): "ATO" vs "ATOMC" appear to be ARC-specific machine
 * constraints selected per ISA generation (ARCv2 vs ARCv3) — confirm
 * against the toolchain's machine-constraint documentation.
 */
#if defined(CONFIG_ISA_ARCV3)
#define ATOMIC_CONSTR	"+ATOMC"
#else
#define ATOMIC_CONSTR	"+ATO"
#endif
16+
1117typedef struct {
1218 s64 __aligned (8 ) counter ;
1319} atomic64_t ;
@@ -58,7 +64,7 @@ static inline void arch_atomic64_##op(s64 a, atomic64_t *v) \
5864 " " #op2 " %H0, %H0, %H2 \n" \
5965 " scondd %0, %1 \n" \
6066 " bnz 1b \n" \
61- : "=&r"(val), "+ATO" (v->counter) \
67+ : "=&r"(val), ATOMIC_CONSTR (v->counter) \
6268 : "ir"(a) \
6369 : "cc", "memory"); \
6470} \
@@ -75,7 +81,7 @@ static inline s64 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
7581 " " #op2 " %H0, %H0, %H2 \n" \
7682 " scondd %0, %1 \n" \
7783 " bnz 1b \n" \
78- : "=&r"(val), "+ATO" (v->counter) \
84+ : "=&r"(val), ATOMIC_CONSTR (v->counter) \
7985 : "ir"(a) \
8086 : "cc"); /* memory clobber comes from smp_mb() */ \
8187 \
@@ -97,7 +103,7 @@ static inline s64 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
97103 " " #op2 " %H1, %H0, %H3 \n" \
98104 " scondd %1, %2 \n" \
99105 " bnz 1b \n" \
100- : "=&r"(orig), "=&r"(val), "+ATO" (v->counter) \
106+ : "=&r"(orig), "=&r"(val), ATOMIC_CONSTR (v->counter) \
101107 : "ir"(a) \
102108 : "cc"); /* memory clobber comes from smp_mb() */ \
103109 \
@@ -151,7 +157,7 @@ arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)
151157 " scondd %3, %1 \n"
152158 " bnz 1b \n"
153159 "2: \n"
154- : "=&r" (prev ), "+ATO" (* ptr )
160+ : "=&r" (prev ), ATOMIC_CONSTR (* ptr )
155161 : "ir" (expected ), "r" (new )
156162 : "cc" ); /* memory clobber comes from smp_mb() */
157163
@@ -171,7 +177,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
171177 " scondd %2, %1 \n"
172178 " bnz 1b \n"
173179 "2: \n"
174- : "=&r" (prev ), "+ATO" (* ptr )
180+ : "=&r" (prev ), ATOMIC_CONSTR (* ptr )
175181 : "r" (new )
176182 : "cc" ); /* memory clobber comes from smp_mb() */
177183
@@ -202,7 +208,7 @@ static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
202208 " scondd %0, %1 \n"
203209 " bnz 1b \n"
204210 "2: \n"
205- : "=&r" (val ), "+ATO" (v -> counter )
211+ : "=&r" (val ), ATOMIC_CONSTR (v -> counter )
206212 :
207213 : "cc" ); /* memory clobber comes from smp_mb() */
208214
@@ -237,7 +243,7 @@ static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
237243 " scondd %1, %2 \n"
238244 " bnz 1b \n"
239245 "3: \n"
240- : "=&r" (old ), "=&r" (temp ), "+ATO" (v -> counter )
246+ : "=&r" (old ), "=&r" (temp ), ATOMIC_CONSTR (v -> counter )
241247 : "r" (a ), "r" (u )
242248 : "cc" ); /* memory clobber comes from smp_mb() */
243249