Skip to content

Commit e11d6ef

Browse files
pavelvkozlov authored and abrodkin committed
ARCv3: HS5x: enable use of llockd/scondd instrs in atomic64_* funcs
Adopt ARCv2 atomic64 code to be able to use atomic instructions llockd and scondd for 64-bit data on HS5x. Signed-off-by: Pavel Kozlov <pavel.kozlov@synopsys.com>
1 parent 62aa242 commit e11d6ef

File tree

2 files changed

+14
-8
lines changed

2 files changed

+14
-8
lines changed

arch/arc/Kconfig

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ config ARC
1919
select CLONE_BACKWARDS
2020
select COMMON_CLK
2121
select DMA_DIRECT_REMAP
22-
select GENERIC_ATOMIC64 if ISA_ARCOMPACT || ARC_CPU_HS5X || !(ARC_HAS_LL64 && ARC_HAS_LLSC)
22+
select GENERIC_ATOMIC64 if ISA_ARCOMPACT || !(ARC_HAS_LL64 && ARC_HAS_LLSC)
2323
select GENERIC_FIND_FIRST_BIT
2424
# for now, we don't need GENERIC_IRQ_PROBE, CONFIG_GENERIC_IRQ_CHIP
2525
select GENERIC_IRQ_SHOW

arch/arc/include/asm/atomic64-arcv2.h

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,12 @@
88
#ifndef _ASM_ARC_ATOMIC64_ARCV2_H
99
#define _ASM_ARC_ATOMIC64_ARCV2_H
1010

11+
#if defined(CONFIG_ISA_ARCV3)
12+
#define ATOMIC_CONSTR "+ATOMC"
13+
#else
14+
#define ATOMIC_CONSTR "+ATO"
15+
#endif
16+
1117
typedef struct {
1218
s64 __aligned(8) counter;
1319
} atomic64_t;
@@ -58,7 +64,7 @@ static inline void arch_atomic64_##op(s64 a, atomic64_t *v) \
5864
" " #op2 " %H0, %H0, %H2 \n" \
5965
" scondd %0, %1 \n" \
6066
" bnz 1b \n" \
61-
: "=&r"(val), "+ATO"(v->counter) \
67+
: "=&r"(val), ATOMIC_CONSTR(v->counter) \
6268
: "ir"(a) \
6369
: "cc", "memory"); \
6470
} \
@@ -75,7 +81,7 @@ static inline s64 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
7581
" " #op2 " %H0, %H0, %H2 \n" \
7682
" scondd %0, %1 \n" \
7783
" bnz 1b \n" \
78-
: "=&r"(val), "+ATO"(v->counter) \
84+
: "=&r"(val), ATOMIC_CONSTR(v->counter) \
7985
: "ir"(a) \
8086
: "cc"); /* memory clobber comes from smp_mb() */ \
8187
\
@@ -97,7 +103,7 @@ static inline s64 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
97103
" " #op2 " %H1, %H0, %H3 \n" \
98104
" scondd %1, %2 \n" \
99105
" bnz 1b \n" \
100-
: "=&r"(orig), "=&r"(val), "+ATO"(v->counter) \
106+
: "=&r"(orig), "=&r"(val), ATOMIC_CONSTR(v->counter) \
101107
: "ir"(a) \
102108
: "cc"); /* memory clobber comes from smp_mb() */ \
103109
\
@@ -151,7 +157,7 @@ arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)
151157
" scondd %3, %1 \n"
152158
" bnz 1b \n"
153159
"2: \n"
154-
: "=&r"(prev), "+ATO"(*ptr)
160+
: "=&r"(prev), ATOMIC_CONSTR(*ptr)
155161
: "ir"(expected), "r"(new)
156162
: "cc"); /* memory clobber comes from smp_mb() */
157163

@@ -171,7 +177,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
171177
" scondd %2, %1 \n"
172178
" bnz 1b \n"
173179
"2: \n"
174-
: "=&r"(prev), "+ATO"(*ptr)
180+
: "=&r"(prev), ATOMIC_CONSTR(*ptr)
175181
: "r"(new)
176182
: "cc"); /* memory clobber comes from smp_mb() */
177183

@@ -202,7 +208,7 @@ static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
202208
" scondd %0, %1 \n"
203209
" bnz 1b \n"
204210
"2: \n"
205-
: "=&r"(val), "+ATO"(v->counter)
211+
: "=&r"(val), ATOMIC_CONSTR(v->counter)
206212
:
207213
: "cc"); /* memory clobber comes from smp_mb() */
208214

@@ -237,7 +243,7 @@ static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
237243
" scondd %1, %2 \n"
238244
" bnz 1b \n"
239245
"3: \n"
240-
: "=&r"(old), "=&r" (temp), "+ATO"(v->counter)
246+
: "=&r"(old), "=&r" (temp), ATOMIC_CONSTR(v->counter)
241247
: "r"(a), "r"(u)
242248
: "cc"); /* memory clobber comes from smp_mb() */
243249

0 commit comments

Comments (0)