aboutsummaryrefslogtreecommitdiff
path: root/arch/arm64/include/asm
diff options
context:
space:
mode:
authorWill Deacon <will.deacon@arm.com>2015-05-29 14:26:59 +0100
committerWill Deacon <will.deacon@arm.com>2015-07-27 15:28:52 +0100
commit0bc671d3f4bee9c31110d096ada0de52380e693d (patch)
tree63c5120264072fa4599fc6fe0f92f6a044d48f0c /arch/arm64/include/asm
parente9a4b795652f654a7870727e5333c1b709b8736c (diff)
arm64: cmpxchg: avoid "cc" clobber in ll/sc routines
We can perform the cmpxchg comparison using eor and cbnz which avoids the "cc" clobber for the ll/sc case and consequently for the LSE case where we may have to fall-back on the ll/sc code at runtime. Reviewed-by: Steve Capper <steve.capper@arm.com> Reviewed-by: Catalin Marinas <catalin.marinas@arm.com> Signed-off-by: Will Deacon <will.deacon@arm.com>
Diffstat (limited to 'arch/arm64/include/asm')
-rw-r--r--arch/arm64/include/asm/atomic_ll_sc.h14
-rw-r--r--arch/arm64/include/asm/atomic_lse.h4
2 files changed, 8 insertions, 10 deletions
diff --git a/arch/arm64/include/asm/atomic_ll_sc.h b/arch/arm64/include/asm/atomic_ll_sc.h
index f89f1e4ba577..c02684d1eab3 100644
--- a/arch/arm64/include/asm/atomic_ll_sc.h
+++ b/arch/arm64/include/asm/atomic_ll_sc.h
@@ -101,14 +101,13 @@ __LL_SC_PREFIX(atomic_cmpxchg(atomic_t *ptr, int old, int new))
asm volatile("// atomic_cmpxchg\n"
"1: ldxr %w1, %2\n"
-" cmp %w1, %w3\n"
-" b.ne 2f\n"
+" eor %w0, %w1, %w3\n"
+" cbnz %w0, 2f\n"
" stxr %w0, %w4, %2\n"
" cbnz %w0, 1b\n"
"2:"
: "=&r" (tmp), "=&r" (oldval), "+Q" (ptr->counter)
- : "Ir" (old), "r" (new)
- : "cc");
+ : "Lr" (old), "r" (new));
smp_mb();
return oldval;
@@ -179,14 +178,13 @@ __LL_SC_PREFIX(atomic64_cmpxchg(atomic64_t *ptr, long old, long new))
asm volatile("// atomic64_cmpxchg\n"
"1: ldxr %1, %2\n"
-" cmp %1, %3\n"
-" b.ne 2f\n"
+" eor %0, %1, %3\n"
+" eor %0, %1, %3\n"
" stxr %w0, %4, %2\n"
" cbnz %w0, 1b\n"
"2:"
: "=&r" (res), "=&r" (oldval), "+Q" (ptr->counter)
- : "Ir" (old), "r" (new)
- : "cc");
+ : "Lr" (old), "r" (new));
smp_mb();
return oldval;
diff --git a/arch/arm64/include/asm/atomic_lse.h b/arch/arm64/include/asm/atomic_lse.h
index f3cb1052ab24..a3d21e7cee4f 100644
--- a/arch/arm64/include/asm/atomic_lse.h
+++ b/arch/arm64/include/asm/atomic_lse.h
@@ -166,7 +166,7 @@ static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
" mov %w[ret], w30")
: [ret] "+r" (x0), [v] "+Q" (ptr->counter)
: [old] "r" (w1), [new] "r" (w2)
- : "x30", "cc", "memory");
+ : "x30", "memory");
return x0;
}
@@ -313,7 +313,7 @@ static inline long atomic64_cmpxchg(atomic64_t *ptr, long old, long new)
" mov %[ret], x30")
: [ret] "+r" (x0), [v] "+Q" (ptr->counter)
: [old] "r" (x1), [new] "r" (x2)
- : "x30", "cc", "memory");
+ : "x30", "memory");
return x0;
}