author	Rohit Vaswani <rvaswani@codeaurora.org>	2014-12-17 00:26:36 -0800
committer	Trilok Soni <tsoni@codeaurora.org>	2016-01-22 15:47:00 -0800
commit	59cae1879e3b604ac9f7614883216bff56cafd5a (patch)
tree	e6532f943fbab6696124615ea62ccf633f285927 /arch/arm64/include/asm
parent	b7ca2f4c9a2368ee57eb1ef61da700b58ddc9c62 (diff)
ARM64: spinlock: Add SEV and dsb(ishst) in unlock code
In certain unexplained cases, the stlr alone might not wake up a processor waiting in WFE on a spinlock. Add an explicit dsb(ishst) and SEV in write_unlock, read_unlock and spin_unlock to ensure that a core waiting on the lock wakes up from WFE. The ISHST variant of the DSB is sufficient here, since it only needs to order the preceding store within the inner shareable domain.

This behaviour is selectable via the config option ARM64_SEV_IN_LOCK_UNLOCK and is disabled by default.

CRs-Fixed: 962923
Change-Id: I691ff5713d4d564623b75b053b40d1f46d74868a
Signed-off-by: Rohit Vaswani <rvaswani@codeaurora.org>
Signed-off-by: Trilok Soni <tsoni@codeaurora.org>
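For context, a minimal standalone sketch of what the patched arch_spin_unlock() boils down to with the option enabled. The two-halfword ticket-lock layout here is a simplified stand-in for the kernel's own arch_spinlock_t, and the #define at the top merely simulates selecting the Kconfig option; the asm body mirrors the hunk below. It should compile with an aarch64 toolchain (e.g. aarch64-linux-gnu-gcc -c).

/* Simulate enabling the new Kconfig option for this sketch. */
#define CONFIG_ARM64_SEV_IN_LOCK_UNLOCK 1

/* Simplified stand-in for the arm64 ticket spinlock of this era. */
typedef struct {
	unsigned short owner;	/* ticket currently holding the lock */
	unsigned short next;	/* next ticket to be handed out */
} arch_spinlock_t;

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	asm volatile(
	/* Release the lock with a store-release of owner + 1. */
"	stlrh	%w1, %0\n"
#ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK
	/*
	 * Make the store visible across the inner shareable domain,
	 * then explicitly wake any core sitting in WFE.
	 */
"	dsb	ishst\n"
"	sev\n"
#endif
	: "=Q" (lock->owner)
	: "r" (lock->owner + 1)
	: "memory");
}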
Diffstat (limited to 'arch/arm64/include/asm')
-rw-r--r--	arch/arm64/include/asm/spinlock.h | 12 ++++++++++++
1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/arch/arm64/include/asm/spinlock.h b/arch/arm64/include/asm/spinlock.h
index 7bc9f639b4d2..bb866d569776 100644
--- a/arch/arm64/include/asm/spinlock.h
+++ b/arch/arm64/include/asm/spinlock.h
@@ -86,6 +86,10 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
asm volatile(
" stlrh %w1, %0\n"
+#ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK
+" dsb ishst\n"
+" sev\n"
+#endif
: "=Q" (lock->owner)
: "r" (lock->owner + 1)
: "memory");
@@ -154,6 +158,10 @@ static inline void arch_write_unlock(arch_rwlock_t *rw)
{
asm volatile(
" stlr %w1, %0\n"
+#ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK
+ " dsb ishst\n"
+ " sev\n"
+#endif
: "=Q" (rw->lock) : "r" (0) : "memory");
}
@@ -197,6 +205,10 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
"1: ldxr %w0, %2\n"
" sub %w0, %w0, #1\n"
" stlxr %w1, %w0, %2\n"
+#ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK
+ " dsb ishst\n"
+ " sev\n"
+#endif
" cbnz %w1, 1b\n"
: "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock)
: