Diffstat (limited to 'arch/mips/include')
 arch/mips/include/asm/atomic.h   | 16
 arch/mips/include/asm/barrier.h  | 15
 arch/mips/include/asm/bitops.h   |  8
 arch/mips/include/asm/cmpxchg.h  | 10
 arch/mips/include/asm/spinlock.h |  4
 arch/mips/include/asm/system.h   |  4
 6 files changed, 32 insertions(+), 25 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index dd75d673447..519197ede08 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -137,7 +137,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
{
int result;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
int temp;
@@ -189,7 +189,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
int result;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
int temp;
@@ -249,7 +249,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
int result;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
int temp;
@@ -516,7 +516,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
long result;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
long temp;
@@ -568,7 +568,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
long result;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
long temp;
@@ -628,7 +628,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
long result;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
long temp;
@@ -788,9 +788,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
* atomic*_return operations are serializing but not the non-*_return
* versions.
*/
-#define smp_mb__before_atomic_dec() smp_llsc_mb()
+#define smp_mb__before_atomic_dec() smp_mb__before_llsc()
#define smp_mb__after_atomic_dec() smp_llsc_mb()
-#define smp_mb__before_atomic_inc() smp_llsc_mb()
+#define smp_mb__before_atomic_inc() smp_mb__before_llsc()
#define smp_mb__after_atomic_inc() smp_llsc_mb()
#include <asm-generic/atomic-long.h>
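For reference, a simplified sketch of the shape every atomic*_return and atomic*_sub_if_positive routine takes after these hunks. It is illustrative only, assumes the plain ll/sc path (no R10000_LLSC_WAR workaround), and example_add_return is not a kernel symbol: the full barrier ahead of the ll/sc loop is now spelled smp_mb__before_llsc(), so it can be elided on CPUs whose reordering never extends beyond ll/sc, while the trailing smp_llsc_mb() is unchanged.

static __inline__ int example_add_return(int i, atomic_t *v)
{
	int result, temp;

	smp_mb__before_llsc();			/* was: smp_llsc_mb()         */

	__asm__ __volatile__(
	"1:	ll	%1, %2		# load-linked old value		\n"
	"	addu	%0, %1, %3	# compute old + i		\n"
	"	sc	%0, %2		# try to store it		\n"
	"	beqz	%0, 1b		# retry if sc failed		\n"
	"	 addu	%0, %1, %3	# (delay slot) return value	\n"
	: "=&r" (result), "=&r" (temp), "+m" (v->counter)
	: "Ir" (i)
	: "memory");

	smp_llsc_mb();				/* trailing barrier unchanged */

	return result;
}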
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 91785dc8e94..1a5a51c3e96 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -131,23 +131,26 @@
#endif /* !CONFIG_CPU_HAS_WB */
#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
-#define __WEAK_ORDERING_MB " sync \n"
+#define smp_mb() __asm__ __volatile__("sync" : : :"memory")
+#define smp_rmb() __asm__ __volatile__("sync" : : :"memory")
+#define smp_wmb() __asm__ __volatile__("sync" : : :"memory")
#else
-#define __WEAK_ORDERING_MB " \n"
+#define smp_mb() barrier()
+#define smp_rmb() barrier()
+#define smp_wmb() barrier()
#endif
+
#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
#define __WEAK_LLSC_MB " sync \n"
#else
#define __WEAK_LLSC_MB " \n"
#endif
-#define smp_mb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_rmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_wmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-
#define set_mb(var, value) \
do { var = value; smp_mb(); } while (0)
#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_mb__before_llsc() smp_llsc_mb()
+
#endif /* __ASM_BARRIER_H */
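With the reshuffled definitions above, smp_mb()/smp_rmb()/smp_wmb() expand straight to a MIPS "sync" on CONFIG_WEAK_ORDERING SMP kernels and to barrier() otherwise, and the new smp_mb__before_llsc() is, for now, simply smp_llsc_mb(): a "sync" only when CONFIG_WEAK_REORDERING_BEYOND_LLSC says ordering can leak past an ll/sc pair, a compiler barrier everywhere else. A minimal caller-side sketch of the classic pairing (payload and flag are illustrative names, not kernel symbols):

static int payload;
static int flag;

static void example_producer(void)
{
	payload = 42;		/* data written first                      */
	smp_wmb();		/* "sync" on weakly ordered SMP, else free */
	flag = 1;		/* then the flag that publishes it         */
}

static int example_consumer(void)
{
	if (!flag)
		return -1;
	smp_rmb();		/* pairs with the smp_wmb() above */
	return payload;
}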
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 84a383806b2..9255cfbee45 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -42,7 +42,7 @@
/*
* clear_bit() doesn't provide any barrier for the compiler.
*/
-#define smp_mb__before_clear_bit() smp_llsc_mb()
+#define smp_mb__before_clear_bit() smp_mb__before_llsc()
#define smp_mb__after_clear_bit() smp_llsc_mb()
/*
@@ -258,7 +258,7 @@ static inline int test_and_set_bit(unsigned long nr,
unsigned short bit = nr & SZLONG_MASK;
unsigned long res;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -395,7 +395,7 @@ static inline int test_and_clear_bit(unsigned long nr,
unsigned short bit = nr & SZLONG_MASK;
unsigned long res;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -485,7 +485,7 @@ static inline int test_and_change_bit(unsigned long nr,
unsigned short bit = nr & SZLONG_MASK;
unsigned long res;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
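Caller-visible effect of the bitops changes: clear_bit() itself has no barrier semantics, so code that clears a "busy" flag after updating shared state still brackets it with smp_mb__before_clear_bit()/smp_mb__after_clear_bit(); the "before" half now maps to smp_mb__before_llsc() and can be free on CPUs that do not reorder past ll/sc. A sketch of that idiom (the struct, field and bit names are illustrative):

#define EXAMPLE_BUSY_BIT	0

struct example_work {
	unsigned long flags;
	int result;
};

static inline void example_finish(struct example_work *w)
{
	w->result = 1;				/* publish the result first    */
	smp_mb__before_clear_bit();		/* order it before the ll/sc   */
	clear_bit(EXAMPLE_BUSY_BIT, &w->flags);	/* ll/sc read-modify-write     */
	smp_mb__after_clear_bit();		/* order it before later code  */
}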
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 815a438a268..ed9aaaaf074 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -72,14 +72,14 @@
*/
extern void __cmpxchg_called_with_bad_pointer(void);
-#define __cmpxchg(ptr, old, new, barrier) \
+#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier) \
({ \
__typeof__(ptr) __ptr = (ptr); \
__typeof__(*(ptr)) __old = (old); \
__typeof__(*(ptr)) __new = (new); \
__typeof__(*(ptr)) __res = 0; \
\
- barrier; \
+ pre_barrier; \
\
switch (sizeof(*(__ptr))) { \
case 4: \
@@ -96,13 +96,13 @@ extern void __cmpxchg_called_with_bad_pointer(void);
break; \
} \
\
- barrier; \
+ post_barrier; \
\
__res; \
})
-#define cmpxchg(ptr, old, new) __cmpxchg(ptr, old, new, smp_llsc_mb())
-#define cmpxchg_local(ptr, old, new) __cmpxchg(ptr, old, new, )
+#define cmpxchg(ptr, old, new) __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
+#define cmpxchg_local(ptr, old, new) __cmpxchg(ptr, old, new, , )
#define cmpxchg64(ptr, o, n) \
({ \
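The net effect for callers: cmpxchg() remains a full memory barrier, now built from smp_mb__before_llsc() ahead of the ll/sc loop and smp_llsc_mb() after it, while cmpxchg_local() gets neither and only guarantees atomicity on the local CPU. A usage sketch of the full-barrier flavour in an ordinary retry loop (example_track_max is illustrative, not kernel code):

static inline void example_track_max(unsigned long *max, unsigned long val)
{
	unsigned long old;

	do {
		old = *max;
		if (old >= val)
			return;
		/* pre-barrier, ll/sc compare-and-swap, post-barrier */
	} while (cmpxchg(max, old, val) != old);
}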
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 21ef9efbde4..5f16696eaa0 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -138,7 +138,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
int tmp;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (R10000_LLSC_WAR) {
__asm__ __volatile__ (
@@ -305,7 +305,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
{
unsigned int tmp;
- smp_llsc_mb();
+ smp_mb__before_llsc();
if (R10000_LLSC_WAR) {
__asm__ __volatile__(
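arch_read_unlock() above needs release semantics: every store made inside the read-side critical section must be globally visible before the ll/sc loop drops the reader count, and the full barrier placed in front of that loop, now smp_mb__before_llsc(), is what provides it. A rough caller-level sketch of the same shape (shared_data and example_read_unlock are illustrative, and the real unlock is the inline asm above rather than atomic_sub()):

static int shared_data;

static inline void example_read_unlock(atomic_t *readers)
{
	shared_data = 1;	/* last store of the read-side section     */

	smp_mb__before_llsc();	/* order it before the ll of the decrement */
	atomic_sub(1, readers);	/* ll/sc loop that drops the reader count  */
}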
diff --git a/arch/mips/include/asm/system.h b/arch/mips/include/asm/system.h
index 83b5509e09e..bb937ccfba1 100644
--- a/arch/mips/include/asm/system.h
+++ b/arch/mips/include/asm/system.h
@@ -95,6 +95,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
__u32 retval;
+ smp_mb__before_llsc();
+
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long dummy;
@@ -147,6 +149,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
__u64 retval;
+ smp_mb__before_llsc();
+
if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long dummy;
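Both __xchg_u32() and __xchg_u64() gain the same leading barrier, so xchg() keeps its documented full-barrier semantics: stores issued before the call cannot be reordered past the ll of its swap loop. A small usage sketch where that ordering matters (pending_work, claim_word and example_claim are illustrative names):

static unsigned long pending_work;
static unsigned long claim_word;

static inline int example_claim(void)
{
	pending_work = 1;			/* must be visible first       */
	/* xchg() orders the store above before the ll/sc swap below. */
	return xchg(&claim_word, 1UL) == 0;	/* non-zero if we claimed it   */
}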