author    Nicolas Pitre <nicolas.pitre@linaro.org>  2011-04-25 14:08:48 -0400
committer Nicolas Pitre <nicolas.pitre@linaro.org>  2011-04-25 14:08:48 -0400
commit    1090e71d6adca637aeaabe39d7e48e905e407b80
tree      0db4d2e97718d4d024aa029579a2cdedf8520976
parent    1d121efe89c5842090a3c21983490bad6151254c
Revert "ARM: Add PI/robust mutexes support for SMP kernels"
This reverts commit c4bb07ec0084e26323047bcbe446d59ac352e07f.

A nicer version was posted by Will Deacon, which will replace it.
 arch/arm/include/asm/futex.h | 129 +++----------------------------------
 1 file changed, 7 insertions(+), 122 deletions(-)
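For context: with this revert in place, SMP kernels fall back to <asm-generic/futex.h>, whose helpers in this era cannot perform atomic operations on user memory, so the PI/robust futex paths simply fail on ARM SMP until the replacement implementation lands. A paraphrased sketch of that fallback behaviour (not the verbatim generic header):

/* Paraphrased sketch of the asm-generic SMP fallback: without an
 * architecture-provided atomic cmpxchg on user memory, the generic
 * header can only report the operation as unsupported.
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	return -ENOSYS;		/* PI/robust futexes unavailable */
}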
diff --git a/arch/arm/include/asm/futex.h b/arch/arm/include/asm/futex.h
index 2491b18f2e3..b33fe7065b3 100644
--- a/arch/arm/include/asm/futex.h
+++ b/arch/arm/include/asm/futex.h
@@ -3,133 +3,18 @@
#ifdef __KERNEL__
+#ifdef CONFIG_SMP
+
+#include <asm-generic/futex.h>
+
+#else /* !SMP, we can work around lack of atomic ops by disabling preemption */
+
#include <linux/futex.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
#include <asm/domain.h>
-#ifndef CONFIG_CPU_USE_DOMAINS
-
-#define __futex_atomic_op(insn, res, ret, oldval, uaddr, oparg) \
- __asm__ __volatile__( \
- "1: ldrex %2, [%3]\n" \
- " " insn "\n" \
- "2: strex %0, %1, [%3]\n" \
- " teq %0, #0\n" \
- " bne 1b\n" \
- " mov %1, #0\n" \
- "3:\n" \
- " .pushsection __ex_table,\"a\"\n" \
- " .align 3\n" \
- " .long 1b, 4f, 2b, 4f\n" \
- " .popsection\n" \
- " .pushsection .fixup,\"ax\"\n" \
- "4: mov %1, %5\n" \
- " b 3b\n" \
- " .popsection" \
- : "=&r" (res), "=&r" (ret), "=&r" (oldval) \
- : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
- : "cc", "memory")
-
-static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
-{
- int op = (encoded_op >> 28) & 7;
- int cmp = (encoded_op >> 24) & 15;
- int oparg = (encoded_op << 8) >> 20;
- int cmparg = (encoded_op << 20) >> 20;
- int oldval = 0, ret;
- unsigned long res;
-
- if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
- oparg = 1 << oparg;
-
- if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
- return -EFAULT;
-
- pagefault_disable(); /* implies preempt_disable() */
-
- switch (op) {
- case FUTEX_OP_SET:
- __futex_atomic_op("mov %1, %4", res, ret, oldval, uaddr, oparg);
- break;
- case FUTEX_OP_ADD:
- __futex_atomic_op("add %1, %2, %4", res, ret, oldval, uaddr, oparg);
- break;
- case FUTEX_OP_OR:
- __futex_atomic_op("orr %1, %2, %4", res, ret, oldval, uaddr, oparg);
- break;
- case FUTEX_OP_ANDN:
- __futex_atomic_op("and %1, %2, %4", res, ret, oldval, uaddr, ~oparg);
- break;
- case FUTEX_OP_XOR:
- __futex_atomic_op("eor %1, %2, %4", res, ret, oldval, uaddr, oparg);
- break;
- default:
- ret = -ENOSYS;
- }
-
- pagefault_enable(); /* subsumes preempt_enable() */
-
- if (!ret) {
- switch (cmp) {
- case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
- case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
- case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
- case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
- case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
- case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
- default: ret = -ENOSYS;
- }
- }
- return ret;
-}
-
-static inline int
-futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
-{
- unsigned long ret;
- unsigned long res;
-
- if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
- return -EFAULT;
-
- smp_mb();
-
- do {
- asm volatile("@futex_atomic_cmpxchg_inatomic\n"
- "1: ldrex %1, [%2]\n"
- " mov %0, #0\n"
- " teq %1, %3\n"
- " it eq @ explicit IT needed for the 2b label\n"
- "2: strexeq %0, %4, [%2]\n"
- "3:\n"
- " .pushsection __ex_table,\"a\"\n"
- " .align 3\n"
- " .long 1b, 4f, 2b, 4f\n"
- " .popsection\n"
- " .pushsection .fixup,\"ax\"\n"
- "4: mov %0, #0\n"
- " mov %1, %5\n"
- " b 3b\n"
- " .popsection"
- : "=&r" (res), "=&r" (ret)
- : "r" (uaddr), "Ir" (oldval), "r" (newval),
- "Ir" (-EFAULT)
- : "memory", "cc");
- } while (res);
-
- smp_mb();
-
- return ret;
-}
-
-#elif defined(CONFIG_SMP)
-
-#include <asm-generic/futex.h>
-
-#else /* we can work around lack of atomic ops by disabling preemption */
-
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
__asm__ __volatile__( \
"1: " T(ldr) " %1, [%2]\n" \
@@ -235,7 +120,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
return val;
}
-#endif
+#endif /* !SMP */
#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */
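For reference, the !SMP path kept by this revert relies on the fact that on a uniprocessor another thread can only interleave via preemption, so disabling preemption (and page faults) makes a plain load/modify/store on user memory effectively atomic. A simplified sketch of the technique follows; futex_up_add is a hypothetical name, and the real code above performs the accesses in inline assembly with __ex_table fault fixups:

/* Hypothetical illustration of the UP trick: with page faults (and
 * hence preemption) disabled, this read-modify-write cannot be
 * interleaved with another thread on the same CPU.
 */
static inline int futex_up_add(int __user *uaddr, int oparg, int *oldval)
{
	int val, ret = 0;

	pagefault_disable();			/* implies preempt_disable() */
	if (get_user(val, uaddr))		/* plain, non-exclusive load */
		ret = -EFAULT;
	else if (put_user(val + oparg, uaddr))	/* plain store */
		ret = -EFAULT;
	else
		*oldval = val;
	pagefault_enable();			/* subsumes preempt_enable() */
	return ret;
}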