Diffstat (limited to 'arch/powerpc/include/asm/runlatch.h')
-rw-r--r--	arch/powerpc/include/asm/runlatch.h	45
1 file changed, 45 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/include/asm/runlatch.h b/arch/powerpc/include/asm/runlatch.h
new file mode 100644
index 000000000000..54e9b963876e
--- /dev/null
+++ b/arch/powerpc/include/asm/runlatch.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 1999 Cort Dougan <cort@cs.nmt.edu>
+ */
+#ifndef _ASM_POWERPC_RUNLATCH_H
+#define _ASM_POWERPC_RUNLATCH_H
+
+#ifdef CONFIG_PPC64
+
+extern void __ppc64_runlatch_on(void);
+extern void __ppc64_runlatch_off(void);
+
+/*
+ * We manually hard enable/disable interrupts here: this is
+ * called from the idle loop and we don't want to interfere
+ * with the soft-disable/enable state or interrupt replay.
+ */
+#define ppc64_runlatch_off() \
+ do { \
+ if (cpu_has_feature(CPU_FTR_CTRL) && \
+ test_thread_local_flags(_TLF_RUNLATCH)) { \
+ unsigned long msr = mfmsr(); \
+ __hard_irq_disable(); \
+ __ppc64_runlatch_off(); \
+ if (msr & MSR_EE) \
+ __hard_irq_enable(); \
+ } \
+ } while (0)
+
+#define ppc64_runlatch_on() \
+ do { \
+ if (cpu_has_feature(CPU_FTR_CTRL) && \
+ !test_thread_local_flags(_TLF_RUNLATCH)) { \
+ unsigned long msr = mfmsr(); \
+ __hard_irq_disable(); \
+ __ppc64_runlatch_on(); \
+ if (msr & MSR_EE) \
+ __hard_irq_enable(); \
+ } \
+ } while (0)
+#else
+#define ppc64_runlatch_on()
+#define ppc64_runlatch_off()
+#endif /* CONFIG_PPC64 */
+
+#endif /* _ASM_POWERPC_RUNLATCH_H */
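
For context, the intended caller of these macros is the architecture idle path: the run latch is dropped just before the CPU goes idle and raised again once there is work to do, with _TLF_RUNLATCH tracking the current state so redundant SPR writes are avoided. The sketch below only illustrates that pairing; it is not the kernel's actual idle implementation, and example_idle_loop() is a hypothetical function name.

	#include <linux/sched.h>
	#include <asm/runlatch.h>

	/*
	 * Illustrative sketch: clear the run latch while the CPU has
	 * nothing to do, set it again before returning to useful work.
	 * The macros above already skip CPUs without CPU_FTR_CTRL and
	 * avoid redundant updates via the _TLF_RUNLATCH thread flag.
	 */
	static void example_idle_loop(void)	/* hypothetical */
	{
		while (!need_resched()) {
			ppc64_runlatch_off();	/* CPU is about to idle */
			/* ... enter the platform's low-power idle state ... */
		}
		ppc64_runlatch_on();		/* resuming real work */
		schedule();
	}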