author     Will Deacon <will.deacon@arm.com>            2012-07-20 18:24:55 +0100
committer  Russell King <rmk+kernel@arm.linux.org.uk>   2012-07-31 10:30:43 +0100
commit     5a783cbc48367cfc7b65afc75430953dfe60098f
tree       f9bac5b052713b1eda13927f7fb4e6049a566130 /arch/arm
parent     24b35521b8ddf088531258f06f681bb7b227bf47
ARM: 7478/1: errata: extend workaround for erratum #720789
Commit cdf357f1 ("ARM: 6299/1: errata: TLBIASIDIS and TLBIMVAIS operations
can broadcast a faulty ASID") replaced by-ASID TLB flushing operations with
all-ASID variants to work around A9 erratum #720789.

This patch extends the workaround to include the tlb_range operations, which
were overlooked by the original patch.

Cc: <stable@vger.kernel.org>
Tested-by: Steve Capper <steve.capper@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
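For reference, a minimal C sketch (not part of the patch; the helper names are hypothetical) of the two CP15 operations the hunks below switch between: TLBIMVAIS (c8, c3, 1) invalidates a unified TLB entry by MVA for the current ASID, Inner Shareable, and is the operation affected by erratum #720789; TLBIMVAAIS (c8, c3, 3) invalidates the entry for all ASIDs and is what the workaround substitutes.

/* Hypothetical illustration only -- not taken from the patch. The CP15
 * encodings match the mcr instructions in the hunks below. */
static inline void tlbimvais(unsigned long mva_and_asid)
{
	/* TLBIMVAIS: invalidate unified TLB entry by MVA + ASID, Inner
	 * Shareable -- the by-ASID op that erratum #720789 can mis-broadcast. */
	asm volatile("mcr p15, 0, %0, c8, c3, 1" : : "r" (mva_and_asid) : "memory");
}

static inline void tlbimvaais(unsigned long mva)
{
	/* TLBIMVAAIS: invalidate unified TLB entry by MVA for all ASIDs,
	 * Inner Shareable -- the all-ASID variant the workaround uses. */
	asm volatile("mcr p15, 0, %0, c8, c3, 3" : : "r" (mva) : "memory");
}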
Diffstat (limited to 'arch/arm')
-rw-r--r--  arch/arm/mm/tlb-v7.S  12
1 file changed, 12 insertions, 0 deletions
diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S
index 845f461f8ec1..c2021139cb56 100644
--- a/arch/arm/mm/tlb-v7.S
+++ b/arch/arm/mm/tlb-v7.S
@@ -38,11 +38,19 @@ ENTRY(v7wbi_flush_user_tlb_range)
dsb
mov r0, r0, lsr #PAGE_SHIFT @ align address
mov r1, r1, lsr #PAGE_SHIFT
+#ifdef CONFIG_ARM_ERRATA_720789
+ mov r3, #0
+#else
asid r3, r3 @ mask ASID
+#endif
orr r0, r3, r0, lsl #PAGE_SHIFT @ Create initial MVA
mov r1, r1, lsl #PAGE_SHIFT
1:
+#ifdef CONFIG_ARM_ERRATA_720789
+ ALT_SMP(mcr p15, 0, r0, c8, c3, 3) @ TLB invalidate U MVA all ASID (shareable)
+#else
ALT_SMP(mcr p15, 0, r0, c8, c3, 1) @ TLB invalidate U MVA (shareable)
+#endif
ALT_UP(mcr p15, 0, r0, c8, c7, 1) @ TLB invalidate U MVA
add r0, r0, #PAGE_SZ
@@ -67,7 +75,11 @@ ENTRY(v7wbi_flush_kern_tlb_range)
mov r0, r0, lsl #PAGE_SHIFT
mov r1, r1, lsl #PAGE_SHIFT
1:
+#ifdef CONFIG_ARM_ERRATA_720789
+ ALT_SMP(mcr p15, 0, r0, c8, c3, 3) @ TLB invalidate U MVA all ASID (shareable)
+#else
ALT_SMP(mcr p15, 0, r0, c8, c3, 1) @ TLB invalidate U MVA (shareable)
+#endif
ALT_UP(mcr p15, 0, r0, c8, c7, 1) @ TLB invalidate U MVA
add r0, r0, #PAGE_SZ
cmp r0, r1