author    Will Deacon <will.deacon@arm.com>    2014-05-02 16:24:15 +0100
committer Catalin Marinas <catalin.marinas@arm.com>    2014-05-09 17:21:24 +0100
commit    dc60b777fcdddbadab111028e266fd69d4702b34 (patch)
tree      d839ce1021b5f64b2925d8f20230cb9564f3e11e /arch/arm64/mm
parent    ee9e101c11478680d579bd20bb38a4d3e2514fe3 (diff)
arm64: mm: use inner-shareable barriers for inner-shareable maintenance
In order to ensure ordering and completion of inner-shareable maintenance
instructions (cache and TLB) on AArch64, we can use the -ish suffix to the
dmb and dsb instructions respectively.

This patch updates our low-level cache and tlb maintenance routines to use
the inner-shareable barrier variants where appropriate.

Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
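(Illustrative sketch, not part of the patch: the inner-shareable maintenance
pattern this commit relies on, assuming this CPU has just updated a page
table. Broadcast "is" TLB operations only need to complete within the
inner-shareable domain, so dsb ish suffices where a full-system dsb sy was
previously used.)

	dsb	ishst			// make the page-table write visible to table walkers
	tlbi	vmalle1is		// broadcast TLB invalidate, inner-shareable
	dsb	ish			// wait for the invalidate to complete across the domain
	isb				// synchronize the instruction stream on this CPU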
Diffstat (limited to 'arch/arm64/mm')
 arch/arm64/mm/cache.S | 6 +++---
 arch/arm64/mm/proc.S  | 2 +-
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/arm64/mm/cache.S b/arch/arm64/mm/cache.S
index fda756875fa6..23663837acff 100644
--- a/arch/arm64/mm/cache.S
+++ b/arch/arm64/mm/cache.S
@@ -31,7 +31,7 @@
* Corrupted registers: x0-x7, x9-x11
*/
__flush_dcache_all:
- dsb sy // ensure ordering with previous memory accesses
+ dmb sy // ensure ordering with previous memory accesses
mrs x0, clidr_el1 // read clidr
and x3, x0, #0x7000000 // extract loc from clidr
lsr x3, x3, #23 // left align loc bit field
@@ -128,7 +128,7 @@ USER(9f, dc cvau, x4 ) // clean D line to PoU
add x4, x4, x2
cmp x4, x1
b.lo 1b
- dsb sy
+ dsb ish

icache_line_size x2, x3
sub x3, x2, #1
@@ -139,7 +139,7 @@ USER(9f, ic ivau, x4 ) // invalidate I line PoU
cmp x4, x1
b.lo 1b
9: // ignore any faulting cache operation
- dsb sy
+ dsb ish
isb
ret
ENDPROC(flush_icache_range)
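(For context, a sketch of the sequence flush_icache_range implements, under
the assumption that x0 holds the address of a modified instruction; this is
not code from the patch. The clean to the point of unification must complete
across the inner-shareable domain before the I-side invalidate begins, which
is why dsb ish is sufficient in place of dsb sy.)

	dc	cvau, x0		// clean D-cache line to PoU
	dsb	ish			// complete the clean, inner-shareable scope
	ic	ivau, x0		// invalidate I-cache line to PoU
	dsb	ish			// complete the invalidate
	isb				// ensure this CPU refetches the new instructions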
diff --git a/arch/arm64/mm/proc.S b/arch/arm64/mm/proc.S
index 9042aff5e9e3..7736779c9809 100644
--- a/arch/arm64/mm/proc.S
+++ b/arch/arm64/mm/proc.S
@@ -182,7 +182,7 @@ ENDPROC(cpu_do_switch_mm)
ENTRY(__cpu_setup)
ic iallu // I+BTB cache invalidate
tlbi vmalle1is // invalidate I + D TLBs
- dsb sy
+ dsb ish

mov x0, #3 << 20
msr cpacr_el1, x0 // Enable FP/ASIMD