Diffstat (limited to 'bootwrapper/helpers.S')
-rw-r--r--  bootwrapper/helpers.S  1320
1 files changed, 1320 insertions, 0 deletions
diff --git a/bootwrapper/helpers.S b/bootwrapper/helpers.S
new file mode 100644
index 0000000..811b263
--- /dev/null
+++ b/bootwrapper/helpers.S
@@ -0,0 +1,1320 @@
+ ;
+ ; Copyright (c) 2011, ARM Limited. All rights reserved.
+ ;
+ ; Redistribution and use in source and binary forms, with
+ ; or without modification, are permitted provided that the
+ ; following conditions are met:
+ ;
+ ; Redistributions of source code must retain the above
+ ; copyright notice, this list of conditions and the
+ ; following disclaimer.
+ ;
+ ; Redistributions in binary form must reproduce the
+ ; above copyright notice, this list of conditions and
+ ; the following disclaimer in the documentation
+ ; and/or other materials provided with the distribution.
+ ;
+ ; Neither the name of ARM nor the names of its
+ ; contributors may be used to endorse or promote products
+ ; derived from this software without specific prior written
+ ; permission.
+ ;
+
+ EXPORT wfi
+ EXPORT wfe
+ EXPORT sev
+ EXPORT dmb
+ EXPORT dsb
+ EXPORT isb
+ EXPORT smc
+ EXPORT dcisw
+ EXPORT dccsw
+ EXPORT dccisw
+
+ EXPORT read_dacr
+ EXPORT read_ttbr0
+ EXPORT read_cpacr
+ EXPORT read_scr
+ EXPORT read_cpsr
+ EXPORT read_midr
+ EXPORT read_mpidr
+ EXPORT read_cntpct
+ EXPORT read_cntfrq
+ EXPORT read_vmpidr
+ EXPORT read_vmidr
+ EXPORT read_id_pfr0
+ EXPORT read_id_pfr1
+ EXPORT read_id_dfr0
+ EXPORT read_id_afr0
+ EXPORT read_id_mmfr0
+ EXPORT read_id_mmfr1
+ EXPORT read_id_mmfr2
+ EXPORT read_id_mmfr3
+ EXPORT read_id_isar0
+ EXPORT read_id_isar1
+ EXPORT read_id_isar2
+ EXPORT read_id_isar3
+ EXPORT read_id_isar4
+ EXPORT read_id_isar5
+ EXPORT read_cpuid
+ EXPORT read_aidr
+ EXPORT read_ctr
+ EXPORT read_tcmtr
+ EXPORT read_tlbtr
+ EXPORT read_clusterid
+ EXPORT read_sctlr
+ EXPORT read_hsctlr
+ EXPORT read_hdfar
+ EXPORT read_hpfar
+ EXPORT read_vtcr
+ EXPORT read_hcr
+ EXPORT read_hdcr
+ EXPORT read_hcptr
+ EXPORT read_hstr
+ EXPORT read_cnthctl
+ EXPORT read_cntkctl
+ EXPORT read_cntp_ctl
+ EXPORT read_cntp_tval
+ EXPORT read_cnthp_ctl
+ EXPORT read_cnthp_tval
+ EXPORT read_cnthp_cval
+ EXPORT read_ttbcr
+ EXPORT read_clidr
+ EXPORT read_lr
+ EXPORT read_sp
+ EXPORT read_actlr
+ EXPORT read_nsacr
+ EXPORT read_csselr
+ EXPORT read_ccsidr
+ EXPORT read_nmrr
+ EXPORT read_prrr
+ EXPORT read_mvbar
+ EXPORT read_vbar
+ EXPORT read_hsr
+ EXPORT read_dfar
+ EXPORT read_ifar
+ EXPORT read_dfsr
+ EXPORT read_ifsr
+ EXPORT read_adfsr
+ EXPORT read_aifsr
+
+ EXPORT write_dacr
+ EXPORT write_prrr
+ EXPORT write_nmrr
+ EXPORT write_ttbr0
+ EXPORT write_cpacr
+ EXPORT write_nsacr
+ EXPORT write_cpsr
+ EXPORT write_scr
+ EXPORT write_mvbar
+ EXPORT write_vbar
+ EXPORT write_hvbar
+ EXPORT write_vmpidr
+ EXPORT write_vmidr
+ EXPORT write_csselr
+ EXPORT write_hcr
+ EXPORT write_hdcr
+ EXPORT write_hcptr
+ EXPORT write_hstr
+ EXPORT write_sctlr
+ EXPORT write_actlr
+ EXPORT write_sp
+ EXPORT write_lr
+ EXPORT write_ttbcr
+ EXPORT write_cntfrq
+ EXPORT write_cnthctl
+ EXPORT write_cntkctl
+ EXPORT write_cntp_ctl
+ EXPORT write_cntp_tval
+ EXPORT write_cnthp_ctl
+ EXPORT write_cnthp_tval
+ EXPORT write_cnthp_cval
+ EXPORT write_hsctlr
+ EXPORT write_httbr
+ EXPORT write_vttbr
+ EXPORT write_htcr
+ EXPORT write_vtcr
+ EXPORT write_hmair0
+ EXPORT write_hmair1
+ EXPORT write_dfar
+ EXPORT write_ifar
+ EXPORT write_dfsr
+ EXPORT write_ifsr
+ EXPORT write_adfsr
+ EXPORT write_aifsr
+
+ EXPORT panic
+ EXPORT spin_lock
+ EXPORT spin_trylock
+ EXPORT spin_unlock
+ EXPORT copy_words
+ EXPORT virt_memset
+ EXPORT disable_gic_dist
+ EXPORT enable_gic_dist
+ EXPORT switcher_exit
+ EXPORT hyp_save
+ EXPORT num_secondaries
+ EXPORT virt_dead
+ EXPORT get_sp
+ EXPORT disable_coherency
+ EXPORT enable_coherency
+ EXPORT inv_tlb_all
+ EXPORT inv_icache_all
+ EXPORT inv_bpred_is
+ EXPORT inv_bpred_all
+ EXPORT inv_icache_mva_pou
+ EXPORT inv_dcache_mva_poc
+ EXPORT cln_dcache_mva_pou
+ EXPORT cln_dcache_mva_poc
+ EXPORT enable_user_perfmon_access
+ EXPORT enable_perfmon
+ EXPORT enable_swp
+ EXPORT cache_maint_op
+ EXPORT enter_monitor_mode
+ EXPORT enter_nonsecure_world
+ EXPORT enable_pmu
+
+; Cache maintenance op types
+INV EQU 0x0
+CLN EQU 0x1
+CLN_INV EQU 0x2
+
+ AREA |.text|, CODE
+
+read_cntfrq FUNCTION
+ mrc p15, 0, r0, c14, c0, 0
+ bx lr
+ ENDFUNC
+
+write_cntfrq FUNCTION
+ mcr p15, 0, r0, c14, c0, 0
+ bx lr
+ ENDFUNC
+
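+ ; Read the 64-bit physical count register (CNTPCT).
+ ; r0: pointer used to store the low 32 bits
+ ; r1: pointer used to store the high 32 bits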
+read_cntpct FUNCTION
+ mrrc p15, 0, r2, r3, c14
+ str r2, [r0]
+ str r3, [r1]
+ bx lr
+ ENDFUNC
+
+dcisw FUNCTION
+ mcr p15, 0, r0, c7, c6, 2
+ bx lr
+ ENDFUNC
+
+dccsw FUNCTION
+ mcr p15, 0, r0, c7, c10, 2
+ bx lr
+ ENDFUNC
+
+dccisw FUNCTION
+ mcr p15, 0, r0, c7, c14, 2
+ bx lr
+ ENDFUNC
+
+virt_dead FUNCTION
+ b virt_dead
+ ENDFUNC
+
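+ ; Disable the GIC distributor.
+ ; r0: address at which to save the current distributor control value
+ ; r1: address of the distributor control register (assumed to be GICD_CTLR)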
+disable_gic_dist FUNCTION
+ push {lr}
+ ldr r2, [r1]
+ str r2, [r0]
+ mov r2, #0
+ str r2, [r1]
+ dsb
+ pop {pc}
+ ENDFUNC
+
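+ ; Re-enable the GIC distributor.
+ ; r0: control value to write back (e.g. the value saved by disable_gic_dist)
+ ; r1: address of the distributor control register (assumed to be GICD_CTLR)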
+enable_gic_dist FUNCTION
+ push {lr}
+ str r0, [r1]
+ dsb
+ pop {pc}
+ ENDFUNC
+
+smc FUNCTION
+ push {r4-r12, lr}
+ smc #0
+ pop {r4-r12, pc}
+ ENDFUNC
+
+dmb FUNCTION
+ dmb
+ bx lr
+ ENDFUNC
+
+wfi FUNCTION
+ wfi
+ bx lr
+ ENDFUNC
+
+wfe FUNCTION
+ wfe
+ bx lr
+ ENDFUNC
+
+sev FUNCTION
+ sev
+ bx lr
+ ENDFUNC
+
+switcher_exit FUNCTION
+ hvc #1
+ bx lr
+ ENDFUNC
+
+hyp_save FUNCTION
+ hvc #2
+ bx lr
+ ENDFUNC
+
+ ; This function takes three arguments
+ ; r0: Destination start address (must be word aligned)
+ ; r1: Source start address (must be word aligned)
+ ; r2: Number of words to copy
+ ; Return value is updated destination pointer (first unwritten word)
+copy_words FUNCTION
+ push {r4, r5}
+0 cmp r2, #3
+ ble %f1
+ ldmia r1!, {r3, r4, r5, r12}
+ stmia r0!, {r3, r4, r5, r12}
+ sub r2, r2, #4
+ b %b0
+
+1 cmp r2, #0
+ beq %f3
+2 ldr r3, [r1], #4
+ str r3, [r0], #4
+ subs r2, r2, #1
+ bne %b2
+
+3 pop {r4, r5}
+ bx lr
+ ENDFUNC
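+ ; Assumed usage from C (illustrative prototype only, not taken from this file):
+ ;     extern unsigned *copy_words(void *dst, const void *src, unsigned num_words);
+ ;     unsigned *next = copy_words(dst_buf, src_buf, 16); /* next == dst_buf + 16 */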
+
+
+virt_memcpy FUNCTION
+ cmp r2, #0
+ bxeq lr
+0 ldrb r3, [r1], #1
+ strb r3, [r0], #1
+ subs r2, #1
+ bne %b0
+ bx lr
+ ENDFUNC
+
+virt_memset FUNCTION
+ cmp r2, #0
+ bxeq lr
+0 strb r1, [r0], #1
+ subs r2, #1
+ bne %b0
+ bx lr
+ ENDFUNC
+
+ AREA APPF_ENTRY_POINT, CODE
+
+ ; Functions we need in the runtime entry point, i.e. before we switch pagetables,
+ ; are placed in this area.
+
+dsb FUNCTION
+ dsb
+ bx lr
+ ENDFUNC
+
+isb FUNCTION
+ isb
+ bx lr
+ ENDFUNC
+
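+ ; Returns the number of secondary CPUs in the cluster. Assumes a
+ ; Cortex-A15 class L2CTLR where bits [25:24] hold the number of
+ ; processors minus one.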
+num_secondaries FUNCTION
+ mrc p15, 1, r0, c9, c0, 2
+ lsr r0, r0, #24
+ and r0, r0, #3
+ bx lr
+ ENDFUNC
+
+read_vmpidr FUNCTION
+ mrc p15, 4, r0, c0, c0, 5
+ bx lr
+ ENDFUNC
+
+read_vmidr FUNCTION
+ mrc p15, 4, r0, c0, c0, 0
+ bx lr
+ ENDFUNC
+
+read_id_pfr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 0
+ bx lr
+ ENDFUNC
+
+read_id_pfr1 FUNCTION
+ mrc p15, 0, r0, c0, c1, 1
+ bx lr
+ ENDFUNC
+
+read_id_dfr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 2
+ bx lr
+ ENDFUNC
+
+read_id_afr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 3
+ bx lr
+ ENDFUNC
+
+read_id_mmfr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 4
+ bx lr
+ ENDFUNC
+
+read_id_mmfr1 FUNCTION
+ mrc p15, 0, r0, c0, c1, 5
+ bx lr
+ ENDFUNC
+
+read_id_mmfr2 FUNCTION
+ mrc p15, 0, r0, c0, c1, 6
+ bx lr
+ ENDFUNC
+
+read_id_mmfr3 FUNCTION
+ mrc p15, 0, r0, c0, c1, 7
+ bx lr
+ ENDFUNC
+
+read_id_isar0 FUNCTION
+ mrc p15, 0, r0, c0, c2, 0
+ bx lr
+ ENDFUNC
+
+read_id_isar1 FUNCTION
+ mrc p15, 0, r0, c0, c2, 1
+ bx lr
+ ENDFUNC
+
+read_id_isar2 FUNCTION
+ mrc p15, 0, r0, c0, c2, 2
+ bx lr
+ ENDFUNC
+
+read_id_isar3 FUNCTION
+ mrc p15, 0, r0, c0, c2, 3
+ bx lr
+ ENDFUNC
+
+read_id_isar4 FUNCTION
+ mrc p15, 0, r0, c0, c2, 4
+ bx lr
+ ENDFUNC
+
+read_id_isar5 FUNCTION
+ mrc p15, 0, r0, c0, c2, 5
+ bx lr
+ ENDFUNC
+
+read_ctr FUNCTION
+ mrc p15, 0, r0, c0, c0, 1
+ bx lr
+ ENDFUNC
+
+read_tcmtr FUNCTION
+ mrc p15, 0, r0, c0, c0, 2
+ bx lr
+ ENDFUNC
+
+read_tlbtr FUNCTION
+ mrc p15, 0, r0, c0, c0, 3
+ bx lr
+ ENDFUNC
+
+read_aidr FUNCTION
+ mrc p15, 1, r0, c0, c0, 7
+ bx lr
+ ENDFUNC
+
+va_to_pa FUNCTION ; Note: assumes conversion will be successful!
+ mov r1, r0
+ mcr p15, 0, r0, c7, c8, 1 ; Priv Write Current World VA-PA
+ mrc p15, 0, r0, c7, c4, 0 ; Get PA
+ bfc r0, #0, #12 ; We want top bits of translated addr
+ bfc r1, #12, #20 ; plus bottom bits of input addr
+ orr r0, r0, r1
+ bx lr
+ ENDFUNC
+
+read_dacr FUNCTION
+ mrc p15, 0, r0, c3, c0, 0
+ bx lr
+ ENDFUNC
+
+read_ttbr0 FUNCTION
+ mrc p15, 0, r0, c2, c0, 0
+ dsb
+ bx lr
+ ENDFUNC
+
+write_dacr FUNCTION
+ mcr p15, 0, r0, c3, c0, 0
+ isb
+ bx lr
+ ENDFUNC
+
+read_cpacr FUNCTION
+ mrc p15, 0, r0, c1, c0, 2
+ bx lr
+ ENDFUNC
+
+write_cpacr FUNCTION
+ mcr p15, 0, r0, c1, c0, 2
+ bx lr
+ ENDFUNC
+
+read_midr FUNCTION
+ mrc p15, 0, r0, c0, c0, 0
+ bx lr
+ ENDFUNC
+
+read_mpidr FUNCTION
+ mrc p15, 0, r0, c0, c0, 5
+ bx lr
+ ENDFUNC
+
+read_scr FUNCTION
+ mrc p15, 0, r0, c1, c1, 0
+ bx lr
+ ENDFUNC
+
+write_scr FUNCTION
+ mcr p15, 0, r0, c1, c1, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_nsacr FUNCTION
+ mcr p15, 0, r0, c1, c1, 2
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_cpsr FUNCTION
+ mrs r0, CPSR
+ bx lr
+ ENDFUNC
+
+write_cpsr FUNCTION
+ msr CPSR_c, r0
+ bx lr
+ ENDFUNC
+
+write_mvbar FUNCTION
+ mcr p15, 0, r0, c12, c0, 1
+ bx lr
+ ENDFUNC
+
+write_vbar FUNCTION
+ mcr p15, 0, r0, c12, c0, 0
+ bx lr
+ ENDFUNC
+
+write_hvbar FUNCTION
+ mcr p15, 4, r0, c12, c0, 0
+ bx lr
+ ENDFUNC
+
+read_mvbar FUNCTION
+ mrc p15, 0, r0, c12, c0, 1
+ bx lr
+ ENDFUNC
+
+read_vbar FUNCTION
+ mrc p15, 0, r0, c12, c0, 0
+ bx lr
+ ENDFUNC
+
+read_cpuid FUNCTION
+ mrc p15, 0, r0, c0, c0, 5
+ ands r0, r0, #0xf
+ bx lr
+ ENDFUNC
+
+read_clusterid FUNCTION
+ mrc p15, 0, r0, c0, c0, 5
+ lsr r0, r0, #0x8
+ ands r0, r0, #0xf
+ bx lr
+ ENDFUNC
+
+write_ttbr0 FUNCTION
+ mcr p15, 0, r0, c2, c0, 0 ; write TTBR0
+ mcr p15, 0, r0, c7, c5, 6 ; BPIALL: invalidate branch predictor
+ mcr p15, 0, r0, c8, c7, 0 ; TLBIALL: invalidate unified TLB
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_ttbcr FUNCTION
+ mrc p15, 0, r0, c2, c0, 2
+ bx lr
+ ENDFUNC
+
+write_ttbcr FUNCTION
+ mcr p15, 0, r0, c2, c0, 2
+ bx lr
+ ENDFUNC
+
+write_vmpidr FUNCTION
+ mcr p15, 4, r0, c0, c0, 5
+ isb
+ bx lr
+ ENDFUNC
+
+write_vmidr FUNCTION
+ mcr p15, 4, r0, c0, c0, 0
+ isb
+ bx lr
+ ENDFUNC
+
+read_vtcr FUNCTION
+ mrc p15, 4, r0, c2, c1, 2
+ bx lr
+ ENDFUNC
+
+read_hcr FUNCTION
+ mrc p15, 4, r0, c1, c1, 0
+ bx lr
+ ENDFUNC
+
+read_hdcr FUNCTION
+ mrc p15, 4, r0, c1, c1, 1
+ bx lr
+ ENDFUNC
+
+read_hcptr FUNCTION
+ mrc p15, 4, r0, c1, c1, 2
+ bx lr
+ ENDFUNC
+
+read_hstr FUNCTION
+ mrc p15, 4, r0, c1, c1, 3
+ bx lr
+ ENDFUNC
+
+write_hcr FUNCTION
+ mcr p15, 4, r0, c1, c1, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_hdcr FUNCTION
+ mcr p15, 4, r0, c1, c1, 1
+ bx lr
+ ENDFUNC
+
+write_hcptr FUNCTION
+ mcr p15, 4, r0, c1, c1, 2
+ bx lr
+ ENDFUNC
+
+write_hstr FUNCTION
+ mcr p15, 4, r0, c1, c1, 3
+ bx lr
+ ENDFUNC
+
+write_httbr FUNCTION
+ mcrr p15, 4, r0, r1, c2
+ mcr p15, 0, r0, c7, c5, 6
+ mcr p15, 0, r0, c8, c7, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_vttbr FUNCTION
+ mcrr p15, 6, r0, r1, c2
+ mcr p15, 0, r0, c7, c5, 6
+ mcr p15, 0, r0, c8, c7, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_htcr FUNCTION
+ mcr p15, 4, r0, c2, c0, 2
+ bx lr
+ ENDFUNC
+
+write_vtcr FUNCTION
+ mcr p15, 4, r0, c2, c1, 2
+ bx lr
+ ENDFUNC
+
+write_hmair0 FUNCTION
+ mcr p15, 4, r0, c10, c2, 0
+ bx lr
+ ENDFUNC
+
+write_hmair1 FUNCTION
+ mcr p15, 4, r0, c10, c2, 1
+ bx lr
+ ENDFUNC
+
+read_nsacr FUNCTION
+ mrc p15, 0, r0, c1, c1, 2
+ bx lr
+ ENDFUNC
+
+read_sctlr FUNCTION
+ mrc p15, 0, r0, c1, c0, 0
+ bx lr
+ ENDFUNC
+
+write_sctlr FUNCTION
+ mcr p15, 0, r0, c1, c0, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_hsctlr FUNCTION
+ mrc p15, 4, r0, c1, c0, 0
+ bx lr
+ ENDFUNC
+
+read_hdfar FUNCTION
+ mrc p15, 4, r0, c6, c0, 0
+ bx lr
+ ENDFUNC
+
+read_hpfar FUNCTION
+ mrc p15, 4, r0, c6, c0, 4
+ bx lr
+ ENDFUNC
+
+read_hsr FUNCTION
+ mrc p15, 4, r0, c5, c2, 0
+ bx lr
+ ENDFUNC
+
+write_hsctlr FUNCTION
+ mcr p15, 4, r0, c1, c0, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_cnthctl FUNCTION
+ mrc p15, 4, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+read_cntkctl FUNCTION
+ mrc p15, 0, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+read_cnthp_cval FUNCTION
+ mrrc p15, 6, r0, r1, c14
+ bx lr
+ ENDFUNC
+
+read_cnthp_tval FUNCTION
+ mrc p15, 4, r0, c14, c2, 0
+ bx lr
+ ENDFUNC
+
+read_cntp_tval FUNCTION
+ mrc p15, 0, r0, c14, c2, 0
+ bx lr
+ ENDFUNC
+
+read_cntp_ctl FUNCTION
+ mrc p15, 0, r0, c14, c2, 1
+ bx lr
+ ENDFUNC
+
+read_cnthp_ctl FUNCTION
+ mrc p15, 4, r0, c14, c2, 1
+ bx lr
+ ENDFUNC
+
+write_cnthctl FUNCTION
+ mcr p15, 4, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+write_cntkctl FUNCTION
+ mcr p15, 0, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+write_cntp_tval FUNCTION
+ mcr p15, 0, r0, c14, c2, 0
+ isb
+ bx lr
+ ENDFUNC
+
+write_cntp_ctl FUNCTION
+ mcr p15, 0, r0, c14, c2, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_cnthp_cval FUNCTION
+ mcrr p15, 6, r0, r1, c14
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_cnthp_tval FUNCTION
+ mcr p15, 4, r0, c14, c2, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_cnthp_ctl FUNCTION
+ mcr p15, 4, r0, c14, c2, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_clidr FUNCTION
+ mrc p15, 1, r0, c0, c0, 1 ; read clidr
+ bx lr
+ ENDFUNC
+
+read_ccsidr FUNCTION
+ mrc p15, 1, r0, c0, c0, 0 ; read ccsidr
+ bx lr
+ ENDFUNC
+
+read_csselr FUNCTION
+ mrc p15, 2, r0, c0, c0, 0 ; read csselr
+ bx lr
+ ENDFUNC
+
+write_csselr FUNCTION
+ mcr p15, 2, r0, c0, c0, 0 ; read csselr
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_actlr FUNCTION
+ mrc p15, 0, r0, c1, c0, 1
+ bx lr
+ ENDFUNC
+
+write_actlr FUNCTION
+ mcr p15, 0, r0, c1, c0, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_prrr FUNCTION
+ mrc p15, 0, r0, c10, c2, 0
+ bx lr
+ ENDFUNC
+
+read_nmrr FUNCTION
+ mrc p15, 0, r0, c10, c2, 1
+ bx lr
+ ENDFUNC
+
+write_prrr FUNCTION
+ mcr p15, 0, r0, c10, c2, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_nmrr FUNCTION
+ mcr p15, 0, r0, c10, c2, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_dfar FUNCTION
+ mrc p15, 0, r0, c6, c0, 0
+ bx lr
+ ENDFUNC
+
+read_ifar FUNCTION
+ mrc p15, 0, r0, c6, c0, 2
+ bx lr
+ ENDFUNC
+
+read_dfsr FUNCTION
+ mrc p15, 0, r0, c5, c0, 0
+ bx lr
+ ENDFUNC
+
+read_ifsr FUNCTION
+ mrc p15, 0, r0, c5, c0, 1
+ bx lr
+ ENDFUNC
+
+read_adfsr FUNCTION
+ mrc p15, 0, r0, c5, c1, 0
+ bx lr
+ ENDFUNC
+
+read_aifsr FUNCTION
+ mrc p15, 0, r0, c5, c1, 1
+ bx lr
+ ENDFUNC
+
+write_dfar FUNCTION
+ mcr p15, 0, r0, c6, c0, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_ifar FUNCTION
+ mcr p15, 0, r0, c6, c0, 2
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_dfsr FUNCTION
+ mcr p15, 0, r0, c5, c0, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_ifsr FUNCTION
+ mcr p15, 0, r0, c5, c0, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_adfsr FUNCTION
+ mcr p15, 0, r0, c5, c1, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+write_aifsr FUNCTION
+ mcr p15, 0, r0, c5, c1, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+read_lr FUNCTION
+ ; Save r1
+ push {r1}
+ and r0, r0, #0x1f
+ ; Read the current cpsr
+ mrs r1, cpsr
+ and r1, r1, #0x1f
+ ; Check if the desired lr is of the current mode
+ cmp r0, r1
+ moveq r0, LR
+ beq read_lr_out
+ ; Check if desired lr is of user mode
+ cmp r0, #0x10
+ mrseq r0, LR_usr
+ beq read_lr_out
+ ; Check if desired lr is of supervisor mode
+ cmp r0, #0x13
+ mrseq r0, LR_svc
+read_lr_out
+ pop {r1}
+ bx lr
+ ENDFUNC
+
+write_lr FUNCTION
+ ; Save r2
+ push {r2}
+ and r0, r0, #0x1f
+ ; Read the current cpsr
+ mrs r2, cpsr
+ and r2, r2, #0x1f
+ ; Check if the lr is of the current mode
+ cmp r0, r2
+ moveq LR, r1
+ beq write_lr_out
+ ; Check if the lr is of user mode
+ cmp r0, #0x10
+ msreq LR_usr, r1
+ beq write_lr_out
+ ; Check if the lr is of supervisor mode
+ cmp r0, #0x13
+ msreq LR_svc, r1
+write_lr_out
+ pop {r2}
+ bx lr
+ ENDFUNC
+
+read_sp FUNCTION
+ ; Save r1
+ push {r1}
+ and r0, r0, #0x1f
+ ; Read the current cpsr
+ mrs r1, cpsr
+ and r1, r1, #0x1f
+ ; Check if the desired sp is of the current mode
+ cmp r0, r1
+ moveq r0, SP
+ beq read_sp_out
+ ; Check if desired sp is of user mode
+ cmp r0, #0x10
+ mrseq r0, SP_usr
+ beq read_sp_out
+ ; Check if desired sp is of supervisor mode
+ cmp r0, #0x13
+ mrseq r0, SP_svc
+ beq read_sp_out
+ ; Check if desired sp is of irq mode
+ cmp r0, #0x12
+ mrseq r0, SP_irq
+ beq read_sp_out
+ ; Check if desired sp is of hyp mode
+ cmp r0, #0x1a
+ mrseq r0, SP_hyp
+ beq read_sp_out
+ ; Check if desired sp is of monitor mode
+ cmp r0, #0x16
+ mrseq r0, SP_mon
+read_sp_out
+ pop {r1}
+ bx lr
+ ENDFUNC
+
+write_sp FUNCTION
+ ; Save r2
+ push {r2}
+ and r0, r0, #0x1f
+ ; Read the current cpsr
+ mrs r2, cpsr
+ and r2, r2, #0x1f
+ ; Check if the sp is of the current mode
+ cmp r0, r2
+ moveq SP, r1
+ beq write_sp_out
+ ; Check if the sp is of user mode
+ cmp r0, #0x10
+ msreq SP_usr, r1
+ beq write_sp_out
+ ; Check if the sp is of supervisor mode
+ cmp r0, #0x13
+ msreq SP_svc, r1
+ beq write_sp_out
+ ; Check if the sp is of irq mode
+ cmp r0, #0x12
+ msreq SP_irq, r1
+ beq write_sp_out
+ ; Check if the sp is of hyp mode
+ cmp r0, #0x1a
+ msreq SP_hyp, r1
+ beq write_sp_out
+ ; Check if the sp is of monitor mode
+ cmp r0, #0x16
+ msreq SP_mon, r1
+write_sp_out
+ pop {r2}
+ bx lr
+ ENDFUNC
+
+ ALIGN 4
+
+;--------------------------------------------------------
+; spin_lock
+;--------------------------------------------------------
+spin_lock FUNCTION
+ MOV r2, #1
+sl_tryloop
+ LDREX r1, [r0]
+ CMP r1, #0
+ STREXEQ r1, r2, [r0]
+ CMPEQ r1, #0
+ BNE sl_tryloop
+ MCR p15, 0, r0, c7, c10, 4
+ bx lr
+ ENDFUNC
+
+;--------------------------------------------------------
+; spin_trylock
+; Returns 0 if the lock was acquired, non-zero otherwise.
+;--------------------------------------------------------
+spin_trylock FUNCTION
+ MOV r2, #1
+ LDREX r1, [r0]
+ CMP r1, #0
+ STREXEQ r1, r2, [r0]
+ MOV r0, r1
+ MCR p15, 0, r0, c7, c10, 4
+ bx lr
+ ENDFUNC
+
+ ALIGN 4
+
+;--------------------------------------------------------
+; spin_unlock
+;--------------------------------------------------------
+spin_unlock FUNCTION
+ MOV r1, #0
+ STR r1, [r0]
+ MCR p15, 0, r0, c7, c10, 4
+ bx lr
+ ENDFUNC
+
+ ALIGN 4
+
+;--------------------------------------------------------
+; panic
+;--------------------------------------------------------
+panic FUNCTION
+ isb
+ dsb
+ CPSID aif
+ B panic
+ ENDFUNC
+
+;--------------------------------------------------------------
+; Utility function that takes a stack base pointer (r0) and a
+; per-CPU stack size (r1). It returns the stack pointer for the
+; calling CPU, i.e. r0 + (cpu_number + 1) * r1.
+;--------------------------------------------------------------
+get_sp FUNCTION
+ ldr r2, =0x2c001800 ; platform-specific register (assumed GICD_ITARGETSR0)
+ ldr r2, [r2] ; low byte reads back as this CPU's one-hot target mask
+ and r2, r2, #0xff
+ clz r2, r2
+ mov r3, #32
+ sub r2, r3, r2 ; 32 - clz(mask) = cpu_number + 1
+ mul r2, r2, r1 ; offset = (cpu_number + 1) * stack size
+ add r0, r0, r2
+ bx lr
+ ENDFUNC
+
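+ ; disable_coherency/enable_coherency clear/set bit 6 of the ACTLR,
+ ; assumed to be the SMP bit on Cortex-A15/A7 class cores.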
+disable_coherency FUNCTION
+ push {lr}
+ bl read_actlr
+ bic r0, r0, #0x40
+ bl write_actlr
+ isb
+ dsb
+ pop {lr}
+ bx lr
+ ENDFUNC
+
+enable_coherency FUNCTION
+ push {lr}
+ bl read_actlr
+ orr r0, r0, #0x40
+ bl write_actlr
+ isb
+ dsb
+ pop {lr}
+ bx lr
+ ENDFUNC
+
+inv_bpred_is FUNCTION
+ mcr p15, 0, r0, c7, c1, 6
+ bx lr
+ ENDFUNC
+
+inv_bpred_all FUNCTION
+ mcr p15, 0, r0, c7, c5, 6
+ bx lr
+ ENDFUNC
+
+inv_tlb_all FUNCTION
+ mcr p15, 0, r0, c8, c7, 0
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+inv_icache_all FUNCTION
+ mov r0, #0
+ mcr p15, 0, r0, c7, c5, 0 ; ICIALLU: invalidate entire I-cache (register value is ignored)
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+inv_icache_mva_pou FUNCTION
+ mcr p15, 0, r0, c7, c5, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+cln_dcache_mva_pou FUNCTION
+ mcr p15, 0, r0, c7, c11, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+cln_dcache_mva_poc FUNCTION
+ mcr p15, 0, r0, c7, c10, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+inv_dcache_mva_poc FUNCTION
+ mcr p15, 0, r0, c7, c6, 1
+ isb
+ dsb
+ bx lr
+ ENDFUNC
+
+ ; Clean/Invalidate/Clean and invalidate a specified cache level by set/way.
+ ; r0: cache level (0 = level 1 cache), r1: op type (INV, CLN or CLN_INV).
+ ; The operation is skipped if the level does not exist or is an i-cache only.
+cache_maint_op FUNCTION
+ push {r4-r11}
+ dsb
+ lsl r10, r0, #1 ; cache level in CSSELR format (level << 1)
+ mrc p15, 1, r0, c0, c0, 1 ; read clidr
+10
+ add r2, r10, r10, lsr #1 ; work out 3x current cache level
+ mov r3, r0, lsr r2 ; extract cache type bits from clidr
+ and r3, r3, #7 ; mask off the bits for current cache only
+ cmp r3, #2 ; see what cache we have at this level
+ blt %f50 ; skip if no cache, or just i-cache
+ mcr p15, 2, r10, c0, c0, 0 ; select current cache level in csselr
+ isb ; isb to sync the change to csselr/ccsidr
+ mrc p15, 1, r3, c0, c0, 0 ; read the new ccsidr
+ and r2, r3, #7 ; extract the length of the cache lines
+ add r2, r2, #4 ; add 4 (line length offset)
+ ldr r4, =0x3ff
+ ands r4, r4, r3, lsr #3 ; find maximum way number (associativity - 1)
+ clz r5, r4 ; find bit position of way size increment
+ ldr r7, =0x7fff
+ ands r7, r7, r3, lsr #13 ; find maximum set index (number of sets - 1)
+20
+ mov r9, r4 ; create working copy of max way number
+30
+ orr r11, r10, r9, lsl r5 ; factor way and cache level into r11
+ orr r11, r11, r7, lsl r2 ; factor set index into r11
+ cmp r1, #INV
+ mcreq p15, 0, r11, c7, c6, 2 ; invalidate by set/way
+ beq %f40
+ cmp r1, #CLN
+ mcreq p15, 0, r11, c7, c10, 2 ; clean by set/way
+ beq %f40
+ mcr p15, 0, r11, c7, c14, 2 ; clean & invalidate by set/way
+40
+ subs r9, r9, #1 ; decrement the way
+ bge %b30
+ subs r7, r7, #1 ; decrement the set index
+ bge %b20
+50
+ mov r10, #0 ; switch back to cache level 0
+ mcr p15, 2, r10, c0, c0, 0 ; select cache level 0 in csselr
+ dsb
+ isb
+ pop {r4-r11}
+ bx lr
+ ENDFUNC
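+ ; Usage sketch: clean and invalidate the level 1 cache.
+ ;     mov r0, #0 ; cache level (0 = level 1)
+ ;     mov r1, #CLN_INV ; op type
+ ;     bl cache_maint_op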
+
+enable_user_perfmon_access FUNCTION ; V7 and above
+ mov r0, #1
+ mcr p15, 0, r0, c9, c14, 0 ; write PMUSERENR enable
+ bx lr
+ ENDFUNC
+
+enable_perfmon FUNCTION ; V7 and above
+ mov r0, #(1<<1)+(1<<2)+(1<<4) ; set C, P, X bits
+ mcr p15, 0, r0, c9, c12, 0 ; PMCR
+ mov r0, #(1<<31) ; cycle counter enable
+ mcr p15, 0, r0, c9, c12, 1 ; PMCNTENSET
+ bx lr
+ ENDFUNC
+
+enable_swp FUNCTION ; V7 and above
+ mrc p15, 0, r0, c1, c0, 0
+ orr r0, #0x400
+ mcr p15, 0, r0, c1, c0, 0
+ bx lr
+ ENDFUNC
+
+enter_monitor_mode FUNCTION
+ mov r0, sp ; Save current sp
+ mov r2, lr ; Save current lr
+ mrs r1, cpsr ; Get current mode (SVC) in r1
+ bic r3, r1, #0x1f ; Clear all mode bits
+ orr r3, r3, #0x16 ; Set bits for Monitor mode
+ msr cpsr_cxsf, r3 ; We are now in Monitor Mode
+ mov sp, r0 ; Use the same sp as before
+ mov lr, r2 ; Use the same lr as before
+ msr spsr_cxsf, r1 ; Use saved mode for the MOVS jump to the kernel
+ bx lr
+ ENDFUNC
+
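+ ; Enter the Non-secure world.
+ ; r0: address of a routine to call once in HYP mode; when it returns,
+ ; execution drops to SVC (the mode taken from the saved SPSR) with the
+ ; caller's original sp and lr restored.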
+enter_nonsecure_world FUNCTION
+ push {r4-r7}
+ mov r4, sp ; Save current sp
+ mov r5, lr ; Save current lr
+ mrs r6, spsr ; Get target mode (SVC) in r6
+ bic r7, r6, #0x1f ; Clear all mode bits
+ orr r7, r7, #0x1A ; Set bits for HYP mode
+ msr spsr_cxsf, r7
+ adr lr, hyp_entry
+ movs pc, lr
+hyp_entry ; We are now in HYP mode
+ ; Set the HYP spsr to itself, so that the entry point
+ ; does not see the difference between a function call
+ ; and an exception return.
+ msr spsr_cxsf, r7
+ blx r0
+ msr spsr_cxsf, r6 ; Setup SPSR to jump to NS SVC mode
+ adr r7, ns_svc_entry
+ msr elr_hyp, r7
+ ERET
+ns_svc_entry
+ mov sp, r4
+ mov lr, r5
+ pop {r4-r7}
+ bx lr
+ ENDFUNC
+
+enable_pmu FUNCTION
+ mov r0, #0x0000003f
+ mrc p15, 0, r1, c9, c14, 2 ; Disable overflow interrupts
+ orr r1, r1, r0
+ mcr p15, 0, r1, c9, c14, 2 ; Disable overflow interrupts
+ isb
+ mrc p15, 0, r1, c9, c12, 3 ; Clear overflow flags
+ orr r1, r1, r0
+ mcr p15, 0, r1, c9, c12, 3 ; Clear overflow flags
+ isb
+ mrc p15, 0, r1, c9, c12, 1 ; Enable counters
+ orr r1, r1, r0
+ mcr p15, 0, r1, c9, c12, 1 ; Enable counters
+ isb
+ mov r0, #0x3
+ mrc p15, 0, r1, c9, c12, 0 ; Read PMCR
+ orr r1, r1, r0
+ mcr p15, 0, r1, c9, c12, 0 ; Reset and Master Enable counters
+ bx lr
+ ENDFUNC
+
+ END