summaryrefslogtreecommitdiff
path: root/acsr
diff options
context:
space:
mode:
authorRobin Randhawa <robin.randhawa@arm.com>2011-10-12 16:07:02 +0100
committerRobin Randhawa <robin.randhawa@arm.com>2011-10-12 16:07:02 +0100
commit0656dea51f48c51a57e77187de4d5f66a6ba1337 (patch)
tree5b93e8967b0aa1cadd2724689346c6251cb47669 /acsr
Initial commit of the virtualizer v2.0 release.
This will be the basis for the VSM.
Diffstat (limited to 'acsr')
-rw-r--r--acsr/c_helpers.c96
-rw-r--r--acsr/helpers.h228
-rw-r--r--acsr/helpers.s1069
-rw-r--r--acsr/v7.s650
-rw-r--r--acsr/v7_c.c279
5 files changed, 2322 insertions, 0 deletions
diff --git a/acsr/c_helpers.c b/acsr/c_helpers.c
new file mode 100644
index 0000000..5137140
--- /dev/null
+++ b/acsr/c_helpers.c
@@ -0,0 +1,96 @@
+/*
+ Copyright (c) 2009-11, ARM Limited. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of ARM nor the names of its contributors may be used to
+ endorse or promote products derived from this software without specific
+ prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+*/
+
+
+/**
+ * Lamport's Bakery algorithm for spinlock handling
+ *
+ * Note that the algorithm requires the stack and the bakery struct
+ * to be in Strongly-Ordered memory.
+ */
+
+#include "appf_types.h"
+#include "appf_internals.h"
+#include "appf_helpers.h"
+
+/**
+ * Initialize a bakery - only required if the bakery_t is
+ * on the stack or heap, as static data is zeroed anyway.
+ */
+void initialize_spinlock(bakery_t *bakery)
+{
+ /* Clear every ticket and every entering flag: a fully-zeroed
+  * bakery is the unlocked state, so no CPU appears to hold or
+  * be requesting the lock. Only needed for stack/heap instances;
+  * static instances are zero-initialized by the loader anyway. */
+ appf_memset(bakery, 0, sizeof *bakery);
+}
+
+/**
+ * Claim a bakery lock. Function does not return until
+ * lock has been obtained.
+ */
+void get_spinlock(unsigned cpuid, bakery_t *bakery)
+{
+ unsigned i, max=0, my_full_number, his_full_number;
+
+ /* Get a ticket: advertise that we are choosing (entering[cpuid]),
+  * then take a number one greater than any number currently held.
+  * The entering flag closes the race where two CPUs read the same
+  * max concurrently: waiters below will not examine our number
+  * until the flag is clear again.
+  * NOTE(review): correctness relies on these loads/stores actually
+  * reaching memory in program order — the file header says the
+  * bakery struct must live in Strongly-Ordered memory; bakery_t is
+  * declared in appf_types.h, presumably with volatile members so
+  * the compiler cannot cache or reorder the accesses — confirm. */
+ bakery->entering[cpuid] = TRUE;
+ for (i=0; i<MAX_CPUS; ++i)
+ {
+ if (bakery->number[i] > max)
+ {
+ max = bakery->number[i];
+ }
+ }
+ ++max;
+ bakery->number[cpuid] = max;
+ bakery->entering[cpuid] = FALSE;
+
+ /* Wait for our turn: ties on the ticket number are broken by CPU id,
+  * so the comparable "full number" is (ticket << 8) | cpuid. This
+  * assumes cpuid < 256 so the id cannot carry into the ticket field. */
+ my_full_number = (max << 8) + cpuid;
+ for (i=0; i<MAX_CPUS; ++i)
+ {
+ /* First wait until CPU i has finished choosing its ticket,
+  * otherwise we might read a stale number and overtake it. */
+ while(bakery->entering[i]); /* Wait */
+ do
+ {
+ his_full_number = bakery->number[i];
+ if (his_full_number)
+ {
+ his_full_number = (his_full_number << 8) + i;
+ }
+ }
+ /* Spin while CPU i holds a ticket that outranks ours
+  * (zero means CPU i is not competing for the lock). */
+ while(his_full_number && (his_full_number < my_full_number));
+ }
+ /* Barrier: make sure no access inside the critical section is
+  * observed before the lock is actually owned. */
+ dmb();
+}
+
+/**
+ * Release a bakery lock.
+ */
+void release_spinlock(unsigned cpuid, bakery_t *bakery)
+{
+ /* Barrier first: all critical-section accesses must be visible
+  * before other CPUs can observe the ticket being surrendered. */
+ dmb();
+ /* Dropping our ticket to zero is the release: waiters in
+  * get_spinlock() treat number[i] == 0 as "not competing". */
+ bakery->number[cpuid] = 0;
+}
diff --git a/acsr/helpers.h b/acsr/helpers.h
new file mode 100644
index 0000000..0c7fd40
--- /dev/null
+++ b/acsr/helpers.h
@@ -0,0 +1,228 @@
+/*
+ Copyright (c) 2009-11, ARM Limited. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of ARM nor the names of its contributors may be used to
+ endorse or promote products derived from this software without specific
+ prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/*
+ * V7 functions
+ */
+extern void save_control_registers(unsigned *pointer, int is_secure);
+extern void save_mmu(unsigned *pointer);
+extern void save_mpu(unsigned *pointer);
+extern void save_performance_monitors(unsigned *pointer);
+extern void save_banked_registers(unsigned *pointer);
+extern void save_cp15(unsigned *pointer);
+extern void save_vfp(unsigned *pointer);
+extern void save_generic_timer(unsigned *pointer, int is_hyp);
+extern void save_v7_debug(unsigned *pointer);
+extern void save_fault_status(unsigned *pointer);
+
+extern void restore_control_registers(unsigned *pointer, int is_secure);
+extern void restore_mmu(unsigned *pointer);
+extern void restore_mpu(unsigned *pointer);
+extern void restore_performance_monitors(unsigned *pointer);
+extern void restore_banked_registers(unsigned *pointer);
+extern void restore_cp15(unsigned *pointer);
+extern void restore_vfp(unsigned *pointer);
+extern void restore_generic_timer(unsigned *pointer, int is_hyp);
+extern void restore_v7_debug(unsigned *pointer);
+extern void restore_fault_status(unsigned *pointer);
+
+extern unsigned va_to_pa(unsigned virtual_address);
+extern unsigned get_cpu_type(void);
+
+extern unsigned read_mpidr(void);
+extern unsigned read_sctlr(void);
+extern unsigned read_actlr(void);
+extern unsigned read_prrr(void);
+extern unsigned read_nmrr(void);
+extern unsigned read_l2ctlr(void);
+extern unsigned read_mvbar(void);
+extern unsigned read_cbar(void);
+extern unsigned read_drar(void);
+extern unsigned read_dsar(void);
+extern unsigned read_teehbr(void);
+extern unsigned read_l2ectlr(void);
+extern unsigned read_pmuserenr(void);
+extern unsigned read_pmintenset(void);
+extern unsigned read_pmintenclr(void);
+extern unsigned read_pmovsset(void);
+extern unsigned read_pmccntr(void);
+extern unsigned read_pmxevtyper(void);
+extern unsigned read_pmxevcntr(void);
+extern unsigned read_pmcr(void);
+extern unsigned read_pmcntenset(void);
+extern unsigned read_pmcntenclr(void);
+extern unsigned read_pmovsr(void);
+extern unsigned read_pmswinc(void);
+extern unsigned read_pmselr(void);
+extern unsigned read_pmceid0(void);
+extern unsigned read_pmceid1(void);
+extern unsigned read_dfar(void);
+extern unsigned read_ifar(void);
+extern unsigned read_dfsr(void);
+extern unsigned read_ifsr(void);
+extern unsigned read_adfsr(void);
+extern unsigned read_aifsr(void);
+extern unsigned read_cntfrq(void);
+extern unsigned read_hsctlr(void);
+extern unsigned read_hsr(void);
+extern unsigned read_dacr(void);
+extern unsigned read_ttbr0(void);
+extern unsigned read_cpacr(void);
+extern unsigned read_scr(void);
+extern unsigned read_cpsr(void);
+extern unsigned read_midr(void);
+extern unsigned read_vmpidr(void);
+extern unsigned read_vmidr(void);
+extern unsigned read_id_pfr0(void);
+extern unsigned read_id_pfr1(void);
+extern unsigned read_id_dfr0(void);
+extern unsigned read_id_afr0(void);
+extern unsigned read_id_mmfr0(void);
+extern unsigned read_id_mmfr1(void);
+extern unsigned read_id_mmfr2(void);
+extern unsigned read_id_mmfr3(void);
+extern unsigned read_id_isar0(void);
+extern unsigned read_id_isar1(void);
+extern unsigned read_id_isar2(void);
+extern unsigned read_id_isar3(void);
+extern unsigned read_id_isar4(void);
+extern unsigned read_id_isar5(void);
+extern unsigned read_aidr(void);
+extern unsigned read_vbar(void);
+extern unsigned read_ctr(void);
+extern unsigned read_tcmtr(void);
+extern unsigned read_tlbtr(void);
+extern unsigned read_hcr(void);
+extern unsigned read_hdcr(void);
+extern unsigned read_hcptr(void);
+extern unsigned read_hstr(void);
+extern unsigned read_vtcr(void);
+extern unsigned read_hdfar(void);
+extern unsigned read_hpfar(void);
+extern unsigned read_cpsr(void);
+extern unsigned read_cpuid(void);
+extern unsigned read_clusterid(void);
+extern unsigned read_clidr(void);
+extern unsigned read_ccsidr(void);
+extern unsigned read_csselr(void);
+extern unsigned read_nsacr(void);
+extern unsigned read_ttbr0(void);
+extern unsigned read_ttbcr(void);
+extern unsigned read_cnthctl(void);
+/* CNTKCTL accessors are implemented and EXPORTed by helpers.s but were
+ * missing from this header. */
+extern unsigned read_cntkctl(void);
+extern void write_cntkctl(unsigned);
+/* CNTHP_CVAL is a 64-bit register; the helpers.s implementation uses
+ * MRRC and returns the value in r0:r1, so the prototype must be
+ * "unsigned long long" ("unsigned long" is 32-bit under AAPCS and
+ * silently discarded the upper word), matching read_httbr/read_vttbr
+ * and read_cntpct below. */
+extern unsigned long long read_cnthp_cval(void);
+extern unsigned read_cnthp_tval(void);
+extern unsigned read_cnthp_ctl(void);
+extern unsigned read_cntp_ctl(void);
+extern unsigned read_cntp_tval(void);
+extern unsigned long long read_httbr(void);
+extern unsigned long long read_vttbr(void);
+extern unsigned long long read_cntpct(void);
+
+extern void dsb(void);
+extern void dmb(void);
+extern void wfi(void);
+extern void endless_wfi(void);
+extern void wfe(void);
+extern void sev(void);
+extern void isb(void);
+
+extern void write_osdlr(unsigned value);
+extern void write_sctlr(unsigned value);
+extern void write_actlr(unsigned value);
+extern void write_nsacr(unsigned);
+extern void write_ttbr0(unsigned);
+extern void write_ttbcr(unsigned);
+extern void write_cntfrq(unsigned);
+extern void write_cnthctl(unsigned);
+extern void write_cnthp_cval(unsigned, unsigned);
+extern void write_cnthp_tval(unsigned);
+extern void write_cnthp_ctl(unsigned);
+extern void write_cntp_ctl(unsigned);
+extern void write_cntp_tval(unsigned);
+extern void write_csselr(unsigned);
+extern void write_hcr(unsigned);
+extern void write_hdcr(unsigned);
+extern void write_hcptr(unsigned);
+extern void write_hstr(unsigned);
+extern void write_hsctlr(unsigned);
+extern void write_httbr(unsigned long long);
+extern void write_vttbr(unsigned long long);
+extern void write_htcr(unsigned);
+extern void write_vtcr(unsigned);
+extern void write_hmair0(unsigned);
+extern void write_hmair1(unsigned);
+extern void write_vmpidr(unsigned);
+extern void write_vmidr(unsigned);
+extern void write_dacr(unsigned);
+extern void write_ttbr0(unsigned);
+extern void write_cpacr(unsigned);
+extern void write_nsacr(unsigned);
+extern void write_scr(unsigned);
+extern void write_mvbar(unsigned);
+extern void write_hvbar(unsigned);
+extern void write_vbar(unsigned);
+extern void write_prrr(unsigned);
+extern void write_nmrr(unsigned);
+extern void write_dfar(unsigned);
+extern void write_ifar(unsigned);
+extern void write_dfsr(unsigned);
+extern void write_ifsr(unsigned);
+extern void write_adfsr(unsigned);
+extern void write_aifsr(unsigned);
+extern void write_l2ectlr(unsigned);
+extern void write_pmuserenr(unsigned);
+extern void write_pmintenset(unsigned);
+extern void write_pmintenclr(unsigned);
+extern void write_pmovsset(unsigned);
+extern void write_pmccntr(unsigned);
+extern void write_pmxevtyper(unsigned);
+extern void write_pmxevcntr(unsigned);
+extern void write_pmcr(unsigned);
+extern void write_pmcntenset(unsigned);
+extern void write_pmcntenclr(unsigned);
+extern void write_pmovsr(unsigned);
+extern void write_pmswinc(unsigned);
+extern void write_pmselr(unsigned);
+extern void write_pmceid0(unsigned);
+extern void write_pmceid1(unsigned);
+extern void write_osdlr(unsigned value);
+
+extern unsigned * copy_words(volatile unsigned *destination, volatile unsigned *source, unsigned num_words);
+extern void appf_memcpy(void *dst, const void *src, unsigned length);
+extern void appf_memset(void *dst, unsigned value, unsigned length);
+
+extern void initialize_spinlock(bakery_t *bakery);
+extern void get_spinlock(unsigned cpuid, bakery_t *bakery);
+extern void release_spinlock(unsigned cpuid, bakery_t *bakery);
+
+/*
+ * GCC Compatibility
+ */
+#ifndef __ARMCC_VERSION
+#define __nop() __asm__ __volatile__( "nop\n" )
+#endif
diff --git a/acsr/helpers.s b/acsr/helpers.s
new file mode 100644
index 0000000..c19fa48
--- /dev/null
+++ b/acsr/helpers.s
@@ -0,0 +1,1069 @@
+; Copyright (c) 2009-11, ARM Limited. All rights reserved.
+;
+; Redistribution and use in source and binary forms, with or without
+; modification, are permitted provided that the following conditions are met:
+;
+; * Redistributions of source code must retain the above copyright notice,
+; this list of conditions and the following disclaimer.
+; * Redistributions in binary form must reproduce the above copyright notice,
+; this list of conditions and the following disclaimer in the documentation
+; and/or other materials provided with the distribution.
+; * Neither the name of ARM nor the names of its contributors may be used to
+; endorse or promote products derived from this software without specific
+; prior written permission.
+;
+; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+; ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+; LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+; POSSIBILITY OF SUCH DAMAGE.
+
+ EXPORT isb
+ EXPORT dsb
+ EXPORT dmb
+ EXPORT wfi
+ EXPORT endless_wfi
+ EXPORT wfe
+ EXPORT sev
+ EXPORT copy_words
+ EXPORT appf_memcpy
+ EXPORT appf_memset
+ EXPORT get_cpu_type
+ EXPORT va_to_pa
+
+ EXPORT read_drar
+ EXPORT read_dsar
+ EXPORT read_cbar
+ EXPORT read_sctlr
+ EXPORT read_actlr
+ EXPORT read_prrr
+ EXPORT read_nmrr
+ EXPORT read_l2ctlr
+ EXPORT read_dacr
+ EXPORT read_ttbr0
+ EXPORT read_cpacr
+ EXPORT read_scr
+ EXPORT read_cpsr
+ EXPORT read_midr
+ EXPORT read_mpidr
+ EXPORT read_cntpct
+ EXPORT read_cntfrq
+ EXPORT read_vmpidr
+ EXPORT read_vmidr
+ EXPORT read_vttbr
+ EXPORT read_httbr
+ EXPORT read_id_pfr0
+ EXPORT read_id_pfr1
+ EXPORT read_id_dfr0
+ EXPORT read_id_afr0
+ EXPORT read_id_mmfr0
+ EXPORT read_id_mmfr1
+ EXPORT read_id_mmfr2
+ EXPORT read_id_mmfr3
+ EXPORT read_id_isar0
+ EXPORT read_id_isar1
+ EXPORT read_id_isar2
+ EXPORT read_id_isar3
+ EXPORT read_id_isar4
+ EXPORT read_id_isar5
+ EXPORT read_cpuid
+ EXPORT read_aidr
+ EXPORT read_ctr
+ EXPORT read_tcmtr
+ EXPORT read_tlbtr
+ EXPORT read_clusterid
+ EXPORT read_l2ctlr
+ EXPORT read_hsctlr
+ EXPORT read_hdfar
+ EXPORT read_hpfar
+ EXPORT read_vtcr
+ EXPORT read_hcr
+ EXPORT read_hdcr
+ EXPORT read_hcptr
+ EXPORT read_hstr
+ EXPORT read_cnthctl
+ EXPORT read_cntkctl
+ EXPORT read_cntp_ctl
+ EXPORT read_cntp_tval
+ EXPORT read_cnthp_ctl
+ EXPORT read_cnthp_tval
+ EXPORT read_cnthp_cval
+ EXPORT read_ttbcr
+ EXPORT read_nsacr
+ EXPORT read_clidr
+ EXPORT read_csselr
+ EXPORT read_ccsidr
+ EXPORT read_nmrr
+ EXPORT read_prrr
+ EXPORT read_mvbar
+ EXPORT read_vbar
+ EXPORT read_hsr
+ EXPORT read_dfar
+ EXPORT read_ifar
+ EXPORT read_dfsr
+ EXPORT read_ifsr
+ EXPORT read_adfsr
+ EXPORT read_aifsr
+ EXPORT read_l2ectlr
+ EXPORT read_pmuserenr
+ EXPORT read_pmintenset
+ EXPORT read_pmintenclr
+ EXPORT read_pmovsset
+ EXPORT read_pmccntr
+ EXPORT read_pmxevtyper
+ EXPORT read_pmxevcntr
+ EXPORT read_pmcr
+ EXPORT read_pmcntenset
+ EXPORT read_pmcntenclr
+ EXPORT read_pmovsr
+ EXPORT read_pmswinc
+ EXPORT read_pmselr
+ EXPORT read_pmceid0
+ EXPORT read_pmceid1
+
+ EXPORT write_l2ectlr
+ EXPORT write_pmuserenr
+ EXPORT write_pmintenset
+ EXPORT write_pmintenclr
+ EXPORT write_pmovsset
+ EXPORT write_pmccntr
+ EXPORT write_pmxevtyper
+ EXPORT write_pmxevcntr
+ EXPORT write_pmcr
+ EXPORT write_pmcntenset
+ EXPORT write_pmcntenclr
+ EXPORT write_pmovsr
+ EXPORT write_pmswinc
+ EXPORT write_pmselr
+ EXPORT write_pmceid0
+ EXPORT write_pmceid1
+ EXPORT write_dacr
+ EXPORT write_prrr
+ EXPORT write_nmrr
+ EXPORT write_ttbr0
+ EXPORT write_cpacr
+ EXPORT write_nsacr
+ EXPORT write_scr
+ EXPORT write_mvbar
+ EXPORT write_vbar
+ EXPORT write_hvbar
+ EXPORT write_vmpidr
+ EXPORT write_vmidr
+ EXPORT write_csselr
+ EXPORT write_hcr
+ EXPORT write_hdcr
+ EXPORT write_hcptr
+ EXPORT write_hstr
+ EXPORT write_sctlr
+ EXPORT write_actlr
+ EXPORT write_osdlr
+ EXPORT write_ttbcr
+ EXPORT write_cntfrq
+ EXPORT write_cnthctl
+ EXPORT write_cntkctl
+ EXPORT write_cntp_ctl
+ EXPORT write_cntp_tval
+ EXPORT write_cnthp_ctl
+ EXPORT write_cnthp_tval
+ EXPORT write_cnthp_cval
+ EXPORT write_hsctlr
+ EXPORT write_httbr
+ EXPORT write_vttbr
+ EXPORT write_htcr
+ EXPORT write_vtcr
+ EXPORT write_hmair0
+ EXPORT write_hmair1
+ EXPORT write_dfar
+ EXPORT write_ifar
+ EXPORT write_dfsr
+ EXPORT write_ifsr
+ EXPORT write_adfsr
+ EXPORT write_aifsr
+
+
+MIDR_CPU_MASK EQU 0xff00fff0
+
+ AREA ACSR, CODE, ALIGN=5
+
+
+dmb FUNCTION
+ dmb
+ bx lr
+ ENDFUNC
+
+wfi FUNCTION
+ wfi
+ bx lr
+ ENDFUNC
+
+ ; WFI forever, and attempt to prevent speculative accesses starting
+ ; FIQ and IRQ are assumed to be disabled
+endless_wfi FUNCTION
+ b wfistart
+
+ ; Park the CPU forever in a DSB/WFI loop. The loop is placed in its
+ ; own 32-byte-aligned block, entered via a forward branch, so that
+ ; sequential prefetch beyond the WFI stays within this tiny region
+ ; (see the speculative-access note above). Callers must have FIQ and
+ ; IRQ masked, otherwise WFI could wake and fall back into the loop.
+ ALIGN 32
+wfistart
+ b bloop
+loop
+ dsb
+ wfi
+bloop
+ b loop
+ ENDFUNC
+
+wfe FUNCTION
+ wfe
+ bx lr
+ ENDFUNC
+
+sev FUNCTION
+ sev
+ bx lr
+ ENDFUNC
+
+ ; This function takes three arguments
+ ; r0: Destination start address (must be word aligned)
+ ; r1: Source start address (must be word aligned)
+ ; r2: Number of words to copy
+ ; Return value is updated destination pointer (first unwritten word)
+ ; Note: r0 is both the destination and the return value; the
+ ; post-indexed stores leave it pointing just past the last word.
+copy_words FUNCTION
+ cmp r2, #0 ; zero-length copy: return r0 unchanged
+ beq %f1
+0 ldr r3, [r1], #4 ; load word, advance source
+ str r3, [r0], #4 ; store word, advance destination
+ subs r2, r2, #1
+ bne %b0
+1 bx lr
+ ENDFUNC
+
+
+ ; Byte-wise memcpy substitute (no alignment requirements).
+ ; r0: destination, r1: source, r2: byte count. No return value.
+ ; Regions must not overlap (copies ascending, like memcpy).
+appf_memcpy FUNCTION
+ cmp r2, #0 ; nothing to do for length 0
+ bxeq lr
+0 ldrb r3, [r1], #1
+ strb r3, [r0], #1
+ subs r2, #1
+ bne %b0
+ bx lr
+ ENDFUNC
+
+ ; Byte-wise memset substitute.
+ ; r0: destination, r1: fill value (low byte used), r2: byte count.
+appf_memset FUNCTION
+ cmp r2, #0 ; nothing to do for length 0
+ bxeq lr
+0 strb r1, [r0], #1
+ subs r2, #1
+ bne %b0
+ bx lr
+ ENDFUNC
+
+read_cntfrq FUNCTION
+ mrc p15, 0, r0, c14, c0, 0
+ bx lr
+ ENDFUNC
+
+write_cntfrq FUNCTION
+ mcr p15, 0, r0, c14, c0, 0
+ bx lr
+ ENDFUNC
+
+read_cntpct FUNCTION
+ mrrc p15, 0, r0, r1, c14
+ bx lr
+ ENDFUNC
+
+isb FUNCTION
+ isb
+ bx lr
+ ENDFUNC
+
+read_vmpidr FUNCTION
+ mrc p15, 4, r0, c0, c0, 5
+ bx lr
+ ENDFUNC
+
+read_vmidr FUNCTION
+ mrc p15, 4, r0, c0, c0, 0
+ bx lr
+ ENDFUNC
+
+read_id_pfr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 0
+ bx lr
+ ENDFUNC
+
+read_id_pfr1 FUNCTION
+ mrc p15, 0, r0, c0, c1, 1
+ bx lr
+ ENDFUNC
+
+read_id_dfr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 2
+ bx lr
+ ENDFUNC
+
+read_id_afr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 3
+ bx lr
+ ENDFUNC
+
+read_id_mmfr0 FUNCTION
+ mrc p15, 0, r0, c0, c1, 4
+ bx lr
+ ENDFUNC
+
+read_id_mmfr1 FUNCTION
+ mrc p15, 0, r0, c0, c1, 5
+ bx lr
+ ENDFUNC
+
+read_id_mmfr2 FUNCTION
+ mrc p15, 0, r0, c0, c1, 6
+ bx lr
+ ENDFUNC
+
+read_id_mmfr3 FUNCTION
+ mrc p15, 0, r0, c0, c1, 7
+ bx lr
+ ENDFUNC
+
+read_id_isar0 FUNCTION
+ mrc p15, 0, r0, c0, c2, 0
+ bx lr
+ ENDFUNC
+
+read_id_isar1 FUNCTION
+ mrc p15, 0, r0, c0, c2, 1
+ bx lr
+ ENDFUNC
+
+read_id_isar2 FUNCTION
+ mrc p15, 0, r0, c0, c2, 2
+ bx lr
+ ENDFUNC
+
+read_id_isar3 FUNCTION
+ mrc p15, 0, r0, c0, c2, 3
+ bx lr
+ ENDFUNC
+
+read_id_isar4 FUNCTION
+ mrc p15, 0, r0, c0, c2, 4
+ bx lr
+ ENDFUNC
+
+read_id_isar5 FUNCTION
+ mrc p15, 0, r0, c0, c2, 5
+ bx lr
+ ENDFUNC
+
+read_ctr FUNCTION
+ mrc p15, 0, r0, c0, c0, 1
+ bx lr
+ ENDFUNC
+
+read_tcmtr FUNCTION
+ mrc p15, 0, r0, c0, c0, 2
+ bx lr
+ ENDFUNC
+
+read_tlbtr FUNCTION
+ mrc p15, 0, r0, c0, c0, 3
+ bx lr
+ ENDFUNC
+
+read_aidr FUNCTION
+ mrc p15, 1, r0, c0, c0, 7
+ bx lr
+ ENDFUNC
+
+read_dacr FUNCTION
+ mrc p15, 0, r0, c3, c0, 0
+ bx lr
+ ENDFUNC
+
+read_ttbr0 FUNCTION
+ mrc p15, 0, r0, c2, c0, 0
+ bx lr
+ ENDFUNC
+
+write_dacr FUNCTION
+ mcr p15, 0, r0, c3, c0, 0
+ isb
+ bx lr
+ ENDFUNC
+
+read_cpacr FUNCTION
+ mrc p15, 0, r0, c1, c0, 2
+ bx lr
+ ENDFUNC
+
+write_cpacr FUNCTION
+ mcr p15, 0, r0, c1, c0, 2
+ bx lr
+ ENDFUNC
+
+read_midr FUNCTION
+ mrc p15, 0, r0, c0, c0, 0;
+ bx lr
+ ENDFUNC
+
+read_mpidr FUNCTION
+ mrc p15, 0, r0, c0, c0, 5
+ bx lr
+ ENDFUNC
+
+read_scr FUNCTION
+ mrc p15, 0, r0, c1, c1, 0
+ bx lr
+ ENDFUNC
+
+write_scr FUNCTION
+ mcr p15, 0, r0, c1, c1, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_nsacr FUNCTION
+ mcr p15, 0, r0, c1, c1, 2
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+read_cpsr FUNCTION
+ mrs r0, CPSR
+ bx lr
+ ENDFUNC
+
+write_mvbar FUNCTION
+ mcr p15, 0, r0, c12, c0, 1
+ bx lr
+ ENDFUNC
+
+write_vbar FUNCTION
+ mcr p15, 0, r0, c12, c0, 0
+ bx lr
+ ENDFUNC
+
+write_hvbar FUNCTION
+ mcr p15, 4, r0, c12, c0, 0
+ bx lr
+ ENDFUNC
+
+read_mvbar FUNCTION
+ mrc p15, 0, r0, c12, c0, 1
+ bx lr
+ ENDFUNC
+
+read_vbar FUNCTION
+ mrc p15, 0, r0, c12, c0, 0
+ bx lr
+ ENDFUNC
+
+ ; Return this CPU's number within its cluster:
+ ; MPIDR affinity level 0 (bits [3:0]).
+read_cpuid FUNCTION
+ mrc p15, 0, r0, c0, c0, 5 ; read MPIDR
+ ands r0, r0, #0xf ; keep Aff0
+ bx lr
+ ENDFUNC
+
+ ; Return this CPU's cluster number:
+ ; MPIDR affinity level 1 (bits [11:8]).
+read_clusterid FUNCTION
+ mrc p15, 0, r0, c0, c0, 5 ; read MPIDR
+ lsr r0, r0, #0x8
+ ands r0, r0, #0xf ; keep Aff1
+ bx lr
+ ENDFUNC
+
+ ; Install a new TTBR0, then invalidate branch predictor and TLBs so
+ ; no stale translations or predicted targets survive the switch.
+write_ttbr0 FUNCTION
+ mcr p15, 0, r0, c2, c0, 0 ; write TTBR0
+ mcr p15, 0, r0, c7, c5, 6 ; BPIALL (r0 value is ignored by the op)
+ mcr p15, 0, r0, c8, c7, 0 ; TLBIALL
+ dsb
+ isb ; ensure new translations are in effect on return
+ bx lr
+ ENDFUNC
+
+read_ttbcr FUNCTION
+ mrc p15, 0, r0, c2, c0, 2
+ bx lr
+ ENDFUNC
+
+write_ttbcr FUNCTION
+ mcr p15, 0, r0, c2, c0, 2
+ bx lr
+ ENDFUNC
+
+write_vmpidr FUNCTION
+ mcr p15, 4, r0, c0, c0, 5
+ isb
+ bx lr
+ ENDFUNC
+
+write_vmidr FUNCTION
+ mcr p15, 4, r0, c0, c0, 0
+ isb
+ bx lr
+ ENDFUNC
+
+read_vtcr FUNCTION
+ mrc p15, 4, r0, c2, c1, 2
+ bx lr
+ ENDFUNC
+
+read_hcr FUNCTION
+ mrc p15, 4, r0, c1, c1, 0
+ bx lr
+ ENDFUNC
+
+read_hdcr FUNCTION
+ mrc p15, 4, r0, c1, c1, 1
+ bx lr
+ ENDFUNC
+
+read_hcptr FUNCTION
+ mrc p15, 4, r0, c1, c1, 2
+ bx lr
+ ENDFUNC
+
+read_hstr FUNCTION
+ mrc p15, 4, r0, c1, c1, 3
+ bx lr
+ ENDFUNC
+
+read_httbr FUNCTION
+ mrrc p15, 4, r0, r1, c2
+ bx lr
+ ENDFUNC
+
+read_vttbr FUNCTION
+ mrrc p15, 6, r0, r1, c2
+ bx lr
+ ENDFUNC
+
+write_hcr FUNCTION
+ mcr p15, 4, r0, c1, c1, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_hdcr FUNCTION
+ mcr p15, 4, r0, c1, c1, 1
+ bx lr
+ ENDFUNC
+
+write_hcptr FUNCTION
+ mcr p15, 4, r0, c1, c1, 2
+ bx lr
+ ENDFUNC
+
+write_hstr FUNCTION
+ mcr p15, 4, r0, c1, c1, 3
+ bx lr
+ ENDFUNC
+
+write_httbr FUNCTION
+ mcrr p15, 4, r0, r1, c2
+ mcr p15, 0, r0, c7, c5, 6
+ mcr p15, 0, r0, c8, c7, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_vttbr FUNCTION
+ mcrr p15, 6, r0, r1, c2
+ mcr p15, 0, r0, c7, c5, 6
+ mcr p15, 0, r0, c8, c7, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_htcr FUNCTION
+ mcr p15, 4, r0, c2, c0, 2
+ bx lr
+ ENDFUNC
+
+write_vtcr FUNCTION
+ mcr p15, 4, r0, c2, c1, 2
+ bx lr
+ ENDFUNC
+
+write_hmair0 FUNCTION
+ mcr p15, 4, r0, c10, c2, 0
+ bx lr
+ ENDFUNC
+
+write_hmair1 FUNCTION
+ mcr p15, 4, r0, c10, c2, 1
+ bx lr
+ ENDFUNC
+
+read_nsacr FUNCTION
+ mrc p15, 0, r0, c1, c1, 2
+ bx lr
+ ENDFUNC
+
+read_l2ctlr FUNCTION
+ mrc p15, 1, r0, c9, c0, 2
+ bx lr
+ ENDFUNC
+
+read_l2ectlr FUNCTION
+ mrc p15, 1, r0, c9, c0, 3
+ bx lr
+ ENDFUNC
+
+read_pmuserenr FUNCTION
+ mrc p15, 0, r0, c9, c14, 0
+ bx lr
+ ENDFUNC
+
+read_pmintenset FUNCTION
+ mrc p15, 0, r0, c9, c14, 1
+ bx lr
+ ENDFUNC
+
+read_pmintenclr FUNCTION
+ mrc p15, 0, r0, c9, c14, 2
+ bx lr
+ ENDFUNC
+
+read_pmovsset FUNCTION
+ mrc p15, 0, r0, c9, c14, 3
+ bx lr
+ ENDFUNC
+
+read_pmccntr FUNCTION
+ mrc p15, 0, r0, c9, c13, 0
+ bx lr
+ ENDFUNC
+
+read_pmxevtyper FUNCTION
+ mrc p15, 0, r0, c9, c13, 1
+ bx lr
+ ENDFUNC
+
+read_pmxevcntr FUNCTION
+ mrc p15, 0, r0, c9, c13, 2
+ bx lr
+ ENDFUNC
+
+read_pmcr FUNCTION
+ mrc p15, 0, r0, c9, c12, 0
+ bx lr
+ ENDFUNC
+
+read_pmcntenset FUNCTION
+ mrc p15, 0, r0, c9, c12, 1
+ bx lr
+ ENDFUNC
+
+read_pmcntenclr FUNCTION
+ mrc p15, 0, r0, c9, c12, 2
+ bx lr
+ ENDFUNC
+
+read_pmovsr FUNCTION
+ mrc p15, 0, r0, c9, c12, 3
+ bx lr
+ ENDFUNC
+
+read_pmswinc FUNCTION
+ mrc p15, 0, r0, c9, c12, 4
+ bx lr
+ ENDFUNC
+
+read_pmselr FUNCTION
+ mrc p15, 0, r0, c9, c12, 5
+ bx lr
+ ENDFUNC
+
+read_pmceid0 FUNCTION
+ mrc p15, 0, r0, c9, c12, 6
+ bx lr
+ ENDFUNC
+
+read_pmceid1 FUNCTION
+ mrc p15, 0, r0, c9, c12, 7
+ bx lr
+ ENDFUNC
+
+write_l2ectlr FUNCTION
+ mcr p15, 1, r0, c9, c0, 3
+ bx lr
+ ENDFUNC
+
+write_pmuserenr FUNCTION
+ mcr p15, 0, r0, c9, c14, 0
+ bx lr
+ ENDFUNC
+
+write_pmintenset FUNCTION
+ mcr p15, 0, r0, c9, c14, 1
+ bx lr
+ ENDFUNC
+
+write_pmintenclr FUNCTION
+ mcr p15, 0, r0, c9, c14, 2
+ bx lr
+ ENDFUNC
+
+write_pmovsset FUNCTION
+ mcr p15, 0, r0, c9, c14, 3
+ bx lr
+ ENDFUNC
+
+write_pmccntr FUNCTION
+ mcr p15, 0, r0, c9, c13, 0
+ bx lr
+ ENDFUNC
+
+write_pmxevtyper FUNCTION
+ mcr p15, 0, r0, c9, c13, 1
+ bx lr
+ ENDFUNC
+
+write_pmxevcntr FUNCTION
+ mcr p15, 0, r0, c9, c13, 2
+ bx lr
+ ENDFUNC
+
+write_pmcr FUNCTION
+ mcr p15, 0, r0, c9, c12, 0
+ bx lr
+ ENDFUNC
+
+write_pmcntenset FUNCTION
+ mcr p15, 0, r0, c9, c12, 1
+ bx lr
+ ENDFUNC
+
+write_pmcntenclr FUNCTION
+ mcr p15, 0, r0, c9, c12, 2
+ bx lr
+ ENDFUNC
+
+write_pmovsr FUNCTION
+ mcr p15, 0, r0, c9, c12, 3
+ bx lr
+ ENDFUNC
+
+write_pmswinc FUNCTION
+ mcr p15, 0, r0, c9, c12, 4
+ bx lr
+ ENDFUNC
+
+write_pmselr FUNCTION
+ mcr p15, 0, r0, c9, c12, 5
+ bx lr
+ ENDFUNC
+
+write_pmceid0 FUNCTION
+ mcr p15, 0, r0, c9, c12, 6
+ bx lr
+ ENDFUNC
+
+write_pmceid1 FUNCTION
+ mcr p15, 0, r0, c9, c12, 7
+ bx lr
+ ENDFUNC
+
+read_sctlr FUNCTION
+ mrc p15, 0, r0, c1, c0, 0
+ bx lr
+ ENDFUNC
+
+write_sctlr FUNCTION
+ mcr p15, 0, r0, c1, c0, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+read_hsctlr FUNCTION
+ mrc p15, 4, r0, c1, c0, 0
+ bx lr
+ ENDFUNC
+
+read_hdfar FUNCTION
+ mrc p15, 4, r0, c6, c0, 0
+ bx lr
+ ENDFUNC
+
+read_hpfar FUNCTION
+ mrc p15, 4, r0, c6, c0, 4
+ bx lr
+ ENDFUNC
+
+read_hsr FUNCTION
+ mrc p15, 4, r0, c5, c2, 0
+ bx lr
+ ENDFUNC
+
+write_hsctlr FUNCTION
+ mcr p15, 4, r0, c1, c0, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+read_cnthctl FUNCTION
+ mrc p15, 4, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+read_cntkctl FUNCTION
+ mrc p15, 0, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+read_cnthp_cval FUNCTION
+ mrrc p15, 6, r0, r1, c14
+ bx lr
+ ENDFUNC
+
+read_cnthp_tval FUNCTION
+ mrc p15, 4, r0, c14, c2, 0
+ bx lr
+ ENDFUNC
+
+read_cntp_tval FUNCTION
+ mrc p15, 0, r0, c14, c2, 0
+ bx lr
+ ENDFUNC
+
+read_cntp_ctl FUNCTION
+ mrc p15, 0, r0, c14, c2, 1
+ bx lr
+ ENDFUNC
+
+read_cnthp_ctl FUNCTION
+ mrc p15, 4, r0, c14, c2, 1
+ bx lr
+ ENDFUNC
+
+write_cnthctl FUNCTION
+ mcr p15, 4, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+write_cntkctl FUNCTION
+ mcr p15, 0, r0, c14, c1, 0
+ bx lr
+ ENDFUNC
+
+write_cntp_tval FUNCTION
+ mcr p15, 0, r0, c14, c2, 0
+ isb
+ bx lr
+ ENDFUNC
+
+write_cntp_ctl FUNCTION
+ mcr p15, 0, r0, c14, c2, 1
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_cnthp_cval FUNCTION
+ mcrr p15, 6, r0, r1, c14
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_cnthp_tval FUNCTION
+ mcr p15, 4, r0, c14, c2, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_cnthp_ctl FUNCTION
+ mcr p15, 4, r0, c14, c2, 1
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+read_clidr FUNCTION
+ mrc p15, 1, r0, c0, c0, 1 ; read clidr
+ bx lr
+ ENDFUNC
+
+read_ccsidr FUNCTION
+ mrc p15, 1, r0, c0, c0, 0 ; read ccsidr
+ bx lr
+ ENDFUNC
+
+read_csselr FUNCTION
+ mrc p15, 2, r0, c0, c0, 0 ; read csselr
+ bx lr
+ ENDFUNC
+
+write_csselr FUNCTION
+ mcr p15, 2, r0, c0, c0, 0 ; write csselr (comment fixed: this is the MCR, not a read)
+ dsb
+ isb ; CSSELR selects which cache CCSIDR reads describe; sync before any such read
+ bx lr
+ ENDFUNC
+
+read_actlr FUNCTION
+ mrc p15, 0, r0, c1, c0, 1
+ bx lr
+ ENDFUNC
+
+write_actlr FUNCTION
+ mcr p15, 0, r0, c1, c0, 1
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+read_prrr FUNCTION
+ mrc p15, 0, r0, c10, c2, 0
+ bx lr
+ ENDFUNC
+
+read_nmrr FUNCTION
+ mrc p15, 0, r0, c10, c2, 1
+ bx lr
+ ENDFUNC
+
+write_prrr FUNCTION
+ mcr p15, 0, r0, c10, c2, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_nmrr FUNCTION
+ mcr p15, 0, r0, c10, c2, 1
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+read_dfar FUNCTION
+ mrc p15, 0, r0, c6, c0, 0
+ bx lr
+ ENDFUNC
+
+read_ifar FUNCTION
+ mrc p15, 0, r0, c6, c0, 2
+ bx lr
+ ENDFUNC
+
+read_dfsr FUNCTION
+ mrc p15, 0, r0, c5, c0, 0
+ bx lr
+ ENDFUNC
+
+read_ifsr FUNCTION
+ mrc p15, 0, r0, c5, c0, 1
+ bx lr
+ ENDFUNC
+
+read_adfsr FUNCTION
+ mrc p15, 0, r0, c5, c1, 0
+ bx lr
+ ENDFUNC
+
+read_aifsr FUNCTION
+ mrc p15, 0, r0, c5, c1, 1
+ bx lr
+ ENDFUNC
+
+write_dfar FUNCTION
+ mcr p15, 0, r0, c6, c0, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_ifar FUNCTION
+ mcr p15, 0, r0, c6, c0, 2
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_dfsr FUNCTION
+ mcr p15, 0, r0, c5, c0, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_ifsr FUNCTION
+ mcr p15, 0, r0, c5, c0, 1
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_adfsr FUNCTION
+ mcr p15, 0, r0, c5, c1, 0
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+write_aifsr FUNCTION
+ mcr p15, 0, r0, c5, c1, 1
+ dsb
+ isb
+ bx lr
+ ENDFUNC
+
+read_cbar FUNCTION
+ mrc p15, 4, r0, c15, c0, 0 ; Read Configuration Base Address Register
+ bx lr
+ ENDFUNC
+
+read_drar FUNCTION
+ mrc p14, 0, r0, c1, c0, 0 ; Read Debug ROM Address Register
+ bx lr
+ ENDFUNC
+
+read_dsar FUNCTION
+ mrc p14, 0, r0, c2, c0, 0 ; Read Debug Self Address Offset Register
+ bx lr
+ ENDFUNC
+
+write_osdlr FUNCTION
+ mcr p14, 0, r0, c1, c3, 4 ; Write OS Double Lock Register
+ bx lr
+ ENDFUNC
+
+ ; Return MIDR with variant (bits [23:20]) and revision (bits [3:0])
+ ; masked off, keeping implementer [31:24], architecture [19:16] is
+ ; also cleared by the 0xff00fff0 mask, and primary part number
+ ; [15:4] — i.e. a value that identifies the CPU model independent
+ ; of silicon revision.
+get_cpu_type FUNCTION
+ mrc p15, 0, r0, c0, c0, 0; read MIDR
+ ldr r1, =MIDR_CPU_MASK
+ ands r0, r1
+ bx lr
+ ENDFUNC
+
+dsb FUNCTION
+ dsb
+ bx lr
+ ENDFUNC
+
+ ; Translate a virtual address to physical using the hardware
+ ; address-translation operation for the current world (privileged
+ ; read). PAR's fault bit is NOT checked — a failed translation
+ ; returns garbage, hence the "assumes success" note.
+va_to_pa FUNCTION ; Note: assumes conversion will be successful!
+ mov r1, r0
+ mcr p15, 0, r0, c7, c8, 1 ; Priv Write Current World VA-PA
+ mrc p15, 0, r0, c7, c4, 0 ; Get PA
+ bfc r0, #0, #12 ; We want top bits of translated addr
+ bfc r1, #12, #20 ; plus bottom bits of input addr
+ orr r0, r0, r1 ; combine page frame with page offset
+ bx lr
+ ENDFUNC
+
+ END
diff --git a/acsr/v7.s b/acsr/v7.s
new file mode 100644
index 0000000..180fc93
--- /dev/null
+++ b/acsr/v7.s
@@ -0,0 +1,650 @@
+; Copyright (c) 2009-11, ARM Limited. All rights reserved.
+;
+; Redistribution and use in source and binary forms, with or without
+; modification, are permitted provided that the following conditions are met:
+;
+; * Redistributions of source code must retain the above copyright notice,
+; this list of conditions and the following disclaimer.
+; * Redistributions in binary form must reproduce the above copyright notice,
+; this list of conditions and the following disclaimer in the documentation
+; and/or other materials provided with the distribution.
+; * Neither the name of ARM nor the names of its contributors may be used to
+; endorse or promote products derived from this software without specific
+; prior written permission.
+;
+; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+; ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+; LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+; POSSIBILITY OF SUCH DAMAGE.
+
+ EXPORT save_performance_monitors
+ EXPORT restore_performance_monitors
+
+ EXPORT save_banked_registers
+ EXPORT restore_banked_registers
+
+ EXPORT save_cp15
+ EXPORT restore_cp15
+
+ EXPORT save_control_registers
+ EXPORT restore_control_registers
+
+ EXPORT save_mmu
+ EXPORT restore_mmu
+
+ EXPORT save_mpu
+ EXPORT restore_mpu
+
+ EXPORT save_vfp
+ EXPORT restore_vfp
+
+ EXPORT save_generic_timer
+ EXPORT restore_generic_timer
+
+ EXPORT save_fault_status
+ EXPORT restore_fault_status
+
+ AREA APPF, CODE
+
+; Aliases for mode encodings - do not change
+MODE_USR EQU 0x10
+MODE_FIQ EQU 0x11
+MODE_IRQ EQU 0x12
+MODE_SVC EQU 0x13
+MODE_MON EQU 0x16 ; A-profile (Security Extensions) only
+MODE_ABT EQU 0x17
+MODE_UND EQU 0x1B
+MODE_SYS EQU 0x1F
+MODE_HYP EQU 0x1A
+
+TTBCR_EAE EQU (1<<31) ; Are we using LPAE?
+
+PFR0_THUMB_EE_SUPPORT EQU (1<<12)
+
+
+; Save the ARMv7 PMU state to [r0], advancing r0.
+; Layout: PMCR, PMOVSR, PMSELR, then one {PMXEVTYPER, PMXEVCNTR} pair per
+; event counter (N pairs, N = PMCR.N), then PMCCNTR, PMUSERENR,
+; PMINTENSET, PMCNTENSET.
+; PMCR.E is cleared first so all counters are frozen while being sampled;
+; the original PMCR (with E as found) is what gets saved/restored.
+save_performance_monitors FUNCTION
+
+        push    {r4, r8, r9, r10}
+
+        ; Ignore:
+        ;   Count Enable Clear Register
+        ;   Software Increment Register
+        ;   Interrupt Enable Clear Register
+
+        mrc     p15,0,r8,c9,c12,0       ; PMon: Control Register
+        bic     r1,r8,#1                ; clear PMCR.E
+        mcr     p15,0,r1,c9,c12,0       ; disable counter updates from here
+        isb                             ; 0b0 => PMCR<0>
+        mrc     p15,0,r9,c9,c12,3       ; PMon: Overflow Flag Status Reg
+        mrc     p15,0,r10,c9,c12,5      ; PMon: Event Counter Selection Reg
+        stm     r0!, {r8-r10}
+        ubfx    r9,r8,#11,#5            ; extract # of event counters, N
+        tst     r9, r9                  ; N == 0: no event counters to save
+        beq     %f1
+
+        ; Per-counter loop: select counter N-1..0 via PMSELR, save its
+        ; event type and count. Flags from 'subs' survive the mcr/isb/mrc.
+0       subs    r9,r9,#1                ; decrement N
+        mcr     p15,0,r9,c9,c12,5       ; PMon: select CounterN
+        isb
+        mrc     p15,0,r3,c9,c13,1       ; PMon: save Event Type register
+        mrc     p15,0,r4,c9,c13,2       ; PMon: save Event Counter register
+        stm     r0!, {r3,r4}
+        bne     %b0
+
+1       mrc     p15,0,r1,c9,c13,0       ; PMon: Cycle Count Register
+        mrc     p15,0,r2,c9,c14,0       ; PMon: User Enable Register
+        mrc     p15,0,r3,c9,c14,1       ; PMon: Interrupt Enable Set Reg
+        mrc     p15,0,r4,c9,c12,1       ; PMon: Count Enable Set Register
+        stm     r0!, {r1-r4}
+
+        pop     {r4, r8, r9, r10}
+        bx      lr
+        ENDFUNC
+
+; Restore the PMU state saved by save_performance_monitors from [r0].
+; The overflow flags (PMOVSR) cannot be written directly on v7.0, so they
+; are reconstructed: each event counter's type is set to 0x00 (SW_INCR)
+; and its count to all-1s, counting is enabled, and a write to PMSWINC
+; with the saved PMOVSR bits increments exactly those counters past
+; overflow, setting their flags. The cycle-counter flag is reproduced the
+; same way by letting an all-1s CCNT tick over. Real types/counts are
+; restored afterwards, and PMCR is restored last.
+restore_performance_monitors FUNCTION
+
+        push    {r4-r5, r8-r10, lr}
+        ; NOTE: all counters disabled by PMCR<0> == 0 on reset
+
+        ; Restore performance counters
+        ldm     r0!,{r8-r10}            ; recover first block of PMon context
+                                        ; (PMCR, PMOVSR, PMSELR)
+        mov     r1, #0                  ; generate register of all 0's
+        mvn     r2, #0                  ; generate register of all 1's
+        mcr     p15,0,r2,c9,c14,2       ; disable all counter related interrupts
+        mcr     p15,0,r2,c9,c12,3       ; clear all overflow flags
+        isb
+
+        ubfx    r12,r8,#11,#5           ; extract # of event counters, N (0-31)
+        tst     r12, r12                ; N == 0: skip event-counter rebuild
+        beq     %f20
+        mov     r3, r12                 ; for N >0, generate a 2nd copy of N
+        mov     r4, #1
+        lsl     r4, r4, r3
+        sub     r4, r4, #1              ; set bits<N-1:0> to all 1's
+
+        ; Prime every event counter: type = SW_INCR (0x00), count = all 1's
+0       subs    r3,r3,#1                ; decrement N
+        mcr     p15,0,r3,c9,c12,5       ; select Event CounterN
+        isb
+        mrc     p15,0,r5,c9,c13,1       ; read Event Type register
+        bfc     r5,#0,#8
+        mcr     p15,0,r5,c9,c13,1       ; set Event Type to 0x0
+        mcr     p15,0,r2,c9,c13,2       ; set Event Counter to all 1's
+        isb
+        bne     %b0
+
+        mov     r3, #1
+        bic     r5, r9, #1<<31          ; event-counter bits of saved PMOVSR
+        mcr     p15,0,r5,c9,c12,1       ; enable Event Counters
+                                        ; (PMOVSR bits set)
+        mcr     p15,0,r3,c9,c12,0       ; set the PMCR global enable bit
+        isb
+        mcr     p15,0,r9,c9,c12,4       ; PMSWINC: tick primed counters past
+        isb                             ; overflow -> sets saved PMOVSR bits
+        mcr     p15,0,r4,c9,c12,2       ; disable Event Counters
+
+        ; restore the event counters
+10      subs    r12,r12,#1              ; decrement N
+        mcr     p15,0,r12,c9,c12,5      ; select Event CounterN
+        isb
+        ldm     r0!,{r3-r4}
+        mcr     p15,0,r3,c9,c13,1       ; restore Event Type
+        mcr     p15,0,r4,c9,c13,2       ; restore Event Counter
+        isb
+        bne     %b10
+
+20      tst     r9, #0x80000000         ; check for cycle count overflow flag
+        beq     %f40
+        mcr     p15,0,r2,c9,c13,0       ; set Cycle Counter to all 1's
+        isb
+        mov     r3, #0x80000000
+        mcr     p15,0,r3,c9,c12,1       ; enable the Cycle Counter
+        isb
+
+        ; spin until the all-1s cycle counter wraps and raises its flag
+30      mrc     p15,0,r4,c9,c12,3       ; check cycle count overflow now set
+        movs    r4,r4                   ; test bit<31>
+        bpl     %b30
+        mcr     p15,0,r3,c9,c12,2       ; disable the Cycle Counter
+
+40      mcr     p15,0,r1,c9,c12,0       ; clear the PMCR global enable bit
+        isb
+
+        ; restore the remaining PMon registers
+        ldm     r0!,{r1-r4}
+        mcr     p15,0,r1,c9,c13,0       ; restore Cycle Count Register
+        mcr     p15,0,r2,c9,c14,0       ; restore User Enable Register
+        mcr     p15,0,r3,c9,c14,1       ; restore Interrupt Enable Set Reg
+        mcr     p15,0,r4,c9,c12,1       ; restore Count Enable Set Register
+        mcr     p15,0,r10,c9,c12,5      ; restore Event Counter Selection
+        isb
+        mcr     p15,0,r8,c9,c12,0       ; restore the PM Control Register
+        isb
+
+        pop     {r4-r5, r8-r10, pc}
+        ENDFUNC
+
+
+; Save the banked (per-mode) registers to [r0], advancing r0. If entered
+; in Hyp mode the Virtualization Extensions MRS encodings are used so no
+; mode switch is needed (save_in_hyp); otherwise the routine CPSes through
+; each mode. The caller's own mode SP/LR/SPSR are live registers and are
+; NOT saved by the non-Hyp path (the Hyp path additionally saves the SVC
+; bank, so the two layouts intentionally differ and a save must be paired
+; with a restore entered in the same kind of mode).
+; Non-Hyp layout (19 words):
+;   USR sp,lr | ABT sp,spsr,lr | UND sp,spsr,lr | IRQ sp,spsr,lr
+;   | FIQ sp,spsr,r8-r12,lr
+; BUGFIX(review): the original read SPSR_fiq into r3 but omitted it from
+; the store list, and the restore path read SPSR from an uninitialised r4.
+; Both are fixed below; the non-Hyp context grew by one word (18 -> 19),
+; so any buffer sized for this context must allow for it.
+save_banked_registers FUNCTION
+        mrs     r2, CPSR                ; save current mode
+        and     r3, r2, #0x1f           ; If we are in HYP mode then use the virt.
+        cmp     r3, #MODE_HYP           ; instructions to save the banked registers
+        beq     save_in_hyp             ; without changing the mode
+
+        cps     #MODE_SYS               ; switch to System mode
+        str     sp,[r0], #4             ; save the User SP
+        str     lr,[r0], #4             ; save the User LR
+        cps     #MODE_ABT               ; switch to Abort mode
+        str     sp,[r0], #4             ; save the current SP
+        mrs     r3,SPSR
+        stm     r0!,{r3,lr}             ; save the current SPSR, LR
+        cps     #MODE_UND               ; switch to Undefined mode
+        str     sp,[r0], #4             ; save the current SP
+        mrs     r3,SPSR
+        stm     r0!,{r3,lr}             ; save the current SPSR, LR
+        cps     #MODE_IRQ               ; switch to IRQ mode
+        str     sp,[r0], #4             ; save the current SP
+        mrs     r3,SPSR
+        stm     r0!,{r3,lr}             ; save the current SPSR, LR
+        cps     #MODE_FIQ               ; switch to FIQ mode
+        str     SP,[r0], #4             ; save the current SP
+        mrs     r3,SPSR
+        stm     r0!,{r3,r8-r12,lr}      ; save the current SPSR,r8-r12,LR
+                                        ; (r3/SPSR was previously dropped)
+        msr     CPSR_cxsf, r2           ; switch back to original mode
+        bx      lr
+
+save_in_hyp
+        mrs     r1, SP_usr
+        stm     r0!, {r1}
+
+        mrs     r1, SP_und
+        mrs     r2, SPSR_und
+        mrs     r3, LR_und
+        stm     r0!, {r1-r3}
+
+        mrs     r1, SP_abt
+        mrs     r2, SPSR_abt
+        mrs     r3, LR_abt
+        stm     r0!, {r1-r3}
+
+        mrs     r1, SP_svc
+        mrs     r2, SPSR_svc
+        mrs     r3, LR_svc
+        stm     r0!, {r1-r3}
+
+        mrs     r1, SP_irq
+        mrs     r2, SPSR_irq
+        mrs     r3, LR_irq
+        stm     r0!, {r1-r3}
+
+        mrs     r1, SP_fiq
+        mrs     r2, SPSR_fiq
+        mrs     r3, LR_fiq
+        stm     r0!, {r1-r3}
+        mrs     r1, r8_fiq
+        mrs     r2, r9_fiq
+        mrs     r3, r10_fiq
+        stm     r0!, {r1-r3}
+        mrs     r1, r11_fiq
+        mrs     r2, r12_fiq
+        stm     r0!, {r1-r2}
+        bx      lr
+
+        ENDFUNC
+
+; Restore the banked registers saved by save_banked_registers from [r0].
+; Must be entered in the same kind of mode (Hyp vs non-Hyp) as the
+; matching save so the context layout agrees.
+restore_banked_registers FUNCTION
+        mrs     r2, CPSR                ; save current mode
+        and     r3, r2, #0x1f           ; If we are in HYP mode then use the virt.
+        cmp     r3, #MODE_HYP           ; instructions to restore the banked registers
+        beq     rest_in_hyp             ; without changing the mode
+
+        cps     #MODE_SYS               ; switch to System mode
+        ldr     sp,[r0],#4              ; restore the User SP
+        ldr     lr,[r0],#4              ; restore the User LR
+        cps     #MODE_ABT               ; switch to Abort mode
+        ldr     sp,[r0],#4              ; restore the current SP
+        ldm     r0!,{r3,lr}             ; fetch the saved SPSR (r3) and LR
+        msr     SPSR_fsxc,r3            ; restore the current SPSR
+        cps     #MODE_UND               ; switch to Undefined mode
+        ldr     sp,[r0],#4              ; restore the current SP
+        ldm     r0!,{r3,lr}             ; fetch the saved SPSR (r3) and LR
+        msr     SPSR_fsxc,r3            ; restore the current SPSR
+        cps     #MODE_IRQ               ; switch to IRQ mode
+        ldr     sp,[r0],#4              ; restore the current SP
+        ldm     r0!,{r3,lr}             ; fetch the saved SPSR (r3) and LR
+        msr     SPSR_fsxc,r3            ; restore the current SPSR
+        cps     #MODE_FIQ               ; switch to FIQ mode
+        ldr     sp,[r0],#4              ; restore the current SP
+        ldm     r0!,{r4,r8-r12,lr}      ; fetch SPSR (r4), r8-r12, LR
+                                        ; (r4 was previously never loaded)
+        msr     SPSR_fsxc,r4            ; restore the current SPSR
+        msr     CPSR_cxsf, r2           ; switch back to original mode
+0       bx      lr
+
+rest_in_hyp
+        ldm     r0!, {r1}
+        msr     SP_usr, r1
+
+        ldm     r0!, {r1-r3}
+        msr     SP_und, r1
+        msr     SPSR_und, r2
+        msr     LR_und, r3
+
+        ldm     r0!, {r1-r3}
+        msr     SP_abt, r1
+        msr     SPSR_abt, r2
+        msr     LR_abt, r3
+
+        ldm     r0!, {r1-r3}
+        msr     SP_svc, r1
+        msr     SPSR_svc, r2
+        msr     LR_svc, r3
+
+        ldm     r0!, {r1-r3}
+        msr     SP_irq, r1
+        msr     SPSR_irq, r2
+        msr     LR_irq, r3
+
+        ldm     r0!, {r1-r3}
+        msr     SP_fiq, r1
+        msr     SPSR_fiq, r2
+        msr     LR_fiq, r3
+
+        ldm     r0!, {r1-r3}
+        msr     r8_fiq, r1
+        msr     r9_fiq, r2
+        msr     r10_fiq, r3
+
+        ldm     r0!, {r1-r2}
+        msr     r11_fiq, r1
+        msr     r12_fiq, r2
+
+        bx      lr
+        ENDFUNC
+
+
+; Save the remaining CP15 state not covered by the other save routines:
+; currently only CSSELR. Stores one word at [r0], advancing r0.
+save_cp15 FUNCTION
+        ; CSSELR - Cache Size Selection Register
+        mrc     p15,2,r3,c0,c0,0
+        str     r3,[r0], #4
+
+        ; IMPLEMENTATION DEFINED - proprietary features:
+        ; (CP15 register 15, TCM support, lockdown support, etc.)
+
+        ; NOTE: IMP DEF registers might have save and restore order that relate
+        ; to other CP15 registers or logical grouping requirements and can
+        ; therefore occur at any point in this sequence.
+        bx      lr
+        ENDFUNC
+
+; Restore the CP15 state saved by save_cp15 (one word: CSSELR) from [r0],
+; advancing r0.
+restore_cp15 FUNCTION
+        ; CSSELR - Cache Size Selection Register
+        ldr     r3,[r0], #4
+        mcr     p15,2,r3,c0,c0,0
+
+        bx      lr
+        ENDFUNC
+
+        ; Save the CP15 control/ID-group registers to [r0], advancing r0.
+        ;   r0: address to store control registers
+        ;   r1: non-zero if we are Secure (then MVBAR/SCR/SDER/NSACR are
+        ;       also saved; the flags from the cmp survive the mrc's so the
+        ;       conditional mrcne/stmne see the Secure test)
+        ; Layout: ACTLR,SCTLR,CPACR [,MVBAR,SCR,SDER,NSACR] CONTEXTIDR,
+        ; TPIDRURW,TPIDRURO,TPIDRPRW [,TEECR,TEEHBR] JOSCR,JMCR.
+save_control_registers FUNCTION
+        cmp     r1, #0                  ; Are we Secure?
+        mrc     p15,0,r2,c1,c0,1        ; ACTLR - Auxiliary Control Register
+        mrc     p15,0,r3,c1,c0,0        ; SCTLR - System Control Register
+        mrc     p15,0,r12,c1,c0,2       ; CPACR - Coprocessor Access Control Register
+        stm     r0!, {r2-r3, r12}
+
+        mrcne   p15,0,r1,c12,c0,1       ; MVBAR - Monitor Vector Base Address Register
+        mrcne   p15,0,r2,c1,c1,0        ; Secure Configuration Register
+        mrcne   p15,0,r3,c1,c1,1        ; Secure Debug Enable Register
+        mrcne   p15,0,r12,c1,c1,2       ; Non-Secure Access Control Register
+        stmne   r0!, {r1-r3,r12}
+
+        mrc     p15,0,r1,c13,c0,1       ; CONTEXTIDR
+        mrc     p15,0,r2,c13,c0,2       ; TPIDRURW
+        mrc     p15,0,r3,c13,c0,3       ; TPIDRURO
+        mrc     p15,0,r12,c13,c0,4      ; TPIDRPRW
+        stm     r0!, {r1-r3,r12}
+
+        ; The next two registers are only present if ThumbEE is implemented
+        mrc     p15, 0, r1, c0, c1, 0   ; Read ID_PFR0
+        tst     r1, #PFR0_THUMB_EE_SUPPORT
+        mrcne   p14,6,r1,c0,c0,0        ; TEECR
+        mrcne   p14,6,r2,c1,c0,0        ; TEEHBR
+        stmne   r0!, {r1, r2}
+
+        mrc     p14,7,r1,c1,c0,0        ; JOSCR
+        mrc     p14,7,r2,c2,c0,0        ; JMCR
+        stm     r0!, {r1, r2}
+        bx      lr
+        ENDFUNC
+
+
+        ; Restore the registers saved by save_control_registers from [r0].
+        ;   r0: address to read control registers
+        ;   r1: non-zero if we are Secure (must match the value passed to
+        ;       the save, or the layouts will disagree)
+        ; An isb at the end makes the new control state visible before
+        ; returning.
+restore_control_registers FUNCTION
+        cmp     r1, #0                  ; Are we Secure?
+        ldm     r0!, {r2-r3, r12}
+        mcr     p15,0,r2,c1,c0,1        ; ACTLR - Auxiliary Control Register
+        mcr     p15,0,r3,c1,c0,0        ; SCTLR - System Control Register
+        mcr     p15,0,r12,c1,c0,2       ; CPACR - Coprocessor Access Control Register
+
+        ldmne   r0!, {r1-r3,r12}
+        mcrne   p15,0,r1,c12,c0,1       ; MVBAR - Monitor Vector Base Address Register
+        mcrne   p15,0,r2,c1,c1,0        ; Secure Configuration Register
+        mcrne   p15,0,r3,c1,c1,1        ; Secure Debug Enable Register
+        mcrne   p15,0,r12,c1,c1,2      ; Non-Secure Access Control Register
+
+        ldm     r0!, {r1-r3,r12}
+        mcr     p15,0,r1,c13,c0,1       ; CONTEXTIDR
+        mcr     p15,0,r2,c13,c0,2       ; TPIDRURW
+        mcr     p15,0,r3,c13,c0,3       ; TPIDRURO
+        mcr     p15,0,r12,c13,c0,4      ; TPIDRPRW
+
+        ; The next two registers are only present if ThumbEE is implemented
+        mrc     p15, 0, r1, c0, c1, 0   ; Read ID_PFR0
+        tst     r1, #PFR0_THUMB_EE_SUPPORT
+        ldmne   r0!, {r1,r2}
+        mcrne   p14,6,r1,c0,c0,0        ; TEECR
+        mcrne   p14,6,r2,c1,c0,0        ; TEEHBR
+
+        ldm     r0!, {r1, r2}
+        mcr     p14,7,r1,c1,c0,0        ; JOSCR
+        mcr     p14,7,r2,c2,c0,0        ; JMCR
+        isb
+        bx      lr
+        ENDFUNC
+
+; Save the MMU state to [r0], advancing r0.
+; Layout: VBAR, TTBCR, then either {TTBR0,TTBR1} (32-bit, no LPAE) or the
+; 64-bit TTBR0 pair followed by the 64-bit TTBR1 pair (TTBCR.EAE set),
+; then DACR, PAR, PRRR, NMRR. The flags from the TTBCR.EAE test survive
+; the intervening stm, so the later mrrcne/stmne still see it.
+; NOTE(review): only the 32-bit PAR view is saved; under LPAE the PAR is
+; 64-bit — confirm the PAR contents are not relied upon across the
+; save/restore when LPAE is in use.
+save_mmu FUNCTION
+        push    {r4, r5, r6, r7}
+        ; ASSUMPTION: no useful fault address / fault status information
+
+        mrc     p15,0,r4,c12,c0,0       ; VBAR
+        mrc     p15,0,r5,c2,c0,2        ; TTBCR
+
+        tst     r5, #TTBCR_EAE          ; Are we using LPAE?
+
+        ; save 32 or 64 bit TTBRs
+        mrceq   p15,0,r6,c2,c0,0        ; 32 bit TTBR0
+        mrceq   p15,0,r7,c2,c0,1        ; 32 bit TTBR1
+        mrrcne  p15,0,r6,r7,c2          ; 64 bit TTBR0
+        stm     r0!, {r4-r7}
+        mrrcne  p15,1,r6,r7,c2          ; 64 bit TTBR1
+        stmne   r0!, {r6-r7}
+
+        mrc     p15,0,r4,c3,c0,0        ; DACR
+        mrc     p15,0,r5,c7,c4,0        ; PAR
+        mrc     p15,0,r6,c10,c2,0       ; PRRR
+        mrc     p15,0,r7,c10,c2,1       ; NMRR
+        stm     r0!, {r4-r7}
+
+        pop     {r4, r5, r6, r7}
+        bx      lr
+        ENDFUNC
+
+
+; Restore the MMU state saved by save_mmu from [r0]. TTBCR is written
+; before its saved value is tested for EAE, so the 32/64-bit TTBR branch
+; matches what the save routine emitted (ldm does not alter the flags).
+restore_mmu FUNCTION
+
+        push    {r4, r5, r6, r7}
+        ldm     r0!, {r4-r7}
+        mcr     p15,0,r4,c12,c0,0       ; VBAR
+        mcr     p15,0,r5,c2,c0,2        ; TTBCR
+
+        tst     r5, #TTBCR_EAE          ; Are we using LPAE?
+
+        ; restore 32 or 64 bit TTBRs
+        mcreq   p15,0,r6,c2,c0,0        ; 32 bit TTBR0
+        mcreq   p15,0,r7,c2,c0,1        ; 32 bit TTBR1
+        mcrrne  p15,0,r6,r7,c2          ; 64-bit TTBR0
+        ldmne   r0!, {r6-r7}
+        mcrrne  p15,1,r6,r7,c2          ; 64-bit TTBR1
+
+        ldm     r0!, {r4-r7}
+        mcr     p15,0,r4,c3,c0,0        ; DACR
+        mcr     p15,0,r5,c7,c4,0        ; PAR
+        mcr     p15,0,r6,c10,c2,0       ; PRRR
+        mcr     p15,0,r7,c10,c2,1       ; NMRR
+
+        pop     {r4, r5, r6, r7}
+        bx      lr
+        ENDFUNC
+
+
+; Save all MPU regions to [r0], advancing r0 (R-profile).
+; MPUIR<15:8> gives the region count; regions are selected via RGNR from
+; highest down to 0 and three words are stored per region: base address,
+; size/enable, access control.
+save_mpu FUNCTION
+        mrc     p15, 0, r1, c0, c0, 4   ; Read MPUIR
+        and     r1, r1, #0xff00
+        lsr     r1, r1, #8              ; Extract number of MPU regions
+
+        ; Loop over the number of regions
+10      cmp     r1, #0
+        beq     %f20
+        sub     r1, r1, #1
+        mcr     p15, 0, r1, c6, c2, 0   ; Write RGNR
+        mrc     p15, 0, r2, c6, c1, 0   ; Read MPU Region Base Address Register
+        mrc     p15, 0, r3, c6, c1, 2   ; Read Data MPU Region Size and Enable Register
+        mrc     p15, 0, r12, c6, c1, 4  ; Read Region access control Register
+        stm     r0!, {r2, r3, r12}
+        b       %b10
+
+20      bx      lr
+        ENDFUNC
+
+
+; Restore all MPU regions saved by save_mpu from [r0]; iterates regions in
+; the same highest-to-0 order so the three-words-per-region layout matches.
+restore_mpu FUNCTION
+        mrc     p15, 0, r1, c0, c0, 4   ; Read MPUIR
+        and     r1, r1, #0xff00
+        lsr     r1, r1, #8              ; Extract number of MPU regions
+
+        ; Loop over the number of regions
+10      cmp     r1, #0
+        beq     %f20
+        ldm     r0!, {r2, r3, r12}
+        sub     r1, r1, #1
+        mcr     p15, 0, r1, c6, c2, 0   ; Write RGNR
+        mcr     p15, 0, r2, c6, c1, 0   ; Write MPU Region Base Address Register
+        mcr     p15, 0, r3, c6, c1, 2   ; Write Data MPU Region Size and Enable Register
+        mcr     p15, 0, r12, c6, c1, 4  ; Write Region access control Register
+        b       %b10
+
+20      bx      lr
+        ENDFUNC
+
+
+; Save the VFP/Advanced SIMD state to [r0], advancing r0.
+; CP10/CP11 access is temporarily granted via CPACR; if the CPACR write
+; does not stick (no VFP, or access denied at this level) nothing is
+; saved and r0 is left unchanged. Otherwise: FPEXC, FPSCR, D0-D15, and
+; D16-D31 when MVFR0.A_SIMD >= 2 indicates a 32-double register file.
+; FPEXC.EN is forced on around the register transfers and the original
+; FPEXC and CPACR are put back before returning (order matters: FPEXC
+; before CPACR, or the FPEXC write itself could be denied).
+save_vfp FUNCTION
+        ; FPU state save/restore.
+        ; FPSID,MVFR0 and MVFR1 don't get serialized/saved (Read Only).
+        mrc     p15,0,r3,c1,c0,2        ; CPACR allows CP10 and CP11 access
+        ORR     r2,r3,#0xF00000
+        mcr     p15,0,r2,c1,c0,2
+        isb
+        mrc     p15,0,r2,c1,c0,2
+        and     r2,r2,#0xF00000         ; did the enable stick?
+        cmp     r2,#0xF00000
+        beq     %f0
+        movs    r2, #0                  ; no VFP access: save nothing
+        b       %f2
+
+0       ; Save configuration registers and enable.
+        vmrs    r12,FPEXC
+        str     r12,[r0],#4             ; Save the FPEXC
+        ; Enable FPU access to save/restore the other registers.
+        ldr     r2,=0x40000000          ; FPEXC.EN
+        vmsr    FPEXC,r2
+        vmrs    r2,FPSCR
+        str     r2,[r0],#4              ; Save the FPSCR
+        ; Store the VFP-D16 registers.
+        vstm    r0!, {D0-D15}
+        ; Check for Advanced SIMD/VFP-D32 support
+        vmrs    r2,MVFR0
+        and     r2,r2,#0xF              ; extract the A_SIMD bitfield
+        cmp     r2, #0x2
+        blt     %f1
+        ; Store the Advanced SIMD/VFP-D32 additional registers.
+        vstm    r0!, {D16-D31}
+
+        ; IMPLEMENTATION DEFINED: save any subarchitecture defined state
+        ; NOTE: Don't change the order of the FPEXC and CPACR restores
+1
+        vmsr    FPEXC,r12               ; Restore the original En bit of FPU.
+2
+        mcr     p15,0,r3,c1,c0,2        ; Restore the original CPACR value.
+        bx      lr
+        ENDFUNC
+
+
+; Restore the VFP/Advanced SIMD state saved by save_vfp from [r0].
+; Assumes a matching save actually wrote the context (this routine does
+; not repeat save_vfp's "did CPACR stick" check). Layout consumed:
+; FPEXC (-> r3), FPSCR (-> r12), D0-D15, then D16-D31 if MVFR0.A_SIMD
+; indicates D32. FPSCR is restored before FPEXC, and CPACR last.
+restore_vfp FUNCTION
+        ; FPU state save/restore. Obviously FPSID,MVFR0 and MVFR1 don't get
+        ; serialized (RO).
+        ; Modify CPACR to allow CP10 and CP11 access
+        mrc     p15,0,r1,c1,c0,2
+        ORR     r2,r1,#0x00F00000
+        mcr     p15,0,r2,c1,c0,2
+        ; Enable FPU access to save/restore the rest of registers.
+        ldr     r2,=0x40000000          ; FPEXC.EN
+        vmsr    FPEXC, r2
+        ; Recover FPEXC and FPSCR. These will be restored later.
+        ldm     r0!,{r3,r12}
+        ; Restore the VFP-D16 registers.
+        vldm    r0!, {D0-D15}
+        ; Check for Advanced SIMD/VFP-D32 support
+        vmrs    r2, MVFR0
+        and     r2,r2,#0xF              ; extract the A_SIMD bitfield
+        cmp     r2, #0x2
+        blt     %f0
+
+        ; Store the Advanced SIMD/VFP-D32 additional registers.
+        vldm    r0!, {D16-D31}
+
+        ; IMPLEMENTATION DEFINED: restore any subarchitecture defined state
+
+0       ; Restore configuration registers and enable.
+        ; Restore FPSCR _before_ FPEXC since FPEXC could disable FPU
+        ; and make setting FPSCR unpredictable.
+        vmsr    FPSCR,r12
+        vmsr    FPEXC,r3                ; Restore FPEXC after FPSCR
+        ; Restore CPACR
+        mcr     p15,0,r1,c1,c0,2
+        bx      lr
+        ENDFUNC
+
+
+        ; If r1 is 0, we assume that the OS is not using the Virtualization extensions,
+        ; and that the warm boot code will set up CNTHCTL correctly. If r1 is non-zero
+        ; then CNTHCTL is saved and restored.
+        ; CNTP_CVAL will be preserved as it is in the always-on domain.
+
+; Save the Generic Timer state to [r0], advancing r0.
+; Layout: CNTP_CTL, CNTP_TVAL, CNTKCTL [, CNTHCTL if r1 != 0].
+save_generic_timer FUNCTION
+        mrc     p15,0,r2,c14,c2,1       ; read CNTP_CTL
+        mrc     p15,0,r3,c14,c2,0       ; read CNTP_TVAL
+        mrc     p15,0,r12,c14,c1,0      ; read CNTKCTL
+        stm     r0!, {r2, r3, r12}
+        cmp     r1, #0
+        mrcne   p15,4,r1,c14,c1,0       ; read CNTHCTL (virt. extensions)
+        strne   r1, [r0], #4
+        bx      lr
+        ENDFUNC
+
+; Restore the Generic Timer state saved by save_generic_timer from [r0].
+; r1 must match the value passed at save time so the layouts agree.
+; CNTP_TVAL and CNTKCTL are written before CNTP_CTL so the timer is fully
+; programmed before it is (re-)enabled.
+restore_generic_timer FUNCTION
+        ldm     r0!, {r2, r3, r12}
+        mcr     p15,0,r3,c14,c2,0       ; write CNTP_TVAL
+        mcr     p15,0,r12,c14,c1,0      ; write CNTKCTL
+        mcr     p15,0,r2,c14,c2,1       ; write CNTP_CTL
+        cmp     r1, #0
+        ldrne   r1, [r0], #4
+        mcrne   p15,4,r1,c14,c1,0       ; write CNTHCTL (virt. extensions)
+        bx      lr
+        ENDFUNC
+
+
+; Save the fault reporting registers to [r0], advancing r0.
+; Layout: DFAR, IFAR, DFSR, IFSR, ADFSR, AIFSR (six words).
+save_fault_status FUNCTION
+        mrc     p15,0,r1,c6,c0,0        ; read DFAR
+        mrc     p15,0,r2,c6,c0,2        ; read IFAR
+        mrc     p15,0,r3,c5,c0,0        ; read DFSR
+        mrc     p15,0,r12,c5,c0,1       ; read IFSR
+        stm     r0!, {r1,r2,r3,r12}
+        mrc     p15,0,r1,c5,c1,0        ; read ADFSR
+        mrc     p15,0,r2,c5,c1,1        ; read AIFSR
+        stm     r0!, {r1,r2}
+        bx      lr
+        ENDFUNC
+
+; Restore the fault reporting registers saved by save_fault_status from
+; [r0] (same six-word layout), advancing r0.
+restore_fault_status FUNCTION
+        ldm     r0!, {r1,r2,r3,r12}
+        mcr     p15,0,r1,c6,c0,0        ; write DFAR
+        mcr     p15,0,r2,c6,c0,2        ; write IFAR
+        mcr     p15,0,r3,c5,c0,0        ; write DFSR
+        mcr     p15,0,r12,c5,c0,1       ; write IFSR
+        ldm     r0!, {r1,r2}
+        mcr     p15,0,r1,c5,c1,0        ; write ADFSR
+        mcr     p15,0,r2,c5,c1,1        ; write AIFSR
+        bx      lr
+        ENDFUNC
+
+
+ END
diff --git a/acsr/v7_c.c b/acsr/v7_c.c
new file mode 100644
index 0000000..beed7df
--- /dev/null
+++ b/acsr/v7_c.c
@@ -0,0 +1,279 @@
+/*
+ Copyright (c) 2009-11, ARM Limited. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of ARM nor the names of its contributors may be used to
+ endorse or promote products derived from this software without specific
+ prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/*
+ * The code to save and restore debug context uses the memory-mapped interface.
+ * The registers that are saved are enough to support a self-hosted debugger,
+ * but a different approach should be taken with an external debugger (cp14).
+ */
+
+#include "appf_types.h"
+#include "appf_internals.h"
+#include "appf_helpers.h"
+
+#define DIDR_VERSION_SHIFT 16
+#define DIDR_VERSION_MASK 0xF
+#define DIDR_VERSION_7_1 5
+#define DIDR_BP_SHIFT 24
+#define DIDR_BP_MASK 0xF
+#define DIDR_WP_SHIFT 28
+#define DIDR_WP_MASK 0xF
+#define CLAIMCLR_CLEAR_ALL 0xff
+
+#define DRAR_VALID_MASK 0x00000003
+#define DSAR_VALID_MASK 0x00000003
+#define DRAR_ADDRESS_MASK 0xFFFFF000
+#define DSAR_ADDRESS_MASK 0xFFFFF000
+#define OSLSR_OSLM_MASK 0x00000009
+#define OSLAR_UNLOCKED 0x00000000
+#define OSLAR_LOCKED 0xC5ACCE55
+#define LAR_UNLOCKED 0xC5ACCE55
+#define LAR_LOCKED 0x00000000
+#define OSDLR_UNLOCKED 0x00000000
+#define OSDLR_LOCKED 0x00000001
+
+/*
+ * Memory-mapped view of the v7 debug register file. One appf_u32 per
+ * register index, so each field's word offset equals the architectural
+ * register number shown in the comments. The struct is volatile because
+ * every access is a live MMIO transaction; do NOT reorder or resize the
+ * fields. The "Save?" column records which registers save_v7_debug /
+ * restore_v7_debug handle.
+ */
+typedef volatile struct
+{                               /* Registers  Save?                             */
+    appf_u32 const didr;        /*          0 Read only                         */
+    appf_u32 dscr_i;            /*          1 ignore - use dscr_e instead       */
+    appf_u32 const dummy1[3];   /*        2-4 ignore                            */
+    appf_u32 dtrrx_dtrtx_i;     /*          5 ignore                            */
+    appf_u32 wfar;              /*          6 ignore - transient information    */
+    appf_u32 vcr;               /*          7 Save                              */
+    appf_u32 const dummy2;      /*          8 ignore                            */
+    appf_u32 ecr;               /*          9 ignore                            */
+    appf_u32 dsccr;             /*         10 ignore                            */
+    appf_u32 dsmcr;             /*         11 ignore                            */
+    appf_u32 const dummy3[20];  /*      12-31 ignore                            */
+    appf_u32 dtrrx_e;           /*         32 ignore                            */
+    appf_u32 itr_pcsr;          /*         33 ignore                            */
+    appf_u32 dscr_e;            /*         34 Save                              */
+    appf_u32 dtrtx_e;           /*         35 ignore                            */
+    appf_u32 drcr;              /*         36 ignore                            */
+    appf_u32 eacr;              /*         37 Save - V7.1 only                  */
+    appf_u32 const dummy4[2];   /*      38-39 ignore                            */
+    appf_u32 pcsr;              /*         40 ignore                            */
+    appf_u32 cidsr;             /*         41 ignore                            */
+    appf_u32 vidsr;             /*         42 ignore                            */
+    appf_u32 const dummy5[21];  /*      43-63 ignore                            */
+    appf_u32 bvr[16];           /*      64-79 Save                              */
+    appf_u32 bcr[16];           /*      80-95 Save                              */
+    appf_u32 wvr[16];           /*     96-111 Save                              */
+    appf_u32 wcr[16];           /*    112-127 Save                              */
+    appf_u32 const dummy6[16];  /*    128-143 ignore                            */
+    appf_u32 bxvr[16];          /*    144-159 Save if have Virtualization extensions */
+    appf_u32 const dummy7[32];  /*    160-191 ignore                            */
+    appf_u32 oslar;             /*        192 If oslsr[0] is 1, unlock before save/restore */
+    appf_u32 const oslsr;       /*        193 ignore                            */
+    appf_u32 ossrr;             /*        194 ignore                            */
+    appf_u32 const dummy8;      /*        195 ignore                            */
+    appf_u32 prcr;              /*        196 ignore                            */
+    appf_u32 prsr;              /*        197 clear SPD on restore              */
+    appf_u32 const dummy9[762]; /*    198-959 ignore                            */
+    appf_u32 itctrl;            /*        960 ignore                            */
+    appf_u32 const dummy10[39]; /*    961-999 ignore                            */
+    appf_u32 claimset;          /*       1000 Restore claim bits to here        */
+    appf_u32 claimclr;          /*       1001 Save claim bits from here         */
+    appf_u32 const dummy11[2];  /*  1002-1003 ignore                            */
+    appf_u32 lar;               /*       1004 Unlock before restore             */
+    appf_u32 const lsr;         /*       1005 ignore                            */
+    appf_u32 const authstatus;  /*       1006 Read only                         */
+    appf_u32 const dummy12;     /*       1007 ignore                            */
+    appf_u32 const devid2;      /*       1008 Read only                         */
+    appf_u32 const devid1;      /*       1009 Read only                         */
+    appf_u32 const devid;       /*       1010 Read only                         */
+    appf_u32 const devtype;     /*       1011 Read only                         */
+    appf_u32 const pid[8];      /*  1012-1019 Read only                         */
+    appf_u32 const cid[4];      /*  1020-1023 Read only                         */
+} debug_registers_t;
+
+/*
+ * In-memory snapshot of the debug registers worth preserving across power
+ * down; filled by save_v7_debug and consumed by restore_v7_debug. Only
+ * the first 1 + DIDR-reported number of breakpoints/watchpoints entries
+ * of each array are actually used; bxvr only when VIRTUALIZATION is set,
+ * eacr only on debug v7.1.
+ */
+typedef struct
+{
+    appf_u32 vcr;
+    appf_u32 dscr_e;
+    appf_u32 eacr;
+    appf_u32 bvr[16];
+    appf_u32 bcr[16];
+    appf_u32 wvr[16];
+    appf_u32 wcr[16];
+    appf_u32 bxvr[16];
+    appf_u32 claim;
+} debug_context_t;  /* total size 86 * 4 = 344 bytes */
+
+/*
+ * Locate this core's memory-mapped debug registers.
+ * The base is DRAR.address + DSAR.offset; returns NULL when either
+ * register reports itself invalid (i.e. no memory-mapped debug interface
+ * on this processor).
+ */
+debug_registers_t *read_debug_address(void)
+{
+    unsigned rom_base = read_drar();
+    unsigned self_offset = read_dsar();
+
+    if ((rom_base & DRAR_VALID_MASK) && (self_offset & DSAR_VALID_MASK))
+    {
+        return (debug_registers_t *)((rom_base & DRAR_ADDRESS_MASK)
+                                     + (self_offset & DSAR_ADDRESS_MASK));
+    }
+
+    return 0; /* No memory-mapped debug on this processor */
+}
+
+/*
+ * We assume that before save (and after restore):
+ * - OSLAR is NOT locked, or the debugger would not work properly
+ * - LAR is locked, because the ARM ARM says it must be
+ * - OSDLR is NOT locked, or the debugger would not work properly
+ */
+
+/*
+ * Save the self-hosted debug state (vector catch, DSCR, claim tag,
+ * breakpoints, watchpoints, and on v7.1 the EACR) into the caller's
+ * context buffer, then - on debug v7.1 only - set the OS Double Lock so
+ * the state survives power down. No-op when there is no memory-mapped
+ * debug interface. See the assumptions above about OSLAR/LAR/OSDLR state.
+ */
+void save_v7_debug(appf_u32 *context)
+{
+    debug_registers_t *dbg = read_debug_address();
+    debug_context_t *ctx = (void *)context;
+    unsigned is_v71, count, idx;
+    appf_u32 didr;
+
+    if (dbg == 0)
+    {
+        return;   /* no memory-mapped debug: nothing to save */
+    }
+
+    /* DIDR tells us the debug version and the counter geometry */
+    didr = dbg->didr;
+    is_v71 = (((didr >> DIDR_VERSION_SHIFT) & DIDR_VERSION_MASK) == DIDR_VERSION_7_1);
+
+    ctx->vcr = dbg->vcr;
+    ctx->dscr_e = dbg->dscr_e;
+    ctx->claim = dbg->claimclr;   /* reading CLAIMCLR yields the claim tag */
+
+    if (is_v71)
+    {
+        ctx->eacr = dbg->eacr;
+    }
+
+    /* DIDR encodes (number of breakpoints - 1) */
+    count = ((didr >> DIDR_BP_SHIFT) & DIDR_BP_MASK) + 1;
+    for (idx = 0; idx < count; ++idx)
+    {
+        ctx->bvr[idx] = dbg->bvr[idx];
+        ctx->bcr[idx] = dbg->bcr[idx];
+#ifdef VIRTUALIZATION
+        ctx->bxvr[idx] = dbg->bxvr[idx]; /* TODO: don't save the ones that don't exist */
+#endif
+    }
+
+    /* DIDR encodes (number of watchpoints - 1) */
+    count = ((didr >> DIDR_WP_SHIFT) & DIDR_WP_MASK) + 1;
+    for (idx = 0; idx < count; ++idx)
+    {
+        ctx->wvr[idx] = dbg->wvr[idx];
+        ctx->wcr[idx] = dbg->wcr[idx];
+    }
+
+    /*
+     * If Debug V7.1, we must set osdlr (by cp14 interface) before power down.
+     * Once we have done this, debug becomes inaccessible.
+     */
+    if (is_v71)
+    {
+        write_osdlr(OSDLR_LOCKED);
+    }
+}
+
+/*
+ * Restore the debug state captured by save_v7_debug. Unlocks the Lock
+ * Access Register (and, on v7.1, the OS Double Lock) before writing,
+ * re-arms breakpoints/watchpoints and the claim tag, clears PRSR.SPD on
+ * pre-v7.1 parts by reading PRSR, re-enables debug via DSCR last, and
+ * finally re-locks the LAR. No-op without memory-mapped debug.
+ */
+void restore_v7_debug(appf_u32 *context)
+{
+    debug_registers_t *dbg = read_debug_address();
+    debug_context_t *ctx = (void *)context;
+    unsigned is_v71, count, idx;
+    appf_u32 didr;
+
+    if (dbg == 0)
+    {
+        return;   /* no memory-mapped debug: nothing to restore */
+    }
+
+    /* DIDR tells us the debug version and the counter geometry */
+    didr = dbg->didr;
+    is_v71 = (((didr >> DIDR_VERSION_SHIFT) & DIDR_VERSION_MASK) == DIDR_VERSION_7_1);
+
+    /* Enable write access to registers */
+    dbg->lar = LAR_UNLOCKED;
+    /*
+     * If Debug V7.1, we must unset osdlr (by cp14 interface) before restoring.
+     * (If the CPU has not actually power-cycled, osdlr may not be reset).
+     */
+    if (is_v71)
+    {
+        write_osdlr(OSDLR_UNLOCKED);
+    }
+
+    dbg->vcr = ctx->vcr;
+    dbg->claimclr = CLAIMCLR_CLEAR_ALL;   /* wipe stale claim bits first */
+    dbg->claimset = ctx->claim;
+
+    if (is_v71)
+    {
+        dbg->eacr = ctx->eacr;
+    }
+
+    /* DIDR encodes (number of breakpoints - 1) */
+    count = ((didr >> DIDR_BP_SHIFT) & DIDR_BP_MASK) + 1;
+    for (idx = 0; idx < count; ++idx)
+    {
+        dbg->bvr[idx] = ctx->bvr[idx];
+        dbg->bcr[idx] = ctx->bcr[idx];
+#ifdef VIRTUALIZATION
+        dbg->bxvr[idx] = ctx->bxvr[idx]; /* TODO: don't restore the ones that don't exist */
+#endif
+    }
+
+    /* DIDR encodes (number of watchpoints - 1) */
+    count = ((didr >> DIDR_WP_SHIFT) & DIDR_WP_MASK) + 1;
+    for (idx = 0; idx < count; ++idx)
+    {
+        dbg->wvr[idx] = ctx->wvr[idx];
+        dbg->wcr[idx] = ctx->wcr[idx];
+    }
+
+    /* Clear PRSR.SPD by reading PRSR (volatile read has the side effect) */
+    if (!is_v71)
+    {
+        (dbg->prsr);
+    }
+
+    /* Re-enable debug */
+    dbg->dscr_e = ctx->dscr_e;
+
+    /* Disable write access to registers */
+    dbg->lar = LAR_LOCKED;
+}
+