author     Peter Maydell <peter.maydell@linaro.org>    2014-08-19 18:56:27 +0100
committer  Peter Maydell <peter.maydell@linaro.org>    2014-08-19 19:02:03 +0100
commit     50225ad0c185a16c472b3dce984c312e4399a3ef (patch)
tree       9e5ce8f6c59bea5f880d7625eedfbecabc4ae027 /target-arm
parent     7ea47fe7be86faed4f38f0093ca1226b9b6043eb (diff)
target-arm: Implement ARMv8 single-stepping for AArch32 code
ARMv8 single-stepping requires the exception level that controls the
single-stepping to be in AArch64 execution state, but the code being
stepped may be in AArch64 or AArch32. Implement the necessary support
code for single-stepping AArch32 code.

Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
Reviewed-by: Edgar E. Iglesias <edgar.iglesias@xilinx.com>
Diffstat (limited to 'target-arm')
-rw-r--r--  target-arm/cpu.h       | 21
-rw-r--r--  target-arm/translate.c | 76
2 files changed, 95 insertions(+), 2 deletions(-)
diff --git a/target-arm/cpu.h b/target-arm/cpu.h
index 3d3e1d5e78..8098b8d357 100644
--- a/target-arm/cpu.h
+++ b/target-arm/cpu.h
@@ -1205,6 +1205,10 @@ static inline bool arm_singlestep_active(CPUARMState *env)
#define ARM_TBFLAG_BSWAP_CODE_MASK (1 << ARM_TBFLAG_BSWAP_CODE_SHIFT)
#define ARM_TBFLAG_CPACR_FPEN_SHIFT 17
#define ARM_TBFLAG_CPACR_FPEN_MASK (1 << ARM_TBFLAG_CPACR_FPEN_SHIFT)
+#define ARM_TBFLAG_SS_ACTIVE_SHIFT 18
+#define ARM_TBFLAG_SS_ACTIVE_MASK (1 << ARM_TBFLAG_SS_ACTIVE_SHIFT)
+#define ARM_TBFLAG_PSTATE_SS_SHIFT 19
+#define ARM_TBFLAG_PSTATE_SS_MASK (1 << ARM_TBFLAG_PSTATE_SS_SHIFT)
/* Bit usage when in AArch64 state */
#define ARM_TBFLAG_AA64_EL_SHIFT 0
@@ -1235,6 +1239,10 @@ static inline bool arm_singlestep_active(CPUARMState *env)
    (((F) & ARM_TBFLAG_BSWAP_CODE_MASK) >> ARM_TBFLAG_BSWAP_CODE_SHIFT)
#define ARM_TBFLAG_CPACR_FPEN(F) \
    (((F) & ARM_TBFLAG_CPACR_FPEN_MASK) >> ARM_TBFLAG_CPACR_FPEN_SHIFT)
+#define ARM_TBFLAG_SS_ACTIVE(F) \
+    (((F) & ARM_TBFLAG_SS_ACTIVE_MASK) >> ARM_TBFLAG_SS_ACTIVE_SHIFT)
+#define ARM_TBFLAG_PSTATE_SS(F) \
+    (((F) & ARM_TBFLAG_PSTATE_SS_MASK) >> ARM_TBFLAG_PSTATE_SS_SHIFT)
#define ARM_TBFLAG_AA64_EL(F) \
    (((F) & ARM_TBFLAG_AA64_EL_MASK) >> ARM_TBFLAG_AA64_EL_SHIFT)
#define ARM_TBFLAG_AA64_FPEN(F) \
@@ -1292,6 +1300,19 @@ static inline void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
        if (fpen == 3 || (fpen == 1 && arm_current_pl(env) != 0)) {
            *flags |= ARM_TBFLAG_CPACR_FPEN_MASK;
        }
+        /* The SS_ACTIVE and PSTATE_SS bits correspond to the state machine
+         * states defined in the ARM ARM for software singlestep:
+         *   SS_ACTIVE   PSTATE.SS   State
+         *     0            x        Inactive (the TB flag for SS is always 0)
+         *     1            0        Active-pending
+         *     1            1        Active-not-pending
+         */
+        if (arm_singlestep_active(env)) {
+            *flags |= ARM_TBFLAG_SS_ACTIVE_MASK;
+            if (env->uncached_cpsr & PSTATE_SS) {
+                *flags |= ARM_TBFLAG_PSTATE_SS_MASK;
+            }
+        }
    }

    *cs_base = 0;
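
Example (not part of the patch): the two new TB flag bits and their accessor
macros can be exercised standalone to confirm the encode/decode round trip
for the three single-step states. The definitions below are copied from the
hunks above; the small test harness around them is invented for illustration.

    /* Round-trip check for the SS_ACTIVE/PSTATE_SS TB flag bits.
     * Definitions copied from target-arm/cpu.h as patched above;
     * main() is illustrative only.
     */
    #include <assert.h>
    #include <stdint.h>

    #define ARM_TBFLAG_SS_ACTIVE_SHIFT 18
    #define ARM_TBFLAG_SS_ACTIVE_MASK (1 << ARM_TBFLAG_SS_ACTIVE_SHIFT)
    #define ARM_TBFLAG_PSTATE_SS_SHIFT 19
    #define ARM_TBFLAG_PSTATE_SS_MASK (1 << ARM_TBFLAG_PSTATE_SS_SHIFT)

    #define ARM_TBFLAG_SS_ACTIVE(F) \
        (((F) & ARM_TBFLAG_SS_ACTIVE_MASK) >> ARM_TBFLAG_SS_ACTIVE_SHIFT)
    #define ARM_TBFLAG_PSTATE_SS(F) \
        (((F) & ARM_TBFLAG_PSTATE_SS_MASK) >> ARM_TBFLAG_PSTATE_SS_SHIFT)

    int main(void)
    {
        /* Active-not-pending: both bits set in the TB flags. */
        uint32_t flags = ARM_TBFLAG_SS_ACTIVE_MASK | ARM_TBFLAG_PSTATE_SS_MASK;
        assert(ARM_TBFLAG_SS_ACTIVE(flags) == 1);
        assert(ARM_TBFLAG_PSTATE_SS(flags) == 1);

        /* Active-pending: SS_ACTIVE set, PSTATE.SS clear. */
        flags = ARM_TBFLAG_SS_ACTIVE_MASK;
        assert(ARM_TBFLAG_SS_ACTIVE(flags) == 1);
        assert(ARM_TBFLAG_PSTATE_SS(flags) == 0);

        /* Inactive: neither bit set. */
        assert(ARM_TBFLAG_SS_ACTIVE(0) == 0 && ARM_TBFLAG_PSTATE_SS(0) == 0);
        return 0;
    }
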
diff --git a/target-arm/translate.c b/target-arm/translate.c
index 4cde3096d4..2c0b1deaea 100644
--- a/target-arm/translate.c
+++ b/target-arm/translate.c
@@ -205,6 +205,33 @@ static void gen_exception(int excp, uint32_t syndrome)
    tcg_temp_free_i32(tcg_excp);
}

+static void gen_ss_advance(DisasContext *s)
+{
+    /* If the singlestep state is Active-not-pending, advance to
+     * Active-pending.
+     */
+    if (s->ss_active) {
+        s->pstate_ss = 0;
+        gen_helper_clear_pstate_ss(cpu_env);
+    }
+}
+
+static void gen_step_complete_exception(DisasContext *s)
+{
+    /* We just completed step of an insn. Move from Active-not-pending
+     * to Active-pending, and then also take the swstep exception.
+     * This corresponds to making the (IMPDEF) choice to prioritize
+     * swstep exceptions over asynchronous exceptions taken to an exception
+     * level where debug is disabled. This choice has the advantage that
+     * we do not need to maintain internal state corresponding to the
+     * ISV/EX syndrome bits between completion of the step and generation
+     * of the exception, and our syndrome information is always correct.
+     */
+    gen_ss_advance(s);
+    gen_exception(EXCP_UDEF, syn_swstep(s->ss_same_el, 1, s->is_ldex));
+    s->is_jmp = DISAS_EXC;
+}
+
static void gen_smul_dual(TCGv_i32 a, TCGv_i32 b)
{
    TCGv_i32 tmp1 = tcg_temp_new_i32();
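
A note on the syndrome value (illustration, with assumptions):
gen_step_complete_exception() raises EXCP_UDEF with
syn_swstep(s->ss_same_el, 1, s->is_ldex). The standalone sketch below shows
where those arguments land, assuming the ARMv8 ESR_ELx encoding for a
Software Step exception (EC 0x32 from a lower EL or 0x33 for the same EL,
ISS.ISV in bit 24, ISS.EX in bit 6, IFSC 0x22). It is a re-derivation for
illustration, not QEMU's actual syn_swstep() helper.

    /* Illustrative software-step syndrome builder (assumed ESR_ELx layout,
     * not QEMU's syn_swstep()). */
    #include <stdint.h>

    static uint32_t example_syn_swstep(int same_el, int isv, int ex)
    {
        uint32_t ec = same_el ? 0x33u : 0x32u;  /* EC: software step exception */
        return (ec << 26)                /* ESR_ELx.EC[31:26] */
             | (1u << 25)                /* ESR_ELx.IL, set for this exception */
             | ((uint32_t)isv << 24)     /* ISS.ISV: EX bit is valid */
             | ((uint32_t)ex << 6)       /* ISS.EX: stepped a load-exclusive */
             | 0x22u;                    /* ISS.IFSC: debug exception */
    }

Read this way, the call above passes ISV = 1 because an insn really was
stepped, and EX reports whether that insn was a load-exclusive, tracked via
s->is_ldex (set in gen_load_exclusive() below); the Active-pending path later
in the patch passes ISV = 0 and EX = 0 because no insn was stepped.
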
@@ -3860,7 +3887,7 @@ static inline void gen_goto_tb(DisasContext *s, int n, target_ulong dest)
static inline void gen_jmp (DisasContext *s, uint32_t dest)
{
-    if (unlikely(s->singlestep_enabled)) {
+    if (unlikely(s->singlestep_enabled || s->ss_active)) {
        /* An indirect jump so that we still trigger the debug exception. */
        if (s->thumb)
            dest |= 1;
@@ -7281,6 +7308,8 @@ static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
{
    TCGv_i32 tmp = tcg_temp_new_i32();

+    s->is_ldex = true;
+
    switch (size) {
    case 0:
        gen_aa32_ld8u(tmp, addr, get_mem_index(s));
@@ -10917,6 +10946,26 @@ static inline void gen_intermediate_code_internal(ARMCPU *cpu,
    dc->current_pl = arm_current_pl(env);
    dc->features = env->features;

+    /* Single step state. The code-generation logic here is:
+     *  SS_ACTIVE == 0:
+     *   generate code with no special handling for single-stepping (except
+     *   that anything that can make us go to SS_ACTIVE == 1 must end the TB;
+     *   this happens anyway because those changes are all system register or
+     *   PSTATE writes).
+     *  SS_ACTIVE == 1, PSTATE.SS == 1: (active-not-pending)
+     *   emit code for one insn
+     *   emit code to clear PSTATE.SS
+     *   emit code to generate software step exception for completed step
+     *   end TB (as usual for having generated an exception)
+     *  SS_ACTIVE == 1, PSTATE.SS == 0: (active-pending)
+     *   emit code to generate a software step exception
+     *   end the TB
+     */
+    dc->ss_active = ARM_TBFLAG_SS_ACTIVE(tb->flags);
+    dc->pstate_ss = ARM_TBFLAG_PSTATE_SS(tb->flags);
+    dc->is_ldex = false;
+    dc->ss_same_el = false; /* Can't be true since EL_d must be AArch64 */
+
    cpu_F0s = tcg_temp_new_i32();
    cpu_F1s = tcg_temp_new_i32();
    cpu_F0d = tcg_temp_new_i64();
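
Example (illustration only): the three code-generation cases described in the
comment above, condensed into a single decision over the two DisasContext
fields set here. The enum and function names below are invented; in the real
translator the decision is spread across the main loop and the end-of-TB
handling in the following hunks.

    #include <stdbool.h>

    typedef enum {
        TB_NORMAL,             /* SS_ACTIVE == 0: no special handling */
        TB_STEP_EXC_ONLY,      /* active-pending: swstep exception, no insn */
        TB_ONE_INSN_THEN_STEP  /* active-not-pending: one insn, clear
                                * PSTATE.SS, then swstep exception */
    } TBStepPlan;

    static TBStepPlan tb_step_plan(bool ss_active, bool pstate_ss)
    {
        if (!ss_active) {
            return TB_NORMAL;
        }
        return pstate_ss ? TB_ONE_INSN_THEN_STEP : TB_STEP_EXC_ONLY;
    }
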
@@ -11026,6 +11075,22 @@ static inline void gen_intermediate_code_internal(ARMCPU *cpu,
            tcg_gen_debug_insn_start(dc->pc);
        }

+        if (dc->ss_active && !dc->pstate_ss) {
+            /* Singlestep state is Active-pending.
+             * If we're in this state at the start of a TB then either
+             *  a) we just took an exception to an EL which is being debugged
+             *     and this is the first insn in the exception handler
+             *  b) debug exceptions were masked and we just unmasked them
+             *     without changing EL (eg by clearing PSTATE.D)
+             * In either case we're going to take a swstep exception in the
+             * "did not step an insn" case, and so the syndrome ISV and EX
+             * bits should be zero.
+             */
+            assert(num_insns == 0);
+            gen_exception(EXCP_UDEF, syn_swstep(dc->ss_same_el, 0, 0));
+            goto done_generating;
+        }
+
        if (dc->thumb) {
            disas_thumb_insn(env, dc);
            if (dc->condexec_mask) {
@@ -11058,6 +11123,7 @@ static inline void gen_intermediate_code_internal(ARMCPU *cpu,
    } while (!dc->is_jmp && tcg_ctx.gen_opc_ptr < gen_opc_end &&
             !cs->singlestep_enabled &&
             !singlestep &&
+             !dc->ss_active &&
             dc->pc < next_page_start &&
             num_insns < max_insns);
@@ -11073,12 +11139,15 @@ static inline void gen_intermediate_code_internal(ARMCPU *cpu,
    /* At this stage dc->condjmp will only be set when the skipped
       instruction was a conditional branch or trap, and the PC has
       already been written. */
-    if (unlikely(cs->singlestep_enabled)) {
+    if (unlikely(cs->singlestep_enabled || dc->ss_active)) {
        /* Make sure the pc is updated, and raise a debug exception. */
        if (dc->condjmp) {
            gen_set_condexec(dc);
            if (dc->is_jmp == DISAS_SWI) {
+                gen_ss_advance(dc);
                gen_exception(EXCP_SWI, syn_aa32_svc(dc->svc_imm, dc->thumb));
+            } else if (dc->ss_active) {
+                gen_step_complete_exception(dc);
            } else {
                gen_exception_internal(EXCP_DEBUG);
            }
@@ -11090,7 +11159,10 @@ static inline void gen_intermediate_code_internal(ARMCPU *cpu,
        }
        gen_set_condexec(dc);
        if (dc->is_jmp == DISAS_SWI && !dc->condjmp) {
+            gen_ss_advance(dc);
            gen_exception(EXCP_SWI, syn_aa32_svc(dc->svc_imm, dc->thumb));
+        } else if (dc->ss_active) {
+            gen_step_complete_exception(dc);
        } else {
            /* FIXME: Single stepping a WFI insn will not halt
               the CPU. */
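
To summarize the end-of-TB handling in the last two hunks (a sketch with
invented names, not translator code): when either gdbstub single-step
(cs->singlestep_enabled) or architectural single-step (dc->ss_active) is in
effect, the generated code ends the TB with one of three exceptions, with the
SWI/SVC case taking priority.

    #include <stdbool.h>

    typedef enum {
        END_SWI,        /* SVC: gen_ss_advance(), then EXCP_SWI */
        END_SWSTEP,     /* completed architectural step: swstep exception */
        END_GDB_DEBUG   /* gdbstub single-step: EXCP_DEBUG */
    } TBEndExc;

    static TBEndExc tb_end_exception(bool is_swi, bool ss_active)
    {
        if (is_swi) {
            /* The step machine is advanced first, so a later step of the
             * SVC handler starts from Active-pending. */
            return END_SWI;
        }
        if (ss_active) {
            return END_SWSTEP;   /* gen_step_complete_exception() */
        }
        return END_GDB_DEBUG;    /* gen_exception_internal(EXCP_DEBUG) */
    }
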