diff options
author | Marc Zyngier <marc.zyngier@arm.com> | 2018-02-06 17:56:15 +0000 |
---|---|---|
committer | Alex Shi <alex.shi@linaro.org> | 2018-03-23 12:52:36 +0800 |
commit | fc3a9ad5ec08d2ae5993627632c688886508d48a (patch) | |
tree | e769c35a262701afdb0076771b4f358b75d8d24e | |
parent | 8a4c20144b9bd321df2a87017c429801009662c2 (diff) |
arm64: KVM: Add SMCCC_ARCH_WORKAROUND_1 fast handling
commit f72af90c3783 upstream.
We want SMCCC_ARCH_WORKAROUND_1 to be fast. As fast as possible.
So let's intercept it as early as we can by testing for the
function call number as soon as we've identified a HVC call
coming from the guest.
Tested-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Reviewed-by: Christoffer Dall <christoffer.dall@linaro.org>
Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Alex Shi <alex.shi@linaro.org>
-rw-r--r-- | arch/arm64/kvm/hyp/hyp-entry.S | 19 |
1 file changed, 17 insertions, 2 deletions
diff --git a/arch/arm64/kvm/hyp/hyp-entry.S b/arch/arm64/kvm/hyp/hyp-entry.S
index 44c79fd81ad1..101c0712566a 100644
--- a/arch/arm64/kvm/hyp/hyp-entry.S
+++ b/arch/arm64/kvm/hyp/hyp-entry.S
@@ -15,6 +15,7 @@
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */

+#include <linux/arm-smccc.h>
 #include <linux/linkage.h>

 #include <asm/alternative.h>
@@ -73,8 +74,8 @@ el1_sync:				// Guest trapped into EL2
 	cmp	x2, #ESR_ELx_EC_HVC64
 	b.ne	el1_trap

-	mrs	x3, vttbr_el2		// If vttbr is valid, the 64bit guest
-	cbnz	x3, el1_trap		// called HVC
+	mrs	x3, vttbr_el2		// If vttbr is valid, the guest
+	cbnz	x3, el1_hvc_guest	// called HVC

 	/* Here, we're pretty sure the host called HVC. */
 	restore_x0_to_x3
@@ -93,6 +94,20 @@ el1_sync:				// Guest trapped into EL2

 2:	eret

+el1_hvc_guest:
+	/*
+	 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
+	 * The workaround has already been applied on the host,
+	 * so let's quickly get back to the guest. We don't bother
+	 * restoring x1, as it can be clobbered anyway.
+	 */
+	ldr	x1, [sp]				// Guest's x0
+	eor	w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
+	cbnz	w1, el1_trap
+	mov	x0, x1
+	add	sp, sp, #16
+	eret
+
 el1_trap:
 	/*
 	 * x1: ESR