KVM: arm64: Move PC rollback on SError to HYP
Instead of handling the "PC rollback on SError during HVC" at EL1 (which requires disclosing PC to a potentially untrusted kernel), let's move this fixup to ... fixup_guest_exit(), which is where we do all fixups. Isn't that neat?

Signed-off-by: Marc Zyngier <maz@kernel.org>
commit defe21f49b
parent cdb5e02ed1
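For readability, here is a minimal sketch (not part of the commit) of the EL2-side rollback that the switch.h hunk below open-codes inside fixup_guest_exit(). It only uses names that appear in the diff itself; the helper name __serror_pc_rollback() is invented for illustration.

/* Sketch only: the commit inlines this logic in fixup_guest_exit(). */
static inline void __serror_pc_rollback(struct kvm_vcpu *vcpu, u64 exit_code)
{
	u8 esr_ec;

	/* Only act if an SError became pending across the guest exit. */
	if (!ARM_SERROR_PENDING(exit_code))
		return;

	esr_ec = kvm_vcpu_trap_get_class(vcpu);

	/*
	 * An HVC exit already has its preferred return address pointing
	 * past the HVC instruction, so rewind ELR_EL2 by one instruction
	 * before the SError is injected. SMC, on the other hand, is
	 * trapped, so its preferred return address is the SMC itself and
	 * needs no adjustment.
	 */
	if (esr_ec == ESR_ELx_EC_HVC32 || esr_ec == ESR_ELx_EC_HVC64)
		write_sysreg_el2(read_sysreg_el2(SYS_ELR) - 4, SYS_ELR);
}

Doing the rollback at EL2 keeps the guest's PC out of the potentially untrusted EL1 kernel, which is the point of the move.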
arch/arm64/kvm/handle_exit.c
@@ -241,23 +241,6 @@ int handle_exit(struct kvm_vcpu *vcpu, int exception_index)
 {
 	struct kvm_run *run = vcpu->run;
 
-	if (ARM_SERROR_PENDING(exception_index)) {
-		u8 esr_ec = ESR_ELx_EC(kvm_vcpu_get_esr(vcpu));
-
-		/*
-		 * HVC already have an adjusted PC, which we need to
-		 * correct in order to return to after having injected
-		 * the SError.
-		 *
-		 * SMC, on the other hand, is *trapped*, meaning its
-		 * preferred return address is the SMC itself.
-		 */
-		if (esr_ec == ESR_ELx_EC_HVC32 || esr_ec == ESR_ELx_EC_HVC64)
-			*vcpu_pc(vcpu) -= 4;
-
-		return 1;
-	}
-
 	exception_index = ARM_EXCEPTION_CODE(exception_index);
 
 	switch (exception_index) {
arch/arm64/kvm/hyp/include/hyp/switch.h
@@ -411,6 +411,21 @@ static inline bool fixup_guest_exit(struct kvm_vcpu *vcpu, u64 *exit_code)
 	if (ARM_EXCEPTION_CODE(*exit_code) != ARM_EXCEPTION_IRQ)
 		vcpu->arch.fault.esr_el2 = read_sysreg_el2(SYS_ESR);
 
+	if (ARM_SERROR_PENDING(*exit_code)) {
+		u8 esr_ec = kvm_vcpu_trap_get_class(vcpu);
+
+		/*
+		 * HVC already have an adjusted PC, which we need to
+		 * correct in order to return to after having injected
+		 * the SError.
+		 *
+		 * SMC, on the other hand, is *trapped*, meaning its
+		 * preferred return address is the SMC itself.
+		 */
+		if (esr_ec == ESR_ELx_EC_HVC32 || esr_ec == ESR_ELx_EC_HVC64)
+			write_sysreg_el2(read_sysreg_el2(SYS_ELR) - 4, SYS_ELR);
+	}
+
 	/*
 	 * We're using the raw exception code in order to only process
 	 * the trap if no SError is pending. We will come back to the
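For context, a sketch of how fixup_guest_exit() is consumed: the hyp world-switch loop (__kvm_vcpu_run() in the upstream code of this era; shown here from memory, so the exact surrounding code may differ) re-enters the guest for as long as the exit can be fixed up entirely at EL2, which is now also where the HVC PC rollback happens.

	do {
		/* Enter the guest; returns the raw exit code. */
		exit_code = __guest_enter(vcpu);

		/* Loop back in if the exit was handled entirely at EL2. */
	} while (fixup_guest_exit(vcpu, &exit_code));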