/*
 * (not much of an) Emulation layer for 32bit guests.
 *
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * based on arch/arm/kvm/emulate.c
 * Copyright (C) 2012 - Virtual Open Systems and Columbia University
 * Author: Christoffer Dall <c.dall@virtualopensystems.com>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/kvm_host.h>
#include <asm/esr.h>
#include <asm/kvm_emulate.h>

/*
 * stolen from arch/arm/kernel/opcodes.c
 *
 * condition code lookup table
 * index into the table is test code: EQ, NE, ... LT, GT, AL, NV
 *
 * bit position in short is condition code: NZCV
 */
static const unsigned short cc_map[16] = {
	0xF0F0,			/* EQ == Z set            */
	0x0F0F,			/* NE                     */
	0xCCCC,			/* CS == C set            */
	0x3333,			/* CC                     */
	0xFF00,			/* MI == N set            */
	0x00FF,			/* PL                     */
	0xAAAA,			/* VS == V set            */
	0x5555,			/* VC                     */
	0x0C0C,			/* HI == C set && Z clear */
	0xF3F3,			/* LS == C clear || Z set */
	0xAA55,			/* GE == (N==V)           */
	0x55AA,			/* LT == (N!=V)           */
	0x0A05,			/* GT == (!Z && (N==V))   */
	0xF5FA,			/* LE == (Z || (N!=V))    */
	0xFFFF,			/* AL always              */
	0			/* NV                     */
};

static int kvm_vcpu_get_condition(const struct kvm_vcpu *vcpu)
{
	u32 esr = kvm_vcpu_get_hsr(vcpu);

	if (esr & ESR_ELx_CV)
		return (esr & ESR_ELx_COND_MASK) >> ESR_ELx_COND_SHIFT;

	return -1;
}

/*
 * Check if a trapped instruction should have been executed or not.
 */
bool kvm_condition_valid32(const struct kvm_vcpu *vcpu)
{
	unsigned long cpsr;
	u32 cpsr_cond;
	int cond;

	/* Top two bits non-zero?  Unconditional. */
	if (kvm_vcpu_get_hsr(vcpu) >> 30)
		return true;

	/* Is condition field valid? */
	cond = kvm_vcpu_get_condition(vcpu);
	if (cond == 0xE)
		return true;

	cpsr = *vcpu_cpsr(vcpu);

	if (cond < 0) {
		/* This can happen in Thumb mode: examine IT state. */
		unsigned long it;

		it = ((cpsr >> 8) & 0xFC) | ((cpsr >> 25) & 0x3);

		/* it == 0 => unconditional. */
		if (it == 0)
			return true;

		/* The cond for this insn works out as the top 4 bits. */
		cond = (it >> 4);
	}

	cpsr_cond = cpsr >> 28;

	if (!((cc_map[cond] >> cpsr_cond) & 1))
		return false;

	return true;
}
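/*
 * Illustrative sketch (not part of the original file): how the cc_map
 * lookup decides whether a condition passes. The flags N, Z, C and V
 * live in CPSR[31:28], and that 4-bit value selects one bit of the
 * 16-bit table entry. Example: GT is cond 0xC, entry 0x0A05; with
 * N=0, Z=0, C=1, V=0 the flags read 0b0010 = 2, and bit 2 of 0x0A05
 * is set, so the instruction passes, matching "!Z && (N==V)". The
 * helper name below is hypothetical, hence the #if 0 guard:
 */
#if 0
static bool __cond_passes(int cond, unsigned long cpsr)
{
	u32 nzcv = cpsr >> 28;			/* N:Z:C:V from CPSR[31:28] */

	return (cc_map[cond] >> nzcv) & 1;	/* one bit per flag combination */
}
#endif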
/**
 * kvm_adjust_itstate - adjust ITSTATE when emulating instructions in IT-block
 * @vcpu:	The VCPU pointer
 *
 * When exceptions occur while instructions are executed in Thumb IF-THEN
 * blocks, the ITSTATE field of the CPSR is not advanced (updated), so we have
 * to do this little bit of work manually. The fields map like this:
 *
 * IT[7:0] -> CPSR[26:25],CPSR[15:10]
 */
static void kvm_adjust_itstate(struct kvm_vcpu *vcpu)
{
	unsigned long itbits, cond;
	unsigned long cpsr = *vcpu_cpsr(vcpu);
	bool is_arm = !(cpsr & COMPAT_PSR_T_BIT);

	BUG_ON(is_arm && (cpsr & COMPAT_PSR_IT_MASK));

	if (!(cpsr & COMPAT_PSR_IT_MASK))
		return;

	cond = (cpsr & 0xe000) >> 13;
	itbits = (cpsr & 0x1c00) >> (10 - 2);
	itbits |= (cpsr & (0x3 << 25)) >> 25;

	/* Perform ITAdvance (see page A2-52 in ARM DDI 0406C) */
	if ((itbits & 0x7) == 0)
		itbits = cond = 0;
	else
		itbits = (itbits << 1) & 0x1f;

	cpsr &= ~COMPAT_PSR_IT_MASK;
	cpsr |= cond << 13;
	cpsr |= (itbits & 0x1c) << (10 - 2);
	cpsr |= (itbits & 0x3) << 25;
	*vcpu_cpsr(vcpu) = cpsr;
}

/**
 * kvm_skip_instr32 - skip a trapped instruction and proceed to the next
 * @vcpu:	The VCPU pointer
 * @is_wide_instr:	true if the trapped instruction was a 32-bit Thumb one
 */
void kvm_skip_instr32(struct kvm_vcpu *vcpu, bool is_wide_instr)
{
	bool is_thumb;

	is_thumb = !!(*vcpu_cpsr(vcpu) & COMPAT_PSR_T_BIT);
	if (is_thumb && !is_wide_instr)
		*vcpu_pc(vcpu) += 2;
	else
		*vcpu_pc(vcpu) += 4;
	kvm_adjust_itstate(vcpu);
}
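/*
 * Illustrative sketch (not part of the original file): ITAdvance on a
 * plain 8-bit ITSTATE value, mirroring what kvm_adjust_itstate() does
 * once it has reassembled IT[7:0] from the two CPSR fields. IT[7:5] is
 * the base condition, IT[4:0] the mask. For "ITT EQ" the initial state
 * is 0x04; one advance gives 0x08 (one EQ slot left), and the next
 * finds IT[2:0] == 0 and clears the block. The helper name below is
 * hypothetical, hence the #if 0 guard:
 */
#if 0
static u8 __it_advance(u8 itstate)
{
	if ((itstate & 0x7) == 0)
		return 0;	/* last instruction of the IT block */

	/* Keep the base condition, shift the mask up by one. */
	return (itstate & 0xe0) | ((itstate << 1) & 0x1f);
}
#endif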