#include <asm/cputype.h>
/* arm64 compatibility macros */
-#define COMPAT_PSR_MODE_ABT ABT_MODE
-#define COMPAT_PSR_MODE_UND UND_MODE
-#define COMPAT_PSR_T_BIT PSR_T_BIT
-#define COMPAT_PSR_I_BIT PSR_I_BIT
-#define COMPAT_PSR_A_BIT PSR_A_BIT
-#define COMPAT_PSR_E_BIT PSR_E_BIT
-#define COMPAT_PSR_IT_MASK PSR_IT_MASK
+#define PSR_AA32_MODE_ABT ABT_MODE
+#define PSR_AA32_MODE_UND UND_MODE
+#define PSR_AA32_T_BIT PSR_T_BIT
+#define PSR_AA32_I_BIT PSR_I_BIT
+#define PSR_AA32_A_BIT PSR_A_BIT
+#define PSR_AA32_E_BIT PSR_E_BIT
+#define PSR_AA32_IT_MASK PSR_IT_MASK
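/*
 * Illustrative sketch, not part of this diff: the PSR_AA32_* names are
 * assumed to carry the architectural AArch32 CPSR encodings (mode in
 * bits [4:0], flag bits above them). On arm64 they are expected to be
 * defined with these encodings in the ptrace headers; the block above
 * merely aliases them to the native macros on 32-bit ARM. USR (0x10) is
 * the numerically lowest mode, which is why the privileged-mode check
 * below can simply test "mode > PSR_AA32_MODE_USR".
 */
#if 0	/* reference values only, assumed from the ARM ARM */
#define PSR_AA32_MODE_MASK	0x0000001f
#define PSR_AA32_MODE_USR	0x00000010
#define PSR_AA32_MODE_FIQ	0x00000011
#define PSR_AA32_MODE_IRQ	0x00000012
#define PSR_AA32_MODE_SVC	0x00000013
#define PSR_AA32_MODE_ABT	0x00000017
#define PSR_AA32_MODE_UND	0x0000001b
#define PSR_AA32_MODE_SYS	0x0000001f
#define PSR_AA32_T_BIT		0x00000020	/* Thumb execution state */
#define PSR_AA32_F_BIT		0x00000040	/* FIQ mask */
#define PSR_AA32_I_BIT		0x00000080	/* IRQ mask */
#define PSR_AA32_A_BIT		0x00000100	/* asynchronous abort mask */
#define PSR_AA32_E_BIT		0x00000200	/* big-endian data accesses */
#define PSR_AA32_IT_MASK	0x0600fc00	/* ITSTATE, bits [26:25] and [15:10] */
#endif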
unsigned long *vcpu_reg(struct kvm_vcpu *vcpu, u8 reg_num);
static inline void vcpu_set_thumb(struct kvm_vcpu *vcpu)
{
- *vcpu_cpsr(vcpu) |= COMPAT_PSR_T_BIT;
+ *vcpu_cpsr(vcpu) |= PSR_AA32_T_BIT;
}
u32 mode;
if (vcpu_mode_is_32bit(vcpu)) {
- mode = *vcpu_cpsr(vcpu) & COMPAT_PSR_MODE_MASK;
- return mode > COMPAT_PSR_MODE_USR;
+ mode = *vcpu_cpsr(vcpu) & PSR_AA32_MODE_MASK;
+ return mode > PSR_AA32_MODE_USR;
}
mode = *vcpu_cpsr(vcpu) & PSR_MODE_MASK;
static inline void kvm_vcpu_set_be(struct kvm_vcpu *vcpu)
{
if (vcpu_mode_is_32bit(vcpu)) {
- *vcpu_cpsr(vcpu) |= COMPAT_PSR_E_BIT;
+ *vcpu_cpsr(vcpu) |= PSR_AA32_E_BIT;
} else {
u64 sctlr = vcpu_read_sys_reg(vcpu, SCTLR_EL1);
sctlr |= (1 << 25);
static inline bool kvm_vcpu_is_be(struct kvm_vcpu *vcpu)
{
if (vcpu_mode_is_32bit(vcpu))
- return !!(*vcpu_cpsr(vcpu) & COMPAT_PSR_E_BIT);
+ return !!(*vcpu_cpsr(vcpu) & PSR_AA32_E_BIT);
return !!(vcpu_read_sys_reg(vcpu, SCTLR_EL1) & (1 << 25));
}
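/*
 * Hypothetical usage sketch, not part of this diff: the (1 << 25) set and
 * tested in the two helpers above is SCTLR_EL1.EE, the guest's EL1 data
 * endianness bit. MMIO emulation can use kvm_vcpu_is_be() to decide
 * whether guest data needs a byte swap before the host consumes it; the
 * helper name below is made up for illustration, swab32() is the kernel's
 * standard 32-bit byte-swap.
 */
static inline u32 demo_data_guest_to_host32(struct kvm_vcpu *vcpu, u32 data)
{
	return kvm_vcpu_is_be(vcpu) ? swab32(data) : data;
}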
}
if (off == KVM_REG_ARM_CORE_REG(regs.pstate)) {
- u32 mode = (*(u32 *)valp) & COMPAT_PSR_MODE_MASK;
+ u32 mode = (*(u32 *)valp) & PSR_AA32_MODE_MASK;
switch (mode) {
- case COMPAT_PSR_MODE_USR:
- case COMPAT_PSR_MODE_FIQ:
- case COMPAT_PSR_MODE_IRQ:
- case COMPAT_PSR_MODE_SVC:
- case COMPAT_PSR_MODE_ABT:
- case COMPAT_PSR_MODE_UND:
+ case PSR_AA32_MODE_USR:
+ case PSR_AA32_MODE_FIQ:
+ case PSR_AA32_MODE_IRQ:
+ case PSR_AA32_MODE_SVC:
+ case PSR_AA32_MODE_ABT:
+ case PSR_AA32_MODE_UND:
case PSR_MODE_EL0t:
case PSR_MODE_EL1t:
case PSR_MODE_EL1h:
static bool __hyp_text __is_be(struct kvm_vcpu *vcpu)
{
if (vcpu_mode_is_32bit(vcpu))
- return !!(read_sysreg_el2(spsr) & COMPAT_PSR_E_BIT);
+ return !!(read_sysreg_el2(spsr) & PSR_AA32_E_BIT);
return !!(read_sysreg(SCTLR_EL1) & SCTLR_ELx_EE);
}
unsigned long *vcpu_reg32(const struct kvm_vcpu *vcpu, u8 reg_num)
{
unsigned long *reg_array = (unsigned long *)&vcpu->arch.ctxt.gp_regs.regs;
- unsigned long mode = *vcpu_cpsr(vcpu) & COMPAT_PSR_MODE_MASK;
+ unsigned long mode = *vcpu_cpsr(vcpu) & PSR_AA32_MODE_MASK;
switch (mode) {
- case COMPAT_PSR_MODE_USR ... COMPAT_PSR_MODE_SVC:
+ case PSR_AA32_MODE_USR ... PSR_AA32_MODE_SVC:
mode &= ~PSR_MODE32_BIT; /* 0 ... 3 */
break;
- case COMPAT_PSR_MODE_ABT:
+ case PSR_AA32_MODE_ABT:
mode = 4;
break;
- case COMPAT_PSR_MODE_UND:
+ case PSR_AA32_MODE_UND:
mode = 5;
break;
- case COMPAT_PSR_MODE_SYS:
+ case PSR_AA32_MODE_SYS:
mode = 0; /* SYS maps to USR */
break;
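/*
 * Note (illustrative, not part of this diff): the switch above folds the
 * AArch32 mode into a small row index; the function then returns
 * reg_array plus a per-mode offset looked up in a table
 * (vcpu_reg_offsets[mode][reg_num]), so banked registers such as r13_svc
 * or r14_irq resolve to the right slot in the saved register file.
 */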
/* Map the current AArch32 mode to its banked SPSR index. */
static int vcpu_spsr32_mode(const struct kvm_vcpu *vcpu)
{
- unsigned long mode = *vcpu_cpsr(vcpu) & COMPAT_PSR_MODE_MASK;
+ unsigned long mode = *vcpu_cpsr(vcpu) & PSR_AA32_MODE_MASK;
switch (mode) {
- case COMPAT_PSR_MODE_SVC: return KVM_SPSR_SVC;
- case COMPAT_PSR_MODE_ABT: return KVM_SPSR_ABT;
- case COMPAT_PSR_MODE_UND: return KVM_SPSR_UND;
- case COMPAT_PSR_MODE_IRQ: return KVM_SPSR_IRQ;
- case COMPAT_PSR_MODE_FIQ: return KVM_SPSR_FIQ;
+ case PSR_AA32_MODE_SVC: return KVM_SPSR_SVC;
+ case PSR_AA32_MODE_ABT: return KVM_SPSR_ABT;
+ case PSR_AA32_MODE_UND: return KVM_SPSR_UND;
+ case PSR_AA32_MODE_IRQ: return KVM_SPSR_IRQ;
+ case PSR_AA32_MODE_FIQ: return KVM_SPSR_FIQ;
default: BUG();
}
}
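/*
 * Hypothetical usage sketch, not part of this diff: the index returned by
 * vcpu_spsr32_mode() selects one of the banked AArch32 SPSRs. A consumer
 * might look roughly like this, with "spsr" standing in for wherever the
 * banked SPSR values are actually stored.
 */
static unsigned long demo_read_spsr32(const struct kvm_vcpu *vcpu,
				      const unsigned long *spsr)
{
	return spsr[vcpu_spsr32_mode(vcpu)];
}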
};
static const struct kvm_regs default_regs_reset32 = {
- .regs.pstate = (COMPAT_PSR_MODE_SVC | COMPAT_PSR_A_BIT |
- COMPAT_PSR_I_BIT | COMPAT_PSR_F_BIT),
+ .regs.pstate = (PSR_AA32_MODE_SVC | PSR_AA32_A_BIT |
+ PSR_AA32_I_BIT | PSR_AA32_F_BIT),
};
static bool cpu_has_32bit_el1(void)
{
unsigned long itbits, cond;
unsigned long cpsr = *vcpu_cpsr(vcpu);
- bool is_arm = !(cpsr & COMPAT_PSR_T_BIT);
+ bool is_arm = !(cpsr & PSR_AA32_T_BIT);
- if (is_arm || !(cpsr & COMPAT_PSR_IT_MASK))
+ if (is_arm || !(cpsr & PSR_AA32_IT_MASK))
return;
cond = (cpsr & 0xe000) >> 13;
else
itbits = (itbits << 1) & 0x1f;
- cpsr &= ~COMPAT_PSR_IT_MASK;
+ cpsr &= ~PSR_AA32_IT_MASK;
cpsr |= cond << 13;
cpsr |= (itbits & 0x1c) << (10 - 2);
cpsr |= (itbits & 0x3) << 25;
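/*
 * Worked note (illustrative, not part of this diff): the AArch32 ITSTATE
 * field is split across CPSR bits [26:25] (IT[1:0]) and [15:10] (IT[7:2]),
 * with IT[7:5] holding the base condition and IT[4:0] the mask. That is
 * why the code above pulls "cond" out of bits [15:13] (0xe000 >> 13) and
 * reassembles the advanced mask with the << (10 - 2) and << 25 shifts
 * after clearing PSR_AA32_IT_MASK.
 */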
{
bool is_thumb;
- is_thumb = !!(*vcpu_cpsr(vcpu) & COMPAT_PSR_T_BIT);
+ is_thumb = !!(*vcpu_cpsr(vcpu) & PSR_AA32_T_BIT);
if (is_thumb && !is_wide_instr)
*vcpu_pc(vcpu) += 2;
else
{
unsigned long cpsr;
unsigned long new_spsr_value = *vcpu_cpsr(vcpu);
- bool is_thumb = (new_spsr_value & COMPAT_PSR_T_BIT);
+ bool is_thumb = (new_spsr_value & PSR_AA32_T_BIT);
u32 return_offset = return_offsets[vect_offset >> 2][is_thumb];
u32 sctlr = vcpu_cp15(vcpu, c1_SCTLR);
- cpsr = mode | COMPAT_PSR_I_BIT;
+ cpsr = mode | PSR_AA32_I_BIT;
if (sctlr & (1 << 30))
- cpsr |= COMPAT_PSR_T_BIT;
+ cpsr |= PSR_AA32_T_BIT;
if (sctlr & (1 << 25))
- cpsr |= COMPAT_PSR_E_BIT;
+ cpsr |= PSR_AA32_E_BIT;
*vcpu_cpsr(vcpu) = cpsr;
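/*
 * Note (illustrative, not part of this diff): the SCTLR bits tested above
 * are TE (bit 30, take exceptions in Thumb state) and EE (bit 25,
 * exception endianness), so the CPSR built for the 32-bit exception entry
 * picks up PSR_AA32_T_BIT and/or PSR_AA32_E_BIT to match the guest's own
 * configuration, on top of the requested mode and the masked IRQs
 * (PSR_AA32_I_BIT).
 */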
void kvm_inject_undef32(struct kvm_vcpu *vcpu)
{
- prepare_fault32(vcpu, COMPAT_PSR_MODE_UND, 4);
+ prepare_fault32(vcpu, PSR_AA32_MODE_UND, 4);
}
fsr = &vcpu_cp15(vcpu, c5_DFSR);
}
- prepare_fault32(vcpu, COMPAT_PSR_MODE_ABT | COMPAT_PSR_A_BIT, vect_offset);
+ prepare_fault32(vcpu, PSR_AA32_MODE_ABT | PSR_AA32_A_BIT, vect_offset);
*far = addr;