	vmx_set_constant_host_state(vmx);
}

-static void prepare_vmcs02_early_full(struct vcpu_vmx *vmx,
+static void prepare_vmcs02_early_rare(struct vcpu_vmx *vmx,
				      struct vmcs12 *vmcs12)
{
	prepare_vmcs02_constant_state(vmx);
	u64 guest_efer = nested_vmx_calc_efer(vmx, vmcs12);

	if (vmx->nested.dirty_vmcs12 || vmx->nested.hv_evmcs)
- prepare_vmcs02_early_full(vmx, vmcs12);
+ prepare_vmcs02_early_rare(vmx, vmcs12);
	/*
	 * PIN CONTROLS
	}
}

-static void prepare_vmcs02_full(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12)
+static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12)
{
	struct hv_enlightened_vmcs *hv_evmcs = vmx->nested.hv_evmcs;
	struct vcpu_vmx *vmx = to_vmx(vcpu);

	if (vmx->nested.dirty_vmcs12 || vmx->nested.hv_evmcs) {
- prepare_vmcs02_full(vmx, vmcs12);
+ prepare_vmcs02_rare(vmx, vmcs12);
		vmx->nested.dirty_vmcs12 = false;
	}