Searched refs:is_smm (Results 1 – 10 of 10) sorted by relevance
149 static inline bool is_smm(struct kvm_vcpu *vcpu) in is_smm() function
160 static inline bool is_smm(struct kvm_vcpu *vcpu) { return false; } in is_smm() function
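The two definitions above are presumably the CONFIG_KVM_SMM split in arch/x86/kvm/smm.h: the real test reads the vCPU's hidden SMM flag, while the stub hardwires false so every caller below compiles away when SMM emulation is built out. A sketch of the assumed context (~v6.2 tree):

    #ifdef CONFIG_KVM_SMM
    static inline bool is_smm(struct kvm_vcpu *vcpu)
    {
            /* HF_SMM_MASK is set on SMM entry and cleared by RSM. */
            return vcpu->arch.hflags & HF_SMM_MASK;
    }
    #else
    /* Without CONFIG_KVM_SMM the vCPU can never be in SMM. */
    static inline bool is_smm(struct kvm_vcpu *vcpu) { return false; }
    #endif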
236 return !is_smm(vcpu) && in kvm_apic_init_sipi_allowed()
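Hit 236 is the INIT/SIPI gate, presumably the static inline in arch/x86/kvm/lapic.h: INIT and SIPI are never delivered while the vCPU is in SMM, and otherwise the decision is deferred to the vendor callback. A hedged reconstruction, assuming the ~v6.2 static_call form:

    static inline bool kvm_apic_init_sipi_allowed(struct kvm_vcpu *vcpu)
    {
            return !is_smm(vcpu) &&
                   !static_call(kvm_x86_apic_init_signal_blocked)(vcpu);
    }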
230 bool (*is_smm)(struct x86_emulate_ctxt *ctxt); member
5404 events->smi.smm = is_smm(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
8571 return is_smm(emul_to_vcpu(ctxt)); in emulator_is_smm()
8655 .is_smm = emulator_is_smm,
10168 if (is_smm(vcpu)) in post_kvm_run_save()
12415 WARN_ON_ONCE(is_guest_mode(vcpu) || is_smm(vcpu)); in kvm_vcpu_reset()
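Hits 8571 and 8655, together with the function-pointer member at 230 above (kvm_emulate.h), are presumably the emulator glue in arch/x86/kvm/x86.c: the instruction emulator never sees struct kvm_vcpu directly, so x86.c wraps is_smm() behind an x86_emulate_ops callback. A sketch of the assumed wiring:

    static bool emulator_is_smm(struct x86_emulate_ctxt *ctxt)
    {
            return is_smm(emul_to_vcpu(ctxt));
    }

    static const struct x86_emulate_ops emulate_ops = {
            /* ... many other callbacks elided ... */
            .is_smm = emulator_is_smm,
            /* ... */
    };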
2322 if (!ctxt->ops->is_smm(ctxt)) in em_rsm()
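Hit 2322 is the consumer of that callback: RSM is only legal inside SMM, so em_rsm() raises #UD otherwise and triple-faults if leaving SMM fails. Roughly, assuming the ~v6.2 emulate.c after the SMM code moved to smm.c:

    static int em_rsm(struct x86_emulate_ctxt *ctxt)
    {
            if (!ctxt->ops->is_smm(ctxt))
                    return emulate_ud(ctxt);

            if (ctxt->ops->leave_smm(ctxt))
                    ctxt->ops->triple_fault(ctxt);

            return emulator_recalc_and_set_mode(ctxt);
    }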
855 if (is_smm(vcpu)) { in nested_svm_vmrun()
1685 if (is_smm(vcpu) && (kvm_state->flags & KVM_STATE_NESTED_GUEST_MODE)) in svm_set_nested_state()
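Both hits are presumably in arch/x86/kvm/svm/nested.c: VMRUN is not permitted inside SMM, and userspace-restored nested state that claims guest mode while in SMM is rejected as inconsistent. A hedged sketch of the two checks:

    /* In nested_svm_vmrun(): VMRUN inside SMM raises #UD. */
    if (is_smm(vcpu)) {
            kvm_queue_exception(vcpu, UD_VECTOR);
            return 1;
    }

    /* In svm_set_nested_state(): SMM and nested guest mode are
     * mutually exclusive on SVM. */
    if (is_smm(vcpu) && (kvm_state->flags & KVM_STATE_NESTED_GUEST_MODE))
            return -EINVAL;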
325 if (!is_smm(vcpu)) in svm_set_efer()
4650 return is_smm(vcpu); in svm_smi_blocked()
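The svm.c hits (presumably arch/x86/kvm/svm/svm.c): svm_set_efer() keeps nested state allocated when EFER.SVME is cleared inside SMM, since the guest returns to it after RSM, while svm_smi_blocked() also honors GIF. A hedged sketch of the latter:

    static bool svm_smi_blocked(struct kvm_vcpu *vcpu)
    {
            struct vcpu_svm *svm = to_svm(vcpu);

            /* Per AMD APM vol. 2, 15.22.2 "Response to SMI": SMIs
             * are blocked while GIF is clear, and while the vCPU is
             * already in SMM. */
            if (!gif_set(svm))
                    return true;

            return is_smm(vcpu);
    }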
4303 if (vcpu->arch.smi_pending && !is_smm(vcpu)) { in vmx_check_nested_events()
6777 if (is_smm(vcpu) ? in vmx_set_nested_state()
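The two nested-VMX hits (presumably arch/x86/kvm/vmx/nested.c): a pending SMI while L2 runs forces a nested VM-exit to L1 before injection, and vmx_set_nested_state() cross-checks the SMM flags in the uAPI. A sketch of the first check, with block_nested_events assumed from the surrounding function:

    /* In vmx_check_nested_events(): an SMI arriving while L2 runs
     * needs a VM-exit to L1 first, unless a nested VMLAUNCH or
     * VMRESUME is already committed. */
    if (vcpu->arch.smi_pending && !is_smm(vcpu)) {
            if (block_nested_events)
                    return -EBUSY;
            goto no_vmexit;
    }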
3453 if ((cr4 & X86_CR4_VMXE) && is_smm(vcpu)) in vmx_is_valid_cr4()
8199 return !is_smm(vcpu); in vmx_smi_allowed()
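The vmx.c hits (presumably arch/x86/kvm/vmx/vmx.c) encode two rules: under KVM's default treatment of SMM, CR4.VMXE cannot be set while in SMM, and an SMI can only be taken when the vCPU is not already in SMM. Hedged sketch of both sites:

    /* In vmx_is_valid_cr4(): VMX cannot be enabled from SMM. */
    if ((cr4 & X86_CR4_VMXE) && is_smm(vcpu))
            return false;

    /* In vmx_smi_allowed(): SMIs are blocked while in SMM. */
    return !is_smm(vcpu);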
5413 role.base.smm = is_smm(vcpu); in kvm_calc_cpu_role()
5611 WARN_ON_ONCE(is_smm(vcpu)); in kvm_calc_shadow_ept_root_page_role()
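The MMU hits (presumably arch/x86/kvm/mmu/mmu.c) show why is_smm() matters for paging: SMM has its own address space, so the bit is folded into the page role and SMM and non-SMM code never share shadow pages, while shadow EPT roots can never carry it because KVM does not implement SMM-transfer monitors. A loose sketch combining the two sites:

    /* In kvm_calc_cpu_role(): the SMM bit is part of the role, so
     * SMM gets distinct shadow roots. */
    union kvm_cpu_role role = {0};

    role.base.access = ACC_ALL;
    role.base.smm = is_smm(vcpu);

    /* In kvm_calc_shadow_ept_root_page_role(): no SMM-transfer
     * monitor support, so an EPT root is never built from SMM. */
    WARN_ON_ONCE(is_smm(vcpu));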