Lines Matching refs: vcpu_svm

244 struct vcpu_svm {  struct
349 void recalc_intercepts(struct vcpu_svm *svm); argument
395 static inline bool ghcb_gpa_is_registered(struct vcpu_svm *svm, u64 val) in ghcb_gpa_is_registered()
421 static __always_inline struct vcpu_svm *to_svm(struct kvm_vcpu *vcpu) in to_svm()
423 return container_of(vcpu, struct vcpu_svm, vcpu); in to_svm()
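The to_svm() entries above show the usual container_of() conversion from the generic struct kvm_vcpu embedded inside struct vcpu_svm back to the containing SVM-specific structure. A minimal, self-contained sketch of the same pattern follows; the struct layouts and the vmcb_pa field are simplified stand-ins for illustration, not the kernel's real definitions.

#include <stddef.h>

/* Simplified stand-ins for the kernel types; the fields are illustrative only. */
struct kvm_vcpu {
	int vcpu_id;
};

struct vcpu_svm {
	struct kvm_vcpu vcpu;   /* generic vCPU state embedded as a member */
	unsigned long vmcb_pa;  /* hypothetical SVM-specific field */
};

/* container_of(): recover a pointer to the outer struct from a member pointer. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static inline struct vcpu_svm *to_svm(struct kvm_vcpu *vcpu)
{
	return container_of(vcpu, struct vcpu_svm, vcpu);
}

Because the conversion is pure pointer arithmetic, to_svm() costs nothing at run time, which is why it is marked __always_inline in the listing above.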
460 static inline void set_exception_intercept(struct vcpu_svm *svm, u32 bit) in set_exception_intercept()
470 static inline void clr_exception_intercept(struct vcpu_svm *svm, u32 bit) in clr_exception_intercept()
480 static inline void svm_set_intercept(struct vcpu_svm *svm, int bit) in svm_set_intercept()
489 static inline void svm_clr_intercept(struct vcpu_svm *svm, int bit) in svm_clr_intercept()
498 static inline bool svm_is_intercept(struct vcpu_svm *svm, int bit) in svm_is_intercept()
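The set/clr/is intercept helpers above toggle and test intercept bits in the VMCB control area, with recalc_intercepts() (line 349) propagating changes when a nested guest is active. A standalone sketch of the underlying bit-array manipulation follows; the array size and struct name are assumptions for illustration.

#include <stdint.h>
#include <stdbool.h>

#define INTERCEPT_WORDS 5   /* illustrative array size, not the kernel's constant */

struct vmcb_control_sketch {
	uint32_t intercepts[INTERCEPT_WORDS];
};

static inline void sketch_set_intercept(struct vmcb_control_sketch *c, unsigned int bit)
{
	c->intercepts[bit / 32] |= 1u << (bit % 32);
}

static inline void sketch_clr_intercept(struct vmcb_control_sketch *c, unsigned int bit)
{
	c->intercepts[bit / 32] &= ~(1u << (bit % 32));
}

static inline bool sketch_is_intercept(const struct vmcb_control_sketch *c, unsigned int bit)
{
	return c->intercepts[bit / 32] & (1u << (bit % 32));
}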
503 static inline bool nested_vgif_enabled(struct vcpu_svm *svm) in nested_vgif_enabled()
509 static inline struct vmcb *get_vgif_vmcb(struct vcpu_svm *svm) in get_vgif_vmcb()
520 static inline void enable_gif(struct vcpu_svm *svm) in enable_gif()
530 static inline void disable_gif(struct vcpu_svm *svm) in disable_gif()
540 static inline bool gif_set(struct vcpu_svm *svm) in gif_set()
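enable_gif(), disable_gif() and gif_set() track AMD's Global Interrupt Flag for the vCPU. Depending on whether hardware vGIF is in use (see nested_vgif_enabled()/get_vgif_vmcb() above), they update either a V_GIF bit in the VMCB or a software flag; the sketch below models only a software-flag fallback, and the flag name and struct are assumptions for illustration.

#include <stdbool.h>

#define SKETCH_GIF_MASK (1u << 0)   /* illustrative flag bit */

struct vcpu_sketch {
	unsigned int hflags;
};

static inline void enable_gif(struct vcpu_sketch *v)
{
	v->hflags |= SKETCH_GIF_MASK;
}

static inline void disable_gif(struct vcpu_sketch *v)
{
	v->hflags &= ~SKETCH_GIF_MASK;
}

static inline bool gif_set(const struct vcpu_sketch *v)
{
	return v->hflags & SKETCH_GIF_MASK;
}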
550 static inline bool nested_npt_enabled(struct vcpu_svm *svm) in nested_npt_enabled()
555 static inline bool nested_vnmi_enabled(struct vcpu_svm *svm) in nested_vnmi_enabled()
570 static inline struct vmcb *get_vnmi_vmcb_l1(struct vcpu_svm *svm) in get_vnmi_vmcb_l1()
581 static inline bool is_vnmi_enabled(struct vcpu_svm *svm) in is_vnmi_enabled()
609 void disable_nmi_singlestep(struct vcpu_svm *svm);
613 void svm_set_gif(struct vcpu_svm *svm, bool value);
617 void svm_set_x2apic_msr_interception(struct vcpu_svm *svm, bool disable);
629 struct vcpu_svm *svm = to_svm(vcpu); in nested_svm_virtualize_tpr()
634 static inline bool nested_exit_on_smi(struct vcpu_svm *svm) in nested_exit_on_smi()
639 static inline bool nested_exit_on_intr(struct vcpu_svm *svm) in nested_exit_on_intr()
644 static inline bool nested_exit_on_nmi(struct vcpu_svm *svm) in nested_exit_on_nmi()
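The nested_exit_on_smi/intr/nmi() predicates above answer whether an incoming event must be reflected to the L1 hypervisor as a nested #VMEXIT, which reduces to testing the corresponding intercept bit in a cached copy of L1's VMCB controls. A simplified standalone sketch of that decision follows; the constants and field names are assumptions, not the architectural encodings.

#include <stdbool.h>
#include <stdint.h>

/* Illustrative intercept bit numbers, not the real encodings. */
enum { SKETCH_INTERCEPT_INTR = 0, SKETCH_INTERCEPT_NMI = 1, SKETCH_INTERCEPT_SMI = 2 };

struct cached_ctl_sketch {
	uint32_t intercepts;    /* single word for simplicity */
};

static inline bool ctl_is_intercept(const struct cached_ctl_sketch *ctl, int bit)
{
	return ctl->intercepts & (1u << bit);
}

/* Should an NMI be forwarded to L1 as a nested exit? */
static inline bool sketch_nested_exit_on_nmi(const struct cached_ctl_sketch *l1_ctl)
{
	return ctl_is_intercept(l1_ctl, SKETCH_INTERCEPT_NMI);
}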
652 void svm_free_nested(struct vcpu_svm *svm);
653 int svm_allocate_nested(struct vcpu_svm *svm);
658 int nested_svm_vmexit(struct vcpu_svm *svm);
660 static inline int nested_svm_simple_vmexit(struct vcpu_svm *svm, u32 exit_code) in nested_svm_simple_vmexit()
668 int nested_svm_exit_handled(struct vcpu_svm *svm);
670 int nested_svm_check_exception(struct vcpu_svm *svm, unsigned nr,
672 int nested_svm_exit_special(struct vcpu_svm *svm);
675 void nested_copy_vmcb_control_to_cache(struct vcpu_svm *svm,
677 void nested_copy_vmcb_save_to_cache(struct vcpu_svm *svm,
679 void nested_sync_control_from_vmcb02(struct vcpu_svm *svm);
680 void nested_vmcb02_compute_g_pat(struct vcpu_svm *svm);
681 void svm_switch_vmcb(struct vcpu_svm *svm, struct kvm_vmcb_info *target_vmcb);
706 void avic_init_vmcb(struct vcpu_svm *svm, struct vmcb *vmcb);
709 int avic_init_vcpu(struct vcpu_svm *svm);
725 void pre_sev_run(struct vcpu_svm *svm, int cpu);
726 void sev_init_vmcb(struct vcpu_svm *svm);
727 void sev_vcpu_after_set_cpuid(struct vcpu_svm *svm);
728 int sev_es_string_io(struct vcpu_svm *svm, int size, unsigned int port, int in);
729 void sev_es_vcpu_reset(struct vcpu_svm *svm);
731 void sev_es_prepare_switch_to_guest(struct vcpu_svm *svm, struct sev_es_save_area *hostsa);
732 void sev_es_unmap_ghcb(struct vcpu_svm *svm);
801 void __svm_sev_es_vcpu_run(struct vcpu_svm *svm, bool spec_ctrl_intercepted,
803 void __svm_vcpu_run(struct vcpu_svm *svm, bool spec_ctrl_intercepted);
806 static __always_inline bool kvm_ghcb_##field##_is_valid(const struct vcpu_svm *svm) \
812 static __always_inline u64 kvm_ghcb_get_##field##_if_valid(struct vcpu_svm *svm, struct ghcb *ghcb) \
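The last two entries are lines from a token-pasting macro that stamps out one _is_valid() helper and one get_..._if_valid() helper per GHCB field, keyed off a validity bitmap the guest fills in. A self-contained sketch of the same code-generation pattern follows; the structures, field set, and bitmap layout are simplified assumptions, not the kernel's definitions.

#include <stdbool.h>
#include <stdint.h>

/* Simplified stand-ins: two GHCB save-area fields plus a guest-written validity bitmap. */
struct ghcb_save_sketch {
	uint64_t rax;
	uint64_t rbx;
};

struct ghcb_sketch {
	struct ghcb_save_sketch save;
	uint64_t valid_bitmap;   /* one bit per field */
};

enum { GHCB_IDX_rax = 0, GHCB_IDX_rbx = 1 };

/* Stamp out <field>_is_valid() and get_<field>_if_valid() for each field. */
#define DEFINE_GHCB_ACCESSORS(field)                                             \
static inline bool ghcb_##field##_is_valid(const struct ghcb_sketch *g)          \
{                                                                                \
	return g->valid_bitmap & (1ull << GHCB_IDX_##field);                     \
}                                                                                \
static inline uint64_t ghcb_get_##field##_if_valid(const struct ghcb_sketch *g)  \
{                                                                                \
	return ghcb_##field##_is_valid(g) ? g->save.field : 0;                   \
}

DEFINE_GHCB_ACCESSORS(rax)
DEFINE_GHCB_ACCESSORS(rbx)

Generating the accessors with a macro keeps the per-field validity check and the guarded read in one place, so callers never read a GHCB field the guest did not mark valid.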