/linux-6.12.1/arch/loongarch/kvm/
main.c
    17    static struct kvm_context __percpu *vmcs;    variable
    200   context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);    in kvm_update_vpid()
    229   context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);    in kvm_check_vpid()
    255   kvm->arch.vmcs = vmcs;    in kvm_init_vmcs()
    320   vmcs = alloc_percpu(struct kvm_context);    in kvm_loongarch_env_init()
    321   if (!vmcs) {    in kvm_loongarch_env_init()
    328   free_percpu(vmcs);    in kvm_loongarch_env_init()
    329   vmcs = NULL;    in kvm_loongarch_env_init()
    344   free_percpu(vmcs);    in kvm_loongarch_env_init()
    345   vmcs = NULL;    in kvm_loongarch_env_init()
    [all …]
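On LoongArch the identifier has nothing to do with Intel's VMCS: it names a per-cpu struct kvm_context shared by all VMs. A minimal sketch of the allocation pattern implied by the hits above follows; the exit helper name kvm_loongarch_env_exit() and the function signatures are assumptions, and error paths are trimmed.

    /* Sketch only: per-cpu context handling suggested by lines 255 and 320-345. */
    #include <linux/kvm_host.h>
    #include <linux/percpu.h>

    static struct kvm_context __percpu *vmcs;

    static int kvm_loongarch_env_init(void)
    {
            vmcs = alloc_percpu(struct kvm_context);        /* one slot per CPU */
            if (!vmcs)
                    return -ENOMEM;
            return 0;
    }

    static void kvm_loongarch_env_exit(void)               /* hypothetical name */
    {
            free_percpu(vmcs);                              /* mirrors lines 328/344 */
            vmcs = NULL;
    }

    static int kvm_init_vmcs(struct kvm *kvm)               /* signature assumed */
    {
            kvm->arch.vmcs = vmcs;                          /* line 255: VMs share the per-cpu array */
            return 0;
    }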
vcpu.c
    39    context = this_cpu_ptr(vcpu->kvm->arch.vmcs);    in kvm_save_host_pmu()
    54    context = this_cpu_ptr(vcpu->kvm->arch.vmcs);    in kvm_restore_host_pmu()
    1517  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);    in kvm_arch_vcpu_destroy()
    1539  context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);    in _kvm_vcpu_load()
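The vcpu.c hits show the two ways the per-cpu context is reached: this_cpu_ptr() on the current-CPU save/restore paths and per_cpu_ptr() where a specific CPU has to be named. A small illustrative helper (not in the tree) capturing both forms:

    /* Hypothetical helper, for illustration only. */
    static struct kvm_context *kvm_ctx_of(struct kvm_vcpu *vcpu, int cpu)
    {
            if (cpu < 0)
                    return this_cpu_ptr(vcpu->kvm->arch.vmcs);      /* lines 39/54 */
            return per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);          /* lines 1517/1539 */
    }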
/linux-6.12.1/arch/x86/kvm/vmx/
vmx_ops.h
    15    void vmclear_error(struct vmcs *vmcs, u64 phys_addr);
    16    void vmptrld_error(struct vmcs *vmcs, u64 phys_addr);
    287   static inline void vmcs_clear(struct vmcs *vmcs)    in vmcs_clear() argument
    289   u64 phys_addr = __pa(vmcs);    in vmcs_clear()
    291   vmx_asm1(vmclear, "m"(phys_addr), vmcs, phys_addr);    in vmcs_clear()
    294   static inline void vmcs_load(struct vmcs *vmcs)    in vmcs_load() argument
    296   u64 phys_addr = __pa(vmcs);    in vmcs_load()
    301   vmx_asm1(vmptrld, "m"(phys_addr), vmcs, phys_addr);    in vmcs_load()
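Both helpers hand the VMCS region's physical address to the corresponding instruction; the vmx_asm1() macro adds fault and VMfail fixups that end in vmclear_error()/vmptrld_error(). A stripped-down sketch of vmcs_clear() without that machinery, using plain inline assembly (my simplification, not the in-tree macro):

    static inline void vmcs_clear(struct vmcs *vmcs)
    {
            u64 phys_addr = __pa(vmcs);     /* VMCLEAR takes a physical address */
            bool fail;

            /* setna captures the CF/ZF "VMfail" conditions */
            asm volatile("vmclear %1; setna %0"
                         : "=qm"(fail) : "m"(phys_addr) : "cc", "memory");
            if (fail)
                    vmclear_error(vmcs, phys_addr);
    }

vmcs_load() follows the same pattern with vmptrld and vmptrld_error().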
vmcs.h
    21    struct vmcs {    struct
    27    DECLARE_PER_CPU(struct vmcs *, current_vmcs);    argument
    62    struct vmcs *vmcs;    member
    63    struct vmcs *shadow_vmcs;
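A condensed view of what these hits point at: struct vmcs is the opaque hardware region, current_vmcs tracks which region each CPU has loaded, and lines 62-63 sit inside struct loaded_vmcs, which pairs a vCPU's ordinary region with an optional shadow. Members not relevant to the naming are trimmed, so treat the omissions as approximations.

    struct vmcs {
            struct vmcs_hdr hdr;            /* revision id + shadow-VMCS indicator */
            u32 abort;
            char data[];                    /* layout is implementation-defined */
    };

    /* which VMCS region the CPU currently has loaded via VMPTRLD */
    DECLARE_PER_CPU(struct vmcs *, current_vmcs);

    struct loaded_vmcs {
            struct vmcs *vmcs;              /* line 62: the active region */
            struct vmcs *shadow_vmcs;       /* line 63: optional, for nested VMX */
            /* ... cpu, launched, controls shadow, per-cpu list linkage ... */
    };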
vmx.h
    595   static inline u##bits __##lname##_controls_get(struct loaded_vmcs *vmcs) \
    597   return vmcs->controls_shadow.lname; \
    703   struct vmcs *alloc_vmcs_cpu(bool shadow, int cpu, gfp_t flags);
    704   void free_vmcs(struct vmcs *vmcs);
    709   static inline struct vmcs *alloc_vmcs(bool shadow)    in alloc_vmcs()
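Line 595 belongs to a token-pasting macro that generates cached accessors for each VM-execution control field, and line 709 is the convenience allocator built on alloc_vmcs_cpu(). Expanded by hand for the pin-based controls; the lname/bits substitution and the flag/cpu choice in alloc_vmcs() are my assumptions.

    /* What the macro expands to for lname=pin, bits=32 (hand expansion). */
    static inline u32 __pin_controls_get(struct loaded_vmcs *vmcs)
    {
            return vmcs->controls_shadow.pin;       /* cached copy, avoids a VMREAD */
    }

    /* Plausible body of alloc_vmcs(): allocate for the current CPU. */
    static inline struct vmcs *alloc_vmcs(bool shadow)
    {
            return alloc_vmcs_cpu(shadow, raw_smp_processor_id(),
                                  GFP_KERNEL_ACCOUNT);
    }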
vmx.c
    468   noinline void vmclear_error(struct vmcs *vmcs, u64 phys_addr)    in vmclear_error() argument
    471   vmcs, phys_addr, vmcs_read32(VM_INSTRUCTION_ERROR));    in vmclear_error()
    474   noinline void vmptrld_error(struct vmcs *vmcs, u64 phys_addr)    in vmptrld_error() argument
    477   vmcs, phys_addr, vmcs_read32(VM_INSTRUCTION_ERROR));    in vmptrld_error()
    492   static DEFINE_PER_CPU(struct vmcs *, vmxarea);
    493   DEFINE_PER_CPU(struct vmcs *, current_vmcs);
    545   evmcs = (struct hv_enlightened_vmcs *)to_vmx(vcpu)->loaded_vmcs->vmcs;    in hv_enable_l2_tlb_flush()
    774   vmcs_clear(v->vmcs);    in vmx_emergency_disable_virtualization_cpu()
    786   if (per_cpu(current_vmcs, cpu) == loaded_vmcs->vmcs)    in __loaded_vmcs_clear()
    789   vmcs_clear(loaded_vmcs->vmcs);    in __loaded_vmcs_clear()
    [all …]
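current_vmcs (line 493) is the bookkeeping that lets __loaded_vmcs_clear() know whether the region being torn down is the one this CPU has loaded. Roughly, based on lines 786-789; locking, the early cpu check, list unlinking and the cpu/launched resets are left out, so this is a sketch rather than the in-tree body.

    static void __loaded_vmcs_clear(void *arg)      /* runs on the owning CPU */
    {
            struct loaded_vmcs *loaded_vmcs = arg;
            int cpu = raw_smp_processor_id();

            if (per_cpu(current_vmcs, cpu) == loaded_vmcs->vmcs)
                    per_cpu(current_vmcs, cpu) = NULL;      /* line 786: drop the stale pointer */

            vmcs_clear(loaded_vmcs->vmcs);                  /* line 789: VMCLEAR the region */
            /* the in-tree version also unlinks the entry from the per-cpu
             * loaded-VMCS list and resets ->cpu / ->launched */
    }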
nested.c
    295   static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)    in vmx_switch_vmcs() argument
    301   if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))    in vmx_switch_vmcs()
    306   vmx->loaded_vmcs = vmcs;    in vmx_switch_vmcs()
    1595  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;    in copy_shadow_to_vmcs12()
    1615  vmcs_load(vmx->loaded_vmcs->vmcs);    in copy_shadow_to_vmcs12()
    1630  struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;    in copy_vmcs12_to_shadow()
    1651  vmcs_load(vmx->loaded_vmcs->vmcs);    in copy_vmcs12_to_shadow()
    5252  static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)    in alloc_shadow_vmcs()
    5717  vmcs_load(vmx->loaded_vmcs->vmcs);    in handle_vmwrite()
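The repeated vmcs_load(vmx->loaded_vmcs->vmcs) hits are all the tail of the same pattern: temporarily make the shadow VMCS current so its fields can be accessed with VMREAD/VMWRITE, then restore the ordinary VMCS. A bare-bones sketch of the copy_shadow_to_vmcs12() flow, with field iteration and preemption handling elided and the name changed to mark it as illustrative:

    static void copy_shadow_to_vmcs12_sketch(struct vcpu_vmx *vmx)
    {
            struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;     /* line 1595 */

            vmcs_load(shadow_vmcs);                 /* shadow becomes the current VMCS */
            /* ... VMREAD each shadowed field into the vmcs12 software cache ... */
            vmcs_clear(shadow_vmcs);
            vmcs_load(vmx->loaded_vmcs->vmcs);      /* line 1615: switch back */
    }

copy_vmcs12_to_shadow() (lines 1630-1651) is the mirror image, writing fields into the shadow instead of reading them out.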
/linux-6.12.1/tools/perf/util/intel-pt-decoder/
intel-pt-decoder.c
    124   struct intel_pt_vmcs_info *(*findnew_vmcs_info)(void *data, uint64_t vmcs);
    167   uint64_t vmcs;    member
    2320  bool tsc, pip, vmcs, tma, psbend;    member
    2358  data->vmcs = true;    in intel_pt_vm_psb_lookahead_cb()
    2518  payload, decoder->packet.payload, vmcs_info->vmcs,    in intel_pt_translate_vm_tsc()
    2526  .vmcs = NO_VMCS,    in intel_pt_translate_vm_tsc_offset()
    2546  vmcs_info->vmcs, vmcs_info->tsc_offset);    in intel_pt_print_vmcs_info()
    2601  uint64_t vmcs;    in intel_pt_vm_tm_corr_tsc() local
    2629  vmcs = data->vmcs ? data->vmcs_packet.payload : decoder->vmcs;    in intel_pt_vm_tm_corr_tsc()
    2630  if (vmcs == NO_VMCS)    in intel_pt_vm_tm_corr_tsc()
    [all …]
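For VM time correlation the decoder keys everything off the VMCS address carried in VMCS packets: line 2629 prefers the VMCS found by the PSB+ lookahead and otherwise falls back to the last one remembered on the decoder, and the per-VMCS tsc_offset is what maps guest TSC values back to host time. A conceptual helper showing just that conversion (the function name is mine; the in-tree code around line 2518 also handles wrap and reliability checks):

    /* guest_tsc = host_tsc + tsc_offset, so removing the offset recovers
     * host time for a TSC value produced inside the guest. */
    static uint64_t vm_tsc_to_host(uint64_t guest_tsc,
                                   const struct intel_pt_vmcs_info *vmcs_info)
    {
            return guest_tsc - vmcs_info->tsc_offset;
    }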
intel-pt-decoder.h
    210   uint64_t vmcs;    member
    280   struct intel_pt_vmcs_info *(*findnew_vmcs_info)(void *data, uint64_t vmcs);
/linux-6.12.1/tools/testing/selftests/kvm/lib/x86_64/
vmx.c
    86    vmx->vmcs = (void *)vm_vaddr_alloc_page(vm);    in vcpu_alloc_vmx()
    87    vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs);    in vcpu_alloc_vmx()
    88    vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs);    in vcpu_alloc_vmx()
    163   *(uint32_t *)(vmx->vmcs) = vmcs_revision();    in load_vmcs()
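The selftest keeps three views of the same VMCS page: the guest virtual address, the host mapping used to poke it, and the guest physical address eventually handed to VMPTRLD. Before that load, the first 32 bits must be stamped with the revision identifier from IA32_VMX_BASIC, which is what line 163 does. Reassembled with comments; the surrounding vcpu_alloc_vmx()/load_vmcs() bodies are trimmed.

    /* vcpu_alloc_vmx(), lines 86-88 */
    vmx->vmcs = (void *)vm_vaddr_alloc_page(vm);              /* guest virtual page */
    vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs);   /* host-side mapping */
    vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs);   /* for vmclear/vmptrld */

    /* load_vmcs(), line 163: vmptrld rejects a region whose first dword
     * does not match the revision id reported by the CPU */
    *(uint32_t *)(vmx->vmcs) = vmcs_revision();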
/linux-6.12.1/tools/perf/util/
intel-pt.c
    317   u64 vmcs,    in intel_pt_findnew_vmcs() argument
    328   if (v->vmcs == vmcs)    in intel_pt_findnew_vmcs()
    331   if (vmcs < v->vmcs)    in intel_pt_findnew_vmcs()
    339   v->vmcs = vmcs;    in intel_pt_findnew_vmcs()
    350   static struct intel_pt_vmcs_info *intel_pt_findnew_vmcs_info(void *data, uint64_t vmcs)    in intel_pt_findnew_vmcs_info() argument
    355   if (!vmcs && !pt->dflt_tsc_offset)    in intel_pt_findnew_vmcs_info()
    358   return intel_pt_findnew_vmcs(&pt->vmcs_info, vmcs, pt->dflt_tsc_offset);    in intel_pt_findnew_vmcs_info()
    4053  u64 tsc_offset, vmcs;    in intel_pt_parse_vm_tm_corr_arg() local
    4073  vmcs = strtoull(p, &p, 0);    in intel_pt_parse_vm_tm_corr_arg()
    4076  if (!vmcs)    in intel_pt_parse_vm_tm_corr_arg()
    [all …]
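intel_pt_findnew_vmcs() is an ordered find-or-insert keyed by the VMCS address (hence the == and < comparisons at lines 328/331), seeding new entries with the default TSC offset parsed from the --vm-time-correlation argument at line 4073. A hedged reconstruction of that lookup shape follows; the rbtree plumbing and the rb_node member are assumptions on my part, only the vmcs/tsc_offset fields are confirmed by the listing.

    static struct intel_pt_vmcs_info *findnew_vmcs(struct rb_root *root,
                                                   u64 vmcs, u64 dflt_tsc_offset)
    {
            struct rb_node **p = &root->rb_node, *parent = NULL;
            struct intel_pt_vmcs_info *v;

            while (*p) {
                    parent = *p;
                    v = rb_entry(parent, struct intel_pt_vmcs_info, rb_node);
                    if (v->vmcs == vmcs)
                            return v;                       /* already tracked */
                    p = vmcs < v->vmcs ? &parent->rb_left : &parent->rb_right;
            }

            v = zalloc(sizeof(*v));
            if (v) {
                    v->vmcs = vmcs;
                    v->tsc_offset = dflt_tsc_offset;        /* from --vm-time-correlation */
                    rb_link_node(&v->rb_node, parent, p);
                    rb_insert_color(&v->rb_node, root);
            }
            return v;
    }

The wrapper at line 350 short-circuits when there is neither a VMCS nor a default offset (line 355), so traces without VM time correlation pay nothing for this lookup.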
/linux-6.12.1/arch/loongarch/include/asm/
kvm_host.h
    119   struct kvm_context __percpu *vmcs;    member
/linux-6.12.1/tools/testing/selftests/kvm/include/x86_64/
vmx.h
    507   void *vmcs;    member
evmcs.h
    251   static inline int evmcs_vmptrld(uint64_t vmcs_pa, void *vmcs)    in evmcs_vmptrld() argument
    256   current_evmcs = vmcs;    in evmcs_vmptrld()