Lines matching refs: kvm
30 void kvm_init_nested(struct kvm *kvm) in kvm_init_nested() argument
32 kvm->arch.nested_mmus = NULL; in kvm_init_nested()
33 kvm->arch.nested_mmus_size = 0; in kvm_init_nested()
36 static int init_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu) in init_nested_s2_mmu() argument
48 return kvm_init_stage2_mmu(kvm, mmu, kvm_get_pa_bits(kvm)); in init_nested_s2_mmu()
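The hits at lines 30-48 are the nested-MMU bring-up helpers. A minimal reconstruction of both; everything outside the listed lines (the tlb_vttbr seeding, the refcount reset) is an assumption about the elided body:

    void kvm_init_nested(struct kvm *kvm)
    {
    	/* No shadow stage-2 MMUs until the first vcpu needs one. */
    	kvm->arch.nested_mmus = NULL;
    	kvm->arch.nested_mmus_size = 0;
    }

    static int init_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu)
    {
    	/* Assumed elided setup: mark the slot as not yet bound to any
    	 * guest VTTBR, with no vcpu holding a reference. */
    	mmu->tlb_vttbr = VTTBR_CNP_BIT;
    	atomic_set(&mmu->refcnt, 0);

    	/* Line 48: back the shadow MMU with a stage-2 table covering
    	 * the host's full PA range. */
    	return kvm_init_stage2_mmu(kvm, mmu, kvm_get_pa_bits(kvm));
    }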
53 struct kvm *kvm = vcpu->kvm; in kvm_vcpu_init_nested() local
63 num_mmus = atomic_read(&kvm->online_vcpus) * S2_MMU_PER_VCPU; in kvm_vcpu_init_nested()
64 tmp = kvrealloc(kvm->arch.nested_mmus, in kvm_vcpu_init_nested()
65 size_mul(sizeof(*kvm->arch.nested_mmus), num_mmus), in kvm_vcpu_init_nested()
74 if (kvm->arch.nested_mmus != tmp) in kvm_vcpu_init_nested()
75 for (int i = 0; i < kvm->arch.nested_mmus_size; i++) in kvm_vcpu_init_nested()
78 for (int i = kvm->arch.nested_mmus_size; !ret && i < num_mmus; i++) in kvm_vcpu_init_nested()
79 ret = init_nested_s2_mmu(kvm, &tmp[i]); in kvm_vcpu_init_nested()
82 for (int i = kvm->arch.nested_mmus_size; i < num_mmus; i++) in kvm_vcpu_init_nested()
88 kvm->arch.nested_mmus_size = num_mmus; in kvm_vcpu_init_nested()
89 kvm->arch.nested_mmus = tmp; in kvm_vcpu_init_nested()
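Lines 53-89 all belong to kvm_vcpu_init_nested(), which grows the nested_mmus array to S2_MMU_PER_VCPU slots per online vcpu. A sketch of how the visible fragments fit together; the GFP flags, the back-pointer fixup and the failure path are assumptions:

    	struct kvm *kvm = vcpu->kvm;
    	struct kvm_s2_mmu *tmp;
    	int num_mmus, ret = 0;

    	num_mmus = atomic_read(&kvm->online_vcpus) * S2_MMU_PER_VCPU;
    	tmp = kvrealloc(kvm->arch.nested_mmus,
    			size_mul(sizeof(*kvm->arch.nested_mmus), num_mmus),
    			GFP_KERNEL_ACCOUNT | __GFP_ZERO);
    	if (!tmp)
    		return -ENOMEM;

    	/*
    	 * Lines 74-75: if kvrealloc() moved the array, each already
    	 * initialised page table still points at its old kvm_s2_mmu;
    	 * re-aim the back-pointers (assumed loop body).
    	 */
    	if (kvm->arch.nested_mmus != tmp)
    		for (int i = 0; i < kvm->arch.nested_mmus_size; i++)
    			tmp[i].pgt->mmu = &tmp[i];

    	/* Lines 78-79: initialise only the newly added tail. */
    	for (int i = kvm->arch.nested_mmus_size; !ret && i < num_mmus; i++)
    		ret = init_nested_s2_mmu(kvm, &tmp[i]);

    	if (ret) {
    		/* Line 82 onwards: unwind the slots just initialised. */
    		for (int i = kvm->arch.nested_mmus_size; i < num_mmus; i++)
    			kvm_free_stage2_pgd(&tmp[i]);
    		return ret;
    	}

    	/* Publish the array only once every slot is usable. */
    	kvm->arch.nested_mmus_size = num_mmus;
    	kvm->arch.nested_mmus = tmp;
    	return 0;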
300 return kvm_read_guest(vcpu->kvm, pa, desc, sizeof(*desc)); in read_guest_s2_desc()
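Line 300 is the callback used when KVM walks the guest hypervisor's own stage-2 tables held in guest memory; the signature below is an assumption based on the visible body:

    static int read_guest_s2_desc(phys_addr_t pa, u64 *desc, void *data)
    {
    	struct kvm_vcpu *vcpu = data;

    	/* Each descriptor is read straight out of guest memory. */
    	return kvm_read_guest(vcpu->kvm, pa, desc, sizeof(*desc));
    }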
483 struct kvm *kvm = kvm_s2_mmu_to_kvm(mmu); in compute_tlb_inval_range() local
489 if (!ttl || !kvm_has_feat(kvm, ID_AA64MMFR2_EL1, TTL, IMP)) { in compute_tlb_inval_range()
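Lines 483-489 show the interesting branch of compute_tlb_inval_range(): if the TLBI operand carries no TTL hint, or the VM does not expose FEAT_TTL (ID_AA64MMFR2_EL1.TTL), the translation level must be recovered another way. A hedged sketch; get_guest_mapping_ttl() and the address extraction are assumptions:

    	struct kvm *kvm = kvm_s2_mmu_to_kvm(mmu);
    	u8 ttl;

    	ttl = FIELD_GET(TLBI_TTL_MASK, val);

    	if (!ttl || !kvm_has_feat(kvm, ID_AA64MMFR2_EL1, TTL, IMP)) {
    		/* No usable TTL hint: walk the shadow stage-2 to guess
    		 * the mapping level of the target address instead. */
    		u64 addr = (val & GENMASK_ULL(35, 0)) << 12;

    		ttl = get_guest_mapping_ttl(mmu, addr);
    	}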
536 void kvm_s2_mmu_iterate_by_vmid(struct kvm *kvm, u16 vmid, in kvm_s2_mmu_iterate_by_vmid() argument
541 write_lock(&kvm->mmu_lock); in kvm_s2_mmu_iterate_by_vmid()
543 for (int i = 0; i < kvm->arch.nested_mmus_size; i++) { in kvm_s2_mmu_iterate_by_vmid()
544 struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i]; in kvm_s2_mmu_iterate_by_vmid()
553 write_unlock(&kvm->mmu_lock); in kvm_s2_mmu_iterate_by_vmid()
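Lines 536-553 give the skeleton of kvm_s2_mmu_iterate_by_vmid(), which applies a TLBI callback to every shadow MMU tagged with a given guest VMID. A sketch with the callback type, the validity check and the VMID comparison filled in as assumptions:

    void kvm_s2_mmu_iterate_by_vmid(struct kvm *kvm, u16 vmid,
    				const union tlbi_info *info,
    				void (*tlbi_callback)(struct kvm_s2_mmu *,
    						      const union tlbi_info *))
    {
    	write_lock(&kvm->mmu_lock);

    	for (int i = 0; i < kvm->arch.nested_mmus_size; i++) {
    		struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];

    		if (!kvm_s2_mmu_valid(mmu))
    			continue;

    		/* Assumed filter: only MMUs shadowing this guest VMID. */
    		if (vmid == get_vmid(mmu->tlb_vttbr))
    			tlbi_callback(mmu, info);
    	}

    	write_unlock(&kvm->mmu_lock);
    }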
558 struct kvm *kvm = vcpu->kvm; in lookup_s2_mmu() local
562 lockdep_assert_held_write(&kvm->mmu_lock); in lookup_s2_mmu()
585 for (int i = 0; i < kvm->arch.nested_mmus_size; i++) { in lookup_s2_mmu()
586 struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i]; in lookup_s2_mmu()
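lookup_s2_mmu() (lines 558-586) resolves the vcpu's current VTTBR_EL2/VTCR_EL2 pair to an already-populated shadow MMU, under the write lock asserted at line 562. A simplified sketch; the exact match criteria are assumptions:

    	struct kvm *kvm = vcpu->kvm;
    	u64 vttbr = vcpu_read_sys_reg(vcpu, VTTBR_EL2);
    	u64 vtcr = vcpu_read_sys_reg(vcpu, VTCR_EL2);

    	lockdep_assert_held_write(&kvm->mmu_lock);

    	for (int i = 0; i < kvm->arch.nested_mmus_size; i++) {
    		struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];

    		if (!kvm_s2_mmu_valid(mmu))
    			continue;

    		/* Assumed match: same guest translation regime,
    		 * ignoring the CnP bit of the base address. */
    		if ((mmu->tlb_vttbr & ~VTTBR_CNP_BIT) ==
    		    (vttbr & ~VTTBR_CNP_BIT) &&
    		    mmu->tlb_vtcr == vtcr)
    			return mmu;
    	}

    	return NULL;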
607 struct kvm *kvm = vcpu->kvm; in get_s2_mmu_nested() local
611 lockdep_assert_held_write(&vcpu->kvm->mmu_lock); in get_s2_mmu_nested()
622 for (i = kvm->arch.nested_mmus_next; in get_s2_mmu_nested()
623 i < (kvm->arch.nested_mmus_size + kvm->arch.nested_mmus_next); in get_s2_mmu_nested()
625 s2_mmu = &kvm->arch.nested_mmus[i % kvm->arch.nested_mmus_size]; in get_s2_mmu_nested()
633 kvm->arch.nested_mmus_next = (i + 1) % kvm->arch.nested_mmus_size; in get_s2_mmu_nested()
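Lines 607-633 are the allocation path of get_s2_mmu_nested(): when no existing shadow MMU matches, scan nested_mmus_size slots starting at nested_mmus_next, wrapping with the modulo at line 625, and recycle the first slot with no vcpu reference. The lines around the visible fragments are sketched as assumptions:

    	/* Lines 622-625: rotate the starting point so recycling wear
    	 * is spread evenly across the array. */
    	for (i = kvm->arch.nested_mmus_next;
    	     i < (kvm->arch.nested_mmus_size + kvm->arch.nested_mmus_next);
    	     i++) {
    		s2_mmu = &kvm->arch.nested_mmus[i % kvm->arch.nested_mmus_size];

    		if (atomic_read(&s2_mmu->refcnt) == 0)
    			break;
    	}

    	/* The array holds S2_MMU_PER_VCPU slots per vcpu (line 63), so
    	 * a free slot must exist; assumed sanity check. */
    	BUG_ON(atomic_read(&s2_mmu->refcnt));

    	/* Line 633: the next search starts just past this victim. */
    	kvm->arch.nested_mmus_next = (i + 1) % kvm->arch.nested_mmus_size;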
684 vcpu->arch.hw_mmu = &vcpu->kvm->arch.mmu; in kvm_vcpu_load_hw_mmu()
686 write_lock(&vcpu->kvm->mmu_lock); in kvm_vcpu_load_hw_mmu()
688 write_unlock(&vcpu->kvm->mmu_lock); in kvm_vcpu_load_hw_mmu()
702 if (kvm_is_nested_s2_mmu(vcpu->kvm, vcpu->arch.hw_mmu)) in kvm_vcpu_put_hw_mmu()
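Lines 684-702 pair up: on vcpu load, a vcpu in (virtual) EL2 context runs on the canonical stage-2 MMU, while a vEL1/vEL0 context takes a shadow MMU under the lock; on put, a nested MMU's reference is dropped. A sketch, with is_hyp_ctxt() as the assumed discriminator:

    void kvm_vcpu_load_hw_mmu(struct kvm_vcpu *vcpu)
    {
    	if (is_hyp_ctxt(vcpu)) {
    		/* Line 684: vEL2 runs on the canonical stage-2. */
    		vcpu->arch.hw_mmu = &vcpu->kvm->arch.mmu;
    	} else {
    		write_lock(&vcpu->kvm->mmu_lock);
    		vcpu->arch.hw_mmu = get_s2_mmu_nested(vcpu);
    		write_unlock(&vcpu->kvm->mmu_lock);
    	}
    }

    void kvm_vcpu_put_hw_mmu(struct kvm_vcpu *vcpu)
    {
    	/* Line 702: only shadow MMUs are refcounted. */
    	if (kvm_is_nested_s2_mmu(vcpu->kvm, vcpu->arch.hw_mmu))
    		atomic_dec(&vcpu->arch.hw_mmu->refcnt);
    }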
744 void kvm_nested_s2_wp(struct kvm *kvm) in kvm_nested_s2_wp() argument
748 lockdep_assert_held_write(&kvm->mmu_lock); in kvm_nested_s2_wp()
750 for (i = 0; i < kvm->arch.nested_mmus_size; i++) { in kvm_nested_s2_wp()
751 struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i]; in kvm_nested_s2_wp()
758 void kvm_nested_s2_unmap(struct kvm *kvm, bool may_block) in kvm_nested_s2_unmap() argument
762 lockdep_assert_held_write(&kvm->mmu_lock); in kvm_nested_s2_unmap()
764 for (i = 0; i < kvm->arch.nested_mmus_size; i++) { in kvm_nested_s2_unmap()
765 struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i]; in kvm_nested_s2_unmap()
772 void kvm_nested_s2_flush(struct kvm *kvm) in kvm_nested_s2_flush() argument
776 lockdep_assert_held_write(&kvm->mmu_lock); in kvm_nested_s2_flush()
778 for (i = 0; i < kvm->arch.nested_mmus_size; i++) { in kvm_nested_s2_flush()
779 struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i]; in kvm_nested_s2_flush()
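kvm_nested_s2_wp(), kvm_nested_s2_unmap() and kvm_nested_s2_flush() (lines 744-779) share one shape: assert the write lock, then apply a stage-2 range operation to every valid shadow MMU. One instance, using the unmap variant; kvm_s2_mmu_valid() and the kvm_stage2_*_range() helpers are assumed from context:

    void kvm_nested_s2_unmap(struct kvm *kvm, bool may_block)
    {
    	int i;

    	lockdep_assert_held_write(&kvm->mmu_lock);

    	for (i = 0; i < kvm->arch.nested_mmus_size; i++) {
    		struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];

    		/* Skip slots never bound to a guest VTTBR. */
    		if (kvm_s2_mmu_valid(mmu))
    			kvm_stage2_unmap_range(mmu, 0, kvm_phys_size(mmu),
    					       may_block);
    	}
    }

The wp and flush variants differ only in the range helper they call over the same 0..kvm_phys_size(mmu) span.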
786 void kvm_arch_flush_shadow_all(struct kvm *kvm) in kvm_arch_flush_shadow_all() argument
790 for (i = 0; i < kvm->arch.nested_mmus_size; i++) { in kvm_arch_flush_shadow_all()
791 struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i]; in kvm_arch_flush_shadow_all()
796 kvfree(kvm->arch.nested_mmus); in kvm_arch_flush_shadow_all()
797 kvm->arch.nested_mmus = NULL; in kvm_arch_flush_shadow_all()
798 kvm->arch.nested_mmus_size = 0; in kvm_arch_flush_shadow_all()
799 kvm_uninit_stage2_mmu(kvm); in kvm_arch_flush_shadow_all()
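kvm_arch_flush_shadow_all() (lines 786-799) is VM teardown: free each shadow page table, release the array, then uninitialise the canonical stage-2. Reconstructed, with the refcount check as an assumption:

    void kvm_arch_flush_shadow_all(struct kvm *kvm)
    {
    	int i;

    	for (i = 0; i < kvm->arch.nested_mmus_size; i++) {
    		struct kvm_s2_mmu *mmu = &kvm->arch.nested_mmus[i];

    		/* A live reference here would mean a vcpu still runs
    		 * on this MMU; assumed sanity check. */
    		if (!WARN_ON(atomic_read(&mmu->refcnt)))
    			kvm_free_stage2_pgd(mmu);
    	}
    	kvfree(kvm->arch.nested_mmus);
    	kvm->arch.nested_mmus = NULL;
    	kvm->arch.nested_mmus_size = 0;

    	kvm_uninit_stage2_mmu(kvm);
    }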
809 static void limit_nv_id_regs(struct kvm *kvm) in limit_nv_id_regs() argument
814 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64ISAR0_EL1); in limit_nv_id_regs()
816 kvm_set_vm_id_reg(kvm, SYS_ID_AA64ISAR0_EL1, val); in limit_nv_id_regs()
819 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64ISAR1_EL1); in limit_nv_id_regs()
822 kvm_set_vm_id_reg(kvm, SYS_ID_AA64ISAR1_EL1, val); in limit_nv_id_regs()
825 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64PFR0_EL1); in limit_nv_id_regs()
838 kvm_set_vm_id_reg(kvm, SYS_ID_AA64PFR0_EL1, val); in limit_nv_id_regs()
841 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64PFR1_EL1); in limit_nv_id_regs()
845 kvm_set_vm_id_reg(kvm, SYS_ID_AA64PFR1_EL1, val); in limit_nv_id_regs()
848 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64MMFR0_EL1); in limit_nv_id_regs()
891 kvm_set_vm_id_reg(kvm, SYS_ID_AA64MMFR0_EL1, val); in limit_nv_id_regs()
893 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64MMFR1_EL1); in limit_nv_id_regs()
900 kvm_set_vm_id_reg(kvm, SYS_ID_AA64MMFR1_EL1, val); in limit_nv_id_regs()
902 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64MMFR2_EL1); in limit_nv_id_regs()
912 kvm_set_vm_id_reg(kvm, SYS_ID_AA64MMFR2_EL1, val); in limit_nv_id_regs()
918 kvm_set_vm_id_reg(kvm, SYS_ID_AA64MMFR4_EL1, val); in limit_nv_id_regs()
921 val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64DFR0_EL1); in limit_nv_id_regs()
933 kvm_set_vm_id_reg(kvm, SYS_ID_AA64DFR0_EL1, val); in limit_nv_id_regs()
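Every hit in limit_nv_id_regs() (lines 814-933) is the same read-modify-write pattern on a VM-wide ID register: read the sanitised value, mask out features NV cannot virtualise, write it back. One hedged instance; the specific field cleared is illustrative, not taken from the listing:

    	u64 val;

    	/* Read the VM's view of the register... */
    	val = kvm_read_vm_id_reg(kvm, SYS_ID_AA64ISAR0_EL1);
    	/* ...hide a feature NV does not support (illustrative field)... */
    	val &= ~ID_AA64ISAR0_EL1_TME_MASK;
    	/* ...and make that the VM-wide value from now on. */
    	kvm_set_vm_id_reg(kvm, SYS_ID_AA64ISAR0_EL1, val);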
941 masks = vcpu->kvm->arch.sysreg_masks; in kvm_vcpu_sanitise_vncr_reg()
953 static void set_sysreg_masks(struct kvm *kvm, int sr, u64 res0, u64 res1) in set_sysreg_masks() argument
957 kvm->arch.sysreg_masks->mask[i].res0 = res0; in set_sysreg_masks()
958 kvm->arch.sysreg_masks->mask[i].res1 = res1; in set_sysreg_masks()
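set_sysreg_masks() (lines 953-958) records, per VNCR-mapped register, which bits are RES0/RES1 for this VM; kvm_vcpu_sanitise_vncr_reg() (line 941) later applies them. The index derivation below is an assumption:

    static void set_sysreg_masks(struct kvm *kvm, int sr, u64 res0, u64 res1)
    {
    	/* Assumed: the masks array is indexed by the register's offset
    	 * into the VNCR-mapped range. */
    	int i = sr - __VNCR_START__;

    	kvm->arch.sysreg_masks->mask[i].res0 = res0;
    	kvm->arch.sysreg_masks->mask[i].res1 = res1;
    }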
961 int kvm_init_nv_sysregs(struct kvm *kvm) in kvm_init_nv_sysregs() argument
965 lockdep_assert_held(&kvm->arch.config_lock); in kvm_init_nv_sysregs()
967 if (kvm->arch.sysreg_masks) in kvm_init_nv_sysregs()
970 kvm->arch.sysreg_masks = kzalloc(sizeof(*(kvm->arch.sysreg_masks)), in kvm_init_nv_sysregs()
972 if (!kvm->arch.sysreg_masks) in kvm_init_nv_sysregs()
975 limit_nv_id_regs(kvm); in kvm_init_nv_sysregs()
979 if (!kvm_has_feat_enum(kvm, ID_AA64MMFR1_EL1, VMIDBits, 16)) in kvm_init_nv_sysregs()
981 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, CnP, IMP)) in kvm_init_nv_sysregs()
983 set_sysreg_masks(kvm, VTTBR_EL2, res0, res1); in kvm_init_nv_sysregs()
988 set_sysreg_masks(kvm, VTCR_EL2, res0, res1); in kvm_init_nv_sysregs()
993 set_sysreg_masks(kvm, VMPIDR_EL2, res0, res1); in kvm_init_nv_sysregs()
998 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, TWED, IMP)) in kvm_init_nv_sysregs()
1000 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, MTE, MTE2)) in kvm_init_nv_sysregs()
1002 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, EVT, TTLBxS)) in kvm_init_nv_sysregs()
1004 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) && in kvm_init_nv_sysregs()
1005 !kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2)) in kvm_init_nv_sysregs()
1007 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, EVT, IMP)) in kvm_init_nv_sysregs()
1009 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, AMU, V1P1)) in kvm_init_nv_sysregs()
1011 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1)) in kvm_init_nv_sysregs()
1013 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, FWB, IMP)) in kvm_init_nv_sysregs()
1015 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, NV2)) in kvm_init_nv_sysregs()
1017 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, IMP)) in kvm_init_nv_sysregs()
1019 if (!(__vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_ADDRESS) && in kvm_init_nv_sysregs()
1020 __vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_GENERIC))) in kvm_init_nv_sysregs()
1022 if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TME, IMP)) in kvm_init_nv_sysregs()
1024 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, IMP)) in kvm_init_nv_sysregs()
1026 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, LO, IMP)) in kvm_init_nv_sysregs()
1028 if (!kvm_has_feat(kvm, ID_AA64MMFR4_EL1, E2H0, IMP)) in kvm_init_nv_sysregs()
1030 set_sysreg_masks(kvm, HCR_EL2, res0, res1); in kvm_init_nv_sysregs()
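The long run of kvm_has_feat() checks from line 998 to line 1028 all feed the single set_sysreg_masks() call at line 1030: each absent feature turns its HCR_EL2 control bits into RES0 for the guest hypervisor. The shape, with two of the visible checks spelled out (the bit names are assumptions):

    	/* Start from the architected RES0/RES1 of HCR_EL2, then grow
    	 * res0 with the bits of every feature this VM hides. */
    	res0 = HCR_EL2_RES0;
    	res1 = HCR_EL2_RES1;
    	if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, TWED, IMP))
    		res0 |= (HCR_EL2_TWEDEL | HCR_EL2_TWEDEn);	/* line 998 */
    	if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, FWB, IMP))
    		res0 |= HCR_EL2_FWB;				/* line 1013 */
    	/* ...remaining checks from lines 1000-1028 elided... */
    	set_sysreg_masks(kvm, HCR_EL2, res0, res1);

The same accumulate-then-set pattern repeats for HCRX_EL2 (line 1072), the fine-grained trap registers (lines 1112-1201) and SCTLR_EL1 (line 1208).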
1035 if (!kvm_has_feat(kvm, ID_AA64ISAR3_EL1, PACM, TRIVIAL_IMP)) in kvm_init_nv_sysregs()
1037 if (!kvm_has_feat(kvm, ID_AA64PFR2_EL1, FPMR, IMP)) in kvm_init_nv_sysregs()
1039 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP)) in kvm_init_nv_sysregs()
1041 if (!kvm_has_feat(kvm, ID_AA64ISAR2_EL1, SYSREG_128, IMP)) in kvm_init_nv_sysregs()
1043 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ADERR, DEV_ASYNC)) in kvm_init_nv_sysregs()
1045 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, DF2, IMP)) in kvm_init_nv_sysregs()
1047 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, D128, IMP)) in kvm_init_nv_sysregs()
1049 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, THE, IMP)) in kvm_init_nv_sysregs()
1051 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SCTLRX, IMP)) in kvm_init_nv_sysregs()
1053 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, TCRX, IMP)) in kvm_init_nv_sysregs()
1055 if (!kvm_has_feat(kvm, ID_AA64ISAR2_EL1, MOPS, IMP)) in kvm_init_nv_sysregs()
1057 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, CMOW, IMP)) in kvm_init_nv_sysregs()
1059 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, NMI, IMP)) in kvm_init_nv_sysregs()
1061 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, SME, IMP) || in kvm_init_nv_sysregs()
1064 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, XS, IMP)) in kvm_init_nv_sysregs()
1066 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_V)) in kvm_init_nv_sysregs()
1068 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64)) in kvm_init_nv_sysregs()
1070 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA)) in kvm_init_nv_sysregs()
1072 set_sysreg_masks(kvm, HCRX_EL2, res0, res1); in kvm_init_nv_sysregs()
1076 if (!(__vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_ADDRESS) && in kvm_init_nv_sysregs()
1077 __vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_GENERIC))) in kvm_init_nv_sysregs()
1081 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, LO, IMP)) in kvm_init_nv_sysregs()
1085 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) && in kvm_init_nv_sysregs()
1086 !kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2)) in kvm_init_nv_sysregs()
1088 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, GIC, IMP)) in kvm_init_nv_sysregs()
1090 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, IMP)) in kvm_init_nv_sysregs()
1096 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA)) in kvm_init_nv_sysregs()
1098 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP)) in kvm_init_nv_sysregs()
1100 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, SME, IMP)) in kvm_init_nv_sysregs()
1102 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, THE, IMP)) in kvm_init_nv_sysregs()
1104 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, S1PIE, IMP)) in kvm_init_nv_sysregs()
1106 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, S1POE, IMP)) in kvm_init_nv_sysregs()
1108 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, S2POE, IMP)) in kvm_init_nv_sysregs()
1110 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, AIE, IMP)) in kvm_init_nv_sysregs()
1112 set_sysreg_masks(kvm, HFGRTR_EL2, res0 | __HFGRTR_EL2_RES0, res1); in kvm_init_nv_sysregs()
1113 set_sysreg_masks(kvm, HFGWTR_EL2, res0 | __HFGWTR_EL2_RES0, res1); in kvm_init_nv_sysregs()
1117 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, DoubleLock, IMP)) in kvm_init_nv_sysregs()
1119 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMUVer, IMP)) in kvm_init_nv_sysregs()
1126 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMSVer, IMP)) in kvm_init_nv_sysregs()
1133 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceVer, IMP)) in kvm_init_nv_sysregs()
1141 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceBuffer, IMP)) in kvm_init_nv_sysregs()
1146 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, BRBE, IMP)) in kvm_init_nv_sysregs()
1149 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMSVer, V1P2)) in kvm_init_nv_sysregs()
1151 set_sysreg_masks(kvm, HDFGRTR_EL2, res0 | HDFGRTR_EL2_RES0, res1); in kvm_init_nv_sysregs()
1154 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMUVer, IMP)) in kvm_init_nv_sysregs()
1156 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceVer, IMP)) in kvm_init_nv_sysregs()
1158 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceFilt, IMP)) in kvm_init_nv_sysregs()
1160 set_sysreg_masks(kvm, HDFGWTR_EL2, res0 | HDFGWTR_EL2_RES0, res1); in kvm_init_nv_sysregs()
1165 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, DPB, DPB2)) in kvm_init_nv_sysregs()
1167 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, PAN, PAN2)) in kvm_init_nv_sysregs()
1169 if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, OS)) in kvm_init_nv_sysregs()
1175 if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, RANGE)) in kvm_init_nv_sysregs()
1182 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, SPECRES, IMP)) in kvm_init_nv_sysregs()
1185 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, BRBE, IMP)) in kvm_init_nv_sysregs()
1187 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP)) in kvm_init_nv_sysregs()
1190 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, SPECRES, COSP_RCTX)) in kvm_init_nv_sysregs()
1192 if (!kvm_has_feat(kvm, ID_AA64ISAR2_EL1, ATS1A, IMP)) in kvm_init_nv_sysregs()
1194 set_sysreg_masks(kvm, HFGITR_EL2, res0, res1); in kvm_init_nv_sysregs()
1199 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, AMU, V1P1)) in kvm_init_nv_sysregs()
1201 set_sysreg_masks(kvm, HAFGRTR_EL2, res0, res1); in kvm_init_nv_sysregs()
1206 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, PAN, PAN3)) in kvm_init_nv_sysregs()
1208 set_sysreg_masks(kvm, SCTLR_EL1, res0, res1); in kvm_init_nv_sysregs()
1218 write_lock(&vcpu->kvm->mmu_lock); in check_nested_vcpu_requests()
1223 write_unlock(&vcpu->kvm->mmu_lock); in check_nested_vcpu_requests()
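The final hits (lines 1218-1223) come from check_nested_vcpu_requests(), which takes the write lock around deferred shadow-MMU work on the vcpu-run path. A sketch under the assumption that the deferred work is the pending-unmap mechanism:

    void check_nested_vcpu_requests(struct kvm_vcpu *vcpu)
    {
    	if (kvm_check_request(KVM_REQ_NESTED_S2_UNMAP, vcpu)) {
    		struct kvm_s2_mmu *mmu = vcpu->arch.hw_mmu;

    		write_lock(&vcpu->kvm->mmu_lock);
    		if (mmu->pending_unmap) {
    			/* Tear down the whole shadow range now that we
    			 * are in a context that may block. */
    			kvm_stage2_unmap_range(mmu, 0, kvm_phys_size(mmu),
    					       true);
    			mmu->pending_unmap = false;
    		}
    		write_unlock(&vcpu->kvm->mmu_lock);
    	}
    }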