Lines Matching refs:slb_v
1063 unsigned long eaddr, unsigned long slb_v, long mmio_update) in mmio_cache_search() argument
1074 entry->slb_v == slb_v) in mmio_cache_search()
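The two mmio_cache_search() lines above show that cached emulated-MMIO translations are matched on the faulting effective address, the slb_v value, and a generation counter (mmio_update). A minimal sketch of that lookup is below; the structure and field names are illustrative stand-ins, not the kernel's definitions, and the address comparison is simplified to 4K-page granularity.

#include <stddef.h>

#define MMIO_CACHE_SIZE 4

struct mmio_cache_entry {
        unsigned long eaddr;        /* effective address of the access */
        unsigned long slb_v;        /* SLB VSID dword for that segment */
        unsigned long mmio_update;  /* generation when this was cached */
};

struct mmio_cache {
        struct mmio_cache_entry entry[MMIO_CACHE_SIZE];
};

static struct mmio_cache_entry *
mmio_cache_lookup(struct mmio_cache *cache, unsigned long eaddr,
                  unsigned long slb_v, unsigned long mmio_update)
{
        for (unsigned int i = 0; i < MMIO_CACHE_SIZE; i++) {
                struct mmio_cache_entry *e = &cache->entry[i];

                /* Only entries from the current generation count; require
                 * the same slb_v, since a different SLB mapping means a
                 * different translation. (The kernel compares the address
                 * at the entry's own base page size, not a fixed 4K.) */
                if (e->mmio_update == mmio_update &&
                    (e->eaddr >> 12) == (eaddr >> 12) &&
                    e->slb_v == slb_v)
                        return e;
        }
        return NULL;
}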
1097 long kvmppc_hv_find_lock_hpte(struct kvm *kvm, gva_t eaddr, unsigned long slb_v, in kvmppc_hv_find_lock_hpte() argument
1113 if (slb_v & SLB_VSID_L) { in kvmppc_hv_find_lock_hpte()
1116 pshift = slb_base_page_shift[(slb_v & SLB_VSID_LP) >> 4]; in kvmppc_hv_find_lock_hpte()
1118 if (slb_v & SLB_VSID_B_1T) { in kvmppc_hv_find_lock_hpte()
1120 vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT_1T; in kvmppc_hv_find_lock_hpte()
1124 vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT; in kvmppc_hv_find_lock_hpte()
1127 avpn = slb_v & ~(somask >> 16); /* also includes B */ in kvmppc_hv_find_lock_hpte()
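The kvmppc_hv_find_lock_hpte() lines above decode slb_v (the SLB VSID doubleword) into a base page shift, a VSID, a segment-offset mask, and the AVPN used to match HPTEs. A standalone decode sketch follows; the constants mirror the PowerPC SLB layout but are reproduced here from the headers (assuming 64-bit unsigned long), so verify them against asm/book3s/64/mmu-hash*.h before relying on them.

#define SLB_VSID_B        0xc000000000000000UL   /* segment-size field */
#define SLB_VSID_B_1T     0x4000000000000000UL   /* 1T segment */
#define SLB_VSID_L        0x0000000000000100UL   /* large base page */
#define SLB_VSID_LP       0x0000000000000030UL   /* large-page selector */
#define SLB_VSID_SHIFT    12                     /* VSID position, 256M segments */
#define SLB_VSID_SHIFT_1T 24                     /* VSID position, 1T segments */

/* Base page shifts selected by LP when L is set (16M, 64K, 16G, 1M). */
static const unsigned int slb_base_page_shift[4] = { 24, 16, 34, 20 };

struct slb_decode {
        unsigned int pshift;    /* base page shift for the segment */
        unsigned long vsid;     /* virtual segment id */
        unsigned long somask;   /* segment offset mask */
        unsigned long avpn;     /* abbreviated VPN to match in the HPTE */
};

static void decode_slb_v(unsigned long slb_v, unsigned long eaddr,
                         struct slb_decode *d)
{
        d->pshift = 12;                    /* 4K unless the L bit says otherwise */
        if (slb_v & SLB_VSID_L)
                d->pshift = slb_base_page_shift[(slb_v & SLB_VSID_LP) >> 4];

        if (slb_v & SLB_VSID_B_1T) {       /* 1T segment */
                d->somask = (1UL << 40) - 1;
                d->vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT_1T;
        } else {                           /* 256M segment */
                d->somask = (1UL << 28) - 1;
                d->vsid = (slb_v & ~SLB_VSID_B) >> SLB_VSID_SHIFT;
        }

        /* The AVPN compared against the HPTE keeps the upper slb_v bits
         * (including B) plus the segment-offset bits of the address. */
        d->avpn = (slb_v & ~(d->somask >> 16)) | ((eaddr & d->somask) >> 16);
}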
1191 unsigned long slb_v, unsigned int status, bool data) in kvmppc_hpte_hv_fault() argument
1208 cache_entry = mmio_cache_search(vcpu, addr, slb_v, mmio_update); in kvmppc_hpte_hv_fault()
1216 index = kvmppc_hv_find_lock_hpte(kvm, addr, slb_v, valid); in kvmppc_hpte_hv_fault()
1246 if (!hpte_read_permission(pp, slb_v & key)) in kvmppc_hpte_hv_fault()
1250 if (!hpte_write_permission(pp, slb_v & key)) in kvmppc_hpte_hv_fault()
1253 if (!hpte_read_permission(pp, slb_v & key)) in kvmppc_hpte_hv_fault()
1280 if (slb_v & SLB_VSID_L) { in kvmppc_hpte_hv_fault()
1281 pshift_index = ((slb_v & SLB_VSID_LP) >> 4); in kvmppc_hpte_hv_fault()
1291 cache_entry->slb_v = slb_v; in kvmppc_hpte_hv_fault()
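In kvmppc_hpte_hv_fault(), slb_v is used in three further ways: its Ks/Kp key bit (chosen by the guest's privilege level) feeds the read/write permission checks, its L/LP field gives the base page shift recorded with the cache entry, and the whole value is stored into the MMIO cache entry for the next fault. The sketch below shows only the key-selection pattern; the permission helper is a simplified stand-in for the kernel's hpte_read_permission(), and the constants are local reproductions of the PowerPC definitions.

#define SLB_VSID_KS  0x0000000000000800UL   /* supervisor storage key */
#define SLB_VSID_KP  0x0000000000000400UL   /* problem-state storage key */
#define MSR_PR       0x0000000000004000UL   /* problem state (user mode) */

static int pp_allows_read(unsigned long pp, unsigned long key)
{
        /* Simplified: with the key bit set, only pp values granting user
         * access allow the read; the real helper's encoding differs. */
        return key ? (pp >= 1 && pp <= 3) : 1;
}

static int fault_check_read(unsigned long msr, unsigned long slb_v,
                            unsigned long pp)
{
        /* Pick Kp in problem state, Ks otherwise; exactly that one bit is
         * then masked out of slb_v and handed to the permission test. */
        unsigned long key = (msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS;

        return pp_allows_read(pp, slb_v & key);
}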