Searched refs:SLB_ESID_V (Results 1 – 15 of 15) sorted by relevance
281 if (!(svcpu->slb[i].esid & SLB_ESID_V)) in kvmppc_mmu_next_segment()
316 u64 slb_esid = (eaddr & ESID_MASK) | SLB_ESID_V; in kvmppc_mmu_map_segment()
365 if ((svcpu->slb[i].esid & SLB_ESID_V) && in kvmppc_mmu_flush_segment()
69 andis. r9, r10, SLB_ESID_V@h
384 slbe->valid = (rb & SLB_ESID_V) ? 1 : 0; in kvmppc_mmu_book3s_64_slbmte()
405 slbe->orige = rb & (ESID_MASK | SLB_ESID_V); in kvmppc_mmu_book3s_64_slbmte()
407 if (slbee & SLB_ESID_V) { in save_clear_guest_mmu()
1160 andis. r0,r8,SLB_ESID_V@h
1185 andis. r7,r5,SLB_ESID_V@h
1503 if (rb & SLB_ESID_V) in kvm_arch_vcpu_ioctl_set_sregs_pr()
318 if (!(vcpu->arch.slb[i].orige & SLB_ESID_V)) in kvmppc_mmu_book3s_hv_find_slbe()
2151 if (sregs->u.s.ppc64.slb[i].slbe & SLB_ESID_V) { in kvm_arch_vcpu_ioctl_set_sregs_hv()
132 slb->esid = (ea & (ssize == MMU_SEGSIZE_1T ? ESID_MASK_1T : ESID_MASK)) | SLB_ESID_V; in copro_calculate_slb()
94 oris r0,r6,(SLB_ESID_V)@h
34 #define SLB_ESID_V ASM_CONST(0x0000000008000000) /* valid */ macro
868 return (ea & slb_esid_mask(ssize)) | SLB_ESID_V | index; in mk_esid_data()
242 (e & SLB_ESID_V) ? "VALID" : "NOT VALID"); in slb_dump_contents()
244 if (!(e & SLB_ESID_V)) in slb_dump_contents()
48 if (!ret && !(be64_to_cpu(sste->esid_data) & SLB_ESID_V)) in find_free_sste()
220 slb->esid = (ea & ESID_MASK) | SLB_ESID_V; in __spu_kernel_slb()
3767 if (!(esid & SLB_ESID_V)) { in dump_segments()