Lines Matching full:ua — identifier-search hits for the userspace-address variable ua in the PowerPC KVM TCE code
364 unsigned long *ua) in kvmppc_tce_to_ua() argument
373 *ua = __gfn_to_hva_memslot(memslot, gfn) | in kvmppc_tce_to_ua()
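
The two hits above come from kvmppc_tce_to_ua(), the helper that turns a guest TCE (Translation Control Entry) into a host userspace address. A minimal sketch of the whole helper, reconstructed around the fragments at lines 364 and 373 (the memslot lookup and the exact permission-bit mask are assumptions based on the usual KVM pattern, not confirmed by the hits):

	static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce,
			unsigned long *ua)
	{
		unsigned long gfn = tce >> PAGE_SHIFT;
		struct kvm_memory_slot *memslot;

		/* Find the memslot backing the guest frame named by the TCE. */
		memslot = __gfn_to_memslot(kvm_memslots(kvm), gfn);
		if (!memslot)
			return -EINVAL;

		/* Line 373: the slot's host virtual address, plus the in-page
		 * offset carried in the low TCE bits (the permission bits
		 * TCE_PCI_READ/TCE_PCI_WRITE are masked out). */
		*ua = __gfn_to_hva_memslot(memslot, gfn) |
			(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));

		return 0;
	}
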
385 unsigned long ua = 0; in kvmppc_tce_validate() local
394 if (kvmppc_tce_to_ua(stt->kvm, tce, &ua)) in kvmppc_tce_validate()
403 mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift); in kvmppc_tce_validate()
404 if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa)) { in kvmppc_tce_validate()
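
These hits are from kvmppc_tce_validate(), which vets a guest-supplied TCE before anything is committed: the TCE must resolve to a userspace address, and that address must fall inside a region preregistered with the mm_iommu_* layer at the right page size. A hedged fragment showing how the calls at lines 394, 403 and 404 fit together (the wrapper function name is hypothetical, added only to make the fragment self-contained):

	/* Hypothetical wrapper isolating the preregistration check. */
	static long tce_is_preregistered(struct kvmppc_spapr_tce_table *stt,
			unsigned long tce, unsigned int shift)
	{
		unsigned long ua = 0, hpa = 0;
		struct mm_iommu_table_group_mem_t *mem;

		if (kvmppc_tce_to_ua(stt->kvm, tce, &ua))
			return H_TOO_HARD;	/* no memslot covers the TCE */

		/* The region must be preregistered large enough for one IOMMU
		 * page, and must translate to a host physical address. */
		mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift);
		if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa))
			return H_TOO_HARD;

		return H_SUCCESS;
	}
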
521 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_do_map() argument
533 mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift); in kvmppc_tce_iommu_do_map()
538 if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa))) in kvmppc_tce_iommu_do_map()
553 *pua = cpu_to_be64(ua); in kvmppc_tce_iommu_do_map()
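
kvmppc_tce_iommu_do_map() is where a single entry actually lands in a host IOMMU table: look up the preregistered region for ua, translate ua to a host physical address, program the table, and record ua in the table's per-entry userspace shadow array (line 553) so a later unmap can find and release the backing page. A sketch under those assumptions; the iommu_tce_xchg() call stands in for whatever exchange primitive the kernel version at hand provides:

	static long do_map_sketch(struct kvm *kvm, struct iommu_table *tbl,
			unsigned long entry, unsigned long ua,
			enum dma_data_direction dir)
	{
		struct mm_iommu_table_group_mem_t *mem;
		__be64 *pua = IOMMU_TABLE_USERSPACE_ENTRY(tbl, entry);
		unsigned long hpa = 0;

		/* Line 533: the backing region must be preregistered. */
		mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift);
		if (!mem)
			return H_TOO_HARD;

		/* Line 538: a failed ua->hpa translation here is a bug,
		 * since the lookup above already matched the region. */
		if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua,
						tbl->it_page_shift, &hpa)))
			return H_TOO_HARD;

		/* Program the hardware table (swap old entry for hpa)... */
		if (iommu_tce_xchg(kvm->mm, tbl, entry, &hpa, &dir))
			return H_TOO_HARD;

		/* ...and line 553: remember which ua backs this entry. */
		*pua = cpu_to_be64(ua);
		return H_SUCCESS;
	}
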
560 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_map() argument
571 io_entry + i, ua + pgoff, dir); in kvmppc_tce_iommu_map()
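
kvmppc_tce_iommu_map() is the multi-page wrapper around the helper above: when the guest's TCE page size exceeds the host IOMMU page size, one guest TCE spans several IOMMU entries, so the hit at line 571 sits inside a loop that steps ua forward by one IOMMU page per sub-entry. A fragment reconstructing that loop (the 'subpages' name is hypothetical):

	unsigned long i, pgoff;
	/* How many host IOMMU pages one guest TCE page covers. */
	unsigned long subpages = 1ULL << (stt->page_shift - tbl->it_page_shift);
	unsigned long io_entry = entry * subpages;
	long ret = H_SUCCESS;

	for (i = 0, pgoff = 0; i < subpages;
			++i, pgoff += IOMMU_PAGE_SIZE(tbl)) {
		/* Line 571: each sub-entry maps the next IOMMU page of ua. */
		ret = kvmppc_tce_iommu_do_map(kvm, tbl,
				io_entry + i, ua + pgoff, dir);
		if (ret != H_SUCCESS)
			break;
	}
	return ret;
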
587 unsigned long entry, ua = 0; in kvmppc_h_put_tce() local
609 if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce()
622 entry, ua, dir); in kvmppc_h_put_tce()
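
kvmppc_h_put_tce() handles the H_PUT_TCE hypercall for a single entry. The test at line 609 translates the TCE to a ua only when the entry actually maps memory (a DMA_NONE TCE clears the entry and needs no address), and line 622 then replays the mapping into every host IOMMU table attached to the guest's TCE table. A control-flow sketch, with TCE validation and the emulated-table update elided:

	unsigned long entry, ua = 0;
	enum dma_data_direction dir = iommu_tce_direction(tce);
	struct kvmppc_spapr_tce_iommu_table *stit;
	long ret;

	/* Line 609: only mapping TCEs need a userspace address. */
	if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua))
		return H_PARAMETER;

	entry = ioba >> stt->page_shift;
	list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
		if (dir == DMA_NONE)
			ret = kvmppc_tce_iommu_unmap(vcpu->kvm, stt,
					stit->tbl, entry);
		else
			ret = kvmppc_tce_iommu_map(vcpu->kvm, stt, stit->tbl,
					entry, ua, dir);	/* line 622 */
		if (ret != H_SUCCESS)
			return ret;
	}
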
646 unsigned long entry, ua = 0; in kvmppc_h_put_tce_indirect() local
671 if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua)) { in kvmppc_h_put_tce_indirect()
675 tces = (u64 __user *) ua; in kvmppc_h_put_tce_indirect()
706 if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce_indirect()
713 stit->tbl, entry + i, ua, in kvmppc_h_put_tce_indirect()
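
kvmppc_h_put_tce_indirect() is the batched variant: the guest passes a page full of TCEs. tce_list is itself run through kvmppc_tce_to_ua() (line 671) and the result reinterpreted as a user pointer (line 675), after which each element is copied in, translated (line 706), and mapped into every attached table (line 713). A sketch of that loop; error handling, srcu locking, and the page-boundary check on tce_list are elided:

	unsigned long entry, ua = 0;
	u64 __user *tces;
	u64 tce;
	unsigned long i;

	/* Lines 671/675: view the guest's TCE-list page through its
	 * userspace alias so get_user() can read the entries. */
	if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua))
		return H_TOO_HARD;
	tces = (u64 __user *) ua;

	for (i = 0; i < npages; ++i) {
		if (get_user(tce, tces + i))
			return H_TOO_HARD;
		tce = be64_to_cpu(tce);

		/* Line 706: each TCE gets its own ua translation... */
		if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua))
			return H_PARAMETER;

		/* ...and line 713: replay into every attached table. */
		list_for_each_entry_lockless(stit, &stt->iommu_tables, next)
			kvmppc_tce_iommu_map(vcpu->kvm, stt, stit->tbl,
					entry + i, ua,
					iommu_tce_direction(tce));
	}
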