Searched refs:unmap_pages (Results 1 – 25 of 32) sorted by relevance

/linux-6.12.1/drivers/block/xen-blkback/
blkback.c:650  struct page **unmap_pages) in xen_blkbk_unmap_prepare() argument
blkback.c:661  unmap_pages[invcount] = pages[i]->page; in xen_blkbk_unmap_prepare()
blkback.c:711  req->unmap, req->unmap_pages); in xen_blkbk_unmap_and_respond()
blkback.c:717  work->pages = req->unmap_pages; in xen_blkbk_unmap_and_respond()
blkback.c:736  struct page *unmap_pages[BLKIF_MAX_SEGMENTS_PER_REQUEST]; in xen_blkbk_unmap() local
blkback.c:744  unmap, unmap_pages); in xen_blkbk_unmap()
blkback.c:746  ret = gnttab_unmap_refs(unmap, NULL, unmap_pages, invcount); in xen_blkbk_unmap()
blkback.c:748  gnttab_page_cache_put(&ring->free_pages, unmap_pages, in xen_blkbk_unmap()
common.h:358  struct page *unmap_pages[MAX_INDIRECT_SEGMENTS]; member
/linux-6.12.1/include/linux/
io-pgtable.h:195  size_t (*unmap_pages)(struct io_pgtable_ops *ops, unsigned long iova, member
iommu.h:650  size_t (*unmap_pages)(struct iommu_domain *domain, unsigned long iova, member
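
The two header hits above are the declarations everything else in this listing implements or calls: io-pgtable.h carries the page-table-level callback and iommu.h the domain-level one. Both lines are truncated after "unsigned long iova," in the search output; the sketch below fills in the remaining parameters from the Linux 6.12 headers and the call sites further down (pgsize, pgcount, gather), so treat it as a reading aid rather than a verbatim excerpt.

/*
 * Reading aid, not a verbatim excerpt: the trailing parameters are
 * reconstructed from the 6.12 headers and the call sites listed below.
 */

/* include/linux/io-pgtable.h, struct io_pgtable_ops */
size_t (*unmap_pages)(struct io_pgtable_ops *ops, unsigned long iova,
		      size_t pgsize, size_t pgcount,
		      struct iommu_iotlb_gather *gather);

/* include/linux/iommu.h, struct iommu_domain_ops */
size_t (*unmap_pages)(struct iommu_domain *domain, unsigned long iova,
		      size_t pgsize, size_t pgcount,
		      struct iommu_iotlb_gather *iotlb_gather);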
/linux-6.12.1/drivers/iommu/
io-pgtable-arm-v7s.c:832  .unmap_pages = arm_v7s_unmap_pages, in arm_v7s_alloc_pgtable()
io-pgtable-arm-v7s.c:996  if (ops->unmap_pages(ops, iova_start + size, size, 1, NULL) != size) in arm_v7s_do_selftests()
io-pgtable-arm-v7s.c:1015  if (ops->unmap_pages(ops, iova, size, 1, NULL) != size) in arm_v7s_do_selftests()
ipmmu-vmsa.c:684  return domain->iop->unmap_pages(domain->iop, iova, pgsize, pgcount, gather); in ipmmu_unmap()
ipmmu-vmsa.c:890  .unmap_pages = ipmmu_unmap,
msm_iommu.c:519  ret = priv->iop->unmap_pages(priv->iop, iova, pgsize, pgcount, gather); in msm_iommu_unmap()
msm_iommu.c:700  .unmap_pages = msm_iommu_unmap,
io-pgtable-arm.c:918  .unmap_pages = arm_lpae_unmap_pages, in arm_lpae_alloc_pgtable()
io-pgtable-arm.c:1352  if (ops->unmap_pages(ops, SZ_1G + size, size, 1, NULL) != size) in arm_lpae_run_tests()
io-pgtable-arm.c:1368  if (ops->unmap_pages(ops, iova, size, 1, NULL) != size) in arm_lpae_run_tests()
io-pgtable-dart.c:384  .unmap_pages = dart_unmap_pages, in dart_alloc_pgtable()
sprd-iommu.c:418  .unmap_pages = sprd_iommu_unmap,
apple-dart.c:552  return ops->unmap_pages(ops, iova, pgsize, pgcount, gather); in apple_dart_unmap_pages()
apple-dart.c:994  .unmap_pages = apple_dart_unmap_pages,
s390-iommu.c:790  .unmap_pages = s390_iommu_unmap_pages,
mtk_iommu_v1.c:586  .unmap_pages = mtk_iommu_v1_unmap,
mtk_iommu.c:819  return dom->iop->unmap_pages(dom->iop, iova, pgsize, pgcount, gather); in mtk_iommu_unmap()
mtk_iommu.c:1026  .unmap_pages = mtk_iommu_unmap,
sun50i-iommu.c:857  .unmap_pages = sun50i_iommu_unmap,
iommu.c:2547  if (WARN_ON(!ops->unmap_pages || domain->pgsize_bitmap == 0UL)) in __iommu_unmap()
iommu.c:2574  unmapped_page = ops->unmap_pages(domain, iova, pgsize, count, iotlb_gather); in __iommu_unmap()
tegra-smmu.c:1005  .unmap_pages = tegra_smmu_unmap,
virtio-iommu.c:1075  .unmap_pages = viommu_unmap_pages,
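
Most driver hits in drivers/iommu/ share one shape: the driver's unmap handler forwards straight to the io-pgtable ops (as at ipmmu-vmsa.c:684, msm_iommu.c:519 and mtk_iommu.c:819) and is exposed through .unmap_pages in its iommu_domain_ops, which the core loop at iommu.c:2547/2574 then drives. A minimal, hypothetical skeleton of that pattern is sketched below; the foo_* names are invented for illustration and the structure is inferred from the hits above rather than copied from any one driver.

#include <linux/iommu.h>
#include <linux/io-pgtable.h>

/* Hypothetical driver, illustrating the delegation pattern only. */
struct foo_domain {
	struct iommu_domain domain;
	struct io_pgtable_ops *iop;	/* set up via alloc_io_pgtable_ops() */
};

static size_t foo_unmap_pages(struct iommu_domain *domain, unsigned long iova,
			      size_t pgsize, size_t pgcount,
			      struct iommu_iotlb_gather *gather)
{
	struct foo_domain *fd = container_of(domain, struct foo_domain, domain);

	if (!fd->iop)
		return 0;

	/*
	 * Forward to the page-table code; the return value is the number of
	 * bytes actually unmapped, which __iommu_unmap() uses to advance iova.
	 */
	return fd->iop->unmap_pages(fd->iop, iova, pgsize, pgcount, gather);
}

static const struct iommu_domain_ops foo_domain_ops = {
	.unmap_pages	= foo_unmap_pages,
	/* .map_pages, .iotlb_sync, etc. omitted */
};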
/linux-6.12.1/drivers/iommu/amd/
io_pgtable_v2.c:354  pgtable->pgtbl.ops.unmap_pages = iommu_v2_unmap_pages; in v2_alloc_pgtable()
io_pgtable.c:556  pgtable->pgtbl.ops.unmap_pages = iommu_v1_unmap_pages; in v1_alloc_pgtable()
iommu.c:2577  r = (ops->unmap_pages) ? ops->unmap_pages(ops, iova, pgsize, pgcount, NULL) : 0; in amd_iommu_unmap_pages()
iommu.c:2861  .unmap_pages = amd_iommu_unmap_pages,
/linux-6.12.1/drivers/iommu/arm/arm-smmu/
qcom_iommu.c:466  ret = ops->unmap_pages(ops, iova, pgsize, pgcount, gather); in qcom_iommu_unmap()
qcom_iommu.c:605  .unmap_pages = qcom_iommu_unmap,
arm-smmu.c:1285  ret = ops->unmap_pages(ops, iova, pgsize, pgcount, iotlb_gather); in arm_smmu_unmap_pages()
arm-smmu.c:1655  .unmap_pages = arm_smmu_unmap_pages,
/linux-6.12.1/drivers/gpu/drm/msm/
msm_iommu.c:102  unmapped = ops->unmap_pages(ops, iova, pgsize, count, NULL); in msm_iommu_pagetable_unmap()
/linux-6.12.1/drivers/gpu/drm/panfrost/
panfrost_mmu.c:372  unmapped_page = ops->unmap_pages(ops, iova, pgsize, pgcount, NULL); in panfrost_mmu_unmap()