/linux-6.12.1/arch/alpha/kernel/ |
D | pci_iommu.c |
      509 sg->dma_address = -1; in sg_classify()
      512 sg->dma_address = -2; in sg_classify()
      517 leader->dma_address = leader_flag; in sg_classify()
      527 leader->dma_address = leader_flag; in sg_classify()
      548 if (leader->dma_address == 0 in sg_fill()
      551 out->dma_address = paddr + __direct_map_base; in sg_fill()
      555 __va(paddr), size, out->dma_address); in sg_fill()
      562 if (leader->dma_address == 0 && dac_allowed) { in sg_fill()
      563 out->dma_address = paddr + alpha_mv.pci_dac_offset; in sg_fill()
      567 __va(paddr), size, out->dma_address); in sg_fill()
      [all …]
|
/linux-6.12.1/drivers/gpu/drm/ttm/ |
D | ttm_tt.c |
      125 sizeof(*ttm->dma_address), GFP_KERNEL); in ttm_dma_tt_alloc_page_directory()
      129 ttm->dma_address = (void *)(ttm->pages + ttm->num_pages); in ttm_dma_tt_alloc_page_directory()
      135 ttm->dma_address = kvcalloc(ttm->num_pages, sizeof(*ttm->dma_address), in ttm_sg_tt_alloc_page_directory()
      137 if (!ttm->dma_address) in ttm_sg_tt_alloc_page_directory()
      157 ttm->dma_address = NULL; in ttm_tt_init_fields()
      188 kvfree(ttm->dma_address); in ttm_tt_fini()
      190 ttm->dma_address = NULL; in ttm_tt_fini()
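The ttm_tt.c hits above show the lifecycle of the per-page DMA-address array: it is allocated with kvcalloc() (one dma_addr_t per backing page), cleared in ttm_tt_init_fields(), and released with kvfree() in ttm_tt_fini(). A minimal sketch of that allocate/free pattern follows; struct page_directory and the two helpers are hypothetical stand-ins for illustration, not TTM API.

#include <linux/errno.h>
#include <linux/slab.h>		/* kvcalloc(), kvfree() */
#include <linux/types.h>	/* dma_addr_t */

/* Hypothetical container standing in for struct ttm_tt's pages/dma_address pair. */
struct page_directory {
	struct page **pages;
	dma_addr_t *dma_address;
	unsigned long num_pages;
};

/* One dma_addr_t slot per backing page, as in ttm_sg_tt_alloc_page_directory(). */
static int page_directory_alloc_dma(struct page_directory *pd)
{
	pd->dma_address = kvcalloc(pd->num_pages, sizeof(*pd->dma_address),
				   GFP_KERNEL);
	if (!pd->dma_address)
		return -ENOMEM;
	return 0;
}

/* Mirrors the ttm_tt_fini() hits: free the array and clear the pointer. */
static void page_directory_free_dma(struct page_directory *pd)
{
	kvfree(pd->dma_address);
	pd->dma_address = NULL;
}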
|
D | ttm_pool.c |
      405 if (tt->dma_address) in ttm_pool_free_range()
      406 ttm_pool_unmap(pool, tt->dma_address[i], nr); in ttm_pool_free_range()
      432 dma_addr_t *dma_addr = tt->dma_address; in ttm_pool_alloc()
|
/linux-6.12.1/arch/x86/kernel/ |
D | amd_gart_64.c |
      285 gart_unmap_page(dev, s->dma_address, s->dma_length, dir, 0); in gart_unmap_sg()
      313 s->dma_address = addr; in dma_map_sg_nonforce()
      336 unsigned long phys_addr = s->dma_address; in __dma_map_cont()
      340 sout->dma_address = iommu_bus_base; in __dma_map_cont()
      341 sout->dma_address += iommu_page*PAGE_SIZE + s->offset; in __dma_map_cont()
      366 sout->dma_address = start->dma_address; in dma_map_cont()
      400 s->dma_address = addr; in gart_map_sg()
|
/linux-6.12.1/drivers/xen/ |
D | swiotlb-xen.c |
      335 xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg), in xen_swiotlb_unmap_sg()
      350 sg->dma_address = xen_swiotlb_map_page(dev, sg_page(sg), in xen_swiotlb_map_sg()
      352 if (sg->dma_address == DMA_MAPPING_ERROR) in xen_swiotlb_map_sg()
      372 xen_swiotlb_sync_single_for_cpu(dev, sg->dma_address, in xen_swiotlb_sync_sg_for_cpu()
      385 xen_swiotlb_sync_single_for_device(dev, sg->dma_address, in xen_swiotlb_sync_sg_for_device()
|
D | grant-dma-ops.c |
      245 xen_grant_dma_unmap_page(dev, s->dma_address, sg_dma_len(s), dir, in xen_grant_dma_unmap_sg()
      260 s->dma_address = xen_grant_dma_map_page(dev, sg_page(s), s->offset, in xen_grant_dma_map_sg()
      262 if (s->dma_address == DMA_MAPPING_ERROR) in xen_grant_dma_map_sg()
|
/linux-6.12.1/arch/sparc/kernel/ |
D | iommu.c |
      536 outs->dma_address = dma_addr; in dma_4u_map_sg()
      560 vaddr = s->dma_address & IO_PAGE_MASK; in dma_4u_map_sg()
      561 npages = iommu_num_pages(s->dma_address, s->dma_length, in dma_4u_map_sg()
      596 bus_addr = sg->dma_address & IO_PAGE_MASK; in fetch_sg_ctx()
      625 dma_addr_t dma_handle = sg->dma_address; in dma_4u_unmap_sg()
      720 iopte = iommu->page_table + ((sglist[0].dma_address - in dma_4u_sync_sg_for_cpu()
      726 bus_addr = sglist[0].dma_address & IO_PAGE_MASK; in dma_4u_sync_sg_for_cpu()
      734 npages = (IO_PAGE_ALIGN(sgprv->dma_address + sgprv->dma_length) in dma_4u_sync_sg_for_cpu()
|
/linux-6.12.1/arch/arm/mach-footbridge/ |
D | dma-isa.c |
      95 dma->buf.dma_address = dma_map_single(&isa_dma_dev, in isa_enable_dma()
      100 address = dma->buf.dma_address; in isa_enable_dma()
|
/linux-6.12.1/arch/sparc/mm/ |
D | iommu.c |
      257 sg->dma_address =__sbus_iommu_map_page(dev, sg_page(sg), in __sbus_iommu_map_sg()
      259 if (sg->dma_address == DMA_MAPPING_ERROR) in __sbus_iommu_map_sg()
      306 sbus_iommu_unmap_page(dev, sg->dma_address, sg->length, dir, in sbus_iommu_unmap_sg()
      308 sg->dma_address = 0x21212121; in sbus_iommu_unmap_sg()
|
D | io-unit.c |
      174 sg->dma_address = iounit_get_area(iounit, (unsigned long) sg_virt(sg), sg->length); in iounit_map_sg()
      206 len = ((sg->dma_address & ~PAGE_MASK) + sg->length + (PAGE_SIZE-1)) >> PAGE_SHIFT; in iounit_unmap_sg()
      207 vaddr = (sg->dma_address - IOUNIT_DMA_BASE) >> PAGE_SHIFT; in iounit_unmap_sg()
|
/linux-6.12.1/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_ttm_buffer.c |
      112 viter->dma_address = &__vmw_piter_dma_addr; in vmw_piter_start()
      118 viter->dma_address = &__vmw_piter_sg_addr; in vmw_piter_start()
      185 vsgt->addrs = vmw_tt->dma_ttm.dma_address; in vmw_ttm_map_dma()
      372 ttm->dma_address, in vmw_ttm_populate()
|
/linux-6.12.1/arch/arm/mach-rpc/ |
D | dma.c |
      78 idma->dma_addr = idma->dma.sg->dma_address; in iomd_get_next_sg()
      172 idma->dma.buf.dma_address = dma_map_single(&isa_dma_dev, in iomd_enable_dma()
      178 idma->dma_addr = idma->dma.sg->dma_address; in iomd_enable_dma()
|
/linux-6.12.1/arch/mips/jazz/ |
D | jazzdma.c |
      553 sg->dma_address = vdma_alloc(sg_phys(sg), sg->length); in jazz_dma_map_sg()
      554 if (sg->dma_address == DMA_MAPPING_ERROR) in jazz_dma_map_sg()
      571 vdma_free(sg->dma_address); in jazz_dma_unmap_sg()
|
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_ttm.c |
      253 dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT]; in amdgpu_ttm_map_buffer()
      256 dma_addr_t dma_address; in amdgpu_ttm_map_buffer() local
      258 dma_address = mm_cur->start; in amdgpu_ttm_map_buffer()
      259 dma_address += adev->vm_manager.vram_base_offset; in amdgpu_ttm_map_buffer()
      262 amdgpu_gart_map(adev, i << PAGE_SHIFT, 1, &dma_address, in amdgpu_ttm_map_buffer()
      264 dma_address += PAGE_SIZE; in amdgpu_ttm_map_buffer()
      818 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address, in amdgpu_ttm_tt_pin_userptr()
      874 1, &gtt->ttm.dma_address[page_idx], flags); in amdgpu_ttm_gart_bind_gfx9_mqd()
      882 &gtt->ttm.dma_address[page_idx + 1], in amdgpu_ttm_gart_bind_gfx9_mqd()
      902 gtt->ttm.dma_address, flags); in amdgpu_ttm_gart_bind()
      [all …]
|
/linux-6.12.1/drivers/gpu/drm/ttm/tests/ |
D | ttm_tt_test.c |
      63 KUNIT_ASSERT_NULL(test, tt->dma_address); in ttm_tt_init_basic()
      124 KUNIT_ASSERT_NOT_NULL(test, tt->dma_address); in ttm_tt_fini_sg()
      127 KUNIT_ASSERT_NULL(test, tt->dma_address); in ttm_tt_fini_sg()
|
/linux-6.12.1/include/drm/ttm/ |
D | ttm_tt.h | 105 dma_addr_t *dma_address; member
|
/linux-6.12.1/drivers/hid/amd-sfh-hid/ |
D | amd_sfh_common.h | 40 dma_addr_t dma_address; member
|
D | amd_sfh_client.c |
      169 info.dma_address = cl_data->sensor_dma_addr[i]; in amd_sfh_resume()
      277 info.dma_address = cl_data->sensor_dma_addr[i]; in amd_sfh_hid_client_init()
|
D | amd_sfh_pcie.c |
      69 writeq(info.dma_address, privdata->mmio + AMD_C2P_MSG1); in amd_start_sensor_v2()
      160 writeq(info.dma_address, privdata->mmio + AMD_C2P_MSG2); in amd_start_sensor()
|
/linux-6.12.1/tools/virtio/linux/ |
D | scatterlist.h | 11 dma_addr_t dma_address; member
|
/linux-6.12.1/include/linux/ |
D | scatterlist.h |
      15 dma_addr_t dma_address; member
      31 #define sg_dma_address(sg) ((sg)->dma_address)
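These two hits are the core definition: every struct scatterlist entry carries a dma_address field, and sg_dma_address() is the accessor drivers use after mapping. A minimal consumer-side sketch of the usual pattern, assuming dev, sgl and nents are supplied by the caller; the function name and the DMA_TO_DEVICE direction are illustrative choices, not taken from the listing.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Map a scatterlist for device access, read back the bus addresses the mapping
 * filled into each entry's dma_address, then unmap.
 */
static int map_and_walk(struct device *dev, struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int i, mapped;

	mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (mapped == 0)
		return -EIO;

	/* Walk only the entries the mapping produced. */
	for_each_sg(sgl, sg, mapped, i) {
		dma_addr_t addr = sg_dma_address(sg);	/* (sg)->dma_address */
		unsigned int len = sg_dma_len(sg);

		/* ...program addr/len into the device's DMA engine here... */
		(void)addr;
		(void)len;
	}

	/* Unmap with the original nents, not the mapped count. */
	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}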
|
/linux-6.12.1/kernel/dma/ |
D | direct.c |
      455 dma_direct_unmap_page(dev, sg->dma_address, in dma_direct_unmap_sg()
      489 sg->dma_address = dma_direct_map_page(dev, sg_page(sg), in dma_direct_map_sg()
      491 if (sg->dma_address == DMA_MAPPING_ERROR) { in dma_direct_map_sg()
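dma_direct_map_sg() and dma_direct_unmap_sg() show the provider side of the same contract: dma_address is filled in one scatterlist entry at a time and each result is checked against DMA_MAPPING_ERROR. A simplified sketch of that loop shape, assuming a hypothetical map_one_entry()/unmap_one_entry() pair in place of dma_direct_map_page(); the identity-mapping body is only a placeholder so the sketch is self-contained.

#include <linux/dma-direction.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Hypothetical per-entry primitive; dma_direct_map_sg() calls dma_direct_map_page() here. */
static dma_addr_t map_one_entry(struct device *dev, struct scatterlist *sg,
				enum dma_data_direction dir)
{
	return (dma_addr_t)sg_phys(sg);	/* placeholder: identity mapping */
}

static void unmap_one_entry(struct device *dev, dma_addr_t addr, size_t size,
			    enum dma_data_direction dir)
{
	/* placeholder: nothing to undo for an identity mapping */
}

/*
 * Shape of a map_sg implementation: fill sg->dma_address entry by entry,
 * bail out on DMA_MAPPING_ERROR, and unwind whatever was already mapped.
 */
static int sketch_map_sg(struct device *dev, struct scatterlist *sgl, int nents,
			 enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i, done;

	for_each_sg(sgl, sg, nents, i) {
		sg->dma_address = map_one_entry(dev, sg, dir);
		if (sg->dma_address == DMA_MAPPING_ERROR)
			goto unwind;
		sg_dma_len(sg) = sg->length;
	}
	return nents;

unwind:
	done = i;	/* number of entries already mapped */
	for_each_sg(sgl, sg, done, i)
		unmap_one_entry(dev, sg_dma_address(sg), sg_dma_len(sg), dir);
	return -EIO;
}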
|
/linux-6.12.1/drivers/mmc/host/ |
D | wmt-sdmmc.c |
      572 u32 dma_address; in wmt_mci_request() local
      632 dma_address = priv->dma_desc_device_addr + 16; in wmt_mci_request()
      640 dma_address, 0); in wmt_mci_request()
      644 dma_address += 16; in wmt_mci_request()
|
/linux-6.12.1/drivers/dma/ |
D | imx-dma.c |
      270 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
      273 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
      815 if (sg_dma_len(sgl) & 3 || sgl->dma_address & 3) in imxdma_prep_slave_sg()
      819 if (sg_dma_len(sgl) & 1 || sgl->dma_address & 1) in imxdma_prep_slave_sg()
      876 imxdmac->sg_list[i].dma_address = dma_addr; in imxdma_prep_dma_cyclic()
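The imxdma_prep_slave_sg() hits check each segment's length and dma_address against the configured bus width (4-byte transfers need 4-byte alignment, 2-byte transfers need 2-byte alignment). A hedged sketch of that validation; check_sg_alignment() is an illustrative helper, not something imx-dma exports, and the bus width is assumed to come from the slave's dma_slave_config.

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Reject a scatterlist whose DMA addresses or lengths do not match the slave
 * bus width, in the spirit of the imxdma_prep_slave_sg() checks above.
 */
static int check_sg_alignment(struct scatterlist *sgl, int nents,
			      enum dma_slave_buswidth width)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		switch (width) {
		case DMA_SLAVE_BUSWIDTH_4_BYTES:
			if (sg_dma_len(sg) & 3 || sg_dma_address(sg) & 3)
				return -EINVAL;
			break;
		case DMA_SLAVE_BUSWIDTH_2_BYTES:
			if (sg_dma_len(sg) & 1 || sg_dma_address(sg) & 1)
				return -EINVAL;
			break;
		default:
			break;
		}
	}
	return 0;
}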
|
/linux-6.12.1/drivers/atm/ |
D | nicstar.h |
      309 u32 dma_address; member
      346 u32 dma_address; member
|