
Searched refs:dma_dev (Results 1 – 25 of 185) sorted by relevance


/linux-6.12.1/drivers/dma/
mcf-edma-main.c
192 INIT_LIST_HEAD(&mcf_edma->dma_dev.channels); in mcf_edma_probe()
200 vchan_init(&mcf_chan->vchan, &mcf_edma->dma_dev); in mcf_edma_probe()
213 dma_cap_set(DMA_PRIVATE, mcf_edma->dma_dev.cap_mask); in mcf_edma_probe()
214 dma_cap_set(DMA_SLAVE, mcf_edma->dma_dev.cap_mask); in mcf_edma_probe()
215 dma_cap_set(DMA_CYCLIC, mcf_edma->dma_dev.cap_mask); in mcf_edma_probe()
217 mcf_edma->dma_dev.dev = &pdev->dev; in mcf_edma_probe()
218 mcf_edma->dma_dev.device_alloc_chan_resources = in mcf_edma_probe()
220 mcf_edma->dma_dev.device_free_chan_resources = in mcf_edma_probe()
222 mcf_edma->dma_dev.device_config = fsl_edma_slave_config; in mcf_edma_probe()
223 mcf_edma->dma_dev.device_prep_dma_cyclic = in mcf_edma_probe()
[all …]
fsl-edma-main.c
133 list_for_each_entry_safe(chan, _chan, &fsl_edma->dma_dev.channels, device_node) { in fsl_edma_xlate()
177 list_for_each_entry_safe(chan, _chan, &fsl_edma->dma_dev.channels, in fsl_edma3_xlate()
552 INIT_LIST_HEAD(&fsl_edma->dma_dev.channels); in fsl_edma_probe()
584 vchan_init(&fsl_chan->vchan, &fsl_edma->dma_dev); in fsl_edma_probe()
596 dma_cap_set(DMA_PRIVATE, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
597 dma_cap_set(DMA_SLAVE, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
598 dma_cap_set(DMA_CYCLIC, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
599 dma_cap_set(DMA_MEMCPY, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
601 fsl_edma->dma_dev.dev = &pdev->dev; in fsl_edma_probe()
602 fsl_edma->dma_dev.device_alloc_chan_resources in fsl_edma_probe()
[all …]
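
The eDMA excerpts above all follow the same dmaengine controller registration pattern: initialize the dma_dev channel list, set the capability mask, point dma_dev.dev at the platform device, assign the operation callbacks, and register. A minimal hedged sketch of that pattern (my_dma and my_probe are hypothetical names, not taken from the drivers listed here):

#include <linux/dmaengine.h>
#include <linux/platform_device.h>

struct my_dma {
	struct dma_device dma_dev;
};

static int my_probe(struct platform_device *pdev)
{
	struct my_dma *md = devm_kzalloc(&pdev->dev, sizeof(*md), GFP_KERNEL);
	struct dma_device *dma_dev;

	if (!md)
		return -ENOMEM;
	dma_dev = &md->dma_dev;

	/* channels are later added to this list, e.g. by vchan_init() */
	INIT_LIST_HEAD(&dma_dev->channels);

	dma_cap_zero(dma_dev->cap_mask);
	dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask);
	dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);
	dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask);

	dma_dev->dev = &pdev->dev;
	/*
	 * A real driver must assign device_alloc_chan_resources,
	 * device_free_chan_resources, device_config, device_prep_slave_sg,
	 * device_prep_dma_cyclic, device_tx_status and device_issue_pending
	 * (as the mcf-edma/fsl-edma lines above do) and set up at least one
	 * channel before registration will succeed.
	 */

	return dma_async_device_register(dma_dev);
}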
dma-axi-dmac.c
170 struct dma_device dma_dev; member
177 dma_dev); in chan_to_axi_dmac()
357 dev_dbg(dmac->dma_dev.dev, in axi_dmac_dequeue_partial_xfers()
361 dev_warn(dmac->dma_dev.dev, in axi_dmac_dequeue_partial_xfers()
534 struct device *dev = dmac->dma_dev.dev; in axi_dmac_alloc_desc()
573 struct device *dev = dmac->dma_dev.dev; in axi_dmac_free_desc()
1017 dev_err(dmac->dma_dev.dev, in axi_dmac_detect_caps()
1025 dev_err(dmac->dma_dev.dev, in axi_dmac_detect_caps()
1056 struct dma_device *dma_dev; in axi_dmac_probe() local
1095 dma_dev = &dmac->dma_dev; in axi_dmac_probe()
[all …]
altera-msgdma.c
814 struct dma_device *dma_dev; in msgdma_probe() local
862 dma_dev = &mdev->dmadev; in msgdma_probe()
865 dma_cap_zero(dma_dev->cap_mask); in msgdma_probe()
866 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in msgdma_probe()
867 dma_cap_set(DMA_SLAVE, dma_dev->cap_mask); in msgdma_probe()
869 dma_dev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in msgdma_probe()
870 dma_dev->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in msgdma_probe()
871 dma_dev->directions = BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM) | in msgdma_probe()
873 dma_dev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR; in msgdma_probe()
876 INIT_LIST_HEAD(&dma_dev->channels); in msgdma_probe()
[all …]
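
Several of the slave-capable drivers in this list (msgdma above, sf-pdma and ptdma further down) also advertise their supported bus widths, transfer directions and residue granularity on the same dma_device. A hedged sketch, assuming dma_dev is the struct dma_device being set up in probe and my_set_slave_caps is a hypothetical helper:

#include <linux/dmaengine.h>

static void my_set_slave_caps(struct dma_device *dma_dev)
{
	dma_dev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dma_dev->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dma_dev->directions = BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM) |
			      BIT(DMA_MEM_TO_MEM);
	dma_dev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
}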
img-mdc-dma.c
133 struct dma_device dma_dev; member
182 return mdma->dma_dev.dev; in mdma2dev()
816 list_for_each_entry(chan, &mdma->dma_dev.channels, device_node) { in mdc_of_xlate()
912 dma_cap_zero(mdma->dma_dev.cap_mask); in mdc_dma_probe()
913 dma_cap_set(DMA_SLAVE, mdma->dma_dev.cap_mask); in mdc_dma_probe()
914 dma_cap_set(DMA_PRIVATE, mdma->dma_dev.cap_mask); in mdc_dma_probe()
915 dma_cap_set(DMA_CYCLIC, mdma->dma_dev.cap_mask); in mdc_dma_probe()
916 dma_cap_set(DMA_MEMCPY, mdma->dma_dev.cap_mask); in mdc_dma_probe()
946 mdma->dma_dev.dev = &pdev->dev; in mdc_dma_probe()
947 mdma->dma_dev.device_prep_slave_sg = mdc_prep_slave_sg; in mdc_dma_probe()
[all …]
nbpfaxi.c
229 struct dma_device dma_dev; member
325 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_read()
334 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_write()
400 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): next 0x%x, cur 0x%x\n", __func__, in nbpf_start()
856 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): force-free desc %p cookie %d\n", in nbpf_chan_idle()
1097 dchan = dma_get_any_slave_channel(&nbpf->dma_dev); in nbpf_of_xlate()
1228 dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq); in nbpf_err_irq()
1246 struct dma_device *dma_dev = &nbpf->dma_dev; in nbpf_chan_probe() local
1254 chan->dma_chan.device = dma_dev; in nbpf_chan_probe()
1258 dev_dbg(dma_dev->dev, "%s(): channel %d: -> %p\n", __func__, n, chan->base); in nbpf_chan_probe()
[all …]
ep93xx_dma.c
234 struct dma_device dma_dev; member
1374 struct dma_device *dma_dev; in ep93xx_dma_of_probe() local
1389 dma_dev = &edma->dma_dev; in ep93xx_dma_of_probe()
1391 INIT_LIST_HEAD(&dma_dev->channels); in ep93xx_dma_of_probe()
1396 edmac->chan.device = dma_dev; in ep93xx_dma_of_probe()
1428 &dma_dev->channels); in ep93xx_dma_of_probe()
1450 dma_cap_mask_t mask = edma->dma_dev.cap_mask; in ep93xx_m2p_dma_of_xlate()
1481 dma_cap_mask_t mask = edma->dma_dev.cap_mask; in ep93xx_m2m_dma_of_xlate()
1506 struct dma_device *dma_dev; in ep93xx_dma_probe() local
1513 dma_dev = &edma->dma_dev; in ep93xx_dma_probe()
[all …]
fsl_raid.c
629 struct dma_device *dma_dev; in fsl_re_chan_probe() local
637 dma_dev = &re_priv->dma_dev; in fsl_re_chan_probe()
683 chan->chan.device = dma_dev; in fsl_re_chan_probe()
753 struct dma_device *dma_dev; in fsl_re_probe() local
782 dma_dev = &re_priv->dma_dev; in fsl_re_probe()
783 dma_dev->dev = dev; in fsl_re_probe()
784 INIT_LIST_HEAD(&dma_dev->channels); in fsl_re_probe()
787 dma_dev->device_alloc_chan_resources = fsl_re_alloc_chan_resources; in fsl_re_probe()
788 dma_dev->device_tx_status = fsl_re_tx_status; in fsl_re_probe()
789 dma_dev->device_issue_pending = fsl_re_issue_pending; in fsl_re_probe()
[all …]
tegra210-adma.c
151 struct dma_device dma_dev; member
722 chan = dma_get_any_slave_channel(&tdma->dma_dev); in tegra_dma_of_xlate()
895 INIT_LIST_HEAD(&tdma->dma_dev.channels); in tegra_adma_probe()
912 vchan_init(&tdc->vc, &tdma->dma_dev); in tegra_adma_probe()
927 dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask); in tegra_adma_probe()
928 dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask); in tegra_adma_probe()
929 dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask); in tegra_adma_probe()
931 tdma->dma_dev.dev = &pdev->dev; in tegra_adma_probe()
932 tdma->dma_dev.device_alloc_chan_resources = in tegra_adma_probe()
934 tdma->dma_dev.device_free_chan_resources = in tegra_adma_probe()
[all …]
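
tegra210-adma (like nbpfaxi above) resolves devicetree "dmas" requests by handing out any idle channel of its dma_device. A hedged sketch of that of_xlate pattern, with hypothetical my_dma/my_of_xlate names:

#include <linux/dmaengine.h>
#include <linux/of_dma.h>

struct my_dma {
	struct dma_device dma_dev;
};

static struct dma_chan *my_of_xlate(struct of_phandle_args *dma_spec,
				    struct of_dma *ofdma)
{
	struct my_dma *md = ofdma->of_dma_data;

	/* request-specific checks on dma_spec->args[] would go here */
	return dma_get_any_slave_channel(&md->dma_dev);
}

The translator is hooked up in probe, after dma_async_device_register(), with of_dma_controller_register(pdev->dev.of_node, my_of_xlate, md).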
mv_xor_v2.c
718 struct dma_device *dma_dev; in mv_xor_v2_probe() local
811 dma_dev = &xor_dev->dmadev; in mv_xor_v2_probe()
814 dma_cap_zero(dma_dev->cap_mask); in mv_xor_v2_probe()
815 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in mv_xor_v2_probe()
816 dma_cap_set(DMA_XOR, dma_dev->cap_mask); in mv_xor_v2_probe()
817 dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in mv_xor_v2_probe()
820 INIT_LIST_HEAD(&dma_dev->channels); in mv_xor_v2_probe()
823 dma_dev->device_tx_status = dma_cookie_status; in mv_xor_v2_probe()
824 dma_dev->device_issue_pending = mv_xor_v2_issue_pending; in mv_xor_v2_probe()
825 dma_dev->dev = &pdev->dev; in mv_xor_v2_probe()
[all …]
sprd-dma.c
209 struct dma_device dma_dev; member
358 dev_warn(sdev->dma_dev.dev, in sprd_dma_pause_resume()
422 dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n"); in sprd_dma_get_int_type()
481 dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n", in sprd_dma_set_2stage_config()
762 dev_err(sdev->dma_dev.dev, "invalid source step\n"); in sprd_dma_fill_desc()
778 dev_err(sdev->dma_dev.dev, "invalid destination step\n"); in sprd_dma_fill_desc()
786 dev_err(sdev->dma_dev.dev, "invalid source datawidth\n"); in sprd_dma_fill_desc()
792 dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n"); in sprd_dma_fill_desc()
1177 dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask); in sprd_dma_probe()
1179 INIT_LIST_HEAD(&sdev->dma_dev.channels); in sprd_dma_probe()
[all …]
hisi_dma.c
161 struct dma_device dma_dev; member
712 vchan_init(&hdma_dev->chan[i].vc, &hdma_dev->dma_dev); in hisi_dma_enable_qps()
824 struct dma_device *dma_dev; in hisi_dma_init_dma_dev() local
826 dma_dev = &hdma_dev->dma_dev; in hisi_dma_init_dma_dev()
827 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in hisi_dma_init_dma_dev()
828 dma_dev->device_free_chan_resources = hisi_dma_free_chan_resources; in hisi_dma_init_dma_dev()
829 dma_dev->device_prep_dma_memcpy = hisi_dma_prep_dma_memcpy; in hisi_dma_init_dma_dev()
830 dma_dev->device_tx_status = hisi_dma_tx_status; in hisi_dma_init_dma_dev()
831 dma_dev->device_issue_pending = hisi_dma_issue_pending; in hisi_dma_init_dma_dev()
832 dma_dev->device_terminate_all = hisi_dma_terminate_all; in hisi_dma_init_dma_dev()
[all …]
xgene-dma.c
311 struct dma_device dma_dev[XGENE_DMA_MAX_CHANNEL]; member
1486 struct dma_device *dma_dev) in xgene_dma_set_caps() argument
1489 dma_cap_zero(dma_dev->cap_mask); in xgene_dma_set_caps()
1504 dma_cap_set(DMA_PQ, dma_dev->cap_mask); in xgene_dma_set_caps()
1505 dma_cap_set(DMA_XOR, dma_dev->cap_mask); in xgene_dma_set_caps()
1508 dma_cap_set(DMA_XOR, dma_dev->cap_mask); in xgene_dma_set_caps()
1512 dma_dev->dev = chan->dev; in xgene_dma_set_caps()
1513 dma_dev->device_alloc_chan_resources = xgene_dma_alloc_chan_resources; in xgene_dma_set_caps()
1514 dma_dev->device_free_chan_resources = xgene_dma_free_chan_resources; in xgene_dma_set_caps()
1515 dma_dev->device_issue_pending = xgene_dma_issue_pending; in xgene_dma_set_caps()
[all …]
tegra186-gpc-dma.c
255 struct dma_device dma_dev; member
1288 chan = dma_get_any_slave_channel(&tdma->dma_dev); in tegra_dma_of_xlate()
1382 tdma->dma_dev.dev = &pdev->dev; in tegra_dma_probe()
1398 INIT_LIST_HEAD(&tdma->dma_dev.channels); in tegra_dma_probe()
1417 vchan_init(&tdc->vc, &tdma->dma_dev); in tegra_dma_probe()
1425 dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask); in tegra_dma_probe()
1426 dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask); in tegra_dma_probe()
1427 dma_cap_set(DMA_MEMCPY, tdma->dma_dev.cap_mask); in tegra_dma_probe()
1428 dma_cap_set(DMA_MEMSET, tdma->dma_dev.cap_mask); in tegra_dma_probe()
1429 dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask); in tegra_dma_probe()
[all …]
/linux-6.12.1/drivers/dma/ptdma/
ptdma-dmaengine.c
323 struct dma_device *dma_dev = &pt->dma_dev; in pt_dmaengine_register() local
355 dma_dev->dev = pt->dev; in pt_dmaengine_register()
356 dma_dev->src_addr_widths = DMA_SLAVE_BUSWIDTH_64_BYTES; in pt_dmaengine_register()
357 dma_dev->dst_addr_widths = DMA_SLAVE_BUSWIDTH_64_BYTES; in pt_dmaengine_register()
358 dma_dev->directions = DMA_MEM_TO_MEM; in pt_dmaengine_register()
359 dma_dev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR; in pt_dmaengine_register()
360 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in pt_dmaengine_register()
361 dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in pt_dmaengine_register()
367 dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask); in pt_dmaengine_register()
369 INIT_LIST_HEAD(&dma_dev->channels); in pt_dmaengine_register()
[all …]
/linux-6.12.1/drivers/spi/
spi-pxa2xx-pci.c
84 if (dws->dma_dev != chan->device->dev) in lpss_dma_filter()
91 static void lpss_dma_put_device(void *dma_dev) in lpss_dma_put_device() argument
93 pci_dev_put(dma_dev); in lpss_dma_put_device()
100 struct pci_dev *dma_dev; in lpss_spi_setup() local
152 dma_dev = pci_get_slot(dev->bus, PCI_DEVFN(PCI_SLOT(dev->devfn), 0)); in lpss_spi_setup()
153 ret = devm_add_action_or_reset(&dev->dev, lpss_dma_put_device, dma_dev); in lpss_spi_setup()
158 tx->dma_dev = &dma_dev->dev; in lpss_spi_setup()
163 rx->dma_dev = &dma_dev->dev; in lpss_spi_setup()
196 struct pci_dev *dma_dev; in mrfld_spi_setup() local
228 dma_dev = pci_get_slot(dev->bus, PCI_DEVFN(21, 0)); in mrfld_spi_setup()
[all …]
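
The spi-pxa2xx-pci and 8250_mid hits use dma_dev on the client side: a filter function that only accepts channels belonging to one specific DMA controller device. A hedged sketch (my_dma_slave and my_dma_filter are illustrative names, not the drivers' own):

#include <linux/dmaengine.h>

struct my_dma_slave {
	struct device *dma_dev;		/* the wanted controller's struct device */
};

static bool my_dma_filter(struct dma_chan *chan, void *param)
{
	struct my_dma_slave *slave = param;

	if (slave->dma_dev != chan->device->dev)
		return false;

	/* a real filter typically also passes request-line data via chan->private */
	return true;
}

A caller would then build a DMA_SLAVE capability mask and pass the filter to dma_request_channel(mask, my_dma_filter, &slave).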
/linux-6.12.1/drivers/crypto/ccp/
ccp-dmaengine.c
668 struct dma_device *dma_dev = &ccp->dma_dev; in ccp_dmaengine_register() local
714 dma_dev->dev = ccp->dev; in ccp_dmaengine_register()
715 dma_dev->src_addr_widths = CCP_DMA_WIDTH(dma_get_mask(ccp->dev)); in ccp_dmaengine_register()
716 dma_dev->dst_addr_widths = CCP_DMA_WIDTH(dma_get_mask(ccp->dev)); in ccp_dmaengine_register()
717 dma_dev->directions = DMA_MEM_TO_MEM; in ccp_dmaengine_register()
718 dma_dev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR; in ccp_dmaengine_register()
719 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in ccp_dmaengine_register()
720 dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in ccp_dmaengine_register()
729 dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask); in ccp_dmaengine_register()
731 INIT_LIST_HEAD(&dma_dev->channels); in ccp_dmaengine_register()
[all …]
/linux-6.12.1/drivers/dma/sf-pdma/
sf-pdma.c
93 dev_err(chan->pdma->dma_dev.dev, in sf_pdma_prep_dma_memcpy()
260 dev_err(chan->pdma->dma_dev.dev, "NULL desc.\n"); in sf_pdma_xfer_desc()
456 INIT_LIST_HEAD(&pdma->dma_dev.channels); in sf_pdma_setup_chans()
487 vchan_init(&chan->vchan, &pdma->dma_dev); in sf_pdma_setup_chans()
542 pdma->dma_dev.dev = &pdev->dev; in sf_pdma_probe()
545 dma_cap_set(DMA_MEMCPY, pdma->dma_dev.cap_mask); in sf_pdma_probe()
546 pdma->dma_dev.copy_align = 2; in sf_pdma_probe()
547 pdma->dma_dev.src_addr_widths = widths; in sf_pdma_probe()
548 pdma->dma_dev.dst_addr_widths = widths; in sf_pdma_probe()
549 pdma->dma_dev.directions = BIT(DMA_MEM_TO_MEM); in sf_pdma_probe()
[all …]
/linux-6.12.1/drivers/tty/serial/8250/
8250_mid.c
44 struct pci_dev *dma_dev; member
70 mid->dma_dev = pci_get_slot(pdev->bus, in pnw_setup()
77 pci_dev_put(mid->dma_dev); in pnw_exit()
89 chip = pci_get_drvdata(mid->dma_dev); in tng_handle_irq()
125 mid->dma_dev = pci_get_slot(pdev->bus, PCI_DEVFN(5, 0)); in tng_setup()
133 pci_dev_put(mid->dma_dev); in tng_exit()
192 mid->dma_dev = pdev; in dnv_setup()
200 if (!mid->dma_dev) in dnv_exit()
246 if (s->dma_dev != chan->device->dev || s->chan_id != chan->chan_id) in mid8250_dma_filter()
260 if (!mid->dma_dev) in mid8250_dma_setup()
[all …]
/linux-6.12.1/drivers/net/ethernet/broadcom/
bgmac.c
135 struct device *dma_dev = bgmac->dma_dev; in bgmac_dma_tx_add() local
162 slot->dma_addr = dma_map_single(dma_dev, skb->data, skb_headlen(skb), in bgmac_dma_tx_add()
164 if (unlikely(dma_mapping_error(dma_dev, slot->dma_addr))) in bgmac_dma_tx_add()
180 slot->dma_addr = skb_frag_dma_map(dma_dev, frag, 0, in bgmac_dma_tx_add()
182 if (unlikely(dma_mapping_error(dma_dev, slot->dma_addr))) in bgmac_dma_tx_add()
211 dma_unmap_single(dma_dev, slot->dma_addr, skb_headlen(skb), in bgmac_dma_tx_add()
220 dma_unmap_page(dma_dev, slot->dma_addr, len, DMA_TO_DEVICE); in bgmac_dma_tx_add()
237 struct device *dma_dev = bgmac->dma_dev; in bgmac_dma_tx_free() local
262 dma_unmap_single(dma_dev, slot->dma_addr, len, in bgmac_dma_tx_free()
265 dma_unmap_page(dma_dev, slot->dma_addr, len, in bgmac_dma_tx_free()
[all …]
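
In the bgmac hits, dma_dev is not a struct dma_device at all but the plain struct device used for streaming DMA mapping of TX buffers. A minimal hedged sketch of that map/check/unmap pattern (my_map_skb_head is a hypothetical helper):

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

static int my_map_skb_head(struct device *dma_dev, struct sk_buff *skb,
			   dma_addr_t *addr)
{
	*addr = dma_map_single(dma_dev, skb->data, skb_headlen(skb),
			       DMA_TO_DEVICE);
	if (unlikely(dma_mapping_error(dma_dev, *addr)))
		return -ENOMEM;
	return 0;
}

On completion (or on a later mapping error) the buffer is released with dma_unmap_single(dma_dev, addr, skb_headlen(skb), DMA_TO_DEVICE), matching the unmap calls in the excerpt.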
/linux-6.12.1/drivers/gpu/drm/exynos/
exynos_drm_dma.c
48 if (get_dma_ops(priv->dma_dev) != get_dma_ops(subdrv_dev)) { in drm_iommu_attach_device()
100 if (!priv->dma_dev) { in exynos_drm_register_dma()
101 priv->dma_dev = dev; in exynos_drm_register_dma()
116 mapping = iommu_get_domain_for_dev(priv->dma_dev); in exynos_drm_register_dma()
142 priv->dma_dev = NULL; in exynos_drm_cleanup_dma()
/linux-6.12.1/drivers/dma/stm32/
stm32-dma3.c
297 struct dma_device dma_dev; member
308 return container_of(chan->vchan.chan.device, struct stm32_dma3_ddata, dma_dev); in to_stm32_dma3_ddata()
556 struct dma_device dma_device = ddata->dma_dev; in stm32_dma3_chan_prep_hw()
1044 ret = pm_runtime_resume_and_get(ddata->dma_dev.dev); in stm32_dma3_alloc_chan_resources()
1085 pm_runtime_put_sync(ddata->dma_dev.dev); in stm32_dma3_alloc_chan_resources()
1111 pm_runtime_put_sync(ddata->dma_dev.dev); in stm32_dma3_free_chan_resources()
1504 ret = pm_runtime_resume_and_get(ddata->dma_dev.dev); in stm32_dma3_filter_fn()
1508 pm_runtime_put_sync(ddata->dma_dev.dev); in stm32_dma3_filter_fn()
1524 dma_cap_mask_t mask = ddata->dma_dev.cap_mask; in stm32_dma3_of_xlate()
1530 dev_err(ddata->dma_dev.dev, "Invalid args count\n"); in stm32_dma3_of_xlate()
[all …]
/linux-6.12.1/drivers/dma/xilinx/
xdma.c
128 struct dma_device dma_dev; member
481 vchan_init(&xchan->vchan, &xdev->dma_dev); in xdma_alloc_channels()
818 struct device *dev = xdev->dma_dev.dev; in xdma_alloc_chan_resources()
1193 dma_async_device_unregister(&xdev->dma_dev); in xdma_remove()
1247 INIT_LIST_HEAD(&xdev->dma_dev.channels); in xdma_probe()
1261 dma_cap_set(DMA_SLAVE, xdev->dma_dev.cap_mask); in xdma_probe()
1262 dma_cap_set(DMA_PRIVATE, xdev->dma_dev.cap_mask); in xdma_probe()
1263 dma_cap_set(DMA_CYCLIC, xdev->dma_dev.cap_mask); in xdma_probe()
1264 dma_cap_set(DMA_INTERLEAVE, xdev->dma_dev.cap_mask); in xdma_probe()
1265 dma_cap_set(DMA_REPEAT, xdev->dma_dev.cap_mask); in xdma_probe()
[all …]
/linux-6.12.1/drivers/dma/sh/
shdmac.c
164 dev_warn(shdev->shdma_dev.dma_dev.dev, "Can't initialize DMAOR.\n"); in sh_dmae_rst()
168 dev_warn(shdev->shdma_dev.dma_dev.dev, in sh_dmae_rst()
521 struct platform_device *pdev = to_platform_device(sdev->dma_dev.dev); in sh_dmae_chan_probe()
526 sh_chan = devm_kzalloc(sdev->dma_dev.dev, sizeof(struct sh_dmae_chan), in sh_dmae_chan_probe()
548 dev_err(sdev->dma_dev.dev, in sh_dmae_chan_probe()
679 struct dma_device *dma_dev; in sh_dmae_probe() local
718 dma_dev = &shdev->shdma_dev.dma_dev; in sh_dmae_probe()
729 dma_dev->src_addr_widths = widths; in sh_dmae_probe()
730 dma_dev->dst_addr_widths = widths; in sh_dmae_probe()
731 dma_dev->directions = BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM); in sh_dmae_probe()
[all …]
/linux-6.12.1/drivers/media/usb/stk1160/
stk1160-video.c
303 struct device *dma_dev = stk1160_get_dmadev(dev); in stk1160_isoc_irq() local
320 dma_sync_sgtable_for_cpu(dma_dev, stk_urb->sgt, DMA_FROM_DEVICE); in stk1160_isoc_irq()
330 dma_sync_sgtable_for_device(dma_dev, stk_urb->sgt, DMA_FROM_DEVICE); in stk1160_isoc_irq()
368 struct device *dma_dev = stk1160_get_dmadev(dev); in stk_free_urb() local
370 dma_vunmap_noncontiguous(dma_dev, stk_urb->transfer_buffer); in stk_free_urb()
371 dma_free_noncontiguous(dma_dev, stk_urb->urb->transfer_buffer_length, in stk_free_urb()
413 struct device *dma_dev = stk1160_get_dmadev(dev); in stk1160_fill_urb() local
418 stk_urb->sgt = dma_alloc_noncontiguous(dma_dev, sb_size, in stk1160_fill_urb()
428 stk_urb->transfer_buffer = dma_vmap_noncontiguous(dma_dev, sb_size, in stk1160_fill_urb()
437 dma_free_noncontiguous(dma_dev, sb_size, stk_urb->sgt, DMA_FROM_DEVICE); in stk1160_fill_urb()
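
stk1160 uses its dma_dev for non-contiguous USB transfer buffers: allocate an sg_table, vmap it for CPU access, and sync it around each completed URB. A hedged sketch of the allocation half (my_urb_buf and my_alloc_buf are illustrative names):

#include <linux/dma-mapping.h>

struct my_urb_buf {
	struct sg_table *sgt;
	void *vaddr;
};

static int my_alloc_buf(struct device *dma_dev, struct my_urb_buf *buf,
			size_t size)
{
	buf->sgt = dma_alloc_noncontiguous(dma_dev, size, DMA_FROM_DEVICE,
					   GFP_KERNEL, 0);
	if (!buf->sgt)
		return -ENOMEM;

	buf->vaddr = dma_vmap_noncontiguous(dma_dev, size, buf->sgt);
	if (!buf->vaddr) {
		dma_free_noncontiguous(dma_dev, size, buf->sgt, DMA_FROM_DEVICE);
		return -ENOMEM;
	}
	return 0;
}

Before the CPU touches a completed transfer the driver calls dma_sync_sgtable_for_cpu(dma_dev, buf->sgt, DMA_FROM_DEVICE), and dma_sync_sgtable_for_device() before handing the buffer back to the hardware, as the isoc IRQ lines above show.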
