Searched for refs:rx_chan (results 1 – 25 of 25), sorted by relevance

/linux-6.12.1/drivers/mtd/hyperbus/
hbmc-am654.c
29 struct dma_chan *rx_chan; member
78 struct dma_chan *rx_chan = priv->rx_chan; in am654_hbmc_dma_read() local
84 if (!priv->rx_chan || !virt_addr_valid(to) || object_is_on_stack(to)) in am654_hbmc_dma_read()
87 dma_dst = dma_map_single(rx_chan->device->dev, to, len, DMA_FROM_DEVICE); in am654_hbmc_dma_read()
88 if (dma_mapping_error(rx_chan->device->dev, dma_dst)) { in am654_hbmc_dma_read()
94 tx = dmaengine_prep_dma_memcpy(rx_chan, dma_dst, dma_src, len, flags); in am654_hbmc_dma_read()
112 dma_async_issue_pending(rx_chan); in am654_hbmc_dma_read()
114 dmaengine_terminate_sync(rx_chan); in am654_hbmc_dma_read()
120 dma_unmap_single(rx_chan->device->dev, dma_dst, len, DMA_FROM_DEVICE); in am654_hbmc_dma_read()
140 struct dma_chan *rx_chan; in am654_hbmc_request_mmap_dma() local
[all …]
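
The am654_hbmc_dma_read() hits above outline the usual dmaengine memcpy read: map the destination buffer, prepare a memcpy descriptor, issue it, wait for completion, and unmap. A minimal sketch of that pattern follows (hypothetical helper and names, not the driver's code; the one-second timeout is an assumption):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

static void demo_dma_done(void *param)
{
        complete(param);                        /* wake the waiting caller */
}

static int demo_dma_read(struct dma_chan *rx_chan, void *to,
                         dma_addr_t dma_src, size_t len)
{
        struct device *dev = rx_chan->device->dev;
        struct dma_async_tx_descriptor *tx;
        DECLARE_COMPLETION_ONSTACK(done);
        dma_addr_t dma_dst;
        dma_cookie_t cookie;
        int ret = 0;

        /* Map the destination buffer so the DMA engine can write to it. */
        dma_dst = dma_map_single(dev, to, len, DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, dma_dst))
                return -ENOMEM;

        tx = dmaengine_prep_dma_memcpy(rx_chan, dma_dst, dma_src, len,
                                       DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
        if (!tx) {
                ret = -EIO;
                goto unmap;
        }

        tx->callback = demo_dma_done;
        tx->callback_param = &done;

        cookie = dmaengine_submit(tx);
        ret = dma_submit_error(cookie);
        if (ret)
                goto unmap;

        dma_async_issue_pending(rx_chan);
        if (!wait_for_completion_timeout(&done, msecs_to_jiffies(1000))) {
                dmaengine_terminate_sync(rx_chan);      /* give up on timeout */
                ret = -ETIMEDOUT;
        }
unmap:
        dma_unmap_single(dev, dma_dst, len, DMA_FROM_DEVICE);
        return ret;
}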
/linux-6.12.1/drivers/soc/xilinx/
zynqmp_power.c
53 static struct mbox_chan *rx_chan; variable
147 ret = mbox_send_message(rx_chan, NULL); in ipi_receive_callback()
363 rx_chan = mbox_request_channel_byname(client, "rx"); in zynqmp_pm_probe()
364 if (IS_ERR(rx_chan)) { in zynqmp_pm_probe()
366 return PTR_ERR(rx_chan); in zynqmp_pm_probe()
399 if (!rx_chan) in zynqmp_pm_remove()
400 mbox_free_channel(rx_chan); in zynqmp_pm_remove()
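
zynqmp_power.c requests its rx_chan from the mailbox framework by name and notifies the controller back on the same channel from the receive callback. A hedged sketch of that pattern (illustrative names, not the driver's code):

#include <linux/mailbox_client.h>
#include <linux/device.h>
#include <linux/err.h>

static struct mbox_chan *demo_rx_chan;

static void demo_rx_callback(struct mbox_client *cl, void *data)
{
        /* ... consume the notification ..., then kick the channel back,
         * as ipi_receive_callback() does above. */
        mbox_send_message(demo_rx_chan, NULL);
}

static int demo_mbox_setup(struct device *dev, struct mbox_client *cl)
{
        cl->dev = dev;
        cl->rx_callback = demo_rx_callback;

        /* "rx" must match an entry in the device's mbox-names property. */
        demo_rx_chan = mbox_request_channel_byname(cl, "rx");
        if (IS_ERR(demo_rx_chan))
                return PTR_ERR(demo_rx_chan);
        return 0;
}

static void demo_mbox_teardown(void)
{
        if (!IS_ERR_OR_NULL(demo_rx_chan))
                mbox_free_channel(demo_rx_chan);
}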
/linux-6.12.1/drivers/net/ethernet/broadcom/
bcm63xx_enet.c
268 enet_dma_writel(priv, 1, ENETDMA_BUFALLOC_REG(priv->rx_chan)); in bcm_enet_refill_rx()
270 enet_dmac_writel(priv, 1, ENETDMAC_BUFALLOC, priv->rx_chan); in bcm_enet_refill_rx()
416 ENETDMAC_CHANCFG, priv->rx_chan); in bcm_enet_receive_queue()
497 ENETDMAC_IR, priv->rx_chan); in bcm_enet_poll()
519 ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_poll()
564 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_isr_dma()
782 val |= ENETDMA_CFG_FLOWCH_MASK(priv->rx_chan); in bcm_enet_set_flow()
784 val &= ~ENETDMA_CFG_FLOWCH_MASK(priv->rx_chan); in bcm_enet_set_flow()
935 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_open()
1011 ENETDMA_BUFALLOC_REG(priv->rx_chan)); in bcm_enet_open()
[all …]
bcm63xx_enet.h
216 int rx_chan; member
/linux-6.12.1/drivers/net/ethernet/allwinner/
sun4i-emac.c
90 struct dma_chan *rx_chan; member
290 desc = dmaengine_prep_slave_single(db->rx_chan, rxbuf, count, in emac_dma_inblk_32bit()
316 dma_async_issue_pending(db->rx_chan); in emac_dma_inblk_32bit()
734 if (rxlen >= dev->mtu && db->rx_chan) { in emac_rx()
930 db->rx_chan = dma_request_chan(&pdev->dev, "rx"); in emac_configure_dma()
931 if (IS_ERR(db->rx_chan)) { in emac_configure_dma()
934 err = PTR_ERR(db->rx_chan); in emac_configure_dma()
946 err = dmaengine_slave_config(db->rx_chan, &conf); in emac_configure_dma()
956 dma_release_channel(db->rx_chan); in emac_configure_dma()
959 db->rx_chan = NULL; in emac_configure_dma()
[all …]
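
sun4i-emac.c shows the slave-DMA variant: request the named "rx" channel, describe the device's RX FIFO with dmaengine_slave_config(), and release the channel if configuration fails. A rough sketch under assumed register address and bus width (not the driver's real values):

#include <linux/dmaengine.h>
#include <linux/err.h>

static struct dma_chan *demo_request_rx(struct device *dev, dma_addr_t rx_fifo)
{
        struct dma_slave_config conf = {
                .direction      = DMA_DEV_TO_MEM,
                .src_addr       = rx_fifo,      /* device RX FIFO register (assumed) */
                .src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .src_maxburst   = 4,
        };
        struct dma_chan *chan;
        int err;

        chan = dma_request_chan(dev, "rx");     /* name from the dma-names property */
        if (IS_ERR(chan))
                return chan;

        err = dmaengine_slave_config(chan, &conf);
        if (err) {
                dma_release_channel(chan);
                return ERR_PTR(err);
        }
        return chan;
}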
/linux-6.12.1/drivers/tty/serial/
samsung_tty.c
108 struct dma_chan *rx_chan; member
518 dma_sync_single_for_cpu(dma->rx_chan->device->dev, dma->rx_addr, in s3c24xx_uart_copy_rx_to_tty()
561 if (dma && dma->rx_chan) { in s3c24xx_serial_stop_rx()
563 dma_status = dmaengine_tx_status(dma->rx_chan, in s3c24xx_serial_stop_rx()
568 dmaengine_terminate_all(dma->rx_chan); in s3c24xx_serial_stop_rx()
617 dmaengine_tx_status(dma->rx_chan, dma->rx_cookie, &state); in s3c24xx_serial_rx_dma_complete()
640 dma_sync_single_for_device(dma->rx_chan->device->dev, dma->rx_addr, in s3c64xx_start_rx_dma()
643 dma->rx_desc = dmaengine_prep_slave_single(dma->rx_chan, in s3c64xx_start_rx_dma()
656 dma_async_issue_pending(dma->rx_chan); in s3c64xx_start_rx_dma()
734 dmaengine_pause(dma->rx_chan); in s3c24xx_serial_rx_chars_dma()
[all …]
/linux-6.12.1/drivers/spi/
spi-ti-qspi.c
57 struct dma_chan *rx_chan; member
444 struct dma_chan *chan = qspi->rx_chan; in ti_qspi_dma_xfer()
632 if (qspi->rx_chan) { in ti_qspi_exec_mem_op()
745 if (qspi->rx_chan) in ti_qspi_dma_cleanup()
746 dma_release_channel(qspi->rx_chan); in ti_qspi_dma_cleanup()
861 qspi->rx_chan = dma_request_chan_by_mask(&mask); in ti_qspi_probe()
862 if (IS_ERR(qspi->rx_chan)) { in ti_qspi_probe()
865 qspi->rx_chan = NULL; in ti_qspi_probe()
876 dma_release_channel(qspi->rx_chan); in ti_qspi_probe()
879 host->dma_rx = qspi->rx_chan; in ti_qspi_probe()
[all …]
spi-cadence-quadspi.c
83 struct dma_chan *rx_chan; member
1333 if (!cqspi->rx_chan || !virt_addr_valid(buf)) { in cqspi_direct_read_execute()
1338 ddev = cqspi->rx_chan->device->dev; in cqspi_direct_read_execute()
1344 tx = dmaengine_prep_dma_memcpy(cqspi->rx_chan, dma_dst, dma_src, in cqspi_direct_read_execute()
1364 dma_async_issue_pending(cqspi->rx_chan); in cqspi_direct_read_execute()
1367 dmaengine_terminate_sync(cqspi->rx_chan); in cqspi_direct_read_execute()
1629 cqspi->rx_chan = dma_request_chan_by_mask(&mask); in cqspi_request_mmap_dma()
1630 if (IS_ERR(cqspi->rx_chan)) { in cqspi_request_mmap_dma()
1631 int ret = PTR_ERR(cqspi->rx_chan); in cqspi_request_mmap_dma()
1633 cqspi->rx_chan = NULL; in cqspi_request_mmap_dma()
[all …]
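
Both SPI controllers above (spi-ti-qspi.c and spi-cadence-quadspi.c) obtain rx_chan with dma_request_chan_by_mask() and treat DMA as optional: on failure they clear rx_chan and fall back to PIO. A minimal sketch of that idiom (helper name and message are made up):

#include <linux/dmaengine.h>
#include <linux/device.h>
#include <linux/err.h>

static struct dma_chan *demo_request_memcpy_chan(struct device *dev)
{
        dma_cap_mask_t mask;
        struct dma_chan *chan;

        dma_cap_zero(mask);
        dma_cap_set(DMA_MEMCPY, mask);

        chan = dma_request_chan_by_mask(&mask);
        if (IS_ERR(chan)) {
                if (PTR_ERR(chan) == -EPROBE_DEFER)
                        return chan;            /* let probe retry later */
                dev_dbg(dev, "no memcpy channel, falling back to PIO\n");
                return NULL;                    /* NULL selects the PIO path */
        }
        return chan;
}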
/linux-6.12.1/drivers/usb/renesas_usbhs/
fifo.c
774 return fifo->rx_chan; in usbhsf_dma_chan_get()
1252 if (fifo->rx_chan) in usbhsf_dma_quit()
1253 dma_release_channel(fifo->rx_chan); in usbhsf_dma_quit()
1256 fifo->rx_chan = NULL; in usbhsf_dma_quit()
1270 fifo->rx_chan = dma_request_channel(mask, usbhsf_dma_filter, in usbhsf_dma_init_pdev()
1290 fifo->rx_chan = dma_request_chan(dev, name); in usbhsf_dma_init_dt()
1291 if (IS_ERR(fifo->rx_chan)) in usbhsf_dma_init_dt()
1292 fifo->rx_chan = NULL; in usbhsf_dma_init_dt()
1306 if (fifo->tx_chan || fifo->rx_chan) in usbhsf_dma_init()
1310 fifo->rx_chan ? "[RX]" : " "); in usbhsf_dma_init()
fifo.h
26 struct dma_chan *rx_chan; member
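
The renesas_usbhs fifo code requests rx_chan in two ways: a filter-based dma_request_channel() for platform-data setups and a named dma_request_chan() for device-tree setups, with NULL meaning "no DMA". A sketch of both styles; the filter criterion here is a placeholder assumption:

#include <linux/dmaengine.h>
#include <linux/err.h>

static bool demo_dma_filter(struct dma_chan *chan, void *param)
{
        return chan->private == param;          /* hypothetical match rule */
}

static struct dma_chan *demo_get_rx(struct device *dev, void *pdata, bool use_dt)
{
        struct dma_chan *chan;
        dma_cap_mask_t mask;

        if (use_dt) {
                chan = dma_request_chan(dev, "rx");
                return IS_ERR(chan) ? NULL : chan;      /* DMA stays optional */
        }

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);
        chan = dma_request_channel(mask, demo_dma_filter, pdata);
        return chan;                            /* NULL if nothing matched */
}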
/linux-6.12.1/drivers/remoteproc/
xlnx_r5_remoteproc.c
88 struct mbox_chan *rx_chan; member
248 if (mbox_send_message(ipi->rx_chan, NULL) < 0) in zynqmp_r5_mb_rx_cb()
288 ipi->rx_chan = mbox_request_channel_byname(mbox_cl, "rx"); in zynqmp_r5_setup_mbox()
289 if (IS_ERR(ipi->rx_chan)) { in zynqmp_r5_setup_mbox()
291 ipi->rx_chan = NULL; in zynqmp_r5_setup_mbox()
313 if (ipi->rx_chan) { in zynqmp_r5_free_mbox()
314 mbox_free_channel(ipi->rx_chan); in zynqmp_r5_free_mbox()
315 ipi->rx_chan = NULL; in zynqmp_r5_free_mbox()
/linux-6.12.1/drivers/mmc/host/
au1xmmc.c
113 u32 rx_chan; member
152 (((h)->flags & HOST_F_XMIT) ? (h)->tx_chan : (h)->rx_chan)
886 host->rx_chan = au1xxx_dbdma_chan_alloc(rxid, memid, in au1xmmc_dbdma_init()
888 if (!host->rx_chan) { in au1xmmc_dbdma_init()
895 au1xxx_dbdma_set_devwidth(host->rx_chan, 8); in au1xmmc_dbdma_init()
898 au1xxx_dbdma_ring_alloc(host->rx_chan, AU1XMMC_DESCRIPTOR_COUNT); in au1xmmc_dbdma_init()
911 au1xxx_dbdma_chan_free(host->rx_chan); in au1xmmc_dbdma_shutdown()
omap_hsmmc.c
196 struct dma_chan *rx_chan; member
813 return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan; in omap_hsmmc_get_dma_chan()
1894 host->rx_chan = dma_request_chan(&pdev->dev, "rx"); in omap_hsmmc_probe()
1895 if (IS_ERR(host->rx_chan)) { in omap_hsmmc_probe()
1897 ret = PTR_ERR(host->rx_chan); in omap_hsmmc_probe()
1916 dma_get_max_seg_size(host->rx_chan->device->dev), in omap_hsmmc_probe()
1970 if (!IS_ERR_OR_NULL(host->rx_chan)) in omap_hsmmc_probe()
1971 dma_release_channel(host->rx_chan); in omap_hsmmc_probe()
1990 dma_release_channel(host->rx_chan); in omap_hsmmc_remove()
sdhci.c
1224 host->rx_chan = dma_request_chan(mmc_dev(mmc), "rx"); in sdhci_external_dma_init()
1225 if (IS_ERR(host->rx_chan)) { in sdhci_external_dma_init()
1231 ret = PTR_ERR(host->rx_chan); in sdhci_external_dma_init()
1234 host->rx_chan = NULL; in sdhci_external_dma_init()
1243 return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan; in sdhci_external_dma_channel()
1308 if (host->rx_chan) { in sdhci_external_dma_release()
1309 dma_release_channel(host->rx_chan); in sdhci_external_dma_release()
1310 host->rx_chan = NULL; in sdhci_external_dma_release()
sdhci.h
577 struct dma_chan *rx_chan; member
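
omap_hsmmc.c and sdhci.c pick between tx_chan and rx_chan the same way: the request's data-direction flag decides. A one-function sketch (the struct name is made up):

#include <linux/mmc/core.h>
#include <linux/dmaengine.h>

struct demo_host {
        struct dma_chan *tx_chan;
        struct dma_chan *rx_chan;
};

static struct dma_chan *demo_dma_chan(struct demo_host *host,
                                      struct mmc_data *data)
{
        /* Writes go out on the TX channel, reads come in on RX. */
        return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan;
}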
/linux-6.12.1/arch/mips/include/asm/mach-bcm63xx/
bcm63xx_dev_enet.h
60 int rx_chan; member
/linux-6.12.1/arch/mips/bcm63xx/
dev-enet.c
277 dpd->rx_chan = 0; in bcm63xx_enet_register()
280 dpd->rx_chan = 2; in bcm63xx_enet_register()
/linux-6.12.1/drivers/net/ethernet/xilinx/
xilinx_axienet_main.c
1141 dma_async_issue_pending(lp->rx_chan); in axienet_dma_rx_cb()
1411 dma_rx_desc = dmaengine_prep_slave_sg(lp->rx_chan, skbuf_dma->sgl, in axienet_rx_submit_desc()
1453 lp->rx_chan = dma_request_chan(lp->dev, "rx_chan0"); in axienet_init_dmaengine()
1454 if (IS_ERR(lp->rx_chan)) { in axienet_init_dmaengine()
1455 ret = PTR_ERR(lp->rx_chan); in axienet_init_dmaengine()
1496 dma_async_issue_pending(lp->rx_chan); in axienet_init_dmaengine()
1509 dma_release_channel(lp->rx_chan); in axienet_init_dmaengine()
1677 dmaengine_terminate_sync(lp->rx_chan); in axienet_stop()
1678 dmaengine_synchronize(lp->rx_chan); in axienet_stop()
1687 dma_release_channel(lp->rx_chan); in axienet_stop()
xilinx_axienet.h
616 struct dma_chan *rx_chan; member
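
axienet_stop() above tears the RX channel down in a fixed order: abort outstanding work, drain callbacks, then release the channel. Sketched with an assumed helper name:

#include <linux/dmaengine.h>

static void demo_rx_dma_teardown(struct dma_chan *rx_chan)
{
        dmaengine_terminate_sync(rx_chan);      /* abort outstanding descriptors */
        dmaengine_synchronize(rx_chan);         /* drain completion callbacks, as the driver does */
        dma_release_channel(rx_chan);           /* hand the channel back */
}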
/linux-6.12.1/drivers/mtd/nand/raw/
qcom_nandc.c
412 struct dma_chan *rx_chan; member
1316 ret = prepare_bam_async_desc(nandc, nandc->rx_chan, 0); in submit_descs()
1349 dma_async_issue_pending(nandc->rx_chan); in submit_descs()
3030 if (nandc->rx_chan) in qcom_nandc_unalloc()
3031 dma_release_channel(nandc->rx_chan); in qcom_nandc_unalloc()
3093 nandc->rx_chan = dma_request_chan(nandc->dev, "rx"); in qcom_nandc_alloc()
3094 if (IS_ERR(nandc->rx_chan)) { in qcom_nandc_alloc()
3095 ret = PTR_ERR(nandc->rx_chan); in qcom_nandc_alloc()
3096 nandc->rx_chan = NULL; in qcom_nandc_alloc()
/linux-6.12.1/drivers/dma/qcom/
gpi.c
2063 struct gchan *tx_chan, *rx_chan; in gpi_find_avail_gpii() local
2072 rx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_RX_CHAN]; in gpi_find_avail_gpii()
2074 if (rx_chan->vc.chan.client_count && rx_chan->seid == seid) in gpi_find_avail_gpii()
2086 rx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_RX_CHAN]; in gpi_find_avail_gpii()
2090 rx_chan->vc.chan.client_count) in gpi_find_avail_gpii()
/linux-6.12.1/drivers/ntb/
ntb_transport.c
2423 struct dma_chan *rx_chan, *tx_chan; in ntb_transport_max_size() local
2428 rx_chan = qp->rx_dma_chan; in ntb_transport_max_size()
2431 copy_align = max(rx_chan ? rx_chan->device->copy_align : 0, in ntb_transport_max_size()
/linux-6.12.1/drivers/net/ethernet/chelsio/inline_crypto/chtls/
chtls.h
304 u32 rx_chan; member
/linux-6.12.1/drivers/scsi/cxgbi/
libcxgbi.h
123 unsigned int rx_chan; member
/linux-6.12.1/drivers/net/ethernet/ti/
am65-cpsw-nuss.c
3315 struct am65_cpsw_rx_chn *rx_chan = &common->rx_chns; in am65_cpsw_nuss_register_ndevs() local
3340 k3_udma_glue_reset_rx_chn(rx_chan->rx_chn, i, in am65_cpsw_nuss_register_ndevs()
3341 rx_chan, in am65_cpsw_nuss_register_ndevs()
3344 k3_udma_glue_disable_rx_chn(rx_chan->rx_chn); in am65_cpsw_nuss_register_ndevs()