/linux-6.12.1/drivers/dma/ |
D | owl-dma.c |
    192  struct owl_dma_vchan *vchan;  member
    381  static inline int owl_dma_cfg_lli(struct owl_dma_vchan *vchan,  in owl_dma_cfg_lli() argument
    388  struct owl_dma *od = to_owl_dma(vchan->vc.chan.device);  in owl_dma_cfg_lli()
    401  mode |= OWL_DMA_MODE_TS(vchan->drq)  in owl_dma_cfg_lli()
    414  mode |= OWL_DMA_MODE_TS(vchan->drq)  in owl_dma_cfg_lli()
    469  struct owl_dma_vchan *vchan)  in owl_dma_get_pchan() argument
    479  if (!pchan->vchan) {  in owl_dma_get_pchan()
    480  pchan->vchan = vchan;  in owl_dma_get_pchan()
    520  pchan->vchan = NULL;  in owl_dma_terminate_pchan()
    535  static int owl_dma_start_next_txd(struct owl_dma_vchan *vchan)  in owl_dma_start_next_txd() argument
    [all …]
D | sun4i-dma.c |
    139  struct sun4i_dma_vchan *vchan;  member
    223  struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan);  in sun4i_dma_free_chan_resources() local
    225  vchan_free_chan_resources(&vchan->vc);  in sun4i_dma_free_chan_resources()
    229  struct sun4i_dma_vchan *vchan)  in find_and_use_pchan() argument
    239  if (vchan->is_dedicated) {  in find_and_use_pchan()
    250  pchan->vchan = vchan;  in find_and_use_pchan()
    267  pchan->vchan = NULL;  in release_pchan()
    331  struct sun4i_dma_vchan *vchan)  in __execute_vchan_pending() argument
    339  lockdep_assert_held(&vchan->vc.lock);  in __execute_vchan_pending()
    342  pchan = find_and_use_pchan(priv, vchan);  in __execute_vchan_pending()
    [all …]
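
In owl-dma.c, sun4i-dma.c, pxa_dma.c and mmp_pdma.c the "member" hit is a back-pointer inside the physical-channel structure: a free pchan is claimed by storing the owning virtual channel in it (owl_dma_get_pchan(), find_and_use_pchan(), lookup_phy()) and released by clearing the pointer, under a device-wide lock. A minimal sketch of that ownership pattern; every foo_* name below is invented for illustration and comes from none of the drivers listed here.

/* Hypothetical sketch of the pchan <-> vchan ownership pattern. */
#include <linux/spinlock.h>
#include <linux/types.h>

struct foo_vchan;                       /* one virtual channel per client */

struct foo_pchan {
        unsigned int id;                /* hardware channel index */
        struct foo_vchan *vchan;        /* NULL while the pchan is free */
};

struct foo_dma_dev {
        spinlock_t lock;                /* protects pchan ownership */
        unsigned int nr_pchans;
        struct foo_pchan pchans[8];
};

/* Claim the first idle physical channel for @vchan, or return NULL. */
static struct foo_pchan *foo_get_pchan(struct foo_dma_dev *od,
                                       struct foo_vchan *vchan)
{
        struct foo_pchan *pchan = NULL;
        unsigned long flags;
        unsigned int i;

        spin_lock_irqsave(&od->lock, flags);
        for (i = 0; i < od->nr_pchans; i++) {
                if (!od->pchans[i].vchan) {
                        pchan = &od->pchans[i];
                        pchan->vchan = vchan;   /* mark as owned */
                        break;
                }
        }
        spin_unlock_irqrestore(&od->lock, flags);

        return pchan;
}

/* Give the physical channel back to the free pool. */
static void foo_put_pchan(struct foo_dma_dev *od, struct foo_pchan *pchan)
{
        unsigned long flags;

        spin_lock_irqsave(&od->lock, flags);
        pchan->vchan = NULL;
        spin_unlock_irqrestore(&od->lock, flags);
}
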
D | sun6i-dma.c |
    180  struct sun6i_vchan *vchan;  member
    395  static inline void sun6i_dma_dump_lli(struct sun6i_vchan *vchan,  in sun6i_dma_dump_lli() argument
    399  dev_dbg(chan2dev(&vchan->vc.chan),  in sun6i_dma_dump_lli()
    434  static int sun6i_dma_start_desc(struct sun6i_vchan *vchan)  in sun6i_dma_start_desc() argument
    436  struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device);  in sun6i_dma_start_desc()
    437  struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc);  in sun6i_dma_start_desc()
    438  struct sun6i_pchan *pchan = vchan->phy;  in sun6i_dma_start_desc()
    455  sun6i_dma_dump_lli(vchan, pchan->desc->v_lli, pchan->desc->p_lli);  in sun6i_dma_start_desc()
    460  vchan->irq_type = vchan->cyclic ? DMA_IRQ_PKG : DMA_IRQ_QUEUE;  in sun6i_dma_start_desc()
    465  irq_val |= vchan->irq_type << (irq_offset * DMA_IRQ_CHAN_WIDTH);  in sun6i_dma_start_desc()
    [all …]
D | st_fdma.c |
    25   return container_of(c, struct st_fdma_chan, vchan.chan);  in to_st_fdma_chan()
    80   vdesc = vchan_next_desc(&fchan->vchan);  in st_fdma_xfer_desc()
    86   cmd = FDMA_CMD_START(fchan->vchan.chan.chan_id);  in st_fdma_xfer_desc()
    95   dev_dbg(fchan->fdev->dev, "start chan:%d\n", fchan->vchan.chan.chan_id);  in st_fdma_xfer_desc()
    102  int ch_id = fchan->vchan.chan.chan_id;  in st_fdma_ch_sta_update()
    140  spin_lock(&fchan->vchan.lock);  in st_fdma_irq_handler()
    158  spin_unlock(&fchan->vchan.lock);  in st_fdma_irq_handler()
    283  fchan->vchan.chan.chan_id, fchan->cfg.type);  in st_fdma_alloc_chan_res()
    295  __func__, fchan->vchan.chan.chan_id);  in st_fdma_free_chan_res()
    300  spin_lock_irqsave(&fchan->vchan.lock, flags);  in st_fdma_free_chan_res()
    [all …]
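
The st_fdma_xfer_desc() and owl_dma_start_next_txd() hits share one shape: with the vchan lock held, pop the next issued descriptor off the virtual channel and program it into the hardware. A hedged sketch follows; foo_* names and foo_hw_start() are stand-ins, not APIs of either driver, and "virt-dma.h" is the private helper header in drivers/dma/.

/* Sketch only: start the next issued descriptor on an idle channel.
 * Assumes the caller holds fchan->vchan.lock. */
#include <linux/dmaengine.h>
#include <linux/errno.h>
#include "virt-dma.h"

struct foo_chan {
        struct virt_dma_chan vchan;
        struct virt_dma_desc *desc;     /* descriptor currently on the HW */
};

static void foo_hw_start(struct foo_chan *fchan)
{
        /* hypothetical: write the descriptor address and the start bit */
}

static int foo_start_next_txd(struct foo_chan *fchan)
{
        struct virt_dma_desc *vd = vchan_next_desc(&fchan->vchan);

        if (!vd)
                return -ENOENT;         /* nothing on desc_issued */

        /*
         * Take it off the issued list; the IRQ handler hands it back to
         * the core later via vchan_cookie_complete().
         */
        list_del(&vd->node);
        fchan->desc = vd;

        foo_hw_start(fchan);
        return 0;
}
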
D | ls2x-apb-dma.c |
    124  struct virt_dma_chan vchan;  member
    156  return container_of(chan, struct ls2x_dma_chan, vchan.chan);  in to_ldma_chan()
    186  struct ls2x_dma_priv *priv = to_ldma_priv(lchan->vchan.chan.device);  in ls2x_dma_write_cmd()
    196  struct ls2x_dma_priv *priv = to_ldma_priv(lchan->vchan.chan.device);  in ls2x_dma_start_transfer()
    202  vdesc = vchan_next_desc(&lchan->vchan);  in ls2x_dma_start_transfer()
    367  return vchan_tx_prep(&lchan->vchan, &desc->vdesc, flags);  in ls2x_dma_prep_slave_sg()
    435  return vchan_tx_prep(&lchan->vchan, &desc->vdesc, flags);  in ls2x_dma_prep_dma_cyclic()
    466  spin_lock_irqsave(&lchan->vchan.lock, flags);  in ls2x_dma_issue_pending()
    467  if (vchan_issue_pending(&lchan->vchan) && !lchan->desc)  in ls2x_dma_issue_pending()
    469  spin_unlock_irqrestore(&lchan->vchan.lock, flags);  in ls2x_dma_issue_pending()
    [all …]
D | idma64.c |
    107  struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device);  in idma64_stop_transfer()
    114  struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device);  in idma64_start_transfer()
    118  vdesc = vchan_next_desc(&idma64c->vchan);  in idma64_start_transfer()
    143  stat = this_cpu_ptr(idma64c->vchan.chan.local);  in idma64_chan_irq()
    145  spin_lock(&idma64c->vchan.lock);  in idma64_chan_irq()
    163  spin_unlock(&idma64c->vchan.lock);  in idma64_chan_irq()
    334  return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags);  in idma64_prep_slave_sg()
    342  spin_lock_irqsave(&idma64c->vchan.lock, flags);  in idma64_issue_pending()
    343  if (vchan_issue_pending(&idma64c->vchan) && !idma64c->desc)  in idma64_issue_pending()
    345  spin_unlock_irqrestore(&idma64c->vchan.lock, flags);  in idma64_issue_pending()
    [all …]
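
The issue_pending hits in idma64.c, ls2x-apb-dma.c, dma-jz4780.c and hsu.c are all the same three lines: take the vchan lock, let vchan_issue_pending() move submitted descriptors onto desc_issued, and only kick the hardware if it is currently idle. Sketch with hypothetical foo_* names:

#include <linux/dmaengine.h>
#include <linux/spinlock.h>
#include "virt-dma.h"

struct foo_chan {
        struct virt_dma_chan vchan;
        struct virt_dma_desc *desc;     /* non-NULL while the HW is busy */
};

static inline struct foo_chan *to_foo_chan(struct dma_chan *chan)
{
        return container_of(chan, struct foo_chan, vchan.chan);
}

static void foo_start_transfer(struct foo_chan *fchan)
{
        /* hypothetical: see the start-next-descriptor sketch above */
}

static void foo_issue_pending(struct dma_chan *chan)
{
        struct foo_chan *fchan = to_foo_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&fchan->vchan.lock, flags);
        /* true if anything moved from desc_submitted to desc_issued */
        if (vchan_issue_pending(&fchan->vchan) && !fchan->desc)
                foo_start_transfer(fchan);
        spin_unlock_irqrestore(&fchan->vchan.lock, flags);
}
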
D | fsl-edma-common.c |
    49   spin_lock(&fsl_chan->vchan.lock);  in fsl_edma_tx_chan_handler()
    53   spin_unlock(&fsl_chan->vchan.lock);  in fsl_edma_tx_chan_handler()
    69   spin_unlock(&fsl_chan->vchan.lock);  in fsl_edma_tx_chan_handler()
    105  u32 ch = fsl_chan->vchan.chan.chan_id;  in fsl_edma_enable_request()
    139  u32 ch = fsl_chan->vchan.chan.chan_id;  in fsl_edma_disable_request()
    185  u32 ch = fsl_chan->vchan.chan.chan_id;  in fsl_edma_chan_mux()
    195  ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux;  in fsl_edma_chan_mux()
    238  spin_lock_irqsave(&fsl_chan->vchan.lock, flags);  in fsl_edma_terminate_all()
    242  vchan_get_all_descriptors(&fsl_chan->vchan, &head);  in fsl_edma_terminate_all()
    243  spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);  in fsl_edma_terminate_all()
    [all …]
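
The fsl_edma_terminate_all() hits, and the matching ones in dma-axi-dmac.c, stm32-dma.c and edma.c further down, show the canonical teardown: stop the hardware, collect every descriptor the channel still owns while holding the vchan lock, then free the list outside it. A sketch under assumed foo_* names:

#include <linux/dmaengine.h>
#include <linux/spinlock.h>
#include "virt-dma.h"

struct foo_chan {
        struct virt_dma_chan vchan;
        struct virt_dma_desc *desc;     /* descriptor currently on the HW */
};

static inline struct foo_chan *to_foo_chan(struct dma_chan *chan)
{
        return container_of(chan, struct foo_chan, vchan.chan);
}

static void foo_hw_stop(struct foo_chan *fchan)
{
        /* hypothetical: disable the channel and clear pending IRQs */
}

static int foo_terminate_all(struct dma_chan *chan)
{
        struct foo_chan *fchan = to_foo_chan(chan);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&fchan->vchan.lock, flags);
        foo_hw_stop(fchan);
        if (fchan->desc) {
                /* park the in-flight descriptor on desc_terminated */
                vchan_terminate_vdesc(fchan->desc);
                fchan->desc = NULL;
        }
        vchan_get_all_descriptors(&fchan->vchan, &head);
        spin_unlock_irqrestore(&fchan->vchan.lock, flags);

        /* free allocated/submitted/issued/completed descriptors unlocked */
        vchan_dma_desc_free_list(&fchan->vchan, &head);
        return 0;
}

static void foo_synchronize(struct dma_chan *chan)
{
        /* waits for callbacks and reaps desc_terminated, cf. the
         * vchan_synchronize() hits in stm32-dma.c and edma.c */
        vchan_synchronize(&to_foo_chan(chan)->vchan);
}
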
D | dma-jz4780.c |
    130  struct virt_dma_chan vchan;  member
    167  return container_of(chan, struct jz4780_dma_chan, vchan.chan);  in to_jz4780_dma_chan()
    179  return container_of(jzchan->vchan.chan.device, struct jz4780_dma_dev,  in jz4780_dma_chan_parent()
    403  return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);  in jz4780_dma_prep_slave_sg()
    455  return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);  in jz4780_dma_prep_dma_cyclic()
    483  return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);  in jz4780_dma_prep_dma_memcpy()
    494  vdesc = vchan_next_desc(&jzchan->vchan);  in jz4780_dma_begin()
    566  spin_lock_irqsave(&jzchan->vchan.lock, flags);  in jz4780_dma_issue_pending()
    568  if (vchan_issue_pending(&jzchan->vchan) && !jzchan->desc)  in jz4780_dma_issue_pending()
    571  spin_unlock_irqrestore(&jzchan->vchan.lock, flags);  in jz4780_dma_issue_pending()
    [all …]
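
The vchan_tx_prep() hits in dma-jz4780.c (and in ls2x-apb-dma.c, stm32-mdma.c, hsu.c and edma.c) are the tail of every prep callback: build a driver descriptor around an embedded virt_dma_desc, then hand it to the virt-dma core, which owns its lifetime from that point on. Sketch with hypothetical foo_* types and a deliberately flat hardware descriptor:

#include <linux/dmaengine.h>
#include <linux/overflow.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include "virt-dma.h"

struct foo_hwdesc {
        dma_addr_t addr;
        u32 len;
};

struct foo_desc {
        struct virt_dma_desc vdesc;     /* embedded virt-dma descriptor */
        unsigned int nr_sgs;
        struct foo_hwdesc sg[];         /* freed by the desc_free callback */
};

struct foo_chan {
        struct virt_dma_chan vchan;
};

static inline struct foo_chan *to_foo_chan(struct dma_chan *chan)
{
        return container_of(chan, struct foo_chan, vchan.chan);
}

static struct dma_async_tx_descriptor *
foo_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                  unsigned int sg_len, enum dma_transfer_direction dir,
                  unsigned long flags, void *context)
{
        struct foo_chan *fchan = to_foo_chan(chan);
        struct scatterlist *sg;
        struct foo_desc *desc;
        unsigned int i;

        desc = kzalloc(struct_size(desc, sg, sg_len), GFP_NOWAIT);
        if (!desc)
                return NULL;
        desc->nr_sgs = sg_len;

        for_each_sg(sgl, sg, sg_len, i) {
                desc->sg[i].addr = sg_dma_address(sg);
                desc->sg[i].len = sg_dma_len(sg);
        }

        /*
         * Register the descriptor with the virt-dma core: it sits on
         * desc_allocated now and moves to desc_submitted at tx_submit().
         */
        return vchan_tx_prep(&fchan->vchan, &desc->vdesc, flags);
}
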
D | fsl-qdma.c |
    181  struct virt_dma_chan vchan;  member
    301  return container_of(chan, struct fsl_qdma_chan, vchan.chan);  in to_fsl_qdma_chan()
    318  spin_lock_irqsave(&fsl_chan->vchan.lock, flags);  in fsl_qdma_free_chan_resources()
    319  vchan_get_all_descriptors(&fsl_chan->vchan, &head);  in fsl_qdma_free_chan_resources()
    320  spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags);  in fsl_qdma_free_chan_resources()
    322  vchan_dma_desc_free_list(&fsl_chan->vchan, &head);  in fsl_qdma_free_chan_resources()
    729  spin_lock(&fsl_comp->qchan->vchan.lock);  in fsl_qdma_queue_transfer_complete()
    732  spin_unlock(&fsl_comp->qchan->vchan.lock);  in fsl_qdma_queue_transfer_complete()
    985  return vchan_tx_prep(&fsl_chan->vchan, &fsl_comp->vdesc, flags);  in fsl_qdma_prep_memcpy()
    999  vdesc = vchan_next_desc(&fsl_chan->vchan);  in fsl_qdma_enqueue_desc()
    [all …]
D | st_fdma.h |
    125  struct virt_dma_chan vchan;  member
    186  + (fchan)->vchan.chan.chan_id * 0x4 \
    191  + (fchan)->vchan.chan.chan_id * 0x4 \
    208  + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
    213  + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
D | dma-axi-dmac.c |
    143  struct virt_dma_chan vchan;  member
    176  return container_of(chan->vchan.chan.device, struct axi_dmac,  in chan_to_axi_dmac()
    182  return container_of(c, struct axi_dmac_chan, vchan.chan);  in to_axi_dmac_chan()
    245  vdesc = vchan_next_desc(&chan->vchan);  in axi_dmac_start_transfer()
    470  spin_lock(&dmac->chan.vchan.lock);  in axi_dmac_interrupt_handler()
    481  spin_unlock(&dmac->chan.vchan.lock);  in axi_dmac_interrupt_handler()
    493  spin_lock_irqsave(&chan->vchan.lock, flags);  in axi_dmac_terminate_all()
    496  vchan_get_all_descriptors(&chan->vchan, &head);  in axi_dmac_terminate_all()
    498  spin_unlock_irqrestore(&chan->vchan.lock, flags);  in axi_dmac_terminate_all()
    500  vchan_dma_desc_free_list(&chan->vchan, &head);  in axi_dmac_terminate_all()
    [all …]
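
The interrupt-handler hits (axi_dmac_interrupt_handler(), st_fdma_irq_handler(), idma64_chan_irq(), stm32_dma3_chan_irq()) take the plain spin_lock on vchan.lock because they already run in hard-IRQ context, report the finished descriptor with vchan_cookie_complete() (or vchan_cyclic_callback() for cyclic transfers), and chain the next one. Sketch, foo_* names hypothetical:

#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include "virt-dma.h"

struct foo_chan {
        struct virt_dma_chan vchan;
        struct virt_dma_desc *desc;     /* descriptor the HW just finished */
        bool cyclic;
};

static void foo_start_next(struct foo_chan *fchan)
{
        /* hypothetical: vchan_next_desc() + program the hardware */
}

static irqreturn_t foo_irq_handler(int irq, void *dev_id)
{
        struct foo_chan *fchan = dev_id;

        spin_lock(&fchan->vchan.lock);  /* already in hard-IRQ context */

        if (fchan->desc) {
                if (fchan->cyclic) {
                        /* cyclic transfers just ping the period callback */
                        vchan_cyclic_callback(fchan->desc);
                } else {
                        /* marks the cookie complete and schedules the
                         * client callback on the vchan tasklet */
                        vchan_cookie_complete(fchan->desc);
                        fchan->desc = NULL;
                        foo_start_next(fchan);
                }
        }

        spin_unlock(&fchan->vchan.lock);
        return IRQ_HANDLED;
}
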
D | pxa_dma.c |
    100  struct pxad_chan *vchan;  member
    151  dev_vdbg(&phy->vchan->vc.chan.dev->device, \
    159  dev_vdbg(&phy->vchan->vc.chan.dev->device, \
    166  dev_vdbg(&phy->vchan->vc.chan.dev->device, \
    390  if (!phy->vchan) {  in lookup_phy()
    391  phy->vchan = pchan;  in lookup_phy()
    425  chan->phy->vchan = NULL;  in pxad_free_phy()
    455  if (!phy->vchan)  in phy_enable()
    458  dev_dbg(&phy->vchan->vc.chan.dev->device,  in phy_enable()
    462  pdev = to_pxad_dev(phy->vchan->vc.chan.device);  in phy_enable()
    [all …]
D | mmp_pdma.c |
    118  struct mmp_pdma_chan *vchan;  member
    154  if (!phy->vchan)  in enable_chan()
    157  reg = DRCMR(phy->vchan->drcmr);  in enable_chan()
    161  if (phy->vchan->byte_align)  in enable_chan()
    194  if ((dcsr & DCSR_BUSERR) && (phy->vchan))  in clear_chan_irq()
    195  dev_warn(phy->vchan->dev, "DCSR_BUSERR\n");  in clear_chan_irq()
    207  tasklet_schedule(&phy->vchan->tasklet);  in mmp_pdma_chan_handler()
    259  if (!phy->vchan) {  in lookup_phy()
    260  phy->vchan = pchan;  in lookup_phy()
    286  pchan->phy->vchan = NULL;  in mmp_pdma_free_phy()
/linux-6.12.1/drivers/dma/hsu/ |
D | hsu.c |
    121  vdesc = vchan_next_desc(&hsuc->vchan);  in hsu_dma_start_transfer()
    168  spin_lock_irqsave(&hsuc->vchan.lock, flags);  in hsu_dma_get_status()
    170  spin_unlock_irqrestore(&hsuc->vchan.lock, flags);  in hsu_dma_get_status()
    222  stat = this_cpu_ptr(hsuc->vchan.chan.local);  in hsu_dma_do_irq()
    224  spin_lock_irqsave(&hsuc->vchan.lock, flags);  in hsu_dma_do_irq()
    238  spin_unlock_irqrestore(&hsuc->vchan.lock, flags);  in hsu_dma_do_irq()
    295  return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags);  in hsu_dma_prep_slave_sg()
    303  spin_lock_irqsave(&hsuc->vchan.lock, flags);  in hsu_dma_issue_pending()
    304  if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc)  in hsu_dma_issue_pending()
    306  spin_unlock_irqrestore(&hsuc->vchan.lock, flags);  in hsu_dma_issue_pending()
    [all …]
D | hsu.h |
    87   struct virt_dma_chan vchan;  member
    100  return container_of(chan, struct hsu_dma_chan, vchan.chan);  in to_hsu_dma_chan()
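
Nearly every "member" hit in this listing is the idiom hsu.h spells out in two lines: the driver channel embeds struct virt_dma_chan as a field named vchan, and a small helper recovers the driver type from the generic struct dma_chan pointer the dmaengine core hands back. Sketch with a hypothetical foo_dma_chan:

#include <linux/dmaengine.h>
#include "virt-dma.h"

struct foo_dma_chan {
        struct virt_dma_chan vchan;     /* generic virt-dma channel state */
        void __iomem *reg_base;         /* driver-private fields follow */
        u32 id;
};

/* dma_chan is embedded inside vchan, which is embedded inside the driver
 * channel, so one container_of() against vchan.chan covers both hops. */
static inline struct foo_dma_chan *to_foo_dma_chan(struct dma_chan *chan)
{
        return container_of(chan, struct foo_dma_chan, vchan.chan);
}
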
/linux-6.12.1/drivers/dma/sf-pdma/ |
D | sf-pdma.c |
    47   return container_of(dchan, struct sf_pdma_chan, vchan.chan);  in to_sf_pdma_chan()
    103  desc->async_tx = vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in sf_pdma_prep_dma_memcpy()
    105  spin_lock_irqsave(&chan->vchan.lock, iflags);  in sf_pdma_prep_dma_memcpy()
    107  spin_unlock_irqrestore(&chan->vchan.lock, iflags);  in sf_pdma_prep_dma_memcpy()
    146  spin_lock_irqsave(&chan->vchan.lock, flags);  in sf_pdma_free_chan_resources()
    150  vchan_get_all_descriptors(&chan->vchan, &head);  in sf_pdma_free_chan_resources()
    152  spin_unlock_irqrestore(&chan->vchan.lock, flags);  in sf_pdma_free_chan_resources()
    153  vchan_dma_desc_free_list(&chan->vchan, &head);  in sf_pdma_free_chan_resources()
    166  spin_lock_irqsave(&chan->vchan.lock, flags);  in sf_pdma_desc_residue()
    168  list_for_each_entry(vd, &chan->vchan.desc_submitted, node)  in sf_pdma_desc_residue()
    [all …]
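
The last sf-pdma.c hits come from its residue path. A common way to write that tx_status/residue callback is sketched below; it uses vchan_find_desc() to spot a descriptor the hardware has not started yet, whereas sf_pdma_desc_residue() walks desc_submitted by hand, so treat this as an illustration of the lists involved rather than that driver's exact logic (foo_* names hypothetical):

#include <linux/dmaengine.h>
#include <linux/spinlock.h>
#include "virt-dma.h"           /* also pulls the private dmaengine.h helpers */

struct foo_desc {
        struct virt_dma_desc vdesc;
        u32 total_len;                  /* bytes covered by this descriptor */
};

struct foo_chan {
        struct virt_dma_chan vchan;
};

static inline struct foo_chan *to_foo_chan(struct dma_chan *chan)
{
        return container_of(chan, struct foo_chan, vchan.chan);
}

static enum dma_status foo_tx_status(struct dma_chan *chan,
                                     dma_cookie_t cookie,
                                     struct dma_tx_state *state)
{
        struct foo_chan *fchan = to_foo_chan(chan);
        struct virt_dma_desc *vd;
        enum dma_status status;
        unsigned long flags;
        u32 residue = 0;

        status = dma_cookie_status(chan, cookie, state);
        if (status == DMA_COMPLETE || !state)
                return status;

        spin_lock_irqsave(&fchan->vchan.lock, flags);
        vd = vchan_find_desc(&fchan->vchan, cookie);
        if (vd)
                /* still queued: nothing of it has been transferred */
                residue = container_of(vd, struct foo_desc, vdesc)->total_len;
        /* else: a real driver would read the HW byte counters here */
        spin_unlock_irqrestore(&fchan->vchan.lock, flags);

        dma_set_residue(state, residue);
        return status;
}
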
/linux-6.12.1/drivers/dma/lgm/ |
D | lgm-dma.c |
    195  struct virt_dma_chan vchan;  member
    292  return container_of(chan, struct ldma_chan, vchan.chan);  in to_ldma_chan()
    521  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_cctrl_cfg()
    550  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_irq_init()
    578  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_set_class()
    596  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_on()
    615  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_off()
    638  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_desc_hw_cfg()
    662  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_desc_cfg()
    696  struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device);  in ldma_chan_reset()
    [all …]
/linux-6.12.1/drivers/dma/stm32/ |
D | stm32-dma.c |
    210  struct virt_dma_chan vchan;  member
    237  return container_of(chan->vchan.chan.device, struct stm32_dma_device,  in stm32_dma_get_dev()
    243  return container_of(c, struct stm32_dma_chan, vchan.chan);  in to_stm32_dma_chan()
    253  return &chan->vchan.chan.dev->device;  in chan2dev()
    505  spin_lock_irqsave(&chan->vchan.lock, flags);  in stm32_dma_terminate_all()
    515  vchan_get_all_descriptors(&chan->vchan, &head);  in stm32_dma_terminate_all()
    516  spin_unlock_irqrestore(&chan->vchan.lock, flags);  in stm32_dma_terminate_all()
    517  vchan_dma_desc_free_list(&chan->vchan, &head);  in stm32_dma_terminate_all()
    526  vchan_synchronize(&chan->vchan);  in stm32_dma_synchronize()
    570  vdesc = vchan_next_desc(&chan->vchan);  in stm32_dma_start_transfer()
    [all …]
D | stm32-mdma.c |
    237   struct virt_dma_chan vchan;  member
    265   return container_of(chan->vchan.chan.device, struct stm32_mdma_device,  in stm32_mdma_get_dev()
    271   return container_of(c, struct stm32_mdma_chan, vchan.chan);  in to_stm32_mdma_chan()
    281   return &chan->vchan.chan.dev->device;  in chan2dev()
    830   return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in stm32_mdma_prep_slave_sg()
    926   return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in stm32_mdma_prep_dma_cyclic()
    1112  return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in stm32_mdma_prep_dma_memcpy()
    1149  vdesc = vchan_next_desc(&chan->vchan);  in stm32_mdma_start_transfer()
    1190  dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);  in stm32_mdma_start_transfer()
    1198  spin_lock_irqsave(&chan->vchan.lock, flags);  in stm32_mdma_issue_pending()
    [all …]
D | stm32-dma3.c |
    281   struct virt_dma_chan vchan;  member
    308   return container_of(chan->vchan.chan.device, struct stm32_dma3_ddata, dma_dev);  in to_stm32_dma3_ddata()
    313   return container_of(c, struct stm32_dma3_chan, vchan.chan);  in to_stm32_dma3_chan()
    323   return &chan->vchan.chan.dev->device;  in chan2dev()
    743   vdesc = vchan_next_desc(&chan->vchan);  in stm32_dma3_chan_start()
    775   dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);  in stm32_dma3_chan_start()
    983   spin_lock(&chan->vchan.lock);  in stm32_dma3_chan_irq()
    987   spin_unlock(&chan->vchan.lock);  in stm32_dma3_chan_irq()
    1032  spin_unlock(&chan->vchan.lock);  in stm32_dma3_chan_irq()
    1097  spin_lock_irqsave(&chan->vchan.lock, flags);  in stm32_dma3_free_chan_resources()
    [all …]
/linux-6.12.1/drivers/dma/xilinx/ |
D | xdma.c |
    66   struct virt_dma_chan vchan;  member
    210  return container_of(chan, struct xdma_chan, vchan.chan);  in to_xdma_chan()
    327  struct virt_dma_desc *vd = vchan_next_desc(&xchan->vchan);  in xdma_xfer_start()
    480  xchan->vchan.desc_free = xdma_free_desc;  in xdma_alloc_channels()
    481  vchan_init(&xchan->vchan, &xdev->dma_dev);  in xdma_alloc_channels()
    501  spin_lock_irqsave(&xdma_chan->vchan.lock, flags);  in xdma_issue_pending()
    502  if (vchan_issue_pending(&xdma_chan->vchan))  in xdma_issue_pending()
    504  spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);  in xdma_issue_pending()
    520  spin_lock_irqsave(&xdma_chan->vchan.lock, flags);  in xdma_terminate_all()
    524  vd = vchan_next_desc(&xdma_chan->vchan);  in xdma_terminate_all()
    [all …]
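
The xdma_alloc_channels() and qdma_alloc_queues() hits show the setup half of all of this: each channel gets a desc_free callback and is registered with vchan_init(), which initialises the lock, the descriptor lists and the completion tasklet and links vchan.chan into the dma_device's channel list. Sketch, foo_* names hypothetical:

#include <linux/dmaengine.h>
#include <linux/slab.h>
#include "virt-dma.h"

struct foo_desc {
        struct virt_dma_desc vdesc;
        /* hardware descriptor chain, dma_pool entries, ... */
};

struct foo_chan {
        struct virt_dma_chan vchan;
};

/* Called by the virt-dma core (tasklet or free paths) for every
 * descriptor the driver no longer owns. */
static void foo_free_vdesc(struct virt_dma_desc *vd)
{
        kfree(container_of(vd, struct foo_desc, vdesc));
}

static void foo_register_chan(struct foo_chan *fchan, struct dma_device *ddev)
{
        fchan->vchan.desc_free = foo_free_vdesc;
        /* sets up lists/lock/tasklet and adds vchan.chan to
         * ddev->channels ahead of dma_async_device_register() */
        vchan_init(&fchan->vchan, ddev);
}
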
D | xilinx_dpdma.c |
    227   struct virt_dma_chan vchan;  member
    249   container_of(_chan, struct xilinx_dpdma_chan, vchan.chan)
    743   return vchan_tx_prep(&chan->vchan, &tx_desc->vdesc, flags);  in xilinx_dpdma_chan_prep_cyclic()
    925   vdesc = vchan_next_desc(&chan->vchan);  in xilinx_dpdma_chan_queue_transfer()
    1177  spin_lock(&chan->vchan.lock);  in xilinx_dpdma_chan_vsync_irq()
    1184  spin_unlock(&chan->vchan.lock);  in xilinx_dpdma_chan_vsync_irq()
    1255  list_empty(&chan->vchan.desc_issued)) {  in xilinx_dpdma_chan_handle_err()
    1258  &chan->vchan.desc_issued);  in xilinx_dpdma_chan_handle_err()
    1309  vchan_tx_prep(&chan->vchan, &desc->vdesc, flags | DMA_CTRL_ACK);  in xilinx_dpdma_prep_interleaved_dma()
    1352  vchan_free_chan_resources(&chan->vchan);  in xilinx_dpdma_free_chan_resources()
    [all …]
/linux-6.12.1/drivers/dma/fsl-dpaa2-qdma/ |
D | dpaa2-qdma.c |
    21   return container_of(chan, struct dpaa2_qdma_chan, vchan.chan);  in to_dpaa2_qdma_chan()
    73   spin_lock_irqsave(&dpaa2_chan->vchan.lock, flags);  in dpaa2_qdma_free_chan_resources()
    74   vchan_get_all_descriptors(&dpaa2_chan->vchan, &head);  in dpaa2_qdma_free_chan_resources()
    75   spin_unlock_irqrestore(&dpaa2_chan->vchan.lock, flags);  in dpaa2_qdma_free_chan_resources()
    77   vchan_dma_desc_free_list(&dpaa2_chan->vchan, &head);  in dpaa2_qdma_free_chan_resources()
    269  return vchan_tx_prep(&dpaa2_chan->vchan, &dpaa2_comp->vdesc, flags);  in dpaa2_qdma_prep_memcpy()
    282  spin_lock(&dpaa2_chan->vchan.lock);  in dpaa2_qdma_issue_pending()
    283  if (vchan_issue_pending(&dpaa2_chan->vchan)) {  in dpaa2_qdma_issue_pending()
    284  vdesc = vchan_next_desc(&dpaa2_chan->vchan);  in dpaa2_qdma_issue_pending()
    301  spin_unlock(&dpaa2_chan->vchan.lock);  in dpaa2_qdma_issue_pending()
    [all …]
/linux-6.12.1/drivers/dma/ti/ |
D | edma.c |
    216   struct virt_dma_chan vchan;  member
    704   return container_of(c, struct edma_chan, vchan.chan);  in to_edma_chan()
    723   struct device *dev = echan->vchan.chan.device->dev;  in edma_execute()
    728   vdesc = vchan_next_desc(&echan->vchan);  in edma_execute()
    817   spin_lock_irqsave(&echan->vchan.lock, flags);  in edma_terminate_all()
    834   vchan_get_all_descriptors(&echan->vchan, &head);  in edma_terminate_all()
    835   spin_unlock_irqrestore(&echan->vchan.lock, flags);  in edma_terminate_all()
    836   vchan_dma_desc_free_list(&echan->vchan, &head);  in edma_terminate_all()
    845   vchan_synchronize(&echan->vchan);  in edma_synchronize()
    1102  return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags);  in edma_prep_slave_sg()
    [all …]
/linux-6.12.1/drivers/dma/amd/qdma/ |
D | qdma.c |
    31   return container_of(chan, struct qdma_queue, vchan.chan);  in to_qdma_queue()
    466  q->vchan.desc_free = qdma_free_vdesc;  in qdma_alloc_queues()
    467  vchan_init(&q->vchan, &qdev->dma_dev);  in qdma_alloc_queues()
    554  vchan_free_chan_resources(&queue->vchan);  in qdma_free_queue_resources()
    615  if (!vchan_next_desc(&queue->vchan))  in qdma_xfer_start()
    635  spin_lock_irqsave(&queue->vchan.lock, flags);  in qdma_issue_pending()
    636  if (vchan_issue_pending(&queue->vchan)) {  in qdma_issue_pending()
    644  spin_unlock_irqrestore(&queue->vchan.lock, flags);  in qdma_issue_pending()
    711  struct virt_dma_chan *vc = &q->vchan;  in qdma_fill_pending_vdesc()
    784  tx = vchan_tx_prep(&q->vchan, &vdesc->vdesc, flags);  in qdma_prep_device_sg()
    [all …]