Lines Matching refs:dmatx

272 	struct pl011_dmatx_data	dmatx;  member
432 uap->dmatx.chan = chan; in pl011_dma_probe()
435 dma_chan_name(uap->dmatx.chan)); in pl011_dma_probe()
521 if (uap->dmatx.chan) in pl011_dma_remove()
522 dma_release_channel(uap->dmatx.chan); in pl011_dma_remove()
539 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_callback() local
544 if (uap->dmatx.queued) in pl011_dma_tx_callback()
545 dma_unmap_single(dmatx->chan->device->dev, dmatx->dma, in pl011_dma_tx_callback()
546 dmatx->len, DMA_TO_DEVICE); in pl011_dma_tx_callback()
563 uap->dmatx.queued = false; in pl011_dma_tx_callback()
588 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_refill() local
589 struct dma_chan *chan = dmatx->chan; in pl011_dma_tx_refill()
603 uap->dmatx.queued = false; in pl011_dma_tx_refill()
617 count = kfifo_out_peek(&tport->xmit_fifo, dmatx->buf, count); in pl011_dma_tx_refill()
618 dmatx->len = count; in pl011_dma_tx_refill()
619 dmatx->dma = dma_map_single(dma_dev->dev, dmatx->buf, count, in pl011_dma_tx_refill()
621 if (dmatx->dma == DMA_MAPPING_ERROR) { in pl011_dma_tx_refill()
622 uap->dmatx.queued = false; in pl011_dma_tx_refill()
627 desc = dmaengine_prep_slave_single(chan, dmatx->dma, dmatx->len, DMA_MEM_TO_DEV, in pl011_dma_tx_refill()
630 dma_unmap_single(dma_dev->dev, dmatx->dma, dmatx->len, DMA_TO_DEVICE); in pl011_dma_tx_refill()
631 uap->dmatx.queued = false; in pl011_dma_tx_refill()
652 uap->dmatx.queued = true; in pl011_dma_tx_refill()
684 if (uap->dmatx.queued) { in pl011_dma_tx_irq()
710 if (uap->dmatx.queued) { in pl011_dma_tx_stop()
735 if (!uap->dmatx.queued) { in pl011_dma_tx_start()
791 dmaengine_terminate_async(uap->dmatx.chan); in pl011_dma_flush_buffer()
793 if (uap->dmatx.queued) { in pl011_dma_flush_buffer()
794 dma_unmap_single(uap->dmatx.chan->device->dev, uap->dmatx.dma, in pl011_dma_flush_buffer()
795 uap->dmatx.len, DMA_TO_DEVICE); in pl011_dma_flush_buffer()
796 uap->dmatx.queued = false; in pl011_dma_flush_buffer()
1091 if (!uap->dmatx.chan) in pl011_dma_startup()
1094 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA); in pl011_dma_startup()
1095 if (!uap->dmatx.buf) { in pl011_dma_startup()
1100 uap->dmatx.len = PL011_DMA_BUFFER_SIZE; in pl011_dma_startup()
1174 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_shutdown()
1175 if (uap->dmatx.queued) { in pl011_dma_shutdown()
1176 dma_unmap_single(uap->dmatx.chan->device->dev, in pl011_dma_shutdown()
1177 uap->dmatx.dma, uap->dmatx.len, in pl011_dma_shutdown()
1179 uap->dmatx.queued = false; in pl011_dma_shutdown()
1182 kfree(uap->dmatx.buf); in pl011_dma_shutdown()
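
Read together, the matches above trace the life cycle of the TX DMA bounce buffer: pl011_dma_startup() allocates dmatx.buf, pl011_dma_tx_refill() copies pending bytes into it, maps it and queues a descriptor (setting dmatx.queued), and pl011_dma_tx_callback() plus the stop/flush/shutdown paths unmap the buffer and clear the flag. The fragment below is a minimal sketch of that map → prep → submit → unmap cycle against the generic dmaengine API, not the driver's actual code; the struct name tx_ctx and the function names tx_done()/tx_submit() are hypothetical placeholders standing in for pl011_dmatx_data, pl011_dma_tx_callback() and pl011_dma_tx_refill().

	#include <linux/dmaengine.h>
	#include <linux/dma-mapping.h>

	/* Hypothetical per-port TX DMA state, mirroring struct pl011_dmatx_data. */
	struct tx_ctx {
		struct dma_chan *chan;	/* slave channel obtained at probe time        */
		void *buf;		/* bounce buffer filled from the xmit FIFO     */
		dma_addr_t dma;		/* device address of the mapped buffer         */
		size_t len;		/* bytes currently mapped / in flight          */
		bool queued;		/* true while a descriptor is outstanding      */
	};

	/* Completion callback: undo the streaming mapping, as pl011_dma_tx_callback() does. */
	static void tx_done(void *param)
	{
		struct tx_ctx *tx = param;

		if (tx->queued)
			dma_unmap_single(tx->chan->device->dev, tx->dma,
					 tx->len, DMA_TO_DEVICE);
		tx->queued = false;
	}

	/* Map the bounce buffer and queue one MEM_TO_DEV transfer, as pl011_dma_tx_refill() does. */
	static int tx_submit(struct tx_ctx *tx, size_t count)
	{
		struct device *dev = tx->chan->device->dev;
		struct dma_async_tx_descriptor *desc;

		tx->len = count;
		tx->dma = dma_map_single(dev, tx->buf, count, DMA_TO_DEVICE);
		if (tx->dma == DMA_MAPPING_ERROR) {
			tx->queued = false;
			return -EBUSY;
		}

		desc = dmaengine_prep_slave_single(tx->chan, tx->dma, tx->len,
						   DMA_MEM_TO_DEV,
						   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc) {
			dma_unmap_single(dev, tx->dma, tx->len, DMA_TO_DEVICE);
			tx->queued = false;
			return -EBUSY;
		}

		desc->callback = tx_done;
		desc->callback_param = tx;

		dmaengine_submit(desc);
		dma_async_issue_pending(tx->chan);
		tx->queued = true;
		return 0;
	}

The queued flag is what lets the tx_irq/tx_stop/tx_start/flush_buffer/shutdown matches above decide whether an unmap is still owed. The real driver additionally refills the buffer from the kfifo, falls back to PIO when DMA is not worthwhile, and updates the uart_port counters; those details are omitted from this sketch.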