Lines Matching refs:td_desc (all matches are in drivers/dma/timb_dma.c, the Timberdale FPGA DMA engine driver)
180 struct timb_dma_desc *td_desc; in __td_start_dma() local
188 td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc, in __td_start_dma()
199 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
209 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
215 if (td_desc->interrupt) in __td_start_dma()
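Read together, these hits show the start path: __td_start_dma() takes the descriptor at the head of the channel's active list, writes its DMA address (txd.phys) into a memory-mapped register, and unmasks the channel interrupt only when the descriptor asked for one. A sketch of that flow; the direction field, the TIMBDMA_OFFS_*_DLAR register names, and the __td_enable_chan_irq() helper do not appear in the matches and are assumptions:

static void __td_start_dma(struct timb_dma_chan *td_chan)
{
	struct timb_dma_desc *td_desc;

	/* line 188: start with the first descriptor on the active list */
	td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc,
		desc_node);

	if (td_chan->direction == DMA_DEV_TO_MEM)	/* field name assumed */
		/* line 199: program the descriptor list address, RX side */
		iowrite32(td_desc->txd.phys, td_chan->membase +
			TIMBDMA_OFFS_RX_DLAR);		/* offset name assumed */
	else
		/* line 209: same write for the TX direction */
		iowrite32(td_desc->txd.phys, td_chan->membase +
			TIMBDMA_OFFS_TX_DLAR);		/* offset name assumed */

	/* line 215: unmask the channel IRQ only if a callback was requested */
	if (td_desc->interrupt)
		__td_enable_chan_irq(td_chan);		/* helper name assumed */
}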
223 struct timb_dma_desc *td_desc; in __td_finish() local
229 td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc, in __td_finish()
231 txd = &td_desc->txd; in __td_finish()
248 list_move(&td_desc->desc_node, &td_chan->free_list); in __td_finish()
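The __td_finish() hits outline the completion side: the finished transfer is again the head of active_list, its embedded dma_async_tx_descriptor is completed, and the driver descriptor is recycled onto the free list rather than freed. A sketch assuming the standard dmaengine cookie helper; callback invocation is elided:

static void __td_finish(struct timb_dma_chan *td_chan)
{
	struct dma_async_tx_descriptor *txd;
	struct timb_dma_desc *td_desc;

	if (list_empty(&td_chan->active_list))
		return;

	/* lines 229-231: the completed transfer is the head of active_list */
	td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc,
		desc_node);
	txd = &td_desc->txd;

	dma_cookie_complete(txd);	/* standard dmaengine completion */

	/* line 248: recycle the descriptor instead of freeing it */
	list_move(&td_desc->desc_node, &td_chan->free_list);
}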
266 struct timb_dma_desc *td_desc = in __td_ier_mask() local
269 if (td_desc->interrupt) in __td_ier_mask()
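The __td_ier_mask() hits suggest a per-channel scan that builds an interrupt-enable bitmask, setting a channel's bit only when its running descriptor requested an interrupt. A sketch under heavier assumptions: the struct timb_dma layout (channels array, dma.chancnt) and the ongoing flag are not in the matches:

static u32 __td_ier_mask(struct timb_dma *td)
{
	u32 ret = 0;
	int i;

	for (i = 0; i < td->dma.chancnt; i++) {		/* fields assumed */
		struct timb_dma_chan *td_chan = td->channels + i;

		if (td_chan->ongoing) {			/* flag assumed */
			/* line 266: peek at the currently running descriptor */
			struct timb_dma_desc *td_desc =
				list_entry(td_chan->active_list.next,
					struct timb_dma_desc, desc_node);
			/* line 269: enable the IRQ bit only on request */
			if (td_desc->interrupt)
				ret |= 1 << i;
		}
	}
	return ret;
}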
279 struct timb_dma_desc *td_desc; in __td_start_next() local
284 td_desc = list_entry(td_chan->queue.next, struct timb_dma_desc, in __td_start_next()
288 __func__, td_desc->txd.cookie); in __td_start_next()
290 list_move(&td_desc->desc_node, &td_chan->active_list); in __td_start_next()
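These four hits are nearly the whole of __td_start_next(): pop the head of the pending queue, log its cookie, promote it to the active list, and kick the hardware. Assembled as a sketch:

static void __td_start_next(struct timb_dma_chan *td_chan)
{
	struct timb_dma_desc *td_desc;

	/* line 284: the next transfer is the head of the pending queue */
	td_desc = list_entry(td_chan->queue.next, struct timb_dma_desc,
		desc_node);

	/* line 288: log which cookie is being started */
	dev_dbg(chan2dev(&td_chan->chan), "%s: started %u\n",
		__func__, td_desc->txd.cookie);

	/* line 290: promote it to the active list and start the hardware */
	list_move(&td_desc->desc_node, &td_chan->active_list);
	__td_start_dma(td_chan);
}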
296 struct timb_dma_desc *td_desc = container_of(txd, struct timb_dma_desc, in td_tx_submit() local
308 list_add_tail(&td_desc->desc_node, &td_chan->active_list); in td_tx_submit()
314 list_add_tail(&td_desc->desc_node, &td_chan->queue); in td_tx_submit()
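td_tx_submit() is the dmaengine tx_submit hook: it recovers the driver descriptor from the embedded txd via container_of(), then either starts it immediately (idle channel, line 308) or parks it on the queue (line 314) for __td_start_next() to pick up later. A sketch assuming a lock field and the standard cookie helper:

static dma_cookie_t td_tx_submit(struct dma_async_tx_descriptor *txd)
{
	/* line 296: recover the driver descriptor from the embedded txd */
	struct timb_dma_desc *td_desc = container_of(txd, struct timb_dma_desc,
		txd);
	struct timb_dma_chan *td_chan = container_of(txd->chan,
		struct timb_dma_chan, chan);
	dma_cookie_t cookie;

	spin_lock_bh(&td_chan->lock);		/* lock field assumed */
	cookie = dma_cookie_assign(txd);

	if (list_empty(&td_chan->active_list)) {
		/* line 308: idle channel: start the transfer immediately */
		list_add_tail(&td_desc->desc_node, &td_chan->active_list);
		__td_start_dma(td_chan);
	} else {
		/* line 314: channel busy: queue for __td_start_next() */
		list_add_tail(&td_desc->desc_node, &td_chan->queue);
	}
	spin_unlock_bh(&td_chan->lock);

	return cookie;
}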
325 struct timb_dma_desc *td_desc; in td_alloc_init_desc() local
328 td_desc = kzalloc(sizeof(struct timb_dma_desc), GFP_KERNEL); in td_alloc_init_desc()
329 if (!td_desc) in td_alloc_init_desc()
332 td_desc->desc_list_len = td_chan->desc_elems * TIMB_DMA_DESC_SIZE; in td_alloc_init_desc()
334 td_desc->desc_list = kzalloc(td_desc->desc_list_len, GFP_KERNEL); in td_alloc_init_desc()
335 if (!td_desc->desc_list) in td_alloc_init_desc()
338 dma_async_tx_descriptor_init(&td_desc->txd, chan); in td_alloc_init_desc()
339 td_desc->txd.tx_submit = td_tx_submit; in td_alloc_init_desc()
340 td_desc->txd.flags = DMA_CTRL_ACK; in td_alloc_init_desc()
342 td_desc->txd.phys = dma_map_single(chan2dmadev(chan), in td_alloc_init_desc()
343 td_desc->desc_list, td_desc->desc_list_len, DMA_TO_DEVICE); in td_alloc_init_desc()
345 err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys); in td_alloc_init_desc()
351 return td_desc; in td_alloc_init_desc()
353 kfree(td_desc->desc_list); in td_alloc_init_desc()
354 kfree(td_desc); in td_alloc_init_desc()
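The td_alloc_init_desc() hits cover almost the entire allocation path: a driver descriptor, a flat buffer of desc_elems hardware elements, dmaengine initialization, and a single streaming DMA mapping of that buffer. Assembled with the error unwinding the matches imply (labels are assumptions):

static struct timb_dma_desc *td_alloc_init_desc(struct timb_dma_chan *td_chan)
{
	struct dma_chan *chan = &td_chan->chan;
	struct timb_dma_desc *td_desc;
	int err;

	td_desc = kzalloc(sizeof(struct timb_dma_desc), GFP_KERNEL);
	if (!td_desc)
		goto out;

	/* line 332: one flat buffer holds all hardware descriptor elements */
	td_desc->desc_list_len = td_chan->desc_elems * TIMB_DMA_DESC_SIZE;

	td_desc->desc_list = kzalloc(td_desc->desc_list_len, GFP_KERNEL);
	if (!td_desc->desc_list)
		goto err;

	/* lines 338-340: hook the descriptor into the dmaengine core */
	dma_async_tx_descriptor_init(&td_desc->txd, chan);
	td_desc->txd.tx_submit = td_tx_submit;
	td_desc->txd.flags = DMA_CTRL_ACK;

	/* lines 342-345: map the list once; it is synced before each start */
	td_desc->txd.phys = dma_map_single(chan2dmadev(chan),
		td_desc->desc_list, td_desc->desc_list_len, DMA_TO_DEVICE);

	err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys);
	if (err)
		goto err;

	return td_desc;
err:
	/* lines 353-354: unwind both allocations on failure */
	kfree(td_desc->desc_list);
	kfree(td_desc);
out:
	return NULL;
}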
360 static void td_free_desc(struct timb_dma_desc *td_desc) in td_free_desc() argument
362 dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc); in td_free_desc()
363 dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys, in td_free_desc()
364 td_desc->desc_list_len, DMA_TO_DEVICE); in td_free_desc()
366 kfree(td_desc->desc_list); in td_free_desc()
367 kfree(td_desc); in td_free_desc()
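The td_free_desc() hits are effectively the whole function; assembled, it is the exact mirror of td_alloc_init_desc():

static void td_free_desc(struct timb_dma_desc *td_desc)
{
	dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc);
	/* undo the dma_map_single() done in td_alloc_init_desc() */
	dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys,
		td_desc->desc_list_len, DMA_TO_DEVICE);

	kfree(td_desc->desc_list);
	kfree(td_desc);
}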
371 struct timb_dma_desc *td_desc) in td_desc_put() argument
373 dev_dbg(chan2dev(&td_chan->chan), "Putting desc: %p\n", td_desc); in td_desc_put()
376 list_add(&td_desc->desc_node, &td_chan->free_list); in td_desc_put()
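td_desc_put() is the recycle half of the descriptor pool: it pushes a descriptor back onto the channel's free list. A short sketch; the locking mirrors the assumed lock in td_tx_submit() and is not shown in the matches:

static void td_desc_put(struct timb_dma_chan *td_chan,
	struct timb_dma_desc *td_desc)
{
	dev_dbg(chan2dev(&td_chan->chan), "Putting desc: %p\n", td_desc);

	spin_lock_bh(&td_chan->lock);		/* lock field assumed */
	/* line 376: returned descriptors go back on the free list for reuse */
	list_add(&td_desc->desc_node, &td_chan->free_list);
	spin_unlock_bh(&td_chan->lock);
}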
382 struct timb_dma_desc *td_desc, *_td_desc; in td_desc_get() local
386 list_for_each_entry_safe(td_desc, _td_desc, &td_chan->free_list, in td_desc_get()
388 if (async_tx_test_ack(&td_desc->txd)) { in td_desc_get()
389 list_del(&td_desc->desc_node); in td_desc_get()
390 ret = td_desc; in td_desc_get()
394 td_desc); in td_desc_get()
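td_desc_get() is the other half: walk the free list and take the first descriptor the client has ACKed (async_tx_test_ack()), skipping any that are still owned by a client. A sketch assuming the same lock:

static struct timb_dma_desc *td_desc_get(struct timb_dma_chan *td_chan)
{
	struct timb_dma_desc *td_desc, *_td_desc;
	struct timb_dma_desc *ret = NULL;

	spin_lock_bh(&td_chan->lock);		/* lock field assumed */
	/* lines 386-390: take the first free descriptor the client ACKed */
	list_for_each_entry_safe(td_desc, _td_desc, &td_chan->free_list,
		desc_node) {
		if (async_tx_test_ack(&td_desc->txd)) {
			list_del(&td_desc->desc_node);
			ret = td_desc;
			break;
		}
		/* line 394: un-ACKed descriptors still belong to the client */
		dev_dbg(chan2dev(&td_chan->chan), "desc %p not ACKed\n",
			td_desc);
	}
	spin_unlock_bh(&td_chan->lock);

	return ret;
}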
411 struct timb_dma_desc *td_desc = td_alloc_init_desc(td_chan); in td_alloc_chan_resources() local
412 if (!td_desc) { in td_alloc_chan_resources()
422 td_desc_put(td_chan, td_desc); in td_alloc_chan_resources()
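td_alloc_chan_resources() preallocates the descriptor pool at channel open, stocking the free list through td_desc_put(). A sketch; the pool-size field and the partial-failure policy (succeed if at least one descriptor was allocated) are assumptions around the matched lines:

static int td_alloc_chan_resources(struct dma_chan *chan)
{
	struct timb_dma_chan *td_chan =
		container_of(chan, struct timb_dma_chan, chan);
	int i;

	for (i = 0; i < td_chan->descs; i++) {	/* pool-size field assumed */
		/* lines 411-412: stop growing the pool on the first failure */
		struct timb_dma_desc *td_desc = td_alloc_init_desc(td_chan);
		if (!td_desc) {
			if (i)
				break;		/* partial pool is usable */
			return -ENOMEM;		/* nothing allocated at all */
		}
		/* line 422: stock the free list via the common put path */
		td_desc_put(td_chan, td_desc);
	}
	return 0;
}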
436 struct timb_dma_desc *td_desc, *_td_desc; in td_free_chan_resources() local
449 list_for_each_entry_safe(td_desc, _td_desc, &list, desc_node) { in td_free_chan_resources()
451 td_desc); in td_free_chan_resources()
452 td_free_desc(td_desc); in td_free_chan_resources()
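td_free_chan_resources() tears the pool down again. The safe-iterator hit on a local list head suggests the free list is spliced out under the lock and destroyed outside it. A sketch on that assumption:

static void td_free_chan_resources(struct dma_chan *chan)
{
	struct timb_dma_chan *td_chan =
		container_of(chan, struct timb_dma_chan, chan);
	struct timb_dma_desc *td_desc, *_td_desc;
	LIST_HEAD(list);

	/* splice the free list out under the lock, then free unlocked */
	spin_lock_bh(&td_chan->lock);		/* lock field assumed */
	list_splice_init(&td_chan->free_list, &list);
	spin_unlock_bh(&td_chan->lock);

	/* lines 449-452: every pooled descriptor is torn down */
	list_for_each_entry_safe(td_desc, _td_desc, &list, desc_node) {
		dev_dbg(chan2dev(chan), "%s: Freeing desc: %p\n", __func__,
			td_desc);
		td_free_desc(td_desc);
	}
}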
496 struct timb_dma_desc *td_desc; in td_prep_slave_sg() local
513 td_desc = td_desc_get(td_chan); in td_prep_slave_sg()
514 if (!td_desc) { in td_prep_slave_sg()
519 td_desc->interrupt = (flags & DMA_PREP_INTERRUPT) != 0; in td_prep_slave_sg()
523 if (desc_usage > td_desc->desc_list_len) { in td_prep_slave_sg()
528 err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage, sg, in td_prep_slave_sg()
533 td_desc_put(td_chan, td_desc); in td_prep_slave_sg()
539 dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys, in td_prep_slave_sg()
540 td_desc->desc_list_len, DMA_TO_DEVICE); in td_prep_slave_sg()
542 return &td_desc->txd; in td_prep_slave_sg()
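td_prep_slave_sg() ties the pool to the dmaengine slave API: grab a pooled descriptor, record the interrupt request, encode each scatterlist entry into the flat list via td_fill_desc(), then sync the mapping so the hardware sees the CPU's writes. A sketch; the signature follows the modern prep_slave_sg prototype, and the overflow path here returns the descriptor to the pool for cleanliness, which the line-523 match alone does not show:

static struct dma_async_tx_descriptor *td_prep_slave_sg(struct dma_chan *chan,
	struct scatterlist *sgl, unsigned int sg_len,
	enum dma_transfer_direction direction, unsigned long flags,
	void *context)
{
	struct timb_dma_chan *td_chan =
		container_of(chan, struct timb_dma_chan, chan);
	struct timb_dma_desc *td_desc;
	struct scatterlist *sg;
	unsigned int desc_usage = 0;
	unsigned int i;

	/* lines 513-514: reuse a pooled descriptor; no allocation here */
	td_desc = td_desc_get(td_chan);
	if (!td_desc)
		return NULL;

	/* line 519: record whether the client wants a completion callback */
	td_desc->interrupt = (flags & DMA_PREP_INTERRUPT) != 0;

	for_each_sg(sgl, sg, sg_len, i) {
		int err;

		/* line 523: guard against overflowing the fixed-size list */
		if (desc_usage > td_desc->desc_list_len) {
			td_desc_put(td_chan, td_desc);
			return NULL;
		}

		/* line 528: td_fill_desc() encodes one sg entry per element,
		 * flagging the last one */
		err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage,
			sg, i == sg_len - 1);
		if (err) {
			/* line 533: failed descriptors go back to the pool */
			td_desc_put(td_chan, td_desc);
			return NULL;
		}
		desc_usage += TIMB_DMA_DESC_SIZE;
	}

	/* lines 539-540: flush CPU writes before the hardware reads the list */
	dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys,
		td_desc->desc_list_len, DMA_TO_DEVICE);

	return &td_desc->txd;
}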
549 struct timb_dma_desc *td_desc, *_td_desc; in td_terminate_all() local
555 list_for_each_entry_safe(td_desc, _td_desc, &td_chan->queue, in td_terminate_all()
557 list_move(&td_desc->desc_node, &td_chan->free_list); in td_terminate_all()
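Finally, the td_terminate_all() hits show cancellation of pending work: everything still on the queue is moved straight back to the free list so it can never start. A sketch, again assuming the channel lock:

static int td_terminate_all(struct dma_chan *chan)
{
	struct timb_dma_chan *td_chan =
		container_of(chan, struct timb_dma_chan, chan);
	struct timb_dma_desc *td_desc, *_td_desc;

	/* lines 555-557: recycle all queued-but-unstarted descriptors */
	spin_lock_bh(&td_chan->lock);		/* lock field assumed */
	list_for_each_entry_safe(td_desc, _td_desc, &td_chan->queue,
		desc_node)
		list_move(&td_desc->desc_node, &td_chan->free_list);
	spin_unlock_bh(&td_chan->lock);

	return 0;
}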