Lines matching refs: atdma
285 struct at_dma *atdma; member
361 #define dma_readl(atdma, name) \ argument
362 __raw_readl((atdma)->regs + AT_DMA_##name)
363 #define dma_writel(atdma, name, val) \ argument
364 __raw_writel((val), (atdma)->regs + AT_DMA_##name)
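These two macros resolve a symbolic register name against the MMIO base held in atdma->regs, so dma_readl(atdma, CHSR) expands to __raw_readl((atdma)->regs + AT_DMA_CHSR). A minimal usage sketch, mirroring the shutdown sequence that appears later in at_dma_off() (the cpu_relax() busy-wait is an assumption not shown in this listing):

    /* Disable the controller, then poll CHSR until every channel bit
     * in all_chan_mask has cleared.  The AT_DMA_* offsets behind the
     * register names are defined elsewhere in the driver. */
    dma_writel(atdma, EN, 0);
    while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
            cpu_relax();            /* assumed busy-wait */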
392 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device); in vdbg_dump_regs() local
397 dma_readl(atdma, EBCIMR), in vdbg_dump_regs()
398 dma_readl(atdma, CHSR)); in vdbg_dump_regs()
422 static void atc_setup_irq(struct at_dma *atdma, int chan_id, int on) in atc_setup_irq() argument
430 dma_writel(atdma, EBCIER, ebci); in atc_setup_irq()
432 dma_writel(atdma, EBCIDR, ebci); in atc_setup_irq()
435 static void atc_enable_chan_irq(struct at_dma *atdma, int chan_id) in atc_enable_chan_irq() argument
437 atc_setup_irq(atdma, chan_id, 1); in atc_enable_chan_irq()
440 static void atc_disable_chan_irq(struct at_dma *atdma, int chan_id) in atc_disable_chan_irq() argument
442 atc_setup_irq(atdma, chan_id, 0); in atc_disable_chan_irq()
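atc_setup_irq() builds the per-channel interrupt mask and writes it to the enable (EBCIER) or disable (EBCIDR) register depending on the on flag; atc_enable_chan_irq() and atc_disable_chan_irq() are thin wrappers passing 1 and 0. A hedged reconstruction of the body (the AT_DMA_BTC()/AT_DMA_ERR() per-channel bit helpers are assumptions inferred from the register names):

    static void atc_setup_irq(struct at_dma *atdma, int chan_id, int on)
    {
            /* Assumed helpers: buffer-transfer-complete and error
             * interrupt bits for this channel. */
            u32 ebci = AT_DMA_BTC(chan_id) | AT_DMA_ERR(chan_id);

            if (on)
                    dma_writel(atdma, EBCIER, ebci);        /* unmask */
            else
                    dma_writel(atdma, EBCIDR, ebci);        /* mask */
    }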
452 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device); in atc_chan_is_enabled() local
454 return !!(dma_readl(atdma, CHSR) & atchan->mask); in atc_chan_is_enabled()
587 dma_writel(atchan->atdma, CHER, atchan->mask); in atc_dostart()
594 struct at_dma *atdma = to_at_dma(vd->tx.chan->device); in atdma_desc_free() local
600 dma_pool_free(atdma->lli_pool, desc->sg[i].lli, in atdma_desc_free()
606 dma_pool_free(atdma->memset_pool, desc->memset_vaddr, in atdma_desc_free()
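atdma_desc_free() returns every hardware linked-list item (LLI) of a finished virtual descriptor to lli_pool and, for memset descriptors, the staging word back to memset_pool. A sketch of that teardown, assuming the descriptor fields implied by the calls above (sglen, sg[i].lli_phys, memset_buffer, memset_paddr and the to_atdma_desc() helper are assumed names):

    static void atdma_desc_free(struct virt_dma_desc *vd)
    {
            struct at_dma *atdma = to_at_dma(vd->tx.chan->device);
            struct at_desc *desc = to_atdma_desc(&vd->tx);  /* assumed */
            unsigned int i;

            for (i = 0; i < desc->sglen; i++)
                    if (desc->sg[i].lli)
                            dma_pool_free(atdma->lli_pool, desc->sg[i].lli,
                                          desc->sg[i].lli_phys);

            /* Memset transfers keep their fill value in a small DMA buffer. */
            if (desc->memset_buffer)
                    dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
                                  desc->memset_paddr);

            kfree(desc);
    }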
784 dma_writel(atchan->atdma, CHDR, AT_DMA_RES(i) | atchan->mask); in atc_handle_error()
828 struct at_dma *atdma = dev_id; in at_dma_interrupt() local
835 imr = dma_readl(atdma, EBCIMR); in at_dma_interrupt()
836 status = dma_readl(atdma, EBCISR); in at_dma_interrupt()
842 dev_vdbg(atdma->dma_device.dev, in at_dma_interrupt()
846 for (i = 0; i < atdma->dma_device.chancnt; i++) { in at_dma_interrupt()
847 atchan = &atdma->chan[i]; in at_dma_interrupt()
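The shared interrupt handler masks the raw status (EBCISR) against the currently enabled sources (EBCIMR) and then walks every channel, dispatching error bits and transfer-complete bits separately. A condensed sketch of that loop; the per-channel dispatch helpers and the AT_DMA_BTC()/AT_DMA_ERR() masks are assumptions:

    static irqreturn_t at_dma_interrupt(int irq, void *dev_id)
    {
            struct at_dma *atdma = dev_id;
            u32 imr, status, pending;
            irqreturn_t ret = IRQ_NONE;
            int i;

            imr = dma_readl(atdma, EBCIMR);
            status = dma_readl(atdma, EBCISR);
            pending = status & imr;
            if (!pending)
                    return ret;

            dev_vdbg(atdma->dma_device.dev, "interrupt: status = 0x%08x\n", status);

            for (i = 0; i < atdma->dma_device.chancnt; i++) {
                    struct at_dma_chan *atchan = &atdma->chan[i];

                    if (!(pending & (AT_DMA_BTC(i) | AT_DMA_ERR(i))))
                            continue;
                    if (pending & AT_DMA_ERR(i))
                            atc_handle_error(atchan, i);    /* assumed signature */
                    else
                            atc_advance_work(atchan);       /* assumed helper */
                    ret = IRQ_HANDLED;
            }

            return ret;
    }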
871 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_interleaved() local
938 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT, in atc_prep_dma_interleaved()
974 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memcpy() local
1019 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT, in atc_prep_dma_memcpy()
1054 struct at_dma *atdma = to_at_dma(chan->device); in atdma_create_memset_lli() local
1069 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT, in atdma_create_memset_lli()
1098 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset() local
1119 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr); in atc_prep_dma_memset()
1157 dma_pool_free(atdma->memset_pool, vaddr, paddr); in atc_prep_dma_memset()
1168 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset_sg() local
1186 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr); in atc_prep_dma_memset_sg()
1235 dma_pool_free(atdma->memset_pool, vaddr, paddr); in atc_prep_dma_memset_sg()
1253 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_slave_sg() local
1303 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, in atc_prep_slave_sg()
1352 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, in atc_prep_slave_sg()
1431 struct at_dma *atdma = to_at_dma(chan->device); in atc_dma_cyclic_fill_desc() local
1437 atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_ATOMIC, in atc_dma_cyclic_fill_desc()
1581 struct at_dma *atdma = to_at_dma(chan->device); in atc_pause() local
1589 dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id)); in atc_pause()
1600 struct at_dma *atdma = to_at_dma(chan->device); in atc_resume() local
1611 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id)); in atc_resume()
1622 struct at_dma *atdma = to_at_dma(chan->device); in atc_terminate_all() local
1639 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask); in atc_terminate_all()
1642 while (dma_readl(atdma, CHSR) & atchan->mask) in atc_terminate_all()
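Pausing a channel sets its suspend bit through CHER, resuming clears it through CHDR, and a full terminate combines the resume bit with the channel disable bit so that even a suspended channel can be switched off, then polls CHSR until the hardware confirms it. A sketch of the terminate core (locking and descriptor cleanup around it are omitted; the cpu_relax() is an assumption):

    /* Request disable; AT_DMA_RES() clears any pending suspend so the
     * disable can take effect, atchan->mask is this channel's bit. */
    dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);

    /* Wait for the controller to report the channel as stopped. */
    while (dma_readl(atdma, CHSR) & atchan->mask)
            cpu_relax();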
1728 struct at_dma *atdma = to_at_dma(chan->device); in atc_alloc_chan_resources() local
1748 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_device.dev); in atc_alloc_chan_resources()
1926 static void at_dma_off(struct at_dma *atdma) in at_dma_off() argument
1928 dma_writel(atdma, EN, 0); in at_dma_off()
1931 dma_writel(atdma, EBCIDR, -1L); in at_dma_off()
1934 while (dma_readl(atdma, CHSR) & atdma->all_chan_mask) in at_dma_off()
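at_dma_off() is the global quiesce path: clear the controller enable bit, mask every interrupt source via EBCIDR, and wait until no channel in all_chan_mask is still reported busy in CHSR. Reconstructed from the lines above (only the cpu_relax() in the wait loop is assumed):

    static void at_dma_off(struct at_dma *atdma)
    {
            dma_writel(atdma, EN, 0);

            /* Disable all interrupt sources. */
            dma_writel(atdma, EBCIDR, -1L);

            /* Confirm that all channels have stopped. */
            while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
                    cpu_relax();
    }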
1940 struct at_dma *atdma; in at_dma_probe() local
1960 atdma = devm_kzalloc(&pdev->dev, in at_dma_probe()
1961 struct_size(atdma, chan, plat_dat->nr_channels), in at_dma_probe()
1963 if (!atdma) in at_dma_probe()
1966 atdma->regs = devm_platform_ioremap_resource(pdev, 0); in at_dma_probe()
1967 if (IS_ERR(atdma->regs)) in at_dma_probe()
1968 return PTR_ERR(atdma->regs); in at_dma_probe()
1975 atdma->dma_device.cap_mask = plat_dat->cap_mask; in at_dma_probe()
1976 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1; in at_dma_probe()
1978 atdma->clk = devm_clk_get(&pdev->dev, "dma_clk"); in at_dma_probe()
1979 if (IS_ERR(atdma->clk)) in at_dma_probe()
1980 return PTR_ERR(atdma->clk); in at_dma_probe()
1982 err = clk_prepare_enable(atdma->clk); in at_dma_probe()
1987 at_dma_off(atdma); in at_dma_probe()
1989 err = request_irq(irq, at_dma_interrupt, 0, "at_hdmac", atdma); in at_dma_probe()
1993 platform_set_drvdata(pdev, atdma); in at_dma_probe()
1996 atdma->lli_pool = dma_pool_create("at_hdmac_lli_pool", in at_dma_probe()
1999 if (!atdma->lli_pool) { in at_dma_probe()
2006 atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool", in at_dma_probe()
2008 if (!atdma->memset_pool) { in at_dma_probe()
2015 while (dma_readl(atdma, EBCISR)) in at_dma_probe()
2019 INIT_LIST_HEAD(&atdma->dma_device.channels); in at_dma_probe()
2021 struct at_dma_chan *atchan = &atdma->chan[i]; in at_dma_probe()
2026 atchan->ch_regs = atdma->regs + ch_regs(i); in at_dma_probe()
2029 atchan->atdma = atdma; in at_dma_probe()
2031 vchan_init(&atchan->vc, &atdma->dma_device); in at_dma_probe()
2032 atc_enable_chan_irq(atdma, i); in at_dma_probe()
2036 atdma->dma_device.device_alloc_chan_resources = atc_alloc_chan_resources; in at_dma_probe()
2037 atdma->dma_device.device_free_chan_resources = atc_free_chan_resources; in at_dma_probe()
2038 atdma->dma_device.device_tx_status = atc_tx_status; in at_dma_probe()
2039 atdma->dma_device.device_issue_pending = atc_issue_pending; in at_dma_probe()
2040 atdma->dma_device.dev = &pdev->dev; in at_dma_probe()
2043 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask)) in at_dma_probe()
2044 atdma->dma_device.device_prep_interleaved_dma = atc_prep_dma_interleaved; in at_dma_probe()
2046 if (dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask)) in at_dma_probe()
2047 atdma->dma_device.device_prep_dma_memcpy = atc_prep_dma_memcpy; in at_dma_probe()
2049 if (dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask)) { in at_dma_probe()
2050 atdma->dma_device.device_prep_dma_memset = atc_prep_dma_memset; in at_dma_probe()
2051 atdma->dma_device.device_prep_dma_memset_sg = atc_prep_dma_memset_sg; in at_dma_probe()
2052 atdma->dma_device.fill_align = DMAENGINE_ALIGN_4_BYTES; in at_dma_probe()
2055 if (dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask)) { in at_dma_probe()
2056 atdma->dma_device.device_prep_slave_sg = atc_prep_slave_sg; in at_dma_probe()
2058 dma_cap_set(DMA_CYCLIC, atdma->dma_device.cap_mask); in at_dma_probe()
2059 atdma->dma_device.device_prep_dma_cyclic = atc_prep_dma_cyclic; in at_dma_probe()
2060 atdma->dma_device.device_config = atc_config; in at_dma_probe()
2061 atdma->dma_device.device_pause = atc_pause; in at_dma_probe()
2062 atdma->dma_device.device_resume = atc_resume; in at_dma_probe()
2063 atdma->dma_device.device_terminate_all = atc_terminate_all; in at_dma_probe()
2064 atdma->dma_device.src_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
2065 atdma->dma_device.dst_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
2066 atdma->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in at_dma_probe()
2067 atdma->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in at_dma_probe()
2070 dma_writel(atdma, EN, AT_DMA_ENABLE); in at_dma_probe()
2073 dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask) ? "cpy " : "", in at_dma_probe()
2074 dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask) ? "set " : "", in at_dma_probe()
2075 dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask) ? "slave " : "", in at_dma_probe()
2078 err = dma_async_device_register(&atdma->dma_device); in at_dma_probe()
2091 at_dma_xlate, atdma); in at_dma_probe()
2101 dma_async_device_unregister(&atdma->dma_device); in at_dma_probe()
2103 dma_pool_destroy(atdma->memset_pool); in at_dma_probe()
2105 dma_pool_destroy(atdma->lli_pool); in at_dma_probe()
2107 free_irq(platform_get_irq(pdev, 0), atdma); in at_dma_probe()
2109 clk_disable_unprepare(atdma->clk); in at_dma_probe()
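Taken together, the probe references follow the usual dmaengine bring-up order: allocate the device structure sized for nr_channels, map the registers, take and enable "dma_clk", quiesce the controller with at_dma_off(), install the shared handler, create the LLI and memset DMA pools, run vchan_init() plus atc_enable_chan_irq() for each channel, wire the dma_device callbacks according to the capability mask, write AT_DMA_ENABLE and register with the framework; the error labels unwind in reverse. A trimmed skeleton of that flow (label names and the elided middle section are assumptions; the calls themselves come from the lines above):

    static int at_dma_probe(struct platform_device *pdev)
    {
            struct at_dma_platform_data *plat_dat;  /* assumed type, from elided lookup */
            struct at_dma *atdma;
            int irq, err;

            /* ... obtain plat_dat and irq, elided ... */

            atdma = devm_kzalloc(&pdev->dev,
                                 struct_size(atdma, chan, plat_dat->nr_channels),
                                 GFP_KERNEL);
            if (!atdma)
                    return -ENOMEM;

            atdma->regs = devm_platform_ioremap_resource(pdev, 0);
            if (IS_ERR(atdma->regs))
                    return PTR_ERR(atdma->regs);

            atdma->clk = devm_clk_get(&pdev->dev, "dma_clk");
            if (IS_ERR(atdma->clk))
                    return PTR_ERR(atdma->clk);
            err = clk_prepare_enable(atdma->clk);
            if (err)
                    return err;

            at_dma_off(atdma);                      /* known quiescent state */

            err = request_irq(irq, at_dma_interrupt, 0, "at_hdmac", atdma);
            if (err)
                    goto err_irq;                   /* assumed label */
            platform_set_drvdata(pdev, atdma);

            /* ... DMA pools, per-channel vchan_init()/atc_enable_chan_irq(),
             * dma_device callback wiring, elided ... */

            dma_writel(atdma, EN, AT_DMA_ENABLE);

            err = dma_async_device_register(&atdma->dma_device);
            if (err)
                    goto err_register;              /* assumed label */
            return 0;

    err_register:
            /* destroy pools, free_irq(), ... */
    err_irq:
            clk_disable_unprepare(atdma->clk);
            return err;
    }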
2115 struct at_dma *atdma = platform_get_drvdata(pdev); in at_dma_remove() local
2118 at_dma_off(atdma); in at_dma_remove()
2121 dma_async_device_unregister(&atdma->dma_device); in at_dma_remove()
2123 dma_pool_destroy(atdma->memset_pool); in at_dma_remove()
2124 dma_pool_destroy(atdma->lli_pool); in at_dma_remove()
2125 free_irq(platform_get_irq(pdev, 0), atdma); in at_dma_remove()
2127 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_remove()
2130 atc_disable_chan_irq(atdma, chan->chan_id); in at_dma_remove()
2134 clk_disable_unprepare(atdma->clk); in at_dma_remove()
2139 struct at_dma *atdma = platform_get_drvdata(pdev); in at_dma_shutdown() local
2142 clk_disable_unprepare(atdma->clk); in at_dma_shutdown()
2147 struct at_dma *atdma = dev_get_drvdata(dev); in at_dma_prepare() local
2150 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_prepare()
2181 struct at_dma *atdma = dev_get_drvdata(dev); in at_dma_suspend_noirq() local
2185 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_suspend_noirq()
2193 atdma->save_imr = dma_readl(atdma, EBCIMR); in at_dma_suspend_noirq()
2196 at_dma_off(atdma); in at_dma_suspend_noirq()
2197 clk_disable_unprepare(atdma->clk); in at_dma_suspend_noirq()
2203 struct at_dma *atdma = to_at_dma(atchan->vc.chan.device); in atc_resume_cyclic() local
2212 dma_writel(atdma, CHER, atchan->mask); in atc_resume_cyclic()
2222 struct at_dma *atdma = dev_get_drvdata(dev); in at_dma_resume_noirq() local
2226 clk_prepare_enable(atdma->clk); in at_dma_resume_noirq()
2227 dma_writel(atdma, EN, AT_DMA_ENABLE); in at_dma_resume_noirq()
2230 while (dma_readl(atdma, EBCISR)) in at_dma_resume_noirq()
2234 dma_writel(atdma, EBCIER, atdma->save_imr); in at_dma_resume_noirq()
2235 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_resume_noirq()
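Across suspend and resume the only controller state the driver preserves itself is the interrupt mask: at_dma_suspend_noirq() snapshots EBCIMR into atdma->save_imr before shutting the block down, and at_dma_resume_noirq() re-enables the clock and controller, drains any stale status from EBCISR, and writes the snapshot back through EBCIER before restarting cyclic channels. A sketch of that pairing (the per-channel save/restore and atc_resume_cyclic() handling are elided; the cpu_relax() is an assumption):

    static int at_dma_suspend_noirq(struct device *dev)
    {
            struct at_dma *atdma = dev_get_drvdata(dev);

            /* ... per-channel suspend handled in the lines above ... */
            atdma->save_imr = dma_readl(atdma, EBCIMR);

            at_dma_off(atdma);
            clk_disable_unprepare(atdma->clk);
            return 0;
    }

    static int at_dma_resume_noirq(struct device *dev)
    {
            struct at_dma *atdma = dev_get_drvdata(dev);

            clk_prepare_enable(atdma->clk);
            dma_writel(atdma, EN, AT_DMA_ENABLE);

            /* Clear any interrupt status left over from before suspend. */
            while (dma_readl(atdma, EBCISR))
                    cpu_relax();

            dma_writel(atdma, EBCIER, atdma->save_imr);
            /* ... per-channel restore / atc_resume_cyclic() handled above ... */
            return 0;
    }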