Lines matching full:od in drivers/dma/owl-dma.c (Actions Semi Owl SoC DMA controller driver)

266 static void dma_update(struct owl_dma *od, u32 reg, u32 val, bool state) in dma_update() argument
270 regval = readl(od->base + reg); in dma_update()
277 writel(regval, od->base + reg); in dma_update()
280 static void dma_writel(struct owl_dma *od, u32 reg, u32 data) in dma_writel() argument
282 writel(data, od->base + reg); in dma_writel()
285 static u32 dma_readl(struct owl_dma *od, u32 reg) in dma_readl() argument
287 return readl(od->base + reg); in dma_readl()
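These three helpers wrap the controller's global MMIO block; dma_update() is a read-modify-write that sets or clears the bits in val depending on state. A minimal reconstruction from the fragments above (assume the driver's usual includes, linux/io.h among them, and its struct owl_dma definition):

static void dma_update(struct owl_dma *od, u32 reg, u32 val, bool state)
{
	u32 regval;

	regval = readl(od->base + reg);

	if (state)
		regval |= val;		/* set the requested bits */
	else
		regval &= ~val;		/* clear the requested bits */

	writel(regval, od->base + reg);
}

static void dma_writel(struct owl_dma *od, u32 reg, u32 data)
{
	writel(data, od->base + reg);
}

static u32 dma_readl(struct owl_dma *od, u32 reg)
{
	return readl(od->base + reg);
}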
342 static void owl_dma_free_lli(struct owl_dma *od, in owl_dma_free_lli() argument
346 dma_pool_free(od->lli_pool, lli, lli->phys); in owl_dma_free_lli()
349 static struct owl_dma_lli *owl_dma_alloc_lli(struct owl_dma *od) in owl_dma_alloc_lli() argument
354 lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys); in owl_dma_alloc_lli()
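Each hardware frame is described by a linked-list item (LLI) carved out of a dma_pool, so allocation works in atomic context via GFP_NOWAIT. A plausible reconstruction; the node and phys members are inferred from how the free path uses them:

static void owl_dma_free_lli(struct owl_dma *od,
			     struct owl_dma_lli *lli)
{
	list_del(&lli->node);
	dma_pool_free(od->lli_pool, lli, lli->phys);
}

static struct owl_dma_lli *owl_dma_alloc_lli(struct owl_dma *od)
{
	struct owl_dma_lli *lli;
	dma_addr_t phys;

	/* GFP_NOWAIT: prep callbacks may be called in atomic context */
	lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys);
	if (!lli)
		return NULL;

	INIT_LIST_HEAD(&lli->node);
	lli->phys = phys;	/* kept so the LLI can go back to the pool */

	return lli;
}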
388 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_cfg_lli() local
445 if (od->devid == S700_DMA) { in owl_dma_cfg_lli()
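owl_dma_cfg_lli() is where the od->devid check matters: the S700 and S900 pack frame length and frame count into different descriptor words. A hedged sketch of just that branch; FCNT_VAL, llc_hw_ctrlb() and the OWL_DMADESC_* indices are taken from the mainline owl-dma driver and may differ by kernel version:

	if (od->devid == S700_DMA) {
		/* S700: the FLEN word holds only the frame length ... */
		lli->hw[OWL_DMADESC_FLEN] = len;
		/* ... and the frame count shares a word with ctrlb */
		lli->hw[OWL_DMADESC_CTRLB] = FCNT_VAL | llc_hw_ctrlb(mode);
	} else {
		/* S900: frame length (low 20 bits) and count share a word */
		lli->hw[OWL_DMADESC_FLEN] = len | FCNT_VAL << 20;
		lli->hw[OWL_DMADESC_CTRLB] = llc_hw_ctrlb(mode);
	}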
468 static struct owl_dma_pchan *owl_dma_get_pchan(struct owl_dma *od, in owl_dma_get_pchan() argument
475 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_get_pchan()
476 pchan = &od->pchans[i]; in owl_dma_get_pchan()
478 spin_lock_irqsave(&od->lock, flags); in owl_dma_get_pchan()
481 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_get_pchan()
485 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_get_pchan()
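owl_dma_get_pchan() claims the first idle physical channel, taking od->lock only around each test so the critical section stays short. Reconstructed from the fragments, except that this sketch returns NULL when every channel is busy:

static struct owl_dma_pchan *owl_dma_get_pchan(struct owl_dma *od,
					       struct owl_dma_vchan *vchan)
{
	struct owl_dma_pchan *pchan;
	unsigned long flags;
	int i;

	for (i = 0; i < od->nr_pchans; i++) {
		pchan = &od->pchans[i];

		spin_lock_irqsave(&od->lock, flags);
		if (!pchan->vchan) {
			/* Claim this physical channel for the vchan */
			pchan->vchan = vchan;
			spin_unlock_irqrestore(&od->lock, flags);
			return pchan;
		}
		spin_unlock_irqrestore(&od->lock, flags);
	}

	return NULL;	/* every physical channel is in use */
}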
491 static int owl_dma_pchan_busy(struct owl_dma *od, struct owl_dma_pchan *pchan) in owl_dma_pchan_busy() argument
495 val = dma_readl(od, OWL_DMA_IDLE_STAT); in owl_dma_pchan_busy()
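The busy check reads the global idle-status register; a set bit means idle, so the result is inverted:

static int owl_dma_pchan_busy(struct owl_dma *od, struct owl_dma_pchan *pchan)
{
	unsigned int val;

	val = dma_readl(od, OWL_DMA_IDLE_STAT);

	/* OWL_DMA_IDLE_STAT has a bit set while the channel is idle */
	return !(val & (1 << pchan->id));
}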
500 static void owl_dma_terminate_pchan(struct owl_dma *od, in owl_dma_terminate_pchan() argument
509 spin_lock_irqsave(&od->lock, flags); in owl_dma_terminate_pchan()
510 dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), false); in owl_dma_terminate_pchan()
512 irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_terminate_pchan()
514 dev_warn(od->dma.dev, in owl_dma_terminate_pchan()
517 dma_writel(od, OWL_DMA_IRQ_PD0, (1 << pchan->id)); in owl_dma_terminate_pchan()
522 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_terminate_pchan()
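owl_dma_terminate_pchan() stops the channel, masks its bit in OWL_DMA_IRQ_EN0, and acknowledges any interrupt already latched in OWL_DMA_IRQ_PD0, holding od->lock so the enable/pending pair stays consistent with the interrupt handler. Sketch; the pchan_writel()/pchan_update() helpers and OWL_DMAX_* per-channel offsets are assumed from the mainline driver:

static void owl_dma_terminate_pchan(struct owl_dma *od,
				    struct owl_dma_pchan *pchan)
{
	unsigned long flags;
	u32 irq_pd;

	/* Stop the channel and clear its per-channel interrupt status */
	pchan_writel(pchan, OWL_DMAX_START, 0);
	pchan_update(pchan, OWL_DMAX_INT_STATUS, 0xff, false);

	spin_lock_irqsave(&od->lock, flags);
	dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), false);

	irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0);
	if (irq_pd & (1 << pchan->id)) {
		dev_warn(od->dma.dev,
			 "terminating pchan %d that still has pending irq\n",
			 pchan->id);
		/* Write-1-to-clear the stale pending bit */
		dma_writel(od, OWL_DMA_IRQ_PD0, (1 << pchan->id));
	}

	pchan->vchan = NULL;

	spin_unlock_irqrestore(&od->lock, flags);
}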
537 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_start_next_txd() local
550 while (owl_dma_pchan_busy(od, pchan)) in owl_dma_start_next_txd()
570 spin_lock_irqsave(&od->lock, flags); in owl_dma_start_next_txd()
572 dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), true); in owl_dma_start_next_txd()
574 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_start_next_txd()
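owl_dma_start_next_txd() dequeues the next virt-dma descriptor, spins until the hardware reports the channel idle, points the channel at the first LLI, unmasks its interrupt under od->lock, and starts the transfer. Condensed sketch; mode, link-list control and interrupt-control programming are omitted, and register names follow the mainline driver:

static int owl_dma_start_next_txd(struct owl_dma_vchan *vchan)
{
	struct owl_dma *od = to_owl_dma(vchan->vc.chan.device);
	struct virt_dma_desc *vd = vchan_next_desc(&vchan->vc);
	struct owl_dma_pchan *pchan = vchan->pchan;
	struct owl_dma_txd *txd = to_owl_txd(&vd->tx);
	struct owl_dma_lli *lli;
	unsigned long flags;

	list_del(&vd->node);
	vchan->txd = txd;

	/* Wait for the channel to go idle before reprogramming it */
	while (owl_dma_pchan_busy(od, pchan))
		cpu_relax();

	/* Chain the hardware to the first linked-list item */
	lli = list_first_entry(&txd->lli_list, struct owl_dma_lli, node);
	pchan_writel(pchan, OWL_DMAX_NEXT_DESCRIPTOR, lli->phys);

	spin_lock_irqsave(&od->lock, flags);
	dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), true);
	spin_unlock_irqrestore(&od->lock, flags);

	/* Kick off the transfer */
	pchan_writel(pchan, OWL_DMAX_START, 0x1);

	return 0;
}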
584 static void owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan) in owl_dma_phy_free() argument
587 owl_dma_terminate_pchan(od, vchan->pchan); in owl_dma_phy_free()
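owl_dma_phy_free() is the counterpart of owl_dma_get_pchan(): it stops the hardware and detaches the physical channel so another vchan can claim it:

static void owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan)
{
	/* Make sure the physical channel is stopped before detaching it */
	owl_dma_terminate_pchan(od, vchan->pchan);

	vchan->pchan = NULL;
}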
594 struct owl_dma *od = dev_id; in owl_dma_interrupt() local
601 spin_lock(&od->lock); in owl_dma_interrupt()
603 pending = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
606 for_each_set_bit(i, &pending, od->nr_pchans) { in owl_dma_interrupt()
607 pchan = &od->pchans[i]; in owl_dma_interrupt()
612 dma_writel(od, OWL_DMA_IRQ_PD0, pending); in owl_dma_interrupt()
615 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_interrupt()
616 pchan = &od->pchans[i]; in owl_dma_interrupt()
621 dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
623 global_irq_pending = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
626 dev_dbg(od->dma.dev, in owl_dma_interrupt()
638 spin_unlock(&od->lock); in owl_dma_interrupt()
640 for_each_set_bit(i, &pending, od->nr_pchans) { in owl_dma_interrupt()
643 pchan = &od->pchans[i]; in owl_dma_interrupt()
647 dev_warn(od->dma.dev, "no vchan attached on pchan %d\n", in owl_dma_interrupt()
667 owl_dma_phy_free(od, vchan); in owl_dma_interrupt()
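The interrupt handler runs in two phases. Under od->lock it acknowledges everything pending in OWL_DMA_IRQ_PD0 (write-1-to-clear) and then cross-checks each channel's own status against the global pending word, picking up interrupts the controller latched per-channel but dropped globally. Completions are then processed outside od->lock, under each vchan's lock. Condensed sketch along the lines of the mainline handler:

static irqreturn_t owl_dma_interrupt(int irq, void *dev_id)
{
	struct owl_dma *od = dev_id;
	struct owl_dma_pchan *pchan;
	struct owl_dma_vchan *vchan;
	unsigned long pending;
	int i;

	spin_lock(&od->lock);

	pending = dma_readl(od, OWL_DMA_IRQ_PD0);

	/* Phase 1: ack per-channel status, then the global pending bits */
	for_each_set_bit(i, &pending, od->nr_pchans)
		pchan_update(&od->pchans[i], OWL_DMAX_INT_STATUS, 0xff, false);
	dma_writel(od, OWL_DMA_IRQ_PD0, pending);

	/* Phase 2: catch interrupts latched per-channel but not globally */
	for (i = 0; i < od->nr_pchans; i++) {
		u32 chan_irq_pending, global_irq_pending;

		pchan = &od->pchans[i];
		chan_irq_pending = pchan_readl(pchan, OWL_DMAX_INT_CTL) &
				   pchan_readl(pchan, OWL_DMAX_INT_STATUS);

		/* Dummy read so OWL_DMA_IRQ_PD0 is up to date */
		dma_readl(od, OWL_DMA_IRQ_PD0);
		global_irq_pending = dma_readl(od, OWL_DMA_IRQ_PD0);

		if (chan_irq_pending && !(global_irq_pending & BIT(i))) {
			dev_dbg(od->dma.dev,
				"global and channel IRQ pending match err\n");
			pchan_update(pchan, OWL_DMAX_INT_STATUS, 0xff, false);
			pending |= BIT(i);
		}
	}

	spin_unlock(&od->lock);

	/* Complete descriptors; reuse or release the physical channels */
	for_each_set_bit(i, &pending, od->nr_pchans) {
		struct owl_dma_txd *txd;

		pchan = &od->pchans[i];
		vchan = pchan->vchan;
		if (!vchan) {
			dev_warn(od->dma.dev, "no vchan attached on pchan %d\n",
				 pchan->id);
			continue;
		}

		spin_lock(&vchan->vc.lock);
		txd = vchan->txd;
		if (txd) {
			vchan->txd = NULL;
			vchan_cookie_complete(&txd->vd);

			/* Start the next queued txd, else free the pchan */
			if (vchan_next_desc(&vchan->vc))
				owl_dma_start_next_txd(vchan);
			else
				owl_dma_phy_free(od, vchan);
		}
		spin_unlock(&vchan->vc.lock);
	}

	return IRQ_HANDLED;
}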
676 static void owl_dma_free_txd(struct owl_dma *od, struct owl_dma_txd *txd) in owl_dma_free_txd() argument
684 owl_dma_free_lli(od, lli); in owl_dma_free_txd()
691 struct owl_dma *od = to_owl_dma(vd->tx.chan->device); in owl_dma_desc_free() local
694 owl_dma_free_txd(od, txd); in owl_dma_desc_free()
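Descriptor teardown walks the txd's LLI list, returning each item to the pool; owl_dma_desc_free() is the virt-dma desc_free callback that funnels into it:

static void owl_dma_free_txd(struct owl_dma *od, struct owl_dma_txd *txd)
{
	struct owl_dma_lli *lli, *_lli;

	if (unlikely(!txd))
		return;

	/* Return every linked-list item to the dma_pool */
	list_for_each_entry_safe(lli, _lli, &txd->lli_list, node)
		owl_dma_free_lli(od, lli);

	kfree(txd);
}

static void owl_dma_desc_free(struct virt_dma_desc *vd)
{
	struct owl_dma *od = to_owl_dma(vd->tx.chan->device);
	struct owl_dma_txd *txd = to_owl_txd(&vd->tx);

	owl_dma_free_txd(od, txd);
}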
699 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_terminate_all() local
707 owl_dma_phy_free(od, vchan); in owl_dma_terminate_all()
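device_terminate_all releases the physical channel if one is attached, frees any in-flight descriptor, then drains whatever is still queued on the vchan. A sketch following the usual virt-dma pattern:

static int owl_dma_terminate_all(struct dma_chan *chan)
{
	struct owl_dma *od = to_owl_dma(chan->device);
	struct owl_dma_vchan *vchan = to_owl_vchan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&vchan->vc.lock, flags);

	if (vchan->pchan)
		owl_dma_phy_free(od, vchan);

	if (vchan->txd) {
		owl_dma_desc_free(&vchan->txd->vd);
		vchan->txd = NULL;
	}

	vchan_get_all_descriptors(&vchan->vc, &head);

	spin_unlock_irqrestore(&vchan->vc.lock, flags);

	vchan_dma_desc_free_list(&vchan->vc, &head);

	return 0;
}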
840 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_phy_alloc_and_start() local
843 pchan = owl_dma_get_pchan(od, vchan); in owl_dma_phy_alloc_and_start()
847 dev_dbg(od->dma.dev, "allocated pchan %d\n", pchan->id); in owl_dma_phy_alloc_and_start()
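owl_dma_phy_alloc_and_start() pairs channel acquisition with the first submit; if no physical channel is free it simply returns, and the vchan is restarted from the interrupt handler once a channel frees up:

static void owl_dma_phy_alloc_and_start(struct owl_dma_vchan *vchan)
{
	struct owl_dma *od = to_owl_dma(vchan->vc.chan.device);
	struct owl_dma_pchan *pchan;

	pchan = owl_dma_get_pchan(od, vchan);
	if (!pchan)
		return;	/* all pchans busy; retried when one is released */

	dev_dbg(od->dma.dev, "allocated pchan %d\n", pchan->id);

	vchan->pchan = pchan;
	owl_dma_start_next_txd(vchan);
}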
871 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_prep_memcpy() local
889 lli = owl_dma_alloc_lli(od); in owl_dma_prep_memcpy()
911 owl_dma_free_txd(od, txd); in owl_dma_prep_memcpy()
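All three prep callbacks share one shape: allocate a txd, slice the request into frames no larger than the hardware maximum, allocate and configure one LLI per frame, and unwind through owl_dma_free_txd() on any failure. Condensed memcpy sketch; OWL_DMA_FRAME_MAX_LENGTH and the owl_dma_cfg_lli()/owl_dma_add_lli() signatures are assumed from the mainline driver:

static struct dma_async_tx_descriptor *
owl_dma_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		    size_t len, unsigned long flags)
{
	struct owl_dma *od = to_owl_dma(chan->device);
	struct owl_dma_vchan *vchan = to_owl_vchan(chan);
	struct owl_dma_lli *lli, *prev = NULL;
	struct owl_dma_txd *txd;
	size_t offset, bytes;

	if (!len)
		return NULL;

	txd = kzalloc(sizeof(*txd), GFP_NOWAIT);
	if (!txd)
		return NULL;

	INIT_LIST_HEAD(&txd->lli_list);

	/* One LLI per hardware frame */
	for (offset = 0; offset < len; offset += bytes) {
		lli = owl_dma_alloc_lli(od);
		if (!lli)
			goto err_txd_free;

		bytes = min_t(size_t, len - offset, OWL_DMA_FRAME_MAX_LENGTH);

		if (owl_dma_cfg_lli(vchan, lli, src + offset, dst + offset,
				    bytes, DMA_MEM_TO_MEM, &vchan->cfg,
				    txd->cyclic))
			goto err_txd_free;

		prev = owl_dma_add_lli(txd, prev, lli, false);
	}

	return vchan_tx_prep(&vchan->vc, &txd->vd, flags);

err_txd_free:
	owl_dma_free_txd(od, txd);
	return NULL;
}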
922 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_prep_slave_sg() local
943 dev_err(od->dma.dev, in owl_dma_prep_slave_sg()
948 lli = owl_dma_alloc_lli(od); in owl_dma_prep_slave_sg()
975 owl_dma_free_txd(od, txd); in owl_dma_prep_slave_sg()
987 struct owl_dma *od = to_owl_dma(chan->device); in owl_prep_dma_cyclic() local
1004 lli = owl_dma_alloc_lli(od); in owl_prep_dma_cyclic()
1037 owl_dma_free_txd(od, txd); in owl_prep_dma_cyclic()
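owl_dma_prep_slave_sg() follows the same skeleton but walks the scatterlist instead of slicing a flat range, and owl_prep_dma_cyclic() builds one LLI per period and then closes the ring, roughly:

	/* After building one LLI per period (prev = last, first = head) ... */
	owl_dma_add_lli(txd, prev, first, true);	/* last links back to first */

The true flag marks the cyclic link so the LLI list itself is not extended.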
1050 static inline void owl_dma_free(struct owl_dma *od) in owl_dma_free() argument
1056 next, &od->dma.channels, vc.chan.device_node) { in owl_dma_free()
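owl_dma_free() is the teardown helper that detaches every vchan registered on the dmaengine device and kills its virt-dma tasklet:

static inline void owl_dma_free(struct owl_dma *od)
{
	struct owl_dma_vchan *vchan = NULL;
	struct owl_dma_vchan *next;

	list_for_each_entry_safe(vchan, next,
				 &od->dma.channels, vc.chan.device_node) {
		list_del(&vchan->vc.chan.device_node);
		/* Make sure no completion tasklet is still scheduled */
		tasklet_kill(&vchan->vc.task);
	}
}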
1065 struct owl_dma *od = ofdma->of_dma_data; in owl_dma_of_xlate() local
1070 if (drq > od->nr_vchans) in owl_dma_of_xlate()
1073 chan = dma_get_any_slave_channel(&od->dma); in owl_dma_of_xlate()
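The #dma-cells translator: the single cell is the device request line (drq). It is bounds-checked against nr_vchans and stashed on whichever free vchan dma_get_any_slave_channel() returns:

static struct dma_chan *owl_dma_of_xlate(struct of_phandle_args *dma_spec,
					 struct of_dma *ofdma)
{
	struct owl_dma *od = ofdma->of_dma_data;
	struct owl_dma_vchan *vchan;
	struct dma_chan *chan;
	u8 drq = dma_spec->args[0];

	if (drq > od->nr_vchans)
		return NULL;

	chan = dma_get_any_slave_channel(&od->dma);
	if (!chan)
		return NULL;

	vchan = to_owl_vchan(chan);
	vchan->drq = drq;	/* request line used when building LLIs */

	return chan;
}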
1094 struct owl_dma *od; in owl_dma_probe() local
1097 od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL); in owl_dma_probe()
1098 if (!od) in owl_dma_probe()
1101 od->base = devm_platform_ioremap_resource(pdev, 0); in owl_dma_probe()
1102 if (IS_ERR(od->base)) in owl_dma_probe()
1103 return PTR_ERR(od->base); in owl_dma_probe()
1120 od->devid = (uintptr_t)of_device_get_match_data(&pdev->dev); in owl_dma_probe()
1122 od->nr_pchans = nr_channels; in owl_dma_probe()
1123 od->nr_vchans = nr_requests; in owl_dma_probe()
1127 platform_set_drvdata(pdev, od); in owl_dma_probe()
1128 spin_lock_init(&od->lock); in owl_dma_probe()
1130 dma_cap_set(DMA_MEMCPY, od->dma.cap_mask); in owl_dma_probe()
1131 dma_cap_set(DMA_SLAVE, od->dma.cap_mask); in owl_dma_probe()
1132 dma_cap_set(DMA_CYCLIC, od->dma.cap_mask); in owl_dma_probe()
1134 od->dma.dev = &pdev->dev; in owl_dma_probe()
1135 od->dma.device_free_chan_resources = owl_dma_free_chan_resources; in owl_dma_probe()
1136 od->dma.device_tx_status = owl_dma_tx_status; in owl_dma_probe()
1137 od->dma.device_issue_pending = owl_dma_issue_pending; in owl_dma_probe()
1138 od->dma.device_prep_dma_memcpy = owl_dma_prep_memcpy; in owl_dma_probe()
1139 od->dma.device_prep_slave_sg = owl_dma_prep_slave_sg; in owl_dma_probe()
1140 od->dma.device_prep_dma_cyclic = owl_prep_dma_cyclic; in owl_dma_probe()
1141 od->dma.device_config = owl_dma_config; in owl_dma_probe()
1142 od->dma.device_pause = owl_dma_pause; in owl_dma_probe()
1143 od->dma.device_resume = owl_dma_resume; in owl_dma_probe()
1144 od->dma.device_terminate_all = owl_dma_terminate_all; in owl_dma_probe()
1145 od->dma.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in owl_dma_probe()
1146 od->dma.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in owl_dma_probe()
1147 od->dma.directions = BIT(DMA_MEM_TO_MEM); in owl_dma_probe()
1148 od->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in owl_dma_probe()
1150 INIT_LIST_HEAD(&od->dma.channels); in owl_dma_probe()
1152 od->clk = devm_clk_get(&pdev->dev, NULL); in owl_dma_probe()
1153 if (IS_ERR(od->clk)) { in owl_dma_probe()
1155 return PTR_ERR(od->clk); in owl_dma_probe()
1163 od->irq = platform_get_irq(pdev, 0); in owl_dma_probe()
1164 ret = devm_request_irq(&pdev->dev, od->irq, owl_dma_interrupt, 0, in owl_dma_probe()
1165 dev_name(&pdev->dev), od); in owl_dma_probe()
1172 od->pchans = devm_kcalloc(&pdev->dev, od->nr_pchans, in owl_dma_probe()
1174 if (!od->pchans) in owl_dma_probe()
1177 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_probe()
1178 struct owl_dma_pchan *pchan = &od->pchans[i]; in owl_dma_probe()
1181 pchan->base = od->base + OWL_DMA_CHAN_BASE(i); in owl_dma_probe()
1185 od->vchans = devm_kcalloc(&pdev->dev, od->nr_vchans, in owl_dma_probe()
1187 if (!od->vchans) in owl_dma_probe()
1190 for (i = 0; i < od->nr_vchans; i++) { in owl_dma_probe()
1191 struct owl_dma_vchan *vchan = &od->vchans[i]; in owl_dma_probe()
1194 vchan_init(&vchan->vc, &od->dma); in owl_dma_probe()
1198 od->lli_pool = dma_pool_create(dev_name(od->dma.dev), od->dma.dev, in owl_dma_probe()
1202 if (!od->lli_pool) { in owl_dma_probe()
1207 clk_prepare_enable(od->clk); in owl_dma_probe()
1209 ret = dma_async_device_register(&od->dma); in owl_dma_probe()
1217 owl_dma_of_xlate, od); in owl_dma_probe()
1226 dma_async_device_unregister(&od->dma); in owl_dma_probe()
1228 clk_disable_unprepare(od->clk); in owl_dma_probe()
1229 dma_pool_destroy(od->lli_pool); in owl_dma_probe()
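The tail of probe, with its error path unwinding in reverse setup order; the fragments above confirm the call sequence, while the label names are assumptions:

	clk_prepare_enable(od->clk);

	ret = dma_async_device_register(&od->dma);
	if (ret) {
		dev_err(&pdev->dev, "failed to register DMA engine device\n");
		goto err_pool_free;
	}

	/* Register with OF so clients can look the controller up */
	ret = of_dma_controller_register(pdev->dev.of_node,
					 owl_dma_of_xlate, od);
	if (ret) {
		dev_err(&pdev->dev, "of_dma_controller_register failed\n");
		goto err_dma_unregister;
	}

	return 0;

err_dma_unregister:
	dma_async_device_unregister(&od->dma);
err_pool_free:
	clk_disable_unprepare(od->clk);
	dma_pool_destroy(od->lli_pool);

	return ret;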
1236 struct owl_dma *od = platform_get_drvdata(pdev); in owl_dma_remove() local
1239 dma_async_device_unregister(&od->dma); in owl_dma_remove()
1242 dma_writel(od, OWL_DMA_IRQ_EN0, 0x0); in owl_dma_remove()
1245 devm_free_irq(od->dma.dev, od->irq, od); in owl_dma_remove()
1247 owl_dma_free(od); in owl_dma_remove()
1249 clk_disable_unprepare(od->clk); in owl_dma_remove()
1250 dma_pool_destroy(od->lli_pool); in owl_dma_remove()
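Teardown order in remove matters: unregister first so no new work arrives, mask OWL_DMA_IRQ_EN0 and free the IRQ so the handler cannot run mid-teardown, then kill the vchan tasklets before the clock and pool go away. Sketch (the exact remove() prototype varies by kernel version; of_dma_controller_free() is assumed to pair with the registration in probe):

static int owl_dma_remove(struct platform_device *pdev)
{
	struct owl_dma *od = platform_get_drvdata(pdev);

	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&od->dma);

	/* Mask all interrupts for this execution environment */
	dma_writel(od, OWL_DMA_IRQ_EN0, 0x0);

	/* Make sure we won't get any further interrupts */
	devm_free_irq(od->dma.dev, od->irq, od);

	owl_dma_free(od);

	clk_disable_unprepare(od->clk);
	dma_pool_destroy(od->lli_pool);

	return 0;
}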