Lines matching refs: fsl_edma
35 struct fsl_edma_engine *fsl_edma = dev_id; in fsl_edma_tx_handler() local
37 struct edma_regs *regs = &fsl_edma->regs; in fsl_edma_tx_handler()
39 intr = edma_readl(fsl_edma, regs->intl); in fsl_edma_tx_handler()
43 for (ch = 0; ch < fsl_edma->n_chans; ch++) { in fsl_edma_tx_handler()
45 edma_writeb(fsl_edma, EDMA_CINT_CINT(ch), regs->cint); in fsl_edma_tx_handler()
46 fsl_edma_tx_chan_handler(&fsl_edma->chans[ch]); in fsl_edma_tx_handler()
77 struct fsl_edma_engine *fsl_edma = dev_id; in fsl_edma_err_handler() local
79 struct edma_regs *regs = &fsl_edma->regs; in fsl_edma_err_handler()
81 err = edma_readl(fsl_edma, regs->errl); in fsl_edma_err_handler()
85 for (ch = 0; ch < fsl_edma->n_chans; ch++) { in fsl_edma_err_handler()
87 fsl_edma_disable_request(&fsl_edma->chans[ch]); in fsl_edma_err_handler()
88 edma_writeb(fsl_edma, EDMA_CERR_CERR(ch), regs->cerr); in fsl_edma_err_handler()
89 fsl_edma_err_chan_handler(&fsl_edma->chans[ch]); in fsl_edma_err_handler()
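Both interrupt handlers above share one shape: read the controller-wide status register, walk every channel, acknowledge that channel's bit, and dispatch to the per-channel handler (the error path additionally disables the hardware request before dispatching). A minimal sketch of the tx side, with the per-channel flag test and the return values written out as assumptions rather than quoted from the listing:

static irqreturn_t fsl_edma_tx_handler(int irq, void *dev_id)
{
	struct fsl_edma_engine *fsl_edma = dev_id;
	struct edma_regs *regs = &fsl_edma->regs;
	unsigned int intr, ch;

	intr = edma_readl(fsl_edma, regs->intl);
	if (!intr)
		return IRQ_NONE;	/* not ours: the line may be shared */

	for (ch = 0; ch < fsl_edma->n_chans; ch++) {
		if (intr & BIT(ch)) {	/* assumed per-channel flag test */
			edma_writeb(fsl_edma, EDMA_CINT_CINT(ch), regs->cint);
			fsl_edma_tx_chan_handler(&fsl_edma->chans[ch]);
		}
	}

	return IRQ_HANDLED;
}

The error handler runs the same loop against regs->errl/regs->cerr, inserting fsl_edma_disable_request() before the dispatch so a faulted channel stops issuing requests.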
103 static bool fsl_edma_srcid_in_use(struct fsl_edma_engine *fsl_edma, u32 srcid) in fsl_edma_srcid_in_use() argument
108 for (i = 0; i < fsl_edma->n_chans; i++) { in fsl_edma_srcid_in_use()
109 fsl_chan = &fsl_edma->chans[i]; in fsl_edma_srcid_in_use()
122 struct fsl_edma_engine *fsl_edma = ofdma->of_dma_data; in fsl_edma_xlate() local
125 u32 dmamux_nr = fsl_edma->drvdata->dmamuxs; in fsl_edma_xlate()
126 unsigned long chans_per_mux = fsl_edma->n_chans / dmamux_nr; in fsl_edma_xlate()
131 guard(mutex)(&fsl_edma->fsl_edma_mutex); in fsl_edma_xlate()
133 list_for_each_entry_safe(chan, _chan, &fsl_edma->dma_dev.channels, device_node) { in fsl_edma_xlate()
137 if (fsl_edma_srcid_in_use(fsl_edma, dma_spec->args[1])) in fsl_edma_xlate()
165 struct fsl_edma_engine *fsl_edma = ofdma->of_dma_data; in fsl_edma3_xlate() local
174 b_chmux = !!(fsl_edma->drvdata->flags & FSL_EDMA_DRV_HAS_CHMUX); in fsl_edma3_xlate()
176 guard(mutex)(&fsl_edma->fsl_edma_mutex); in fsl_edma3_xlate()
177 list_for_each_entry_safe(chan, _chan, &fsl_edma->dma_dev.channels, in fsl_edma3_xlate()
184 if (fsl_edma_srcid_in_use(fsl_edma, dma_spec->args[0])) in fsl_edma3_xlate()
186 i = fsl_chan - fsl_edma->chans; in fsl_edma3_xlate()
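Both xlate callbacks recover the engine from ofdma->of_dma_data, then scan dma_dev.channels under guard(mutex) and refuse a request whose source ID is already claimed (fsl_edma_srcid_in_use() above is the linear scan they use for that). The eDMA1 variant also has to map a channel onto one of dmamux_nr muxes, which is what the chans_per_mux division is for. A condensed sketch of that variant, assuming a two-cell spec (mux, source) and omitting the mux programming that follows a successful grab:

static struct dma_chan *fsl_edma_xlate(struct of_phandle_args *dma_spec,
				       struct of_dma *ofdma)
{
	struct fsl_edma_engine *fsl_edma = ofdma->of_dma_data;
	struct dma_chan *chan, *_chan;
	u32 dmamux_nr = fsl_edma->drvdata->dmamuxs;
	unsigned long chans_per_mux = fsl_edma->n_chans / dmamux_nr;

	if (dma_spec->args_count != 2)
		return NULL;

	guard(mutex)(&fsl_edma->fsl_edma_mutex);

	list_for_each_entry_safe(chan, _chan, &fsl_edma->dma_dev.channels,
				 device_node) {
		if (chan->client_count)
			continue;
		/* assumed: args[0] names the mux this channel must sit on */
		if (chan->chan_id / chans_per_mux != dma_spec->args[0])
			continue;
		/* args[1] is the request source; reject IDs already taken */
		if (fsl_edma_srcid_in_use(fsl_edma, dma_spec->args[1]))
			return NULL;
		return dma_get_slave_channel(chan);
	}

	return NULL;
}

The eDMA3 variant instead treats dma_spec->args[0] as the source ID and consults FSL_EDMA_DRV_HAS_CHMUX to decide whether a channel mux register needs programming.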
215 fsl_edma_irq_init(struct platform_device *pdev, struct fsl_edma_engine *fsl_edma) in fsl_edma_irq_init() argument
219 edma_writel(fsl_edma, ~0, fsl_edma->regs.intl); in fsl_edma_irq_init()
221 fsl_edma->txirq = platform_get_irq_byname(pdev, "edma-tx"); in fsl_edma_irq_init()
222 if (fsl_edma->txirq < 0) in fsl_edma_irq_init()
223 return fsl_edma->txirq; in fsl_edma_irq_init()
225 fsl_edma->errirq = platform_get_irq_byname(pdev, "edma-err"); in fsl_edma_irq_init()
226 if (fsl_edma->errirq < 0) in fsl_edma_irq_init()
227 return fsl_edma->errirq; in fsl_edma_irq_init()
229 if (fsl_edma->txirq == fsl_edma->errirq) { in fsl_edma_irq_init()
230 ret = devm_request_irq(&pdev->dev, fsl_edma->txirq, in fsl_edma_irq_init()
231 fsl_edma_irq_handler, 0, "eDMA", fsl_edma); in fsl_edma_irq_init()
237 ret = devm_request_irq(&pdev->dev, fsl_edma->txirq, in fsl_edma_irq_init()
238 fsl_edma_tx_handler, 0, "eDMA tx", fsl_edma); in fsl_edma_irq_init()
244 ret = devm_request_irq(&pdev->dev, fsl_edma->errirq, in fsl_edma_irq_init()
245 fsl_edma_err_handler, 0, "eDMA err", fsl_edma); in fsl_edma_irq_init()
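fsl_edma_irq_init() first masks everything by writing ~0 to regs.intl, then fetches both lines by name. When the SoC wires tx and err to the same line (txirq == errirq), a single combined handler is registered instead of two. The combined handler such a shared line implies is a two-step chain; a sketch, with the short-circuit on IRQ_HANDLED as an assumption:

static irqreturn_t fsl_edma_irq_handler(int irq, void *dev_id)
{
	/* tx completions first; otherwise fall through to the error scan */
	if (fsl_edma_tx_handler(irq, dev_id) == IRQ_HANDLED)
		return IRQ_HANDLED;

	return fsl_edma_err_handler(irq, dev_id);
}

fsl_edma_irq_exit() further down mirrors this: it frees one IRQ when the lines are shared and two when they are not, keeping teardown symmetric with registration.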
255 static int fsl_edma3_irq_init(struct platform_device *pdev, struct fsl_edma_engine *fsl_edma) in fsl_edma3_irq_init() argument
259 for (i = 0; i < fsl_edma->n_chans; i++) { in fsl_edma3_irq_init()
261 struct fsl_edma_chan *fsl_chan = &fsl_edma->chans[i]; in fsl_edma3_irq_init()
263 if (fsl_edma->chan_masked & BIT(i)) in fsl_edma3_irq_init()
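eDMA3 controllers interrupt per channel rather than per controller, so this init loop skips channels excluded by chan_masked and requests one line each. A sketch, assuming the line is fetched by channel index; fsl_edma3_tx_handler and fsl_chan->chan_name are assumptions here, not quoted from the listing:

static int fsl_edma3_irq_init(struct platform_device *pdev,
			      struct fsl_edma_engine *fsl_edma)
{
	int i, ret;

	for (i = 0; i < fsl_edma->n_chans; i++) {
		struct fsl_edma_chan *fsl_chan = &fsl_edma->chans[i];

		if (fsl_edma->chan_masked & BIT(i))
			continue;	/* fused-off or reserved channel */

		/* assumed: one interrupt per channel, fetched by index */
		ret = platform_get_irq(pdev, i);
		if (ret < 0)
			return ret;
		fsl_chan->txirq = ret;

		ret = devm_request_irq(&pdev->dev, fsl_chan->txirq,
				       fsl_edma3_tx_handler, IRQF_SHARED,
				       fsl_chan->chan_name, fsl_chan);
		if (ret)
			return ret;
	}

	return 0;
}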
279 struct fsl_edma_engine *fsl_edma) in fsl_edma2_irq_init() argument
284 edma_writel(fsl_edma, ~0, fsl_edma->regs.intl); in fsl_edma2_irq_init()
308 0, "eDMA2-ERR", fsl_edma); in fsl_edma2_irq_init()
310 fsl_edma->chans[i].txirq = irq; in fsl_edma2_irq_init()
311 fsl_edma->chans[i].irq_handler = fsl_edma2_tx_handler; in fsl_edma2_irq_init()
322 struct platform_device *pdev, struct fsl_edma_engine *fsl_edma) in fsl_edma_irq_exit() argument
324 if (fsl_edma->txirq == fsl_edma->errirq) { in fsl_edma_irq_exit()
325 devm_free_irq(&pdev->dev, fsl_edma->txirq, fsl_edma); in fsl_edma_irq_exit()
327 devm_free_irq(&pdev->dev, fsl_edma->txirq, fsl_edma); in fsl_edma_irq_exit()
328 devm_free_irq(&pdev->dev, fsl_edma->errirq, fsl_edma); in fsl_edma_irq_exit()
332 static void fsl_disable_clocks(struct fsl_edma_engine *fsl_edma, int nr_clocks) in fsl_disable_clocks() argument
337 clk_disable_unprepare(fsl_edma->muxclk[i]); in fsl_disable_clocks()
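fsl_disable_clocks() takes a count instead of unconditionally walking every mux clock. That lets probe's error paths unwind exactly the clocks enabled before the failure, which is why the listing later shows fsl_disable_clocks(fsl_edma, i) with the loop index. A reconstruction of the loop around the line shown, with the loop header assumed:

static void fsl_disable_clocks(struct fsl_edma_engine *fsl_edma, int nr_clocks)
{
	int i;

	/* callers on an error path pass the index reached, not dmamuxs */
	for (i = 0; i < nr_clocks; i++)
		clk_disable_unprepare(fsl_edma->muxclk[i]);
}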
420 static int fsl_edma3_attach_pd(struct platform_device *pdev, struct fsl_edma_engine *fsl_edma) in fsl_edma3_attach_pd() argument
430 for (i = 0; i < fsl_edma->n_chans; i++) { in fsl_edma3_attach_pd()
431 if (fsl_edma->chan_masked & BIT(i)) in fsl_edma3_attach_pd()
434 fsl_chan = &fsl_edma->chans[i]; in fsl_edma3_attach_pd()
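On eDMA3 parts each channel can sit in its own power domain, so fsl_edma3_attach_pd() walks the unmasked channels and binds each to a domain. The listing elides the attach itself; a plausible sketch using the generic dev_pm_domain_attach_by_id() helper, where the helper choice, the pd_dev field, and the device-link flags are all assumptions:

static int fsl_edma3_attach_pd(struct platform_device *pdev,
			       struct fsl_edma_engine *fsl_edma)
{
	struct fsl_edma_chan *fsl_chan;
	struct device *pd_chan;
	int i;

	for (i = 0; i < fsl_edma->n_chans; i++) {
		if (fsl_edma->chan_masked & BIT(i))
			continue;

		fsl_chan = &fsl_edma->chans[i];

		/* assumed: one genpd per channel, looked up by index */
		pd_chan = dev_pm_domain_attach_by_id(&pdev->dev, i);
		if (IS_ERR_OR_NULL(pd_chan))
			return -EPROBE_DEFER;
		fsl_chan->pd_dev = pd_chan;

		/* assumed: keep the domain up while the DMA device is */
		if (!device_link_add(&pdev->dev, pd_chan,
				     DL_FLAG_STATELESS | DL_FLAG_PM_RUNTIME |
				     DL_FLAG_RPM_ACTIVE))
			return -EINVAL;
	}

	return 0;
}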
463 struct fsl_edma_engine *fsl_edma; in fsl_edma_probe() local
483 fsl_edma = devm_kzalloc(&pdev->dev, struct_size(fsl_edma, chans, chans), in fsl_edma_probe()
485 if (!fsl_edma) in fsl_edma_probe()
488 fsl_edma->drvdata = drvdata; in fsl_edma_probe()
489 fsl_edma->n_chans = chans; in fsl_edma_probe()
490 mutex_init(&fsl_edma->fsl_edma_mutex); in fsl_edma_probe()
492 fsl_edma->membase = devm_platform_ioremap_resource(pdev, 0); in fsl_edma_probe()
493 if (IS_ERR(fsl_edma->membase)) in fsl_edma_probe()
494 return PTR_ERR(fsl_edma->membase); in fsl_edma_probe()
497 fsl_edma_setup_regs(fsl_edma); in fsl_edma_probe()
498 regs = &fsl_edma->regs; in fsl_edma_probe()
502 fsl_edma->dmaclk = devm_clk_get_enabled(&pdev->dev, "dma"); in fsl_edma_probe()
503 if (IS_ERR(fsl_edma->dmaclk)) { in fsl_edma_probe()
505 return PTR_ERR(fsl_edma->dmaclk); in fsl_edma_probe()
512 fsl_edma->chan_masked = chan_mask[1]; in fsl_edma_probe()
513 fsl_edma->chan_masked <<= 32; in fsl_edma_probe()
514 fsl_edma->chan_masked |= chan_mask[0]; in fsl_edma_probe()
517 for (i = 0; i < fsl_edma->drvdata->dmamuxs; i++) { in fsl_edma_probe()
524 fsl_edma->muxbase[i] = devm_platform_ioremap_resource(pdev, in fsl_edma_probe()
526 if (IS_ERR(fsl_edma->muxbase[i])) { in fsl_edma_probe()
528 fsl_disable_clocks(fsl_edma, i); in fsl_edma_probe()
529 return PTR_ERR(fsl_edma->muxbase[i]); in fsl_edma_probe()
533 fsl_edma->muxclk[i] = devm_clk_get_enabled(&pdev->dev, clkname); in fsl_edma_probe()
534 if (IS_ERR(fsl_edma->muxclk[i])) { in fsl_edma_probe()
537 return PTR_ERR(fsl_edma->muxclk[i]); in fsl_edma_probe()
541 fsl_edma->big_endian = of_property_read_bool(np, "big-endian"); in fsl_edma_probe()
544 ret = fsl_edma3_attach_pd(pdev, fsl_edma); in fsl_edma_probe()
552 INIT_LIST_HEAD(&fsl_edma->dma_dev.channels); in fsl_edma_probe()
553 for (i = 0; i < fsl_edma->n_chans; i++) { in fsl_edma_probe()
554 struct fsl_edma_chan *fsl_chan = &fsl_edma->chans[i]; in fsl_edma_probe()
557 if (fsl_edma->chan_masked & BIT(i)) in fsl_edma_probe()
563 fsl_chan->edma = fsl_edma; in fsl_edma_probe()
571 fsl_chan->tcd = fsl_edma->membase in fsl_edma_probe()
573 fsl_chan->mux_addr = fsl_edma->membase + drvdata->mux_off + i * drvdata->mux_skip; in fsl_edma_probe()
584 vchan_init(&fsl_chan->vchan, &fsl_edma->dma_dev); in fsl_edma_probe()
592 ret = fsl_edma->drvdata->setup_irq(pdev, fsl_edma); in fsl_edma_probe()
596 dma_cap_set(DMA_PRIVATE, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
597 dma_cap_set(DMA_SLAVE, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
598 dma_cap_set(DMA_CYCLIC, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
599 dma_cap_set(DMA_MEMCPY, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
601 fsl_edma->dma_dev.dev = &pdev->dev; in fsl_edma_probe()
602 fsl_edma->dma_dev.device_alloc_chan_resources in fsl_edma_probe()
604 fsl_edma->dma_dev.device_free_chan_resources in fsl_edma_probe()
606 fsl_edma->dma_dev.device_tx_status = fsl_edma_tx_status; in fsl_edma_probe()
607 fsl_edma->dma_dev.device_prep_slave_sg = fsl_edma_prep_slave_sg; in fsl_edma_probe()
608 fsl_edma->dma_dev.device_prep_dma_cyclic = fsl_edma_prep_dma_cyclic; in fsl_edma_probe()
609 fsl_edma->dma_dev.device_prep_dma_memcpy = fsl_edma_prep_memcpy; in fsl_edma_probe()
610 fsl_edma->dma_dev.device_config = fsl_edma_slave_config; in fsl_edma_probe()
611 fsl_edma->dma_dev.device_pause = fsl_edma_pause; in fsl_edma_probe()
612 fsl_edma->dma_dev.device_resume = fsl_edma_resume; in fsl_edma_probe()
613 fsl_edma->dma_dev.device_terminate_all = fsl_edma_terminate_all; in fsl_edma_probe()
614 fsl_edma->dma_dev.device_synchronize = fsl_edma_synchronize; in fsl_edma_probe()
615 fsl_edma->dma_dev.device_issue_pending = fsl_edma_issue_pending; in fsl_edma_probe()
617 fsl_edma->dma_dev.src_addr_widths = FSL_EDMA_BUSWIDTHS; in fsl_edma_probe()
618 fsl_edma->dma_dev.dst_addr_widths = FSL_EDMA_BUSWIDTHS; in fsl_edma_probe()
621 fsl_edma->dma_dev.src_addr_widths |= BIT(DMA_SLAVE_BUSWIDTH_8_BYTES); in fsl_edma_probe()
622 fsl_edma->dma_dev.dst_addr_widths |= BIT(DMA_SLAVE_BUSWIDTH_8_BYTES); in fsl_edma_probe()
625 fsl_edma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in fsl_edma_probe()
627 fsl_edma->dma_dev.directions |= BIT(DMA_DEV_TO_DEV); in fsl_edma_probe()
629 fsl_edma->dma_dev.copy_align = drvdata->flags & FSL_EDMA_DRV_ALIGN_64BYTE ? in fsl_edma_probe()
634 dma_set_max_seg_size(fsl_edma->dma_dev.dev, in fsl_edma_probe()
637 fsl_edma->dma_dev.residue_granularity = DMA_RESIDUE_GRANULARITY_SEGMENT; in fsl_edma_probe()
639 platform_set_drvdata(pdev, fsl_edma); in fsl_edma_probe()
641 ret = dma_async_device_register(&fsl_edma->dma_dev); in fsl_edma_probe()
650 fsl_edma); in fsl_edma_probe()
654 dma_async_device_unregister(&fsl_edma->dma_dev); in fsl_edma_probe()
660 edma_writel(fsl_edma, EDMA_CR_ERGA | EDMA_CR_ERCA, regs->cr); in fsl_edma_probe()
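Two probe details worth pulling out: the engine and its trailing chans[] array come from a single devm_kzalloc() sized with struct_size(), and the 64-bit chan_masked is assembled from a two-cell property by parking the high cell above bit 32. A sketch of the mask assembly as a standalone helper; the helper name and the property name it reads are assumptions:

static u64 fsl_edma_read_chan_mask(struct device_node *np)
{
	u32 chan_mask[2] = { 0, 0 };
	u64 masked;

	/* assumed binding: two u32 cells, low word first */
	of_property_read_u32_array(np, "dma-channel-mask", chan_mask, 2);

	masked = chan_mask[1];	/* high 32 bits go up top... */
	masked <<= 32;
	masked |= chan_mask[0];	/* ...then or in the low 32 */

	return masked;
}

The same mask gates every per-channel loop that follows (IRQ setup, power domains, channel registration, suspend/resume), so a fused-off channel is skipped consistently everywhere.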
668 struct fsl_edma_engine *fsl_edma = platform_get_drvdata(pdev); in fsl_edma_remove() local
670 fsl_edma_irq_exit(pdev, fsl_edma); in fsl_edma_remove()
671 fsl_edma_cleanup_vchan(&fsl_edma->dma_dev); in fsl_edma_remove()
673 dma_async_device_unregister(&fsl_edma->dma_dev); in fsl_edma_remove()
674 fsl_disable_clocks(fsl_edma, fsl_edma->drvdata->dmamuxs); in fsl_edma_remove()
679 struct fsl_edma_engine *fsl_edma = dev_get_drvdata(dev); in fsl_edma_suspend_late() local
684 for (i = 0; i < fsl_edma->n_chans; i++) { in fsl_edma_suspend_late()
685 fsl_chan = &fsl_edma->chans[i]; in fsl_edma_suspend_late()
686 if (fsl_edma->chan_masked & BIT(i)) in fsl_edma_suspend_late()
705 struct fsl_edma_engine *fsl_edma = dev_get_drvdata(dev); in fsl_edma_resume_early() local
707 struct edma_regs *regs = &fsl_edma->regs; in fsl_edma_resume_early()
710 for (i = 0; i < fsl_edma->n_chans; i++) { in fsl_edma_resume_early()
711 fsl_chan = &fsl_edma->chans[i]; in fsl_edma_resume_early()
712 if (fsl_edma->chan_masked & BIT(i)) in fsl_edma_resume_early()
720 if (!(fsl_edma->drvdata->flags & FSL_EDMA_DRV_SPLIT_REG)) in fsl_edma_resume_early()
721 edma_writel(fsl_edma, EDMA_CR_ERGA | EDMA_CR_ERCA, regs->cr); in fsl_edma_resume_early()
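The PM callbacks reuse the chan_masked skip from probe, and resume finishes by rewriting EDMA_CR_ERGA | EDMA_CR_ERCA, the same round-robin arbitration setup probe does, but only on controllers without the FSL_EDMA_DRV_SPLIT_REG layout. A sketch of the resume loop; fsl_edma_restore_chan() is a hypothetical stand-in for the per-channel reprogramming the listing elides:

static int fsl_edma_resume_early(struct device *dev)
{
	struct fsl_edma_engine *fsl_edma = dev_get_drvdata(dev);
	struct edma_regs *regs = &fsl_edma->regs;
	struct fsl_edma_chan *fsl_chan;
	int i;

	for (i = 0; i < fsl_edma->n_chans; i++) {
		fsl_chan = &fsl_edma->chans[i];
		if (fsl_edma->chan_masked & BIT(i))
			continue;
		/* hypothetical: rewrite the channel's TCD and mux state */
		fsl_edma_restore_chan(fsl_chan);
	}

	/* legacy (non split-reg) controllers: re-arm channel arbitration */
	if (!(fsl_edma->drvdata->flags & FSL_EDMA_DRV_SPLIT_REG))
		edma_writel(fsl_edma, EDMA_CR_ERGA | EDMA_CR_ERCA, regs->cr);

	return 0;
}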