Lines matching "sc9860", "ap", "clk" (full-text search hits in drivers/dma/sprd-dma.c)

4  * SPDX-License-Identifier: GPL-2.0
7 #include <linux/clk.h>
8 #include <linux/dma-mapping.h>
9 #include <linux/dma/sprd-dma.h>
22 #include "virt-dma.h"
41 #define SPRD_DMA_GLB_REQ_UID(uid) (0x4 * ((uid) - 1))
211 struct clk *clk; member
212 struct clk *ashb_clk;
233 return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]); in to_sprd_dma_dev()
244 u32 orig = readl(sdev->glb_base + reg); in sprd_dma_glb_update()
248 writel(tmp, sdev->glb_base + reg); in sprd_dma_glb_update()
254 u32 orig = readl(schan->chn_base + reg); in sprd_dma_chn_update()
258 writel(tmp, schan->chn_base + reg); in sprd_dma_chn_update()
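The two helpers above implement a plain read-modify-write on the global and per-channel register blocks. A minimal sketch of the complete helper, reconstructed from the fragments above (the mask/val parameter names are assumptions, not taken from the listing):

static void sprd_dma_glb_update(struct sprd_dma_dev *sdev, u32 reg,
				u32 mask, u32 val)
{
	/* Read the current value, clear the masked field, then merge in the new bits. */
	u32 orig = readl(sdev->glb_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, sdev->glb_base + reg);
}

sprd_dma_chn_update() presumably does the same thing against schan->chn_base.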
265 ret = clk_prepare_enable(sdev->clk); in sprd_dma_enable()
273 if (!IS_ERR(sdev->ashb_clk)) in sprd_dma_enable()
274 ret = clk_prepare_enable(sdev->ashb_clk); in sprd_dma_enable()
281 clk_disable_unprepare(sdev->clk); in sprd_dma_disable()
286 if (!IS_ERR(sdev->ashb_clk)) in sprd_dma_disable()
287 clk_disable_unprepare(sdev->ashb_clk); in sprd_dma_disable()
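The controller needs a mandatory "enable" clock and, on the AGCP instance only (see the probe comment further down), an optional "ashb_eb" clock; the IS_ERR() check is how the optional clock is skipped when devm_clk_get() failed. A hedged sketch of the enable path, assuming an early return when the main clock cannot be enabled:

static int sprd_dma_enable(struct sprd_dma_dev *sdev)
{
	int ret;

	ret = clk_prepare_enable(sdev->clk);
	if (ret)
		return ret;

	/* The ASHB clock is optional: sdev->ashb_clk may hold an error pointer. */
	if (!IS_ERR(sdev->ashb_clk))
		ret = clk_prepare_enable(sdev->ashb_clk);

	return ret;
}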
292 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan); in sprd_dma_set_uid()
293 u32 dev_id = schan->dev_id; in sprd_dma_set_uid()
299 writel(schan->chn_num + 1, sdev->glb_base + uid_offset); in sprd_dma_set_uid()
305 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan); in sprd_dma_unset_uid()
306 u32 dev_id = schan->dev_id; in sprd_dma_unset_uid()
312 writel(0, sdev->glb_base + uid_offset); in sprd_dma_unset_uid()
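Each hardware request line (UID) is routed to a channel by writing the 1-based channel number into a per-UID slot in the global register block (0x4 * (uid - 1), per the SPRD_DMA_GLB_REQ_UID macro above); writing 0 releases the routing. A sketch assembled from those fragments; the SPRD_DMA_GLB_REQ_UID_OFFSET base is an assumption:

static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		/* One 32-bit slot per UID in the global register block. */
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
	}
}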
342 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan); in sprd_dma_pause_resume()
350 pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE); in sprd_dma_pause_resume()
355 } while (--timeout > 0); in sprd_dma_pause_resume()
358 dev_warn(sdev->dma_dev.dev, in sprd_dma_pause_resume()
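Pausing a channel sets a pause-enable bit and then polls a pause-status bit with a bounded loop, warning on timeout. A rough sketch of the pause half of sprd_dma_pause_resume(); the SPRD_DMA_PAUSE_EN/SPRD_DMA_PAUSE_STS bit names and the cpu_relax() call are assumptions layered on the fragments above:

	sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
			    SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);

	do {
		pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
		if (pause & SPRD_DMA_PAUSE_STS)
			break;

		cpu_relax();
	} while (--timeout > 0);

	if (!timeout)
		dev_warn(sdev->dma_dev.dev,
			 "pause dma controller timeout\n");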
368 u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG); in sprd_dma_stop_and_disable()
381 addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR); in sprd_dma_get_src_addr()
382 addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) & in sprd_dma_get_src_addr()
392 addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR); in sprd_dma_get_dst_addr()
393 addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) & in sprd_dma_get_dst_addr()
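The hardware takes 36-bit addresses: the low 32 bits live in the SRC/DES address registers while the top bits are kept in the WARP_PTR/WARP_TO registers (the same split shows up again when sprd_dma_fill_desc() programs wrap_ptr/wrap_to below). A sketch of how the full source address is reassembled, assuming the usual shift-and-or combination:

static unsigned long sprd_dma_get_src_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	/* Stitch the 32-bit low part together with the high bits from the wrap register. */
	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}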
401 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan); in sprd_dma_get_int_type()
402 u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) & in sprd_dma_get_int_type()
422 dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n"); in sprd_dma_get_int_type()
429 u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN); in sprd_dma_get_req_type()
436 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan); in sprd_dma_set_2stage_config()
437 u32 val, chn = schan->chn_num + 1; in sprd_dma_set_2stage_config()
439 switch (schan->chn_mode) { in sprd_dma_set_2stage_config()
442 val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET; in sprd_dma_set_2stage_config()
444 if (schan->int_type != SPRD_DMA_NO_INT) in sprd_dma_set_2stage_config()
452 val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET; in sprd_dma_set_2stage_config()
454 if (schan->int_type != SPRD_DMA_NO_INT) in sprd_dma_set_2stage_config()
464 if (schan->int_type != SPRD_DMA_NO_INT) in sprd_dma_set_2stage_config()
474 if (schan->int_type != SPRD_DMA_NO_INT) in sprd_dma_set_2stage_config()
481 dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n", in sprd_dma_set_2stage_config()
482 schan->chn_mode); in sprd_dma_set_2stage_config()
483 return -EINVAL; in sprd_dma_set_2stage_config()
491 struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan); in sprd_dma_set_pending()
494 if (schan->dev_id == SPRD_DMA_SOFTWARE_UID) in sprd_dma_set_pending()
498 req_id = schan->dev_id - 1; in sprd_dma_set_pending()
505 val = BIT(req_id - 32); in sprd_dma_set_pending()
514 struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw; in sprd_dma_set_chn_config()
516 writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE); in sprd_dma_set_chn_config()
517 writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG); in sprd_dma_set_chn_config()
518 writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC); in sprd_dma_set_chn_config()
519 writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR); in sprd_dma_set_chn_config()
520 writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR); in sprd_dma_set_chn_config()
521 writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN); in sprd_dma_set_chn_config()
522 writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN); in sprd_dma_set_chn_config()
523 writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN); in sprd_dma_set_chn_config()
524 writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP); in sprd_dma_set_chn_config()
525 writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR); in sprd_dma_set_chn_config()
526 writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO); in sprd_dma_set_chn_config()
527 writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR); in sprd_dma_set_chn_config()
528 writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP); in sprd_dma_set_chn_config()
529 writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP); in sprd_dma_set_chn_config()
530 writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP); in sprd_dma_set_chn_config()
531 writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ); in sprd_dma_set_chn_config()
536 struct virt_dma_desc *vd = vchan_next_desc(&schan->vc); in sprd_dma_start()
541 list_del(&vd->node); in sprd_dma_start()
542 schan->cur_desc = to_sprd_dma_desc(vd); in sprd_dma_start()
545 * Set 2-stage configuration if the channel starts one 2-stage in sprd_dma_start()
548 if (schan->chn_mode && sprd_dma_set_2stage_config(schan)) in sprd_dma_start()
555 sprd_dma_set_chn_config(schan, schan->cur_desc); in sprd_dma_start()
560 if (schan->dev_id == SPRD_DMA_SOFTWARE_UID && in sprd_dma_start()
561 schan->chn_mode != SPRD_DMA_DST_CHN0 && in sprd_dma_start()
562 schan->chn_mode != SPRD_DMA_DST_CHN1) in sprd_dma_start()
572 schan->cur_desc = NULL; in sprd_dma_stop()
590 u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS); in dma_irq_handle()
600 irq_status &= (irq_status - 1); in dma_irq_handle()
601 schan = &sdev->channels[i]; in dma_irq_handle()
603 spin_lock(&schan->vc.lock); in dma_irq_handle()
605 sdesc = schan->cur_desc; in dma_irq_handle()
607 spin_unlock(&schan->vc.lock); in dma_irq_handle()
616 cyclic = schan->linklist.phy_addr ? true : false; in dma_irq_handle()
618 vchan_cyclic_callback(&sdesc->vd); in dma_irq_handle()
623 vchan_cookie_complete(&sdesc->vd); in dma_irq_handle()
624 schan->cur_desc = NULL; in dma_irq_handle()
628 spin_unlock(&schan->vc.lock); in dma_irq_handle()
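The interrupt handler reads the masked status once and then walks it bit by bit; irq_status &= (irq_status - 1) clears the lowest set bit each time around. A skeleton of that loop, consistent with the lines above but with the __ffs() call and the cyclic-versus-completion split stated as assumptions:

	while (irq_status) {
		u32 i = __ffs(irq_status);

		irq_status &= (irq_status - 1);		/* drop the bit being handled */
		schan = &sdev->channels[i];

		spin_lock(&schan->vc.lock);
		sdesc = schan->cur_desc;
		if (!sdesc) {
			spin_unlock(&schan->vc.lock);
			continue;
		}

		/* Link-list (cyclic) descriptors get a callback; one-shot ones complete. */
		cyclic = schan->linklist.phy_addr ? true : false;
		if (cyclic) {
			vchan_cyclic_callback(&sdesc->vd);
		} else {
			vchan_cookie_complete(&sdesc->vd);
			schan->cur_desc = NULL;
		}

		spin_unlock(&schan->vc.lock);
	}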
636 return pm_runtime_get_sync(chan->device->dev); in sprd_dma_alloc_chan_resources()
645 spin_lock_irqsave(&schan->vc.lock, flags); in sprd_dma_free_chan_resources()
646 if (schan->cur_desc) in sprd_dma_free_chan_resources()
647 cur_vd = &schan->cur_desc->vd; in sprd_dma_free_chan_resources()
650 spin_unlock_irqrestore(&schan->vc.lock, flags); in sprd_dma_free_chan_resources()
655 vchan_free_chan_resources(&schan->vc); in sprd_dma_free_chan_resources()
656 pm_runtime_put(chan->device->dev); in sprd_dma_free_chan_resources()
673 spin_lock_irqsave(&schan->vc.lock, flags); in sprd_dma_tx_status()
674 vd = vchan_find_desc(&schan->vc, cookie); in sprd_dma_tx_status()
677 struct sprd_dma_chn_hw *hw = &sdesc->chn_hw; in sprd_dma_tx_status()
679 if (hw->trsc_len > 0) in sprd_dma_tx_status()
680 pos = hw->trsc_len; in sprd_dma_tx_status()
681 else if (hw->blk_len > 0) in sprd_dma_tx_status()
682 pos = hw->blk_len; in sprd_dma_tx_status()
683 else if (hw->frg_len > 0) in sprd_dma_tx_status()
684 pos = hw->frg_len; in sprd_dma_tx_status()
687 } else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) { in sprd_dma_tx_status()
688 struct sprd_dma_desc *sdesc = schan->cur_desc; in sprd_dma_tx_status()
690 if (sdesc->dir == DMA_DEV_TO_MEM) in sprd_dma_tx_status()
697 spin_unlock_irqrestore(&schan->vc.lock, flags); in sprd_dma_tx_status()
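For a descriptor that has not started yet, the residue is whichever configured length is populated, checked from coarsest to finest (transaction, block, fragment); for the in-flight descriptor the driver instead derives it from the current hardware address. Either way the value ends up in dma_set_residue(). A hypothetical helper that captures the priority used above:

/* Hypothetical helper, not part of the driver: mirrors the residue priority in
 * sprd_dma_tx_status() for a descriptor that has not been started yet.
 */
static u32 sprd_dma_desc_residue(struct sprd_dma_chn_hw *hw)
{
	if (hw->trsc_len > 0)
		return hw->trsc_len;
	if (hw->blk_len > 0)
		return hw->blk_len;
	if (hw->frg_len > 0)
		return hw->frg_len;

	return 0;
}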
708 spin_lock_irqsave(&schan->vc.lock, flags); in sprd_dma_issue_pending()
709 if (vchan_issue_pending(&schan->vc) && !schan->cur_desc) in sprd_dma_issue_pending()
711 spin_unlock_irqrestore(&schan->vc.lock, flags); in sprd_dma_issue_pending()
721 return ffs(buswidth) - 1; in sprd_dma_get_datawidth()
724 return -EINVAL; in sprd_dma_get_datawidth()
738 return -EINVAL; in sprd_dma_get_step()
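Both lookups accept only the 1/2/4/8-byte bus widths. The datawidth is the log2 of the width (ffs(buswidth) - 1, so 1 byte maps to 0 and 8 bytes to 3), while the address step is simply the width itself. A sketch of the two helpers, assuming the usual switch over enum dma_slave_buswidth:

static int sprd_dma_get_datawidth(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(buswidth) - 1;	/* encode the width as log2 */
	default:
		return -EINVAL;
	}
}

static int sprd_dma_get_step(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return buswidth;		/* address advances by one bus word */
	default:
		return -EINVAL;
	}
}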
752 enum sprd_dma_chn_mode chn_mode = schan->chn_mode; in sprd_dma_fill_desc()
760 src_step = sprd_dma_get_step(slave_cfg->src_addr_width); in sprd_dma_fill_desc()
762 dev_err(sdev->dma_dev.dev, "invalid source step\n"); in sprd_dma_fill_desc()
767 * For 2-stage transfer, destination channel step cannot be 0, in sprd_dma_fill_desc()
776 dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width); in sprd_dma_fill_desc()
778 dev_err(sdev->dma_dev.dev, "invalid destination step\n"); in sprd_dma_fill_desc()
784 src_datawidth = sprd_dma_get_datawidth(slave_cfg->src_addr_width); in sprd_dma_fill_desc()
786 dev_err(sdev->dma_dev.dev, "invalid source datawidth\n"); in sprd_dma_fill_desc()
790 dst_datawidth = sprd_dma_get_datawidth(slave_cfg->dst_addr_width); in sprd_dma_fill_desc()
792 dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n"); in sprd_dma_fill_desc()
796 hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET; in sprd_dma_fill_desc()
802 hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK; in sprd_dma_fill_desc()
803 hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK; in sprd_dma_fill_desc()
804 hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK; in sprd_dma_fill_desc()
805 hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK; in sprd_dma_fill_desc()
822 hw->intc = int_mode | SPRD_DMA_CFG_ERR_INT_EN; in sprd_dma_fill_desc()
829 temp |= schan->linklist.wrap_addr ? in sprd_dma_fill_desc()
831 temp |= slave_cfg->src_maxburst & SPRD_DMA_FRG_LEN_MASK; in sprd_dma_fill_desc()
832 hw->frg_len = temp; in sprd_dma_fill_desc()
834 hw->blk_len = slave_cfg->src_maxburst & SPRD_DMA_BLK_LEN_MASK; in sprd_dma_fill_desc()
835 hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK; in sprd_dma_fill_desc()
839 hw->trsf_step = temp; in sprd_dma_fill_desc()
841 /* link-list configuration */ in sprd_dma_fill_desc()
842 if (schan->linklist.phy_addr) { in sprd_dma_fill_desc()
843 hw->cfg |= SPRD_DMA_LINKLIST_EN; in sprd_dma_fill_desc()
845 /* link-list index */ in sprd_dma_fill_desc()
848 /* Next link-list configuration's physical address offset */ in sprd_dma_fill_desc()
851 * Set the link-list pointer point to next link-list in sprd_dma_fill_desc()
854 llist_ptr = schan->linklist.phy_addr + temp; in sprd_dma_fill_desc()
855 hw->llist_ptr = lower_32_bits(llist_ptr); in sprd_dma_fill_desc()
856 hw->src_blk_step = (upper_32_bits(llist_ptr) << SPRD_DMA_LLIST_HIGH_SHIFT) & in sprd_dma_fill_desc()
859 if (schan->linklist.wrap_addr) { in sprd_dma_fill_desc()
860 hw->wrap_ptr |= schan->linklist.wrap_addr & in sprd_dma_fill_desc()
862 hw->wrap_to |= dst & SPRD_DMA_WRAP_ADDR_MASK; in sprd_dma_fill_desc()
865 hw->llist_ptr = 0; in sprd_dma_fill_desc()
866 hw->src_blk_step = 0; in sprd_dma_fill_desc()
869 hw->frg_step = 0; in sprd_dma_fill_desc()
870 hw->des_blk_step = 0; in sprd_dma_fill_desc()
884 if (!schan->linklist.virt_addr) in sprd_dma_fill_linklist_desc()
885 return -EINVAL; in sprd_dma_fill_linklist_desc()
887 hw = (struct sprd_dma_chn_hw *)(schan->linklist.virt_addr + in sprd_dma_fill_linklist_desc()
908 hw = &sdesc->chn_hw; in sprd_dma_prep_dma_memcpy()
910 hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET; in sprd_dma_prep_dma_memcpy()
911 hw->intc = SPRD_DMA_TRANS_INT | SPRD_DMA_CFG_ERR_INT_EN; in sprd_dma_prep_dma_memcpy()
912 hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK; in sprd_dma_prep_dma_memcpy()
913 hw->des_addr = dest & SPRD_DMA_LOW_ADDR_MASK; in sprd_dma_prep_dma_memcpy()
914 hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & in sprd_dma_prep_dma_memcpy()
916 hw->wrap_to = (dest >> SPRD_DMA_HIGH_ADDR_OFFSET) & in sprd_dma_prep_dma_memcpy()
937 hw->frg_len = temp; in sprd_dma_prep_dma_memcpy()
939 hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK; in sprd_dma_prep_dma_memcpy()
940 hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK; in sprd_dma_prep_dma_memcpy()
944 hw->trsf_step = temp; in sprd_dma_prep_dma_memcpy()
946 return vchan_tx_prep(&schan->vc, &sdesc->vd, flags); in sprd_dma_prep_dma_memcpy()
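Since the probe code below sets DMA_MEMCPY in the capability mask, a client can drive this path through the generic dmaengine memcpy API. A hedged consumer-side sketch using only standard dmaengine calls; dst_dma, src_dma and len are placeholders for DMA-mapped buffers owned by the caller:

	dma_cap_mask_t mask;
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *tx;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);

	chan = dma_request_chan_by_mask(&mask);
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_dma_memcpy(chan, dst_dma, src_dma, len,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx) {
		dma_release_channel(chan);
		return -ENOMEM;
	}

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);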
955 struct dma_slave_config *slave_cfg = &schan->slave_cfg; in sprd_dma_prep_slave_sg()
970 schan->linklist.phy_addr = ll_cfg->phy_addr; in sprd_dma_prep_slave_sg()
971 schan->linklist.virt_addr = ll_cfg->virt_addr; in sprd_dma_prep_slave_sg()
972 schan->linklist.wrap_addr = ll_cfg->wrap_addr; in sprd_dma_prep_slave_sg()
974 schan->linklist.phy_addr = 0; in sprd_dma_prep_slave_sg()
975 schan->linklist.virt_addr = 0; in sprd_dma_prep_slave_sg()
976 schan->linklist.wrap_addr = 0; in sprd_dma_prep_slave_sg()
980 * Set channel mode, interrupt mode and trigger mode for 2-stage in sprd_dma_prep_slave_sg()
983 schan->chn_mode = in sprd_dma_prep_slave_sg()
985 schan->trg_mode = in sprd_dma_prep_slave_sg()
987 schan->int_type = flags & SPRD_DMA_INT_TYPE_MASK; in sprd_dma_prep_slave_sg()
993 sdesc->dir = dir; in sprd_dma_prep_slave_sg()
1000 dst = slave_cfg->dst_addr; in sprd_dma_prep_slave_sg()
1002 src = slave_cfg->src_addr; in sprd_dma_prep_slave_sg()
1012 * The link-list mode needs at least 2 link-list in sprd_dma_prep_slave_sg()
1014 * need to fill the link-list configuration. in sprd_dma_prep_slave_sg()
1027 ret = sprd_dma_fill_desc(chan, &sdesc->chn_hw, 0, 0, start_src, in sprd_dma_prep_slave_sg()
1034 return vchan_tx_prep(&schan->vc, &sdesc->vd, flags); in sprd_dma_prep_slave_sg()
1041 struct dma_slave_config *slave_cfg = &schan->slave_cfg; in sprd_dma_slave_config()
1052 spin_lock_irqsave(&schan->vc.lock, flags); in sprd_dma_pause()
1054 spin_unlock_irqrestore(&schan->vc.lock, flags); in sprd_dma_pause()
1064 spin_lock_irqsave(&schan->vc.lock, flags); in sprd_dma_resume()
1066 spin_unlock_irqrestore(&schan->vc.lock, flags); in sprd_dma_resume()
1078 spin_lock_irqsave(&schan->vc.lock, flags); in sprd_dma_terminate_all()
1079 if (schan->cur_desc) in sprd_dma_terminate_all()
1080 cur_vd = &schan->cur_desc->vd; in sprd_dma_terminate_all()
1084 vchan_get_all_descriptors(&schan->vc, &head); in sprd_dma_terminate_all()
1085 spin_unlock_irqrestore(&schan->vc.lock, flags); in sprd_dma_terminate_all()
1090 vchan_dma_desc_free_list(&schan->vc, &head); in sprd_dma_terminate_all()
1106 schan->dev_id = slave_id; in sprd_dma_filter_fn()
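On the slave side, schan->dev_id is the hardware request UID that sprd_dma_set_uid() programs into the global routing registers. Clients normally obtain a channel through the standard DT dma-names lookup and describe the peripheral with dma_slave_config. A hedged consumer sketch built only from generic dmaengine calls; the "tx" channel name, fifo_phys_addr, buf_dma and len are placeholders:

	struct dma_chan *chan;
	struct dma_slave_config cfg = { };
	struct dma_async_tx_descriptor *tx;

	chan = dma_request_chan(dev, "tx");	/* name from the client's dma-names property */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	cfg.dst_addr = fifo_phys_addr;		/* peripheral FIFO address */
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	cfg.dst_maxburst = 16;
	dmaengine_slave_config(chan, &cfg);

	tx = dmaengine_prep_slave_single(chan, buf_dma, len, DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT);
	if (!tx) {
		dma_release_channel(chan);
		return -ENOMEM;
	}

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);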
1112 struct device_node *np = pdev->dev.of_node; in sprd_dma_probe()
1118 ret = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(36)); in sprd_dma_probe()
1120 ret = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32)); in sprd_dma_probe()
1122 dev_err(&pdev->dev, "unable to set coherent mask to 32\n"); in sprd_dma_probe()
1127 /* Parse new and deprecated dma-channels properties */ in sprd_dma_probe()
1128 ret = device_property_read_u32(&pdev->dev, "dma-channels", &chn_count); in sprd_dma_probe()
1130 ret = device_property_read_u32(&pdev->dev, "#dma-channels", in sprd_dma_probe()
1133 dev_err(&pdev->dev, "get dma channels count failed\n"); in sprd_dma_probe()
1137 sdev = devm_kzalloc(&pdev->dev, in sprd_dma_probe()
1141 return -ENOMEM; in sprd_dma_probe()
1143 sdev->clk = devm_clk_get(&pdev->dev, "enable"); in sprd_dma_probe()
1144 if (IS_ERR(sdev->clk)) { in sprd_dma_probe()
1145 dev_err(&pdev->dev, "get enable clock failed\n"); in sprd_dma_probe()
1146 return PTR_ERR(sdev->clk); in sprd_dma_probe()
1150 sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb"); in sprd_dma_probe()
1151 if (IS_ERR(sdev->ashb_clk)) in sprd_dma_probe()
1152 dev_warn(&pdev->dev, "no optional ashb eb clock\n"); in sprd_dma_probe()
1155 * We have three DMA controllers: AP DMA, AON DMA and AGCP DMA. For AGCP in sprd_dma_probe()
1161 sdev->irq = platform_get_irq(pdev, 0); in sprd_dma_probe()
1162 if (sdev->irq > 0) { in sprd_dma_probe()
1163 ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle, in sprd_dma_probe()
1166 dev_err(&pdev->dev, "request dma irq failed\n"); in sprd_dma_probe()
1170 dev_warn(&pdev->dev, "no interrupts for the dma controller\n"); in sprd_dma_probe()
1173 sdev->glb_base = devm_platform_ioremap_resource(pdev, 0); in sprd_dma_probe()
1174 if (IS_ERR(sdev->glb_base)) in sprd_dma_probe()
1175 return PTR_ERR(sdev->glb_base); in sprd_dma_probe()
1177 dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask); in sprd_dma_probe()
1178 sdev->total_chns = chn_count; in sprd_dma_probe()
1179 INIT_LIST_HEAD(&sdev->dma_dev.channels); in sprd_dma_probe()
1180 INIT_LIST_HEAD(&sdev->dma_dev.global_node); in sprd_dma_probe()
1181 sdev->dma_dev.dev = &pdev->dev; in sprd_dma_probe()
1182 sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources; in sprd_dma_probe()
1183 sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources; in sprd_dma_probe()
1184 sdev->dma_dev.device_tx_status = sprd_dma_tx_status; in sprd_dma_probe()
1185 sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending; in sprd_dma_probe()
1186 sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy; in sprd_dma_probe()
1187 sdev->dma_dev.device_prep_slave_sg = sprd_dma_prep_slave_sg; in sprd_dma_probe()
1188 sdev->dma_dev.device_config = sprd_dma_slave_config; in sprd_dma_probe()
1189 sdev->dma_dev.device_pause = sprd_dma_pause; in sprd_dma_probe()
1190 sdev->dma_dev.device_resume = sprd_dma_resume; in sprd_dma_probe()
1191 sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all; in sprd_dma_probe()
1194 dma_chn = &sdev->channels[i]; in sprd_dma_probe()
1195 dma_chn->chn_num = i; in sprd_dma_probe()
1196 dma_chn->cur_desc = NULL; in sprd_dma_probe()
1198 dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET + in sprd_dma_probe()
1201 dma_chn->vc.desc_free = sprd_dma_free_desc; in sprd_dma_probe()
1202 vchan_init(&dma_chn->vc, &sdev->dma_dev); in sprd_dma_probe()
1210 pm_runtime_set_active(&pdev->dev); in sprd_dma_probe()
1211 pm_runtime_enable(&pdev->dev); in sprd_dma_probe()
1213 ret = pm_runtime_get_sync(&pdev->dev); in sprd_dma_probe()
1217 ret = dma_async_device_register(&sdev->dma_dev); in sprd_dma_probe()
1219 dev_err(&pdev->dev, "register dma device failed:%d\n", ret); in sprd_dma_probe()
1223 sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask; in sprd_dma_probe()
1229 pm_runtime_put(&pdev->dev); in sprd_dma_probe()
1233 dma_async_device_unregister(&sdev->dma_dev); in sprd_dma_probe()
1235 pm_runtime_put_noidle(&pdev->dev); in sprd_dma_probe()
1236 pm_runtime_disable(&pdev->dev); in sprd_dma_probe()
1247 pm_runtime_get_sync(&pdev->dev); in sprd_dma_remove()
1250 if (sdev->irq > 0) in sprd_dma_remove()
1251 devm_free_irq(&pdev->dev, sdev->irq, sdev); in sprd_dma_remove()
1253 list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels, in sprd_dma_remove()
1255 list_del(&c->vc.chan.device_node); in sprd_dma_remove()
1256 tasklet_kill(&c->vc.task); in sprd_dma_remove()
1259 of_dma_controller_free(pdev->dev.of_node); in sprd_dma_remove()
1260 dma_async_device_unregister(&sdev->dma_dev); in sprd_dma_remove()
1263 pm_runtime_put_noidle(&pdev->dev); in sprd_dma_remove()
1264 pm_runtime_disable(&pdev->dev); in sprd_dma_remove()
1268 { .compatible = "sprd,sc9860-dma", },
1288 dev_err(sdev->dma_dev.dev, "enable dma failed\n"); in sprd_dma_runtime_resume()
1303 .name = "sprd-dma",
1314 MODULE_ALIAS("platform:sprd-dma");