// SPDX-License-Identifier: GPL-2.0
// (C) 2017-2018 Synopsys, Inc. (www.synopsys.com)

/*
 * Synopsys DesignWare AXI DMA Controller driver.
 */

#include <linux/dma-mapping.h>
/* ... */
#include <linux/io-64-nonatomic-lo-hi.h>
/* ... */

#include "dw-axi-dmac.h"
#include "../virt-dma.h"

/*
 * The set of bus widths supported by the DMA controller. DW AXI DMAC supports
 * master data bus width up to 512 bits (for both AXI master interfaces), but
 * it depends on IP block configuration.
 */
/* Register I/O helpers */
	iowrite32(val, chip->regs + reg);		/* axi_dma_iowrite32() */
	return ioread32(chip->regs + reg);		/* axi_dma_ioread32() */
	iowrite64(val, chip->regs + reg);		/* axi_dma_iowrite64() */
	return ioread64(chip->regs + reg);		/* axi_dma_ioread64() */
	iowrite32(val, chan->chan_regs + reg);		/* axi_chan_iowrite32() */
	return ioread32(chan->chan_regs + reg);		/* axi_chan_ioread32() */

	/* axi_chan_iowrite64() */
	iowrite32(lower_32_bits(val), chan->chan_regs + reg);
	iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4);
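/*
 * Note: axi_chan_iowrite64() deliberately splits one 64-bit register
 * write into two 32-bit writes (low word first, then high); together
 * with the io-64-nonatomic-lo-hi.h include above, this keeps the driver
 * working on hosts that cannot issue atomic 64-bit MMIO accesses.
 */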
/* in axi_chan_config_write() */
	cfg_lo = (config->dst_multblk_type << CH_CFG_L_DST_MULTBLK_TYPE_POS |
		  config->src_multblk_type << CH_CFG_L_SRC_MULTBLK_TYPE_POS);
	if (chan->chip->dw->hdata->reg_map_8_channels &&
	    !chan->chip->dw->hdata->use_cfg2) {
		cfg_hi = config->tt_fc << CH_CFG_H_TT_FC_POS |
			 config->hs_sel_src << CH_CFG_H_HS_SEL_SRC_POS |
			 config->hs_sel_dst << CH_CFG_H_HS_SEL_DST_POS |
			 config->src_per << CH_CFG_H_SRC_PER_POS |
			 config->dst_per << CH_CFG_H_DST_PER_POS |
			 config->prior << CH_CFG_H_PRIORITY_POS;
	} else {
		cfg_lo |= config->src_per << CH_CFG2_L_SRC_PER_POS |
			  config->dst_per << CH_CFG2_L_DST_PER_POS;
		cfg_hi = config->tt_fc << CH_CFG2_H_TT_FC_POS |
			 config->hs_sel_src << CH_CFG2_H_HS_SEL_SRC_POS |
			 config->hs_sel_dst << CH_CFG2_H_HS_SEL_DST_POS |
			 config->prior << CH_CFG2_H_PRIORITY_POS;
	}
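/*
 * Two channel-config register layouts coexist: IP revisions with the
 * original 8-channel map (and use_cfg2 clear) keep handshake, peripheral
 * and priority fields in CH_CFG_H, while newer revisions use the CFG2
 * layout, which moves the peripheral IDs into the low word. The choice
 * is latched in hdata at probe time.
 */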
/* in axi_chan_disable() */
	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHEN);
		if (chan->id >= DMAC_CHAN_16) {
			val &= ~((u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN_SHIFT + DMAC_CHAN_BLOCK_SHIFT));
			val |= (u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT);
		} else {
			val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT);
			val |= BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		}
		axi_dma_iowrite64(chan->chip, DMAC_CHEN, val);
	} else {
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
		val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT);
		if (chan->chip->dw->hdata->reg_map_8_channels)
			val |= BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
		else
			val |= BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
	}
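/*
 * DMAC_CHEN pairs every channel-enable bit with a write-enable bit: an
 * enable bit is only latched by the hardware if its matching _WE bit is
 * set in the same write, so one channel can be flipped without a racy
 * read-modify-write against the others. Controllers with more than 16
 * channels add a second 32-bit block, hence the 64-bit accesses and the
 * DMAC_CHAN_BLOCK_SHIFT offsets above.
 */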
/* in axi_chan_enable() */
	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHEN);
		if (chan->id >= DMAC_CHAN_16) {
			val |= (u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN_SHIFT + DMAC_CHAN_BLOCK_SHIFT) |
				(u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_EN2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT);
		} else {
			val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		}
		axi_dma_iowrite64(chan->chip, DMAC_CHEN, val);
	} else {
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
		if (chan->chip->dw->hdata->reg_map_8_channels) {
			val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
		} else {
			val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
		}
		axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
	}
/* in axi_chan_is_hw_enable() */
	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16)
		val = axi_dma_ioread64(chan->chip, DMAC_CHEN);
	else
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);

	if (chan->id >= DMAC_CHAN_16)
		return !!(val & ((u64)(BIT(chan->id) >> DMAC_CHAN_16) << DMAC_CHAN_BLOCK_SHIFT));
	else
		return !!(val & (BIT(chan->id) << DMAC_CHAN_EN_SHIFT));
/* in axi_dma_hw_init() */
	for (i = 0; i < chip->dw->hdata->nr_channels; i++) {
		axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
		axi_chan_disable(&chip->dw->chan[i]);
	}
	ret = dma_set_mask_and_coherent(chip->dev, DMA_BIT_MASK(64));
	if (ret)
		dev_warn(chip->dev, "Unable to set coherent mask\n");
static u32 axi_chan_get_xfer_width(struct axi_dma_chan *chan, dma_addr_t src,
				   dma_addr_t dst, size_t len)
{
	u32 max_width = chan->chip->dw->hdata->m_data_width;

	return __ffs(src | dst | len | BIT(max_width));
}
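/*
 * ORing source address, destination address, length and BIT(max_width)
 * and taking __ffs() yields the widest power-of-two transfer width that
 * all three values are aligned to, capped at the master data width. For
 * example, with src = 0x1008, dst = 0x2010, len = 0x40 on a 64-bit
 * master (m_data_width = 3): __ffs(0x1008 | 0x2010 | 0x40 | 0x8) = 3,
 * i.e. 8-byte beats. (Values here are made up for illustration.)
 */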
	return dma_chan_name(&chan->vc.chan);		/* axi_chan_name() */
/* in axi_desc_alloc() */
	desc->hw_desc = kcalloc(num, sizeof(*desc->hw_desc), GFP_NOWAIT);
	if (!desc->hw_desc) {
		kfree(desc);
		return NULL;
	}
	desc->nr_hw_descs = num;
/* in axi_desc_get() */
	lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &phys);
	/* ... */
	atomic_inc(&chan->descs_allocated);
/* in axi_desc_put() */
	struct axi_dma_chan *chan = desc->chan;
	int count = desc->nr_hw_descs;
	/* ... */
	for (descs_put = 0; descs_put < count; descs_put++) {
		hw_desc = &desc->hw_desc[descs_put];
		dma_pool_free(chan->desc_pool, hw_desc->lli, hw_desc->llp);
	}

	kfree(desc->hw_desc);
	kfree(desc);
	atomic_sub(descs_put, &chan->descs_allocated);
	dev_vdbg(chan2dev(chan), "%s: %d descs put, %d still allocated\n",
		 axi_chan_name(chan), descs_put,
		 atomic_read(&chan->descs_allocated));
/* in dma_chan_tx_status() */
	u32 len;
	/* ... */
	spin_lock_irqsave(&chan->vc.lock, flags);

	vdesc = vchan_find_desc(&chan->vc, cookie);
	if (vdesc) {
		length = vd_to_axi_desc(vdesc)->length;
		completed_blocks = vd_to_axi_desc(vdesc)->completed_blocks;
		len = vd_to_axi_desc(vdesc)->hw_desc[0].len;
		completed_length = completed_blocks * len;
		bytes = length - completed_length;
	}

	spin_unlock_irqrestore(&chan->vc.lock, flags);
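/*
 * Residue is tracked at block granularity: completed_blocks counts fully
 * finished hw descriptors, each assumed to be hw_desc[0].len bytes long
 * (exact for cyclic transfers, where all periods are equal), so progress
 * inside the currently running block is not reflected.
 */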
	desc->lli->llp = cpu_to_le64(adr);		/* write_desc_llp() */
/* in dw_axi_dma_set_byte_halfword() */
	if (!chan->chip->apb_regs) {
		dev_dbg(chan->chip->dev, "apb_regs not initialized\n");
		return;
	}

	reg_width = __ffs(chan->config.dst_addr_width);
	/* ... */
	val = ioread32(chan->chip->apb_regs + offset);

	if (set)
		val |= BIT(chan->id);
	else
		val &= ~BIT(chan->id);

	iowrite32(val, chan->chip->apb_regs + offset);
/* in axi_chan_block_xfer_start() */
	u32 priority = chan->chip->dw->hdata->priority[chan->id];
	/* ... */
	if (unlikely(axi_chan_is_hw_enable(chan))) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return;
	}

	axi_dma_enable(chan->chip);
	/* ... */
	switch (chan->direction) {
	case DMA_MEM_TO_DEV:
		dw_axi_dma_set_byte_halfword(chan, true);
		config.tt_fc = chan->config.device_fc ?
				DWAXIDMAC_TT_FC_MEM_TO_PER_DST :
				DWAXIDMAC_TT_FC_MEM_TO_PER_DMAC;
		if (chan->chip->apb_regs)
			config.dst_per = chan->id;
		else
			config.dst_per = chan->hw_handshake_num;
		break;
	case DMA_DEV_TO_MEM:
		config.tt_fc = chan->config.device_fc ?
				DWAXIDMAC_TT_FC_PER_TO_MEM_SRC :
				DWAXIDMAC_TT_FC_PER_TO_MEM_DMAC;
		if (chan->chip->apb_regs)
			config.src_per = chan->id;
		else
			config.src_per = chan->hw_handshake_num;
		break;
	default:
		break;
	}
	axi_chan_config_write(chan, &config);

	write_chan_llp(chan, first->hw_desc[0].llp | lms);
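/*
 * Programming order for a transfer: build the channel config (tt_fc
 * selects transfer type/flow controller; device_fc lets the peripheral
 * act as flow controller), point CH_LLP at the first hw descriptor with
 * lms choosing the AXI master used for LLI fetches, then enable the
 * channel.
 */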
/* in axi_chan_start_first_queued() */
	vd = vchan_next_desc(&chan->vc);
	/* ... */
	dev_vdbg(chan2dev(chan), "%s: started %u\n", axi_chan_name(chan),
		 vd->tx.cookie);
/* in dma_chan_issue_pending() */
	spin_lock_irqsave(&chan->vc.lock, flags);
	if (vchan_issue_pending(&chan->vc))
		axi_chan_start_first_queued(chan);
	spin_unlock_irqrestore(&chan->vc.lock, flags);
	vchan_synchronize(&chan->vc);			/* dw_axi_dma_synchronize() */
/* in dma_chan_alloc_chan_resources() */
	if (axi_chan_is_hw_enable(chan)) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return -EBUSY;
	}

	/* LLI address must be aligned to a 64-byte boundary */
	chan->desc_pool = dma_pool_create(dev_name(chan2dev(chan)),
					  chan->chip->dev,
					  sizeof(struct axi_dma_lli),
					  64, 0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "No memory for descriptors\n");
		return -ENOMEM;
	}
	/* ... */
	pm_runtime_get(chan->chip->dev);
/* in dma_chan_free_chan_resources() */
	if (axi_chan_is_hw_enable(chan))
		dev_err(dchan2dev(dchan), "%s is non-idle!\n",
			axi_chan_name(chan));
	/* ... */
	vchan_free_chan_resources(&chan->vc);

	dma_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
	dev_vdbg(dchan2dev(dchan),
		 "%s: free resources, descriptor still allocated: %u\n",
		 axi_chan_name(chan), atomic_read(&chan->descs_allocated));

	pm_runtime_put(chan->chip->dev);
/* in dw_axi_dma_set_hw_channel() */
	struct axi_dma_chip *chip = chan->chip;
	/* ... */
	if (!chip->apb_regs) {
		dev_err(chip->dev, "apb_regs not initialized\n");
		return;
	}
	/* ... */
	if (set)
		val = chan->hw_handshake_num;
	else
		val = UNUSED_CHANNEL;

	reg_value = lo_hi_readq(chip->apb_regs + DMAC_APB_HW_HS_SEL_0);
	/* ... */
	reg_value &= ~(DMA_APB_HS_SEL_MASK <<
			(chan->id * DMA_APB_HS_SEL_BIT_SIZE));
	reg_value |= (val << (chan->id * DMA_APB_HS_SEL_BIT_SIZE));
	lo_hi_writeq(reg_value, chip->apb_regs + DMAC_APB_HW_HS_SEL_0);
/* in set_desc_last() */
	val = le32_to_cpu(desc->lli->ctl_hi);
	val |= CH_CTL_H_LLI_LAST;
	desc->lli->ctl_hi = cpu_to_le32(val);

/* in write_desc_sar() */
	desc->lli->sar = cpu_to_le64(adr);

/* in write_desc_dar() */
	desc->lli->dar = cpu_to_le64(adr);

/* in set_desc_src_master(): select AXI0 for the source master */
	val = le32_to_cpu(desc->lli->ctl_lo);
	val &= ~CH_CTL_L_SRC_MAST;
	desc->lli->ctl_lo = cpu_to_le32(val);

/* in set_desc_dest_master(): select AXI1 for the dest master if available */
	val = le32_to_cpu(hw_desc->lli->ctl_lo);
	if (desc->chan->chip->dw->hdata->nr_masters > 1)
		val |= CH_CTL_L_DST_MAST;
	else
		val &= ~CH_CTL_L_DST_MAST;
	hw_desc->lli->ctl_lo = cpu_to_le32(val);
static int dw_axi_dma_set_hw_desc(struct axi_dma_chan *chan,
				  struct axi_dma_hw_desc *hw_desc,
				  dma_addr_t mem_addr, size_t len)
{
	unsigned int data_width = BIT(chan->chip->dw->hdata->m_data_width);
	/* ... */
	axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];

	mem_width = __ffs(data_width | mem_addr | len);
	/* ... */
	if (!IS_ALIGNED(mem_addr, 4)) {
		dev_err(chan->chip->dev, "invalid buffer alignment\n");
		return -EINVAL;
	}

	switch (chan->direction) {
	case DMA_MEM_TO_DEV:
		reg_width = __ffs(chan->config.dst_addr_width);
		device_addr = chan->config.dst_addr;
		/* ... */
		block_ts = len >> mem_width;
		break;
	case DMA_DEV_TO_MEM:
		reg_width = __ffs(chan->config.src_addr_width);
		device_addr = chan->config.src_addr;
		/* ... */
		block_ts = len >> reg_width;
		break;
	default:
		return -EINVAL;
	}

	if (block_ts > axi_block_ts)
		return -EINVAL;

	hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
	if (unlikely(!hw_desc->lli))
		return -ENOMEM;
	/* ... */
	if (chan->chip->dw->hdata->restrict_axi_burst_len) {
		burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
		/* ... cap AXI AR/AW burst lengths to burst_len ... */
	}

	hw_desc->lli->ctl_hi = cpu_to_le32(ctlhi);

	if (chan->direction == DMA_MEM_TO_DEV) {
		write_desc_sar(hw_desc, mem_addr);
		write_desc_dar(hw_desc, device_addr);
	} else {
		write_desc_sar(hw_desc, device_addr);
		write_desc_dar(hw_desc, mem_addr);
	}

	hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);
	/* ... */
	hw_desc->lli->ctl_lo = cpu_to_le32(ctllo);
	/* ... */
	hw_desc->len = len;
	return 0;
}
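/*
 * block_ts is the block size in beats: the byte count shifted down by
 * the per-beat width (mem_width on the memory side, reg_width on the
 * register side), and the hardware expects it programmed as
 * block_ts - 1. Each channel also has a hard per-channel limit taken
 * from the snps,block-size DT property, hence the -EINVAL above.
 */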
/* in calculate_block_len() */
	axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];
	/* ... */
	data_width = BIT(chan->chip->dw->hdata->m_data_width);	/* DMA_MEM_TO_DEV case */
	/* ... */
	reg_width = __ffs(chan->config.src_addr_width);		/* DMA_DEV_TO_MEM case */
/* in dw_axi_dma_chan_prep_cyclic() */
	chan->direction = direction;
	desc->chan = chan;
	chan->cyclic = true;
	desc->length = 0;
	desc->period_len = period_len;

	for (i = 0; i < total_segments; i++) {
		hw_desc = &desc->hw_desc[i];
		/* ... */
		desc->length += hw_desc->len;
		/*
		 * Set end-of-link to the linked descriptor, so that the
		 * cyclic callback function can be triggered during interrupt.
		 */
		set_desc_last(hw_desc);
		/* ... */
	}

	llp = desc->hw_desc[0].llp;

	do {
		hw_desc = &desc->hw_desc[--total_segments];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (total_segments);
	/* ... */
	return vchan_tx_prep(&chan->vc, &desc->vd, flags);
/* in dw_axi_dma_chan_prep_slave_sg() */
	u32 len, num_sgs = 0;
	/* ... */
	len = sg_dma_len(sgl);

	axi_block_len = calculate_block_len(chan, mem, len, direction);
	/* ... */
	desc->chan = chan;
	desc->length = 0;
	chan->direction = direction;

	for_each_sg(sgl, sg, sg_len, i) {
		mem = sg_dma_address(sg);
		len = sg_dma_len(sg);
		/* ... */
		do {
			hw_desc = &desc->hw_desc[loop++];
			/* ... */
			desc->length += hw_desc->len;
			len -= segment_len;
			mem += segment_len;
		} while (len >= segment_len);
	}

	/* Set end-of-link to the last link descriptor of list */
	set_desc_last(&desc->hw_desc[num_sgs - 1]);

	do {
		hw_desc = &desc->hw_desc[--num_sgs];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (num_sgs);
	/* ... */
	return vchan_tx_prep(&chan->vc, &desc->vd, flags);
static struct dma_async_tx_descriptor *
dma_chan_prep_dma_memcpy(struct dma_chan *dchan, dma_addr_t dst_adr,
			 dma_addr_t src_adr, size_t len, unsigned long flags)
{
	/* ... */
	dev_dbg(chan2dev(chan), "%s: memcpy: src: %pad dst: %pad length: %zd flags: %#lx",
		axi_chan_name(chan), &src_adr, &dst_adr, len, flags);

	max_block_ts = chan->chip->dw->hdata->block_size[chan->id];
	xfer_width = axi_chan_get_xfer_width(chan, src_adr, dst_adr, len);
	num = DIV_ROUND_UP(len, max_block_ts << xfer_width);
	desc = axi_desc_alloc(num);
	/* ... */
	desc->chan = chan;
	num = 0;
	desc->length = 0;
	while (len) {
		xfer_len = len;

		hw_desc = &desc->hw_desc[num];
		/*
		 * block_ts indicates the total number of data of width
		 * xfer_width to be transferred in a DMA block transfer;
		 * the BLOCK_TS register should be set to block_ts - 1.
		 */
		block_ts = xfer_len >> xfer_width;
		if (block_ts > max_block_ts) {
			block_ts = max_block_ts;
			xfer_len = max_block_ts << xfer_width;
		}

		hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
		if (unlikely(!hw_desc->lli))
			goto err_desc_get;
		/* ... */
		hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);

		if (chan->chip->dw->hdata->restrict_axi_burst_len) {
			u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
			/* ... cap AXI AR/AW burst lengths to burst_len ... */
		}
		hw_desc->lli->ctl_hi = cpu_to_le32(reg);
		/* ... */
		hw_desc->lli->ctl_lo = cpu_to_le32(reg);
		/* ... */
		hw_desc->len = xfer_len;
		desc->length += hw_desc->len;
		/* Update the length and addresses for the next loop cycle */
		len -= xfer_len;
		dst_adr += xfer_len;
		src_adr += xfer_len;
		num++;
	}
	/* ... */
	/* Set end-of-link to the last link descriptor of list */
	set_desc_last(&desc->hw_desc[num - 1]);

	do {
		hw_desc = &desc->hw_desc[--num];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (num);

	return vchan_tx_prep(&chan->vc, &desc->vd, flags);
}
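/*
 * A single memcpy is thus split into as many hw descriptors as needed so
 * that no block exceeds max_block_ts beats at the chosen transfer width;
 * the descriptors are then chained backwards through their llp pointers,
 * with the final one marked via set_desc_last().
 */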
	memcpy(&chan->config, config, sizeof(*config));	/* dw_axi_dma_chan_slave_config() */
/* in axi_chan_dump_lli() */
	if (!desc->lli) {
		dev_err(dchan2dev(&chan->vc.chan), "NULL LLI\n");
		return;
	}

	dev_err(dchan2dev(&chan->vc.chan),
		"SAR: 0x%llx DAR: 0x%llx LLP: 0x%llx BTS 0x%x CTL: 0x%x:%08x",
		le64_to_cpu(desc->lli->sar),
		le64_to_cpu(desc->lli->dar),
		le64_to_cpu(desc->lli->llp),
		le32_to_cpu(desc->lli->block_ts_lo),
		le32_to_cpu(desc->lli->ctl_hi),
		le32_to_cpu(desc->lli->ctl_lo));
/* in axi_chan_list_dump_lli() */
	int count = atomic_read(&chan->descs_allocated);
	int i;

	for (i = 0; i < count; i++)
		axi_chan_dump_lli(chan, &desc_head->hw_desc[i]);
/* in axi_chan_handle_err() */
	spin_lock_irqsave(&chan->vc.lock, flags);
	/* ... */
	/* The bad descriptor currently is in the head of vc list */
	vd = vchan_next_desc(&chan->vc);
	/* ... */
	/* Remove the completed descriptor from issued list */
	list_del(&vd->node);

	/* WARN about bad descriptor */
	dev_err(chan2dev(chan),
		"Bad descriptor submitted for %s, cookie: %d, irq: 0x%08x\n",
		axi_chan_name(chan), vd->tx.cookie, status);
	/* ... */
	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* in axi_chan_block_xfer_complete() */
	int count = atomic_read(&chan->descs_allocated);
	/* ... */
	spin_lock_irqsave(&chan->vc.lock, flags);
	/* ... */
	/* The completed descriptor currently is in the head of vc list */
	vd = vchan_next_desc(&chan->vc);
	/* ... */
	if (chan->cyclic) {
		desc = vd_to_axi_desc(vd);
		llp = lo_hi_readq(chan->chan_regs + CH_LLP);
		for (i = 0; i < count; i++) {
			hw_desc = &desc->hw_desc[i];
			if (hw_desc->llp == llp) {
				axi_chan_irq_clear(chan, hw_desc->lli->status_lo);
				hw_desc->lli->ctl_hi |= CH_CTL_H_LLI_VALID;
				desc->completed_blocks = i;

				if (((hw_desc->len * (i + 1)) % desc->period_len) == 0)
					vchan_cyclic_callback(vd);
				break;
			}
		}
		/* ... */
	} else {
		/* Remove the completed descriptor from issued list */
		list_del(&vd->node);
		vchan_cookie_complete(vd);
	}

	spin_unlock_irqrestore(&chan->vc.lock, flags);
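/*
 * Cyclic transfers are never torn down on a transfer-done interrupt:
 * instead the driver reads CH_LLP back to locate the block the hardware
 * has reached, re-arms that block's LLI_VALID bit, records the position
 * in completed_blocks, and fires the cyclic callback once per elapsed
 * period.
 */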
/* in dw_axi_dma_interrupt() */
	struct dw_axi_dma *dw = chip->dw;
	/* ... */
	/* Poll, clear and process every channel interrupt status */
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		chan = &dw->chan[i];
		/* ... */
		dev_vdbg(chip->dev, "%s %u IRQ status: 0x%08x\n",
			 axi_chan_name(chan), i, status);
		/* ... */
	}

	/* Re-enable interrupts */
	axi_dma_irq_enable(chip);
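/*
 * The handler first masks controller interrupts, then walks every
 * channel, reading and clearing its status: error bits are routed to
 * axi_chan_handle_err(), transfer-done (DMA_TRF) bits to
 * axi_chan_block_xfer_complete(), and interrupts are re-enabled last.
 */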
/* in dma_chan_terminate_all() */
	u32 chan_active = BIT(chan->id) << DMAC_CHAN_EN_SHIFT;
	/* ... */
	axi_chan_disable(chan);

	ret = readl_poll_timeout_atomic(chan->chip->regs + DMAC_CHEN, val,
					!(val & chan_active), 1000, 50000);
	if (ret == -ETIMEDOUT)
		dev_warn(dchan2dev(dchan),
			 "%s failed to stop\n", axi_chan_name(chan));

	if (chan->direction != DMA_MEM_TO_MEM)
		dw_axi_dma_set_hw_channel(chan, false);
	if (chan->direction == DMA_MEM_TO_DEV)
		dw_axi_dma_set_byte_halfword(chan, false);

	spin_lock_irqsave(&chan->vc.lock, flags);

	vchan_get_all_descriptors(&chan->vc, &head);

	chan->cyclic = false;
	spin_unlock_irqrestore(&chan->vc.lock, flags);

	vchan_dma_desc_free_list(&chan->vc, &head);
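/*
 * terminate_all runs in atomic context, so the channel disable is
 * confirmed by polling the enable bit in DMAC_CHEN with
 * readl_poll_timeout_atomic(); only then are the APB handshake and
 * byte/halfword write enables released and all queued virt-dma
 * descriptors reclaimed.
 */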
/* in dma_chan_pause() */
	spin_lock_irqsave(&chan->vc.lock, flags);

	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHSUSPREG);
		if (chan->id >= DMAC_CHAN_16) {
			val |= (u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_SHIFT + DMAC_CHAN_BLOCK_SHIFT) |
				(u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT);
		} else {
			val |= BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT;
		}
		axi_dma_iowrite64(chan->chip, DMAC_CHSUSPREG, val);
	} else {
		if (chan->chip->dw->hdata->reg_map_8_channels) {
			val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
			val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT;
			axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
		} else {
			val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
			val |= BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT |
			       BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT;
			axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, (u32)val);
		}
	}

	do {
		if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_SUSPENDED)
			break;

		udelay(2);
	} while (--timeout);

	axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_SUSPENDED);

	chan->is_paused = true;

	spin_unlock_irqrestore(&chan->vc.lock, flags);

	return timeout ? 0 : -EAGAIN;
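/*
 * Pause is acknowledged by the hardware raising DWAXIDMAC_IRQ_SUSPENDED
 * in the channel status: the driver busy-waits (udelay) for a bounded
 * number of iterations under the channel lock and returns -EAGAIN if
 * the suspend never latches.
 */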
/* in axi_chan_resume() */
	if (chan->chip->dw->hdata->nr_channels >= DMAC_CHAN_16) {
		val = axi_dma_ioread64(chan->chip, DMAC_CHSUSPREG);
		if (chan->id >= DMAC_CHAN_16) {
			val &= ~((u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_SHIFT + DMAC_CHAN_BLOCK_SHIFT));
			val |= ((u64)(BIT(chan->id) >> DMAC_CHAN_16)
				<< (DMAC_CHAN_SUSP2_WE_SHIFT + DMAC_CHAN_BLOCK_SHIFT));
		} else {
			val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT);
			val |= (BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT);
		}
		axi_dma_iowrite64(chan->chip, DMAC_CHSUSPREG, val);
	} else {
		if (chan->chip->dw->hdata->reg_map_8_channels) {
			val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
			val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT);
			val |= (BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT);
			axi_dma_iowrite32(chan->chip, DMAC_CHEN, (u32)val);
		} else {
			val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
			val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT);
			val |= (BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT);
			axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, (u32)val);
		}
	}

	chan->is_paused = false;
/* in dma_chan_resume() */
	spin_lock_irqsave(&chan->vc.lock, flags);

	if (chan->is_paused)
		axi_chan_resume(chan);

	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* in axi_dma_suspend() */
	clk_disable_unprepare(chip->core_clk);
	clk_disable_unprepare(chip->cfgr_clk);

/* in axi_dma_resume() */
	ret = clk_prepare_enable(chip->cfgr_clk);
	if (ret < 0)
		return ret;

	ret = clk_prepare_enable(chip->core_clk);
	if (ret < 0)
		return ret;
/* in dw_axi_dma_of_xlate() */
	struct dw_axi_dma *dw = ofdma->of_dma_data;
	/* ... */
	dchan = dma_get_any_slave_channel(&dw->dma);
	if (!dchan)
		return NULL;

	chan = dchan_to_axi_dma_chan(dchan);
	chan->hw_handshake_num = dma_spec->args[0];
	return dchan;
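/*
 * The one cell in a client's "dmas" specifier becomes the channel's
 * hardware handshake number. A hypothetical consumer node (made-up
 * handshake indices 4 and 5) might therefore look like:
 *
 *	serial@1000 {
 *		dmas = <&dmac 4>, <&dmac 5>;
 *		dma-names = "tx", "rx";
 *	};
 */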
/* in parse_device_properties() */
	struct device *dev = chip->dev;
	/* ... */
	ret = device_property_read_u32(dev, "dma-channels", &tmp);
	if (ret)
		return ret;
	if (tmp == 0 || tmp > DMAC_MAX_CHANNELS)
		return -EINVAL;

	chip->dw->hdata->nr_channels = tmp;
	if (tmp <= DMA_REG_MAP_CH_REF)
		chip->dw->hdata->reg_map_8_channels = true;

	ret = device_property_read_u32(dev, "snps,dma-masters", &tmp);
	if (ret)
		return ret;
	if (tmp == 0 || tmp > DMAC_MAX_MASTERS)
		return -EINVAL;

	chip->dw->hdata->nr_masters = tmp;

	ret = device_property_read_u32(dev, "snps,data-width", &tmp);
	if (ret)
		return ret;
	if (tmp > DWAXIDMAC_TRANS_WIDTH_MAX)
		return -EINVAL;

	chip->dw->hdata->m_data_width = tmp;

	ret = device_property_read_u32_array(dev, "snps,block-size", carr,
					     chip->dw->hdata->nr_channels);
	if (ret)
		return ret;
	for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
		if (carr[tmp] == 0 || carr[tmp] > DMAC_MAX_BLK_SIZE)
			return -EINVAL;

		chip->dw->hdata->block_size[tmp] = carr[tmp];
	}

	ret = device_property_read_u32_array(dev, "snps,priority", carr,
					     chip->dw->hdata->nr_channels);
	if (ret)
		return ret;
	/* Priority value must be programmed within [0:nr_channels-1] range */
	for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
		if (carr[tmp] >= chip->dw->hdata->nr_channels)
			return -EINVAL;

		chip->dw->hdata->priority[tmp] = carr[tmp];
	}

	/* axi-max-burst-len is an optional property */
	ret = device_property_read_u32(dev, "snps,axi-max-burst-len", &tmp);
	if (!ret) {
		if (tmp > DWAXIDMAC_ARWLEN_MAX + 1)
			return -EINVAL;
		if (tmp < DWAXIDMAC_ARWLEN_MIN + 1)
			return -EINVAL;

		chip->dw->hdata->restrict_axi_burst_len = true;
		chip->dw->hdata->axi_rw_burst_len = tmp;
	}
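/*
 * Everything above comes from firmware properties. For illustration
 * only, a minimal controller node per the snps,dw-axi-dmac binding
 * (values hypothetical; clocks and interrupts omitted) could be:
 *
 *	dmac: dma-controller@80000 {
 *		compatible = "snps,axi-dma-1.01a";
 *		reg = <0x80000 0x400>;
 *		#dma-cells = <1>;
 *		dma-channels = <2>;
 *		snps,dma-masters = <2>;
 *		snps,data-width = <3>;
 *		snps,block-size = <4096 4096>;
 *		snps,priority = <0 1>;
 *		snps,axi-max-burst-len = <16>;
 *	};
 *
 * snps,axi-max-burst-len is optional; when present it caps the AXI
 * ARLEN/AWLEN bursts, which is what sets restrict_axi_burst_len here.
 */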
/* in axi_req_irqs() */
	for (int i = 0; i < irq_count; i++) {
		chip->irq[i] = platform_get_irq(pdev, i);
		if (chip->irq[i] < 0)
			return chip->irq[i];
		ret = devm_request_irq(chip->dev, chip->irq[i], dw_axi_dma_interrupt,
				       IRQF_SHARED, KBUILD_MODNAME, chip);
		if (ret < 0)
			return ret;
	}
/* in dw_probe() */
	chip = devm_kzalloc(&pdev->dev, sizeof(*chip), GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	dw = devm_kzalloc(&pdev->dev, sizeof(*dw), GFP_KERNEL);
	if (!dw)
		return -ENOMEM;

	hdata = devm_kzalloc(&pdev->dev, sizeof(*hdata), GFP_KERNEL);
	if (!hdata)
		return -ENOMEM;

	chip->dw = dw;
	chip->dev = &pdev->dev;
	chip->dw->hdata = hdata;

	chip->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(chip->regs))
		return PTR_ERR(chip->regs);

	flags = (uintptr_t)of_device_get_match_data(&pdev->dev);
	if (flags & AXI_DMA_FLAG_HAS_APB_REGS) {
		chip->apb_regs = devm_platform_ioremap_resource(pdev, 1);
		if (IS_ERR(chip->apb_regs))
			return PTR_ERR(chip->apb_regs);
	}

	if (flags & AXI_DMA_FLAG_HAS_RESETS) {
		resets = devm_reset_control_array_get_exclusive(&pdev->dev);
		if (IS_ERR(resets))
			return PTR_ERR(resets);
		/* ... */
	}

	chip->dw->hdata->use_cfg2 = !!(flags & AXI_DMA_FLAG_USE_CFG2);

	chip->core_clk = devm_clk_get(chip->dev, "core-clk");
	if (IS_ERR(chip->core_clk))
		return PTR_ERR(chip->core_clk);

	chip->cfgr_clk = devm_clk_get(chip->dev, "cfgr-clk");
	if (IS_ERR(chip->cfgr_clk))
		return PTR_ERR(chip->cfgr_clk);

	ret = parse_device_properties(chip);
	if (ret)
		return ret;

	dw->chan = devm_kcalloc(chip->dev, hdata->nr_channels,
				sizeof(*dw->chan), GFP_KERNEL);
	if (!dw->chan)
		return -ENOMEM;

	ret = axi_req_irqs(pdev, chip);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dw->dma.channels);
	for (i = 0; i < hdata->nr_channels; i++) {
		struct axi_dma_chan *chan = &dw->chan[i];

		chan->chip = chip;
		chan->id = i;
		chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN;
		atomic_set(&chan->descs_allocated, 0);

		chan->vc.desc_free = vchan_desc_put;
		vchan_init(&chan->vc, &dw->dma);
	}

	/* Set capabilities */
	dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
	dma_cap_set(DMA_SLAVE, dw->dma.cap_mask);
	dma_cap_set(DMA_CYCLIC, dw->dma.cap_mask);

	/* DMA capabilities */
	dw->dma.max_burst = hdata->axi_rw_burst_len;
	dw->dma.src_addr_widths = AXI_DMA_BUSWIDTHS;
	dw->dma.dst_addr_widths = AXI_DMA_BUSWIDTHS;
	dw->dma.directions = BIT(DMA_MEM_TO_MEM);
	dw->dma.directions |= BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM);
	dw->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;

	dw->dma.dev = chip->dev;
	dw->dma.device_tx_status = dma_chan_tx_status;
	dw->dma.device_issue_pending = dma_chan_issue_pending;
	dw->dma.device_terminate_all = dma_chan_terminate_all;
	dw->dma.device_pause = dma_chan_pause;
	dw->dma.device_resume = dma_chan_resume;

	dw->dma.device_alloc_chan_resources = dma_chan_alloc_chan_resources;
	dw->dma.device_free_chan_resources = dma_chan_free_chan_resources;

	dw->dma.device_prep_dma_memcpy = dma_chan_prep_dma_memcpy;
	dw->dma.device_synchronize = dw_axi_dma_synchronize;
	dw->dma.device_config = dw_axi_dma_chan_slave_config;
	dw->dma.device_prep_slave_sg = dw_axi_dma_chan_prep_slave_sg;
	dw->dma.device_prep_dma_cyclic = dw_axi_dma_chan_prep_cyclic;
	/* ... */
	dw->dma.dev->dma_parms = &dw->dma_parms;
	dma_set_max_seg_size(&pdev->dev, MAX_BLOCK_SIZE);
	platform_set_drvdata(pdev, chip);

	pm_runtime_enable(chip->dev);

	/*
	 * We can't just call pm_runtime_get here instead of
	 * pm_runtime_get_noresume + axi_dma_resume because PM runtime
	 * functions are NOPs in case of !CONFIG_PM.
	 */
	pm_runtime_get_noresume(chip->dev);
	ret = axi_dma_resume(chip);
	if (ret < 0)
		goto err_pm_disable;

	axi_dma_hw_init(chip);

	pm_runtime_put(chip->dev);

	ret = dmaenginem_async_device_register(&dw->dma);
	if (ret)
		goto err_pm_disable;

	/* Register with OF helpers for DMA lookups */
	ret = of_dma_controller_register(pdev->dev.of_node,
					 dw_axi_dma_of_xlate, dw);
	if (ret < 0)
		dev_warn(&pdev->dev,
			 "Failed to register OF DMA controller, fallback to MEM_TO_MEM mode\n");

	dev_info(chip->dev, "DesignWare AXI DMA Controller, %d channels\n",
		 dw->hdata->nr_channels);

	return 0;

err_pm_disable:
	pm_runtime_disable(chip->dev);

	return ret;
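/*
 * Probe ordering matters: registers and clocks come first, then
 * parse_device_properties() so nr_channels is known before the channel
 * array and dmaengine capabilities are built, and runtime PM is primed
 * with pm_runtime_get_noresume() plus an explicit axi_dma_resume()
 * because, as the comment above notes, the runtime-PM helpers are no-ops
 * without CONFIG_PM.
 */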
/* in dw_remove() */
	struct dw_axi_dma *dw = chip->dw;
	/* ... */
	/* Enable clk before accessing to registers */
	clk_prepare_enable(chip->cfgr_clk);
	clk_prepare_enable(chip->core_clk);
	/* ... */
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		axi_chan_disable(&chip->dw->chan[i]);
		axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
	}
	/* ... */
	pm_runtime_disable(chip->dev);
	/* ... */
	for (i = 0; i < DMAC_MAX_CHANNELS; i++)
		if (chip->irq[i] > 0)
			devm_free_irq(chip->dev, chip->irq[i], chip);

	of_dma_controller_free(chip->dev->of_node);

	list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
			vc.chan.device_node) {
		list_del(&chan->vc.chan.device_node);
		tasklet_kill(&chan->vc.task);
	}
static const struct of_device_id dw_dma_of_id_table[] = {
	{
		.compatible = "snps,axi-dma-1.01a"
	}, {
		.compatible = "intel,kmb-axi-dma",
		/* ... */
	}, {
		.compatible = "starfive,jh7110-axi-dma",
		/* ... */
	}, {
		.compatible = "starfive,jh8100-axi-dma",
		/* ... */
	},
	{ /* sentinel */ }
};
/* ... */
MODULE_DESCRIPTION("Synopsys DesignWare AXI DMA Controller platform driver");