
// SPDX-License-Identifier: GPL-2.0-or-later
 * Driver for the Atmel AHB DMA Controller (aka HDMA or DMAC on AT91 systems)
 * This supports the Atmel AHB DMA Controller found in several Atmel SoCs.
#include <dt-bindings/dma/at91.h>
#include <linux/dma-mapping.h>
#include "virt-dma.h"
 * --------
 * at_hdmac		: Name of the Atmel AHB DMA Controller
#define AT_DMA_IF_BIGEND(i)	BIT((i))	/* AHB-Lite Interface i in Big-endian mode */
#define ch_regs(x)		(AT_DMA_CH_REGS_BASE + (x) * 0x28)	/* Channel x base addr */
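/*
 * Worked example of the layout above: each channel owns a 0x28-byte register
 * window, so ch_regs(2) = AT_DMA_CH_REGS_BASE + 0x50.
 */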
#define ATC_DSCR_IF		GENMASK(1, 0)	/* Descriptor fetched via AHB-Lite Interface */
#define ATC_SIF			GENMASK(1, 0)	/* Src tx done via AHB-Lite Interface i */
#define ATC_DIF			GENMASK(5, 4)	/* Dst tx done via AHB-Lite Interface i */
#define ATC_SRC_PIP		BIT(8)		/* Source Picture-in-Picture enabled */
#define ATC_DST_PIP		BIT(12)		/* Destination Picture-in-Picture enabled */
#define ATC_FC_MEM2MEM		0x0		/* Mem-to-Mem (DMA) */
#define ATC_FC_MEM2PER		0x1		/* Mem-to-Periph (DMA) */
#define ATC_FC_PER2MEM		0x2		/* Periph-to-Mem (DMA) */
#define ATC_FC_PER2PER		0x3		/* Periph-to-Periph (DMA) */
#define ATC_FC_PER2MEM_PER	0x4		/* Periph-to-Mem (Peripheral) */
#define ATC_FC_MEM2PER_PER	0x5		/* Mem-to-Periph (Peripheral) */
#define ATC_FC_PER2PER_SRCPER	0x6		/* Periph-to-Periph (Src Peripheral) */
#define ATC_FC_PER2PER_DSTPER	0x7		/* Periph-to-Periph (Dst Peripheral) */
#define ATC_LOCK_B		BIT(21)		/* AHB Bus Lock */
#define ATC_AHB_PROT		GENMASK(26, 24)	/* AHB Protection */
/*-- descriptors -----------------------------------------------------*/
 * struct atdma_sg - atdma scatter gather entry
 * struct at_desc - software descriptor
/*-- Channels --------------------------------------------------------*/
 * enum atc_status - information bits stored in channel status flag
 * struct at_dma_chan - internal representation of an Atmel HDMAC channel
	__raw_readl((atchan)->ch_regs + ATC_##name##_OFFSET)
	__raw_writel((val), (atchan)->ch_regs + ATC_##name##_OFFSET)
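/*
 * Usage sketch: channel_readl(atchan, CTRLA) expands to
 * __raw_readl(atchan->ch_regs + ATC_CTRLA_OFFSET); channel_writel() mirrors
 * it for stores.
 */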
 * 1 -> 0, 4 -> 1, 8 -> 2, 16 -> 3, 32 -> 4, 64 -> 5, 128 -> 6, 256 -> 7.
		*maxburst = fls(*maxburst) - 2;
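	/*
	 * e.g. *maxburst = 16: fls(16) = 5, so the encoded value is
	 * 5 - 2 = 3, matching the 16 -> 3 entry in the table above.
	 */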
 * 1 byte -> 0, 2 bytes -> 1, 4 bytes -> 2.
/*-- Controller ------------------------------------------------------*/
 * struct at_dma - internal representation of an Atmel HDMA Controller
	__raw_readl((atdma)->regs + AT_DMA_##name)
	__raw_writel((val), (atdma)->regs + AT_DMA_##name)
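/*
 * Usage sketch: dma_readl(atdma, CHSR) expands to
 * __raw_readl(atdma->regs + AT_DMA_CHSR).
 */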
/*-- Helper functions ------------------------------------------------*/
	return &chan->dev->device;
	struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
	dev_err(chan2dev(&atchan->vc.chan),
		atchan->vc.chan.chan_id,
	dev_err(chan2dev(&atchan->vc.chan),
	dev_crit(chan2dev(&atchan->vc.chan),
		 &lli->saddr, &lli->daddr,
		 lli->ctrla, lli->ctrlb, &lli->dscr);
 * atc_chan_is_enabled - test if given channel is enabled
	struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
	return !!(dma_readl(atdma, CHSR) & atchan->mask);
 * atc_chan_is_paused - test channel pause/resume status
	return test_bit(ATC_IS_PAUSED, &atchan->status);
 * atc_chan_is_cyclic - test if given channel has cyclic property set
	return test_bit(ATC_IS_CYCLIC, &atchan->status);
 * set_lli_eol - set end-of-link on a descriptor so it will end the transfer
	u32 ctrlb = desc->sg[i].lli->ctrlb;
	desc->sg[i].lli->ctrlb = ctrlb;
	desc->sg[i].lli->dscr = 0;
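	/*
	 * A zero dscr marks the last lli in the chain: the controller stops
	 * fetching descriptors once this one completes.
	 */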
 * struct at_dma_platform_data - Controller configuration parameters
 * struct at_dma_slave - Controller-specific information about a slave
 * @cfg: Platform-specific initializer for the CFG register
	struct atdma_sg *atdma_sg = &desc->sg[i];
		desc->sg[i - 1].lli->dscr = atdma_sg->lli_phys;
 * atc_dostart - starts the DMA engine for real
	struct virt_dma_desc *vd = vchan_next_desc(&atchan->vc);
		atchan->desc = NULL;
	list_del(&vd->node);
	atchan->desc = desc = to_atdma_desc(&vd->tx);
	channel_writel(atchan, DSCR, desc->sg[0].lli_phys);
		       FIELD_PREP(ATC_SPIP_HOLE, desc->src_hole) |
		       FIELD_PREP(ATC_SPIP_BOUNDARY, desc->boundary));
		       FIELD_PREP(ATC_DPIP_HOLE, desc->dst_hole) |
		       FIELD_PREP(ATC_DPIP_BOUNDARY, desc->boundary));
	dma_writel(atchan->atdma, CHER, atchan->mask);
	struct at_dma *atdma = to_at_dma(vd->tx.chan->device);
	struct at_desc *desc = to_atdma_desc(&vd->tx);
	for (i = 0; i < desc->sglen; i++) {
		if (desc->sg[i].lli)
			dma_pool_free(atdma->lli_pool, desc->sg[i].lli,
				      desc->sg[i].lli_phys);
	if (desc->memset_buffer) {
		dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
			      desc->memset_paddr);
		desc->memset_buffer = false;
 * atc_calc_bytes_left - calculates the number of bytes left according to the
	return current_len - (btsize << src_width);
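	/*
	 * Worked example (illustrative values): current_len = 4096 bytes and
	 * CTRLA reports btsize = 512 source transfers done at src_width = 2
	 * (32-bit), i.e. 512 << 2 = 2048 bytes done, leaving 2048 bytes.
	 */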
 * atc_get_llis_residue - Get residue for a hardware linked list transfer
 * - If the DMA transfer is paused, RX overruns or TX underruns are more likely
 * - The atc_pause() function masks interrupts, but we'd rather avoid doing so
 * Returns: %0 on success, -errno otherwise.
	len = desc->total_len;
		return -ETIMEDOUT;
	if (desc->sg[0].lli->dscr == dscr) {
	len -= desc->sg[0].len;
	for (i = 1; i < desc->sglen; i++) {
		if (desc->sg[i].lli && desc->sg[i].lli->dscr == dscr)
		len -= desc->sg[i].len;
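	/*
	 * Sketch of the walk above: the hardware DSCR register holds the
	 * address of the next lli to fetch, so the sg entry whose dscr field
	 * matches it is the one in flight; every entry before it is complete
	 * and its length has already been subtracted from the total.
	 */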
 * atc_get_residue - get the residue in bytes for a cookie.
 * Return: %0 on success, -errno otherwise.
	vd = vchan_find_desc(&atchan->vc, cookie);
		desc = to_atdma_desc(&vd->tx);
	else if (atchan->desc && atchan->desc->vd.tx.cookie == cookie)
		desc = atchan->desc;
		return -EINVAL;
	if (desc->sg[0].lli->dscr)
	len = desc->total_len;
 * atc_handle_error - handle errors reported by DMA controller
	struct at_desc *desc = atchan->desc;
	/* Disable channel on AHB error */
	dma_writel(atchan->atdma, CHDR, AT_DMA_RES(i) | atchan->mask);
	dev_crit(chan2dev(&atchan->vc.chan), "Bad descriptor submitted for DMA!\n");
	dev_crit(chan2dev(&atchan->vc.chan), "cookie: %d\n",
		 desc->vd.tx.cookie);
	for (i = 0; i < desc->sglen; i++)
		atc_dump_lli(atchan, desc->sg[i].lli);
	spin_lock(&atchan->vc.lock);
	desc = atchan->desc;
		vchan_cyclic_callback(&desc->vd);
		vchan_cookie_complete(&desc->vd);
		atchan->desc = NULL;
	spin_unlock(&atchan->vc.lock);
	dev_vdbg(atdma->dma_device.dev,
	for (i = 0; i < atdma->dma_device.chancnt; i++) {
		atchan = &atdma->chan[i];
/*-- DMA Engine API --------------------------------------------------*/
 * atc_prep_dma_interleaved - prepare memory to memory interleaved operation
	struct at_dma *atdma = to_at_dma(chan->device);
	if (unlikely(!xt || xt->numf != 1 || !xt->frame_size))
	first = xt->sgl;
		 __func__, &xt->src_start, &xt->dst_start, xt->numf,
		 xt->frame_size, flags);
	for (i = 0; i < xt->frame_size; i++) {
		struct data_chunk *chunk = xt->sgl + i;
		if ((chunk->size != xt->sgl->size) ||
		len += chunk->size;
	dwidth = atc_get_xfer_width(xt->src_start, xt->dst_start, len);
	desc->sglen = 1;
	atdma_sg = desc->sg;
	atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
				       &atdma_sg->lli_phys);
	if (!atdma_sg->lli) {
	lli = atdma_sg->lli;
	lli->saddr = xt->src_start;
	lli->daddr = xt->dst_start;
	lli->ctrla = ctrla | xfer_count;
	lli->ctrlb = ctrlb;
	desc->boundary = first->size >> dwidth;
	desc->dst_hole = (dmaengine_get_dst_icg(xt, first) >> dwidth) + 1;
	desc->src_hole = (dmaengine_get_src_icg(xt, first) >> dwidth) + 1;
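	/*
	 * The boundary and holes are programmed in units of (1 << dwidth)-byte
	 * transfers, with the inter-chunk gaps taken from the dmaengine
	 * helpers; the + 1 likely covers the controller's implicit
	 * one-transfer address increment when the boundary is crossed.
	 */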
	atdma_sg->len = len;
	desc->total_len = len;
	return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
 * atc_prep_dma_memcpy - prepare a memcpy operation
	struct at_dma *atdma = to_at_dma(chan->device);
	desc->sglen = sg_len;
		struct atdma_sg *atdma_sg = &desc->sg[i];
		atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
					       &atdma_sg->lli_phys);
		if (!atdma_sg->lli)
		lli = atdma_sg->lli;
		xfer_count = min_t(size_t, (len - offset) >> src_width,
		lli->saddr = src + offset;
		lli->daddr = dest + offset;
		lli->ctrla = ctrla | xfer_count;
		lli->ctrlb = ctrlb;
		desc->sg[i].len = xfer_count << src_width;
	desc->total_len = len;
	/* set end-of-link on the last link descriptor of the list */
	set_lli_eol(desc, i - 1);
	return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
	atdma_desc_free(&desc->vd);
	struct at_dma *atdma = to_at_dma(chan->device);
		return -EINVAL;
	atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
				       &atdma_sg->lli_phys);
	if (!atdma_sg->lli)
		return -ENOMEM;
	lli = atdma_sg->lli;
	lli->saddr = psrc;
	lli->daddr = pdst;
	lli->ctrla = ctrla | xfer_count;
	lli->ctrlb = ctrlb;
	atdma_sg->len = len;
 * atc_prep_dma_memset - prepare a memset operation
	struct at_dma *atdma = to_at_dma(chan->device);
	if (!is_dma_fill_aligned(chan->device, dest, 0, len)) {
	vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
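	/*
	 * Sketch of the memset trick (the fill itself happens in lines not
	 * shown here): the fill byte is replicated across this 4-byte pool
	 * buffer, e.g. value 0xab becomes 0xabababab, and the transfer
	 * re-reads that word with a fixed source address to paint the
	 * destination.
	 */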
	desc->sglen = 1;
	ret = atdma_create_memset_lli(chan, desc->sg, paddr, dest, len);
	desc->memset_paddr = paddr;
	desc->memset_vaddr = vaddr;
	desc->memset_buffer = true;
	desc->total_len = len;
	/* set end-of-link on the descriptor */
	return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
	dma_pool_free(atdma->memset_pool, vaddr, paddr);
	struct at_dma *atdma = to_at_dma(chan->device);
	vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
	desc->sglen = sg_len;
		if (!is_dma_fill_aligned(chan->device, dest, 0, len)) {
		ret = atdma_create_memset_lli(chan, &desc->sg[i], paddr, dest,
	desc->memset_paddr = paddr;
	desc->memset_vaddr = vaddr;
	desc->memset_buffer = true;
	desc->total_len = total_len;
	/* set end-of-link on the descriptor */
	set_lli_eol(desc, i - 1);
	return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
	atdma_desc_free(&desc->vd);
	dma_pool_free(atdma->memset_pool, vaddr, paddr);
 * atc_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
	struct at_dma *atdma = to_at_dma(chan->device);
	struct at_dma_slave *atslave = chan->private;
	struct dma_slave_config *sconfig = &atchan->dma_sconfig;
	desc->sglen = sg_len;
	ctrla = FIELD_PREP(ATC_SCSIZE, sconfig->src_maxburst) |
		FIELD_PREP(ATC_DCSIZE, sconfig->dst_maxburst);
		reg_width = convert_buswidth(sconfig->dst_addr_width);
			FIELD_PREP(ATC_SIF, atchan->mem_if) |
			FIELD_PREP(ATC_DIF, atchan->per_if);
		reg = sconfig->dst_addr;
			struct atdma_sg *atdma_sg = &desc->sg[i];
			atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
						       &atdma_sg->lli_phys);
			if (!atdma_sg->lli)
			lli = atdma_sg->lli;
			lli->saddr = mem;
			lli->daddr = reg;
			lli->ctrla = ctrla |
			lli->ctrlb = ctrlb;
			atdma_sg->len = len;
			desc->sg[i].len = len;
		reg_width = convert_buswidth(sconfig->src_addr_width);
			FIELD_PREP(ATC_SIF, atchan->per_if) |
			FIELD_PREP(ATC_DIF, atchan->mem_if);
		reg = sconfig->src_addr;
			struct atdma_sg *atdma_sg = &desc->sg[i];
			atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
						       &atdma_sg->lli_phys);
			if (!atdma_sg->lli)
			lli = atdma_sg->lli;
			lli->saddr = reg;
			lli->daddr = mem;
			lli->ctrla = ctrla |
			lli->ctrlb = ctrlb;
			desc->sg[i].len = len;
	/* set end-of-link on the last link descriptor of the list */
	set_lli_eol(desc, i - 1);
	desc->total_len = total_len;
	return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
	atdma_desc_free(&desc->vd);
	if (unlikely(period_len & ((1 << reg_width) - 1)))
	if (unlikely(buf_addr & ((1 << reg_width) - 1)))
	return -EINVAL;
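	/*
	 * e.g. reg_width = 2 (32-bit accesses): both period_len and buf_addr
	 * must be 4-byte aligned, i.e. (x & ((1 << 2) - 1)) == 0.
	 */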
 * atc_dma_cyclic_fill_desc - Fill one period descriptor
	struct at_dma *atdma = to_at_dma(chan->device);
	struct dma_slave_config *sconfig = &atchan->dma_sconfig;
	struct atdma_sg *atdma_sg = &desc->sg[i];
	atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_ATOMIC,
				       &atdma_sg->lli_phys);
	if (!atdma_sg->lli)
		return -ENOMEM;
	lli = atdma_sg->lli;
		lli->saddr = buf_addr + (period_len * i);
		lli->daddr = sconfig->dst_addr;
		lli->ctrlb = FIELD_PREP(ATC_DST_ADDR_MODE,
			     FIELD_PREP(ATC_SIF, atchan->mem_if) |
			     FIELD_PREP(ATC_DIF, atchan->per_if);
		lli->saddr = sconfig->src_addr;
		lli->daddr = buf_addr + (period_len * i);
		lli->ctrlb = FIELD_PREP(ATC_DST_ADDR_MODE,
			     FIELD_PREP(ATC_SIF, atchan->per_if) |
			     FIELD_PREP(ATC_DIF, atchan->mem_if);
		return -EINVAL;
	lli->ctrla = FIELD_PREP(ATC_SCSIZE, sconfig->src_maxburst) |
		     FIELD_PREP(ATC_DCSIZE, sconfig->dst_maxburst) |
	desc->sg[i].len = period_len;
 * atc_prep_dma_cyclic - prepare the cyclic DMA transfer
	struct at_dma_slave *atslave = chan->private;
	struct dma_slave_config *sconfig = &atchan->dma_sconfig;
	dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@%pad - %d (%d/%d)\n",
	was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status);
		reg_width = convert_buswidth(sconfig->dst_addr_width);
		reg_width = convert_buswidth(sconfig->src_addr_width);
	desc->sglen = periods;
	desc->total_len = buf_len;
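	/* Close the ring: link the last period's lli back to the first. */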
	desc->sg[i - 1].lli->dscr = desc->sg[0].lli_phys;
	return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
	atdma_desc_free(&desc->vd);
	clear_bit(ATC_IS_CYCLIC, &atchan->status);
	if (!chan->private)
		return -EINVAL;
	memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig));
	convert_burst(&atchan->dma_sconfig.src_maxburst);
	convert_burst(&atchan->dma_sconfig.dst_maxburst);
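/*
 * Client-side sketch (hypothetical FIFO address, not part of this driver):
 * a slave driver fills a struct dma_slave_config and hands it to this
 * callback via dmaengine_slave_config() before preparing transfers, e.g.:
 *
 *	struct dma_slave_config cfg = {
 *		.direction	= DMA_MEM_TO_DEV,
 *		.dst_addr	= 0xf8010000,	(hypothetical FIFO address)
 *		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
 *		.dst_maxburst	= 16,
 *	};
 *	dmaengine_slave_config(chan, &cfg);
 *
 * The convert_burst() calls above then turn the 16-beat maxburst into the
 * controller encoding (3).
 */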
	struct at_dma *atdma = to_at_dma(chan->device);
	int chan_id = atchan->vc.chan.chan_id;
	spin_lock_irqsave(&atchan->vc.lock, flags);
	set_bit(ATC_IS_PAUSED, &atchan->status);
	spin_unlock_irqrestore(&atchan->vc.lock, flags);
	struct at_dma *atdma = to_at_dma(chan->device);
	int chan_id = atchan->vc.chan.chan_id;
	spin_lock_irqsave(&atchan->vc.lock, flags);
	clear_bit(ATC_IS_PAUSED, &atchan->status);
	spin_unlock_irqrestore(&atchan->vc.lock, flags);
	struct at_dma *atdma = to_at_dma(chan->device);
	int chan_id = atchan->vc.chan.chan_id;
	 * to AHB/HSB limitations.
	spin_lock_irqsave(&atchan->vc.lock, flags);
	dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
	while (dma_readl(atdma, CHSR) & atchan->mask)
	if (atchan->desc) {
		vchan_terminate_vdesc(&atchan->desc->vd);
		atchan->desc = NULL;
	vchan_get_all_descriptors(&atchan->vc, &list);
	clear_bit(ATC_IS_PAUSED, &atchan->status);
	clear_bit(ATC_IS_CYCLIC, &atchan->status);
	spin_unlock_irqrestore(&atchan->vc.lock, flags);
	vchan_dma_desc_free_list(&atchan->vc, &list);
 * atc_tx_status - poll for transaction completion
 * the status of multiple cookies without re-checking hardware state.
	spin_lock_irqsave(&atchan->vc.lock, flags);
	spin_unlock_irqrestore(&atchan->vc.lock, flags);
	spin_lock_irqsave(&atchan->vc.lock, flags);
	if (vchan_issue_pending(&atchan->vc) && !atchan->desc) {
	spin_unlock_irqrestore(&atchan->vc.lock, flags);
 * atc_alloc_chan_resources - allocate resources for DMA channel
	struct at_dma *atdma = to_at_dma(chan->device);
		return -EIO;
	atslave = chan->private;
		 * We need controller-specific data to set up slave
		BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_device.dev);
		if (atslave->cfg)
			cfg = atslave->cfg;
 * atc_free_chan_resources - free all channel resources
	atchan->status = 0;
	kfree(chan->private);
	chan->private = NULL;
	if (atslave->dma_dev == chan->device->dev) {
		chan->private = atslave;
	if (dma_spec->args_count != 2)
	dmac_pdev = of_find_device_by_node(dma_spec->np);
		put_device(&dmac_pdev->dev);
	atslave->cfg = ATC_DST_H2SEL | ATC_SRC_H2SEL;
	per_id = dma_spec->args[1] & AT91_DMA_CFG_PER_ID_MASK;
	atslave->cfg |= ATC_DST_PER_ID(per_id) | ATC_SRC_PER_ID(per_id);
	switch (dma_spec->args[1] & AT91_DMA_CFG_FIFOCFG_MASK) {
		atslave->cfg |= FIELD_PREP(ATC_FIFOCFG,
		atslave->cfg |= FIELD_PREP(ATC_FIFOCFG,
		atslave->cfg |= FIELD_PREP(ATC_FIFOCFG, ATC_FIFOCFG_HALFFIFO);
	atslave->dma_dev = &dmac_pdev->dev;
	put_device(&dmac_pdev->dev);
	atchan->per_if = dma_spec->args[0] & 0xff;
	atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff;
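/*
 * Devicetree sketch (illustrative; the atmel-dma binding is authoritative):
 * a consumer entry such as
 *
 *	dmas = <&dma0 2 AT91_DMA_CFG_PER_ID(1)>;
 *
 * reaches this function with args[0] = 2 (per_if in bits 7:0, mem_if in
 * bits 23:16) and args[1] carrying the peripheral ID and FIFO configuration
 * decoded above.
 */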
/*-- Module Management -----------------------------------------------*/
/* cap_mask is a multi-u32 bitfield, fill it with proper C code. */
		.compatible = "atmel,at91sam9rl-dma",
		.compatible = "atmel,at91sam9g45-dma",
	if (pdev->dev.of_node) {
		match = of_match_node(atmel_dma_dt_ids, pdev->dev.of_node);
		return match->data;
		platform_get_device_id(pdev)->driver_data;
 * at_dma_off - disable DMA controller
	dma_writel(atdma, EBCIDR, -1L);
	while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
		return -ENODEV;
	atdma = devm_kzalloc(&pdev->dev,
			     struct_size(atdma, chan, plat_dat->nr_channels),
		return -ENOMEM;
	atdma->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(atdma->regs))
		return PTR_ERR(atdma->regs);
	atdma->dma_device.cap_mask = plat_dat->cap_mask;
	atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1;
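	/* e.g. plat_dat->nr_channels = 8 yields all_chan_mask = 0xff. */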
	atdma->clk = devm_clk_get(&pdev->dev, "dma_clk");
	if (IS_ERR(atdma->clk))
		return PTR_ERR(atdma->clk);
	err = clk_prepare_enable(atdma->clk);
	atdma->lli_pool = dma_pool_create("at_hdmac_lli_pool",
					  &pdev->dev, sizeof(struct at_lli),
	if (!atdma->lli_pool) {
		dev_err(&pdev->dev, "Unable to allocate DMA LLI descriptor pool\n");
		err = -ENOMEM;
	atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool",
					     &pdev->dev, sizeof(int), 4, 0);
	if (!atdma->memset_pool) {
		dev_err(&pdev->dev, "No memory for memset dma pool\n");
		err = -ENOMEM;
	INIT_LIST_HEAD(&atdma->dma_device.channels);
	for (i = 0; i < plat_dat->nr_channels; i++) {
		struct at_dma_chan *atchan = &atdma->chan[i];
		atchan->mem_if = AT_DMA_MEM_IF;
		atchan->per_if = AT_DMA_PER_IF;
		atchan->ch_regs = atdma->regs + ch_regs(i);
		atchan->mask = 1 << i;
		atchan->atdma = atdma;
		atchan->vc.desc_free = atdma_desc_free;
		vchan_init(&atchan->vc, &atdma->dma_device);
	atdma->dma_device.device_alloc_chan_resources = atc_alloc_chan_resources;
	atdma->dma_device.device_free_chan_resources = atc_free_chan_resources;
	atdma->dma_device.device_tx_status = atc_tx_status;
	atdma->dma_device.device_issue_pending = atc_issue_pending;
	atdma->dma_device.dev = &pdev->dev;
	if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask))
		atdma->dma_device.device_prep_interleaved_dma = atc_prep_dma_interleaved;
	if (dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask))
		atdma->dma_device.device_prep_dma_memcpy = atc_prep_dma_memcpy;
	if (dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask)) {
		atdma->dma_device.device_prep_dma_memset = atc_prep_dma_memset;
		atdma->dma_device.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
		atdma->dma_device.fill_align = DMAENGINE_ALIGN_4_BYTES;
	if (dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask)) {
		atdma->dma_device.device_prep_slave_sg = atc_prep_slave_sg;
		dma_cap_set(DMA_CYCLIC, atdma->dma_device.cap_mask);
		atdma->dma_device.device_prep_dma_cyclic = atc_prep_dma_cyclic;
		atdma->dma_device.device_config = atc_config;
		atdma->dma_device.device_pause = atc_pause;
		atdma->dma_device.device_resume = atc_resume;
		atdma->dma_device.device_terminate_all = atc_terminate_all;
		atdma->dma_device.src_addr_widths = ATC_DMA_BUSWIDTHS;
		atdma->dma_device.dst_addr_widths = ATC_DMA_BUSWIDTHS;
		atdma->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
		atdma->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dev_info(&pdev->dev, "Atmel AHB DMA Controller (%s%s%s), %d channels\n",
		 dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask) ? "cpy " : "",
		 dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask) ? "set " : "",
		 dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask) ? "slave " : "",
		 plat_dat->nr_channels);
	err = dma_async_device_register(&atdma->dma_device);
		dev_err(&pdev->dev, "Unable to register: %d.\n", err);
	if (pdev->dev.of_node) {
		err = of_dma_controller_register(pdev->dev.of_node,
			dev_err(&pdev->dev, "could not register of_dma_controller\n");
	dma_async_device_unregister(&atdma->dma_device);
	dma_pool_destroy(atdma->memset_pool);
	dma_pool_destroy(atdma->lli_pool);
	clk_disable_unprepare(atdma->clk);
	if (pdev->dev.of_node)
		of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&atdma->dma_device);
	dma_pool_destroy(atdma->memset_pool);
	dma_pool_destroy(atdma->lli_pool);
	list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
		atc_disable_chan_irq(atdma, chan->chan_id);
		list_del(&chan->device_node);
	clk_disable_unprepare(atdma->clk);
	clk_disable_unprepare(atdma->clk);
	list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
			return -EAGAIN;
	struct dma_chan *chan = &atchan->vc.chan;
	atchan->save_dscr = channel_readl(atchan, DSCR);
	list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
		atchan->save_cfg = channel_readl(atchan, CFG);
	atdma->save_imr = dma_readl(atdma, EBCIMR);
	clk_disable_unprepare(atdma->clk);
	struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
	channel_writel(atchan, DSCR, atchan->save_dscr);
	dma_writel(atdma, CHER, atchan->mask);
	clk_prepare_enable(atdma->clk);
	dma_writel(atdma, EBCIER, atdma->save_imr);
	list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
		channel_writel(atchan, CFG, atchan->save_cfg);
MODULE_DESCRIPTION("Atmel AHB DMA Controller driver");