Lines Matching +full:t8103 +full:- +full:admac (drivers/dma/apple-admac.c)
1 // SPDX-License-Identifier: GPL-2.0-only
3 * Driver for Audio DMA Controller (ADMAC) on t8103 (M1) and other Apple chips
115 * SRAM_CARVEOUT has 16-bit fields, so the SRAM cannot be larger than
116 * 64K and a 32-bit bitfield over 2K blocks covers it.
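(With SRAM_BLOCK = 2048 bytes, that works out to at most 65536 / 2048 = 32 blocks, so the 32-bit "allocated" bitfield used in the allocator below is exactly wide enough to track every block.)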
158 sram = &ad->txcache; in admac_alloc_sram_carveout()
160 sram = &ad->rxcache; in admac_alloc_sram_carveout()
162 mutex_lock(&ad->cache_alloc_lock); in admac_alloc_sram_carveout()
164 nblocks = sram->size / SRAM_BLOCK; in admac_alloc_sram_carveout()
166 if (!(sram->allocated & BIT(i))) in admac_alloc_sram_carveout()
172 sram->allocated |= BIT(i); in admac_alloc_sram_carveout()
174 ret = -EBUSY; in admac_alloc_sram_carveout()
177 mutex_unlock(&ad->cache_alloc_lock); in admac_alloc_sram_carveout()
191 sram = &ad->txcache; in admac_free_sram_carveout()
193 sram = &ad->rxcache; in admac_free_sram_carveout()
195 if (WARN_ON(base >= sram->size)) in admac_free_sram_carveout()
198 mutex_lock(&ad->cache_alloc_lock); in admac_free_sram_carveout()
200 sram->allocated &= ~BIT(i); in admac_free_sram_carveout()
201 mutex_unlock(&ad->cache_alloc_lock); in admac_free_sram_carveout()
206 void __iomem *addr = ad->base + reg; in admac_modify()
231 struct admac_chan *adchan = to_admac_chan(tx->chan); in admac_tx_submit()
235 spin_lock_irqsave(&adchan->lock, flags); in admac_tx_submit()
237 list_add_tail(&adtx->node, &adchan->submitted); in admac_tx_submit()
238 spin_unlock_irqrestore(&adchan->lock, flags); in admac_tx_submit()
258 if (direction != admac_chan_direction(adchan->no)) in admac_prep_dma_cyclic()
265 adtx->cyclic = true; in admac_prep_dma_cyclic()
267 adtx->buf_addr = buf_addr; in admac_prep_dma_cyclic()
268 adtx->buf_len = buf_len; in admac_prep_dma_cyclic()
269 adtx->buf_end = buf_addr + buf_len; in admac_prep_dma_cyclic()
270 adtx->period_len = period_len; in admac_prep_dma_cyclic()
272 adtx->submitted_pos = 0; in admac_prep_dma_cyclic()
273 adtx->reclaimed_pos = 0; in admac_prep_dma_cyclic()
275 dma_async_tx_descriptor_init(&adtx->tx, chan); in admac_prep_dma_cyclic()
276 adtx->tx.tx_submit = admac_tx_submit; in admac_prep_dma_cyclic()
277 adtx->tx.desc_free = admac_desc_free; in admac_prep_dma_cyclic()
279 return &adtx->tx; in admac_prep_dma_cyclic()
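For orientation, this is roughly how a dmaengine client (an ASoC platform driver, say) would exercise the cyclic path above. It is only a sketch: the "tx" request name, the bus width, the frame size and the helper name are assumptions made for the example, not anything defined by apple-admac.c.

#include <linux/dmaengine.h>
#include <linux/err.h>

/*
 * Illustrative only: a client-side helper starting a cyclic mem-to-device
 * transfer on a channel obtained from this controller. The "tx" name,
 * widths and frame size are assumptions of the example.
 */
static int example_start_cyclic_tx(struct device *dev, dma_addr_t buf_addr,
				   size_t buf_len, size_t period_len,
				   dma_async_tx_callback period_cb, void *cb_arg)
{
	struct dma_slave_config cfg = {
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_port_window_size = 2,	/* example frame size in words */
	};
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *chan;
	int ret;

	chan = dma_request_chan(dev, "tx");	/* via dmas/dma-names in DT */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	ret = dmaengine_slave_config(chan, &cfg);	/* reaches admac_device_config() */
	if (ret)
		goto release;

	desc = dmaengine_prep_dma_cyclic(chan, buf_addr, buf_len, period_len,
					 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!desc) {				/* reaches admac_prep_dma_cyclic() */
		ret = -ENOMEM;
		goto release;
	}

	desc->callback = period_cb;	/* invoked from admac_chan_tasklet() */
	desc->callback_param = cb_arg;
	dmaengine_submit(desc);		/* queues via admac_tx_submit() */
	dma_async_issue_pending(chan);	/* admac_issue_pending() starts the channel */
	return 0;

release:
	dma_release_channel(chan);
	return ret;
}

From that point on the periodic callback fires once per completed period and dmaengine_tx_status() reports the residue computed by admac_cyclic_read_residue() below.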
290 addr = tx->buf_addr + (tx->submitted_pos % tx->buf_len); in admac_cyclic_write_one_desc()
293 WARN_ON_ONCE(addr + tx->period_len > tx->buf_end); in admac_cyclic_write_one_desc()
295 dev_dbg(ad->dev, "ch%d descriptor: addr=0x%pad len=0x%zx flags=0x%lx\n", in admac_cyclic_write_one_desc()
296 channo, &addr, tx->period_len, FLAG_DESC_NOTIFY); in admac_cyclic_write_one_desc()
298 writel_relaxed(lower_32_bits(addr), ad->base + REG_DESC_WRITE(channo)); in admac_cyclic_write_one_desc()
299 writel_relaxed(upper_32_bits(addr), ad->base + REG_DESC_WRITE(channo)); in admac_cyclic_write_one_desc()
300 writel_relaxed(tx->period_len, ad->base + REG_DESC_WRITE(channo)); in admac_cyclic_write_one_desc()
301 writel_relaxed(FLAG_DESC_NOTIFY, ad->base + REG_DESC_WRITE(channo)); in admac_cyclic_write_one_desc()
303 tx->submitted_pos += tx->period_len; in admac_cyclic_write_one_desc()
304 tx->submitted_pos %= 2 * tx->buf_len; in admac_cyclic_write_one_desc()
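The four writes above push one descriptor into the per-channel descriptor port as four 32-bit words: address low half, address high half, length, then flags. submitted_pos (and reclaimed_pos, updated from the interrupt path) is kept modulo twice the buffer length, apparently so the distance between the two positions can express anything from an empty ring up to a whole buffer of outstanding periods without ambiguity.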
317 if (readl_relaxed(ad->base + REG_DESC_RING(channo)) & RING_FULL) in admac_cyclic_write_desc()
329 return (wrslot + 4 - rdslot) % 4; in admac_ring_noccupied_slots()
351 ring1 = readl_relaxed(ad->base + REG_REPORT_RING(channo)); in admac_cyclic_read_residue()
352 residue1 = readl_relaxed(ad->base + REG_RESIDUE(channo)); in admac_cyclic_read_residue()
353 ring2 = readl_relaxed(ad->base + REG_REPORT_RING(channo)); in admac_cyclic_read_residue()
354 residue2 = readl_relaxed(ad->base + REG_RESIDUE(channo)); in admac_cyclic_read_residue()
367 pos = adtx->reclaimed_pos + adtx->period_len * (nreports + 1) - residue2; in admac_cyclic_read_residue()
369 return adtx->buf_len - pos % adtx->buf_len; in admac_cyclic_read_residue()
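The report ring state and the residue register are each sampled twice; if the hardware retired a descriptor between the two pairs of reads, the snapshots disagree and the driver can compensate when it turns reclaimed_pos, the number of in-flight periods and the hardware residue into a position within the cyclic buffer. The branch doing that comparison is not among the matched lines.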
376 struct admac_data *ad = adchan->host; in admac_tx_status()
387 spin_lock_irqsave(&adchan->lock, flags); in admac_tx_status()
388 adtx = adchan->current_tx; in admac_tx_status()
390 if (adtx && adtx->tx.cookie == cookie) { in admac_tx_status()
392 residue = admac_cyclic_read_residue(ad, adchan->no, adtx); in admac_tx_status()
396 list_for_each_entry(adtx, &adchan->issued, node) { in admac_tx_status()
397 if (adtx->tx.cookie == cookie) { in admac_tx_status()
398 residue = adtx->buf_len; in admac_tx_status()
403 spin_unlock_irqrestore(&adchan->lock, flags); in admac_tx_status()
411 struct admac_data *ad = adchan->host; in admac_start_chan()
412 u32 startbit = 1 << (adchan->no / 2); in admac_start_chan()
415 ad->base + REG_CHAN_INTSTATUS(adchan->no, ad->irq_index)); in admac_start_chan()
417 ad->base + REG_CHAN_INTMASK(adchan->no, ad->irq_index)); in admac_start_chan()
419 switch (admac_chan_direction(adchan->no)) { in admac_start_chan()
421 writel_relaxed(startbit, ad->base + REG_TX_START); in admac_start_chan()
424 writel_relaxed(startbit, ad->base + REG_RX_START); in admac_start_chan()
429 dev_dbg(adchan->host->dev, "ch%d start\n", adchan->no); in admac_start_chan()
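REG_TX_START and REG_RX_START evidently carry one bit per channel of their direction, which is why the bit index is adchan->no / 2 rather than adchan->no; channel numbers alternate between the two directions, the same even/odd split the interrupt dispatch loops further down follow.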
434 struct admac_data *ad = adchan->host; in admac_stop_chan()
435 u32 stopbit = 1 << (adchan->no / 2); in admac_stop_chan()
437 switch (admac_chan_direction(adchan->no)) { in admac_stop_chan()
439 writel_relaxed(stopbit, ad->base + REG_TX_STOP); in admac_stop_chan()
442 writel_relaxed(stopbit, ad->base + REG_RX_STOP); in admac_stop_chan()
447 dev_dbg(adchan->host->dev, "ch%d stop\n", adchan->no); in admac_stop_chan()
452 struct admac_data *ad = adchan->host; in admac_reset_rings()
455 ad->base + REG_CHAN_CTL(adchan->no)); in admac_reset_rings()
456 writel_relaxed(0, ad->base + REG_CHAN_CTL(adchan->no)); in admac_reset_rings()
461 struct admac_data *ad = adchan->host; in admac_start_current_tx()
462 int ch = adchan->no; in admac_start_current_tx()
465 writel_relaxed(0, ad->base + REG_CHAN_CTL(ch)); in admac_start_current_tx()
467 admac_cyclic_write_one_desc(ad, ch, adchan->current_tx); in admac_start_current_tx()
469 admac_cyclic_write_desc(ad, ch, adchan->current_tx); in admac_start_current_tx()
478 spin_lock_irqsave(&adchan->lock, flags); in admac_issue_pending()
479 list_splice_tail_init(&adchan->submitted, &adchan->issued); in admac_issue_pending()
480 if (!list_empty(&adchan->issued) && !adchan->current_tx) { in admac_issue_pending()
481 tx = list_first_entry(&adchan->issued, struct admac_tx, node); in admac_issue_pending()
482 list_del(&tx->node); in admac_issue_pending()
484 adchan->current_tx = tx; in admac_issue_pending()
485 adchan->nperiod_acks = 0; in admac_issue_pending()
488 spin_unlock_irqrestore(&adchan->lock, flags); in admac_issue_pending()
514 spin_lock_irqsave(&adchan->lock, flags); in admac_terminate_all()
518 if (adchan->current_tx) { in admac_terminate_all()
519 list_add_tail(&adchan->current_tx->node, &adchan->to_free); in admac_terminate_all()
520 adchan->current_tx = NULL; in admac_terminate_all()
526 list_splice_tail_init(&adchan->submitted, &adchan->to_free); in admac_terminate_all()
527 list_splice_tail_init(&adchan->issued, &adchan->to_free); in admac_terminate_all()
528 spin_unlock_irqrestore(&adchan->lock, flags); in admac_terminate_all()
540 spin_lock_irqsave(&adchan->lock, flags); in admac_synchronize()
541 list_splice_tail_init(&adchan->to_free, &head); in admac_synchronize()
542 spin_unlock_irqrestore(&adchan->lock, flags); in admac_synchronize()
544 tasklet_kill(&adchan->tasklet); in admac_synchronize()
547 list_del(&adtx->node); in admac_synchronize()
548 admac_desc_free(&adtx->tx); in admac_synchronize()
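Teardown is split in two: admac_terminate_all() only moves descriptors onto the to_free list while holding the channel lock, and admac_synchronize() later splices that list onto a private head, kills the tasklet, and frees the descriptors outside of atomic context.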
555 struct admac_data *ad = adchan->host; in admac_alloc_chan_resources()
558 dma_cookie_init(&adchan->chan); in admac_alloc_chan_resources()
559 ret = admac_alloc_sram_carveout(ad, admac_chan_direction(adchan->no), in admac_alloc_chan_resources()
560 &adchan->carveout); in admac_alloc_chan_resources()
564 writel_relaxed(adchan->carveout, in admac_alloc_chan_resources()
565 ad->base + REG_CHAN_SRAM_CARVEOUT(adchan->no)); in admac_alloc_chan_resources()
575 admac_free_sram_carveout(adchan->host, admac_chan_direction(adchan->no), in admac_free_chan_resources()
576 adchan->carveout); in admac_free_chan_resources()
582 struct admac_data *ad = (struct admac_data *) ofdma->of_dma_data; in admac_dma_of_xlate()
585 if (dma_spec->args_count != 1) in admac_dma_of_xlate()
588 index = dma_spec->args[0]; in admac_dma_of_xlate()
590 if (index >= ad->nchannels) { in admac_dma_of_xlate()
591 dev_err(ad->dev, "channel index %u out of bounds\n", index); in admac_dma_of_xlate()
595 return dma_get_slave_channel(&ad->channels[index].chan); in admac_dma_of_xlate()
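Consumers therefore address a channel with a single specifier cell, the channel index, which admac_dma_of_xlate() checks against the dma-channels count read at probe time before returning the corresponding slave channel.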
605 if (readl_relaxed(ad->base + REG_REPORT_RING(channo)) & RING_EMPTY) in admac_drain_reports()
608 countval_lo = readl_relaxed(ad->base + REG_REPORT_READ(channo)); in admac_drain_reports()
609 countval_hi = readl_relaxed(ad->base + REG_REPORT_READ(channo)); in admac_drain_reports()
610 unk1 = readl_relaxed(ad->base + REG_REPORT_READ(channo)); in admac_drain_reports()
611 flags = readl_relaxed(ad->base + REG_REPORT_READ(channo)); in admac_drain_reports()
613 dev_dbg(ad->dev, "ch%d report: countval=0x%llx unk1=0x%x flags=0x%x\n", in admac_drain_reports()
624 if (readl_relaxed(ad->base + REG_DESC_RING(channo)) & RING_ERR) { in admac_handle_status_err()
625 writel_relaxed(RING_ERR, ad->base + REG_DESC_RING(channo)); in admac_handle_status_err()
626 dev_err_ratelimited(ad->dev, "ch%d descriptor ring error\n", channo); in admac_handle_status_err()
630 if (readl_relaxed(ad->base + REG_REPORT_RING(channo)) & RING_ERR) { in admac_handle_status_err()
631 writel_relaxed(RING_ERR, ad->base + REG_REPORT_RING(channo)); in admac_handle_status_err()
632 dev_err_ratelimited(ad->dev, "ch%d report ring error\n", channo); in admac_handle_status_err()
637 dev_err(ad->dev, "ch%d unknown error, masking errors as cause of IRQs\n", channo); in admac_handle_status_err()
638 admac_modify(ad, REG_CHAN_INTMASK(channo, ad->irq_index), in admac_handle_status_err()
645 struct admac_chan *adchan = &ad->channels[channo]; in admac_handle_status_desc_done()
650 ad->base + REG_CHAN_INTSTATUS(channo, ad->irq_index)); in admac_handle_status_desc_done()
652 spin_lock_irqsave(&adchan->lock, flags); in admac_handle_status_desc_done()
655 if (adchan->current_tx) { in admac_handle_status_desc_done()
656 struct admac_tx *tx = adchan->current_tx; in admac_handle_status_desc_done()
658 adchan->nperiod_acks += nreports; in admac_handle_status_desc_done()
659 tx->reclaimed_pos += nreports * tx->period_len; in admac_handle_status_desc_done()
660 tx->reclaimed_pos %= 2 * tx->buf_len; in admac_handle_status_desc_done()
663 tasklet_schedule(&adchan->tasklet); in admac_handle_status_desc_done()
665 spin_unlock_irqrestore(&adchan->lock, flags); in admac_handle_status_desc_done()
670 u32 cause = readl_relaxed(ad->base + REG_CHAN_INTSTATUS(no, ad->irq_index)); in admac_handle_chan_int()
685 rx_intstate = readl_relaxed(ad->base + REG_RX_INTSTATE(ad->irq_index)); in admac_interrupt()
686 tx_intstate = readl_relaxed(ad->base + REG_TX_INTSTATE(ad->irq_index)); in admac_interrupt()
687 global_intstate = readl_relaxed(ad->base + REG_GLOBAL_INTSTATE(ad->irq_index)); in admac_interrupt()
692 for (i = 0; i < ad->nchannels; i += 2) { in admac_interrupt()
698 for (i = 1; i < ad->nchannels; i += 2) { in admac_interrupt()
705 dev_warn(ad->dev, "clearing unknown global interrupt flag: %x\n", in admac_interrupt()
707 writel_relaxed(~(u32) 0, ad->base + REG_GLOBAL_INTSTATE(ad->irq_index)); in admac_interrupt()
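The two dispatch loops above split the channel space by parity (0, 2, 4, ... and 1, 3, 5, ...), matching the per-direction interrupt state words read at the top of the handler, with even channels checked against one word and odd channels against the other.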
721 spin_lock_irq(&adchan->lock); in admac_chan_tasklet()
722 adtx = adchan->current_tx; in admac_chan_tasklet()
723 nacks = adchan->nperiod_acks; in admac_chan_tasklet()
724 adchan->nperiod_acks = 0; in admac_chan_tasklet()
725 spin_unlock_irq(&adchan->lock); in admac_chan_tasklet()
733 dmaengine_desc_get_callback(&adtx->tx, &cb); in admac_chan_tasklet()
734 while (nacks--) in admac_chan_tasklet()
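The tasklet, rather than the interrupt handler, invokes the client's completion callback, and it does so once per period acknowledged since it last ran, so callbacks are batched when interrupts pile up.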
742 struct admac_data *ad = adchan->host; in admac_device_config()
743 bool is_tx = admac_chan_direction(adchan->no) == DMA_MEM_TO_DEV; in admac_device_config()
745 u32 bus_width = readl_relaxed(ad->base + REG_BUS_WIDTH(adchan->no)) & in admac_device_config()
748 switch (is_tx ? config->dst_addr_width : config->src_addr_width) { in admac_device_config()
762 return -EINVAL; in admac_device_config()
768 * The controller has some means of out-of-band signalling, to the peripheral, in admac_device_config()
772 switch (is_tx ? config->dst_port_window_size : config->src_port_window_size) { in admac_device_config()
782 return -EINVAL; in admac_device_config()
785 writel_relaxed(bus_width, ad->base + REG_BUS_WIDTH(adchan->no)); in admac_device_config()
789 * held in controller's per-channel FIFO. Transfers seem to be triggered in admac_device_config()
796 ad->base + REG_CHAN_FIFOCTL(adchan->no)); in admac_device_config()
803 struct device_node *np = pdev->dev.of_node; in admac_probe()
809 err = of_property_read_u32(np, "dma-channels", &nchannels); in admac_probe()
811 dev_err(&pdev->dev, "missing or invalid dma-channels property\n"); in admac_probe()
812 return -EINVAL; in admac_probe()
815 ad = devm_kzalloc(&pdev->dev, struct_size(ad, channels, nchannels), GFP_KERNEL); in admac_probe()
817 return -ENOMEM; in admac_probe()
820 ad->dev = &pdev->dev; in admac_probe()
821 ad->nchannels = nchannels; in admac_probe()
822 mutex_init(&ad->cache_alloc_lock); in admac_probe()
831 ad->irq_index = i; in admac_probe()
837 return dev_err_probe(&pdev->dev, irq, "no usable interrupt\n"); in admac_probe()
838 ad->irq = irq; in admac_probe()
840 ad->base = devm_platform_ioremap_resource(pdev, 0); in admac_probe()
841 if (IS_ERR(ad->base)) in admac_probe()
842 return dev_err_probe(&pdev->dev, PTR_ERR(ad->base), in admac_probe()
845 ad->rstc = devm_reset_control_get_optional_shared(&pdev->dev, NULL); in admac_probe()
846 if (IS_ERR(ad->rstc)) in admac_probe()
847 return PTR_ERR(ad->rstc); in admac_probe()
849 dma = &ad->dma; in admac_probe()
851 dma_cap_set(DMA_PRIVATE, dma->cap_mask); in admac_probe()
852 dma_cap_set(DMA_CYCLIC, dma->cap_mask); in admac_probe()
854 dma->dev = &pdev->dev; in admac_probe()
855 dma->device_alloc_chan_resources = admac_alloc_chan_resources; in admac_probe()
856 dma->device_free_chan_resources = admac_free_chan_resources; in admac_probe()
857 dma->device_tx_status = admac_tx_status; in admac_probe()
858 dma->device_issue_pending = admac_issue_pending; in admac_probe()
859 dma->device_terminate_all = admac_terminate_all; in admac_probe()
860 dma->device_synchronize = admac_synchronize; in admac_probe()
861 dma->device_prep_dma_cyclic = admac_prep_dma_cyclic; in admac_probe()
862 dma->device_config = admac_device_config; in admac_probe()
863 dma->device_pause = admac_pause; in admac_probe()
864 dma->device_resume = admac_resume; in admac_probe()
866 dma->directions = BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM); in admac_probe()
867 dma->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in admac_probe()
868 dma->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in admac_probe()
871 dma->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in admac_probe()
875 INIT_LIST_HEAD(&dma->channels); in admac_probe()
877 struct admac_chan *adchan = &ad->channels[i]; in admac_probe()
879 adchan->host = ad; in admac_probe()
880 adchan->no = i; in admac_probe()
881 adchan->chan.device = &ad->dma; in admac_probe()
882 spin_lock_init(&adchan->lock); in admac_probe()
883 INIT_LIST_HEAD(&adchan->submitted); in admac_probe()
884 INIT_LIST_HEAD(&adchan->issued); in admac_probe()
885 INIT_LIST_HEAD(&adchan->to_free); in admac_probe()
886 list_add_tail(&adchan->chan.device_node, &dma->channels); in admac_probe()
887 tasklet_setup(&adchan->tasklet, admac_chan_tasklet); in admac_probe()
890 err = reset_control_reset(ad->rstc); in admac_probe()
892 return dev_err_probe(&pdev->dev, err, in admac_probe()
895 err = request_irq(irq, admac_interrupt, 0, dev_name(&pdev->dev), ad); in admac_probe()
897 dev_err_probe(&pdev->dev, err, in admac_probe()
902 err = dma_async_device_register(&ad->dma); in admac_probe()
904 dev_err_probe(&pdev->dev, err, "failed to register DMA device\n"); in admac_probe()
908 err = of_dma_controller_register(pdev->dev.of_node, admac_dma_of_xlate, ad); in admac_probe()
910 dma_async_device_unregister(&ad->dma); in admac_probe()
911 dev_err_probe(&pdev->dev, err, "failed to register with OF\n"); in admac_probe()
915 ad->txcache.size = readl_relaxed(ad->base + REG_TX_SRAM_SIZE); in admac_probe()
916 ad->rxcache.size = readl_relaxed(ad->base + REG_RX_SRAM_SIZE); in admac_probe()
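These SRAM sizes are reported by the hardware itself and are what admac_alloc_sram_carveout() later divides into SRAM_BLOCK-sized blocks when handing out per-channel carveouts.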
918 dev_info(&pdev->dev, "Audio DMA Controller\n"); in admac_probe()
919 dev_info(&pdev->dev, "imprint %x TX cache %u RX cache %u\n", in admac_probe()
920 readl_relaxed(ad->base + REG_IMPRINT), ad->txcache.size, ad->rxcache.size); in admac_probe()
925 free_irq(ad->irq, ad); in admac_probe()
927 reset_control_rearm(ad->rstc); in admac_probe()
935 of_dma_controller_free(pdev->dev.of_node); in admac_remove()
936 dma_async_device_unregister(&ad->dma); in admac_remove()
937 free_irq(ad->irq, ad); in admac_remove()
938 reset_control_rearm(ad->rstc); in admac_remove()
942 { .compatible = "apple,admac", },
949 .name = "apple-admac",
958 MODULE_DESCRIPTION("Driver for Audio DMA Controller (ADMAC) on Apple SoCs");