Lines Matching +full:sub +full:- +full:function

1 // SPDX-License-Identifier: GPL-2.0
5 // Copyright (c) 2016-2018 Socionext Inc.
7 #include <linux/dma-mapping.h>
28 static void aiodma_pcm_irq(struct uniphier_aio_sub *sub) in aiodma_pcm_irq() argument
30 struct snd_pcm_runtime *runtime = sub->substream->runtime; in aiodma_pcm_irq()
31 int bytes = runtime->period_size * in aiodma_pcm_irq()
32 runtime->channels * samples_to_bytes(runtime, 1); in aiodma_pcm_irq()
35 spin_lock(&sub->lock); in aiodma_pcm_irq()
36 ret = aiodma_rb_set_threshold(sub, runtime->dma_bytes, in aiodma_pcm_irq()
37 sub->threshold + bytes); in aiodma_pcm_irq()
39 sub->threshold += bytes; in aiodma_pcm_irq()
41 aiodma_rb_sync(sub, runtime->dma_addr, runtime->dma_bytes, bytes); in aiodma_pcm_irq()
42 aiodma_rb_clear_irq(sub); in aiodma_pcm_irq()
43 spin_unlock(&sub->lock); in aiodma_pcm_irq()
45 snd_pcm_period_elapsed(sub->substream); in aiodma_pcm_irq()
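
The per-period byte count computed in aiodma_pcm_irq() above is just the period length in frames scaled by the frame size. A worked sketch with assumed figures (2-channel S16_LE audio, 1024-frame period; none of these numbers come from the matches):

/* Sketch only: illustrative values, not taken from the driver. */
int period_size = 1024;     /* frames per period (assumed) */
int channels = 2;           /* stereo (assumed) */
int bytes_per_sample = 2;   /* S16_LE, i.e. what samples_to_bytes(runtime, 1) yields */
int bytes = period_size * channels * bytes_per_sample;  /* 4096 bytes per period */
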
48 static void aiodma_compr_irq(struct uniphier_aio_sub *sub) in aiodma_compr_irq() argument
50 struct snd_compr_runtime *runtime = sub->cstream->runtime; in aiodma_compr_irq()
51 int bytes = runtime->fragment_size; in aiodma_compr_irq()
54 spin_lock(&sub->lock); in aiodma_compr_irq()
55 ret = aiodma_rb_set_threshold(sub, sub->compr_bytes, in aiodma_compr_irq()
56 sub->threshold + bytes); in aiodma_compr_irq()
58 sub->threshold += bytes; in aiodma_compr_irq()
60 aiodma_rb_sync(sub, sub->compr_addr, sub->compr_bytes, bytes); in aiodma_compr_irq()
61 aiodma_rb_clear_irq(sub); in aiodma_compr_irq()
62 spin_unlock(&sub->lock); in aiodma_compr_irq()
64 snd_compr_fragment_elapsed(sub->cstream); in aiodma_compr_irq()
74 for (i = 0; i < chip->num_aios; i++) { in aiodma_irq()
75 struct uniphier_aio *aio = &chip->aios[i]; in aiodma_irq()
77 for (j = 0; j < ARRAY_SIZE(aio->sub); j++) { in aiodma_irq()
78 struct uniphier_aio_sub *sub = &aio->sub[j]; in aiodma_irq() local
81 if (!sub->running || !aiodma_rb_is_irq(sub)) in aiodma_irq()
84 if (sub->substream) in aiodma_irq()
85 aiodma_pcm_irq(sub); in aiodma_irq()
86 if (sub->cstream) in aiodma_irq()
87 aiodma_compr_irq(sub); in aiodma_irq()
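
Taken together, the aiodma_irq() matches above show how interrupts are fanned out: every sub-stream of every AIO is checked, and a pending IRQ is routed to the PCM or compressed handler depending on which stream is attached. A minimal sketch of that dispatch loop; the handler signature, its argument, and the return-value bookkeeping are assumptions, only the loop body mirrors the matched lines:

/* Sketch only: outer handler shape assumed, loop body taken from the matches. */
static irqreturn_t aiodma_irq(int irq, void *p)
{
	struct uniphier_aio_chip *chip = p;	/* assumed handler argument */
	irqreturn_t ret = IRQ_NONE;
	int i, j;

	for (i = 0; i < chip->num_aios; i++) {
		struct uniphier_aio *aio = &chip->aios[i];

		for (j = 0; j < ARRAY_SIZE(aio->sub); j++) {
			struct uniphier_aio_sub *sub = &aio->sub[j];

			/* Skip idle sub-streams and those with no pending IRQ. */
			if (!sub->running || !aiodma_rb_is_irq(sub))
				continue;

			if (sub->substream)
				aiodma_pcm_irq(sub);	/* PCM period elapsed */
			if (sub->cstream)
				aiodma_compr_irq(sub);	/* compressed fragment elapsed */

			ret = IRQ_HANDLED;
		}
	}

	return ret;
}
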
99 struct snd_pcm_runtime *runtime = substream->runtime; in uniphier_aiodma_open()
110 struct snd_pcm_runtime *runtime = substream->runtime; in uniphier_aiodma_prepare()
113 struct uniphier_aio_sub *sub = &aio->sub[substream->stream]; in uniphier_aiodma_prepare() local
114 int bytes = runtime->period_size * in uniphier_aiodma_prepare()
115 runtime->channels * samples_to_bytes(runtime, 1); in uniphier_aiodma_prepare()
119 ret = aiodma_ch_set_param(sub); in uniphier_aiodma_prepare()
123 spin_lock_irqsave(&sub->lock, flags); in uniphier_aiodma_prepare()
124 ret = aiodma_rb_set_buffer(sub, runtime->dma_addr, in uniphier_aiodma_prepare()
125 runtime->dma_addr + runtime->dma_bytes, in uniphier_aiodma_prepare()
127 spin_unlock_irqrestore(&sub->lock, flags); in uniphier_aiodma_prepare()
137 struct snd_pcm_runtime *runtime = substream->runtime; in uniphier_aiodma_trigger()
140 struct uniphier_aio_sub *sub = &aio->sub[substream->stream]; in uniphier_aiodma_trigger() local
141 struct device *dev = &aio->chip->pdev->dev; in uniphier_aiodma_trigger()
142 int bytes = runtime->period_size * in uniphier_aiodma_trigger()
143 runtime->channels * samples_to_bytes(runtime, 1); in uniphier_aiodma_trigger()
146 spin_lock_irqsave(&sub->lock, flags); in uniphier_aiodma_trigger()
149 aiodma_rb_sync(sub, runtime->dma_addr, runtime->dma_bytes, in uniphier_aiodma_trigger()
151 aiodma_ch_set_enable(sub, 1); in uniphier_aiodma_trigger()
152 sub->running = 1; in uniphier_aiodma_trigger()
156 sub->running = 0; in uniphier_aiodma_trigger()
157 aiodma_ch_set_enable(sub, 0); in uniphier_aiodma_trigger()
164 spin_unlock_irqrestore(&sub->lock, flags); in uniphier_aiodma_trigger()
173 struct snd_pcm_runtime *runtime = substream->runtime; in uniphier_aiodma_pointer()
176 struct uniphier_aio_sub *sub = &aio->sub[substream->stream]; in uniphier_aiodma_pointer() local
177 int bytes = runtime->period_size * in uniphier_aiodma_pointer()
178 runtime->channels * samples_to_bytes(runtime, 1); in uniphier_aiodma_pointer()
182 spin_lock_irqsave(&sub->lock, flags); in uniphier_aiodma_pointer()
183 aiodma_rb_sync(sub, runtime->dma_addr, runtime->dma_bytes, bytes); in uniphier_aiodma_pointer()
185 if (sub->swm->dir == PORT_DIR_OUTPUT) in uniphier_aiodma_pointer()
186 pos = bytes_to_frames(runtime, sub->rd_offs); in uniphier_aiodma_pointer()
188 pos = bytes_to_frames(runtime, sub->wr_offs); in uniphier_aiodma_pointer()
189 spin_unlock_irqrestore(&sub->lock, flags); in uniphier_aiodma_pointer()
198 vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot); in uniphier_aiodma_mmap()
200 return remap_pfn_range(vma, vma->vm_start, in uniphier_aiodma_mmap()
201 substream->runtime->dma_addr >> PAGE_SHIFT, in uniphier_aiodma_mmap()
202 vma->vm_end - vma->vm_start, vma->vm_page_prot); in uniphier_aiodma_mmap()
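
The uniphier_aiodma_mmap() matches above map the preallocated DMA buffer into user space as write-combined memory. A minimal sketch of that callback; the two-argument signature is assumed (it is not visible in the matches), the body mirrors the matched lines:

/* Sketch only: signature assumed, body taken from the matches above. */
static int uniphier_aiodma_mmap(struct snd_pcm_substream *substream,
				struct vm_area_struct *vma)
{
	/* Use a write-combined mapping for the device DMA buffer. */
	vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);

	/* Map the whole buffer starting at its physical page frame number. */
	return remap_pfn_range(vma, vma->vm_start,
			       substream->runtime->dma_addr >> PAGE_SHIFT,
			       vma->vm_end - vma->vm_start, vma->vm_page_prot);
}
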
208 struct device *dev = rtd->card->snd_card->dev; in uniphier_aiodma_new()
209 struct snd_pcm *pcm = rtd->pcm; in uniphier_aiodma_new()
242 * uniphier_aiodma_soc_register_platform - register the AIO DMA
246 * This function needs to be called once at driver startup; there is no need to
247 * call an unregister function.
254 struct device *dev = &pdev->dev; in uniphier_aiodma_soc_register_platform()
262 chip->regmap = devm_regmap_init_mmio(dev, preg, in uniphier_aiodma_soc_register_platform()
264 if (IS_ERR(chip->regmap)) in uniphier_aiodma_soc_register_platform()
265 return PTR_ERR(chip->regmap); in uniphier_aiodma_soc_register_platform()
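
As the kernel-doc above notes, uniphier_aiodma_soc_register_platform() is a one-shot registration at driver startup with no matching unregister call. A hedged sketch of how an AIO SoC driver's probe callback might invoke it; the probe function name and error message are illustrative and not taken from the matches:

/* Sketch only: a hypothetical caller of the one-shot registration helper. */
static int uniphier_aio_example_probe(struct platform_device *pdev)
{
	int ret;

	/* ...SoC-specific AIO setup would normally happen here... */

	/* Register the AIO DMA platform once; it is never unregistered. */
	ret = uniphier_aiodma_soc_register_platform(pdev);
	if (ret)
		dev_err(&pdev->dev, "failed to register AIO DMA: %d\n", ret);

	return ret;
}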