Lines matching the full identifier pd in drivers/dma/pch_dma.c

132 #define dma_readl(pd, name) \ argument
133 readl((pd)->membase + PCH_DMA_##name)
134 #define dma_writel(pd, name, val) \ argument
135 writel((val), (pd)->membase + PCH_DMA_##name)
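
The two accessors above wrap readl()/writel() over the controller's MMIO window; the ## token-pasting turns the register name into an offset constant. A minimal expansion sketch, assuming an offset macro such as PCH_DMA_CTL0 (the offset values themselves are not in this listing):

/* dma_readl(pd, CTL0) expands to: */
readl((pd)->membase + PCH_DMA_CTL0);

/* dma_writel(pd, CTL0, val) expands to: */
writel((val), (pd)->membase + PCH_DMA_CTL0);
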
174 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq() local
183 val = dma_readl(pd, CTL2); in pdc_enable_irq()
190 dma_writel(pd, CTL2, val); in pdc_enable_irq()
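
pdc_enable_irq() is a read-modify-write of the CTL2 interrupt-enable register: sample, flip one channel's bit, write back. A sketch of that pattern, assuming the enable bit position is derived from chan->chan_id (the exact bit layout is not visible in the listing):

static void pdc_enable_irq(struct dma_chan *chan, int enable)
{
	struct pch_dma *pd = to_pd(chan->device);
	int pos = chan->chan_id;        /* assumed bit mapping */
	u32 val;

	val = dma_readl(pd, CTL2);      /* current enable mask */
	if (enable)
		val |= 0x1 << pos;      /* enable this channel's interrupt */
	else
		val &= ~(0x1 << pos);   /* disable it */
	dma_writel(pd, CTL2, val);      /* write the mask back */
}
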
199 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir() local
205 val = dma_readl(pd, CTL0); in pdc_set_dir()
220 dma_writel(pd, CTL0, val); in pdc_set_dir()
223 val = dma_readl(pd, CTL3); in pdc_set_dir()
237 dma_writel(pd, CTL3, val); in pdc_set_dir()
246 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode() local
256 val = dma_readl(pd, CTL0); in pdc_set_mode()
260 dma_writel(pd, CTL0, val); in pdc_set_mode()
267 val = dma_readl(pd, CTL3); in pdc_set_mode()
271 dma_writel(pd, CTL3, val); in pdc_set_mode()
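
pdc_set_dir() and pdc_set_mode() share one layout rule: channels 0-7 are configured through packed per-channel fields in CTL0, the remaining channels through CTL3, each via a read-modify-write. A sketch of the shared pattern; the field width (DMA_CTL0_BITS_PER_CH) and mask name are assumptions for illustration:

/* One function shown; pdc_set_dir() differs only in which bits of the
 * per-channel field it updates.
 */
static void pdc_set_mode_sketch(struct dma_chan *chan, u32 mode)
{
	struct pch_dma *pd = to_pd(chan->device);
	u32 val;

	if (chan->chan_id < 8) {                  /* first bank lives in CTL0 */
		int shift = DMA_CTL0_BITS_PER_CH * chan->chan_id;

		val = dma_readl(pd, CTL0);
		val &= ~(DMA_CTL0_MODE_MASK_BITS << shift); /* clear old field */
		val |= mode << shift;                       /* insert new mode */
		dma_writel(pd, CTL0, val);
	} else {                                  /* later channels in CTL3 */
		int shift = DMA_CTL0_BITS_PER_CH * (chan->chan_id - 8);

		val = dma_readl(pd, CTL3);
		val &= ~(DMA_CTL0_MODE_MASK_BITS << shift);
		val |= mode << shift;
		dma_writel(pd, CTL3, val);
	}
}
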
280 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0() local
283 val = dma_readl(pd, STS0); in pdc_get_status0()
290 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2() local
293 val = dma_readl(pd, STS2); in pdc_get_status2()
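
The status readers are the read-only counterpart: pdc_get_status0() samples STS0 for the first channel bank, pdc_get_status2() samples STS2 for the rest, and each extracts one channel's field. A sketch, with the 4-bits-per-channel layout and the DMA_STATUS_* names assumed:

static u32 pdc_get_status0_sketch(struct pch_dma_chan *pd_chan)
{
	struct pch_dma *pd = to_pd(pd_chan->chan.device);
	u32 val = dma_readl(pd, STS0);  /* whole-bank status word */

	/* shift this channel's field down, then mask it out */
	return DMA_STATUS_MASK_BITS &
		(val >> (DMA_STATUS_SHIFT_BITS + 4 * pd_chan->chan.chan_id));
}
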
424 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc() local
427 desc = dma_pool_zalloc(pd->pool, flags, &addr); in pdc_alloc_desc()
525 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources() local
539 dma_pool_free(pd->pool, desc, desc->txd.phys); in pd_free_chan_resources()
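
Descriptor memory comes from a DMA pool: pdc_alloc_desc() takes a zeroed descriptor plus its bus address from dma_pool_zalloc(), and pd_free_chan_resources() hands each one back with dma_pool_free() using the bus address stashed in txd.phys. A sketch of the pairing (descriptor initialization beyond the bus address is elided):

static struct pch_dma_desc *pdc_alloc_desc(struct dma_chan *chan, gfp_t flags)
{
	struct pch_dma *pd = to_pd(chan->device);
	struct pch_dma_desc *desc;
	dma_addr_t addr;

	desc = dma_pool_zalloc(pd->pool, flags, &addr); /* zeroed block */
	if (desc)
		desc->txd.phys = addr;  /* keep the bus address for free() */
	return desc;
}

/* ...and the matching release inside pd_free_chan_resources(): */
	dma_pool_free(pd->pool, desc, desc->txd.phys);
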
689 struct pch_dma *pd = (struct pch_dma *)devid; in pd_irq() local
697 sts0 = dma_readl(pd, STS0); in pd_irq()
698 sts2 = dma_readl(pd, STS2); in pd_irq()
700 dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0); in pd_irq()
702 for (i = 0; i < pd->dma.chancnt; i++) { in pd_irq()
703 pd_chan = &pd->channels[i]; in pd_irq()
726 dma_writel(pd, STS0, sts0); in pd_irq()
728 dma_writel(pd, STS2, sts2); in pd_irq()
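
pd_irq() samples both status registers once, walks every channel dispatching the ones with pending bits, then acknowledges by writing the sampled values back to STS0/STS2 (a write-to-clear scheme). A sketch of that flow; the per-channel bit test and the tasklet dispatch are assumptions based on common dmaengine driver structure:

static irqreturn_t pd_irq(int irq, void *devid)
{
	struct pch_dma *pd = devid;
	irqreturn_t ret = IRQ_NONE;
	u32 sts0, sts2;
	int i;

	sts0 = dma_readl(pd, STS0);     /* channels 0-7 */
	sts2 = dma_readl(pd, STS2);     /* remaining channels */

	dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0);

	for (i = 0; i < pd->dma.chancnt; i++) {
		struct pch_dma_chan *pd_chan = &pd->channels[i];

		if (sts0 & BIT(i)) {    /* assumed per-channel IRQ bit */
			tasklet_schedule(&pd_chan->tasklet);
			ret = IRQ_HANDLED;
		}
	}

	/* ack: write the sampled bits back to clear them */
	dma_writel(pd, STS0, sts0);
	dma_writel(pd, STS2, sts2);

	return ret;
}
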
733 static void __maybe_unused pch_dma_save_regs(struct pch_dma *pd) in pch_dma_save_regs() argument
739 pd->regs.dma_ctl0 = dma_readl(pd, CTL0); in pch_dma_save_regs()
740 pd->regs.dma_ctl1 = dma_readl(pd, CTL1); in pch_dma_save_regs()
741 pd->regs.dma_ctl2 = dma_readl(pd, CTL2); in pch_dma_save_regs()
742 pd->regs.dma_ctl3 = dma_readl(pd, CTL3); in pch_dma_save_regs()
744 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
747 pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR); in pch_dma_save_regs()
748 pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR); in pch_dma_save_regs()
749 pd->ch_regs[i].size = channel_readl(pd_chan, SIZE); in pch_dma_save_regs()
750 pd->ch_regs[i].next = channel_readl(pd_chan, NEXT); in pch_dma_save_regs()
756 static void __maybe_unused pch_dma_restore_regs(struct pch_dma *pd) in pch_dma_restore_regs() argument
762 dma_writel(pd, CTL0, pd->regs.dma_ctl0); in pch_dma_restore_regs()
763 dma_writel(pd, CTL1, pd->regs.dma_ctl1); in pch_dma_restore_regs()
764 dma_writel(pd, CTL2, pd->regs.dma_ctl2); in pch_dma_restore_regs()
765 dma_writel(pd, CTL3, pd->regs.dma_ctl3); in pch_dma_restore_regs()
767 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
770 channel_writel(pd_chan, DEV_ADDR, pd->ch_regs[i].dev_addr); in pch_dma_restore_regs()
771 channel_writel(pd_chan, MEM_ADDR, pd->ch_regs[i].mem_addr); in pch_dma_restore_regs()
772 channel_writel(pd_chan, SIZE, pd->ch_regs[i].size); in pch_dma_restore_regs()
773 channel_writel(pd_chan, NEXT, pd->ch_regs[i].next); in pch_dma_restore_regs()
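
The two __maybe_unused helpers mirror each other: save snapshots CTL0-CTL3 plus each channel's DEV_ADDR/MEM_ADDR/SIZE/NEXT registers into pd->regs and pd->ch_regs[], and restore writes the same values back in the same order. A condensed sketch of the save half, assuming a to_pd_chan() container_of helper and channel_readl() accessors analogous to dma_readl():

static void pch_dma_save_regs(struct pch_dma *pd)
{
	struct dma_chan *chan, *_c;
	int i = 0;

	pd->regs.dma_ctl0 = dma_readl(pd, CTL0);
	pd->regs.dma_ctl1 = dma_readl(pd, CTL1);
	pd->regs.dma_ctl2 = dma_readl(pd, CTL2);
	pd->regs.dma_ctl3 = dma_readl(pd, CTL3);

	list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
		struct pch_dma_chan *pd_chan = to_pd_chan(chan);

		pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR);
		pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR);
		pd->ch_regs[i].size = channel_readl(pd_chan, SIZE);
		pd->ch_regs[i].next = channel_readl(pd_chan, NEXT);
		i++;                    /* channel slot in pd->ch_regs[] */
	}
}

The restore half is the mirror image with channel_writel(), as the lines above show.
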
781 struct pch_dma *pd = dev_get_drvdata(dev); in pch_dma_suspend() local
783 if (pd) in pch_dma_suspend()
784 pch_dma_save_regs(pd); in pch_dma_suspend()
791 struct pch_dma *pd = dev_get_drvdata(dev); in pch_dma_resume() local
793 if (pd) in pch_dma_resume()
794 pch_dma_restore_regs(pd); in pch_dma_resume()
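
The PM callbacks themselves are thin wrappers: fetch the pch_dma instance from drvdata and delegate to the save/restore helper. A sketch of the suspend side, assuming the standard int (*suspend)(struct device *) dev_pm_ops signature:

static int pch_dma_suspend(struct device *dev)
{
	struct pch_dma *pd = dev_get_drvdata(dev);

	if (pd)                         /* drvdata may not be set yet */
		pch_dma_save_regs(pd);
	return 0;
}
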
802 struct pch_dma *pd; in pch_dma_probe() local
809 pd = kzalloc(sizeof(*pd), GFP_KERNEL); in pch_dma_probe()
810 if (!pd) in pch_dma_probe()
813 pci_set_drvdata(pdev, pd); in pch_dma_probe()
839 regs = pd->membase = pci_iomap(pdev, 1, 0); in pch_dma_probe()
840 if (!pd->membase) { in pch_dma_probe()
847 pd->dma.dev = &pdev->dev; in pch_dma_probe()
849 err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd); in pch_dma_probe()
855 pd->pool = dma_pool_create("pch_dma_desc_pool", &pdev->dev, in pch_dma_probe()
857 if (!pd->pool) { in pch_dma_probe()
864 INIT_LIST_HEAD(&pd->dma.channels); in pch_dma_probe()
867 struct pch_dma_chan *pd_chan = &pd->channels[i]; in pch_dma_probe()
869 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
881 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
884 dma_cap_zero(pd->dma.cap_mask); in pch_dma_probe()
885 dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask); in pch_dma_probe()
886 dma_cap_set(DMA_SLAVE, pd->dma.cap_mask); in pch_dma_probe()
888 pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources; in pch_dma_probe()
889 pd->dma.device_free_chan_resources = pd_free_chan_resources; in pch_dma_probe()
890 pd->dma.device_tx_status = pd_tx_status; in pch_dma_probe()
891 pd->dma.device_issue_pending = pd_issue_pending; in pch_dma_probe()
892 pd->dma.device_prep_slave_sg = pd_prep_slave_sg; in pch_dma_probe()
893 pd->dma.device_terminate_all = pd_device_terminate_all; in pch_dma_probe()
895 err = dma_async_device_register(&pd->dma); in pch_dma_probe()
904 dma_pool_destroy(pd->pool); in pch_dma_probe()
906 free_irq(pdev->irq, pd); in pch_dma_probe()
908 pci_iounmap(pdev, pd->membase); in pch_dma_probe()
914 kfree(pd); in pch_dma_probe()
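
pch_dma_probe() follows the usual PCI probe shape: allocate the device structure, map BAR 1, request the shared IRQ, create the descriptor pool, link each channel into the dmaengine channel list, fill in the callback table, and register; failures unwind through a goto ladder whose tail is visible at lines 904-914. A condensed skeleton; the label names, the channel count coming from id->driver_data, and the omitted pci_enable_device()/pci_request_regions() steps between lines 813 and 839 are assumptions based on standard PCI driver structure:

static int pch_dma_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
	unsigned int nr_channels = id->driver_data; /* assumed source */
	struct pch_dma *pd;
	int err, i;

	pd = kzalloc(sizeof(*pd), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;
	pci_set_drvdata(pdev, pd);

	pd->membase = pci_iomap(pdev, 1, 0);    /* registers sit in BAR 1 */
	if (!pd->membase) {
		err = -ENOMEM;
		goto err_free_mem;
	}

	pd->dma.dev = &pdev->dev;

	err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd);
	if (err)
		goto err_iounmap;

	pd->pool = dma_pool_create("pch_dma_desc_pool", &pdev->dev,
				   sizeof(struct pch_dma_desc), 4, 0);
	if (!pd->pool) {
		err = -ENOMEM;
		goto err_free_irq;
	}

	INIT_LIST_HEAD(&pd->dma.channels);
	for (i = 0; i < nr_channels; i++) {
		struct pch_dma_chan *pd_chan = &pd->channels[i];

		pd_chan->chan.device = &pd->dma;
		/* per-channel membase, locks, lists, tasklet elided */
		list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels);
	}

	dma_cap_zero(pd->dma.cap_mask);
	dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask);
	dma_cap_set(DMA_SLAVE, pd->dma.cap_mask);

	pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources;
	pd->dma.device_free_chan_resources = pd_free_chan_resources;
	pd->dma.device_tx_status = pd_tx_status;
	pd->dma.device_issue_pending = pd_issue_pending;
	pd->dma.device_prep_slave_sg = pd_prep_slave_sg;
	pd->dma.device_terminate_all = pd_device_terminate_all;

	err = dma_async_device_register(&pd->dma);
	if (err)
		goto err_free_pool;

	return 0;

err_free_pool:
	dma_pool_destroy(pd->pool);
err_free_irq:
	free_irq(pdev->irq, pd);
err_iounmap:
	pci_iounmap(pdev, pd->membase);
err_free_mem:
	kfree(pd);
	return err;
}
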
920 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_remove() local
924 if (pd) { in pch_dma_remove()
925 dma_async_device_unregister(&pd->dma); in pch_dma_remove()
927 free_irq(pdev->irq, pd); in pch_dma_remove()
929 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
936 dma_pool_destroy(pd->pool); in pch_dma_remove()
937 pci_iounmap(pdev, pd->membase); in pch_dma_remove()
940 kfree(pd); in pch_dma_remove()
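
Teardown runs in reverse order: unregister the dmaengine device so no new work arrives, free the IRQ so pd_irq() cannot fire, flush each channel's deferred work, then release the pool, the mapping, and the structure. A sketch; the tasklet_kill() in the channel walk is an assumption based on the list traversal at line 929:

static void pch_dma_remove(struct pci_dev *pdev)
{
	struct pch_dma *pd = pci_get_drvdata(pdev);
	struct dma_chan *chan, *_c;

	if (pd) {
		dma_async_device_unregister(&pd->dma);  /* stop new requests */
		free_irq(pdev->irq, pd);                /* no more pd_irq() */

		list_for_each_entry_safe(chan, _c, &pd->dma.channels,
					 device_node) {
			struct pch_dma_chan *pd_chan = to_pd_chan(chan);

			tasklet_kill(&pd_chan->tasklet);
		}

		dma_pool_destroy(pd->pool);
		pci_iounmap(pdev, pd->membase);
		kfree(pd);
	}
}
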