Lines matching full:od

253 struct omap_dmadev *od = to_omap_dma_dev(vd->tx.chan->device); in omap_dma_desc_free() local
258 dma_pool_free(od->desc_pool, d->sg[i].t2_desc, in omap_dma_desc_free()
351 static void omap_dma_glbl_write(struct omap_dmadev *od, unsigned reg, unsigned val) in omap_dma_glbl_write() argument
353 const struct omap_dma_reg *r = od->reg_map + reg; in omap_dma_glbl_write()
357 omap_dma_write(val, r->type, od->base + r->offset); in omap_dma_glbl_write()
360 static unsigned omap_dma_glbl_read(struct omap_dmadev *od, unsigned reg) in omap_dma_glbl_read() argument
362 const struct omap_dma_reg *r = od->reg_map + reg; in omap_dma_glbl_read()
366 return omap_dma_read(r->type, od->base + r->offset); in omap_dma_glbl_read()
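The two global accessors above resolve a logical register index through the per-SoC reg_map table, which records each register's offset and access width, so one code path serves both the 16-bit OMAP1 and 32-bit OMAP2+ register layouts. Below is a minimal sketch of the low-level write helper they rely on, with simplified stand-in types; the real definitions live in the driver and in include/linux/omap-dma.h.

```c
#include <linux/io.h>
#include <linux/bug.h>

/* Simplified stand-ins; the driver's real definitions carry the same idea. */
enum { OMAP_DMA_REG_16BIT, OMAP_DMA_REG_2X16BIT, OMAP_DMA_REG_32BIT };

struct omap_dma_reg {
	u16 offset;
	u8 stride;
	u8 type;
};

static void omap_dma_write(unsigned val, unsigned type, void __iomem *addr)
{
	switch (type) {
	case OMAP_DMA_REG_16BIT:
		writew_relaxed(val, addr);
		break;
	case OMAP_DMA_REG_2X16BIT:
		/* OMAP1: a 32-bit value split across two 16-bit registers */
		writew_relaxed(val, addr);
		writew_relaxed(val >> 16, addr + 2);
		break;
	case OMAP_DMA_REG_32BIT:
		writel_relaxed(val, addr);
		break;
	default:
		WARN_ON(1);
	}
}
```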
401 static void omap_dma_clear_lch(struct omap_dmadev *od, int lch) in omap_dma_clear_lch() argument
406 c = od->lch_map[lch]; in omap_dma_clear_lch()
410 for (i = CSDP; i <= od->cfg->lch_end; i++) in omap_dma_clear_lch()
414 static void omap_dma_assign(struct omap_dmadev *od, struct omap_chan *c, in omap_dma_assign() argument
417 c->channel_base = od->base + od->plat->channel_stride * lch; in omap_dma_assign()
419 od->lch_map[lch] = c; in omap_dma_assign()
424 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_start() local
427 if (__dma_omap15xx(od->plat->dma_attr)) in omap_dma_start()
449 } else if (od->ll123_supported) { in omap_dma_start()
487 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_stop() local
496 if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) { in omap_dma_stop()
499 sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG); in omap_dma_stop()
502 omap_dma_glbl_write(od, OCP_SYSCONFIG, val); in omap_dma_stop()
511 omap_dma_glbl_write(od, OCP_SYSCONFIG, sysconfig); in omap_dma_stop()
525 if (!__dma_omap15xx(od->plat->dma_attr) && c->cyclic) { in omap_dma_stop()
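The DMA_ERRATA_i541 branch in omap_dma_stop() has to hold the OCP interface out of standby while a source-triggered channel is disabled, then put the saved OCP_SYSCONFIG value back. A sketch of that sequence, reconstructed from the fragments above; the MIDLEMODE constants are assumed to be the ones exported by include/linux/omap-dma.h, and omap_dma_chan_read/write are the per-channel counterparts of the global accessors.

```c
uint32_t val = omap_dma_chan_read(c, CCR);

if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) {
	uint32_t sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG);

	/* Force no-standby while the channel drains */
	val = sysconfig & ~DMA_SYSCONFIG_MIDLEMODE_MASK;
	val |= DMA_SYSCONFIG_MIDLEMODE(DMA_IDLEMODE_NO_IDLE);
	omap_dma_glbl_write(od, OCP_SYSCONFIG, val);

	val = omap_dma_chan_read(c, CCR);
	val &= ~CCR_ENABLE;
	omap_dma_chan_write(c, CCR, val);

	/* ... poll CCR until the channel has actually stopped ... */

	omap_dma_glbl_write(od, OCP_SYSCONFIG, sysconfig);
}
```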
633 struct omap_dmadev *od = devid; in omap_dma_irq() local
636 spin_lock(&od->irq_lock); in omap_dma_irq()
638 status = omap_dma_glbl_read(od, IRQSTATUS_L1); in omap_dma_irq()
639 status &= od->irq_enable_mask; in omap_dma_irq()
641 spin_unlock(&od->irq_lock); in omap_dma_irq()
653 c = od->lch_map[channel]; in omap_dma_irq()
656 dev_err(od->ddev.dev, "invalid channel %u\n", channel); in omap_dma_irq()
661 omap_dma_glbl_write(od, IRQSTATUS_L1, mask); in omap_dma_irq()
666 spin_unlock(&od->irq_lock); in omap_dma_irq()
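Pieced together, the interrupt-handler fragments above form a classic mask-and-dispatch loop: read the level-1 status, drop bits for channels this driver did not enable, then ack and service each set bit under irq_lock. A sketch consistent with those fragments; omap_dma_callback is assumed from context as the per-channel completion handler.

```c
static irqreturn_t omap_dma_irq(int irq, void *devid)
{
	struct omap_dmadev *od = devid;
	unsigned status, channel;

	spin_lock(&od->irq_lock);

	status = omap_dma_glbl_read(od, IRQSTATUS_L1);
	status &= od->irq_enable_mask;
	if (status == 0) {
		spin_unlock(&od->irq_lock);
		return IRQ_NONE;
	}

	while ((channel = ffs(status)) != 0) {
		unsigned mask;
		struct omap_chan *c;

		channel -= 1;		/* ffs() is 1-based */
		mask = BIT(channel);
		status &= ~mask;

		c = od->lch_map[channel];
		if (c == NULL) {
			dev_err(od->ddev.dev, "invalid channel %u\n", channel);
			continue;
		}

		/* Ack before handling so a new event is not lost */
		omap_dma_glbl_write(od, IRQSTATUS_L1, mask);

		omap_dma_callback(channel, 0, c);
	}

	spin_unlock(&od->irq_lock);

	return IRQ_HANDLED;
}
```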
671 static int omap_dma_get_lch(struct omap_dmadev *od, int *lch) in omap_dma_get_lch() argument
675 mutex_lock(&od->lch_lock); in omap_dma_get_lch()
676 channel = find_first_zero_bit(od->lch_bitmap, od->lch_count); in omap_dma_get_lch()
677 if (channel >= od->lch_count) in omap_dma_get_lch()
679 set_bit(channel, od->lch_bitmap); in omap_dma_get_lch()
680 mutex_unlock(&od->lch_lock); in omap_dma_get_lch()
682 omap_dma_clear_lch(od, channel); in omap_dma_get_lch()
688 mutex_unlock(&od->lch_lock); in omap_dma_get_lch()
694 static void omap_dma_put_lch(struct omap_dmadev *od, int lch) in omap_dma_put_lch() argument
696 omap_dma_clear_lch(od, lch); in omap_dma_put_lch()
697 mutex_lock(&od->lch_lock); in omap_dma_put_lch()
698 clear_bit(lch, od->lch_bitmap); in omap_dma_put_lch()
699 mutex_unlock(&od->lch_lock); in omap_dma_put_lch()
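omap_dma_get_lch()/omap_dma_put_lch() implement a mutex-protected bitmap allocator over the logical channels, clearing the channel's registers on both allocation and release. A sketch reconstructed from the fragments above; the exact error path is only partly visible in the listing, so the -EBUSY return is an assumption.

```c
static int omap_dma_get_lch(struct omap_dmadev *od, int *lch)
{
	int channel;

	mutex_lock(&od->lch_lock);
	channel = find_first_zero_bit(od->lch_bitmap, od->lch_count);
	if (channel >= od->lch_count)
		goto out_busy;			/* all channels in use */
	set_bit(channel, od->lch_bitmap);
	mutex_unlock(&od->lch_lock);

	omap_dma_clear_lch(od, channel);	/* start from clean registers */
	*lch = channel;
	return 0;

out_busy:
	mutex_unlock(&od->lch_lock);
	*lch = -EINVAL;
	return -EBUSY;
}

static void omap_dma_put_lch(struct omap_dmadev *od, int lch)
{
	omap_dma_clear_lch(od, lch);
	mutex_lock(&od->lch_lock);
	clear_bit(lch, od->lch_bitmap);
	mutex_unlock(&od->lch_lock);
}
```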
702 static inline bool omap_dma_legacy(struct omap_dmadev *od) in omap_dma_legacy() argument
704 return IS_ENABLED(CONFIG_ARCH_OMAP1) && od->legacy; in omap_dma_legacy()
709 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_alloc_chan_resources() local
711 struct device *dev = od->ddev.dev; in omap_dma_alloc_chan_resources()
714 if (omap_dma_legacy(od)) { in omap_dma_alloc_chan_resources()
718 ret = omap_dma_get_lch(od, &c->dma_ch); in omap_dma_alloc_chan_resources()
724 omap_dma_assign(od, c, c->dma_ch); in omap_dma_alloc_chan_resources()
726 if (!omap_dma_legacy(od)) { in omap_dma_alloc_chan_resources()
729 spin_lock_irq(&od->irq_lock); in omap_dma_alloc_chan_resources()
731 omap_dma_glbl_write(od, IRQSTATUS_L1, val); in omap_dma_alloc_chan_resources()
732 od->irq_enable_mask |= val; in omap_dma_alloc_chan_resources()
733 omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask); in omap_dma_alloc_chan_resources()
735 val = omap_dma_glbl_read(od, IRQENABLE_L0); in omap_dma_alloc_chan_resources()
737 omap_dma_glbl_write(od, IRQENABLE_L0, val); in omap_dma_alloc_chan_resources()
738 spin_unlock_irq(&od->irq_lock); in omap_dma_alloc_chan_resources()
743 if (__dma_omap16xx(od->plat->dma_attr)) { in omap_dma_alloc_chan_resources()
754 if (od->plat->errata & DMA_ERRATA_IFRAME_BUFFERING) in omap_dma_alloc_chan_resources()
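On non-legacy (OMAP2+) setups, channel allocation also routes the channel's interrupt to the MPU: ack any stale L1 status, add the channel to the driver's enable mask, and mask the same bit on the L0 line. A sketch of that block, consistent with the fragments above; the L0 line serving the DSP side is an assumption from the hardware's two interrupt outputs.

```c
if (!omap_dma_legacy(od)) {
	unsigned val;

	spin_lock_irq(&od->irq_lock);
	val = BIT(c->dma_ch);
	omap_dma_glbl_write(od, IRQSTATUS_L1, val);	/* clear stale status */
	od->irq_enable_mask |= val;
	omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);

	val = omap_dma_glbl_read(od, IRQENABLE_L0);
	val &= ~BIT(c->dma_ch);				/* keep it off L0 */
	omap_dma_glbl_write(od, IRQENABLE_L0, val);
	spin_unlock_irq(&od->irq_lock);
}
```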
762 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_free_chan_resources() local
765 if (!omap_dma_legacy(od)) { in omap_dma_free_chan_resources()
766 spin_lock_irq(&od->irq_lock); in omap_dma_free_chan_resources()
767 od->irq_enable_mask &= ~BIT(c->dma_ch); in omap_dma_free_chan_resources()
768 omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask); in omap_dma_free_chan_resources()
769 spin_unlock_irq(&od->irq_lock); in omap_dma_free_chan_resources()
773 od->lch_map[c->dma_ch] = NULL; in omap_dma_free_chan_resources()
776 if (omap_dma_legacy(od)) in omap_dma_free_chan_resources()
779 omap_dma_put_lch(od, c->dma_ch); in omap_dma_free_chan_resources()
781 dev_dbg(od->ddev.dev, "freeing channel %u used for %u\n", c->dma_ch, in omap_dma_free_chan_resources()
825 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_chan_read_3_3() local
829 if (val == 0 && od->plat->errata & DMA_ERRATA_3_3) in omap_dma_chan_read_3_3()
837 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_get_src_pos() local
840 if (__dma_omap15xx(od->plat->dma_attr)) { in omap_dma_get_src_pos()
863 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_get_dst_pos() local
866 if (__dma_omap15xx(od->plat->dma_attr)) { in omap_dma_get_dst_pos()
963 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_prep_slave_sg() local
1084 if (od->plat->errata & DMA_ERRATA_PARALLEL_CHANNELS) in omap_dma_prep_slave_sg()
1100 d->using_ll = od->ll123_supported; in omap_dma_prep_slave_sg()
1110 osg->t2_desc = dma_pool_alloc(od->desc_pool, GFP_ATOMIC, in omap_dma_prep_slave_sg()
1130 dma_pool_free(od->desc_pool, osg->t2_desc, in omap_dma_prep_slave_sg()
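When linked-list (LinkedList1/2/3) mode is available, prep_slave_sg allocates one type-2 hardware descriptor per SG entry from the dma_pool created at probe time, and releases them all if any allocation fails. A condensed sketch, assuming the osg->t2_desc/t2_desc_paddr fields and an omap_dma_fill_type2_desc() helper implied by the listing; sgl, sglen, dir, and d come from the surrounding function.

```c
d->using_ll = od->ll123_supported;

for_each_sg(sgl, sgent, sglen, i) {
	struct omap_sg *osg = &d->sg[i];

	if (d->using_ll) {
		osg->t2_desc = dma_pool_alloc(od->desc_pool, GFP_ATOMIC,
					      &osg->t2_desc_paddr);
		if (!osg->t2_desc) {
			/* fall back to non-LL mode for this transfer */
			d->using_ll = false;
			ll_failed = true;
			continue;
		}
		omap_dma_fill_type2_desc(d, i, dir, (i == sglen - 1));
	}
}

/* Release the pool entries if any allocation failed */
if (ll_failed) {
	for (i = 0; i < sglen; i++) {
		struct omap_sg *osg = &d->sg[i];

		if (osg->t2_desc) {
			dma_pool_free(od->desc_pool, osg->t2_desc,
				      osg->t2_desc_paddr);
			osg->t2_desc = NULL;
		}
	}
}
```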
1144 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_prep_dma_cyclic() local
1231 if (__dma_omap15xx(od->plat->dma_attr)) in omap_dma_prep_dma_cyclic()
1428 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_pause() local
1433 spin_lock_irqsave(&od->irq_lock, flags); in omap_dma_pause()
1473 spin_unlock_irqrestore(&od->irq_lock, flags); in omap_dma_pause()
1481 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_resume() local
1485 spin_lock_irqsave(&od->irq_lock, flags); in omap_dma_resume()
1497 spin_unlock_irqrestore(&od->irq_lock, flags); in omap_dma_resume()
1502 static int omap_dma_chan_init(struct omap_dmadev *od) in omap_dma_chan_init() argument
1510 c->reg_map = od->reg_map; in omap_dma_chan_init()
1512 vchan_init(&c->vc, &od->ddev); in omap_dma_chan_init()
1517 static void omap_dma_free(struct omap_dmadev *od) in omap_dma_free() argument
1519 while (!list_empty(&od->ddev.channels)) { in omap_dma_free()
1520 struct omap_chan *c = list_first_entry(&od->ddev.channels, in omap_dma_free()
1530 static bool omap_dma_busy(struct omap_dmadev *od) in omap_dma_busy() argument
1536 lch = find_next_bit(od->lch_bitmap, od->lch_count, lch + 1); in omap_dma_busy()
1537 if (lch >= od->lch_count) in omap_dma_busy()
1539 c = od->lch_map[lch]; in omap_dma_busy()
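omap_dma_busy() walks the allocation bitmap and reports whether any mapped channel still has CCR_ENABLE set; the PM notifiers below use it to veto a low-power transition while transfers are running. A sketch consistent with the fragments above.

```c
static bool omap_dma_busy(struct omap_dmadev *od)
{
	struct omap_chan *c;
	int lch = -1;

	while (1) {
		lch = find_next_bit(od->lch_bitmap, od->lch_count, lch + 1);
		if (lch >= od->lch_count)
			break;
		c = od->lch_map[lch];
		if (!c)
			continue;
		if (omap_dma_chan_read(c, CCR) & CCR_ENABLE)
			return true;	/* channel still running */
	}

	return false;
}
```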
1553 struct omap_dmadev *od; in omap_dma_busy_notifier() local
1555 od = container_of(nb, struct omap_dmadev, nb); in omap_dma_busy_notifier()
1559 if (omap_dma_busy(od)) in omap_dma_busy_notifier()
1575 static void omap_dma_context_save(struct omap_dmadev *od) in omap_dma_context_save() argument
1577 od->context.irqenable_l0 = omap_dma_glbl_read(od, IRQENABLE_L0); in omap_dma_context_save()
1578 od->context.irqenable_l1 = omap_dma_glbl_read(od, IRQENABLE_L1); in omap_dma_context_save()
1579 od->context.ocp_sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG); in omap_dma_context_save()
1580 od->context.gcr = omap_dma_glbl_read(od, GCR); in omap_dma_context_save()
1583 static void omap_dma_context_restore(struct omap_dmadev *od) in omap_dma_context_restore() argument
1587 omap_dma_glbl_write(od, GCR, od->context.gcr); in omap_dma_context_restore()
1588 omap_dma_glbl_write(od, OCP_SYSCONFIG, od->context.ocp_sysconfig); in omap_dma_context_restore()
1589 omap_dma_glbl_write(od, IRQENABLE_L0, od->context.irqenable_l0); in omap_dma_context_restore()
1590 omap_dma_glbl_write(od, IRQENABLE_L1, od->context.irqenable_l1); in omap_dma_context_restore()
1593 if (od->plat->errata & DMA_ROMCODE_BUG) in omap_dma_context_restore()
1594 omap_dma_glbl_write(od, IRQSTATUS_L0, 0); in omap_dma_context_restore()
1597 for (i = 0; i < od->lch_count; i++) in omap_dma_context_restore()
1598 omap_dma_clear_lch(od, i); in omap_dma_context_restore()
1605 struct omap_dmadev *od; in omap_dma_context_notifier() local
1607 od = container_of(nb, struct omap_dmadev, nb); in omap_dma_context_notifier()
1611 if (omap_dma_busy(od)) in omap_dma_context_notifier()
1613 omap_dma_context_save(od); in omap_dma_context_notifier()
1618 omap_dma_context_restore(od); in omap_dma_context_notifier()
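The context notifier saves the four global registers on cluster power-down (refusing the transition if a channel is busy) and restores them on exit. A sketch of the dispatch, assuming the standard <linux/cpu_pm.h> events; mapping ENTER_FAILED to "no restore needed" is an assumption from the visible fragments.

```c
static int omap_dma_context_notifier(struct notifier_block *nb,
				     unsigned long cmd, void *v)
{
	struct omap_dmadev *od = container_of(nb, struct omap_dmadev, nb);

	switch (cmd) {
	case CPU_CLUSTER_PM_ENTER:
		if (omap_dma_busy(od))
			return NOTIFY_BAD;	/* veto: transfers running */
		omap_dma_context_save(od);
		break;
	case CPU_CLUSTER_PM_ENTER_FAILED:	/* context was never lost */
		break;
	case CPU_CLUSTER_PM_EXIT:
		omap_dma_context_restore(od);
		break;
	}

	return NOTIFY_OK;
}
```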
1625 static void omap_dma_init_gcr(struct omap_dmadev *od, int arb_rate, in omap_dma_init_gcr() argument
1631 if (!od->cfg->rw_priority) in omap_dma_init_gcr()
1643 omap_dma_glbl_write(od, GCR, val); in omap_dma_init_gcr()
1659 struct omap_dmadev *od; in omap_dma_probe() local
1663 od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL); in omap_dma_probe()
1664 if (!od) in omap_dma_probe()
1667 od->base = devm_platform_ioremap_resource(pdev, 0); in omap_dma_probe()
1668 if (IS_ERR(od->base)) in omap_dma_probe()
1669 return PTR_ERR(od->base); in omap_dma_probe()
1673 od->cfg = conf; in omap_dma_probe()
1674 od->plat = dev_get_platdata(&pdev->dev); in omap_dma_probe()
1675 if (!od->plat) { in omap_dma_probe()
1680 od->cfg = &default_cfg; in omap_dma_probe()
1682 od->plat = omap_get_plat_info(); in omap_dma_probe()
1683 if (!od->plat) in omap_dma_probe()
1689 od->reg_map = od->plat->reg_map; in omap_dma_probe()
1691 dma_cap_set(DMA_SLAVE, od->ddev.cap_mask); in omap_dma_probe()
1692 dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask); in omap_dma_probe()
1693 dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask); in omap_dma_probe()
1694 dma_cap_set(DMA_INTERLEAVE, od->ddev.cap_mask); in omap_dma_probe()
1695 od->ddev.device_alloc_chan_resources = omap_dma_alloc_chan_resources; in omap_dma_probe()
1696 od->ddev.device_free_chan_resources = omap_dma_free_chan_resources; in omap_dma_probe()
1697 od->ddev.device_tx_status = omap_dma_tx_status; in omap_dma_probe()
1698 od->ddev.device_issue_pending = omap_dma_issue_pending; in omap_dma_probe()
1699 od->ddev.device_prep_slave_sg = omap_dma_prep_slave_sg; in omap_dma_probe()
1700 od->ddev.device_prep_dma_cyclic = omap_dma_prep_dma_cyclic; in omap_dma_probe()
1701 od->ddev.device_prep_dma_memcpy = omap_dma_prep_dma_memcpy; in omap_dma_probe()
1702 od->ddev.device_prep_interleaved_dma = omap_dma_prep_dma_interleaved; in omap_dma_probe()
1703 od->ddev.device_config = omap_dma_slave_config; in omap_dma_probe()
1704 od->ddev.device_pause = omap_dma_pause; in omap_dma_probe()
1705 od->ddev.device_resume = omap_dma_resume; in omap_dma_probe()
1706 od->ddev.device_terminate_all = omap_dma_terminate_all; in omap_dma_probe()
1707 od->ddev.device_synchronize = omap_dma_synchronize; in omap_dma_probe()
1708 od->ddev.src_addr_widths = OMAP_DMA_BUSWIDTHS; in omap_dma_probe()
1709 od->ddev.dst_addr_widths = OMAP_DMA_BUSWIDTHS; in omap_dma_probe()
1710 od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in omap_dma_probe()
1711 if (__dma_omap15xx(od->plat->dma_attr)) in omap_dma_probe()
1712 od->ddev.residue_granularity = in omap_dma_probe()
1715 od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in omap_dma_probe()
1716 od->ddev.max_burst = SZ_16M - 1; /* CCEN: 24bit unsigned */ in omap_dma_probe()
1717 od->ddev.dev = &pdev->dev; in omap_dma_probe()
1718 INIT_LIST_HEAD(&od->ddev.channels); in omap_dma_probe()
1719 mutex_init(&od->lch_lock); in omap_dma_probe()
1720 spin_lock_init(&od->lock); in omap_dma_probe()
1721 spin_lock_init(&od->irq_lock); in omap_dma_probe()
1724 od->dma_requests = OMAP_SDMA_REQUESTS; in omap_dma_probe()
1727 &od->dma_requests)) { in omap_dma_probe()
1735 od->lch_count = od->plat->dma_attr->lch_count; in omap_dma_probe()
1736 if (unlikely(!od->lch_count)) in omap_dma_probe()
1737 od->lch_count = OMAP_SDMA_CHANNELS; in omap_dma_probe()
1739 &od->lch_count)) { in omap_dma_probe()
1743 od->lch_count = OMAP_SDMA_CHANNELS; in omap_dma_probe()
1752 bitmap_from_arr32(od->lch_bitmap, &val, od->lch_count); in omap_dma_probe()
1754 if (od->plat->dma_attr->dev_caps & HS_CHANNELS_RESERVED) in omap_dma_probe()
1755 bitmap_set(od->lch_bitmap, 0, 2); in omap_dma_probe()
1757 od->lch_map = devm_kcalloc(&pdev->dev, od->lch_count, in omap_dma_probe()
1758 sizeof(*od->lch_map), in omap_dma_probe()
1760 if (!od->lch_map) in omap_dma_probe()
1763 for (i = 0; i < od->dma_requests; i++) { in omap_dma_probe()
1764 rc = omap_dma_chan_init(od); in omap_dma_probe()
1766 omap_dma_free(od); in omap_dma_probe()
1774 od->legacy = true; in omap_dma_probe()
1777 od->irq_enable_mask = 0; in omap_dma_probe()
1778 omap_dma_glbl_write(od, IRQENABLE_L1, 0); in omap_dma_probe()
1781 IRQF_SHARED, "omap-dma-engine", od); in omap_dma_probe()
1783 omap_dma_free(od); in omap_dma_probe()
1788 if (omap_dma_glbl_read(od, CAPS_0) & CAPS_0_SUPPORT_LL123) in omap_dma_probe()
1789 od->ll123_supported = true; in omap_dma_probe()
1791 od->ddev.filter.map = od->plat->slave_map; in omap_dma_probe()
1792 od->ddev.filter.mapcnt = od->plat->slavecnt; in omap_dma_probe()
1793 od->ddev.filter.fn = omap_dma_filter_fn; in omap_dma_probe()
1795 if (od->ll123_supported) { in omap_dma_probe()
1796 od->desc_pool = dma_pool_create(dev_name(&pdev->dev), in omap_dma_probe()
1800 if (!od->desc_pool) { in omap_dma_probe()
1803 od->ll123_supported = false; in omap_dma_probe()
1807 rc = dma_async_device_register(&od->ddev); in omap_dma_probe()
1811 omap_dma_free(od); in omap_dma_probe()
1815 platform_set_drvdata(pdev, od); in omap_dma_probe()
1818 omap_dma_info.dma_cap = od->ddev.cap_mask; in omap_dma_probe()
1825 dma_async_device_unregister(&od->ddev); in omap_dma_probe()
1826 omap_dma_free(od); in omap_dma_probe()
1830 omap_dma_init_gcr(od, DMA_DEFAULT_ARB_RATE, DMA_DEFAULT_FIFO_DEPTH, 0); in omap_dma_probe()
1832 if (od->cfg->needs_busy_check) { in omap_dma_probe()
1833 od->nb.notifier_call = omap_dma_busy_notifier; in omap_dma_probe()
1834 cpu_pm_register_notifier(&od->nb); in omap_dma_probe()
1835 } else if (od->cfg->may_lose_context) { in omap_dma_probe()
1836 od->nb.notifier_call = omap_dma_context_notifier; in omap_dma_probe()
1837 cpu_pm_register_notifier(&od->nb); in omap_dma_probe()
1841 od->ll123_supported ? " (LinkedList1/2/3 supported)" : ""); in omap_dma_probe()
1848 struct omap_dmadev *od = platform_get_drvdata(pdev); in omap_dma_remove() local
1851 if (od->cfg->may_lose_context) in omap_dma_remove()
1852 cpu_pm_unregister_notifier(&od->nb); in omap_dma_remove()
1858 devm_free_irq(&pdev->dev, irq, od); in omap_dma_remove()
1860 dma_async_device_unregister(&od->ddev); in omap_dma_remove()
1862 if (!omap_dma_legacy(od)) { in omap_dma_remove()
1864 omap_dma_glbl_write(od, IRQENABLE_L0, 0); in omap_dma_remove()
1867 if (od->ll123_supported) in omap_dma_remove()
1868 dma_pool_destroy(od->desc_pool); in omap_dma_remove()
1870 omap_dma_free(od); in omap_dma_remove()
1928 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_filter_fn() local
1932 if (req <= od->dma_requests) { in omap_dma_filter_fn()
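The filter function lets legacy (non-DT) clients pick a channel by request line: it checks that the channel really belongs to this driver, then records the request number if it is within range. A sketch consistent with the final fragment; omap_dma_driver is assumed to be the platform driver declared elsewhere in the file.

```c
static bool omap_dma_filter_fn(struct dma_chan *chan, void *param)
{
	if (chan->device->dev->driver == &omap_dma_driver.driver) {
		struct omap_dmadev *od = to_omap_dma_dev(chan->device);
		struct omap_chan *c = to_omap_dma_chan(chan);
		unsigned req = *(unsigned *)param;

		if (req <= od->dma_requests) {
			c->dma_sig = req;	/* remember the request line */
			return true;
		}
	}

	return false;
}
```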