Lines matching full:sdma (identifier search; all hits are in the Marvell Prestera switch driver's SDMA rx/tx code, prestera_rxtx.c)

102 	/* protect SDMA with concurrent access from multiple CPUs */
107 struct prestera_sdma sdma; member
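
Taken together, the hits reference a fixed set of struct prestera_sdma fields. A minimal sketch of the structure those references imply follows; only the field names and the lock comment come from the matched lines, while the queue-count constant and the field ordering are assumptions.

    struct prestera_sdma {
        struct prestera_rx_ring rx_ring[PRESTERA_SDMA_RX_QUEUE_NUM]; /* constant name assumed */
        struct prestera_tx_ring tx_ring;
        struct prestera_switch *sw;        /* owning switch, used for register access */
        struct dma_pool *desc_pool;        /* descriptor allocations in prestera_sdma_buf_init() */
        struct work_struct tx_work;        /* TX recycle work */
        struct napi_struct rx_napi;
        struct net_device *napi_dev;       /* dummy netdev backing rx_napi */
        u32 map_addr;                      /* offset applied in prestera_sdma_map() */
        u64 dma_mask;
        /* protect SDMA against concurrent access from multiple CPUs */
        spinlock_t tx_lock;
    };
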
110 static int prestera_sdma_buf_init(struct prestera_sdma *sdma, in prestera_sdma_buf_init() argument
116 desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma); in prestera_sdma_buf_init()
128 static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa) in prestera_sdma_map() argument
130 return sdma->map_addr + pa; in prestera_sdma_map()
133 static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_init() argument
142 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_rx_desc_init()
150 static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_set_next() argument
154 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_rx_desc_set_next()
157 static int prestera_sdma_rx_skb_alloc(struct prestera_sdma *sdma, in prestera_sdma_rx_skb_alloc() argument
160 struct device *dev = sdma->sw->dev->dev; in prestera_sdma_rx_skb_alloc()
187 static struct sk_buff *prestera_sdma_rx_skb_get(struct prestera_sdma *sdma, in prestera_sdma_rx_skb_get() argument
195 err = prestera_sdma_rx_skb_alloc(sdma, buf); in prestera_sdma_rx_skb_get()
207 prestera_sdma_rx_desc_init(sdma, buf->desc, buf->buf_dma); in prestera_sdma_rx_skb_get()
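
prestera_sdma_rx_skb_get() appears to swap the filled skb out of its ring slot: a replacement buffer is allocated and mapped first, and only then is the old skb handed back while the descriptor is re-initialized with the new mapping. A sketch of that swap; the skb field, the buffer-size constant, and the failure policy are assumptions not shown by the hits.

    static struct sk_buff *prestera_sdma_rx_skb_get(struct prestera_sdma *sdma,
                                                    struct prestera_sdma_buf *buf)
    {
        struct device *dev = sdma->sw->dev->dev;
        struct sk_buff *skb = buf->skb;              /* field name assumed */
        dma_addr_t old_dma = buf->buf_dma;
        int err;

        err = prestera_sdma_rx_skb_alloc(sdma, buf); /* map a replacement buffer */
        if (err)
            return NULL;                             /* old skb stays in the ring (policy assumed) */

        /* release the old mapping and point the descriptor at the new one */
        dma_unmap_single(dev, old_dma, PRESTERA_SDMA_RX_BUF_SIZE, /* size constant assumed */
                         DMA_FROM_DEVICE);
        prestera_sdma_rx_desc_init(sdma, buf->desc, buf->buf_dma);

        return skb;
    }
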
212 static int prestera_rxtx_process_skb(struct prestera_sdma *sdma, in prestera_rxtx_process_skb() argument
231 port = prestera_port_find_by_hwid(sdma->sw, dev_id, hw_port); in prestera_rxtx_process_skb()
233 dev_warn_ratelimited(prestera_dev(sdma->sw), "received pkt for non-existent port(%u, %u)\n", in prestera_rxtx_process_skb()
276 struct prestera_sdma *sdma; in prestera_sdma_rx_poll() local
287 sdma = container_of(napi, struct prestera_sdma, rx_napi); in prestera_sdma_rx_poll()
291 struct prestera_rx_ring *ring = &sdma->rx_ring[q]; in prestera_sdma_rx_poll()
311 skb = prestera_sdma_rx_skb_get(sdma, buf); in prestera_sdma_rx_poll()
315 if (unlikely(prestera_rxtx_process_skb(sdma, skb))) in prestera_sdma_rx_poll()
325 prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG, in prestera_sdma_rx_poll()
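
The prestera_sdma_rx_poll() hits outline a conventional NAPI receive loop: resolve the sdma context from the napi struct, walk each RX ring, refill and hand off skbs, and unmask RX interrupts once the budget is not exhausted. A condensed sketch of that flow; the queue-count constant, the ring walk, and the interrupt-mask value are assumptions.

    static int prestera_sdma_rx_poll(struct napi_struct *napi, int budget)
    {
        struct prestera_sdma *sdma;
        int rxed = 0;
        int q;

        sdma = container_of(napi, struct prestera_sdma, rx_napi);

        for (q = 0; q < PRESTERA_SDMA_RX_QUEUE_NUM && rxed < budget; q++) {
            struct prestera_rx_ring *ring = &sdma->rx_ring[q];
            struct prestera_sdma_buf *buf = ring->next_rx;   /* ring walk elided, field assumed */
            struct sk_buff *skb;

            skb = prestera_sdma_rx_skb_get(sdma, buf);       /* detach skb, refill the slot */
            if (!skb)
                break;

            if (unlikely(prestera_rxtx_process_skb(sdma, skb)))
                dev_kfree_skb_any(skb);                      /* drop on error (policy assumed) */

            rxed++;
        }

        if (rxed < budget && napi_complete_done(napi, rxed))
            /* re-arm the RX interrupts that the hard IRQ path masked */
            prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG,
                           PRESTERA_SDMA_RX_INTR_MASK_ALL);  /* mask value assumed */

        return rxed;
    }
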
333 static void prestera_sdma_rx_fini(struct prestera_sdma *sdma) in prestera_sdma_rx_fini() argument
339 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_STATUS_REG, in prestera_sdma_rx_fini()
343 struct prestera_rx_ring *ring = &sdma->rx_ring[q]; in prestera_sdma_rx_fini()
352 dma_pool_free(sdma->desc_pool, buf->desc, in prestera_sdma_rx_fini()
359 dma_unmap_single(sdma->sw->dev->dev, in prestera_sdma_rx_fini()
367 static int prestera_sdma_rx_init(struct prestera_sdma *sdma) in prestera_sdma_rx_init() argument
375 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_STATUS_REG, in prestera_sdma_rx_init()
380 struct prestera_rx_ring *ring = &sdma->rx_ring[q]; in prestera_sdma_rx_init()
394 err = prestera_sdma_buf_init(sdma, next); in prestera_sdma_rx_init()
398 err = prestera_sdma_rx_skb_alloc(sdma, next); in prestera_sdma_rx_init()
402 prestera_sdma_rx_desc_init(sdma, next->desc, in prestera_sdma_rx_init()
405 prestera_sdma_rx_desc_set_next(sdma, prev->desc, in prestera_sdma_rx_init()
413 prestera_sdma_rx_desc_set_next(sdma, tail->desc, head->desc_dma); in prestera_sdma_rx_init()
415 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_DESC_REG(q), in prestera_sdma_rx_init()
416 prestera_sdma_map(sdma, head->desc_dma)); in prestera_sdma_rx_init()
422 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_STATUS_REG, in prestera_sdma_rx_init()
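
The prestera_sdma_rx_init() hits show each RX queue being stopped via the status register, built as a circular chain of descriptors, and then pointed at from the hardware. A sketch of the per-queue loop those lines imply; the ring size, the bufs array name, and the error handling are assumptions.

    /* inside prestera_sdma_rx_init(), for each queue q */
    struct prestera_rx_ring *ring = &sdma->rx_ring[q];
    struct prestera_sdma_buf *head = &ring->bufs[0];                      /* array name assumed */
    struct prestera_sdma_buf *tail = &ring->bufs[PRESTERA_SDMA_RX_DESC_PER_Q - 1];
    struct prestera_sdma_buf *prev = NULL;
    int b, err;

    for (b = 0; b < PRESTERA_SDMA_RX_DESC_PER_Q; b++) {                   /* ring size assumed */
        struct prestera_sdma_buf *next = &ring->bufs[b];

        err = prestera_sdma_buf_init(sdma, next);            /* descriptor from desc_pool */
        if (err)
            return err;

        err = prestera_sdma_rx_skb_alloc(sdma, next);        /* map a fresh receive skb */
        if (err)
            return err;

        prestera_sdma_rx_desc_init(sdma, next->desc, next->buf_dma);

        if (prev)                                            /* chain prev -> next */
            prestera_sdma_rx_desc_set_next(sdma, prev->desc, next->desc_dma);
        prev = next;
    }

    /* close the ring and tell the hardware where the chain starts */
    prestera_sdma_rx_desc_set_next(sdma, tail->desc, head->desc_dma);
    prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_DESC_REG(q),
                   prestera_sdma_map(sdma, head->desc_dma));

    /* the queue is then re-enabled through PRESTERA_SDMA_RX_QUEUE_STATUS_REG (value elided) */
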
428 static void prestera_sdma_tx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_tx_desc_init() argument
435 static void prestera_sdma_tx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_tx_desc_set_next() argument
439 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_tx_desc_set_next()
442 static void prestera_sdma_tx_desc_set_buf(struct prestera_sdma *sdma, in prestera_sdma_tx_desc_set_buf() argument
450 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_tx_desc_set_buf()
466 static int prestera_sdma_tx_buf_map(struct prestera_sdma *sdma, in prestera_sdma_tx_buf_map() argument
470 struct device *dma_dev = sdma->sw->dev->dev; in prestera_sdma_tx_buf_map()
483 static void prestera_sdma_tx_buf_unmap(struct prestera_sdma *sdma, in prestera_sdma_tx_buf_unmap() argument
486 struct device *dma_dev = sdma->sw->dev->dev; in prestera_sdma_tx_buf_unmap()
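
prestera_sdma_tx_buf_map()/_unmap() bracket a streaming DMA mapping of the outgoing skb, using the switch's underlying struct device. A sketch of the pair; the stored fields follow the usual pattern and are assumptions beyond what the hits show.

    static int prestera_sdma_tx_buf_map(struct prestera_sdma *sdma,
                                        struct prestera_sdma_buf *buf,
                                        struct sk_buff *skb)
    {
        struct device *dma_dev = sdma->sw->dev->dev;
        dma_addr_t dma;

        dma = dma_map_single(dma_dev, skb->data, skb->len, DMA_TO_DEVICE);
        if (dma_mapping_error(dma_dev, dma))
            return -ENOMEM;

        buf->buf_dma = dma;
        buf->skb = skb;                    /* field name assumed */

        return 0;
    }

    static void prestera_sdma_tx_buf_unmap(struct prestera_sdma *sdma,
                                           struct prestera_sdma_buf *buf)
    {
        struct device *dma_dev = sdma->sw->dev->dev;

        dma_unmap_single(dma_dev, buf->buf_dma, buf->skb->len, DMA_TO_DEVICE);
    }
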
495 struct prestera_sdma *sdma; in prestera_sdma_tx_recycle_work_fn() local
498 sdma = container_of(work, struct prestera_sdma, tx_work); in prestera_sdma_tx_recycle_work_fn()
500 tx_ring = &sdma->tx_ring; in prestera_sdma_tx_recycle_work_fn()
511 prestera_sdma_tx_buf_unmap(sdma, buf); in prestera_sdma_tx_recycle_work_fn()
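
prestera_sdma_tx_recycle_work_fn() resolves the sdma context from the work item and releases buffers the hardware has finished transmitting. A sketch of the per-slot recycling it suggests; the ring size, the completion test, and the skb field are assumptions.

    /* inside prestera_sdma_tx_recycle_work_fn(), after resolving sdma via container_of() */
    tx_ring = &sdma->tx_ring;

    for (b = 0; b < PRESTERA_SDMA_TX_DESC_PER_Q; b++) {         /* ring size assumed */
        struct prestera_sdma_buf *buf = &tx_ring->bufs[b];      /* array name assumed */

        if (!tx_desc_is_done(buf->desc))                        /* completion test assumed */
            continue;

        prestera_sdma_tx_buf_unmap(sdma, buf);                  /* undo the dma_map_single() */
        dev_consume_skb_any(buf->skb);                          /* skb field assumed */
        buf->skb = NULL;
    }
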
522 static int prestera_sdma_tx_init(struct prestera_sdma *sdma) in prestera_sdma_tx_init() argument
525 struct prestera_tx_ring *tx_ring = &sdma->tx_ring; in prestera_sdma_tx_init()
529 INIT_WORK(&sdma->tx_work, prestera_sdma_tx_recycle_work_fn); in prestera_sdma_tx_init()
530 spin_lock_init(&sdma->tx_lock); in prestera_sdma_tx_init()
546 err = prestera_sdma_buf_init(sdma, next); in prestera_sdma_tx_init()
552 prestera_sdma_tx_desc_init(sdma, next->desc); in prestera_sdma_tx_init()
554 prestera_sdma_tx_desc_set_next(sdma, prev->desc, in prestera_sdma_tx_init()
562 prestera_sdma_tx_desc_set_next(sdma, tail->desc, head->desc_dma); in prestera_sdma_tx_init()
567 prestera_write(sdma->sw, PRESTERA_SDMA_TX_QUEUE_DESC_REG, in prestera_sdma_tx_init()
568 prestera_sdma_map(sdma, head->desc_dma)); in prestera_sdma_tx_init()
573 static void prestera_sdma_tx_fini(struct prestera_sdma *sdma) in prestera_sdma_tx_fini() argument
575 struct prestera_tx_ring *ring = &sdma->tx_ring; in prestera_sdma_tx_fini()
579 cancel_work_sync(&sdma->tx_work); in prestera_sdma_tx_fini()
588 dma_pool_free(sdma->desc_pool, buf->desc, in prestera_sdma_tx_fini()
594 dma_unmap_single(sdma->sw->dev->dev, buf->buf_dma, in prestera_sdma_tx_fini()
605 struct prestera_sdma *sdma = arg; in prestera_rxtx_handle_event() local
610 prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG, 0); in prestera_rxtx_handle_event()
611 napi_schedule(&sdma->rx_napi); in prestera_rxtx_handle_event()
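
prestera_rxtx_handle_event() follows the usual NAPI hand-off: mask further RX interrupts at the device, then let the softirq path do the work. A sketch of the handler body implied by those two hits; the prototype and any event filtering around them are assumed.

    static void prestera_rxtx_handle_event(struct prestera_switch *sw,
                                           struct prestera_event *evt, void *arg)
    {
        struct prestera_sdma *sdma = arg;

        /* quiesce RX interrupts until prestera_sdma_rx_poll() unmasks them again */
        prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG, 0);
        napi_schedule(&sdma->rx_napi);
    }
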
616 struct prestera_sdma *sdma = &sw->rxtx->sdma; in prestera_sdma_switch_init() local
629 sdma->dma_mask = dma_get_mask(dev); in prestera_sdma_switch_init()
630 sdma->map_addr = p.map_addr; in prestera_sdma_switch_init()
631 sdma->sw = sw; in prestera_sdma_switch_init()
633 sdma->desc_pool = dma_pool_create("desc_pool", dev, in prestera_sdma_switch_init()
636 if (!sdma->desc_pool) in prestera_sdma_switch_init()
639 err = prestera_sdma_rx_init(sdma); in prestera_sdma_switch_init()
645 err = prestera_sdma_tx_init(sdma); in prestera_sdma_switch_init()
653 sdma); in prestera_sdma_switch_init()
657 sdma->napi_dev = alloc_netdev_dummy(0); in prestera_sdma_switch_init()
658 if (!sdma->napi_dev) { in prestera_sdma_switch_init()
664 netif_napi_add(sdma->napi_dev, &sdma->rx_napi, prestera_sdma_rx_poll); in prestera_sdma_switch_init()
665 napi_enable(&sdma->rx_napi); in prestera_sdma_switch_init()
674 prestera_sdma_tx_fini(sdma); in prestera_sdma_switch_init()
676 prestera_sdma_rx_fini(sdma); in prestera_sdma_switch_init()
678 dma_pool_destroy(sdma->desc_pool); in prestera_sdma_switch_init()
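
prestera_sdma_switch_init() wires everything up in a fixed order, which the hits make fairly explicit: record the DMA mapping parameters, create the descriptor pool, build the RX and TX rings, register the event handler, then attach and enable NAPI on a dummy netdev. A condensed sketch of that sequence; the pool size/alignment arguments and the error labels are assumptions, and the event registration is elided.

    /* inside prestera_sdma_switch_init(); dev and the firmware-provided p are obtained above (elided) */
    sdma->dma_mask = dma_get_mask(dev);
    sdma->map_addr = p.map_addr;
    sdma->sw = sw;

    sdma->desc_pool = dma_pool_create("desc_pool", dev,
                                      sizeof(struct prestera_sdma_desc), /* type name assumed */
                                      16, 0);                            /* align/boundary assumed */
    if (!sdma->desc_pool)
        return -ENOMEM;

    err = prestera_sdma_rx_init(sdma);
    if (err)
        goto err_rx_init;

    err = prestera_sdma_tx_init(sdma);
    if (err)
        goto err_tx_init;

    /* event handler registration elided; it receives sdma as its callback argument */

    sdma->napi_dev = alloc_netdev_dummy(0);
    if (!sdma->napi_dev) {
        err = -ENOMEM;
        goto err_alloc_dummy;
    }

    netif_napi_add(sdma->napi_dev, &sdma->rx_napi, prestera_sdma_rx_poll);
    napi_enable(&sdma->rx_napi);

    return 0;
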
684 struct prestera_sdma *sdma = &sw->rxtx->sdma; in prestera_sdma_switch_fini() local
686 napi_disable(&sdma->rx_napi); in prestera_sdma_switch_fini()
687 netif_napi_del(&sdma->rx_napi); in prestera_sdma_switch_fini()
688 free_netdev(sdma->napi_dev); in prestera_sdma_switch_fini()
691 prestera_sdma_tx_fini(sdma); in prestera_sdma_switch_fini()
692 prestera_sdma_rx_fini(sdma); in prestera_sdma_switch_fini()
693 dma_pool_destroy(sdma->desc_pool); in prestera_sdma_switch_fini()
696 static bool prestera_sdma_is_ready(struct prestera_sdma *sdma) in prestera_sdma_is_ready() argument
698 return !(prestera_read(sdma->sw, PRESTERA_SDMA_TX_QUEUE_START_REG) & 1); in prestera_sdma_is_ready()
701 static int prestera_sdma_tx_wait(struct prestera_sdma *sdma, in prestera_sdma_tx_wait() argument
707 if (prestera_sdma_is_ready(sdma)) in prestera_sdma_tx_wait()
716 static void prestera_sdma_tx_start(struct prestera_sdma *sdma) in prestera_sdma_tx_start() argument
718 prestera_write(sdma->sw, PRESTERA_SDMA_TX_QUEUE_START_REG, 1); in prestera_sdma_tx_start()
719 schedule_work(&sdma->tx_work); in prestera_sdma_tx_start()
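
The prestera_sdma_is_ready()/_tx_wait()/_tx_start() hits describe a start-bit handshake on the TX queue: writing 1 to PRESTERA_SDMA_TX_QUEUE_START_REG kicks the queue, and bit 0 reads back as set until the hardware has finished the previous kick. A sketch of how the wait helper plausibly polls that bit; the retry budget and polling interval are assumptions.

    static int prestera_sdma_tx_wait(struct prestera_sdma *sdma,
                                     struct prestera_tx_ring *tx_ring)
    {
        int tries = 100;                    /* retry budget assumed */

        while (tries--) {
            if (prestera_sdma_is_ready(sdma))
                return 0;                   /* previous TX kick has completed */
            udelay(1);                      /* polling interval assumed */
        }

        return -EBUSY;
    }
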
722 static netdev_tx_t prestera_sdma_xmit(struct prestera_sdma *sdma, in prestera_sdma_xmit() argument
725 struct device *dma_dev = sdma->sw->dev->dev; in prestera_sdma_xmit()
731 spin_lock(&sdma->tx_lock); in prestera_sdma_xmit()
733 tx_ring = &sdma->tx_ring; in prestera_sdma_xmit()
737 schedule_work(&sdma->tx_work); in prestera_sdma_xmit()
744 err = prestera_sdma_tx_buf_map(sdma, buf, skb); in prestera_sdma_xmit()
748 prestera_sdma_tx_desc_set_buf(sdma, buf->desc, buf->buf_dma, skb->len); in prestera_sdma_xmit()
758 err = prestera_sdma_tx_wait(sdma, tx_ring); in prestera_sdma_xmit()
767 prestera_sdma_tx_start(sdma); in prestera_sdma_xmit()
772 prestera_sdma_tx_buf_unmap(sdma, buf); in prestera_sdma_xmit()
778 spin_unlock(&sdma->tx_lock); in prestera_sdma_xmit()
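
Read top to bottom, the prestera_sdma_xmit() hits give the whole transmit path: take tx_lock, kick the recycle work and bail out if the ring has no free slot, DMA-map the skb, attach it to the next descriptor, wait for the previous start bit to clear, then start the queue. A sketch of that sequence; the slot bookkeeping, the ring-full test, and the drop policy are assumptions.

    /* inside prestera_sdma_xmit(), after local declarations (elided) */
    spin_lock(&sdma->tx_lock);

    tx_ring = &sdma->tx_ring;
    buf = &tx_ring->bufs[tx_ring->next_tx];             /* slot bookkeeping assumed */

    if (buf->skb) {                                     /* ring full: let the recycle work drain it */
        schedule_work(&sdma->tx_work);
        goto drop_skb;
    }

    err = prestera_sdma_tx_buf_map(sdma, buf, skb);     /* dma_map_single() of skb->data */
    if (err)
        goto drop_skb;

    prestera_sdma_tx_desc_set_buf(sdma, buf->desc, buf->buf_dma, skb->len);

    err = prestera_sdma_tx_wait(sdma, tx_ring);         /* previous kick must be finished */
    if (err)
        goto drop_skb_unmap;

    prestera_sdma_tx_start(sdma);                       /* set the start bit, queue the recycle work */

    spin_unlock(&sdma->tx_lock);
    return NETDEV_TX_OK;

drop_skb_unmap:
    prestera_sdma_tx_buf_unmap(sdma, buf);
drop_skb:
    dev_kfree_skb_any(skb);                             /* drop policy assumed */
    spin_unlock(&sdma->tx_lock);
    return NETDEV_TX_OK;
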
828 return prestera_sdma_xmit(&port->sw->rxtx->sdma, skb); in prestera_rxtx_xmit()