Lines matching refs:txq
53 static void alx_free_txbuf(struct alx_tx_queue *txq, int entry) in alx_free_txbuf() argument
55 struct alx_buffer *txb = &txq->bufs[entry]; in alx_free_txbuf()
58 dma_unmap_single(txq->dev, in alx_free_txbuf()
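The alx_free_txbuf() fragments above (source lines 53-58) follow the kernel's dma_unmap_{addr,len} bookkeeping pattern. A minimal sketch of the whole routine, reconstructed around those fragments; the size check and the skb release are assumptions based on how the driver uses these helpers elsewhere in the listing:

static void alx_free_txbuf(struct alx_tx_queue *txq, int entry)
{
        struct alx_buffer *txb = &txq->bufs[entry];

        /* Unmap only if this slot actually holds a DMA mapping. */
        if (dma_unmap_len(txb, size)) {
                dma_unmap_single(txq->dev,
                                 dma_unmap_addr(txb, dma),
                                 dma_unmap_len(txb, size),
                                 DMA_TO_DEVICE);
                dma_unmap_len_set(txb, size, 0);
        }

        /* Only the last descriptor of a packet owns the skb. */
        if (txb->skb) {
                dev_kfree_skb_any(txb->skb);
                txb->skb = NULL;
        }
}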
149 return alx->qnapi[r_idx]->txq; in alx_tx_queue_mapping()
152 static struct netdev_queue *alx_get_tx_queue(const struct alx_tx_queue *txq) in alx_get_tx_queue() argument
154 return netdev_get_tx_queue(txq->netdev, txq->queue_idx); in alx_get_tx_queue()
157 static inline int alx_tpd_avail(struct alx_tx_queue *txq) in alx_tpd_avail() argument
159 if (txq->write_idx >= txq->read_idx) in alx_tpd_avail()
160 return txq->count + txq->read_idx - txq->write_idx - 1; in alx_tpd_avail()
161 return txq->read_idx - txq->write_idx - 1; in alx_tpd_avail()
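alx_tpd_avail() (source lines 157-161) is the classic one-slot-reserved ring computation: write_idx == read_idx must unambiguously mean "empty", so one descriptor is never handed out and both branches subtract 1. A standalone, runnable check of the arithmetic (hypothetical helper name, plain userspace C):

#include <assert.h>

/* Same arithmetic as alx_tpd_avail() above. */
static int tpd_avail(int count, int read_idx, int write_idx)
{
        if (write_idx >= read_idx)
                return count + read_idx - write_idx - 1;
        return read_idx - write_idx - 1;
}

int main(void)
{
        assert(tpd_avail(256, 0, 0) == 255);    /* empty: all but the reserved slot */
        assert(tpd_avail(256, 10, 250) == 15);  /* writer ahead of reader */
        assert(tpd_avail(256, 250, 10) == 239); /* writer has wrapped past index 0 */
        assert(tpd_avail(256, 10, 9) == 0);     /* full: write_idx + 1 == read_idx */
        return 0;
}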
164 static bool alx_clean_tx_irq(struct alx_tx_queue *txq) in alx_clean_tx_irq() argument
172 alx = netdev_priv(txq->netdev); in alx_clean_tx_irq()
173 tx_queue = alx_get_tx_queue(txq); in alx_clean_tx_irq()
175 sw_read_idx = txq->read_idx; in alx_clean_tx_irq()
176 hw_read_idx = alx_read_mem16(&alx->hw, txq->c_reg); in alx_clean_tx_irq()
182 skb = txq->bufs[sw_read_idx].skb; in alx_clean_tx_irq()
189 alx_free_txbuf(txq, sw_read_idx); in alx_clean_tx_irq()
191 if (++sw_read_idx == txq->count) in alx_clean_tx_irq()
194 txq->read_idx = sw_read_idx; in alx_clean_tx_irq()
200 alx_tpd_avail(txq) > txq->count / 4) in alx_clean_tx_irq()
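The alx_clean_tx_irq() fragments (source lines 164-201) show the completion side: the software read index chases the hardware consumer index read from c_reg, each finished slot is freed, and the queue is woken only once more than a quarter of the ring is free. A hedged sketch of that loop; the work budget, the byte/packet totals, and netdev_tx_completed_queue() are assumed as the usual counterparts to the NAPI poll at line 306 and the netdev_tx_sent_queue() at line 1557:

static bool alx_clean_tx_irq(struct alx_tx_queue *txq)
{
        struct alx_priv *alx = netdev_priv(txq->netdev);
        struct netdev_queue *tx_queue = alx_get_tx_queue(txq);
        unsigned int total_bytes = 0, total_packets = 0;
        int budget = 64;        /* assumed per-poll work cap */
        u16 hw_read_idx, sw_read_idx;

        sw_read_idx = txq->read_idx;
        hw_read_idx = alx_read_mem16(&alx->hw, txq->c_reg);

        /* Free everything the hardware has finished with. */
        while (sw_read_idx != hw_read_idx && budget-- > 0) {
                struct sk_buff *skb = txq->bufs[sw_read_idx].skb;

                if (skb) {
                        total_bytes += skb->len;
                        total_packets++;
                }
                alx_free_txbuf(txq, sw_read_idx);
                if (++sw_read_idx == txq->count)
                        sw_read_idx = 0;
        }
        txq->read_idx = sw_read_idx;
        netdev_tx_completed_queue(tx_queue, total_packets, total_bytes);

        /* Hysteresis: wake only when over 1/4 of the ring is free. */
        if (netif_tx_queue_stopped(tx_queue) &&
            alx_tpd_avail(txq) > txq->count / 4)
                netif_tx_wake_queue(tx_queue);

        /* false tells the NAPI poll loop there is more TX work. */
        return sw_read_idx == hw_read_idx;
}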
305 if (np->txq) in alx_poll()
306 tx_complete = alx_clean_tx_irq(np->txq); in alx_poll()
459 if (np->txq) { in alx_init_ring_ptrs()
460 np->txq->read_idx = 0; in alx_init_ring_ptrs()
461 np->txq->write_idx = 0; in alx_init_ring_ptrs()
463 txring_header_reg[np->txq->queue_idx], in alx_init_ring_ptrs()
464 np->txq->tpd_dma); in alx_init_ring_ptrs()
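alx_init_ring_ptrs() (source lines 459-464) restarts both ring cursors and re-points the hardware at the ring's descriptor memory. A sketch of the TX half of that loop; alx_write_mem32() and the loop bound are assumed from the driver's usual MMIO helper and per-queue NAPI layout:

struct alx_hw *hw = &alx->hw;
int i;

for (i = 0; i < alx->num_napi; i++) {
        struct alx_napi *np = alx->qnapi[i];

        if (np->txq) {
                np->txq->read_idx = 0;
                np->txq->write_idx = 0;
                /* Program the ring base so hardware and the zeroed
                 * software indices agree on starting at slot 0.
                 */
                alx_write_mem32(hw,
                                txring_header_reg[np->txq->queue_idx],
                                np->txq->tpd_dma);
        }
}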
488 static void alx_free_txring_buf(struct alx_tx_queue *txq) in alx_free_txring_buf() argument
492 if (!txq->bufs) in alx_free_txring_buf()
495 for (i = 0; i < txq->count; i++) in alx_free_txring_buf()
496 alx_free_txbuf(txq, i); in alx_free_txring_buf()
498 memset(txq->bufs, 0, txq->count * sizeof(struct alx_buffer)); in alx_free_txring_buf()
499 memset(txq->tpd, 0, txq->count * sizeof(struct alx_txd)); in alx_free_txring_buf()
500 txq->write_idx = 0; in alx_free_txring_buf()
501 txq->read_idx = 0; in alx_free_txring_buf()
503 netdev_tx_reset_queue(alx_get_tx_queue(txq)); in alx_free_txring_buf()
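Note the netdev_tx_reset_queue() at source line 503: the hot path accounts bytes into BQL via netdev_tx_sent_queue() (line 1557), so a ring flush must also clear that state or the queue could stay throttled on bytes that will never complete. The pairing, in brief (the completion-side call is assumed, as above):

/* BQL calls must stay balanced across the three paths:
 *   transmit:    netdev_tx_sent_queue(q, skb->len)
 *   completion:  netdev_tx_completed_queue(q, packets, bytes)  (assumed)
 *   ring flush:  netdev_tx_reset_queue(q)
 */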
538 if (alx->qnapi[i] && alx->qnapi[i]->txq) in alx_free_buffers()
539 alx_free_txring_buf(alx->qnapi[i]->txq); in alx_free_buffers()
616 static int alx_alloc_tx_ring(struct alx_priv *alx, struct alx_tx_queue *txq, in alx_alloc_tx_ring() argument
619 txq->bufs = kcalloc(txq->count, sizeof(struct alx_buffer), GFP_KERNEL); in alx_alloc_tx_ring()
620 if (!txq->bufs) in alx_alloc_tx_ring()
623 txq->tpd = alx->descmem.virt + offset; in alx_alloc_tx_ring()
624 txq->tpd_dma = alx->descmem.dma + offset; in alx_alloc_tx_ring()
625 offset += sizeof(struct alx_txd) * txq->count; in alx_alloc_tx_ring()
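alx_alloc_tx_ring() (source lines 616-625) allocates only the software-side bufs array; the descriptors themselves are carved out of one shared coherent block (alx->descmem) at the given offset, and the advanced offset is returned so rings pack back-to-back. A sketch of how a caller chains those offsets, as alx_alloc_rings() does at line 673 (the num_txq loop bound is an assumption):

int offset = 0, i;

for (i = 0; i < alx->num_txq; i++) {
        /* Each call consumes count * sizeof(struct alx_txd) bytes of
         * descmem and returns where the next ring may start.
         */
        offset = alx_alloc_tx_ring(alx, alx->qnapi[i]->txq, offset);
        if (offset < 0)
                return offset;  /* kcalloc() of txq->bufs failed */
}
/* RX descriptors would be carved from descmem starting at offset. */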
673 offset = alx_alloc_tx_ring(alx, alx->qnapi[i]->txq, offset); in alx_alloc_rings()
696 if (alx->qnapi[i] && alx->qnapi[i]->txq) in alx_free_rings()
697 kfree(alx->qnapi[i]->txq->bufs); in alx_free_rings()
720 kfree(np->txq); in alx_free_napis()
742 struct alx_tx_queue *txq; in alx_alloc_napis() local
761 txq = kzalloc(sizeof(*txq), GFP_KERNEL); in alx_alloc_napis()
762 if (!txq) in alx_alloc_napis()
765 np->txq = txq; in alx_alloc_napis()
766 txq->p_reg = tx_pidx_reg[i]; in alx_alloc_napis()
767 txq->c_reg = tx_cidx_reg[i]; in alx_alloc_napis()
768 txq->queue_idx = i; in alx_alloc_napis()
769 txq->count = alx->tx_ringsz; in alx_alloc_napis()
770 txq->netdev = alx->dev; in alx_alloc_napis()
771 txq->dev = &alx->hw.pdev->dev; in alx_alloc_napis()
867 if (np->txq && np->rxq) in alx_request_msix()
869 np->txq->queue_idx); in alx_request_msix()
870 else if (np->txq) in alx_request_msix()
872 np->txq->queue_idx); in alx_request_msix()
1460 static int alx_map_tx_skb(struct alx_tx_queue *txq, struct sk_buff *skb) in alx_map_tx_skb() argument
1464 int maplen, f, first_idx = txq->write_idx; in alx_map_tx_skb()
1466 first_tpd = &txq->tpd[txq->write_idx]; in alx_map_tx_skb()
1470 if (++txq->write_idx == txq->count) in alx_map_tx_skb()
1471 txq->write_idx = 0; in alx_map_tx_skb()
1473 tpd = &txq->tpd[txq->write_idx]; in alx_map_tx_skb()
1480 dma = dma_map_single(txq->dev, skb->data, maplen, in alx_map_tx_skb()
1482 if (dma_mapping_error(txq->dev, dma)) in alx_map_tx_skb()
1485 dma_unmap_len_set(&txq->bufs[txq->write_idx], size, maplen); in alx_map_tx_skb()
1486 dma_unmap_addr_set(&txq->bufs[txq->write_idx], dma, dma); in alx_map_tx_skb()
1494 if (++txq->write_idx == txq->count) in alx_map_tx_skb()
1495 txq->write_idx = 0; in alx_map_tx_skb()
1496 tpd = &txq->tpd[txq->write_idx]; in alx_map_tx_skb()
1501 dma = skb_frag_dma_map(txq->dev, frag, 0, in alx_map_tx_skb()
1503 if (dma_mapping_error(txq->dev, dma)) in alx_map_tx_skb()
1505 dma_unmap_len_set(&txq->bufs[txq->write_idx], size, maplen); in alx_map_tx_skb()
1506 dma_unmap_addr_set(&txq->bufs[txq->write_idx], dma, dma); in alx_map_tx_skb()
1514 txq->bufs[txq->write_idx].skb = skb; in alx_map_tx_skb()
1516 if (++txq->write_idx == txq->count) in alx_map_tx_skb()
1517 txq->write_idx = 0; in alx_map_tx_skb()
1523 while (f != txq->write_idx) { in alx_map_tx_skb()
1524 alx_free_txbuf(txq, f); in alx_map_tx_skb()
1525 if (++f == txq->count) in alx_map_tx_skb()
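alx_map_tx_skb() (source lines 1460-1525) records every mapping with dma_unmap_addr_set()/dma_unmap_len_set() precisely so the error path can rewind with alx_free_txbuf(). Condensed shape of the fragment loop plus the unwind; descriptor field writes and the EOP/skb hand-off are elided, and the label name is taken as typical:

dma_addr_t dma;
int maplen, f, first_idx = txq->write_idx;

for (f = 0; f < skb_shinfo(skb)->nr_frags; f++) {
        skb_frag_t *frag = &skb_shinfo(skb)->frags[f];

        if (++txq->write_idx == txq->count)
                txq->write_idx = 0;

        maplen = skb_frag_size(frag);
        dma = skb_frag_dma_map(txq->dev, frag, 0, maplen, DMA_TO_DEVICE);
        if (dma_mapping_error(txq->dev, dma))
                goto err_dma;

        /* Record the mapping so alx_free_txbuf() can undo it. */
        dma_unmap_len_set(&txq->bufs[txq->write_idx], size, maplen);
        dma_unmap_addr_set(&txq->bufs[txq->write_idx], dma, dma);
}
/* ... EOP flag, skb ownership, final write_idx bump ... */

err_dma:
        /* Rewind: free every slot this skb claimed, from its first
         * descriptor up to (not including) the slot that failed.
         */
        f = first_idx;
        while (f != txq->write_idx) {
                alx_free_txbuf(txq, f);
                if (++f == txq->count)
                        f = 0;
        }
        return -ENOMEM;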
1532 struct alx_tx_queue *txq) in alx_start_xmit_ring() argument
1538 alx = netdev_priv(txq->netdev); in alx_start_xmit_ring()
1540 if (alx_tpd_avail(txq) < alx_tpd_req(skb)) { in alx_start_xmit_ring()
1541 netif_tx_stop_queue(alx_get_tx_queue(txq)); in alx_start_xmit_ring()
1545 first = &txq->tpd[txq->write_idx]; in alx_start_xmit_ring()
1554 if (alx_map_tx_skb(txq, skb) < 0) in alx_start_xmit_ring()
1557 netdev_tx_sent_queue(alx_get_tx_queue(txq), skb->len); in alx_start_xmit_ring()
1561 alx_write_mem16(&alx->hw, txq->p_reg, txq->write_idx); in alx_start_xmit_ring()
1563 if (alx_tpd_avail(txq) < txq->count / 8) in alx_start_xmit_ring()
1564 netif_tx_stop_queue(alx_get_tx_queue(txq)); in alx_start_xmit_ring()
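alx_start_xmit_ring() (source lines 1532-1564) carries the transmit-side flow control: stop the queue and drop if the skb needs more descriptors than are free, and after queueing, stop proactively once under 1/8 of the ring remains. Paired with the 1/4 wake threshold in alx_clean_tx_irq(), that gives hysteresis so the queue does not thrash. A condensed sketch; alx_tpd_req() is the descriptor-count estimate from line 1540, the TSO/checksum steps are elided, and the wmb() is assumed since descriptor stores must be visible before the producer-index doorbell:

static netdev_tx_t alx_start_xmit_ring(struct sk_buff *skb,
                                       struct alx_tx_queue *txq)
{
        struct alx_priv *alx = netdev_priv(txq->netdev);

        if (alx_tpd_avail(txq) < alx_tpd_req(skb)) {
                /* Completion path wakes us at > count / 4 free. */
                netif_tx_stop_queue(alx_get_tx_queue(txq));
                goto drop;
        }

        /* ... TSO / checksum offload setup elided ... */

        if (alx_map_tx_skb(txq, skb) < 0)
                goto drop;

        netdev_tx_sent_queue(alx_get_tx_queue(txq), skb->len);

        /* Descriptor writes must land before the doorbell. */
        wmb();
        alx_write_mem16(&alx->hw, txq->p_reg, txq->write_idx);

        /* Stop early while fewer than 1/8 of the slots remain. */
        if (alx_tpd_avail(txq) < txq->count / 8)
                netif_tx_stop_queue(alx_get_tx_queue(txq));

        return NETDEV_TX_OK;

drop:
        dev_kfree_skb_any(skb);
        return NETDEV_TX_OK;
}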