Searched refs:tx_bd_num (Results 1 – 8 of 8) sorted by relevance
/linux-6.12.1/drivers/net/wireless/quantenna/qtnfmac/pcie/

topaz_pcie.c
    195  len = priv->tx_bd_num * sizeof(struct qtnf_topaz_tx_bd) +            in topaz_alloc_bd_table()
    208  for (i = 0; i < priv->tx_bd_num; i++)                                 in topaz_alloc_bd_table()
    218  vaddr = ((struct qtnf_topaz_tx_bd *)vaddr) + priv->tx_bd_num;         in topaz_alloc_bd_table()
    219  paddr += priv->tx_bd_num * sizeof(struct qtnf_topaz_tx_bd);           in topaz_alloc_bd_table()
    319  for (i = 0; i < priv->tx_bd_num; i++) {                               in qtnf_topaz_free_xfer_buffers()
    352  priv->tx_bd_num = tx_bd_size;                                         in qtnf_pcie_topaz_init_xfer()
    353  qtnf_non_posted_write(priv->tx_bd_num, &bda->bda_rc_tx_bd_num);       in qtnf_pcie_topaz_init_xfer()
    407  if (CIRC_CNT(priv->tx_bd_w_index, tx_done_index, priv->tx_bd_num))    in qtnf_topaz_data_tx_reclaim()
    411  while (CIRC_CNT(tx_done_index, i, priv->tx_bd_num)) {                 in qtnf_topaz_data_tx_reclaim()
    434  if (++i >= priv->tx_bd_num)                                           in qtnf_topaz_data_tx_reclaim()
    [all …]
pearl_pcie.c
    239  len = priv->tx_bd_num * sizeof(struct qtnf_pearl_tx_bd) +             in pearl_alloc_bd_table()
    262  vaddr = ((struct qtnf_pearl_tx_bd *)vaddr) + priv->tx_bd_num;         in pearl_alloc_bd_table()
    263  paddr += priv->tx_bd_num * sizeof(struct qtnf_pearl_tx_bd);           in pearl_alloc_bd_table()
    368  for (i = 0; i < priv->tx_bd_num; i++) {                               in qtnf_pearl_free_xfer_buffers()
    416  priv->tx_bd_num = PEARL_TX_BD_SIZE_DEFAULT;                           in qtnf_pcie_pearl_init_xfer()
    418  priv->tx_bd_num = tx_bd_size;                                         in qtnf_pcie_pearl_init_xfer()
    478  & (priv->tx_bd_num - 1);                                              in qtnf_pearl_data_tx_reclaim()
    482  while (CIRC_CNT(tx_done_index, i, priv->tx_bd_num)) {                 in qtnf_pearl_data_tx_reclaim()
    505  if (++i >= priv->tx_bd_num)                                           in qtnf_pearl_data_tx_reclaim()
    521  priv->tx_bd_num)) {                                                   in qtnf_tx_queue_ready()
    [all …]
pcie_priv.h
    46   u16 tx_bd_num;                                                        (member)
pcie.c
    70   len = priv->tx_bd_num * sizeof(*priv->tx_skb) +                       in qtnf_pcie_alloc_skb_array()
    79   vaddr += priv->tx_bd_num;                                             in qtnf_pcie_alloc_skb_array()
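The qtnfmac hits above all follow one descriptor-ring pattern: tx_bd_num sizes the BD table and skb-array allocations, and the tx-reclaim paths walk completed slots with CIRC_CNT(), wrapping the index when it reaches tx_bd_num. The following is a minimal sketch of that reclaim loop, not the drivers' actual code: struct my_ring, reclaim_ring() and the skb bookkeeping are hypothetical stand-ins, and CIRC_CNT() assumes tx_bd_num is a power of two (the pearl driver's "& (priv->tx_bd_num - 1)" masking implies the same).

    /* Sketch only: generic CIRC_CNT()-based tx reclaim.  struct my_ring and
     * reclaim_ring() are hypothetical; tx_bd_num must be a power of two. */
    #include <linux/circ_buf.h>
    #include <linux/types.h>

    struct my_ring {
            u16 tx_bd_num;      /* number of tx buffer descriptors in the ring */
            u32 tx_bd_w_index;  /* producer index (next slot to fill) */
            u32 tx_bd_r_index;  /* consumer index (next slot to reclaim) */
    };

    static void reclaim_ring(struct my_ring *r, u32 tx_done_index)
    {
            u32 i = r->tx_bd_r_index;

            /* CIRC_CNT(head, tail, size) counts slots the hardware has
             * completed but the driver has not yet reclaimed */
            while (CIRC_CNT(tx_done_index, i, r->tx_bd_num)) {
                    /* ... dma unmap and free the skb stored for slot i ... */

                    if (++i >= r->tx_bd_num)
                            i = 0;  /* wrap around the ring */
            }

            r->tx_bd_r_index = i;
    }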
/linux-6.12.1/drivers/net/ethernet/xilinx/

ll_temac_main.c
    318  sizeof(*lp->tx_bd_v) * lp->tx_bd_num,                                 in temac_dma_bd_release()
    340  sizeof(*lp->tx_bd_v) * lp->tx_bd_num,                                 in temac_dma_bd_init()
    351  for (i = 0; i < lp->tx_bd_num; i++) {                                 in temac_dma_bd_init()
    353  + sizeof(*lp->tx_bd_v) * ((i + 1) % lp->tx_bd_num));                  in temac_dma_bd_init()
    806  if (lp->tx_bd_ci >= lp->tx_bd_num)                                    in temac_start_xmit_done()
    835  if (tail >= lp->tx_bd_num)                                            in temac_check_tx_bd_space()
    898  if (++lp->tx_bd_tail >= lp->tx_bd_num)                                in temac_start_xmit()
    908  lp->tx_bd_tail = lp->tx_bd_num - 1;                                   in temac_start_xmit()
    917  lp->tx_bd_tail = lp->tx_bd_num - 1;                                   in temac_start_xmit()
    941  if (lp->tx_bd_tail >= lp->tx_bd_num)                                  in temac_start_xmit()
    [all …]
xilinx_axienet_main.c
    191  sizeof(*lp->tx_bd_v) * lp->tx_bd_num,                                 in axienet_dma_bd_release()
    319  sizeof(*lp->tx_bd_v) * lp->tx_bd_num,                                 in axienet_dma_bd_init()
    330  for (i = 0; i < lp->tx_bd_num; i++) {                                 in axienet_dma_bd_init()
    333  ((i + 1) % lp->tx_bd_num);                                            in axienet_dma_bd_init()
    750  cur_p = &lp->tx_bd_v[(first_bd + i) % lp->tx_bd_num];                 in axienet_free_tx_chain()
    787  if (lp->tx_bd_ci >= lp->tx_bd_num)                                    in axienet_free_tx_chain()
    788  lp->tx_bd_ci %= lp->tx_bd_num;                                        in axienet_free_tx_chain()
    815  lp->tx_bd_num];                                                       in axienet_check_tx_bd_space()
    964  packets = axienet_free_tx_chain(lp, lp->tx_bd_ci, lp->tx_bd_num, false,  in axienet_tx_poll()
    1061 if (++new_tail_ptr >= lp->tx_bd_num)                                  in axienet_start_xmit()
    [all …]
ll_temac.h
    382  u32 tx_bd_num;                                                        (member)
xilinx_axienet.h
    582  u32 tx_bd_num;                                                        (member)
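In both Xilinx drivers, tx_bd_num bounds a DMA descriptor ring: the ring is allocated as sizeof(*lp->tx_bd_v) * lp->tx_bd_num, the init loop chains each descriptor to slot (i + 1) % lp->tx_bd_num, and the head/tail indices wrap when they reach tx_bd_num. Below is a minimal sketch of that chaining step under assumed, simplified types; struct my_bd, struct my_priv and init_tx_ring() are hypothetical, not the drivers' real structures, and a 32-bit "next descriptor" field is assumed.

    #include <linux/types.h>

    /* Hypothetical, simplified descriptor and private structs for illustration */
    struct my_bd {
            u32 next;               /* DMA address of the next descriptor (assumed 32-bit) */
            /* ... buffer address, control/status words ... */
    };

    struct my_priv {
            struct my_bd *tx_bd_v;  /* CPU (virtual) address of the BD ring */
            dma_addr_t tx_bd_p;     /* DMA (bus) address of the same ring */
            u32 tx_bd_num;          /* number of descriptors in the ring */
    };

    static void init_tx_ring(struct my_priv *lp)
    {
            u32 i;

            /* Chain descriptor i to descriptor (i + 1) % tx_bd_num so the
             * last entry points back to the first, forming a circular ring. */
            for (i = 0; i < lp->tx_bd_num; i++)
                    lp->tx_bd_v[i].next = (u32)(lp->tx_bd_p +
                                                sizeof(*lp->tx_bd_v) *
                                                ((i + 1) % lp->tx_bd_num));
    }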