Searched refs:rx_bd_num (Results 1 – 8 of 8) sorted by relevance
196  priv->rx_bd_num * sizeof(struct qtnf_topaz_rx_bd) +  in topaz_alloc_bd_table()
228  vaddr = ((struct qtnf_topaz_rx_bd *)vaddr) + priv->rx_bd_num;  in topaz_alloc_bd_table()
229  paddr += priv->rx_bd_num * sizeof(struct qtnf_topaz_rx_bd);  in topaz_alloc_bd_table()
279  ts->base.rx_bd_num * sizeof(struct qtnf_topaz_rx_bd));  in topaz_alloc_rx_buffers()
281  for (i = 0; i < ts->base.rx_bd_num; i++) {  in topaz_alloc_rx_buffers()
287  ts->rx_bd_vbase[ts->base.rx_bd_num - 1].info |=  in topaz_alloc_rx_buffers()
304  for (i = 0; i < priv->rx_bd_num; i++) {  in qtnf_topaz_free_xfer_buffers()
364  priv->rx_bd_num = rx_bd_size;  in qtnf_pcie_topaz_init_xfer()
365  qtnf_non_posted_write(priv->rx_bd_num, &bda->bda_rc_rx_bd_num);  in qtnf_pcie_topaz_init_xfer()
686  if (++r_idx >= priv->rx_bd_num)  in qtnf_topaz_rx_poll()
[all …]
240  priv->rx_bd_num * sizeof(struct qtnf_pearl_rx_bd);  in pearl_alloc_bd_table()
274  writel(priv->rx_bd_num | (sizeof(struct qtnf_pearl_rx_bd)) << 16,  in pearl_alloc_bd_table()
332  ps->base.rx_bd_num * sizeof(struct qtnf_pearl_rx_bd));  in pearl_alloc_rx_buffers()
334  for (i = 0; i < ps->base.rx_bd_num; i++) {  in pearl_alloc_rx_buffers()
354  for (i = 0; i < priv->rx_bd_num; i++) {  in qtnf_pearl_free_xfer_buffers()
395  writel(ps->base.rx_bd_num, PCIE_HHBM_Q_LIMIT_REG(ps->pcie_reg_base));  in pearl_hhbm_init()
429  priv->rx_bd_num = PEARL_RX_BD_SIZE_DEFAULT;  in qtnf_pcie_pearl_init_xfer()
431  priv->rx_bd_num = rx_bd_size;  in qtnf_pcie_pearl_init_xfer()
775  if (++r_idx >= priv->rx_bd_num)  in qtnf_pcie_pearl_rx_poll()
783  priv->rx_bd_num) > 0) {  in qtnf_pcie_pearl_rx_poll()
[all …]
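
Both qtnfmac variants above (Topaz and Pearl) advance their RX read index the same way: increment past the descriptor just consumed, and reset to zero once the index reaches rx_bd_num, keeping the ring circular without a modulo in the hot path. A minimal standalone sketch of that pattern; struct rx_ring and ring_consume() are illustrative names, not from the driver:

    #include <stdio.h>

    struct rx_ring {
            unsigned int r_idx;     /* next descriptor to consume */
            unsigned int rx_bd_num; /* total descriptors in the ring */
    };

    /* Consume one descriptor and advance the index, wrapping at
     * rx_bd_num exactly like the "if (++r_idx >= priv->rx_bd_num)"
     * hits in the rx_poll functions above. */
    static unsigned int ring_consume(struct rx_ring *ring)
    {
            unsigned int cur = ring->r_idx;

            if (++ring->r_idx >= ring->rx_bd_num)
                    ring->r_idx = 0;

            return cur;
    }

    int main(void)
    {
            struct rx_ring ring = { .r_idx = 0, .rx_bd_num = 4 };
            int i;

            /* Six consumes on a 4-entry ring print 0 1 2 3 0 1. */
            for (i = 0; i < 6; i++)
                    printf("consumed bd %u\n", ring_consume(&ring));

            return 0;
    }
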
47 u16 rx_bd_num; member
71 priv->rx_bd_num * sizeof(*priv->rx_skb); in qtnf_pcie_alloc_skb_array()
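
This common-layer hit sizes a flat array of skb pointers to match the descriptor count, one slot per RX buffer descriptor, so the driver can look up the skb backing any descriptor by index. A hedged sketch of that bookkeeping; calloc and the alloc_rx_skb_array() name stand in for the kernel allocator and the driver's real helper:

    #include <stdlib.h>

    struct sk_buff; /* opaque stand-in; the real definition is kernel-internal */

    /* One skb pointer slot per RX buffer descriptor, mirroring the
     * "rx_bd_num * sizeof(*priv->rx_skb)" sizing in the hit above. */
    static struct sk_buff **alloc_rx_skb_array(unsigned short rx_bd_num)
    {
            return calloc(rx_bd_num, sizeof(struct sk_buff *));
    }

    int main(void)
    {
            struct sk_buff **rx_skb = alloc_rx_skb_array(256);
            int ok = rx_skb != NULL;

            free(rx_skb);
            return ok ? 0 : 1;
    }
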
305   for (i = 0; i < lp->rx_bd_num; i++) {  in temac_dma_bd_release()
314   sizeof(*lp->rx_bd_v) * lp->rx_bd_num,  in temac_dma_bd_release()
332   lp->rx_skb = devm_kcalloc(&ndev->dev, lp->rx_bd_num,  in temac_dma_bd_init()
346   sizeof(*lp->rx_bd_v) * lp->rx_bd_num,  in temac_dma_bd_init()
356   for (i = 0; i < lp->rx_bd_num; i++) {  in temac_dma_bd_init()
358   + sizeof(*lp->rx_bd_v) * ((i + 1) % lp->rx_bd_num));  in temac_dma_bd_init()
394   lp->rx_bd_tail = lp->rx_bd_num - 1;  in temac_dma_bd_init()
964   available += lp->rx_bd_num;  in ll_temac_recv_buffers_available()
1032  if (++lp->rx_bd_ci >= lp->rx_bd_num)  in ll_temac_recv()
1063  if (rx_bd >= lp->rx_bd_num)  in ll_temac_recv()
[all …]
198   for (i = 0; i < lp->rx_bd_num; i++) {  in axienet_dma_bd_release()
221   sizeof(*lp->rx_bd_v) * lp->rx_bd_num,  in axienet_dma_bd_release()
285   (sizeof(*lp->rx_bd_v) * (lp->rx_bd_num - 1)));  in axienet_dma_start()
325   sizeof(*lp->rx_bd_v) * lp->rx_bd_num,  in axienet_dma_bd_init()
340   for (i = 0; i < lp->rx_bd_num; i++) {  in axienet_dma_bd_init()
344   ((i + 1) % lp->rx_bd_num);  in axienet_dma_bd_init()
1233  if (++lp->rx_bd_ci >= lp->rx_bd_num)  in axienet_rx_poll()
1936  ering->rx_pending = lp->rx_bd_num;  in axienet_ethtools_get_ringparam()
1960  lp->rx_bd_num = ering->rx_pending;  in axienet_ethtools_set_ringparam()
2512  for (i = 0; i < lp->rx_bd_num; i++) {  in axienet_dma_err_handler()
[all …]
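
Both Xilinx drivers (ll_temac above and axienet here) chain their RX descriptors into a ring by pointing each BD's next field at the following BD, with (i + 1) % rx_bd_num wrapping the last entry back to the first. A self-contained sketch of that chaining; struct rx_bd, its host-pointer next field, and build_rx_ring() are illustrative stand-ins, since the real descriptors carry DMA addresses rather than host pointers:

    #include <stdio.h>
    #include <stdlib.h>

    struct rx_bd {
            struct rx_bd *next; /* stands in for the hardware "next BD" DMA address */
            unsigned int idx;
    };

    /* Link rx_bd_num descriptors into a circle, mirroring the
     * "(i + 1) % lp->rx_bd_num" hits in temac_dma_bd_init() and
     * axienet_dma_bd_init() above. */
    static struct rx_bd *build_rx_ring(unsigned int rx_bd_num)
    {
            struct rx_bd *ring = calloc(rx_bd_num, sizeof(*ring));
            unsigned int i;

            if (!ring)
                    return NULL;

            for (i = 0; i < rx_bd_num; i++) {
                    ring[i].idx = i;
                    ring[i].next = &ring[(i + 1) % rx_bd_num];
            }

            return ring;
    }

    int main(void)
    {
            struct rx_bd *ring = build_rx_ring(4);
            struct rx_bd *bd;
            int hop;

            if (!ring)
                    return 1;

            /* Walking 5 hops from entry 0 lands on entry 1: the ring wraps. */
            bd = &ring[0];
            for (hop = 0; hop < 5; hop++)
                    bd = bd->next;
            printf("after 5 hops: bd %u\n", bd->idx);

            free(ring);
            return 0;
    }
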
385 u32 rx_bd_num; member
572 u32 rx_bd_num; member