Lines matching refs:rxr (uses of the rxr rx-ring-info pointer; all hits are in drivers/net/ethernet/broadcom/bnx2.c)
717 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_free_rx_mem() local
721 if (rxr->rx_desc_ring[j]) in bnx2_free_rx_mem()
723 rxr->rx_desc_ring[j], in bnx2_free_rx_mem()
724 rxr->rx_desc_mapping[j]); in bnx2_free_rx_mem()
725 rxr->rx_desc_ring[j] = NULL; in bnx2_free_rx_mem()
727 vfree(rxr->rx_buf_ring); in bnx2_free_rx_mem()
728 rxr->rx_buf_ring = NULL; in bnx2_free_rx_mem()
731 if (rxr->rx_pg_desc_ring[j]) in bnx2_free_rx_mem()
733 rxr->rx_pg_desc_ring[j], in bnx2_free_rx_mem()
734 rxr->rx_pg_desc_mapping[j]); in bnx2_free_rx_mem()
735 rxr->rx_pg_desc_ring[j] = NULL; in bnx2_free_rx_mem()
737 vfree(rxr->rx_pg_ring); in bnx2_free_rx_mem()
738 rxr->rx_pg_ring = NULL; in bnx2_free_rx_mem()
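
The bnx2_free_rx_mem() fragments above (source lines 717-738) show one teardown pattern per ring: free each DMA-coherent descriptor page, then vfree the vmalloc'ed software ring, for both the normal and the page (jumbo) ring. A minimal sketch of that pattern follows; this and the later sketches assume the struct bnx2 / struct bnx2_rx_ring_info layouts and the RXBD_RING_SIZE-style constants from bnx2.h, and the sketch_* names are illustrative, not the driver's own.

    static void sketch_free_rx_ring(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr)
    {
            int j;

            for (j = 0; j < bp->rx_max_ring; j++) {
                    if (rxr->rx_desc_ring[j])
                            /* one DMA-coherent page of hardware rx_bd entries per j */
                            dma_free_coherent(&bp->pdev->dev, RXBD_RING_SIZE,
                                              rxr->rx_desc_ring[j],
                                              rxr->rx_desc_mapping[j]);
                    rxr->rx_desc_ring[j] = NULL;
            }
            /* the software-side ring of struct bnx2_sw_bd entries is vmalloc'ed */
            vfree(rxr->rx_buf_ring);
            rxr->rx_buf_ring = NULL;
            /* rx_pg_desc_ring / rx_pg_ring are torn down the same way */
    }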
771 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_alloc_rx_mem() local
774 rxr->rx_buf_ring = in bnx2_alloc_rx_mem()
776 if (!rxr->rx_buf_ring) in bnx2_alloc_rx_mem()
780 rxr->rx_desc_ring[j] = in bnx2_alloc_rx_mem()
783 &rxr->rx_desc_mapping[j], in bnx2_alloc_rx_mem()
785 if (!rxr->rx_desc_ring[j]) in bnx2_alloc_rx_mem()
791 rxr->rx_pg_ring = in bnx2_alloc_rx_mem()
794 if (!rxr->rx_pg_ring) in bnx2_alloc_rx_mem()
800 rxr->rx_pg_desc_ring[j] = in bnx2_alloc_rx_mem()
803 &rxr->rx_pg_desc_mapping[j], in bnx2_alloc_rx_mem()
805 if (!rxr->rx_pg_desc_ring[j]) in bnx2_alloc_rx_mem()
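
The bnx2_alloc_rx_mem() fragments (lines 771-805) are the allocation counterpart: a vzalloc'ed software ring plus one dma_alloc_coherent descriptor page per ring segment, with the page ring set up the same way when jumbo pages are in use. A sketch of the normal-ring half, under the same bnx2.h assumptions (SW_RXBD_RING_SIZE is assumed to be the per-segment software ring size):

    static int sketch_alloc_rx_ring(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr)
    {
            int j;

            rxr->rx_buf_ring = vzalloc(SW_RXBD_RING_SIZE * bp->rx_max_ring);
            if (!rxr->rx_buf_ring)
                    return -ENOMEM;

            for (j = 0; j < bp->rx_max_ring; j++) {
                    rxr->rx_desc_ring[j] =
                            dma_alloc_coherent(&bp->pdev->dev, RXBD_RING_SIZE,
                                               &rxr->rx_desc_mapping[j],
                                               GFP_KERNEL);
                    if (!rxr->rx_desc_ring[j])
                            return -ENOMEM; /* caller unwinds what was allocated */
            }
            return 0;
    }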
2722 bnx2_alloc_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_page() argument
2725 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_alloc_rx_page()
2727 &rxr->rx_pg_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; in bnx2_alloc_rx_page()
2747 bnx2_free_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index) in bnx2_free_rx_page() argument
2749 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_free_rx_page()
2763 bnx2_alloc_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_data() argument
2766 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[index]; in bnx2_alloc_rx_data()
2769 &rxr->rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; in bnx2_alloc_rx_data()
2790 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_alloc_rx_data()
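
bnx2_alloc_rx_page() and bnx2_alloc_rx_data() (lines 2722-2790) each fill one ring slot: allocate a buffer, map it for DMA, point the hardware buffer descriptor at it, and advance rx_prod_bseq, the running byte-sequence count the chip tracks for posted buffers. A sketch of the data case; the real driver maps the buffer at an internal frame-header offset, which is elided here:

    static int sketch_alloc_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                    u16 index, gfp_t gfp)
    {
            struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[index];
            struct bnx2_rx_bd *rxbd =
                    &rxr->rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)];
            dma_addr_t mapping;
            u8 *data;

            data = kmalloc(bp->rx_buf_size, gfp);
            if (!data)
                    return -ENOMEM;

            /* simplified: the driver maps at its l2 frame-header offset */
            mapping = dma_map_single(&bp->pdev->dev, data, bp->rx_buf_use_size,
                                     DMA_FROM_DEVICE);
            if (dma_mapping_error(&bp->pdev->dev, mapping)) {
                    kfree(data);
                    return -EIO;
            }

            rx_buf->data = data;
            dma_unmap_addr_set(rx_buf, mapping, mapping);

            /* hand the 64-bit DMA address to the chip, split into hi/lo words */
            rxbd->rx_bd_haddr_hi = (u64) mapping >> 32;
            rxbd->rx_bd_haddr_lo = (u64) mapping & 0xffffffff;

            rxr->rx_prod_bseq += bp->rx_buf_use_size;
            return 0;
    }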
2937 bnx2_reuse_rx_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_skb_pages() argument
2944 u16 cons = rxr->rx_pg_cons; in bnx2_reuse_rx_skb_pages()
2946 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2964 hw_prod = rxr->rx_pg_prod; in bnx2_reuse_rx_skb_pages()
2969 prod_rx_pg = &rxr->rx_pg_ring[prod]; in bnx2_reuse_rx_skb_pages()
2970 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2971 cons_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(cons)] in bnx2_reuse_rx_skb_pages()
2973 prod_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(prod)] in bnx2_reuse_rx_skb_pages()
2989 rxr->rx_pg_prod = hw_prod; in bnx2_reuse_rx_skb_pages()
2990 rxr->rx_pg_cons = cons; in bnx2_reuse_rx_skb_pages()
2994 bnx2_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_data() argument
3000 cons_rx_buf = &rxr->rx_buf_ring[cons]; in bnx2_reuse_rx_data()
3001 prod_rx_buf = &rxr->rx_buf_ring[prod]; in bnx2_reuse_rx_data()
3007 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_reuse_rx_data()
3017 cons_bd = &rxr->rx_desc_ring[BNX2_RX_RING(cons)][BNX2_RX_IDX(cons)]; in bnx2_reuse_rx_data()
3018 prod_bd = &rxr->rx_desc_ring[BNX2_RX_RING(prod)][BNX2_RX_IDX(prod)]; in bnx2_reuse_rx_data()
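
bnx2_reuse_rx_skb_pages() and bnx2_reuse_rx_data() (lines 2937-3018) recycle a buffer when a packet is dropped or copied: the software entry and the hardware BD address are moved from the consumer slot to the producer slot, so the ring never loses a posted buffer. A sketch of the single-buffer case (the real helper also does a dma_sync before re-posting):

    static void sketch_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                     u8 *data, u16 cons, u16 prod)
    {
            struct bnx2_sw_bd *cons_rx_buf = &rxr->rx_buf_ring[cons];
            struct bnx2_sw_bd *prod_rx_buf = &rxr->rx_buf_ring[prod];
            struct bnx2_rx_bd *cons_bd, *prod_bd;

            prod_rx_buf->data = data;
            rxr->rx_prod_bseq += bp->rx_buf_use_size;

            dma_unmap_addr_set(prod_rx_buf, mapping,
                               dma_unmap_addr(cons_rx_buf, mapping));

            if (cons == prod)
                    return;

            /* copy the hardware address from the consumed BD to the new producer BD */
            cons_bd = &rxr->rx_desc_ring[BNX2_RX_RING(cons)][BNX2_RX_IDX(cons)];
            prod_bd = &rxr->rx_desc_ring[BNX2_RX_RING(prod)][BNX2_RX_IDX(prod)];
            prod_bd->rx_bd_haddr_hi = cons_bd->rx_bd_haddr_hi;
            prod_bd->rx_bd_haddr_lo = cons_bd->rx_bd_haddr_lo;
    }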
3024 bnx2_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u8 *data, in bnx2_rx_skb() argument
3032 err = bnx2_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC); in bnx2_rx_skb()
3034 bnx2_reuse_rx_data(bp, rxr, data, (u16) (ring_idx >> 16), prod); in bnx2_rx_skb()
3040 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_skb()
3059 u16 pg_cons = rxr->rx_pg_cons; in bnx2_rx_skb()
3060 u16 pg_prod = rxr->rx_pg_prod; in bnx2_rx_skb()
3073 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3074 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3075 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, in bnx2_rx_skb()
3088 rx_pg = &rxr->rx_pg_ring[pg_cons]; in bnx2_rx_skb()
3100 err = bnx2_alloc_rx_page(bp, rxr, in bnx2_rx_skb()
3104 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3105 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3106 bnx2_reuse_rx_skb_pages(bp, rxr, skb, in bnx2_rx_skb()
3122 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3123 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
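
bnx2_rx_skb() (lines 3024-3123) posts a replacement buffer first and only then turns the received buffer into an skb; on allocation failure it falls back to the reuse helpers above, dropping the packet rather than losing a ring slot. A sketch of the linear path only, with the page-ring (jumbo) path left out; hdr_offset is a stand-in for the driver's computed frame-header offset:

    static struct sk_buff *sketch_rx_to_skb(struct bnx2 *bp,
                                            struct bnx2_rx_ring_info *rxr,
                                            u8 *data, unsigned int len,
                                            unsigned int hdr_offset,
                                            dma_addr_t dma_addr,
                                            u16 cons, u16 prod)
    {
            struct sk_buff *skb;

            if (bnx2_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC)) {
                    /* out of memory: recycle the old buffer, drop the packet */
                    bnx2_reuse_rx_data(bp, rxr, data, cons, prod);
                    return NULL;
            }

            dma_unmap_single(&bp->pdev->dev, dma_addr, bp->rx_buf_use_size,
                             DMA_FROM_DEVICE);
            skb = build_skb(data, 0);       /* data was kmalloc'ed, so frag_size 0 */
            if (!skb) {
                    kfree(data);
                    return NULL;
            }
            skb_reserve(skb, hdr_offset);   /* skip the chip's l2 frame header */
            skb_put(skb, len);
            return skb;
    }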
3143 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_rx_int() local
3152 sw_cons = rxr->rx_cons; in bnx2_rx_int()
3153 sw_prod = rxr->rx_prod; in bnx2_rx_int()
3171 rx_buf = &rxr->rx_buf_ring[sw_ring_cons]; in bnx2_rx_int()
3185 next_rx_buf = &rxr->rx_buf_ring[next_ring_idx]; in bnx2_rx_int()
3206 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3213 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_int()
3223 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3235 bnx2_reuse_rx_data(bp, rxr, data, in bnx2_rx_int()
3239 skb = bnx2_rx_skb(bp, rxr, data, len, hdr_len, dma_addr, in bnx2_rx_int()
3291 rxr->rx_cons = sw_cons; in bnx2_rx_int()
3292 rxr->rx_prod = sw_prod; in bnx2_rx_int()
3295 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_rx_int()
3297 BNX2_WR16(bp, rxr->rx_bidx_addr, sw_prod); in bnx2_rx_int()
3299 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_rx_int()
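
At the end of bnx2_rx_int() (lines 3291-3299) the new producer positions are published to the chip through the per-context mailbox offsets (rx_bidx_addr, rx_pg_bidx_addr, rx_bseq_addr) using the BNX2_WR16/BNX2_WR register helpers seen above. Roughly, as a sketch:

    static void sketch_post_rx_prod(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                    u16 sw_prod, u16 sw_cons, bool pg_ring_used)
    {
            rxr->rx_cons = sw_cons;
            rxr->rx_prod = sw_prod;

            if (pg_ring_used)       /* only when the page (jumbo) ring is active */
                    BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod);

            BNX2_WR16(bp, rxr->rx_bidx_addr, sw_prod);
            BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq);
    }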
3388 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_has_fast_work() local
3390 if ((bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) || in bnx2_has_fast_work()
3482 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_poll_work() local
3487 if (bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) in bnx2_poll_work()
5113 struct bnx2_rx_ring_info *rxr; in bnx2_clear_ring_states() local
5119 rxr = &bnapi->rx_ring; in bnx2_clear_ring_states()
5123 rxr->rx_prod_bseq = 0; in bnx2_clear_ring_states()
5124 rxr->rx_prod = 0; in bnx2_clear_ring_states()
5125 rxr->rx_cons = 0; in bnx2_clear_ring_states()
5126 rxr->rx_pg_prod = 0; in bnx2_clear_ring_states()
5127 rxr->rx_pg_cons = 0; in bnx2_clear_ring_states()
5224 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_init_rx_ring() local
5233 bnx2_init_rxbd_rings(rxr->rx_desc_ring, rxr->rx_desc_mapping, in bnx2_init_rx_ring()
5245 bnx2_init_rxbd_rings(rxr->rx_pg_desc_ring, in bnx2_init_rx_ring()
5246 rxr->rx_pg_desc_mapping, in bnx2_init_rx_ring()
5253 val = (u64) rxr->rx_pg_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5256 val = (u64) rxr->rx_pg_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5263 val = (u64) rxr->rx_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5266 val = (u64) rxr->rx_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5269 ring_prod = prod = rxr->rx_pg_prod; in bnx2_init_rx_ring()
5271 if (bnx2_alloc_rx_page(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5279 rxr->rx_pg_prod = prod; in bnx2_init_rx_ring()
5281 ring_prod = prod = rxr->rx_prod; in bnx2_init_rx_ring()
5283 if (bnx2_alloc_rx_data(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5291 rxr->rx_prod = prod; in bnx2_init_rx_ring()
5293 rxr->rx_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BDIDX; in bnx2_init_rx_ring()
5294 rxr->rx_bseq_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BSEQ; in bnx2_init_rx_ring()
5295 rxr->rx_pg_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_PG_BDIDX; in bnx2_init_rx_ring()
5297 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_init_rx_ring()
5298 BNX2_WR16(bp, rxr->rx_bidx_addr, prod); in bnx2_init_rx_ring()
5300 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_init_rx_ring()
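
bnx2_init_rx_ring() (lines 5224-5300) programs the descriptor-ring base addresses into the chip context, pre-fills the ring, and then writes the initial producer values through the same mailboxes shown in the previous sketch. The fill loop looks roughly like this; BNX2_NEXT_RX_BD and BNX2_RX_RING_IDX are assumed from bnx2.h and advance/wrap the 16-bit producer across descriptor pages:

    static void sketch_fill_rx_ring(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                    int ring_num)
    {
            u16 prod, ring_prod;
            int i;

            ring_prod = prod = rxr->rx_prod;
            for (i = 0; i < bp->rx_ring_size; i++) {
                    if (bnx2_alloc_rx_data(bp, rxr, ring_prod, GFP_KERNEL) < 0) {
                            netdev_warn(bp->dev,
                                        "init'ed rx ring %d with %d/%d entries\n",
                                        ring_num, i, bp->rx_ring_size);
                            break;
                    }
                    prod = BNX2_NEXT_RX_BD(prod);
                    ring_prod = BNX2_RX_RING_IDX(prod);
            }
            rxr->rx_prod = prod;
            /* the producer and bseq are then posted via rx_bidx_addr / rx_bseq_addr */
    }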
5463 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_free_rx_skbs() local
5466 if (!rxr->rx_buf_ring) in bnx2_free_rx_skbs()
5470 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[j]; in bnx2_free_rx_skbs()
5486 bnx2_free_rx_page(bp, rxr, j); in bnx2_free_rx_skbs()
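
bnx2_free_rx_skbs() (lines 5463-5486) drains whatever is still posted before the rings themselves are freed: unmap and kfree each data buffer, then release the rx pages through bnx2_free_rx_page(). Sketch, again assuming the bnx2.h field names:

    static void sketch_drain_rx_ring(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr)
    {
            int j;

            if (!rxr->rx_buf_ring)
                    return;

            for (j = 0; j < bp->rx_max_ring_idx; j++) {
                    struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[j];
                    u8 *data = rx_buf->data;

                    if (!data)
                            continue;

                    dma_unmap_single(&bp->pdev->dev,
                                     dma_unmap_addr(rx_buf, mapping),
                                     bp->rx_buf_use_size, DMA_FROM_DEVICE);
                    rx_buf->data = NULL;
                    kfree(data);
            }
            for (j = 0; j < bp->rx_max_pg_ring_idx; j++)
                    bnx2_free_rx_page(bp, rxr, j);
    }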
5800 struct bnx2_rx_ring_info *rxr; in bnx2_run_loopback() local
5805 rxr = &bnapi->rx_ring; in bnx2_run_loopback()
5881 rx_buf = &rxr->rx_buf_ring[rx_start_idx]; in bnx2_run_loopback()