Searched refs:nr_frags (Results 1 – 25 of 289) sorted by relevance

/linux-6.12.1/drivers/net/wireless/intel/ipw2x00/
libipw_tx.c
171 for (i = 0; i < txb->nr_frags; i++) in libipw_txb_free()
177 static struct libipw_txb *libipw_alloc_txb(int nr_frags, int txb_size, in libipw_alloc_txb() argument
183 txb = kzalloc(struct_size(txb, fragments, nr_frags), gfp_mask); in libipw_alloc_txb()
187 txb->nr_frags = nr_frags; in libipw_alloc_txb()
190 for (i = 0; i < nr_frags; i++) { in libipw_alloc_txb()
199 if (unlikely(i != nr_frags)) { in libipw_alloc_txb()
245 int i, bytes_per_frag, nr_frags, bytes_last_frag, frag_size, in libipw_xmit() local
391 nr_frags = bytes / bytes_per_frag; in libipw_xmit()
394 nr_frags++; in libipw_xmit()
398 nr_frags = 1; in libipw_xmit()
[all …]
/linux-6.12.1/drivers/net/ethernet/intel/ice/
ice_txrx_lib.h
21 u32 sinfo_frags = xdp_get_shared_info_from_buff(xdp)->nr_frags; in ice_set_rx_bufs_act()
22 u32 nr_frags = rx_ring->nr_frags + 1; in ice_set_rx_bufs_act() local
27 for (int i = 0; i < nr_frags; i++) { in ice_set_rx_bufs_act()
36 if (sinfo_frags < rx_ring->nr_frags && act == ICE_XDP_CONSUMED) { in ice_set_rx_bufs_act()
37 u32 delta = rx_ring->nr_frags - sinfo_frags; in ice_set_rx_bufs_act()
ice_txrx_lib.c
303 frags = tx_buf->nr_frags; in ice_clean_xdp_irq()
354 u32 nr_frags = 0; in __ice_xmit_xdp_ring() local
367 nr_frags = sinfo->nr_frags; in __ice_xmit_xdp_ring()
368 if (free_space < nr_frags + 1) in __ice_xmit_xdp_ring()
401 if (frag == nr_frags) in __ice_xmit_xdp_ring()
414 tx_head->nr_frags = nr_frags; in __ice_xmit_xdp_ring()
ice_txrx.c
858 sinfo->nr_frags = 0; in ice_add_xdp_frag()
863 if (unlikely(sinfo->nr_frags == MAX_SKB_FRAGS)) { in ice_add_xdp_frag()
868 __skb_fill_page_desc_noacc(sinfo, sinfo->nr_frags++, rx_buf->page, in ice_add_xdp_frag()
874 rx_ring->nr_frags = sinfo->nr_frags; in ice_add_xdp_frag()
957 unsigned int nr_frags; in ice_build_skb() local
962 nr_frags = sinfo->nr_frags; in ice_build_skb()
988 xdp_update_skb_shared_info(skb, nr_frags, in ice_build_skb()
990 nr_frags * xdp->frame_sz, in ice_build_skb()
1011 unsigned int nr_frags = 0; in ice_construct_skb() local
1020 nr_frags = sinfo->nr_frags; in ice_construct_skb()
[all …]
ice_xsk.c
555 u32 nr_frags = 0; in ice_construct_skb_zc() local
559 nr_frags = sinfo->nr_frags; in ice_construct_skb_zc()
578 for (int i = 0; i < nr_frags; i++) { in ice_construct_skb_zc()
593 __skb_fill_page_desc_noacc(skinfo, skinfo->nr_frags++, in ice_construct_skb_zc()
687 u32 nr_frags = 0; in ice_xmit_xdp_tx_zc() local
700 nr_frags = sinfo->nr_frags; in ice_xmit_xdp_tx_zc()
701 if (free_space < nr_frags + 1) in ice_xmit_xdp_tx_zc()
725 if (frag == nr_frags) in ice_xmit_xdp_tx_zc()
814 sinfo->nr_frags = 0; in ice_add_xsk_frag()
819 if (unlikely(sinfo->nr_frags == MAX_SKB_FRAGS)) { in ice_add_xsk_frag()
[all …]
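
Note: the ice hits above follow the kernel's multi-buffer XDP pattern: reset nr_frags for the first fragment, refuse to grow past MAX_SKB_FRAGS, and append pages with __skb_fill_page_desc_noacc(). A minimal sketch of that pattern, assuming kernel context; the function name and its page/offset/size parameters are hypothetical, not taken from the driver:

#include <linux/skbuff.h>
#include <net/xdp.h>

/* Sketch only: append one RX buffer as an extra fragment of a
 * multi-buffer xdp_buff, mirroring the ice_add_xdp_frag() hits above.
 */
static int example_add_xdp_frag(struct xdp_buff *xdp, struct page *page,
                                unsigned int offset, unsigned int size)
{
        struct skb_shared_info *sinfo = xdp_get_shared_info_from_buff(xdp);

        if (!xdp_buff_has_frags(xdp)) {
                sinfo->nr_frags = 0;            /* first fragment: reset the count */
                sinfo->xdp_frags_size = 0;
                xdp_buff_set_frags_flag(xdp);
        }

        if (unlikely(sinfo->nr_frags == MAX_SKB_FRAGS))
                return -ENOMEM;                 /* frags[] is full */

        __skb_fill_page_desc_noacc(sinfo, sinfo->nr_frags++, page, offset, size);
        sinfo->xdp_frags_size += size;
        return 0;
}
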
/linux-6.12.1/net/openvswitch/
openvswitch_trace.h
27 __field( u8, nr_frags )
52 __entry->nr_frags = skb_shinfo(skb)->nr_frags;
72 __entry->truesize, __entry->nr_frags, __entry->gso_size,
98 __field( u8, nr_frags )
122 __entry->nr_frags = skb_shinfo(skb)->nr_frags;
141 __entry->truesize, __entry->nr_frags, __entry->gso_size,
/linux-6.12.1/net/core/
gro.c
133 int i = skbinfo->nr_frags; in skb_gro_receive()
134 int nr_frags = pinfo->nr_frags + i; in skb_gro_receive() local
136 if (nr_frags > MAX_SKB_FRAGS) in skb_gro_receive()
140 pinfo->nr_frags = nr_frags; in skb_gro_receive()
141 skbinfo->nr_frags = 0; in skb_gro_receive()
143 frag = pinfo->frags + nr_frags; in skb_gro_receive()
163 int nr_frags = pinfo->nr_frags; in skb_gro_receive() local
164 skb_frag_t *frag = pinfo->frags + nr_frags; in skb_gro_receive()
169 if (nr_frags + 1 + skbinfo->nr_frags > MAX_SKB_FRAGS) in skb_gro_receive()
176 pinfo->nr_frags = nr_frags + 1 + skbinfo->nr_frags; in skb_gro_receive()
[all …]
net_test.c
45 unsigned int nr_frags; member
74 .nr_frags = 2,
82 .nr_frags = 3,
91 .nr_frags = 2,
166 if (tcase->nr_frags) { in gso_test_func()
171 page_ref_add(page, tcase->nr_frags - 1); in gso_test_func()
173 for (i = 0; i < tcase->nr_frags; i++) { in gso_test_func()
skbuff.c
1071 for (i = 0; i < shinfo->nr_frags; i++) { in skb_pp_frag_ref()
1118 for (i = 0; i < shinfo->nr_frags; i++) in skb_release_data()
1337 sh->tx_flags, sh->nr_frags, in skb_dump()
1368 for (i = 0; len && i < skb_shinfo(skb)->nr_frags; i++) { in skb_dump()
1925 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) in __skb_zcopy_downgrade_managed()
1967 int num_frags = skb_shinfo(skb)->nr_frags; in skb_copy_ubufs()
2043 skb_shinfo(skb)->nr_frags = new_frags; in skb_copy_ubufs()
2214 if (skb_shinfo(skb)->nr_frags) { in __pskb_copy_fclone()
2223 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in __pskb_copy_fclone()
2227 skb_shinfo(n)->nr_frags = i; in __pskb_copy_fclone()
[all …]
xdp.c
416 for (i = 0; i < sinfo->nr_frags; i++) { in xdp_return_frame()
435 for (i = 0; i < sinfo->nr_frags; i++) { in xdp_return_frame_rx_napi()
500 for (i = 0; i < sinfo->nr_frags; i++) { in xdp_return_frame_bulk()
521 for (i = 0; i < sinfo->nr_frags; i++) { in xdp_return_buff()
604 u8 nr_frags; in __xdp_build_skb_from_frame() local
608 nr_frags = sinfo->nr_frags; in __xdp_build_skb_from_frame()
629 xdp_update_skb_shared_info(skb, nr_frags, in __xdp_build_skb_from_frame()
631 nr_frags * xdpf->frame_sz, in __xdp_build_skb_from_frame()
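
Note: the net/core hits all rely on the same contract: skb_shinfo(skb)->nr_frags counts the valid entries in shinfo->frags[] and never exceeds MAX_SKB_FRAGS. A minimal sketch of the usual iteration, assuming kernel context; the helper name is hypothetical:

#include <linux/skbuff.h>

/* Sketch only: sum the bytes held in an skb's paged fragments,
 * the loop shape seen in skbuff.c, gro.c and xdp.c above.
 */
static unsigned int example_frag_bytes(struct sk_buff *skb)
{
        struct skb_shared_info *shinfo = skb_shinfo(skb);
        unsigned int bytes = 0;
        int i;

        for (i = 0; i < shinfo->nr_frags; i++)
                bytes += skb_frag_size(&shinfo->frags[i]);

        return bytes;
}
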
/linux-6.12.1/drivers/net/xen-netback/
netback.c
395 nr_slots = shinfo->nr_frags + frag_overflow + 1; in xenvif_get_requests()
461 for (shinfo->nr_frags = 0; nr_slots > 0 && shinfo->nr_frags < MAX_SKB_FRAGS; in xenvif_get_requests()
473 frag_set_pending_idx(&frags[shinfo->nr_frags], pending_idx); in xenvif_get_requests()
474 ++shinfo->nr_frags; in xenvif_get_requests()
488 for (shinfo->nr_frags = 0; shinfo->nr_frags < nr_slots; ++txp) { in xenvif_get_requests()
499 frag_set_pending_idx(&frags[shinfo->nr_frags], in xenvif_get_requests()
501 ++shinfo->nr_frags; in xenvif_get_requests()
505 if (shinfo->nr_frags) { in xenvif_get_requests()
565 int nr_frags = shinfo->nr_frags; in xenvif_tx_check_gop() local
566 const bool sharedslot = nr_frags && in xenvif_tx_check_gop()
[all …]
/linux-6.12.1/net/xdp/
xsk_queue.h
242 u32 total_descs = 0, nr_frags = 0; in xskq_cons_read_desc_batch() local
259 total_descs += (nr_frags + 1); in xskq_cons_read_desc_batch()
260 nr_frags = 0; in xskq_cons_read_desc_batch()
262 nr_frags++; in xskq_cons_read_desc_batch()
263 if (nr_frags == pool->netdev->xdp_zc_max_segs) { in xskq_cons_read_desc_batch()
264 nr_frags = 0; in xskq_cons_read_desc_batch()
271 cached_cons -= nr_frags; in xskq_cons_read_desc_batch()
/linux-6.12.1/drivers/staging/rtl8192e/
rtllib_tx.c
201 static struct rtllib_txb *rtllib_alloc_txb(int nr_frags, int txb_size, in rtllib_alloc_txb() argument
207 txb = kzalloc(struct_size(txb, fragments, nr_frags), gfp_mask); in rtllib_alloc_txb()
211 txb->nr_frags = nr_frags; in rtllib_alloc_txb()
214 for (i = 0; i < nr_frags; i++) { in rtllib_alloc_txb()
525 int i, bytes_per_frag, nr_frags, bytes_last_frag, frag_size; in rtllib_xmit_inter() local
709 nr_frags = bytes / bytes_per_frag; in rtllib_xmit_inter()
712 nr_frags++; in rtllib_xmit_inter()
720 txb = rtllib_alloc_txb(nr_frags, frag_size + in rtllib_xmit_inter()
734 for (i = 0; i < nr_frags; i++) { in rtllib_xmit_inter()
763 if (i != nr_frags - 1) { in rtllib_xmit_inter()
/linux-6.12.1/drivers/net/thunderbolt/
trace.h
105 __field(unsigned int, nr_frags)
111 __entry->nr_frags = skb_shinfo(skb)->nr_frags;
115 __entry->nr_frags)
/linux-6.12.1/drivers/net/ethernet/hisilicon/hns3/
hns3_trace.h
22 __field(__u8, nr_frags)
35 __entry->nr_frags = skb_shinfo(skb)->nr_frags;
50 __entry->gso_type, __entry->fraglist, __entry->nr_frags,
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/nfdk/
rings.c
20 int nr_frags, rd_idx; in nfp_nfdk_tx_ring_reset() local
32 nr_frags = skb_shinfo(skb)->nr_frags; in nfp_nfdk_tx_ring_reset()
42 fend = frag + nr_frags; in nfp_nfdk_tx_ring_reset()
/linux-6.12.1/drivers/net/ethernet/aeroflex/
greth.c
101 if (skb_shinfo(skb)->nr_frags == 0) in greth_print_tx_packet()
109 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in greth_print_tx_packet()
188 int nr_frags = skb_shinfo(skb)->nr_frags; in greth_clean_rings() local
197 for (i = 0; i < nr_frags; i++) { in greth_clean_rings()
208 greth->tx_free += nr_frags+1; in greth_clean_rings()
468 int curr_tx, nr_frags, i, err = NETDEV_TX_OK; in greth_start_xmit_gbit() local
472 nr_frags = skb_shinfo(skb)->nr_frags; in greth_start_xmit_gbit()
476 if (greth_num_free_bds(tx_last, greth->tx_next) < nr_frags + 1) { in greth_start_xmit_gbit()
494 if (nr_frags != 0) in greth_start_xmit_gbit()
518 for (i = 0; i < nr_frags; i++) { in greth_start_xmit_gbit()
[all …]
/linux-6.12.1/drivers/net/ethernet/tehuti/
tn40.c
578 int nr_frags = skb_shinfo(skb)->nr_frags; in tn40_tx_map_skb() local
586 skb->len, skb->data_len, nr_frags); in tn40_tx_map_skb()
587 if (nr_frags > TN40_MAX_PBL - 1) { in tn40_tx_map_skb()
591 nr_frags = skb_shinfo(skb)->nr_frags; in tn40_tx_map_skb()
605 for (i = 0; i < nr_frags; i++) { in tn40_tx_map_skb()
619 for (i = 0; i < nr_frags; i++) { in tn40_tx_map_skb()
628 ++nr_frags; in tn40_tx_map_skb()
632 db->wptr->len = -tn40_txd_sizes[nr_frags].bytes; in tn40_tx_map_skb()
714 int nr_frags, len, err; in tn40_start_xmit() local
731 nr_frags = skb_shinfo(skb)->nr_frags; in tn40_start_xmit()
[all …]
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/nfd3/
dp.c
258 int f, nr_frags, wr_idx, md_bytes; in nfp_nfd3_tx() local
277 nr_frags = skb_shinfo(skb)->nr_frags; in nfp_nfd3_tx()
279 if (unlikely(nfp_net_tx_full(tx_ring, nr_frags + 1))) { in nfp_nfd3_tx()
291 skb = nfp_net_tls_tx(dp, r_vec, skb, &tls_handle, &nr_frags); in nfp_nfd3_tx()
319 txd->offset_eop = (nr_frags ? 0 : NFD3_DESC_TX_EOP) | md_bytes; in nfp_nfd3_tx()
340 if (nr_frags > 0) { in nfp_nfd3_tx()
346 for (f = 0; f < nr_frags; f++) { in nfp_nfd3_tx()
364 ((f == nr_frags - 1) ? NFD3_DESC_TX_EOP : 0); in nfp_nfd3_tx()
377 tx_ring->wr_p += nr_frags + 1; in nfp_nfd3_tx()
381 tx_ring->wr_ptr_add += nr_frags + 1; in nfp_nfd3_tx()
[all …]
rings.c
50 int idx, nr_frags; in nfp_nfd3_tx_ring_reset() local
56 nr_frags = skb_shinfo(skb)->nr_frags; in nfp_nfd3_tx_ring_reset()
70 if (tx_buf->fidx == nr_frags - 1) in nfp_nfd3_tx_ring_reset()
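
Note: the greth, tn40 and nfp TX paths above read skb_shinfo(skb)->nr_frags up front to reserve nr_frags + 1 descriptors (linear head plus fragments) and then DMA-map each fragment. A minimal sketch of that mapping loop, assuming kernel context; the function name and the dev parameter are hypothetical, and a real driver would unwind earlier mappings on error:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Sketch only: DMA-map the linear head and every paged fragment of an
 * skb for transmit, the loop shape the TX hits above share.
 */
static int example_map_skb_for_tx(struct device *dev, struct sk_buff *skb)
{
        unsigned int nr_frags = skb_shinfo(skb)->nr_frags;
        dma_addr_t addr;
        unsigned int f;

        addr = dma_map_single(dev, skb->data, skb_headlen(skb), DMA_TO_DEVICE);
        if (dma_mapping_error(dev, addr))
                return -ENOMEM;

        for (f = 0; f < nr_frags; f++) {
                skb_frag_t *frag = &skb_shinfo(skb)->frags[f];

                addr = skb_frag_dma_map(dev, frag, 0, skb_frag_size(frag),
                                        DMA_TO_DEVICE);
                if (dma_mapping_error(dev, addr))
                        return -ENOMEM; /* sketch: skips unmapping prior frags */
        }

        return 0;
}
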
/linux-6.12.1/drivers/net/ethernet/intel/i40e/
i40e_txrx.c
2045 u32 nr_frags = xdp_get_shared_info_from_buff(xdp)->nr_frags; in i40e_process_rx_buffs() local
2061 else if (i++ <= nr_frags) in i40e_process_rx_buffs()
2089 u32 nr_frags = 0; in i40e_construct_skb() local
2127 nr_frags = sinfo->nr_frags; in i40e_construct_skb()
2133 if (unlikely(nr_frags >= MAX_SKB_FRAGS)) { in i40e_construct_skb()
2150 memcpy(&skinfo->frags[skinfo->nr_frags], &sinfo->frags[0], in i40e_construct_skb()
2151 sizeof(skb_frag_t) * nr_frags); in i40e_construct_skb()
2153 xdp_update_skb_shared_info(skb, skinfo->nr_frags + nr_frags, in i40e_construct_skb()
2155 nr_frags * xdp->frame_sz, in i40e_construct_skb()
2182 u32 nr_frags; in i40e_build_skb() local
[all …]
i40e_xsk.c
295 u32 nr_frags = 0; in i40e_construct_skb_zc() local
299 nr_frags = sinfo->nr_frags; in i40e_construct_skb_zc()
319 for (int i = 0; i < nr_frags; i++) { in i40e_construct_skb_zc()
334 __skb_fill_page_desc_noacc(skinfo, skinfo->nr_frags++, in i40e_construct_skb_zc()
405 sinfo->nr_frags = 0; in i40e_add_xsk_frag()
410 if (unlikely(sinfo->nr_frags == MAX_SKB_FRAGS)) { in i40e_add_xsk_frag()
415 __skb_fill_page_desc_noacc(sinfo, sinfo->nr_frags++, in i40e_add_xsk_frag()
/linux-6.12.1/drivers/net/ethernet/freescale/fs_enet/
fs_enet-main.c
455 int curidx, nr_frags, len; in fs_enet_start_xmit() local
465 nr_frags = skb_shinfo(skb)->nr_frags; in fs_enet_start_xmit()
467 for (i = 0; i < nr_frags; i++, frag++) { in fs_enet_start_xmit()
492 nr_frags = skb_shinfo(skb)->nr_frags; in fs_enet_start_xmit()
493 if (fep->tx_free <= nr_frags || (CBDR_SC(bdp) & BD_ENET_TX_READY)) { in fs_enet_start_xmit()
508 if (nr_frags) in fs_enet_start_xmit()
511 fep->tx_free -= nr_frags + 1; in fs_enet_start_xmit()
520 while (nr_frags) { in fs_enet_start_xmit()
543 nr_frags--; in fs_enet_start_xmit()
/linux-6.12.1/drivers/net/ethernet/broadcom/bnxt/
bnxt_xdp.c
40 num_frags = sinfo->nr_frags; in bnxt_xmit_bd()
46 tx_buf->nr_frags = num_frags; in bnxt_xmit_bd()
153 frags = tx_buf->nr_frags; in bnxt_tx_int_xdp()
212 for (i = 0; i < shinfo->nr_frags; i++) { in bnxt_xdp_buff_frags_free()
217 shinfo->nr_frags = 0; in bnxt_xdp_buff_frags_free()
276 tx_needed += sinfo->nr_frags; in bnxt_rx_xdp()
482 BNXT_RX_PAGE_SIZE * sinfo->nr_frags, in bnxt_xdp_build_skb()
/linux-6.12.1/drivers/net/wireless/ath/wil6210/
txrx.c
1622 void wil_tx_desc_set_nr_frags(struct vring_tx_desc *d, int nr_frags) in wil_tx_desc_set_nr_frags() argument
1624 d->mac.d[2] |= (nr_frags << MAC_CFG_DESC_TX_2_NUM_OF_DESCRIPTORS_POS); in wil_tx_desc_set_nr_frags()
1747 int nr_frags = skb_shinfo(skb)->nr_frags; in __wil_tx_vring_tso() local
1748 int min_desc_required = nr_frags + 1; in __wil_tx_vring_tso()
1838 for (f = headlen ? -1 : 0; f < nr_frags; f++) { in __wil_tx_vring_tso()
1913 if (rem_data == 0 || (f == nr_frags - 1 && len == 0)) { in __wil_tx_vring_tso()
1918 hdr_ctx->nr_frags = sg_desc_cnt; in __wil_tx_vring_tso()
1927 first_ctx->nr_frags = sg_desc_cnt - 1; in __wil_tx_vring_tso()
1941 if (f < nr_frags - 1 || len > 0) in __wil_tx_vring_tso()
2034 int nr_frags = skb_shinfo(skb)->nr_frags; in __wil_tx_ring() local
[all …]
