
Searched refs:tx_skbuff (Results 1 – 25 of 40) sorted by relevance


/linux-6.12.1/drivers/net/hippi/
rrunner.c
605 rrpriv->tx_skbuff[i] = NULL; in rr_init1()
1068 if(rrpriv->tx_skbuff[txcon]){ in rr_interrupt()
1073 skb = rrpriv->tx_skbuff[txcon]; in rr_interrupt()
1083 rrpriv->tx_skbuff[txcon] = NULL; in rr_interrupt()
1115 struct sk_buff *skb = rrpriv->tx_skbuff[i]; in rr_raz_tx()
1126 rrpriv->tx_skbuff[i] = NULL; in rr_raz_tx()
1291 if (rrpriv->tx_skbuff[index]){ in rr_dump()
1292 len = min_t(int, 0x80, rrpriv->tx_skbuff[index]->len); in rr_dump()
1297 printk("%02x ", (unsigned char) rrpriv->tx_skbuff[index]->data[i]); in rr_dump()
1302 if (rrpriv->tx_skbuff[cons]){ in rr_dump()
[all …]
/linux-6.12.1/drivers/net/ethernet/calxeda/
xgmac.c
361 struct sk_buff **tx_skbuff; member
745 priv->tx_skbuff = kcalloc(DMA_TX_RING_SZ, sizeof(struct sk_buff *), in xgmac_dma_desc_rings_init()
747 if (!priv->tx_skbuff) in xgmac_dma_desc_rings_init()
779 kfree(priv->tx_skbuff); in xgmac_dma_desc_rings_init()
815 if (!priv->tx_skbuff) in xgmac_free_tx_skbufs()
819 if (priv->tx_skbuff[i] == NULL) in xgmac_free_tx_skbufs()
831 dev_kfree_skb_any(priv->tx_skbuff[i]); in xgmac_free_tx_skbufs()
832 priv->tx_skbuff[i] = NULL; in xgmac_free_tx_skbufs()
857 kfree(priv->tx_skbuff); in xgmac_free_dma_desc_rings()
858 priv->tx_skbuff = NULL; in xgmac_free_dma_desc_rings()
[all …]
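
The xgmac.c hits above show the dynamically sized form of the pattern that recurs throughout these results: the driver declares struct sk_buff **tx_skbuff, allocates one pointer per TX descriptor with kcalloc() when the rings are set up, frees any skbs still attached when the ring is torn down, and finally kfree()s the array itself. The following is a minimal userspace sketch of that allocate/teardown pairing only; all names (tx_ring_alloc, tx_ring_free, xgmac_like_priv) are hypothetical, and calloc()/free() stand in for kcalloc()/kfree() and dev_kfree_skb_any().

/* Sketch only: models the tx_skbuff array setup/teardown seen in xgmac.c,
 * with userspace allocators standing in for the kernel helpers. */
#include <stdlib.h>

struct sk_buff { unsigned int len; };           /* stand-in for the kernel type */

struct xgmac_like_priv {
	struct sk_buff **tx_skbuff;             /* one pointer per TX descriptor */
	unsigned int tx_ring_sz;
};

static int tx_ring_alloc(struct xgmac_like_priv *priv, unsigned int ring_sz)
{
	priv->tx_ring_sz = ring_sz;
	priv->tx_skbuff = calloc(ring_sz, sizeof(struct sk_buff *));
	return priv->tx_skbuff ? 0 : -1;        /* -ENOMEM in the real driver */
}

static void tx_ring_free(struct xgmac_like_priv *priv)
{
	unsigned int i;

	if (!priv->tx_skbuff)                   /* mirrors the NULL check in xgmac_free_tx_skbufs() */
		return;
	for (i = 0; i < priv->tx_ring_sz; i++) {
		free(priv->tx_skbuff[i]);       /* dev_kfree_skb_any() in the kernel */
		priv->tx_skbuff[i] = NULL;
	}
	free(priv->tx_skbuff);                  /* kfree() of the pointer array itself */
	priv->tx_skbuff = NULL;
}
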
/linux-6.12.1/drivers/net/ethernet/freescale/fs_enet/
fs_enet-main.c
113 skb = fep->tx_skbuff[dirtyidx]; in fs_enet_napi()
159 fep->tx_skbuff[dirtyidx] = NULL; in fs_enet_napi()
377 fep->tx_skbuff[i] = NULL; in fs_init_bds()
393 skb = fep->tx_skbuff[i]; in fs_cleanup_bds()
401 fep->tx_skbuff[i] = NULL; in fs_cleanup_bds()
539 fep->tx_skbuff[curidx] = NULL; in fs_enet_start_xmit()
561 fep->tx_skbuff[curidx] = skb; in fs_enet_start_xmit()
947 fep->tx_skbuff = fep->rx_skbuff + fpi->rx_ring; in fs_enet_probe()
/linux-6.12.1/drivers/net/ethernet/amd/
lance.c
243 struct sk_buff* tx_skbuff[TX_RING_SIZE]; member
857 if (lp->tx_skbuff[i]) { in lance_purge_ring()
858 dev_kfree_skb_any(lp->tx_skbuff[i]); in lance_purge_ring()
859 lp->tx_skbuff[i] = NULL; in lance_purge_ring()
894 lp->tx_skbuff[i] = NULL; in lance_init_ring()
1008 lp->tx_skbuff[entry] = skb; in lance_start_xmit()
1092 if (lp->tx_skbuff[entry]) { in lance_interrupt()
1093 dev_consume_skb_irq(lp->tx_skbuff[entry]); in lance_interrupt()
1094 lp->tx_skbuff[entry] = NULL; in lance_interrupt()
pcnet32.c
265 struct sk_buff **tx_skbuff; member
502 kfree(lp->tx_skbuff); in pcnet32_realloc_tx_ring()
514 lp->tx_skbuff = new_skb_list; in pcnet32_realloc_tx_ring()
1017 lp->tx_skbuff[x] = skb; in pcnet32_loopback_test()
1094 packet = lp->tx_skbuff[x]->data; in pcnet32_loopback_test()
1355 if (lp->tx_skbuff[entry]) { in pcnet32_tx()
1358 lp->tx_skbuff[entry]->len, in pcnet32_tx()
1360 dev_kfree_skb_any(lp->tx_skbuff[entry]); in pcnet32_tx()
1361 lp->tx_skbuff[entry] = NULL; in pcnet32_tx()
2044 lp->tx_skbuff = kcalloc(lp->tx_ring_size, sizeof(struct sk_buff *), in pcnet32_alloc_ring()
[all …]
amd8111e.c
227 if (lp->tx_skbuff[i]) { in amd8111e_free_skbs()
230 lp->tx_skbuff[i]->len, DMA_TO_DEVICE); in amd8111e_free_skbs()
231 dev_kfree_skb(lp->tx_skbuff[i]); in amd8111e_free_skbs()
232 lp->tx_skbuff[i] = NULL; in amd8111e_free_skbs()
655 if (lp->tx_skbuff[tx_index]) { in amd8111e_tx()
658 lp->tx_skbuff[tx_index]->len, in amd8111e_tx()
660 dev_consume_skb_irq(lp->tx_skbuff[tx_index]); in amd8111e_tx()
661 lp->tx_skbuff[tx_index] = NULL; in amd8111e_tx()
1233 if (lp->tx_skbuff[tx_index]) in amd8111e_tx_queue_avail()
1258 lp->tx_skbuff[tx_index] = skb; in amd8111e_start_xmit()
/linux-6.12.1/drivers/net/ethernet/packetengines/
yellowfin.c
312 struct sk_buff* tx_skbuff[TX_RING_SIZE]; member
767 yp->tx_skbuff[i] = NULL; in yellowfin_init_ring()
779 yp->tx_skbuff[i] = 0; in yellowfin_init_ring()
834 yp->tx_skbuff[entry] = NULL; in yellowfin_start_xmit()
840 yp->tx_skbuff[entry] = skb; in yellowfin_start_xmit()
933 skb = yp->tx_skbuff[entry]; in yellowfin_interrupt()
941 yp->tx_skbuff[entry] = NULL; in yellowfin_interrupt()
971 skb = yp->tx_skbuff[entry]; in yellowfin_interrupt()
999 yp->tx_skbuff[entry] = 0; in yellowfin_interrupt()
1276 dev_kfree_skb(yp->tx_skbuff[i]); in yellowfin_close()
[all …]
hamachi.c
487 struct sk_buff* tx_skbuff[TX_RING_SIZE]; member
1008 skb = hmp->tx_skbuff[entry]; in hamachi_tx()
1014 hmp->tx_skbuff[entry] = NULL; in hamachi_tx()
1100 skb = hmp->tx_skbuff[i]; in hamachi_tx_timeout()
1106 hmp->tx_skbuff[i] = NULL; in hamachi_tx_timeout()
1207 hmp->tx_skbuff[i] = NULL; in hamachi_init_ring()
1245 hmp->tx_skbuff[entry] = skb; in hamachi_start_xmit()
1346 skb = hmp->tx_skbuff[entry]; in hamachi_interrupt()
1354 hmp->tx_skbuff[entry] = NULL; in hamachi_interrupt()
1731 skb = hmp->tx_skbuff[i]; in hamachi_close()
[all …]
/linux-6.12.1/drivers/net/ethernet/dec/tulip/
winbond-840.c
293 struct sk_buff* tx_skbuff[TX_RING_SIZE]; member
817 np->tx_skbuff[i] = NULL; in init_rxtx_rings()
844 if (np->tx_skbuff[i]) { in free_rxtx_rings()
846 np->tx_skbuff[i]->len, DMA_TO_DEVICE); in free_rxtx_rings()
847 dev_kfree_skb(np->tx_skbuff[i]); in free_rxtx_rings()
849 np->tx_skbuff[i] = NULL; in free_rxtx_rings()
996 np->tx_skbuff[entry] = skb; in start_tx()
1073 np->stats.tx_bytes += np->tx_skbuff[entry]->len; in netdev_tx_done()
1079 np->tx_skbuff[entry]->len, DMA_TO_DEVICE); in netdev_tx_done()
1080 np->tx_q_bytes -= np->tx_skbuff[entry]->len; in netdev_tx_done()
[all …]
/linux-6.12.1/drivers/net/ethernet/dlink/
sundance.c
366 struct sk_buff* tx_skbuff[TX_RING_SIZE]; member
1056 np->tx_skbuff[i] = NULL; in init_ring()
1095 np->tx_skbuff[entry] = skb; in start_tx()
1129 np->tx_skbuff[entry] = NULL; in start_tx()
1151 skb = np->tx_skbuff[i]; in reset_tx()
1157 np->tx_skbuff[i] = NULL; in reset_tx()
1271 skb = np->tx_skbuff[entry]; in intr_handler()
1276 dev_consume_skb_irq(np->tx_skbuff[entry]); in intr_handler()
1277 np->tx_skbuff[entry] = NULL; in intr_handler()
1290 skb = np->tx_skbuff[entry]; in intr_handler()
[all …]
dl2k.c
451 skb = np->tx_skbuff[i]; in free_list()
457 np->tx_skbuff[i] = NULL; in free_list()
489 np->tx_skbuff[i] = NULL; in alloc_list()
718 np->tx_skbuff[entry] = skb; in start_xmit()
829 skb = np->tx_skbuff[entry]; in rio_free_tx()
838 np->tx_skbuff[entry] = NULL; in rio_free_tx()
/linux-6.12.1/drivers/net/wan/
fsl_ucc_hdlc.c
214 priv->tx_skbuff = kcalloc(priv->tx_ring_size, in uhdlc_init()
215 sizeof(*priv->tx_skbuff), in uhdlc_init()
217 if (!priv->tx_skbuff) { in uhdlc_init()
333 kfree(priv->tx_skbuff); in uhdlc_init()
407 priv->tx_skbuff[priv->skb_curtx] = skb; in ucc_hdlc_tx()
484 skb = priv->tx_skbuff[priv->skb_dirtytx]; in hdlc_tx_done()
495 priv->tx_skbuff[priv->skb_dirtytx] = NULL; in hdlc_tx_done()
778 kfree(priv->tx_skbuff); in uhdlc_memclean()
779 priv->tx_skbuff = NULL; in uhdlc_memclean()
fsl_ucc_hdlc.h
93 struct sk_buff **tx_skbuff; member
/linux-6.12.1/drivers/net/ethernet/3com/
3c515.c
302 struct sk_buff *tx_skbuff[TX_RING_SIZE]; member
833 vp->tx_skbuff[i] = NULL; in corkscrew_open()
1015 vp->tx_skbuff[entry] = skb; in corkscrew_start_xmit()
1166 if (lp->tx_skbuff[entry]) { in corkscrew_interrupt()
1167 dev_consume_skb_irq(lp->tx_skbuff[entry]); in corkscrew_interrupt()
1168 lp->tx_skbuff[entry] = NULL; in corkscrew_interrupt()
1446 if (vp->tx_skbuff[i]) { in corkscrew_close()
1447 dev_kfree_skb(vp->tx_skbuff[i]); in corkscrew_close()
1448 vp->tx_skbuff[i] = NULL; in corkscrew_close()
/linux-6.12.1/drivers/net/ethernet/smsc/
epic100.c
256 struct sk_buff* tx_skbuff[TX_RING_SIZE]; member
938 ep->tx_skbuff[i] = NULL; in epic_init_ring()
965 ep->tx_skbuff[entry] = skb; in epic_start_xmit()
1042 dev->stats.tx_bytes += ep->tx_skbuff[entry]->len; in epic_tx()
1047 skb = ep->tx_skbuff[entry]; in epic_tx()
1052 ep->tx_skbuff[entry] = NULL; in epic_tx()
1317 skb = ep->tx_skbuff[i]; in epic_close()
1318 ep->tx_skbuff[i] = NULL; in epic_close()
/linux-6.12.1/drivers/net/ethernet/sis/
sis900.c
183 struct sk_buff *tx_skbuff[NUM_TX_DESC]; member
1141 sis_priv->tx_skbuff[i] = NULL; in sis900_init_tx_ring()
1570 struct sk_buff *skb = sis_priv->tx_skbuff[i]; in sis900_tx_timeout()
1577 sis_priv->tx_skbuff[i] = NULL; in sis900_tx_timeout()
1621 sis_priv->tx_skbuff[entry] = skb; in sis900_start_xmit()
1630 sis_priv->tx_skbuff[entry] = NULL; in sis900_start_xmit()
1942 skb = sis_priv->tx_skbuff[entry]; in sis900_finish_xmit()
1947 sis_priv->tx_skbuff[entry] = NULL; in sis900_finish_xmit()
2002 skb = sis_priv->tx_skbuff[i]; in sis900_close()
2008 sis_priv->tx_skbuff[i] = NULL; in sis900_close()
/linux-6.12.1/drivers/net/ethernet/aeroflex/
greth.h
103 struct sk_buff *tx_skbuff[GRETH_TXBD_NUM]; member
greth.c
187 struct sk_buff *skb = greth->tx_skbuff[greth->tx_last]; in greth_clean_rings()
491 greth->tx_skbuff[greth->tx_next] = skb; in greth_start_xmit_gbit()
520 greth->tx_skbuff[curr_tx] = NULL; in greth_start_xmit_gbit()
687 skb = greth->tx_skbuff[tx_last]; in greth_clean_tx_gbit()
701 greth->tx_skbuff[tx_last] = NULL; in greth_clean_tx_gbit()
/linux-6.12.1/drivers/net/ethernet/freescale/
gianfar.c
420 priv->tx_queue[i]->tx_skbuff = NULL; in gfar_alloc_tx_queues()
1073 if (!tx_queue->tx_skbuff[i]) in free_skb_tx_queue()
1079 for (j = 0; j < skb_shinfo(tx_queue->tx_skbuff[i])->nr_frags; in free_skb_tx_queue()
1087 dev_kfree_skb_any(tx_queue->tx_skbuff[i]); in free_skb_tx_queue()
1088 tx_queue->tx_skbuff[i] = NULL; in free_skb_tx_queue()
1090 kfree(tx_queue->tx_skbuff); in free_skb_tx_queue()
1091 tx_queue->tx_skbuff = NULL; in free_skb_tx_queue()
1138 if (tx_queue->tx_skbuff) in free_skb_resources()
1387 tx_queue->tx_skbuff = in gfar_alloc_skb_resources()
1389 sizeof(*tx_queue->tx_skbuff), in gfar_alloc_skb_resources()
[all …]
/linux-6.12.1/drivers/net/ethernet/via/
via-rhine.c
446 struct sk_buff *tx_skbuff[TX_RING_SIZE]; member
1310 rp->tx_skbuff[i] = NULL; in alloc_tbufs()
1333 if (rp->tx_skbuff[i]) { in free_tbufs()
1337 rp->tx_skbuff[i]->len, in free_tbufs()
1340 dev_kfree_skb(rp->tx_skbuff[i]); in free_tbufs()
1342 rp->tx_skbuff[i] = NULL; in free_tbufs()
1786 rp->tx_skbuff[entry] = skb; in rhine_start_tx()
1794 rp->tx_skbuff[entry] = NULL; in rhine_start_tx()
1938 skb = rp->tx_skbuff[entry]; in rhine_tx()
1981 rp->tx_skbuff[entry] = NULL; in rhine_tx()
/linux-6.12.1/drivers/net/ethernet/natsemi/
natsemi.c
547 struct sk_buff *tx_skbuff[TX_RING_SIZE]; member
1976 np->tx_skbuff[i] = NULL; in init_ring()
2012 if (np->tx_skbuff[i]) { in drain_tx()
2014 np->tx_skbuff[i]->len, DMA_TO_DEVICE); in drain_tx()
2015 dev_kfree_skb(np->tx_skbuff[i]); in drain_tx()
2018 np->tx_skbuff[i] = NULL; in drain_tx()
2099 np->tx_skbuff[entry] = skb; in start_tx()
2103 np->tx_skbuff[entry] = NULL; in start_tx()
2154 dev->stats.tx_bytes += np->tx_skbuff[entry]->len; in netdev_tx_done()
2169 np->tx_skbuff[entry]->len, DMA_TO_DEVICE); in netdev_tx_done()
[all …]
/linux-6.12.1/drivers/net/ethernet/samsung/sxgbe/
sxgbe_main.c
415 tx_ring->tx_skbuff = devm_kcalloc(dev, tx_rsize, in init_tx_ring()
418 if (!tx_ring->tx_skbuff) in init_tx_ring()
627 dev_kfree_skb_any(txqueue->tx_skbuff[dma_desc]); in tx_free_ring_skbufs()
628 txqueue->tx_skbuff[dma_desc] = NULL; in tx_free_ring_skbufs()
748 struct sk_buff *skb = tqueue->tx_skbuff[entry]; in sxgbe_tx_queue_clean()
771 tqueue->tx_skbuff[entry] = NULL; in sxgbe_tx_queue_clean()
1322 tqueue->tx_skbuff[entry] = skb; in sxgbe_xmit()
1367 tqueue->tx_skbuff[entry] = NULL; in sxgbe_xmit()
/linux-6.12.1/drivers/net/ethernet/renesas/
sh_eth.c
1251 if (mdp->tx_skbuff[entry]) { in sh_eth_tx_free()
1256 dev_kfree_skb_irq(mdp->tx_skbuff[entry]); in sh_eth_tx_free()
1257 mdp->tx_skbuff[entry] = NULL; in sh_eth_tx_free()
1313 kfree(mdp->tx_skbuff); in sh_eth_ring_free()
1314 mdp->tx_skbuff = NULL; in sh_eth_ring_free()
1381 mdp->tx_skbuff[i] = NULL; in sh_eth_ring_format()
1418 mdp->tx_skbuff = kcalloc(mdp->num_tx_ring, sizeof(*mdp->tx_skbuff), in sh_eth_ring_init()
1420 if (!mdp->tx_skbuff) in sh_eth_ring_init()
2479 dev_kfree_skb(mdp->tx_skbuff[i]); in sh_eth_tx_timeout()
2480 mdp->tx_skbuff[i] = NULL; in sh_eth_tx_timeout()
[all …]
/linux-6.12.1/drivers/net/ethernet/stmicro/stmmac/
ring_mode.c
53 tx_q->tx_skbuff[entry] = NULL; in jumbo_frm()
chain_mode.c
48 tx_q->tx_skbuff[entry] = NULL; in jumbo_frm()
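
Across all of these drivers the tx_skbuff slots follow the same lifecycle: the start_xmit path stores the skb in tx_skbuff[entry] once the frame is queued, the TX-completion or interrupt path frees it and clears the slot, and the ring reset/close paths drop anything still pending. The sketch below is an illustration only, not code from any driver listed: names are hypothetical, the ring is fixed-size as in lance.c or yellowfin.c, and malloc()/free() stand in for skb allocation and dev_consume_skb_irq()/dev_kfree_skb().

/* Self-contained sketch of the tx_skbuff bookkeeping lifecycle. */
#include <stdio.h>
#include <stdlib.h>

#define TX_RING_SIZE 8                  /* ring length, as in the fixed-size drivers above */

struct sk_buff { unsigned int len; };   /* stand-in for the kernel's struct sk_buff */

struct tx_ring {
	struct sk_buff *tx_skbuff[TX_RING_SIZE]; /* one slot per descriptor */
	unsigned int cur_tx;            /* next slot filled by start_xmit */
	unsigned int dirty_tx;          /* next slot the "hardware" completes */
};

static void tx_ring_init(struct tx_ring *r)     /* cf. the *_init_ring() loops above */
{
	unsigned int i;

	for (i = 0; i < TX_RING_SIZE; i++)
		r->tx_skbuff[i] = NULL;
	r->cur_tx = r->dirty_tx = 0;
}

/* start_xmit path: remember the skb so the completion path can free it later */
static void tx_ring_xmit(struct tx_ring *r, struct sk_buff *skb)
{
	unsigned int entry = r->cur_tx++ % TX_RING_SIZE;

	r->tx_skbuff[entry] = skb;
}

/* completion/interrupt path: release the skb and clear the slot */
static void tx_ring_complete(struct tx_ring *r)
{
	unsigned int entry = r->dirty_tx++ % TX_RING_SIZE;

	if (r->tx_skbuff[entry]) {
		free(r->tx_skbuff[entry]);      /* dev_consume_skb_irq() in the kernel */
		r->tx_skbuff[entry] = NULL;
	}
}

/* close/reset path: drop anything still queued */
static void tx_ring_clean(struct tx_ring *r)
{
	unsigned int i;

	for (i = 0; i < TX_RING_SIZE; i++) {
		free(r->tx_skbuff[i]);          /* dev_kfree_skb() in the kernel; free(NULL) is a no-op */
		r->tx_skbuff[i] = NULL;
	}
}

int main(void)
{
	struct tx_ring ring;
	struct sk_buff *skb = malloc(sizeof(*skb));

	if (!skb)
		return 1;
	tx_ring_init(&ring);
	skb->len = 64;
	tx_ring_xmit(&ring, skb);       /* queue one frame */
	tx_ring_complete(&ring);        /* "hardware" reports it done */
	tx_ring_clean(&ring);           /* nothing left, but safe to call */
	printf("tx ring drained\n");
	return 0;
}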
