Lines matching refs: cur_tx (drivers/net/ethernet/stmicro/stmmac/stmmac_main.c)

367 if (tx_q->dirty_tx > tx_q->cur_tx) in stmmac_tx_avail()
368 avail = tx_q->dirty_tx - tx_q->cur_tx - 1; in stmmac_tx_avail()
370 avail = priv->dma_conf.dma_tx_size - tx_q->cur_tx + tx_q->dirty_tx - 1; in stmmac_tx_avail()
418 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_enable_eee_mode()
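The stmmac_tx_avail() hits above (lines 367-370) are the classic circular-ring free-space computation: cur_tx is the producer index, dirty_tx the consumer index, and one slot is always kept unused so that a full ring never looks identical to an empty one. The dirty_tx != cur_tx test at line 418 is then simply "the ring is not empty", i.e. descriptors are still in flight, which is why stmmac_enable_eee_mode() holds off there. A minimal standalone sketch of the same arithmetic (the ring size and the test values are illustrative, not the driver's):

#include <assert.h>
#include <stdio.h>

/* Illustrative ring size; the driver reads it from priv->dma_conf.dma_tx_size. */
#define TX_RING_SIZE 512u

/* Free descriptors between producer (cur) and consumer (dirty),
 * reserving one slot so a full ring never looks empty. */
static unsigned int tx_avail(unsigned int dirty, unsigned int cur)
{
	if (dirty > cur)
		return dirty - cur - 1;
	return TX_RING_SIZE - cur + dirty - 1;
}

int main(void)
{
	assert(tx_avail(0, 0) == TX_RING_SIZE - 1); /* empty ring: all but one slot free */
	assert(tx_avail(10, 9) == 0);               /* full: the next push would hit dirty */
	assert(tx_avail(5, 500) == 16);             /* producer has wrapped past the end */
	printf("ok\n");
	return 0;
}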
2484 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
2576 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_xdp_xmit_zc()
2577 entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
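Every producer path in this listing advances the index through STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size), as at line 2576 above. Assuming the ring size is a power of two, that increment-and-wrap reduces to a mask; the macro below mirrors that behaviour as a sketch, not a copy of the driver's header:

#include <assert.h>

/* Advance a ring index by one, wrapping at 'size'; assumes size is a
 * power of two so the modulo becomes a cheap mask. */
#define GET_ENTRY(x, size) (((x) + 1) & ((size) - 1))

int main(void)
{
	unsigned int size = 512;

	assert(GET_ENTRY(0u, size) == 1);
	assert(GET_ENTRY(510u, size) == 511);
	assert(GET_ENTRY(511u, size) == 0); /* wraps back to slot 0 */
	return 0;
}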
2637 while ((entry != tx_q->cur_tx) && count < priv->dma_conf.dma_tx_size) { in stmmac_tx_clean()
2792 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_tx_clean()
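stmmac_tx_clean() (lines 2637 and 2792) is the consumer side of the same ring: it starts at dirty_tx, walks toward cur_tx releasing completed descriptors, and bounds the walk by dma_tx_size as a safety net. A hedged sketch of that loop shape, with owned_by_hw[] and release_entry() standing in for the driver's real descriptor-status and unmap logic:

#include <stdbool.h>
#include <stdio.h>

#define TX_RING_SIZE 512u

/* Illustrative stand-in for the hardware OWN bit on each descriptor. */
static bool owned_by_hw[TX_RING_SIZE];

static void release_entry(unsigned int entry)
{
	/* In the driver: unmap the DMA buffer, free the skb/XDP frame,
	 * clear the slot. Elided here. */
	(void)entry;
}

static unsigned int next_entry(unsigned int x)
{
	return (x + 1) & (TX_RING_SIZE - 1);
}

/* Consumer: reclaim completed descriptors between dirty and cur. */
static unsigned int tx_clean(unsigned int *dirty, unsigned int cur)
{
	unsigned int entry = *dirty, count = 0;

	while (entry != cur && count < TX_RING_SIZE) {
		if (owned_by_hw[entry]) /* DMA has not finished this one yet */
			break;
		release_entry(entry);
		entry = next_entry(entry);
		count++;
	}
	*dirty = entry; /* dirty chases cur */
	return count;
}

int main(void)
{
	unsigned int dirty = 500, cur = 5; /* producer has wrapped past the end */

	printf("reclaimed %u\n", tx_clean(&dirty, cur)); /* prints: reclaimed 17 */
	return 0;
}

If dirty_tx still trails cur_tx after the walk, descriptors remain in flight, which is what the check at line 2792 detects before re-arming the cleanup.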
4099 p = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_vlan_insert()
4101 p = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_vlan_insert()
4107 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_vlan_insert()
4135 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_allocator()
4137 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_allocator()
4140 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_allocator()
4142 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_allocator()
4180 tx_q->tx_tail_addr = tx_q->dma_tx_phy + (tx_q->cur_tx * desc_size); in stmmac_flush_tx_descriptors()
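Line 4180 turns the ring index into a bus address for the hardware tail pointer: the physical base of the descriptor ring plus cur_tx times the per-descriptor size. A tiny worked example (the base address and the 16-byte descriptor size are hypothetical; the driver picks desc_size to match the descriptor layout the queue actually uses):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint64_t dma_tx_phy = 0x80000000ull; /* hypothetical ring base (bus address) */
	unsigned int cur_tx = 37;
	unsigned int desc_size = 16;         /* illustrative: a basic descriptor */

	/* Tail pointer = ring base + index * per-descriptor size (cf. line 4180). */
	uint64_t tail = dma_tx_phy + (uint64_t)cur_tx * desc_size;

	assert(tail == 0x80000250ull); /* 0x80000000 + 37 * 16 = 0x80000250 */
	return 0;
}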
4243 first_tx = tx_q->cur_tx; in stmmac_tso_xmit()
4275 mss_desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4277 mss_desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4281 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_xmit()
4283 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_xmit()
4293 first_entry = tx_q->cur_tx; in stmmac_tso_xmit()
4338 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf = des; in stmmac_tso_xmit()
4339 tx_q->tx_skbuff_dma[tx_q->cur_tx].len = skb_headlen(skb); in stmmac_tso_xmit()
4340 tx_q->tx_skbuff_dma[tx_q->cur_tx].map_as_page = false; in stmmac_tso_xmit()
4341 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4356 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf = des; in stmmac_tso_xmit()
4357 tx_q->tx_skbuff_dma[tx_q->cur_tx].len = skb_frag_size(frag); in stmmac_tso_xmit()
4358 tx_q->tx_skbuff_dma[tx_q->cur_tx].map_as_page = true; in stmmac_tso_xmit()
4359 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4362 tx_q->tx_skbuff_dma[tx_q->cur_tx].last_segment = true; in stmmac_tso_xmit()
4365 tx_q->tx_skbuff[tx_q->cur_tx] = skb; in stmmac_tso_xmit()
4366 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4369 tx_packets = (tx_q->cur_tx + 1) - first_tx; in stmmac_tso_xmit()
4386 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4388 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4399 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_tso_xmit()
4447 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_tso_xmit()
4448 tx_q->cur_tx, first, nfrags); in stmmac_tso_xmit()
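The stmmac_tso_xmit() hits show the producer bookkeeping pattern: remember first_tx before filling (line 4243), record buf/len/map_as_page/buf_type per slot in tx_skbuff_dma[], advance cur_tx once per descriptor, and attach the skb only to the last slot (line 4365) so the clean path frees it exactly once. A condensed sketch of that per-slot metadata; the field names mirror the listing, but the struct itself is illustrative:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define TX_RING_SIZE 512u

/* Per-slot bookkeeping mirroring the tx_skbuff_dma[] fields in the listing. */
struct tx_slot {
	uint64_t buf;     /* DMA address of this segment */
	size_t len;       /* bytes mapped for this segment */
	bool map_as_page; /* page fragment vs. linear head */
	bool last_segment;
	void *skb;        /* set on the last slot only */
};

static struct tx_slot slots[TX_RING_SIZE];

static unsigned int next_entry(unsigned int x)
{
	return (x + 1) & (TX_RING_SIZE - 1);
}

/* Queue one packet made of nsegs pre-mapped segments, starting at 'cur'. */
static unsigned int queue_packet(unsigned int cur, void *skb,
				 const uint64_t *dma, const size_t *len,
				 unsigned int nsegs)
{
	for (unsigned int i = 0; i < nsegs; i++) {
		struct tx_slot *s = &slots[cur];

		s->buf = dma[i];
		s->len = len[i];
		s->map_as_page = (i > 0);              /* head is linear, rest are pages */
		s->last_segment = (i == nsegs - 1);
		s->skb = s->last_segment ? skb : NULL; /* freed once, at cleanup time */
		cur = next_entry(cur);                 /* the STMMAC_GET_ENTRY step */
	}
	return cur; /* new cur_tx; (cur - first) counts slots used, cf. line 4369 */
}

int main(void)
{
	static char dummy_skb;
	uint64_t dma[2] = { 0x1000, 0x2000 };
	size_t len[2] = { 128, 1400 };

	return queue_packet(0, &dummy_skb, dma, len, 2) == 2 ? 0 : 1;
}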
4516 first_tx = tx_q->cur_tx; in stmmac_xmit()
4551 entry = tx_q->cur_tx; in stmmac_xmit()
4670 tx_q->cur_tx = entry; in stmmac_xmit()
4675 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_xmit()
4902 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_xdpf()
4974 tx_q->cur_tx = entry; in stmmac_xdp_xmit_xdpf()
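Both stmmac_xmit() and stmmac_xdp_xmit_xdpf() copy cur_tx into a local entry, build every descriptor against the local, and publish it back only at the end (lines 4551/4670 and 4902/4974), so a failure partway through leaves the advertised index untouched. A minimal sketch of that publish-last pattern, with error paths and descriptor filling elided and all names illustrative:

#include <stdbool.h>

#define TX_RING_SIZE 512u

struct txq {
	unsigned int cur_tx;
};

/* Stand-in for building one descriptor; the driver can fail here,
 * e.g. on a DMA-mapping error. */
static bool fill_descriptor(unsigned int entry)
{
	(void)entry;
	return true;
}

/* Work on a local index; publish to q->cur_tx only once everything succeeded. */
static bool xmit(struct txq *q, unsigned int ndesc)
{
	unsigned int entry = q->cur_tx;

	for (unsigned int i = 0; i < ndesc; i++) {
		if (!fill_descriptor(entry))
			return false;               /* q->cur_tx left untouched */
		entry = (entry + 1) & (TX_RING_SIZE - 1);
	}
	q->cur_tx = entry;                          /* publish once, at the end */
	return true;
}

int main(void)
{
	struct txq q = { .cur_tx = 0 };

	return (xmit(&q, 3) && q.cur_tx == 3) ? 0 : 1;
}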
7914 tx_q->cur_tx = 0; in stmmac_reset_tx_queue()