Lines matching refs: flaglen (usage sketches follow the listing)
344 __le32 flaglen; member
351 __le32 flaglen; member
952 return le32_to_cpu(prd->flaglen) in nv_descr_getlength()
958 return le32_to_cpu(prd->flaglen) & LEN_MASK_V2; in nv_descr_getlength_ex()
1847 np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL); in nv_alloc_rx()
1889 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); in nv_alloc_rx_optimized()
1932 np->rx_ring.orig[i].flaglen = 0; in nv_init_rx()
1935 np->rx_ring.ex[i].flaglen = 0; in nv_init_rx()
1968 np->tx_ring.orig[i].flaglen = 0; in nv_init_tx()
1971 np->tx_ring.ex[i].flaglen = 0; in nv_init_tx()
2031 np->tx_ring.orig[i].flaglen = 0; in nv_drain_tx()
2034 np->tx_ring.ex[i].flaglen = 0; in nv_drain_tx()
2062 np->rx_ring.orig[i].flaglen = 0; in nv_drain_rx()
2065 np->rx_ring.ex[i].flaglen = 0; in nv_drain_rx()
2271 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit()
2321 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit()
2343 prev_tx->flaglen |= cpu_to_le32(tx_flags_extra); in nv_start_xmit()
2357 start_tx->flaglen |= cpu_to_le32(tx_flags | tx_flags_extra); in nv_start_xmit()
2449 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit_optimized()
2499 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit_optimized()
2521 prev_tx->flaglen |= cpu_to_le32(NV_TX2_LASTPACKET); in nv_start_xmit_optimized()
2562 start_tx->flaglen |= cpu_to_le32(tx_flags | tx_flags_extra); in nv_start_xmit_optimized()
2589 np->tx_change_owner->first_tx_desc->flaglen |= in nv_tx_flip_ownership()
2615 !((flags = le32_to_cpu(np->get_tx.orig->flaglen)) & NV_TX_VALID) && in nv_tx_done()
2685 !((flags = le32_to_cpu(np->get_tx.ex->flaglen)) & NV_TX2_VALID) && in nv_tx_done_optimized()
2775 le32_to_cpu(np->tx_ring.orig[i].flaglen), in nv_tx_timeout()
2777 le32_to_cpu(np->tx_ring.orig[i+1].flaglen), in nv_tx_timeout()
2779 le32_to_cpu(np->tx_ring.orig[i+2].flaglen), in nv_tx_timeout()
2781 le32_to_cpu(np->tx_ring.orig[i+3].flaglen)); in nv_tx_timeout()
2791 le32_to_cpu(np->tx_ring.ex[i].flaglen), in nv_tx_timeout()
2794 le32_to_cpu(np->tx_ring.ex[i+1].flaglen), in nv_tx_timeout()
2797 le32_to_cpu(np->tx_ring.ex[i+2].flaglen), in nv_tx_timeout()
2800 le32_to_cpu(np->tx_ring.ex[i+3].flaglen)); in nv_tx_timeout()
2900 !((flags = le32_to_cpu(np->get_rx.orig->flaglen)) & NV_RX_AVAIL) && in nv_rx_process()
3002 !((flags = le32_to_cpu(np->get_rx.ex->flaglen)) & NV_RX2_AVAIL) && in nv_rx_process_optimized()
5198 np->tx_ring.orig[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5202 np->tx_ring.ex[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5211 flags = le32_to_cpu(np->rx_ring.orig[0].flaglen); in nv_loopback_test()
5215 flags = le32_to_cpu(np->rx_ring.ex[0].flaglen); in nv_loopback_test()
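
The two member hits (source lines 344 and 351) show flaglen living in two different descriptor structures of what appears to be the forcedeth driver. Below is a minimal, compilable sketch of those layouts; only the flaglen field is confirmed by the listing, and the surrounding field names are assumptions about the original and extended descriptor formats.

/* Sketch only: flaglen is confirmed by the listing above; every other
 * field name here is an assumption about the surrounding layout. */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t le32;            /* stand-in for the kernel's __le32 */

struct ring_desc {                /* original descriptor format (assumed layout) */
	le32 buf;                 /* DMA address of the buffer (assumption) */
	le32 flaglen;             /* status flags ORed with the length */
};

struct ring_desc_ex {             /* extended descriptor format (assumed layout) */
	le32 bufhigh;             /* high half of the DMA address (assumption) */
	le32 buflow;              /* low half of the DMA address (assumption) */
	le32 txvlan;              /* transmit VLAN tag (assumption) */
	le32 flaglen;             /* status flags ORed with the length */
};

int main(void)
{
	printf("orig: %zu bytes, ex: %zu bytes\n",
	       sizeof(struct ring_desc), sizeof(struct ring_desc_ex));
	return 0;
}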
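
On the receive side, the nv_alloc_rx()/nv_alloc_rx_optimized() hits write the buffer size and an AVAIL flag into the same flaglen word, nv_rx_process()/nv_rx_process_optimized() key on that flag being cleared, and nv_descr_getlength()/nv_descr_getlength_ex() recover the length by masking. The standalone sketch below walks that cycle; the DEMO_* constants are illustrative stand-ins for NV_RX_AVAIL/LEN_MASK_V2 rather than the driver's real values, and byte-order conversion is omitted because everything here runs on the CPU side.

/* rx_sketch.c -- fill/harvest cycle of a flaglen word, mirroring the listed
 * nv_alloc_rx() and nv_rx_process() lines.  Flag and mask values are
 * assumptions chosen for the demo; cpu_to_le32()/le32_to_cpu() are omitted. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_RX_AVAIL  0x80000000u  /* "descriptor owned by the NIC" (assumption) */
#define DEMO_LEN_MASK  0x00003fffu  /* low bits carry the byte count (assumption) */

int main(void)
{
	uint32_t rx_buf_sz = 1536;
	uint32_t flaglen;

	/* nv_alloc_rx(): hand the empty buffer to hardware -- buffer size in the
	 * low bits, AVAIL flag set in the same 32-bit word. */
	flaglen = rx_buf_sz | DEMO_RX_AVAIL;

	/* Pretend the NIC received a 64-byte frame: it clears AVAIL and rewrites
	 * the length field with the actual frame length. */
	flaglen = 64;

	/* nv_rx_process(): the descriptor is ready once AVAIL is no longer set;
	 * the frame length is recovered by masking, as in nv_descr_getlength(). */
	if (!(flaglen & DEMO_RX_AVAIL))
		printf("frame ready, %u bytes\n", (unsigned)(flaglen & DEMO_LEN_MASK));

	return 0;
}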
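
On the transmit side, nv_start_xmit()/nv_start_xmit_optimized() store each fragment's byte count minus one in flaglen, OR extra flags such as the last-packet marker into the previous descriptor, and only then OR the ownership flags into the first descriptor; nv_tx_done()/nv_tx_done_optimized() reclaim descriptors once the VALID bit is cleared. The sketch below mirrors that ordering with made-up flag values; the real NV_TX_VALID/NV_TX2_LASTPACKET constants are defined in the driver.

/* tx_sketch.c -- flaglen handling on transmit, mirroring the listed
 * nv_start_xmit() and nv_tx_done() lines.  Flag values and the two-entry
 * "ring" are assumptions for the demo; only the ordering of the writes
 * follows the listing. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_TX_VALID       0x80000000u  /* descriptor owned by the NIC (assumption) */
#define DEMO_TX_LASTPACKET  0x00010000u  /* last fragment of a frame (assumption) */

int main(void)
{
	uint32_t ring[2];                     /* flaglen words of two fragments */
	uint32_t frag_len[2] = { 1000, 500 };

	/* nv_start_xmit(): each fragment stores its byte count minus one.  The
	 * first descriptor is filled without VALID so the NIC cannot start on a
	 * half-built chain; later fragments carry VALID immediately. */
	ring[0] = frag_len[0] - 1;
	ring[1] = (frag_len[1] - 1) | DEMO_TX_VALID;

	/* prev_tx->flaglen |= tx_flags_extra: mark the last fragment. */
	ring[1] |= DEMO_TX_LASTPACKET;

	/* start_tx->flaglen |= tx_flags | tx_flags_extra: only now is the first
	 * descriptor made VALID, handing the whole chain to hardware. */
	ring[0] |= DEMO_TX_VALID;

	/* Pretend the NIC transmitted the frame and cleared VALID on both. */
	ring[0] &= ~DEMO_TX_VALID;
	ring[1] &= ~DEMO_TX_VALID;

	/* nv_tx_done(): descriptors are reclaimed once VALID is cleared. */
	for (int i = 0; i < 2; i++)
		if (!(ring[i] & DEMO_TX_VALID))
			printf("desc %d reclaimed (%u bytes)\n",
			       i, (unsigned)((ring[i] & 0xffffu) + 1));

	return 0;
}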