Lines matching references to tpd_ring (each entry: source line number, matching source line, enclosing function):
790 adapter->tpd_ring[0].count = 1024; in atl1c_sw_init()
878 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_clean_tx_ring() local
883 ring_count = tpd_ring->count; in atl1c_clean_tx_ring()
885 buffer_info = &tpd_ring->buffer_info[index]; in atl1c_clean_tx_ring()
892 memset(tpd_ring->desc, 0, sizeof(struct atl1c_tpd_desc) * in atl1c_clean_tx_ring()
894 atomic_set(&tpd_ring->next_to_clean, 0); in atl1c_clean_tx_ring()
895 tpd_ring->next_to_use = 0; in atl1c_clean_tx_ring()
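
The atl1c_clean_tx_ring() hits above free whatever buffers are still attached to a transmit ring, zero its descriptor memory, and rewind both ring indices. A minimal standalone model of that reset step, using illustrative struct and field names rather than the driver's (DMA unmapping and the atomic_t index are left out):

/* Standalone sketch of resetting a TX descriptor ring; the demo_* types
 * are invented for illustration, not taken from the driver. */
#include <stdlib.h>
#include <string.h>

struct demo_tpd_desc { unsigned long long buffer_addr; unsigned int word1, word2; };
struct demo_buffer   { void *skb; };              /* stand-in for atl1c_buffer */

struct demo_tpd_ring {
	struct demo_tpd_desc *desc;
	struct demo_buffer   *buffer_info;
	unsigned int count;
	unsigned int next_to_use;
	unsigned int next_to_clean;               /* an atomic_t in the driver */
};

static void demo_clean_tx_ring(struct demo_tpd_ring *ring)
{
	unsigned int i;

	/* Drop whatever is still queued on the ring ... */
	for (i = 0; i < ring->count; i++) {
		free(ring->buffer_info[i].skb);
		ring->buffer_info[i].skb = NULL;
	}
	/* ... then wipe the descriptors and rewind both indices. */
	memset(ring->desc, 0, sizeof(struct demo_tpd_desc) * ring->count);
	ring->next_to_clean = 0;
	ring->next_to_use = 0;
}
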
928 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_init_ring_ptrs() local
935 tpd_ring[i].next_to_use = 0; in atl1c_init_ring_ptrs()
936 atomic_set(&tpd_ring[i].next_to_clean, 0); in atl1c_init_ring_ptrs()
937 buffer_info = tpd_ring[i].buffer_info; in atl1c_init_ring_ptrs()
938 for (j = 0; j < tpd_ring->count; j++) in atl1c_init_ring_ptrs()
971 if (adapter->tpd_ring[0].buffer_info) { in atl1c_free_ring_resources()
972 kfree(adapter->tpd_ring[0].buffer_info); in atl1c_free_ring_resources()
973 adapter->tpd_ring[0].buffer_info = NULL; in atl1c_free_ring_resources()
986 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_setup_ring_resources() local
1004 tpd_ring[i].count = tpd_ring[0].count; in atl1c_setup_ring_resources()
1006 size = sizeof(struct atl1c_buffer) * (tpd_ring->count * tqc + in atl1c_setup_ring_resources()
1008 tpd_ring->buffer_info = kzalloc(size, GFP_KERNEL); in atl1c_setup_ring_resources()
1009 if (unlikely(!tpd_ring->buffer_info)) in atl1c_setup_ring_resources()
1013 tpd_ring[i].adapter = adapter; in atl1c_setup_ring_resources()
1014 tpd_ring[i].num = i; in atl1c_setup_ring_resources()
1015 tpd_ring[i].buffer_info = (tpd_ring->buffer_info + count); in atl1c_setup_ring_resources()
1016 count += tpd_ring[i].count; in atl1c_setup_ring_resources()
1024 rfd_ring[i].buffer_info = (tpd_ring->buffer_info + count); in atl1c_setup_ring_resources()
1034 sizeof(struct atl1c_tpd_desc) * tpd_ring->count * tqc + in atl1c_setup_ring_resources()
1047 tpd_ring[0].dma = roundup(ring_header->dma, 8); in atl1c_setup_ring_resources()
1048 offset = tpd_ring[0].dma - ring_header->dma; in atl1c_setup_ring_resources()
1050 tpd_ring[i].dma = ring_header->dma + offset; in atl1c_setup_ring_resources()
1051 tpd_ring[i].desc = (u8 *)ring_header->desc + offset; in atl1c_setup_ring_resources()
1052 tpd_ring[i].size = in atl1c_setup_ring_resources()
1053 sizeof(struct atl1c_tpd_desc) * tpd_ring[i].count; in atl1c_setup_ring_resources()
1054 offset += roundup(tpd_ring[i].size, 8); in atl1c_setup_ring_resources()
1075 kfree(tpd_ring->buffer_info); in atl1c_setup_ring_resources()
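
atl1c_setup_ring_resources() makes a single buffer_info allocation and a single coherent ring_header allocation covering every queue, then carves per-queue slices out of them, rounding each TPD ring's start up to an 8-byte boundary (lines 1047-1054). The offset bookkeeping can be exercised on its own; the queue count, descriptor size and base address below are made-up values, not the driver's:

#include <stdio.h>
#include <stdint.h>

#define DEMO_QUEUES   4
#define DEMO_TPD_SIZE 16u                         /* bytes per descriptor, illustrative */

static uint64_t round_up8(uint64_t x) { return (x + 7) & ~7ull; }

int main(void)
{
	uint64_t base_dma = 0x1000000004ull;      /* pretend dma_alloc_coherent() result */
	unsigned int count[DEMO_QUEUES] = { 1024, 1024, 1024, 1024 };
	uint64_t dma[DEMO_QUEUES];
	uint64_t offset;
	int i;

	/* First ring starts at the base rounded up to 8 bytes, mirroring
	 * tpd_ring[0].dma = roundup(ring_header->dma, 8). */
	dma[0] = round_up8(base_dma);
	offset = dma[0] - base_dma;
	for (i = 0; i < DEMO_QUEUES; i++) {
		dma[i] = base_dma + offset;
		offset += round_up8((uint64_t)DEMO_TPD_SIZE * count[i]);
		printf("queue %d: dma=0x%llx size=%u\n", i,
		       (unsigned long long)dma[i], DEMO_TPD_SIZE * count[i]);
	}
	return 0;
}
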
1084 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_configure_des_ring() local
1093 (u32)((tpd_ring[0].dma & AT_DMA_HI_ADDR_MASK) >> 32)); in atl1c_configure_des_ring()
1097 (u32)(tpd_ring[i].dma & AT_DMA_LO_ADDR_MASK)); in atl1c_configure_des_ring()
1100 (u32)(tpd_ring[0].count & TPD_RING_SIZE_MASK)); in atl1c_configure_des_ring()
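
atl1c_configure_des_ring() then hands those addresses to the hardware: the upper 32 bits are taken from tpd_ring[0].dma and written once (all TX rings come out of the same allocation), while each queue gets its own low-32-bit base plus the shared ring size. The masking and shifting in isolation, with demo masks standing in for AT_DMA_HI_ADDR_MASK / AT_DMA_LO_ADDR_MASK:

#include <stdint.h>
#include <stdio.h>

/* Illustrative masks covering the upper and lower halves of a 64-bit
 * bus address; the driver's AT_DMA_*_ADDR_MASK play the same role. */
#define DEMO_HI_MASK 0xffffffff00000000ull
#define DEMO_LO_MASK 0x00000000ffffffffull

int main(void)
{
	uint64_t dma = 0x0000000123456780ull;     /* made-up descriptor base */
	uint32_t hi = (uint32_t)((dma & DEMO_HI_MASK) >> 32);
	uint32_t lo = (uint32_t)(dma & DEMO_LO_MASK);

	/* The driver writes hi once and lo per queue via its register helpers. */
	printf("hi=0x%08x lo=0x%08x\n", hi, lo);
	return 0;
}
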
1590 struct atl1c_tpd_ring *tpd_ring = in atl1c_clean_tx() local
1592 struct atl1c_adapter *adapter = tpd_ring->adapter; in atl1c_clean_tx()
1594 netdev_get_tx_queue(napi->dev, tpd_ring->num); in atl1c_clean_tx()
1597 u16 next_to_clean = atomic_read(&tpd_ring->next_to_clean); in atl1c_clean_tx()
1602 AT_READ_REGW(&adapter->hw, atl1c_qregs[tpd_ring->num].tpd_cons, in atl1c_clean_tx()
1606 buffer_info = &tpd_ring->buffer_info[next_to_clean]; in atl1c_clean_tx()
1612 if (++next_to_clean == tpd_ring->count) in atl1c_clean_tx()
1614 atomic_set(&tpd_ring->next_to_clean, next_to_clean); in atl1c_clean_tx()
1625 adapter->hw.intr_mask |= atl1c_qregs[tpd_ring->num].tx_isr; in atl1c_clean_tx()
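
In the completion path, atl1c_clean_tx() reads the hardware's TPD consumer index for its queue, then walks buffer_info entries from the software next_to_clean up to that index, releasing each one and wrapping at tpd_ring->count before storing the new value back. The shape of that walk, stripped of DMA unmapping, NAPI and byte/packet accounting (the types and the release callback here are invented):

/* Sketch of the completion walk: advance next_to_clean until it reaches
 * the consumer index reported by hardware, wrapping at count. */
struct demo_tx_ring {
	unsigned int count;
	unsigned int next_to_clean;
	void (*release)(unsigned int index);      /* stand-in for unmap + free */
};

static unsigned int demo_clean_tx(struct demo_tx_ring *ring,
				  unsigned int hw_next_to_clean)
{
	unsigned int cleaned = 0;
	unsigned int ntc = ring->next_to_clean;

	while (ntc != hw_next_to_clean) {
		ring->release(ntc);               /* buffer cleanup in the driver */
		cleaned++;
		if (++ntc == ring->count)         /* same wraparound as line 1612 */
			ntc = 0;
	}
	ring->next_to_clean = ntc;                /* atomic_set() in the driver */
	return cleaned;
}
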
1652 if (napi_schedule_prep(&adapter->tpd_ring[i].napi)) { in atl1c_intr_rx_tx()
1654 __napi_schedule(&adapter->tpd_ring[i].napi); in atl1c_intr_rx_tx()
1981 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_tpd_avail() local
1985 next_to_clean = atomic_read(&tpd_ring->next_to_clean); in atl1c_tpd_avail()
1986 next_to_use = tpd_ring->next_to_use; in atl1c_tpd_avail()
1990 (tpd_ring->count + next_to_clean - next_to_use - 1); in atl1c_tpd_avail()
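
atl1c_tpd_avail() reports how many descriptors the transmit path may still consume; one slot is deliberately kept unused so a full ring stays distinguishable from an empty one, which is where the "- 1" in line 1990 comes from. Only one branch is visible in the listing; the sketch below reconstructs the other branch as the usual ring-buffer formula and checks a few values:

#include <assert.h>

/* Free descriptors in a ring of 'count' slots, holding one slot back as
 * a full/empty sentinel. */
static unsigned int demo_tpd_avail(unsigned int count,
				   unsigned int next_to_use,
				   unsigned int next_to_clean)
{
	return next_to_clean > next_to_use ?
		next_to_clean - next_to_use - 1 :
		count + next_to_clean - next_to_use - 1;
}

int main(void)
{
	assert(demo_tpd_avail(1024, 0, 0) == 1023);   /* empty ring */
	assert(demo_tpd_avail(1024, 1023, 0) == 0);   /* "full": one slot held back */
	assert(demo_tpd_avail(1024, 10, 5) == 1018);  /* producer ahead of consumer */
	return 0;
}
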
2001 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_get_tpd() local
2005 next_to_use = tpd_ring->next_to_use; in atl1c_get_tpd()
2006 if (++tpd_ring->next_to_use == tpd_ring->count) in atl1c_get_tpd()
2007 tpd_ring->next_to_use = 0; in atl1c_get_tpd()
2008 tpd_desc = ATL1C_TPD_DESC(tpd_ring, next_to_use); in atl1c_get_tpd()
2016 struct atl1c_tpd_ring *tpd_ring = adapter->tpd_ring; in atl1c_get_tx_buffer() local
2018 return &tpd_ring->buffer_info[tpd - in atl1c_get_tx_buffer()
2019 (struct atl1c_tpd_desc *)tpd_ring->desc]; in atl1c_get_tx_buffer()
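
On the fill side, atl1c_get_tpd() hands out the descriptor at next_to_use and advances the index with the same wraparound test, while atl1c_get_tx_buffer() recovers a descriptor's index by pointer difference from the ring base so the matching buffer_info entry can be found. Both index manipulations, modelled with illustrative types:

#include <string.h>

struct demo_tpd  { unsigned long long buffer_addr; unsigned int word1, word2; };
struct demo_buf  { void *skb; };

struct demo_ring {
	struct demo_tpd *desc;                    /* descriptor array base */
	struct demo_buf *buffer_info;             /* parallel software state */
	unsigned int count;
	unsigned int next_to_use;
};

/* Hand out the next descriptor and advance the producer index with
 * wraparound, as atl1c_get_tpd() does. */
static struct demo_tpd *demo_get_tpd(struct demo_ring *ring)
{
	unsigned int use = ring->next_to_use;

	if (++ring->next_to_use == ring->count)
		ring->next_to_use = 0;
	/* Clear the slot before it is refilled (illustrative; this step is
	 * not visible in the listing above). */
	memset(&ring->desc[use], 0, sizeof(ring->desc[use]));
	return &ring->desc[use];
}

/* Map a descriptor back to its buffer_info slot by pointer arithmetic,
 * as atl1c_get_tx_buffer() does. */
static struct demo_buf *demo_get_buffer(struct demo_ring *ring,
					struct demo_tpd *tpd)
{
	return &ring->buffer_info[tpd - ring->desc];
}
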
2145 struct atl1c_tpd_ring *tpd_ring = &adpt->tpd_ring[queue]; in atl1c_tx_rollback() local
2150 first_index = first_tpd - (struct atl1c_tpd_desc *)tpd_ring->desc; in atl1c_tx_rollback()
2152 while (index != tpd_ring->next_to_use) { in atl1c_tx_rollback()
2153 tpd = ATL1C_TPD_DESC(tpd_ring, index); in atl1c_tx_rollback()
2154 buffer_info = &tpd_ring->buffer_info[index]; in atl1c_tx_rollback()
2157 if (++index == tpd_ring->count) in atl1c_tx_rollback()
2160 tpd_ring->next_to_use = first_index; in atl1c_tx_rollback()
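
atl1c_tx_rollback() is the error path: it derives the index of the first descriptor written for the failed packet by pointer difference (line 2150), walks forward to the current next_to_use releasing each slot, and finally rewinds next_to_use to that first index so the descriptors can be reused. A compact model of that walk, with an invented purge callback standing in for the driver's buffer cleanup:

/* Sketch of rolling back a partially queued packet: release every slot
 * written since 'first_index' and rewind the producer index. */
struct demo_rb_ring {
	unsigned int count;
	unsigned int next_to_use;
	void (*purge)(unsigned int index);        /* stand-in for buffer cleanup */
};

static void demo_tx_rollback(struct demo_rb_ring *ring, unsigned int first_index)
{
	unsigned int index = first_index;

	while (index != ring->next_to_use) {
		ring->purge(index);
		if (++index == ring->count)       /* same wraparound as the fill path */
			index = 0;
	}
	ring->next_to_use = first_index;
}
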
2263 struct atl1c_tpd_ring *tpd_ring = &adapter->tpd_ring[queue]; in atl1c_tx_queue() local
2266 tpd_ring->next_to_use); in atl1c_tx_queue()
2405 napi_enable(&adapter->tpd_ring[i].napi); in atl1c_up()
2430 napi_disable(&adapter->tpd_ring[i].napi); in atl1c_down()
2696 netif_napi_add_tx(netdev, &adapter->tpd_ring[i].napi, in atl1c_probe()