Lines Matching +full:mtl +full:-rx +full:-config (each match shows the file line number, the matched source line, and the enclosing function):

5  * This program is dual-licensed; you may select either version 2 of
26 #include "dwc-xlgmac.h"
27 #include "dwc-xlgmac-reg.h"
31 return !XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_tx_complete()
40 regval = readl(pdata->mac_regs + MAC_RCR); in xlgmac_disable_rx_csum()
43 writel(regval, pdata->mac_regs + MAC_RCR); in xlgmac_disable_rx_csum()
52 regval = readl(pdata->mac_regs + MAC_RCR); in xlgmac_enable_rx_csum()
55 writel(regval, pdata->mac_regs + MAC_RCR); in xlgmac_enable_rx_csum()
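The xlgmac_disable_rx_csum()/xlgmac_enable_rx_csum() matches above are the driver's standard read-modify-write access to MAC_RCR, toggling the Rx checksum-offload field. A minimal sketch of that pattern, assuming XLGMAC_SET_REG_BITS is a plain mask-and-shift helper; the IPC position and length below are placeholders, not values taken from dwc-xlgmac-reg.h:

#include <stdint.h>

/* Placeholder field description; the real values live in dwc-xlgmac-reg.h. */
#define MAC_RCR_IPC_POS	9
#define MAC_RCR_IPC_LEN	1

static uint32_t set_reg_bits(uint32_t var, unsigned int pos, unsigned int len,
			     uint32_t val)
{
	uint32_t mask = (((uint32_t)1 << len) - 1) << pos;

	return (var & ~mask) | ((val << pos) & mask);
}

/* Read MAC_RCR, update only the checksum-offload field, write it back. */
static void rx_csum_set(volatile uint32_t *mac_rcr, int enable)
{
	uint32_t regval = *mac_rcr;			/* readl() */

	regval = set_reg_bits(regval, MAC_RCR_IPC_POS, MAC_RCR_IPC_LEN,
			      enable ? 1 : 0);
	*mac_rcr = regval;				/* writel() */
}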
68 writel(mac_addr_hi, pdata->mac_regs + MAC_MACA0HR); in xlgmac_set_mac_address()
69 writel(mac_addr_lo, pdata->mac_regs + MAC_MACA0LR); in xlgmac_set_mac_address()
86 mac_addr[0] = ha->addr[0]; in xlgmac_set_mac_reg()
87 mac_addr[1] = ha->addr[1]; in xlgmac_set_mac_reg()
88 mac_addr[2] = ha->addr[2]; in xlgmac_set_mac_reg()
89 mac_addr[3] = ha->addr[3]; in xlgmac_set_mac_reg()
91 mac_addr[0] = ha->addr[4]; in xlgmac_set_mac_reg()
92 mac_addr[1] = ha->addr[5]; in xlgmac_set_mac_reg()
94 netif_dbg(pdata, drv, pdata->netdev, in xlgmac_set_mac_reg()
96 ha->addr, *mac_reg); in xlgmac_set_mac_reg()
104 writel(mac_addr_hi, pdata->mac_regs + *mac_reg); in xlgmac_set_mac_reg()
106 writel(mac_addr_lo, pdata->mac_regs + *mac_reg); in xlgmac_set_mac_reg()
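xlgmac_set_mac_address() and xlgmac_set_mac_reg() split a 6-byte MAC address across a low and a high 32-bit register, as the byte-copy fragments above suggest (bytes 0-3 into the low word, bytes 4-5 into the high word). A hedged sketch of just the packing step; the address-enable bit and the real register offsets are omitted:

#include <stdint.h>

static void pack_mac_address(const uint8_t addr[6],
			     uint32_t *mac_addr_lo, uint32_t *mac_addr_hi)
{
	/* Low register: first four octets, least-significant byte first. */
	*mac_addr_lo = (uint32_t)addr[0] | ((uint32_t)addr[1] << 8) |
		       ((uint32_t)addr[2] << 16) | ((uint32_t)addr[3] << 24);

	/* High register: remaining two octets (the enable bit is set separately). */
	*mac_addr_hi = (uint32_t)addr[4] | ((uint32_t)addr[5] << 8);
}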
114 regval = readl(pdata->mac_regs + MAC_VLANTR); in xlgmac_enable_rx_vlan_stripping()
115 /* Put the VLAN tag in the Rx descriptor */ in xlgmac_enable_rx_vlan_stripping()
121 /* Check only C-TAG (0x8100) packets */ in xlgmac_enable_rx_vlan_stripping()
124 /* Don't consider an S-TAG (0x88A8) packet as a VLAN packet */ in xlgmac_enable_rx_vlan_stripping()
130 writel(regval, pdata->mac_regs + MAC_VLANTR); in xlgmac_enable_rx_vlan_stripping()
139 regval = readl(pdata->mac_regs + MAC_VLANTR); in xlgmac_disable_rx_vlan_stripping()
142 writel(regval, pdata->mac_regs + MAC_VLANTR); in xlgmac_disable_rx_vlan_stripping()
151 regval = readl(pdata->mac_regs + MAC_PFR); in xlgmac_enable_rx_vlan_filtering()
155 writel(regval, pdata->mac_regs + MAC_PFR); in xlgmac_enable_rx_vlan_filtering()
157 regval = readl(pdata->mac_regs + MAC_VLANTR); in xlgmac_enable_rx_vlan_filtering()
164 /* Only filter on the lower 12-bits of the VLAN tag */ in xlgmac_enable_rx_vlan_filtering()
175 writel(regval, pdata->mac_regs + MAC_VLANTR); in xlgmac_enable_rx_vlan_filtering()
184 regval = readl(pdata->mac_regs + MAC_PFR); in xlgmac_disable_rx_vlan_filtering()
188 writel(regval, pdata->mac_regs + MAC_PFR); in xlgmac_disable_rx_vlan_filtering()
226 for_each_set_bit(vid, pdata->active_vlans, VLAN_N_VID) { in xlgmac_update_vlan_hash_table()
234 regval = readl(pdata->mac_regs + MAC_VLANHTR); in xlgmac_update_vlan_hash_table()
238 writel(regval, pdata->mac_regs + MAC_VLANHTR); in xlgmac_update_vlan_hash_table()
249 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_PFR), in xlgmac_set_promiscuous_mode()
254 netif_dbg(pdata, drv, pdata->netdev, "%s promiscuous mode\n", in xlgmac_set_promiscuous_mode()
257 regval = readl(pdata->mac_regs + MAC_PFR); in xlgmac_set_promiscuous_mode()
260 writel(regval, pdata->mac_regs + MAC_PFR); in xlgmac_set_promiscuous_mode()
266 if (pdata->netdev->features & NETIF_F_HW_VLAN_CTAG_FILTER) in xlgmac_set_promiscuous_mode()
279 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_PFR), in xlgmac_set_all_multicast_mode()
284 netif_dbg(pdata, drv, pdata->netdev, "%s allmulti mode\n", in xlgmac_set_all_multicast_mode()
287 regval = readl(pdata->mac_regs + MAC_PFR); in xlgmac_set_all_multicast_mode()
290 writel(regval, pdata->mac_regs + MAC_PFR); in xlgmac_set_all_multicast_mode()
297 struct net_device *netdev = pdata->netdev; in xlgmac_set_mac_addn_addrs()
303 addn_macs = pdata->hw_feat.addn_mac; in xlgmac_set_mac_addn_addrs()
310 addn_macs--; in xlgmac_set_mac_addn_addrs()
318 addn_macs--; in xlgmac_set_mac_addn_addrs()
324 while (addn_macs--) in xlgmac_set_mac_addn_addrs()
332 struct net_device *netdev = pdata->netdev; in xlgmac_set_mac_hash_table()
338 hash_table_shift = 26 - (pdata->hw_feat.hash_table_size >> 7); in xlgmac_set_mac_hash_table()
339 hash_table_count = pdata->hw_feat.hash_table_size / 32; in xlgmac_set_mac_hash_table()
344 crc = bitrev32(~crc32_le(~0, ha->addr, ETH_ALEN)); in xlgmac_set_mac_hash_table()
350 crc = bitrev32(~crc32_le(~0, ha->addr, ETH_ALEN)); in xlgmac_set_mac_hash_table()
358 writel(hash_table[i], pdata->mac_regs + hash_reg); in xlgmac_set_mac_hash_table()
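For xlgmac_set_mac_hash_table(), the matches show the bit-reversed CRC-32 of each address and the table shift being derived from the hash table size, but not the step that turns the CRC into a bit of hash_table[]. The sketch below fills that gap the conventional way (the upper CRC bits select a 32-bit register, the low five bits select the bit within it); treat the exact reduction as an assumption rather than a quote from the file:

#include <stdint.h>

/* hash_table_size is the hardware table size in bits (e.g. 64 or 256);
 * crc is assumed to already be the bit-reversed CRC-32 of the address,
 * exactly as computed in the matched lines.
 */
static void hash_table_set_bit(uint32_t crc, unsigned int hash_table_size,
			       uint32_t *hash_table)
{
	unsigned int hash_table_shift = 26 - (hash_table_size >> 7);

	crc >>= hash_table_shift;		/* keep only the index bits */
	hash_table[crc >> 5] |= 1u << (crc & 0x1f);
}

With a 64-bit table the shift is 26, leaving a 6-bit index spread over two 32-bit registers, which matches the hash_table_count = size / 32 line above.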
365 if (pdata->hw_feat.hash_table_size) in xlgmac_add_mac_addresses()
377 xlgmac_set_mac_address(pdata, pdata->netdev->dev_addr); in xlgmac_config_mac_address()
380 if (pdata->hw_feat.hash_table_size) { in xlgmac_config_mac_address()
381 regval = readl(pdata->mac_regs + MAC_PFR); in xlgmac_config_mac_address()
388 writel(regval, pdata->mac_regs + MAC_PFR); in xlgmac_config_mac_address()
397 val = (pdata->netdev->mtu > XLGMAC_STD_PACKET_MTU) ? 1 : 0; in xlgmac_config_jumbo_enable()
399 regval = readl(pdata->mac_regs + MAC_RCR); in xlgmac_config_jumbo_enable()
402 writel(regval, pdata->mac_regs + MAC_RCR); in xlgmac_config_jumbo_enable()
407 if (pdata->netdev->features & NETIF_F_RXCSUM) in xlgmac_config_checksum_offload()
417 regval = readl(pdata->mac_regs + MAC_VLANIR); in xlgmac_config_vlan_support()
423 writel(regval, pdata->mac_regs + MAC_VLANIR); in xlgmac_config_vlan_support()
428 if (pdata->netdev->features & NETIF_F_HW_VLAN_CTAG_FILTER) in xlgmac_config_vlan_support()
433 if (pdata->netdev->features & NETIF_F_HW_VLAN_CTAG_RX) in xlgmac_config_vlan_support()
441 struct net_device *netdev = pdata->netdev; in xlgmac_config_rx_mode()
444 pr_mode = ((netdev->flags & IFF_PROMISC) != 0); in xlgmac_config_rx_mode()
445 am_mode = ((netdev->flags & IFF_ALLMULTI) != 0); in xlgmac_config_rx_mode()
463 if (channel->queue_index < DMA_DSRX_FIRST_QUEUE) { in xlgmac_prepare_tx_stop()
465 tx_pos = (channel->queue_index * DMA_DSR_Q_LEN) + in xlgmac_prepare_tx_stop()
468 tx_qidx = channel->queue_index - DMA_DSRX_FIRST_QUEUE; in xlgmac_prepare_tx_stop()
481 tx_status = readl(pdata->mac_regs + tx_dsr); in xlgmac_prepare_tx_stop()
492 netdev_info(pdata->netdev, in xlgmac_prepare_tx_stop()
494 channel->queue_index); in xlgmac_prepare_tx_stop()
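xlgmac_prepare_tx_stop() derives a Tx DMA status register and bit offset (tx_dsr/tx_pos) from the channel's queue index and then polls until the channel reports stopped or suspended, printing the netdev_info() message above if it never does. A sketch of the wait loop; the status values, field length and timeout constant are assumptions used for illustration, not fragments of this file:

	unsigned long tx_timeout;
	unsigned int tx_status;

	/* Assumed names: XLGMAC_DMA_STOP_TIMEOUT, DMA_DSR_TPS_LEN,
	 * DMA_TPS_STOPPED, DMA_TPS_SUSPENDED.
	 */
	tx_timeout = jiffies + (XLGMAC_DMA_STOP_TIMEOUT * HZ);
	while (time_before(jiffies, tx_timeout)) {
		tx_status = readl(pdata->mac_regs + tx_dsr);
		tx_status = XLGMAC_GET_REG_BITS(tx_status, tx_pos,
						DMA_DSR_TPS_LEN);
		if (tx_status == DMA_TPS_STOPPED ||
		    tx_status == DMA_TPS_SUSPENDED)
			break;

		usleep_range(500, 1000);
	}

	if (!time_before(jiffies, tx_timeout))
		netdev_info(pdata->netdev,
			    "timed out waiting for Tx DMA channel %u to stop\n",
			    channel->queue_index);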
504 channel = pdata->channel_head; in xlgmac_enable_tx()
505 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_enable_tx()
506 if (!channel->tx_ring) in xlgmac_enable_tx()
516 for (i = 0; i < pdata->tx_q_count; i++) { in xlgmac_enable_tx()
525 regval = readl(pdata->mac_regs + MAC_TCR); in xlgmac_enable_tx()
528 writel(regval, pdata->mac_regs + MAC_TCR); in xlgmac_enable_tx()
538 channel = pdata->channel_head; in xlgmac_disable_tx()
539 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_disable_tx()
540 if (!channel->tx_ring) in xlgmac_disable_tx()
547 regval = readl(pdata->mac_regs + MAC_TCR); in xlgmac_disable_tx()
550 writel(regval, pdata->mac_regs + MAC_TCR); in xlgmac_disable_tx()
553 for (i = 0; i < pdata->tx_q_count; i++) { in xlgmac_disable_tx()
561 channel = pdata->channel_head; in xlgmac_disable_tx()
562 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_disable_tx()
563 if (!channel->tx_ring) in xlgmac_disable_tx()
579 /* The Rx engine cannot be stopped if it is actively processing in xlgmac_prepare_rx_stop()
580 * packets. Wait for the Rx queue to empty the Rx fifo. Don't in xlgmac_prepare_rx_stop()
597 netdev_info(pdata->netdev, in xlgmac_prepare_rx_stop()
598 "timed out waiting for Rx queue %u to empty\n", in xlgmac_prepare_rx_stop()
607 /* Enable each Rx DMA channel */ in xlgmac_enable_rx()
608 channel = pdata->channel_head; in xlgmac_enable_rx()
609 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_enable_rx()
610 if (!channel->rx_ring) in xlgmac_enable_rx()
619 /* Enable each Rx queue */ in xlgmac_enable_rx()
621 for (i = 0; i < pdata->rx_q_count; i++) in xlgmac_enable_rx()
623 writel(regval, pdata->mac_regs + MAC_RQC0R); in xlgmac_enable_rx()
625 /* Enable MAC Rx */ in xlgmac_enable_rx()
626 regval = readl(pdata->mac_regs + MAC_RCR); in xlgmac_enable_rx()
635 writel(regval, pdata->mac_regs + MAC_RCR); in xlgmac_enable_rx()
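xlgmac_enable_rx() brings the receive path up from the inside out: the Rx DMA channels first, then the MTL Rx queues via a single MAC_RQC0R write, then the MAC receiver itself. The queue loop builds one register value before writing it; the sketch below assumes the usual two-bit-per-queue enable encoding, which is not visible in the matched fragments:

	unsigned int regval = 0;
	unsigned int i;

	/* Assumed layout: two bits per queue in MAC_RQC0R, 0x2 == enabled. */
	for (i = 0; i < pdata->rx_q_count; i++)
		regval |= (0x02 << (i << 1));
	writel(regval, pdata->mac_regs + MAC_RQC0R);

The xlgmac_disable_rx() matches that follow tear things down in the reverse order: MAC Rx off, queues cleared, then each Rx DMA channel.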
644 /* Disable MAC Rx */ in xlgmac_disable_rx()
645 regval = readl(pdata->mac_regs + MAC_RCR); in xlgmac_disable_rx()
654 writel(regval, pdata->mac_regs + MAC_RCR); in xlgmac_disable_rx()
656 /* Prepare for Rx DMA channel stop */ in xlgmac_disable_rx()
657 for (i = 0; i < pdata->rx_q_count; i++) in xlgmac_disable_rx()
660 /* Disable each Rx queue */ in xlgmac_disable_rx()
661 writel(0, pdata->mac_regs + MAC_RQC0R); in xlgmac_disable_rx()
663 /* Disable each Rx DMA channel */ in xlgmac_disable_rx()
664 channel = pdata->channel_head; in xlgmac_disable_rx()
665 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_disable_rx()
666 if (!channel->rx_ring) in xlgmac_disable_rx()
679 struct xlgmac_pdata *pdata = channel->pdata; in xlgmac_tx_start_xmit()
688 desc_data = XLGMAC_GET_DESC_DATA(ring, ring->cur); in xlgmac_tx_start_xmit()
689 writel(lower_32_bits(desc_data->dma_desc_addr), in xlgmac_tx_start_xmit()
693 if (pdata->tx_usecs && !channel->tx_timer_active) { in xlgmac_tx_start_xmit()
694 channel->tx_timer_active = 1; in xlgmac_tx_start_xmit()
695 mod_timer(&channel->tx_timer, in xlgmac_tx_start_xmit()
696 jiffies + usecs_to_jiffies(pdata->tx_usecs)); in xlgmac_tx_start_xmit()
699 ring->tx.xmit_more = 0; in xlgmac_tx_start_xmit()
704 struct xlgmac_pdata *pdata = channel->pdata; in xlgmac_dev_xmit()
705 struct xlgmac_ring *ring = channel->tx_ring; in xlgmac_dev_xmit()
711 int start_index = ring->cur; in xlgmac_dev_xmit()
712 int cur_index = ring->cur; in xlgmac_dev_xmit()
716 pkt_info = &ring->pkt_info; in xlgmac_dev_xmit()
717 csum = XLGMAC_GET_REG_BITS(pkt_info->attributes, in xlgmac_dev_xmit()
720 tso = XLGMAC_GET_REG_BITS(pkt_info->attributes, in xlgmac_dev_xmit()
723 vlan = XLGMAC_GET_REG_BITS(pkt_info->attributes, in xlgmac_dev_xmit()
727 if (tso && (pkt_info->mss != ring->tx.cur_mss)) in xlgmac_dev_xmit()
732 if (vlan && (pkt_info->vlan_ctag != ring->tx.cur_vlan_ctag)) in xlgmac_dev_xmit()
739 * - Tx frame count exceeds the frame count setting in xlgmac_dev_xmit()
740 * - Addition of Tx frame count to the frame count since the in xlgmac_dev_xmit()
743 * - No frame count setting specified (ethtool -C ethX tx-frames 0) in xlgmac_dev_xmit()
744 * - Addition of Tx frame count to the frame count since the in xlgmac_dev_xmit()
747 ring->coalesce_count += pkt_info->tx_packets; in xlgmac_dev_xmit()
748 if (!pdata->tx_frames) in xlgmac_dev_xmit()
750 else if (pkt_info->tx_packets > pdata->tx_frames) in xlgmac_dev_xmit()
752 else if ((ring->coalesce_count % pdata->tx_frames) < in xlgmac_dev_xmit()
753 pkt_info->tx_packets) in xlgmac_dev_xmit()
759 dma_desc = desc_data->dma_desc; in xlgmac_dev_xmit()
764 netif_dbg(pdata, tx_queued, pdata->netdev, in xlgmac_dev_xmit()
766 pkt_info->mss); in xlgmac_dev_xmit()
769 dma_desc->desc2 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
770 dma_desc->desc2, in xlgmac_dev_xmit()
773 pkt_info->mss); in xlgmac_dev_xmit()
776 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
777 dma_desc->desc3, in xlgmac_dev_xmit()
783 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
784 dma_desc->desc3, in xlgmac_dev_xmit()
789 ring->tx.cur_mss = pkt_info->mss; in xlgmac_dev_xmit()
793 netif_dbg(pdata, tx_queued, pdata->netdev, in xlgmac_dev_xmit()
795 pkt_info->vlan_ctag); in xlgmac_dev_xmit()
798 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
799 dma_desc->desc3, in xlgmac_dev_xmit()
805 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
806 dma_desc->desc3, in xlgmac_dev_xmit()
809 pkt_info->vlan_ctag); in xlgmac_dev_xmit()
812 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
813 dma_desc->desc3, in xlgmac_dev_xmit()
818 ring->tx.cur_vlan_ctag = pkt_info->vlan_ctag; in xlgmac_dev_xmit()
823 dma_desc = desc_data->dma_desc; in xlgmac_dev_xmit()
827 dma_desc->desc0 = cpu_to_le32(lower_32_bits(desc_data->skb_dma)); in xlgmac_dev_xmit()
828 dma_desc->desc1 = cpu_to_le32(upper_32_bits(desc_data->skb_dma)); in xlgmac_dev_xmit()
831 dma_desc->desc2 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
832 dma_desc->desc2, in xlgmac_dev_xmit()
835 desc_data->skb_dma_len); in xlgmac_dev_xmit()
839 dma_desc->desc2 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
840 dma_desc->desc2, in xlgmac_dev_xmit()
844 pdata->stats.tx_vlan_packets++; in xlgmac_dev_xmit()
848 if (XLGMAC_GET_REG_BITS(pkt_info->attributes, in xlgmac_dev_xmit()
851 dma_desc->desc2 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
852 dma_desc->desc2, in xlgmac_dev_xmit()
858 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
859 dma_desc->desc3, in xlgmac_dev_xmit()
865 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
866 dma_desc->desc3, in xlgmac_dev_xmit()
873 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
874 dma_desc->desc3, in xlgmac_dev_xmit()
881 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
882 dma_desc->desc3, in xlgmac_dev_xmit()
885 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
886 dma_desc->desc3, in xlgmac_dev_xmit()
889 pkt_info->tcp_payload_len); in xlgmac_dev_xmit()
890 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
891 dma_desc->desc3, in xlgmac_dev_xmit()
894 pkt_info->tcp_header_len / 4); in xlgmac_dev_xmit()
896 pdata->stats.tx_tso_packets++; in xlgmac_dev_xmit()
899 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
900 dma_desc->desc3, in xlgmac_dev_xmit()
906 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
907 dma_desc->desc3, in xlgmac_dev_xmit()
913 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
914 dma_desc->desc3, in xlgmac_dev_xmit()
917 pkt_info->length); in xlgmac_dev_xmit()
920 for (i = cur_index - start_index + 1; i < pkt_info->desc_count; i++) { in xlgmac_dev_xmit()
923 dma_desc = desc_data->dma_desc; in xlgmac_dev_xmit()
926 dma_desc->desc0 = in xlgmac_dev_xmit()
927 cpu_to_le32(lower_32_bits(desc_data->skb_dma)); in xlgmac_dev_xmit()
928 dma_desc->desc1 = in xlgmac_dev_xmit()
929 cpu_to_le32(upper_32_bits(desc_data->skb_dma)); in xlgmac_dev_xmit()
932 dma_desc->desc2 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
933 dma_desc->desc2, in xlgmac_dev_xmit()
936 desc_data->skb_dma_len); in xlgmac_dev_xmit()
939 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
940 dma_desc->desc3, in xlgmac_dev_xmit()
945 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
946 dma_desc->desc3, in xlgmac_dev_xmit()
952 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
953 dma_desc->desc3, in xlgmac_dev_xmit()
960 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
961 dma_desc->desc3, in xlgmac_dev_xmit()
967 dma_desc->desc2 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
968 dma_desc->desc2, in xlgmac_dev_xmit()
973 desc_data->tx.packets = pkt_info->tx_packets; in xlgmac_dev_xmit()
974 desc_data->tx.bytes = pkt_info->tx_bytes; in xlgmac_dev_xmit()
984 dma_desc = desc_data->dma_desc; in xlgmac_dev_xmit()
985 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_dev_xmit()
986 dma_desc->desc3, in xlgmac_dev_xmit()
992 pkt_info->desc_count, 1); in xlgmac_dev_xmit()
997 ring->cur = cur_index + 1; in xlgmac_dev_xmit()
999 netif_xmit_stopped(netdev_get_tx_queue(pdata->netdev, in xlgmac_dev_xmit()
1000 channel->queue_index))) in xlgmac_dev_xmit()
1003 ring->tx.xmit_more = 1; in xlgmac_dev_xmit()
1006 channel->name, start_index & (ring->dma_desc_count - 1), in xlgmac_dev_xmit()
1007 (ring->cur - 1) & (ring->dma_desc_count - 1)); in xlgmac_dev_xmit()
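The coalescing comment visible near the top of the xlgmac_dev_xmit() matches (the "- Tx frame count exceeds the frame count setting" bullets) decides whether the last descriptor of a packet requests a Tx-complete interrupt. Reduced to code, that decision looks roughly like the sketch below; the helper name and boolean form are editorial, only the three conditions come from the matched fragments:

#include <stdbool.h>

/* Decide whether this packet's last descriptor should set the
 * interrupt-on-completion bit, based on the tx-frames coalescing
 * setting (ethtool -C ethX tx-frames N).
 */
static bool tx_want_interrupt(unsigned int tx_frames,      /* 0 = coalescing off */
			      unsigned int pkt_tx_packets, /* packets in this skb */
			      unsigned int coalesce_count) /* ring total, already
							      including this skb */
{
	if (!tx_frames)
		return false;		/* no frame count setting specified */
	if (pkt_tx_packets > tx_frames)
		return true;		/* one skb alone exceeds the setting */
	if ((coalesce_count % tx_frames) < pkt_tx_packets)
		return true;		/* the threshold was crossed by this skb */
	return false;
}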
1016 tsa = XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_get_rx_tstamp()
1019 tsd = XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_get_rx_tstamp()
1023 nsec = le32_to_cpu(dma_desc->desc1); in xlgmac_get_rx_tstamp()
1025 nsec |= le32_to_cpu(dma_desc->desc0); in xlgmac_get_rx_tstamp()
1027 pkt_info->rx_tstamp = nsec; in xlgmac_get_rx_tstamp()
1028 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_get_rx_tstamp()
1029 pkt_info->attributes, in xlgmac_get_rx_tstamp()
1039 struct xlgmac_dma_desc *dma_desc = desc_data->dma_desc; in xlgmac_tx_desc_reset()
1047 dma_desc->desc0 = 0; in xlgmac_tx_desc_reset()
1048 dma_desc->desc1 = 0; in xlgmac_tx_desc_reset()
1049 dma_desc->desc2 = 0; in xlgmac_tx_desc_reset()
1050 dma_desc->desc3 = 0; in xlgmac_tx_desc_reset()
1058 struct xlgmac_ring *ring = channel->tx_ring; in xlgmac_tx_desc_init()
1060 int start_index = ring->cur; in xlgmac_tx_desc_init()
1064 for (i = 0; i < ring->dma_desc_count; i++) { in xlgmac_tx_desc_init()
1072 writel(ring->dma_desc_count - 1, XLGMAC_DMA_REG(channel, DMA_CH_TDRLR)); in xlgmac_tx_desc_init()
1076 writel(upper_32_bits(desc_data->dma_desc_addr), in xlgmac_tx_desc_init()
1078 writel(lower_32_bits(desc_data->dma_desc_addr), in xlgmac_tx_desc_init()
1086 struct xlgmac_dma_desc *dma_desc = desc_data->dma_desc; in xlgmac_rx_desc_reset()
1087 unsigned int rx_frames = pdata->rx_frames; in xlgmac_rx_desc_reset()
1088 unsigned int rx_usecs = pdata->rx_usecs; in xlgmac_rx_desc_reset()
1096 /* Set interrupt based on Rx frame coalescing setting */ in xlgmac_rx_desc_reset()
1103 /* Reset the Rx descriptor in xlgmac_rx_desc_reset()
1110 hdr_dma = desc_data->rx.hdr.dma_base + desc_data->rx.hdr.dma_off; in xlgmac_rx_desc_reset()
1111 buf_dma = desc_data->rx.buf.dma_base + desc_data->rx.buf.dma_off; in xlgmac_rx_desc_reset()
1112 dma_desc->desc0 = cpu_to_le32(lower_32_bits(hdr_dma)); in xlgmac_rx_desc_reset()
1113 dma_desc->desc1 = cpu_to_le32(upper_32_bits(hdr_dma)); in xlgmac_rx_desc_reset()
1114 dma_desc->desc2 = cpu_to_le32(lower_32_bits(buf_dma)); in xlgmac_rx_desc_reset()
1115 dma_desc->desc3 = cpu_to_le32(upper_32_bits(buf_dma)); in xlgmac_rx_desc_reset()
1117 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_rx_desc_reset()
1118 dma_desc->desc3, in xlgmac_rx_desc_reset()
1123 /* Since the Rx DMA engine is likely running, make sure everything in xlgmac_rx_desc_reset()
1129 dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE( in xlgmac_rx_desc_reset()
1130 dma_desc->desc3, in xlgmac_rx_desc_reset()
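xlgmac_rx_desc_reset() rebuilds a receive descriptor with fresh header and buffer addresses and only then hands it back to hardware; the truncated comment at file line 1123 is about exactly that ordering. A sketch of the tail of the function with the barrier made explicit; the OWN field macro names follow the register-header style used elsewhere in the driver and should be read as assumptions here:

	dma_desc->desc0 = cpu_to_le32(lower_32_bits(hdr_dma));
	dma_desc->desc1 = cpu_to_le32(upper_32_bits(hdr_dma));
	dma_desc->desc2 = cpu_to_le32(lower_32_bits(buf_dma));
	dma_desc->desc3 = cpu_to_le32(upper_32_bits(buf_dma));

	/* The Rx DMA engine may already be running, so make sure the
	 * address and control words are visible to the device before
	 * ownership is transferred.
	 */
	dma_wmb();

	dma_desc->desc3 = XLGMAC_SET_REG_BITS_LE(dma_desc->desc3,
						 RX_NORMAL_DESC3_OWN_POS,
						 RX_NORMAL_DESC3_OWN_LEN,
						 1);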
1141 struct xlgmac_pdata *pdata = channel->pdata; in xlgmac_rx_desc_init()
1142 struct xlgmac_ring *ring = channel->rx_ring; in xlgmac_rx_desc_init()
1143 unsigned int start_index = ring->cur; in xlgmac_rx_desc_init()
1148 for (i = 0; i < ring->dma_desc_count; i++) { in xlgmac_rx_desc_init()
1151 /* Initialize Rx descriptor */ in xlgmac_rx_desc_init()
1155 /* Update the total number of Rx descriptors */ in xlgmac_rx_desc_init()
1156 writel(ring->dma_desc_count - 1, XLGMAC_DMA_REG(channel, DMA_CH_RDRLR)); in xlgmac_rx_desc_init()
1160 writel(upper_32_bits(desc_data->dma_desc_addr), in xlgmac_rx_desc_init()
1162 writel(lower_32_bits(desc_data->dma_desc_addr), in xlgmac_rx_desc_init()
1165 /* Update the Rx Descriptor Tail Pointer */ in xlgmac_rx_desc_init()
1167 ring->dma_desc_count - 1); in xlgmac_rx_desc_init()
1168 writel(lower_32_bits(desc_data->dma_desc_addr), in xlgmac_rx_desc_init()
1174 /* Rx and Tx share CTXT bit, so check TDES3.CTXT bit */ in xlgmac_is_context_desc()
1175 return XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_is_context_desc()
1182 /* Rx and Tx share LD bit, so check TDES3.LD bit */ in xlgmac_is_last_desc()
1183 return XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_is_last_desc()
1194 /* Clear MTL flow control */ in xlgmac_disable_tx_flow_control()
1195 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_disable_tx_flow_control()
1204 q_count = min_t(unsigned int, pdata->tx_q_count, max_q_count); in xlgmac_disable_tx_flow_control()
1207 regval = readl(pdata->mac_regs + reg); in xlgmac_disable_tx_flow_control()
1212 writel(regval, pdata->mac_regs + reg); in xlgmac_disable_tx_flow_control()
1226 /* Set MTL flow control */ in xlgmac_enable_tx_flow_control()
1227 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_enable_tx_flow_control()
1236 q_count = min_t(unsigned int, pdata->tx_q_count, max_q_count); in xlgmac_enable_tx_flow_control()
1239 regval = readl(pdata->mac_regs + reg); in xlgmac_enable_tx_flow_control()
1248 writel(regval, pdata->mac_regs + reg); in xlgmac_enable_tx_flow_control()
1260 regval = readl(pdata->mac_regs + MAC_RFCR); in xlgmac_disable_rx_flow_control()
1263 writel(regval, pdata->mac_regs + MAC_RFCR); in xlgmac_disable_rx_flow_control()
1272 regval = readl(pdata->mac_regs + MAC_RFCR); in xlgmac_enable_rx_flow_control()
1275 writel(regval, pdata->mac_regs + MAC_RFCR); in xlgmac_enable_rx_flow_control()
1282 if (pdata->tx_pause) in xlgmac_config_tx_flow_control()
1292 if (pdata->rx_pause) in xlgmac_config_rx_flow_control()
1306 channel = pdata->channel_head; in xlgmac_config_rx_coalesce()
1307 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_rx_coalesce()
1308 if (!channel->rx_ring) in xlgmac_config_rx_coalesce()
1314 pdata->rx_riwt); in xlgmac_config_rx_coalesce()
1332 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_config_rx_fep_enable()
1345 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_config_rx_fup_enable()
1364 channel = pdata->channel_head; in xlgmac_config_rx_buffer_size()
1365 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_rx_buffer_size()
1366 if (!channel->rx_ring) in xlgmac_config_rx_buffer_size()
1372 pdata->rx_buf_size); in xlgmac_config_rx_buffer_size()
1383 channel = pdata->channel_head; in xlgmac_config_tso_mode()
1384 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_tso_mode()
1385 if (!channel->tx_ring) in xlgmac_config_tso_mode()
1388 if (pdata->hw_feat.tso) { in xlgmac_config_tso_mode()
1403 channel = pdata->channel_head; in xlgmac_config_sph_mode()
1404 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_sph_mode()
1405 if (!channel->rx_ring) in xlgmac_config_sph_mode()
1414 regval = readl(pdata->mac_regs + MAC_RCR); in xlgmac_config_sph_mode()
1418 writel(regval, pdata->mac_regs + MAC_RCR); in xlgmac_config_sph_mode()
1427 rate = pdata->sysclk_rate; in xlgmac_usec_to_riwt()
1445 rate = pdata->sysclk_rate; in xlgmac_riwt_to_usec()
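xlgmac_usec_to_riwt() and xlgmac_riwt_to_usec() both start from pdata->sysclk_rate because the Rx interrupt watchdog timer counts in units of 256 system-clock cycles. The matched lines stop at the rate lookup; the arithmetic below is the conversion this driver family normally uses, shown here as a hedged sketch:

/* Convert microseconds to Rx interrupt watchdog timer units and back,
 * assuming one RIWT unit == 256 system-clock cycles.
 */
static unsigned int usec_to_riwt(unsigned long sysclk_rate, unsigned int usec)
{
	return (usec * (sysclk_rate / 1000000)) / 256;
}

static unsigned int riwt_to_usec(unsigned long sysclk_rate, unsigned int riwt)
{
	return (riwt * 256) / (sysclk_rate / 1000000);
}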
1463 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_config_rx_threshold()
1479 regval = readl(pdata->mac_regs + MTL_OMR); in xlgmac_config_mtl_mode()
1482 writel(regval, pdata->mac_regs + MTL_OMR); in xlgmac_config_mtl_mode()
1485 for (i = 0; i < pdata->hw_feat.tc_cnt; i++) { in xlgmac_config_mtl_mode()
1497 /* Set Rx to strict priority algorithm */ in xlgmac_config_mtl_mode()
1498 regval = readl(pdata->mac_regs + MTL_OMR); in xlgmac_config_mtl_mode()
1501 writel(regval, pdata->mac_regs + MTL_OMR); in xlgmac_config_mtl_mode()
1512 /* Map the MTL Tx Queues to Traffic Classes in xlgmac_config_queue_mapping()
1515 qptc = pdata->tx_q_count / pdata->hw_feat.tc_cnt; in xlgmac_config_queue_mapping()
1516 qptc_extra = pdata->tx_q_count % pdata->hw_feat.tc_cnt; in xlgmac_config_queue_mapping()
1518 for (i = 0, queue = 0; i < pdata->hw_feat.tc_cnt; i++) { in xlgmac_config_queue_mapping()
1520 netif_dbg(pdata, drv, pdata->netdev, in xlgmac_config_queue_mapping()
1534 netif_dbg(pdata, drv, pdata->netdev, in xlgmac_config_queue_mapping()
1548 /* Map the 8 VLAN priority values to available MTL Rx queues */ in xlgmac_config_queue_mapping()
1550 pdata->rx_q_count); in xlgmac_config_queue_mapping()
1559 netif_dbg(pdata, drv, pdata->netdev, in xlgmac_config_queue_mapping()
1566 netif_dbg(pdata, drv, pdata->netdev, in xlgmac_config_queue_mapping()
1577 writel(regval, pdata->mac_regs + reg); in xlgmac_config_queue_mapping()
1582 /* Configure one to one, MTL Rx queue to DMA Rx channel mapping in xlgmac_config_queue_mapping()
1583 * ie Q0 <--> CH0, Q1 <--> CH1 ... Q11 <--> CH11 in xlgmac_config_queue_mapping()
1586 regval = readl(pdata->mac_regs + reg); in xlgmac_config_queue_mapping()
1589 writel(regval, pdata->mac_regs + reg); in xlgmac_config_queue_mapping()
1592 regval = readl(pdata->mac_regs + reg); in xlgmac_config_queue_mapping()
1595 writel(regval, pdata->mac_regs + reg); in xlgmac_config_queue_mapping()
1598 regval = readl(pdata->mac_regs + reg); in xlgmac_config_queue_mapping()
1601 writel(regval, pdata->mac_regs + reg); in xlgmac_config_queue_mapping()
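xlgmac_config_queue_mapping() performs three mappings: Tx queues onto traffic classes (qptc/qptc_extra spread the remainder across the first classes), the eight VLAN priorities onto the available MTL Rx queues, and a one-to-one MTL Rx queue to DMA channel mapping (Q0 <--> CH0 and so on). A sketch of the first distribution; the register writes that record each assignment are left as a comment because their field layout is not in the matched output:

/* Distribute tx_q_count Tx queues over tc_cnt traffic classes as evenly
 * as possible: every class gets qptc queues, and the first qptc_extra
 * classes get one more.
 */
static void map_tx_queues_to_tcs(unsigned int tx_q_count, unsigned int tc_cnt,
				 unsigned int *queue_tc /* [tx_q_count] */)
{
	unsigned int qptc = tx_q_count / tc_cnt;
	unsigned int qptc_extra = tx_q_count % tc_cnt;
	unsigned int i, j, queue = 0;

	for (i = 0; i < tc_cnt; i++) {
		for (j = 0; j < qptc; j++)
			queue_tc[queue++] = i;	/* "TXq %u -> TC %u" */
		if (i < qptc_extra)
			queue_tc[queue++] = i;	/* remainder queue */
	}
}

With 12 Tx queues and 5 traffic classes, for example, this yields 3, 3, 2, 2, 2 queues per class.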
1625 p_fifo--; in xlgmac_calculate_per_queue_fifo()
1637 pdata->hw_feat.tx_fifo_size, in xlgmac_config_tx_fifo_size()
1638 pdata->tx_q_count); in xlgmac_config_tx_fifo_size()
1640 for (i = 0; i < pdata->tx_q_count; i++) { in xlgmac_config_tx_fifo_size()
1647 netif_info(pdata, drv, pdata->netdev, in xlgmac_config_tx_fifo_size()
1649 pdata->tx_q_count, ((fifo_size + 1) * 256)); in xlgmac_config_tx_fifo_size()
1659 pdata->hw_feat.rx_fifo_size, in xlgmac_config_rx_fifo_size()
1660 pdata->rx_q_count); in xlgmac_config_rx_fifo_size()
1662 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_config_rx_fifo_size()
1669 netif_info(pdata, drv, pdata->netdev, in xlgmac_config_rx_fifo_size()
1670 "%d Rx hardware queues, %d byte fifo per queue\n", in xlgmac_config_rx_fifo_size()
1671 pdata->rx_q_count, ((fifo_size + 1) * 256)); in xlgmac_config_rx_fifo_size()
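xlgmac_calculate_per_queue_fifo() (the p_fifo-- match above) converts the total Tx or Rx FIFO into a per-queue register value; the netif_info() strings confirm the register encodes the size in 256-byte units, offset by one. A sketch of that conversion, with the clamping of the advertised FIFO size left out:

/* Split fifo_size bytes of FIFO RAM evenly over q_count queues and
 * return the per-queue register value, where 0 means 256 bytes,
 * 1 means 512 bytes, and so on.
 */
static unsigned int per_queue_fifo(unsigned int fifo_size, unsigned int q_count)
{
	unsigned int q_fifo_size = fifo_size / q_count;
	unsigned int p_fifo = q_fifo_size / 256;

	if (p_fifo)
		p_fifo--;

	return p_fifo;
}

The (fifo_size + 1) * 256 expression in the log messages above is the same encoding read back the other way.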
1679 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_config_flow_control_threshold()
1684 /* De-activate flow control when more than 6k left in fifo */ in xlgmac_config_flow_control_threshold()
1697 for (i = 0; i < pdata->tx_q_count; i++) { in xlgmac_config_tx_threshold()
1713 for (i = 0; i < pdata->rx_q_count; i++) { in xlgmac_config_rsf_mode()
1729 for (i = 0; i < pdata->tx_q_count; i++) { in xlgmac_config_tsf_mode()
1745 channel = pdata->channel_head; in xlgmac_config_osp_mode()
1746 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_osp_mode()
1747 if (!channel->tx_ring) in xlgmac_config_osp_mode()
1753 pdata->tx_osp_mode); in xlgmac_config_osp_mode()
1766 channel = pdata->channel_head; in xlgmac_config_pblx8()
1767 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_pblx8()
1771 pdata->pblx8); in xlgmac_config_pblx8()
1782 regval = readl(XLGMAC_DMA_REG(pdata->channel_head, DMA_CH_TCR)); in xlgmac_get_tx_pbl_val()
1794 channel = pdata->channel_head; in xlgmac_config_tx_pbl_val()
1795 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_tx_pbl_val()
1796 if (!channel->tx_ring) in xlgmac_config_tx_pbl_val()
1802 pdata->tx_pbl); in xlgmac_config_tx_pbl_val()
1813 regval = readl(XLGMAC_DMA_REG(pdata->channel_head, DMA_CH_RCR)); in xlgmac_get_rx_pbl_val()
1825 channel = pdata->channel_head; in xlgmac_config_rx_pbl_val()
1826 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_config_rx_pbl_val()
1827 if (!channel->rx_ring) in xlgmac_config_rx_pbl_val()
1833 pdata->rx_pbl); in xlgmac_config_rx_pbl_val()
1858 val = (u64)readl(pdata->mac_regs + reg_lo); in xlgmac_mmc_read()
1861 val |= ((u64)readl(pdata->mac_regs + reg_lo + 4) << 32); in xlgmac_mmc_read()
1868 unsigned int mmc_isr = readl(pdata->mac_regs + MMC_TISR); in xlgmac_tx_mmc_int()
1869 struct xlgmac_stats *stats = &pdata->stats; in xlgmac_tx_mmc_int()
1874 stats->txoctetcount_gb += in xlgmac_tx_mmc_int()
1880 stats->txframecount_gb += in xlgmac_tx_mmc_int()
1886 stats->txbroadcastframes_g += in xlgmac_tx_mmc_int()
1892 stats->txmulticastframes_g += in xlgmac_tx_mmc_int()
1898 stats->tx64octets_gb += in xlgmac_tx_mmc_int()
1904 stats->tx65to127octets_gb += in xlgmac_tx_mmc_int()
1910 stats->tx128to255octets_gb += in xlgmac_tx_mmc_int()
1916 stats->tx256to511octets_gb += in xlgmac_tx_mmc_int()
1922 stats->tx512to1023octets_gb += in xlgmac_tx_mmc_int()
1928 stats->tx1024tomaxoctets_gb += in xlgmac_tx_mmc_int()
1934 stats->txunicastframes_gb += in xlgmac_tx_mmc_int()
1940 stats->txmulticastframes_gb += in xlgmac_tx_mmc_int()
1946 stats->txbroadcastframes_g += in xlgmac_tx_mmc_int()
1952 stats->txunderflowerror += in xlgmac_tx_mmc_int()
1958 stats->txoctetcount_g += in xlgmac_tx_mmc_int()
1964 stats->txframecount_g += in xlgmac_tx_mmc_int()
1970 stats->txpauseframes += in xlgmac_tx_mmc_int()
1976 stats->txvlanframes_g += in xlgmac_tx_mmc_int()
1982 unsigned int mmc_isr = readl(pdata->mac_regs + MMC_RISR); in xlgmac_rx_mmc_int()
1983 struct xlgmac_stats *stats = &pdata->stats; in xlgmac_rx_mmc_int()
1988 stats->rxframecount_gb += in xlgmac_rx_mmc_int()
1994 stats->rxoctetcount_gb += in xlgmac_rx_mmc_int()
2000 stats->rxoctetcount_g += in xlgmac_rx_mmc_int()
2006 stats->rxbroadcastframes_g += in xlgmac_rx_mmc_int()
2012 stats->rxmulticastframes_g += in xlgmac_rx_mmc_int()
2018 stats->rxcrcerror += in xlgmac_rx_mmc_int()
2024 stats->rxrunterror += in xlgmac_rx_mmc_int()
2030 stats->rxjabbererror += in xlgmac_rx_mmc_int()
2036 stats->rxundersize_g += in xlgmac_rx_mmc_int()
2042 stats->rxoversize_g += in xlgmac_rx_mmc_int()
2048 stats->rx64octets_gb += in xlgmac_rx_mmc_int()
2054 stats->rx65to127octets_gb += in xlgmac_rx_mmc_int()
2060 stats->rx128to255octets_gb += in xlgmac_rx_mmc_int()
2066 stats->rx256to511octets_gb += in xlgmac_rx_mmc_int()
2072 stats->rx512to1023octets_gb += in xlgmac_rx_mmc_int()
2078 stats->rx1024tomaxoctets_gb += in xlgmac_rx_mmc_int()
2084 stats->rxunicastframes_g += in xlgmac_rx_mmc_int()
2090 stats->rxlengtherror += in xlgmac_rx_mmc_int()
2096 stats->rxoutofrangetype += in xlgmac_rx_mmc_int()
2102 stats->rxpauseframes += in xlgmac_rx_mmc_int()
2108 stats->rxfifooverflow += in xlgmac_rx_mmc_int()
2114 stats->rxvlanframes_gb += in xlgmac_rx_mmc_int()
2120 stats->rxwatchdogerror += in xlgmac_rx_mmc_int()
2126 struct xlgmac_stats *stats = &pdata->stats; in xlgmac_read_mmc_stats()
2130 regval = readl(pdata->mac_regs + MMC_CR); in xlgmac_read_mmc_stats()
2133 writel(regval, pdata->mac_regs + MMC_CR); in xlgmac_read_mmc_stats()
2135 stats->txoctetcount_gb += in xlgmac_read_mmc_stats()
2138 stats->txframecount_gb += in xlgmac_read_mmc_stats()
2141 stats->txbroadcastframes_g += in xlgmac_read_mmc_stats()
2144 stats->txmulticastframes_g += in xlgmac_read_mmc_stats()
2147 stats->tx64octets_gb += in xlgmac_read_mmc_stats()
2150 stats->tx65to127octets_gb += in xlgmac_read_mmc_stats()
2153 stats->tx128to255octets_gb += in xlgmac_read_mmc_stats()
2156 stats->tx256to511octets_gb += in xlgmac_read_mmc_stats()
2159 stats->tx512to1023octets_gb += in xlgmac_read_mmc_stats()
2162 stats->tx1024tomaxoctets_gb += in xlgmac_read_mmc_stats()
2165 stats->txunicastframes_gb += in xlgmac_read_mmc_stats()
2168 stats->txmulticastframes_gb += in xlgmac_read_mmc_stats()
2171 stats->txbroadcastframes_g += in xlgmac_read_mmc_stats()
2174 stats->txunderflowerror += in xlgmac_read_mmc_stats()
2177 stats->txoctetcount_g += in xlgmac_read_mmc_stats()
2180 stats->txframecount_g += in xlgmac_read_mmc_stats()
2183 stats->txpauseframes += in xlgmac_read_mmc_stats()
2186 stats->txvlanframes_g += in xlgmac_read_mmc_stats()
2189 stats->rxframecount_gb += in xlgmac_read_mmc_stats()
2192 stats->rxoctetcount_gb += in xlgmac_read_mmc_stats()
2195 stats->rxoctetcount_g += in xlgmac_read_mmc_stats()
2198 stats->rxbroadcastframes_g += in xlgmac_read_mmc_stats()
2201 stats->rxmulticastframes_g += in xlgmac_read_mmc_stats()
2204 stats->rxcrcerror += in xlgmac_read_mmc_stats()
2207 stats->rxrunterror += in xlgmac_read_mmc_stats()
2210 stats->rxjabbererror += in xlgmac_read_mmc_stats()
2213 stats->rxundersize_g += in xlgmac_read_mmc_stats()
2216 stats->rxoversize_g += in xlgmac_read_mmc_stats()
2219 stats->rx64octets_gb += in xlgmac_read_mmc_stats()
2222 stats->rx65to127octets_gb += in xlgmac_read_mmc_stats()
2225 stats->rx128to255octets_gb += in xlgmac_read_mmc_stats()
2228 stats->rx256to511octets_gb += in xlgmac_read_mmc_stats()
2231 stats->rx512to1023octets_gb += in xlgmac_read_mmc_stats()
2234 stats->rx1024tomaxoctets_gb += in xlgmac_read_mmc_stats()
2237 stats->rxunicastframes_g += in xlgmac_read_mmc_stats()
2240 stats->rxlengtherror += in xlgmac_read_mmc_stats()
2243 stats->rxoutofrangetype += in xlgmac_read_mmc_stats()
2246 stats->rxpauseframes += in xlgmac_read_mmc_stats()
2249 stats->rxfifooverflow += in xlgmac_read_mmc_stats()
2252 stats->rxvlanframes_gb += in xlgmac_read_mmc_stats()
2255 stats->rxwatchdogerror += in xlgmac_read_mmc_stats()
2258 /* Un-freeze counters */ in xlgmac_read_mmc_stats()
2259 regval = readl(pdata->mac_regs + MMC_CR); in xlgmac_read_mmc_stats()
2262 writel(regval, pdata->mac_regs + MMC_CR); in xlgmac_read_mmc_stats()
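xlgmac_read_mmc_stats() brackets the long run of counter reads with two MMC_CR read-modify-writes: the first freezes the hardware counters so the snapshot is consistent, and the last (the "Un-freeze counters" match) releases them. Sketched below with a placeholder name for the freeze field:

	u32 regval;

	/* Freeze the MMC counters while reading them; MMC_CR_MCF_* are
	 * placeholder names for the freeze field.
	 */
	regval = readl(pdata->mac_regs + MMC_CR);
	regval = XLGMAC_SET_REG_BITS(regval, MMC_CR_MCF_POS, MMC_CR_MCF_LEN, 1);
	writel(regval, pdata->mac_regs + MMC_CR);

	/* ... accumulate all Tx/Rx counters via xlgmac_mmc_read() ... */

	/* Un-freeze the counters */
	regval = readl(pdata->mac_regs + MMC_CR);
	regval = XLGMAC_SET_REG_BITS(regval, MMC_CR_MCF_POS, MMC_CR_MCF_LEN, 0);
	writel(regval, pdata->mac_regs + MMC_CR);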
2269 regval = readl(pdata->mac_regs + MMC_CR); in xlgmac_config_mmc()
2276 writel(regval, pdata->mac_regs + MMC_CR); in xlgmac_config_mmc()
2286 mutex_lock(&pdata->rss_mutex); in xlgmac_write_rss_reg()
2288 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_RSSAR), in xlgmac_write_rss_reg()
2291 ret = -EBUSY; in xlgmac_write_rss_reg()
2295 writel(val, pdata->mac_regs + MAC_RSSDR); in xlgmac_write_rss_reg()
2297 regval = readl(pdata->mac_regs + MAC_RSSAR); in xlgmac_write_rss_reg()
2306 writel(regval, pdata->mac_regs + MAC_RSSAR); in xlgmac_write_rss_reg()
2309 while (wait--) { in xlgmac_write_rss_reg()
2310 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_RSSAR), in xlgmac_write_rss_reg()
2319 ret = -EBUSY; in xlgmac_write_rss_reg()
2322 mutex_unlock(&pdata->rss_mutex); in xlgmac_write_rss_reg()
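xlgmac_write_rss_reg() is an indirect-register write taking a table type, an entry index and a value: under rss_mutex it checks that the OB (operation busy) flag in MAC_RSSAR is clear, stages the value in MAC_RSSDR, programs MAC_RSSAR with the target and sets OB, then polls until hardware clears OB again, returning -EBUSY on either check. A condensed sketch of that sequence; the individual MAC_RSSAR field names are assumptions:

	int ret = 0;
	unsigned int wait = 1000;
	u32 regval;

	mutex_lock(&pdata->rss_mutex);

	/* Bail out if a previous indirect operation is still pending */
	if (XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_RSSAR),
				MAC_RSSAR_OB_POS, MAC_RSSAR_OB_LEN)) {
		ret = -EBUSY;
		goto unlock;
	}

	/* Stage the value, then tell the MAC where it goes and kick it off */
	writel(val, pdata->mac_regs + MAC_RSSDR);

	regval = readl(pdata->mac_regs + MAC_RSSAR);
	regval = XLGMAC_SET_REG_BITS(regval, MAC_RSSAR_RSSIA_POS,
				     MAC_RSSAR_RSSIA_LEN, index);
	regval = XLGMAC_SET_REG_BITS(regval, MAC_RSSAR_ADDRT_POS,
				     MAC_RSSAR_ADDRT_LEN, type);
	regval = XLGMAC_SET_REG_BITS(regval, MAC_RSSAR_OB_POS,
				     MAC_RSSAR_OB_LEN, 1);
	writel(regval, pdata->mac_regs + MAC_RSSAR);

	/* Wait for the hardware to clear the operation-busy flag */
	while (wait--) {
		if (!XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_RSSAR),
					 MAC_RSSAR_OB_POS, MAC_RSSAR_OB_LEN))
			goto unlock;
		usleep_range(1000, 1500);
	}

	ret = -EBUSY;

unlock:
	mutex_unlock(&pdata->rss_mutex);
	return ret;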
2329 unsigned int key_regs = sizeof(pdata->rss_key) / sizeof(u32); in xlgmac_write_rss_hash_key()
2330 unsigned int *key = (unsigned int *)&pdata->rss_key; in xlgmac_write_rss_hash_key()
2333 while (key_regs--) { in xlgmac_write_rss_hash_key()
2348 for (i = 0; i < ARRAY_SIZE(pdata->rss_table); i++) { in xlgmac_write_rss_lookup_table()
2351 pdata->rss_table[i]); in xlgmac_write_rss_lookup_table()
2361 memcpy(pdata->rss_key, key, sizeof(pdata->rss_key)); in xlgmac_set_rss_hash_key()
2372 for (i = 0; i < ARRAY_SIZE(pdata->rss_table); i++) { in xlgmac_set_rss_lookup_table()
2374 pdata->rss_table[i] = XLGMAC_SET_REG_BITS( in xlgmac_set_rss_lookup_table()
2375 pdata->rss_table[i], in xlgmac_set_rss_lookup_table()
2389 if (!pdata->hw_feat.rss) in xlgmac_enable_rss()
2390 return -EOPNOTSUPP; in xlgmac_enable_rss()
2403 writel(pdata->rss_options, pdata->mac_regs + MAC_RSSCR); in xlgmac_enable_rss()
2406 regval = readl(pdata->mac_regs + MAC_RSSCR); in xlgmac_enable_rss()
2409 writel(regval, pdata->mac_regs + MAC_RSSCR); in xlgmac_enable_rss()
2418 if (!pdata->hw_feat.rss) in xlgmac_disable_rss()
2419 return -EOPNOTSUPP; in xlgmac_disable_rss()
2421 regval = readl(pdata->mac_regs + MAC_RSSCR); in xlgmac_disable_rss()
2424 writel(regval, pdata->mac_regs + MAC_RSSCR); in xlgmac_disable_rss()
2433 if (!pdata->hw_feat.rss) in xlgmac_config_rss()
2436 if (pdata->netdev->features & NETIF_F_RXHASH) in xlgmac_config_rss()
2442 netdev_err(pdata->netdev, in xlgmac_config_rss()
2452 channel = pdata->channel_head; in xlgmac_enable_dma_interrupts()
2453 for (i = 0; i < pdata->channel_count; i++, channel++) { in xlgmac_enable_dma_interrupts()
2462 * NIE - Normal Interrupt Summary Enable in xlgmac_enable_dma_interrupts()
2463 * AIE - Abnormal Interrupt Summary Enable in xlgmac_enable_dma_interrupts()
2464 * FBEE - Fatal Bus Error Enable in xlgmac_enable_dma_interrupts()
2476 if (channel->tx_ring) { in xlgmac_enable_dma_interrupts()
2478 * TIE - Transmit Interrupt Enable (unless using in xlgmac_enable_dma_interrupts()
2481 if (!pdata->per_channel_irq) in xlgmac_enable_dma_interrupts()
2488 if (channel->rx_ring) { in xlgmac_enable_dma_interrupts()
2489 /* Enable following Rx interrupts in xlgmac_enable_dma_interrupts()
2490 * RBUE - Receive Buffer Unavailable Enable in xlgmac_enable_dma_interrupts()
2491 * RIE - Receive Interrupt Enable (unless using in xlgmac_enable_dma_interrupts()
2499 if (!pdata->per_channel_irq) in xlgmac_enable_dma_interrupts()
2516 q_count = max(pdata->hw_feat.tx_q_cnt, pdata->hw_feat.rx_q_cnt); in xlgmac_enable_mtl_interrupts()
2522 /* No MTL interrupts to be enabled */ in xlgmac_enable_mtl_interrupts()
2536 writel(mac_ier, pdata->mac_regs + MAC_IER); in xlgmac_enable_mac_interrupts()
2539 regval = readl(pdata->mac_regs + MMC_RIER); in xlgmac_enable_mac_interrupts()
2542 writel(regval, pdata->mac_regs + MMC_RIER); in xlgmac_enable_mac_interrupts()
2543 regval = readl(pdata->mac_regs + MMC_TIER); in xlgmac_enable_mac_interrupts()
2546 writel(regval, pdata->mac_regs + MMC_TIER); in xlgmac_enable_mac_interrupts()
2553 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_TCR), in xlgmac_set_xlgmii_25000_speed()
2558 regval = readl(pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_25000_speed()
2561 writel(regval, pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_25000_speed()
2570 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_TCR), in xlgmac_set_xlgmii_40000_speed()
2575 regval = readl(pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_40000_speed()
2578 writel(regval, pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_40000_speed()
2587 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_TCR), in xlgmac_set_xlgmii_50000_speed()
2592 regval = readl(pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_50000_speed()
2595 writel(regval, pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_50000_speed()
2604 regval = XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + MAC_TCR), in xlgmac_set_xlgmii_100000_speed()
2609 regval = readl(pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_100000_speed()
2612 writel(regval, pdata->mac_regs + MAC_TCR); in xlgmac_set_xlgmii_100000_speed()
2619 switch (pdata->phy_speed) { in xlgmac_config_mac_speed()
2640 struct xlgmac_pdata *pdata = channel->pdata; in xlgmac_dev_read()
2641 struct xlgmac_ring *ring = channel->rx_ring; in xlgmac_dev_read()
2642 struct net_device *netdev = pdata->netdev; in xlgmac_dev_read()
2648 desc_data = XLGMAC_GET_DESC_DATA(ring, ring->cur); in xlgmac_dev_read()
2649 dma_desc = desc_data->dma_desc; in xlgmac_dev_read()
2650 pkt_info = &ring->pkt_info; in xlgmac_dev_read()
2653 if (XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2662 xlgmac_dump_rx_desc(pdata, ring, ring->cur); in xlgmac_dev_read()
2664 if (XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2670 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2671 pkt_info->attributes, in xlgmac_dev_read()
2675 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2676 pkt_info->attributes, in xlgmac_dev_read()
2684 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2685 pkt_info->attributes, in xlgmac_dev_read()
2691 if (XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2694 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2695 pkt_info->attributes, in xlgmac_dev_read()
2701 if (XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2704 desc_data->rx.hdr_len = XLGMAC_GET_REG_BITS_LE(dma_desc->desc2, in xlgmac_dev_read()
2707 if (desc_data->rx.hdr_len) in xlgmac_dev_read()
2708 pdata->stats.rx_split_header_packets++; in xlgmac_dev_read()
2712 if (XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2715 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2716 pkt_info->attributes, in xlgmac_dev_read()
2721 pkt_info->rss_hash = le32_to_cpu(dma_desc->desc1); in xlgmac_dev_read()
2723 l34t = XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2731 pkt_info->rss_hash_type = PKT_HASH_TYPE_L4; in xlgmac_dev_read()
2734 pkt_info->rss_hash_type = PKT_HASH_TYPE_L3; in xlgmac_dev_read()
2739 desc_data->rx.len = XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2743 if (!XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2747 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2748 pkt_info->attributes, in xlgmac_dev_read()
2756 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2757 pkt_info->attributes, in xlgmac_dev_read()
2763 if (netdev->features & NETIF_F_RXCSUM) in xlgmac_dev_read()
2764 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2765 pkt_info->attributes, in xlgmac_dev_read()
2771 err = XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2774 etlt = XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, in xlgmac_dev_read()
2782 (netdev->features & NETIF_F_HW_VLAN_CTAG_RX)) { in xlgmac_dev_read()
2783 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2784 pkt_info->attributes, in xlgmac_dev_read()
2788 pkt_info->vlan_ctag = in xlgmac_dev_read()
2789 XLGMAC_GET_REG_BITS_LE(dma_desc->desc0, in xlgmac_dev_read()
2792 netif_dbg(pdata, rx_status, netdev, "vlan-ctag=%#06x\n", in xlgmac_dev_read()
2793 pkt_info->vlan_ctag); in xlgmac_dev_read()
2797 pkt_info->attributes = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2798 pkt_info->attributes, in xlgmac_dev_read()
2803 pkt_info->errors = XLGMAC_SET_REG_BITS( in xlgmac_dev_read()
2804 pkt_info->errors, in xlgmac_dev_read()
2810 XLGMAC_PR("%s - descriptor=%u (cur=%d)\n", channel->name, in xlgmac_dev_read()
2811 ring->cur & (ring->dma_desc_count - 1), ring->cur); in xlgmac_dev_read()
2868 dma_ch_ier |= channel->saved_ier; in xlgmac_enable_int()
2871 return -1; in xlgmac_enable_int()
2931 channel->saved_ier = dma_ch_ier & XLGMAC_DMA_INTERRUPT_MASK; in xlgmac_disable_int()
2935 return -1; in xlgmac_disable_int()
2948 for (i = 0; i < pdata->tx_q_count; i++) { in xlgmac_flush_tx_queues()
2956 for (i = 0; i < pdata->tx_q_count; i++) { in xlgmac_flush_tx_queues()
2961 while (--count && regval) in xlgmac_flush_tx_queues()
2965 return -EBUSY; in xlgmac_flush_tx_queues()
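xlgmac_flush_tx_queues() first requests a flush in each Tx queue's MTL operation-mode register, then polls the flush bit per queue until hardware clears it, giving up with -EBUSY after a bounded number of iterations. A sketch of the polling half; the MTL register accessor and the FTQ field names are written in the style of the macros seen elsewhere in this listing and should be treated as assumptions:

	unsigned int i, count;
	u32 regval;

	for (i = 0; i < pdata->tx_q_count; i++) {
		count = 2000;

		/* Re-read the flush bit each iteration until it clears */
		do {
			regval = readl(XLGMAC_MTL_REG(pdata, i, MTL_Q_TQOMR));
			regval = XLGMAC_GET_REG_BITS(regval,
						     MTL_Q_TQOMR_FTQ_POS,
						     MTL_Q_TQOMR_FTQ_LEN);
			if (!regval)
				break;
			usleep_range(500, 600);
		} while (--count);

		if (!count)
			return -EBUSY;
	}

	return 0;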
2975 regval = readl(pdata->mac_regs + DMA_SBMR); in xlgmac_config_dma_bus()
2984 writel(regval, pdata->mac_regs + DMA_SBMR); in xlgmac_config_dma_bus()
2989 struct xlgmac_desc_ops *desc_ops = &pdata->desc_ops; in xlgmac_hw_init()
3009 desc_ops->tx_desc_init(pdata); in xlgmac_hw_init()
3010 desc_ops->rx_desc_init(pdata); in xlgmac_hw_init()
3013 /* Initialize MTL related features */ in xlgmac_hw_init()
3016 xlgmac_config_tsf_mode(pdata, pdata->tx_sf_mode); in xlgmac_hw_init()
3017 xlgmac_config_rsf_mode(pdata, pdata->rx_sf_mode); in xlgmac_hw_init()
3018 xlgmac_config_tx_threshold(pdata, pdata->tx_threshold); in xlgmac_hw_init()
3019 xlgmac_config_rx_threshold(pdata, pdata->rx_threshold); in xlgmac_hw_init()
3047 regval = readl(pdata->mac_regs + DMA_MR); in xlgmac_hw_exit()
3050 writel(regval, pdata->mac_regs + DMA_MR); in xlgmac_hw_exit()
3054 while (--count && in xlgmac_hw_exit()
3055 XLGMAC_GET_REG_BITS(readl(pdata->mac_regs + DMA_MR), in xlgmac_hw_exit()
3060 return -EBUSY; in xlgmac_hw_exit()
3067 hw_ops->init = xlgmac_hw_init; in xlgmac_init_hw_ops()
3068 hw_ops->exit = xlgmac_hw_exit; in xlgmac_init_hw_ops()
3070 hw_ops->tx_complete = xlgmac_tx_complete; in xlgmac_init_hw_ops()
3072 hw_ops->enable_tx = xlgmac_enable_tx; in xlgmac_init_hw_ops()
3073 hw_ops->disable_tx = xlgmac_disable_tx; in xlgmac_init_hw_ops()
3074 hw_ops->enable_rx = xlgmac_enable_rx; in xlgmac_init_hw_ops()
3075 hw_ops->disable_rx = xlgmac_disable_rx; in xlgmac_init_hw_ops()
3077 hw_ops->dev_xmit = xlgmac_dev_xmit; in xlgmac_init_hw_ops()
3078 hw_ops->dev_read = xlgmac_dev_read; in xlgmac_init_hw_ops()
3079 hw_ops->enable_int = xlgmac_enable_int; in xlgmac_init_hw_ops()
3080 hw_ops->disable_int = xlgmac_disable_int; in xlgmac_init_hw_ops()
3082 hw_ops->set_mac_address = xlgmac_set_mac_address; in xlgmac_init_hw_ops()
3083 hw_ops->config_rx_mode = xlgmac_config_rx_mode; in xlgmac_init_hw_ops()
3084 hw_ops->enable_rx_csum = xlgmac_enable_rx_csum; in xlgmac_init_hw_ops()
3085 hw_ops->disable_rx_csum = xlgmac_disable_rx_csum; in xlgmac_init_hw_ops()
3088 hw_ops->set_xlgmii_25000_speed = xlgmac_set_xlgmii_25000_speed; in xlgmac_init_hw_ops()
3089 hw_ops->set_xlgmii_40000_speed = xlgmac_set_xlgmii_40000_speed; in xlgmac_init_hw_ops()
3090 hw_ops->set_xlgmii_50000_speed = xlgmac_set_xlgmii_50000_speed; in xlgmac_init_hw_ops()
3091 hw_ops->set_xlgmii_100000_speed = xlgmac_set_xlgmii_100000_speed; in xlgmac_init_hw_ops()
3094 hw_ops->tx_desc_init = xlgmac_tx_desc_init; in xlgmac_init_hw_ops()
3095 hw_ops->rx_desc_init = xlgmac_rx_desc_init; in xlgmac_init_hw_ops()
3096 hw_ops->tx_desc_reset = xlgmac_tx_desc_reset; in xlgmac_init_hw_ops()
3097 hw_ops->rx_desc_reset = xlgmac_rx_desc_reset; in xlgmac_init_hw_ops()
3098 hw_ops->is_last_desc = xlgmac_is_last_desc; in xlgmac_init_hw_ops()
3099 hw_ops->is_context_desc = xlgmac_is_context_desc; in xlgmac_init_hw_ops()
3100 hw_ops->tx_start_xmit = xlgmac_tx_start_xmit; in xlgmac_init_hw_ops()
3103 hw_ops->config_tx_flow_control = xlgmac_config_tx_flow_control; in xlgmac_init_hw_ops()
3104 hw_ops->config_rx_flow_control = xlgmac_config_rx_flow_control; in xlgmac_init_hw_ops()
3106 /* For Vlan related config */ in xlgmac_init_hw_ops()
3107 hw_ops->enable_rx_vlan_stripping = xlgmac_enable_rx_vlan_stripping; in xlgmac_init_hw_ops()
3108 hw_ops->disable_rx_vlan_stripping = xlgmac_disable_rx_vlan_stripping; in xlgmac_init_hw_ops()
3109 hw_ops->enable_rx_vlan_filtering = xlgmac_enable_rx_vlan_filtering; in xlgmac_init_hw_ops()
3110 hw_ops->disable_rx_vlan_filtering = xlgmac_disable_rx_vlan_filtering; in xlgmac_init_hw_ops()
3111 hw_ops->update_vlan_hash_table = xlgmac_update_vlan_hash_table; in xlgmac_init_hw_ops()
3113 /* For RX coalescing */ in xlgmac_init_hw_ops()
3114 hw_ops->config_rx_coalesce = xlgmac_config_rx_coalesce; in xlgmac_init_hw_ops()
3115 hw_ops->config_tx_coalesce = xlgmac_config_tx_coalesce; in xlgmac_init_hw_ops()
3116 hw_ops->usec_to_riwt = xlgmac_usec_to_riwt; in xlgmac_init_hw_ops()
3117 hw_ops->riwt_to_usec = xlgmac_riwt_to_usec; in xlgmac_init_hw_ops()
3119 /* For RX and TX threshold config */ in xlgmac_init_hw_ops()
3120 hw_ops->config_rx_threshold = xlgmac_config_rx_threshold; in xlgmac_init_hw_ops()
3121 hw_ops->config_tx_threshold = xlgmac_config_tx_threshold; in xlgmac_init_hw_ops()
3123 /* For RX and TX Store and Forward Mode config */ in xlgmac_init_hw_ops()
3124 hw_ops->config_rsf_mode = xlgmac_config_rsf_mode; in xlgmac_init_hw_ops()
3125 hw_ops->config_tsf_mode = xlgmac_config_tsf_mode; in xlgmac_init_hw_ops()
3127 /* For TX DMA Operating on Second Frame config */ in xlgmac_init_hw_ops()
3128 hw_ops->config_osp_mode = xlgmac_config_osp_mode; in xlgmac_init_hw_ops()
3130 /* For RX and TX PBL config */ in xlgmac_init_hw_ops()
3131 hw_ops->config_rx_pbl_val = xlgmac_config_rx_pbl_val; in xlgmac_init_hw_ops()
3132 hw_ops->get_rx_pbl_val = xlgmac_get_rx_pbl_val; in xlgmac_init_hw_ops()
3133 hw_ops->config_tx_pbl_val = xlgmac_config_tx_pbl_val; in xlgmac_init_hw_ops()
3134 hw_ops->get_tx_pbl_val = xlgmac_get_tx_pbl_val; in xlgmac_init_hw_ops()
3135 hw_ops->config_pblx8 = xlgmac_config_pblx8; in xlgmac_init_hw_ops()
3138 hw_ops->tx_mmc_int = xlgmac_tx_mmc_int; in xlgmac_init_hw_ops()
3139 hw_ops->rx_mmc_int = xlgmac_rx_mmc_int; in xlgmac_init_hw_ops()
3140 hw_ops->read_mmc_stats = xlgmac_read_mmc_stats; in xlgmac_init_hw_ops()
3143 hw_ops->enable_rss = xlgmac_enable_rss; in xlgmac_init_hw_ops()
3144 hw_ops->disable_rss = xlgmac_disable_rss; in xlgmac_init_hw_ops()
3145 hw_ops->set_rss_hash_key = xlgmac_set_rss_hash_key; in xlgmac_init_hw_ops()
3146 hw_ops->set_rss_lookup_table = xlgmac_set_rss_lookup_table; in xlgmac_init_hw_ops()