Lines matching refs:priv — every reference to the identifier priv in the Linux bcm63xx_enet Ethernet driver, as reported by a source cross-referencer. Each hit shows the file line number, the matching source line, the enclosing function, and whether priv is a function argument or a local variable there.

36 static inline u32 enet_readl(struct bcm_enet_priv *priv, u32 off)  in enet_readl()  argument
38 return bcm_readl(priv->base + off); in enet_readl()
41 static inline void enet_writel(struct bcm_enet_priv *priv, in enet_writel() argument
44 bcm_writel(val, priv->base + off); in enet_writel()
50 static inline u32 enetsw_readl(struct bcm_enet_priv *priv, u32 off) in enetsw_readl() argument
52 return bcm_readl(priv->base + off); in enetsw_readl()
55 static inline void enetsw_writel(struct bcm_enet_priv *priv, in enetsw_writel() argument
58 bcm_writel(val, priv->base + off); in enetsw_writel()
61 static inline u16 enetsw_readw(struct bcm_enet_priv *priv, u32 off) in enetsw_readw() argument
63 return bcm_readw(priv->base + off); in enetsw_readw()
66 static inline void enetsw_writew(struct bcm_enet_priv *priv, in enetsw_writew() argument
69 bcm_writew(val, priv->base + off); in enetsw_writew()
72 static inline u8 enetsw_readb(struct bcm_enet_priv *priv, u32 off) in enetsw_readb() argument
74 return bcm_readb(priv->base + off); in enetsw_readb()
77 static inline void enetsw_writeb(struct bcm_enet_priv *priv, in enetsw_writeb() argument
80 bcm_writeb(val, priv->base + off); in enetsw_writeb()
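
The six helpers above are thin inline wrappers that add a register offset to the per-device base pointer and defer to the platform MMIO accessors (bcm_readl() and friends); the enet_ and enetsw_ variants differ only in which register block priv->base points at. Below is a minimal, runnable userspace mock of the same pattern, with a plain array standing in for the ioremap'd window; all mock_* names are illustrative, not the driver's.

    #include <stdint.h>
    #include <stdio.h>

    struct mock_priv {
        uint32_t base[64];              /* stand-in for the ioremap'd register window */
    };

    static inline uint32_t mock_readl(struct mock_priv *priv, uint32_t off)
    {
        return priv->base[off / 4];     /* the driver does bcm_readl(priv->base + off) */
    }

    static inline void mock_writel(struct mock_priv *priv, uint32_t val, uint32_t off)
    {
        priv->base[off / 4] = val;      /* the driver does bcm_writel(val, ...) */
    }

    int main(void)
    {
        struct mock_priv p = { { 0 } };

        mock_writel(&p, 0x12345678, 0x10);
        printf("0x%08x\n", (unsigned)mock_readl(&p, 0x10));  /* prints 0x12345678 */
        return 0;
    }
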
85 static inline u32 enet_dma_readl(struct bcm_enet_priv *priv, u32 off) in enet_dma_readl() argument
90 static inline void enet_dma_writel(struct bcm_enet_priv *priv, in enet_dma_writel() argument
96 static inline u32 enet_dmac_readl(struct bcm_enet_priv *priv, u32 off, int chan) in enet_dmac_readl() argument
99 bcm63xx_enetdmacreg(off) + chan * priv->dma_chan_width); in enet_dmac_readl()
102 static inline void enet_dmac_writel(struct bcm_enet_priv *priv, in enet_dmac_writel() argument
106 bcm63xx_enetdmacreg(off) + chan * priv->dma_chan_width); in enet_dmac_writel()
109 static inline u32 enet_dmas_readl(struct bcm_enet_priv *priv, u32 off, int chan) in enet_dmas_readl() argument
111 return bcm_readl(bcm_enet_shared_base[2] + off + chan * priv->dma_chan_width); in enet_dmas_readl()
114 static inline void enet_dmas_writel(struct bcm_enet_priv *priv, in enet_dmas_writel() argument
117 bcm_writel(val, bcm_enet_shared_base[2] + off + chan * priv->dma_chan_width); in enet_dmas_writel()
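
The DMA accessors add one more ingredient: channel-relative addressing. A channel's registers sit at a fixed stride (priv->dma_chan_width) from the shared DMA base, so every access computes base + register offset + chan * stride; the dmac helpers additionally translate a logical register id through bcm63xx_enetdmacreg(), which this runnable sketch skips. Names here are hypothetical.

    #include <stdint.h>
    #include <stdio.h>

    /* Byte offset of a per-channel register, as in enet_dmac_readl():
     * register offset + channel index * channel stride. */
    static uint32_t dmac_off(uint32_t reg, int chan, uint32_t chan_width)
    {
        return reg + (uint32_t)chan * chan_width;
    }

    int main(void)
    {
        for (int chan = 0; chan < 4; chan++)
            printf("chan %d -> 0x%03x\n", chan,
                   (unsigned)dmac_off(0x14, chan, 0x40));
        return 0;
    }
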
124 static int do_mdio_op(struct bcm_enet_priv *priv, unsigned int data) in do_mdio_op() argument
129 enet_writel(priv, ENET_IR_MII, ENET_IR_REG); in do_mdio_op()
131 enet_writel(priv, data, ENET_MIIDATA_REG); in do_mdio_op()
137 if (enet_readl(priv, ENET_IR_REG) & ENET_IR_MII) in do_mdio_op()
148 static int bcm_enet_mdio_read(struct bcm_enet_priv *priv, int mii_id, in bcm_enet_mdio_read() argument
158 if (do_mdio_op(priv, tmp)) in bcm_enet_mdio_read()
161 val = enet_readl(priv, ENET_MIIDATA_REG); in bcm_enet_mdio_read()
169 static int bcm_enet_mdio_write(struct bcm_enet_priv *priv, int mii_id, in bcm_enet_mdio_write() argument
180 (void)do_mdio_op(priv, tmp); in bcm_enet_mdio_write()
190 return bcm_enet_mdio_read(bus->priv, mii_id, regnum); in bcm_enet_mdio_read_phylib()
199 return bcm_enet_mdio_write(bus->priv, mii_id, regnum, value); in bcm_enet_mdio_write_phylib()
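
do_mdio_op() follows the usual start-then-poll MDIO shape: acknowledge any stale MII completion in ENET_IR_REG, write the command word to ENET_MIIDATA_REG, then spin (bounded) until the MII bit in the status register comes back; on success bcm_enet_mdio_read() pulls the result out of ENET_MIIDATA_REG. A hedged, runnable mock of that handshake, with a variable standing in for the status register and the hardware completion simulated:

    #include <stdint.h>
    #include <stdio.h>

    #define MOCK_IR_MII 0x1u

    static uint32_t mock_ir_reg;        /* stands in for ENET_IR_REG */

    static void mock_hw_complete(void) { mock_ir_reg |= MOCK_IR_MII; }

    static int mock_do_mdio_op(uint32_t cmd)
    {
        int limit = 1000;

        mock_ir_reg &= ~MOCK_IR_MII;    /* ack a stale completion first */
        (void)cmd;                      /* the driver writes this to ENET_MIIDATA_REG */
        mock_hw_complete();             /* real hardware raises the bit itself */

        while (limit-- > 0) {
            if (mock_ir_reg & MOCK_IR_MII)
                return 0;               /* operation finished */
        }
        return -1;                      /* timed out */
    }

    int main(void)
    {
        printf("mdio op: %s\n", mock_do_mdio_op(0) ? "timeout" : "done");
        return 0;
    }
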
225 struct bcm_enet_priv *priv; in bcm_enet_refill_rx() local
227 priv = netdev_priv(dev); in bcm_enet_refill_rx()
229 while (priv->rx_desc_count < priv->rx_ring_size) { in bcm_enet_refill_rx()
234 desc_idx = priv->rx_dirty_desc; in bcm_enet_refill_rx()
235 desc = &priv->rx_desc_cpu[desc_idx]; in bcm_enet_refill_rx()
237 if (!priv->rx_buf[desc_idx]) { in bcm_enet_refill_rx()
241 buf = napi_alloc_frag(priv->rx_frag_size); in bcm_enet_refill_rx()
243 buf = netdev_alloc_frag(priv->rx_frag_size); in bcm_enet_refill_rx()
246 priv->rx_buf[desc_idx] = buf; in bcm_enet_refill_rx()
247 desc->address = dma_map_single(&priv->pdev->dev, in bcm_enet_refill_rx()
248 buf + priv->rx_buf_offset, in bcm_enet_refill_rx()
249 priv->rx_buf_size, in bcm_enet_refill_rx()
253 len_stat = priv->rx_buf_size << DMADESC_LENGTH_SHIFT; in bcm_enet_refill_rx()
255 if (priv->rx_dirty_desc == priv->rx_ring_size - 1) { in bcm_enet_refill_rx()
256 len_stat |= (DMADESC_WRAP_MASK >> priv->dma_desc_shift); in bcm_enet_refill_rx()
257 priv->rx_dirty_desc = 0; in bcm_enet_refill_rx()
259 priv->rx_dirty_desc++; in bcm_enet_refill_rx()
264 priv->rx_desc_count++; in bcm_enet_refill_rx()
267 if (priv->dma_has_sram) in bcm_enet_refill_rx()
268 enet_dma_writel(priv, 1, ENETDMA_BUFALLOC_REG(priv->rx_chan)); in bcm_enet_refill_rx()
270 enet_dmac_writel(priv, 1, ENETDMAC_BUFALLOC, priv->rx_chan); in bcm_enet_refill_rx()
275 if (priv->rx_desc_count == 0 && netif_running(dev)) { in bcm_enet_refill_rx()
276 dev_warn(&priv->pdev->dev, "unable to refill rx ring\n"); in bcm_enet_refill_rx()
277 priv->rx_timeout.expires = jiffies + HZ; in bcm_enet_refill_rx()
278 add_timer(&priv->rx_timeout); in bcm_enet_refill_rx()
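
The refill loop keeps handing empty buffers back to hardware until rx_desc_count reaches rx_ring_size: allocate a page fragment, DMA-map it, encode the buffer length into len_stat, OR in the WRAP flag on the last slot so the engine returns to descriptor 0, and announce each buffer through the BUFALLOC register. If not a single buffer could be allocated while the interface is running, a one-second timer retries. A runnable mock of just the ring bookkeeping; allocation, dma_map_single() and the timer are elided, and all names are hypothetical:

    #include <stdio.h>

    #define RING_SIZE 8
    #define WRAP_FLAG 0x1000u

    struct mock_rx_ring {
        unsigned int len_stat[RING_SIZE];
        int count;                      /* descriptors currently owned by hardware */
        int dirty;                      /* next slot to refill */
    };

    static void mock_refill(struct mock_rx_ring *r, unsigned int buf_len_bits)
    {
        while (r->count < RING_SIZE) {
            int idx = r->dirty;
            unsigned int ls = buf_len_bits;

            if (idx == RING_SIZE - 1) {
                ls |= WRAP_FLAG;        /* last slot: tell the DMA to wrap */
                r->dirty = 0;
            } else {
                r->dirty++;
            }
            r->len_stat[idx] = ls;      /* the driver also sets OWN and the address */
            r->count++;
        }
    }

    int main(void)
    {
        struct mock_rx_ring r = { { 0 }, 0, 0 };

        mock_refill(&r, 100);
        printf("last slot wraps: %s\n",
               (r.len_stat[RING_SIZE - 1] & WRAP_FLAG) ? "yes" : "no");
        return 0;
    }
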
289 struct bcm_enet_priv *priv = from_timer(priv, t, rx_timeout); in bcm_enet_refill_rx_timer() local
290 struct net_device *dev = priv->net_dev; in bcm_enet_refill_rx_timer()
292 spin_lock(&priv->rx_lock); in bcm_enet_refill_rx_timer()
294 spin_unlock(&priv->rx_lock); in bcm_enet_refill_rx_timer()
302 struct bcm_enet_priv *priv; in bcm_enet_receive_queue() local
307 priv = netdev_priv(dev); in bcm_enet_receive_queue()
309 kdev = &priv->pdev->dev; in bcm_enet_receive_queue()
314 if (budget > priv->rx_desc_count) in bcm_enet_receive_queue()
315 budget = priv->rx_desc_count; in bcm_enet_receive_queue()
325 desc_idx = priv->rx_curr_desc; in bcm_enet_receive_queue()
326 desc = &priv->rx_desc_cpu[desc_idx]; in bcm_enet_receive_queue()
339 priv->rx_curr_desc++; in bcm_enet_receive_queue()
340 if (priv->rx_curr_desc == priv->rx_ring_size) in bcm_enet_receive_queue()
341 priv->rx_curr_desc = 0; in bcm_enet_receive_queue()
345 if ((len_stat & (DMADESC_ESOP_MASK >> priv->dma_desc_shift)) != in bcm_enet_receive_queue()
346 (DMADESC_ESOP_MASK >> priv->dma_desc_shift)) { in bcm_enet_receive_queue()
352 if (!priv->enet_is_sw && in bcm_enet_receive_queue()
368 buf = priv->rx_buf[desc_idx]; in bcm_enet_receive_queue()
374 skb = napi_alloc_skb(&priv->napi, len); in bcm_enet_receive_queue()
383 memcpy(skb->data, buf + priv->rx_buf_offset, len); in bcm_enet_receive_queue()
388 priv->rx_buf_size, DMA_FROM_DEVICE); in bcm_enet_receive_queue()
389 priv->rx_buf[desc_idx] = NULL; in bcm_enet_receive_queue()
391 skb = napi_build_skb(buf, priv->rx_frag_size); in bcm_enet_receive_queue()
397 skb_reserve(skb, priv->rx_buf_offset); in bcm_enet_receive_queue()
409 priv->rx_desc_count -= processed; in bcm_enet_receive_queue()
411 if (processed || !priv->rx_desc_count) { in bcm_enet_receive_queue()
415 enet_dmac_writel(priv, priv->dma_chan_en_mask, in bcm_enet_receive_queue()
416 ENETDMAC_CHANCFG, priv->rx_chan); in bcm_enet_receive_queue()
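
bcm_enet_receive_queue() uses a copy-break strategy: short frames are memcpy'd into a small skb from napi_alloc_skb() so the large receive fragment stays mapped in the ring, while longer frames are unmapped and wrapped in place with napi_build_skb(); when anything was processed the channel is re-kicked via ENETDMAC_CHANCFG. The threshold condition itself is elided from the hits above, so the COPYBREAK name below is hypothetical. A runnable mock of the decision:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define COPYBREAK 256               /* hypothetical threshold, bytes */

    /* Returns the buffer handed to the stack; *consumed tells the caller
     * whether the ring fragment was given away (and must be replaced). */
    static void *mock_receive(void *frag, size_t len, int *consumed)
    {
        if (len < COPYBREAK) {
            void *copy = malloc(len);

            if (copy)
                memcpy(copy, frag, len);    /* frag stays in the rx ring */
            *consumed = 0;
            return copy;
        }
        *consumed = 1;                      /* frag becomes the skb head */
        return frag;
    }

    int main(void)
    {
        char frag[2048] = "payload";
        int consumed;
        void *skb = mock_receive(frag, 64, &consumed);

        printf("consumed=%d\n", consumed);  /* 0: small frame was copied */
        if (skb != frag)
            free(skb);
        return 0;
    }
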
428 struct bcm_enet_priv *priv; in bcm_enet_tx_reclaim() local
432 priv = netdev_priv(dev); in bcm_enet_tx_reclaim()
436 while (priv->tx_desc_count < priv->tx_ring_size) { in bcm_enet_tx_reclaim()
442 spin_lock(&priv->tx_lock); in bcm_enet_tx_reclaim()
444 desc = &priv->tx_desc_cpu[priv->tx_dirty_desc]; in bcm_enet_tx_reclaim()
447 spin_unlock(&priv->tx_lock); in bcm_enet_tx_reclaim()
455 skb = priv->tx_skb[priv->tx_dirty_desc]; in bcm_enet_tx_reclaim()
456 priv->tx_skb[priv->tx_dirty_desc] = NULL; in bcm_enet_tx_reclaim()
457 dma_unmap_single(&priv->pdev->dev, desc->address, skb->len, in bcm_enet_tx_reclaim()
460 priv->tx_dirty_desc++; in bcm_enet_tx_reclaim()
461 if (priv->tx_dirty_desc == priv->tx_ring_size) in bcm_enet_tx_reclaim()
462 priv->tx_dirty_desc = 0; in bcm_enet_tx_reclaim()
463 priv->tx_desc_count++; in bcm_enet_tx_reclaim()
465 spin_unlock(&priv->tx_lock); in bcm_enet_tx_reclaim()
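
Reclaim walks the ring from tx_dirty_desc under tx_lock, stops at the first descriptor the hardware still owns, and otherwise unmaps and frees the skb, advancing the dirty index with wraparound and returning the slot to the pool (tx_desc_count++). A runnable mock of that loop, with a plain ownership flag standing in for the descriptor status bits; names are illustrative:

    #include <stdbool.h>
    #include <stdio.h>

    #define RING_SIZE 8

    struct mock_tx_ring {
        bool hw_owned[RING_SIZE];       /* stand-in for the OWN bit in len_stat */
        int count;                      /* free descriptors */
        int dirty;                      /* oldest in-flight descriptor */
    };

    static int mock_reclaim(struct mock_tx_ring *r)
    {
        int released = 0;

        while (r->count < RING_SIZE) {
            if (r->hw_owned[r->dirty])
                break;                  /* hardware hasn't finished this one */
            /* the driver unmaps the buffer and frees the skb here */
            r->dirty = (r->dirty + 1) % RING_SIZE;
            r->count++;
            released++;
        }
        return released;
    }

    int main(void)
    {
        struct mock_tx_ring r = { { false }, RING_SIZE - 3, 0 };

        r.hw_owned[2] = true;           /* third packet still in flight */
        printf("released %d descriptors\n", mock_reclaim(&r));  /* 2 */
        return 0;
    }
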
488 struct bcm_enet_priv *priv; in bcm_enet_poll() local
492 priv = container_of(napi, struct bcm_enet_priv, napi); in bcm_enet_poll()
493 dev = priv->net_dev; in bcm_enet_poll()
496 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_poll()
497 ENETDMAC_IR, priv->rx_chan); in bcm_enet_poll()
498 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_poll()
499 ENETDMAC_IR, priv->tx_chan); in bcm_enet_poll()
504 spin_lock(&priv->rx_lock); in bcm_enet_poll()
506 spin_unlock(&priv->rx_lock); in bcm_enet_poll()
518 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_poll()
519 ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_poll()
520 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_poll()
521 ENETDMAC_IRMASK, priv->tx_chan); in bcm_enet_poll()
532 struct bcm_enet_priv *priv; in bcm_enet_isr_mac() local
536 priv = netdev_priv(dev); in bcm_enet_isr_mac()
538 stat = enet_readl(priv, ENET_IR_REG); in bcm_enet_isr_mac()
543 enet_writel(priv, ENET_IR_MIB, ENET_IR_REG); in bcm_enet_isr_mac()
544 enet_writel(priv, 0, ENET_IRMASK_REG); in bcm_enet_isr_mac()
547 schedule_work(&priv->mib_update_task); in bcm_enet_isr_mac()
558 struct bcm_enet_priv *priv; in bcm_enet_isr_dma() local
561 priv = netdev_priv(dev); in bcm_enet_isr_dma()
564 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_isr_dma()
565 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->tx_chan); in bcm_enet_isr_dma()
567 napi_schedule(&priv->napi); in bcm_enet_isr_dma()
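
The two groups above form the standard NAPI handshake: the hard interrupt masks further DMA interrupts (writes 0 to ENETDMAC_IRMASK) and schedules NAPI; the poll callback acks the raw status (ENETDMAC_IR), does bounded work, and only rewrites the mask once it completes under budget. A runnable mock of that ordering, with flags standing in for the mask and status registers:

    #include <stdbool.h>
    #include <stdio.h>

    static bool irq_masked;             /* stand-in for ENETDMAC_IRMASK */
    static bool irq_pending;            /* stand-in for ENETDMAC_IR */
    static int  backlog = 5;

    static void mock_isr(void)
    {
        irq_masked = true;              /* no more interrupts until poll is done */
        /* napi_schedule() would run mock_poll() soon after */
    }

    static int mock_poll(int budget)
    {
        int done = 0;

        irq_pending = false;            /* ack the raw status first */
        while (done < budget && backlog > 0) {
            backlog--;                  /* receive/reclaim one packet */
            done++;
        }
        if (done < budget)
            irq_masked = false;         /* napi_complete_done() + unmask */
        return done;
    }

    int main(void)
    {
        irq_pending = true;
        mock_isr();
        while (irq_masked)              /* NAPI keeps polling until under budget */
            printf("poll handled %d\n", mock_poll(4));
        return 0;
    }
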
578 struct bcm_enet_priv *priv; in bcm_enet_start_xmit() local
583 priv = netdev_priv(dev); in bcm_enet_start_xmit()
586 spin_lock(&priv->tx_lock); in bcm_enet_start_xmit()
590 if (unlikely(!priv->tx_desc_count)) { in bcm_enet_start_xmit()
592 dev_err(&priv->pdev->dev, "xmit called with no tx desc " in bcm_enet_start_xmit()
599 if (priv->enet_is_sw && skb->len < 64) { in bcm_enet_start_xmit()
618 desc = &priv->tx_desc_cpu[priv->tx_curr_desc]; in bcm_enet_start_xmit()
619 priv->tx_skb[priv->tx_curr_desc] = skb; in bcm_enet_start_xmit()
622 desc->address = dma_map_single(&priv->pdev->dev, skb->data, skb->len, in bcm_enet_start_xmit()
626 len_stat |= (DMADESC_ESOP_MASK >> priv->dma_desc_shift) | in bcm_enet_start_xmit()
630 priv->tx_curr_desc++; in bcm_enet_start_xmit()
631 if (priv->tx_curr_desc == priv->tx_ring_size) { in bcm_enet_start_xmit()
632 priv->tx_curr_desc = 0; in bcm_enet_start_xmit()
633 len_stat |= (DMADESC_WRAP_MASK >> priv->dma_desc_shift); in bcm_enet_start_xmit()
635 priv->tx_desc_count--; in bcm_enet_start_xmit()
646 if (!netdev_xmit_more() || !priv->tx_desc_count) in bcm_enet_start_xmit()
647 enet_dmac_writel(priv, priv->dma_chan_en_mask, in bcm_enet_start_xmit()
648 ENETDMAC_CHANCFG, priv->tx_chan); in bcm_enet_start_xmit()
651 if (!priv->tx_desc_count) in bcm_enet_start_xmit()
659 spin_unlock(&priv->tx_lock); in bcm_enet_start_xmit()
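
Transmit pads sub-64-byte frames on the switch variant, fills the next descriptor under tx_lock, sets SOP/EOP (plus WRAP on the last ring slot), and batches the doorbell: ENETDMAC_CHANCFG is only written when netdev_xmit_more() says no further packet is queued behind this one, or when the ring just ran out of free descriptors. A runnable mock of that doorbell condition; names are illustrative:

    #include <stdbool.h>
    #include <stdio.h>

    static int free_desc = 3;
    static int doorbells;

    /* Enqueue one packet; `more` mirrors netdev_xmit_more(). */
    static void mock_xmit(bool more)
    {
        free_desc--;                    /* descriptor handed to hardware */
        if (!more || free_desc == 0) {
            doorbells++;                /* kick ENETDMAC_CHANCFG once per batch */
            printf("doorbell after enqueue (free=%d)\n", free_desc);
        }
    }

    int main(void)
    {
        mock_xmit(true);                /* batched: no kick yet */
        mock_xmit(true);
        mock_xmit(false);               /* end of batch: single kick */
        printf("%d doorbell(s) for 3 packets\n", doorbells);
        return 0;
    }
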
668 struct bcm_enet_priv *priv; in bcm_enet_set_mac_address() local
672 priv = netdev_priv(dev); in bcm_enet_set_mac_address()
678 enet_writel(priv, val, ENET_PML_REG(0)); in bcm_enet_set_mac_address()
682 enet_writel(priv, val, ENET_PMH_REG(0)); in bcm_enet_set_mac_address()
692 struct bcm_enet_priv *priv; in bcm_enet_set_multicast_list() local
697 priv = netdev_priv(dev); in bcm_enet_set_multicast_list()
699 val = enet_readl(priv, ENET_RXCFG_REG); in bcm_enet_set_multicast_list()
716 enet_writel(priv, val, ENET_RXCFG_REG); in bcm_enet_set_multicast_list()
731 enet_writel(priv, tmp, ENET_PML_REG(i + 1)); in bcm_enet_set_multicast_list()
735 enet_writel(priv, tmp, ENET_PMH_REG(i++ + 1)); in bcm_enet_set_multicast_list()
739 enet_writel(priv, 0, ENET_PML_REG(i + 1)); in bcm_enet_set_multicast_list()
740 enet_writel(priv, 0, ENET_PMH_REG(i + 1)); in bcm_enet_set_multicast_list()
743 enet_writel(priv, val, ENET_RXCFG_REG); in bcm_enet_set_multicast_list()
749 static void bcm_enet_set_duplex(struct bcm_enet_priv *priv, int fullduplex) in bcm_enet_set_duplex() argument
753 val = enet_readl(priv, ENET_TXCTL_REG); in bcm_enet_set_duplex()
758 enet_writel(priv, val, ENET_TXCTL_REG); in bcm_enet_set_duplex()
764 static void bcm_enet_set_flow(struct bcm_enet_priv *priv, int rx_en, int tx_en) in bcm_enet_set_flow() argument
769 val = enet_readl(priv, ENET_RXCFG_REG); in bcm_enet_set_flow()
774 enet_writel(priv, val, ENET_RXCFG_REG); in bcm_enet_set_flow()
776 if (!priv->dma_has_sram) in bcm_enet_set_flow()
780 val = enet_dma_readl(priv, ENETDMA_CFG_REG); in bcm_enet_set_flow()
782 val |= ENETDMA_CFG_FLOWCH_MASK(priv->rx_chan); in bcm_enet_set_flow()
784 val &= ~ENETDMA_CFG_FLOWCH_MASK(priv->rx_chan); in bcm_enet_set_flow()
785 enet_dma_writel(priv, val, ENETDMA_CFG_REG); in bcm_enet_set_flow()
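
bcm_enet_set_duplex() and bcm_enet_set_flow() are read-modify-write updates: read the config register, set or clear one feature bit according to the flag, write it back (the DMA-side flow-control bit in ENETDMA_CFG is only touched when dma_has_sram is set). A tiny runnable helper expressing the idiom; the mask value is illustrative:

    #include <stdint.h>
    #include <stdio.h>

    /* Read-modify-write one feature bit, as the duplex/flow helpers do. */
    static uint32_t rmw_bit(uint32_t reg, uint32_t mask, int enable)
    {
        if (enable)
            reg |= mask;
        else
            reg &= ~mask;
        return reg;
    }

    int main(void)
    {
        uint32_t txctl = 0;

        txctl = rmw_bit(txctl, 0x1, 1);     /* e.g. full-duplex on */
        txctl = rmw_bit(txctl, 0x1, 0);     /* and back off */
        printf("txctl=0x%x\n", (unsigned)txctl);
        return 0;
    }
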
793 struct bcm_enet_priv *priv; in bcm_enet_adjust_phy_link() local
797 priv = netdev_priv(dev); in bcm_enet_adjust_phy_link()
801 if (priv->old_link != phydev->link) { in bcm_enet_adjust_phy_link()
803 priv->old_link = phydev->link; in bcm_enet_adjust_phy_link()
807 if (phydev->link && phydev->duplex != priv->old_duplex) { in bcm_enet_adjust_phy_link()
808 bcm_enet_set_duplex(priv, in bcm_enet_adjust_phy_link()
811 priv->old_duplex = phydev->duplex; in bcm_enet_adjust_phy_link()
816 if (phydev->link && phydev->pause != priv->old_pause) { in bcm_enet_adjust_phy_link()
823 } else if (!priv->pause_auto) { in bcm_enet_adjust_phy_link()
825 rx_pause_en = priv->pause_rx; in bcm_enet_adjust_phy_link()
826 tx_pause_en = priv->pause_tx; in bcm_enet_adjust_phy_link()
832 bcm_enet_set_flow(priv, rx_pause_en, tx_pause_en); in bcm_enet_adjust_phy_link()
834 priv->old_pause = phydev->pause; in bcm_enet_adjust_phy_link()
854 struct bcm_enet_priv *priv; in bcm_enet_adjust_link() local
856 priv = netdev_priv(dev); in bcm_enet_adjust_link()
857 bcm_enet_set_duplex(priv, priv->force_duplex_full); in bcm_enet_adjust_link()
858 bcm_enet_set_flow(priv, priv->pause_rx, priv->pause_tx); in bcm_enet_adjust_link()
863 priv->force_speed_100 ? 100 : 10, in bcm_enet_adjust_link()
864 priv->force_duplex_full ? "full" : "half", in bcm_enet_adjust_link()
865 priv->pause_rx ? "rx" : "off", in bcm_enet_adjust_link()
866 priv->pause_tx ? "tx" : "off"); in bcm_enet_adjust_link()
869 static void bcm_enet_free_rx_buf_ring(struct device *kdev, struct bcm_enet_priv *priv) in bcm_enet_free_rx_buf_ring() argument
873 for (i = 0; i < priv->rx_ring_size; i++) { in bcm_enet_free_rx_buf_ring()
876 if (!priv->rx_buf[i]) in bcm_enet_free_rx_buf_ring()
879 desc = &priv->rx_desc_cpu[i]; in bcm_enet_free_rx_buf_ring()
880 dma_unmap_single(kdev, desc->address, priv->rx_buf_size, in bcm_enet_free_rx_buf_ring()
882 skb_free_frag(priv->rx_buf[i]); in bcm_enet_free_rx_buf_ring()
884 kfree(priv->rx_buf); in bcm_enet_free_rx_buf_ring()
892 struct bcm_enet_priv *priv; in bcm_enet_open() local
902 priv = netdev_priv(dev); in bcm_enet_open()
903 kdev = &priv->pdev->dev; in bcm_enet_open()
905 if (priv->has_phy) { in bcm_enet_open()
908 priv->mii_bus->id, priv->phy_id); in bcm_enet_open()
921 phy_set_sym_pause(phydev, priv->pause_rx, priv->pause_rx, in bcm_enet_open()
922 priv->pause_auto); in bcm_enet_open()
926 priv->old_link = 0; in bcm_enet_open()
927 priv->old_duplex = -1; in bcm_enet_open()
928 priv->old_pause = -1; in bcm_enet_open()
934 enet_writel(priv, 0, ENET_IRMASK_REG); in bcm_enet_open()
935 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_open()
936 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->tx_chan); in bcm_enet_open()
942 ret = request_irq(priv->irq_rx, bcm_enet_isr_dma, 0, in bcm_enet_open()
947 ret = request_irq(priv->irq_tx, bcm_enet_isr_dma, in bcm_enet_open()
954 enet_writel(priv, 0, ENET_PML_REG(i)); in bcm_enet_open()
955 enet_writel(priv, 0, ENET_PMH_REG(i)); in bcm_enet_open()
963 size = priv->rx_ring_size * sizeof(struct bcm_enet_desc); in bcm_enet_open()
964 p = dma_alloc_coherent(kdev, size, &priv->rx_desc_dma, GFP_KERNEL); in bcm_enet_open()
970 priv->rx_desc_alloc_size = size; in bcm_enet_open()
971 priv->rx_desc_cpu = p; in bcm_enet_open()
974 size = priv->tx_ring_size * sizeof(struct bcm_enet_desc); in bcm_enet_open()
975 p = dma_alloc_coherent(kdev, size, &priv->tx_desc_dma, GFP_KERNEL); in bcm_enet_open()
981 priv->tx_desc_alloc_size = size; in bcm_enet_open()
982 priv->tx_desc_cpu = p; in bcm_enet_open()
984 priv->tx_skb = kcalloc(priv->tx_ring_size, sizeof(struct sk_buff *), in bcm_enet_open()
986 if (!priv->tx_skb) { in bcm_enet_open()
991 priv->tx_desc_count = priv->tx_ring_size; in bcm_enet_open()
992 priv->tx_dirty_desc = 0; in bcm_enet_open()
993 priv->tx_curr_desc = 0; in bcm_enet_open()
994 spin_lock_init(&priv->tx_lock); in bcm_enet_open()
997 priv->rx_buf = kcalloc(priv->rx_ring_size, sizeof(void *), in bcm_enet_open()
999 if (!priv->rx_buf) { in bcm_enet_open()
1004 priv->rx_desc_count = 0; in bcm_enet_open()
1005 priv->rx_dirty_desc = 0; in bcm_enet_open()
1006 priv->rx_curr_desc = 0; in bcm_enet_open()
1009 if (priv->dma_has_sram) in bcm_enet_open()
1010 enet_dma_writel(priv, ENETDMA_BUFALLOC_FORCE_MASK | 0, in bcm_enet_open()
1011 ENETDMA_BUFALLOC_REG(priv->rx_chan)); in bcm_enet_open()
1013 enet_dmac_writel(priv, ENETDMA_BUFALLOC_FORCE_MASK | 0, in bcm_enet_open()
1014 ENETDMAC_BUFALLOC, priv->rx_chan); in bcm_enet_open()
1023 if (priv->dma_has_sram) { in bcm_enet_open()
1024 enet_dmas_writel(priv, priv->rx_desc_dma, in bcm_enet_open()
1025 ENETDMAS_RSTART_REG, priv->rx_chan); in bcm_enet_open()
1026 enet_dmas_writel(priv, priv->tx_desc_dma, in bcm_enet_open()
1027 ENETDMAS_RSTART_REG, priv->tx_chan); in bcm_enet_open()
1029 enet_dmac_writel(priv, priv->rx_desc_dma, in bcm_enet_open()
1030 ENETDMAC_RSTART, priv->rx_chan); in bcm_enet_open()
1031 enet_dmac_writel(priv, priv->tx_desc_dma, in bcm_enet_open()
1032 ENETDMAC_RSTART, priv->tx_chan); in bcm_enet_open()
1036 if (priv->dma_has_sram) { in bcm_enet_open()
1037 enet_dmas_writel(priv, 0, ENETDMAS_SRAM2_REG, priv->rx_chan); in bcm_enet_open()
1038 enet_dmas_writel(priv, 0, ENETDMAS_SRAM2_REG, priv->tx_chan); in bcm_enet_open()
1039 enet_dmas_writel(priv, 0, ENETDMAS_SRAM3_REG, priv->rx_chan); in bcm_enet_open()
1040 enet_dmas_writel(priv, 0, ENETDMAS_SRAM3_REG, priv->tx_chan); in bcm_enet_open()
1041 enet_dmas_writel(priv, 0, ENETDMAS_SRAM4_REG, priv->rx_chan); in bcm_enet_open()
1042 enet_dmas_writel(priv, 0, ENETDMAS_SRAM4_REG, priv->tx_chan); in bcm_enet_open()
1044 enet_dmac_writel(priv, 0, ENETDMAC_FC, priv->rx_chan); in bcm_enet_open()
1045 enet_dmac_writel(priv, 0, ENETDMAC_FC, priv->tx_chan); in bcm_enet_open()
1049 enet_writel(priv, priv->hw_mtu, ENET_RXMAXLEN_REG); in bcm_enet_open()
1050 enet_writel(priv, priv->hw_mtu, ENET_TXMAXLEN_REG); in bcm_enet_open()
1053 enet_dmac_writel(priv, priv->dma_maxburst, in bcm_enet_open()
1054 ENETDMAC_MAXBURST, priv->rx_chan); in bcm_enet_open()
1055 enet_dmac_writel(priv, priv->dma_maxburst, in bcm_enet_open()
1056 ENETDMAC_MAXBURST, priv->tx_chan); in bcm_enet_open()
1059 enet_writel(priv, BCMENET_TX_FIFO_TRESH, ENET_TXWMARK_REG); in bcm_enet_open()
1062 if (priv->dma_has_sram) { in bcm_enet_open()
1063 val = priv->rx_ring_size / 3; in bcm_enet_open()
1064 enet_dma_writel(priv, val, ENETDMA_FLOWCL_REG(priv->rx_chan)); in bcm_enet_open()
1065 val = (priv->rx_ring_size * 2) / 3; in bcm_enet_open()
1066 enet_dma_writel(priv, val, ENETDMA_FLOWCH_REG(priv->rx_chan)); in bcm_enet_open()
1068 enet_dmac_writel(priv, 5, ENETDMAC_FC, priv->rx_chan); in bcm_enet_open()
1069 enet_dmac_writel(priv, priv->rx_ring_size, ENETDMAC_LEN, priv->rx_chan); in bcm_enet_open()
1070 enet_dmac_writel(priv, priv->tx_ring_size, ENETDMAC_LEN, priv->tx_chan); in bcm_enet_open()
1076 val = enet_readl(priv, ENET_CTL_REG); in bcm_enet_open()
1078 enet_writel(priv, val, ENET_CTL_REG); in bcm_enet_open()
1079 if (priv->dma_has_sram) in bcm_enet_open()
1080 enet_dma_writel(priv, ENETDMA_CFG_EN_MASK, ENETDMA_CFG_REG); in bcm_enet_open()
1081 enet_dmac_writel(priv, priv->dma_chan_en_mask, in bcm_enet_open()
1082 ENETDMAC_CHANCFG, priv->rx_chan); in bcm_enet_open()
1085 enet_writel(priv, ENET_IR_MIB, ENET_IR_REG); in bcm_enet_open()
1086 enet_writel(priv, ENET_IR_MIB, ENET_IRMASK_REG); in bcm_enet_open()
1089 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_open()
1090 ENETDMAC_IR, priv->rx_chan); in bcm_enet_open()
1091 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_open()
1092 ENETDMAC_IR, priv->tx_chan); in bcm_enet_open()
1095 napi_enable(&priv->napi); in bcm_enet_open()
1097 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_open()
1098 ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_open()
1099 enet_dmac_writel(priv, priv->dma_chan_int_mask, in bcm_enet_open()
1100 ENETDMAC_IRMASK, priv->tx_chan); in bcm_enet_open()
1111 bcm_enet_free_rx_buf_ring(kdev, priv); in bcm_enet_open()
1114 kfree(priv->tx_skb); in bcm_enet_open()
1117 dma_free_coherent(kdev, priv->tx_desc_alloc_size, in bcm_enet_open()
1118 priv->tx_desc_cpu, priv->tx_desc_dma); in bcm_enet_open()
1121 dma_free_coherent(kdev, priv->rx_desc_alloc_size, in bcm_enet_open()
1122 priv->rx_desc_cpu, priv->rx_desc_dma); in bcm_enet_open()
1125 free_irq(priv->irq_tx, dev); in bcm_enet_open()
1128 free_irq(priv->irq_rx, dev); in bcm_enet_open()
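
The tail of bcm_enet_open() (the out_* labels above) shows the kernel's goto-unwind convention: each allocation step has exactly one teardown label, and a failure jumps to the label that frees everything acquired so far, in reverse order. A runnable mock of the shape, with hypothetical stub steps that can be made to fail:

    #include <stdio.h>

    static int step(const char *name, int fail)
    {
        printf("%s\n", name);
        return fail ? -1 : 0;
    }

    static int mock_open(int fail_at)
    {
        int ret;

        ret = step("request irqs", fail_at == 1);
        if (ret)
            goto out;
        ret = step("alloc rx ring", fail_at == 2);
        if (ret)
            goto out_free_irq;
        ret = step("alloc tx ring", fail_at == 3);
        if (ret)
            goto out_free_rx_ring;
        return 0;                       /* device is up */

    out_free_rx_ring:
        printf("free rx ring\n");       /* undo in reverse order */
    out_free_irq:
        printf("free irqs\n");
    out:
        return ret;
    }

    int main(void)
    {
        return mock_open(3) ? 1 : 0;    /* fail at the tx ring to see the unwind */
    }
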
1143 static void bcm_enet_disable_mac(struct bcm_enet_priv *priv) in bcm_enet_disable_mac() argument
1148 val = enet_readl(priv, ENET_CTL_REG); in bcm_enet_disable_mac()
1150 enet_writel(priv, val, ENET_CTL_REG); in bcm_enet_disable_mac()
1156 val = enet_readl(priv, ENET_CTL_REG); in bcm_enet_disable_mac()
1166 static void bcm_enet_disable_dma(struct bcm_enet_priv *priv, int chan) in bcm_enet_disable_dma() argument
1170 enet_dmac_writel(priv, 0, ENETDMAC_CHANCFG, chan); in bcm_enet_disable_dma()
1176 val = enet_dmac_readl(priv, ENETDMAC_CHANCFG, chan); in bcm_enet_disable_dma()
1188 struct bcm_enet_priv *priv; in bcm_enet_stop() local
1191 priv = netdev_priv(dev); in bcm_enet_stop()
1192 kdev = &priv->pdev->dev; in bcm_enet_stop()
1195 napi_disable(&priv->napi); in bcm_enet_stop()
1196 if (priv->has_phy) in bcm_enet_stop()
1198 del_timer_sync(&priv->rx_timeout); in bcm_enet_stop()
1201 enet_writel(priv, 0, ENET_IRMASK_REG); in bcm_enet_stop()
1202 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->rx_chan); in bcm_enet_stop()
1203 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->tx_chan); in bcm_enet_stop()
1206 cancel_work_sync(&priv->mib_update_task); in bcm_enet_stop()
1209 bcm_enet_disable_dma(priv, priv->tx_chan); in bcm_enet_stop()
1210 bcm_enet_disable_dma(priv, priv->rx_chan); in bcm_enet_stop()
1211 bcm_enet_disable_mac(priv); in bcm_enet_stop()
1217 bcm_enet_free_rx_buf_ring(kdev, priv); in bcm_enet_stop()
1220 kfree(priv->tx_skb); in bcm_enet_stop()
1221 dma_free_coherent(kdev, priv->rx_desc_alloc_size, in bcm_enet_stop()
1222 priv->rx_desc_cpu, priv->rx_desc_dma); in bcm_enet_stop()
1223 dma_free_coherent(kdev, priv->tx_desc_alloc_size, in bcm_enet_stop()
1224 priv->tx_desc_cpu, priv->tx_desc_dma); in bcm_enet_stop()
1225 free_irq(priv->irq_tx, dev); in bcm_enet_stop()
1226 free_irq(priv->irq_rx, dev); in bcm_enet_stop()
1230 if (priv->has_phy) in bcm_enet_stop()
1355 static void update_mib_counters(struct bcm_enet_priv *priv) in update_mib_counters() argument
1368 val = enet_readl(priv, ENET_MIB_REG(s->mib_reg)); in update_mib_counters()
1369 p = (char *)priv + s->stat_offset; in update_mib_counters()
1380 (void)enet_readl(priv, ENET_MIB_REG(unused_mib_regs[i])); in update_mib_counters()
1385 struct bcm_enet_priv *priv; in bcm_enet_update_mib_counters_defer() local
1387 priv = container_of(t, struct bcm_enet_priv, mib_update_task); in bcm_enet_update_mib_counters_defer()
1388 mutex_lock(&priv->mib_update_lock); in bcm_enet_update_mib_counters_defer()
1389 update_mib_counters(priv); in bcm_enet_update_mib_counters_defer()
1390 mutex_unlock(&priv->mib_update_lock); in bcm_enet_update_mib_counters_defer()
1393 if (netif_running(priv->net_dev)) in bcm_enet_update_mib_counters_defer()
1394 enet_writel(priv, ENET_IR_MIB, ENET_IRMASK_REG); in bcm_enet_update_mib_counters_defer()
1401 struct bcm_enet_priv *priv; in bcm_enet_get_ethtool_stats() local
1404 priv = netdev_priv(netdev); in bcm_enet_get_ethtool_stats()
1406 mutex_lock(&priv->mib_update_lock); in bcm_enet_get_ethtool_stats()
1407 update_mib_counters(priv); in bcm_enet_get_ethtool_stats()
1417 p = (char *)priv; in bcm_enet_get_ethtool_stats()
1422 mutex_unlock(&priv->mib_update_lock); in bcm_enet_get_ethtool_stats()
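
update_mib_counters() folds each 32-bit MIB register into a wider software accumulator stored inside priv (at s->stat_offset), serialized by mib_update_lock; the deferred work re-arms the MIB interrupt afterwards. This relies on the hardware's clear-on-read counter mode, which the ENET_MIBCTL_REG setup in bcm_enet_hw_preinit() further down suggests. A runnable mock of accumulating a clear-on-read counter; names are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t hw_counter;         /* clears when read, like the MIB regs */
    static uint64_t sw_total;           /* accumulator kept in priv */

    static uint32_t mock_read_clear(void)
    {
        uint32_t v = hw_counter;

        hw_counter = 0;                 /* hardware side effect of the read */
        return v;
    }

    static void mock_update_mib(void)
    {
        sw_total += mock_read_clear();  /* safe to call repeatedly */
    }

    int main(void)
    {
        hw_counter = 40;
        mock_update_mib();
        hw_counter = 2;
        mock_update_mib();
        printf("total=%llu\n", (unsigned long long)sw_total);   /* 42 */
        return 0;
    }
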
1427 struct bcm_enet_priv *priv; in bcm_enet_nway_reset() local
1429 priv = netdev_priv(dev); in bcm_enet_nway_reset()
1430 if (priv->has_phy) in bcm_enet_nway_reset()
1439 struct bcm_enet_priv *priv; in bcm_enet_get_link_ksettings() local
1442 priv = netdev_priv(dev); in bcm_enet_get_link_ksettings()
1444 if (priv->has_phy) { in bcm_enet_get_link_ksettings()
1453 cmd->base.speed = (priv->force_speed_100) ? in bcm_enet_get_link_ksettings()
1455 cmd->base.duplex = (priv->force_duplex_full) ? in bcm_enet_get_link_ksettings()
1474 struct bcm_enet_priv *priv; in bcm_enet_set_link_ksettings() local
1476 priv = netdev_priv(dev); in bcm_enet_set_link_ksettings()
1477 if (priv->has_phy) { in bcm_enet_set_link_ksettings()
1489 priv->force_speed_100 = in bcm_enet_set_link_ksettings()
1491 priv->force_duplex_full = in bcm_enet_set_link_ksettings()
1506 struct bcm_enet_priv *priv; in bcm_enet_get_ringparam() local
1508 priv = netdev_priv(dev); in bcm_enet_get_ringparam()
1513 ering->rx_pending = priv->rx_ring_size; in bcm_enet_get_ringparam()
1514 ering->tx_pending = priv->tx_ring_size; in bcm_enet_get_ringparam()
1522 struct bcm_enet_priv *priv; in bcm_enet_set_ringparam() local
1525 priv = netdev_priv(dev); in bcm_enet_set_ringparam()
1533 priv->rx_ring_size = ering->rx_pending; in bcm_enet_set_ringparam()
1534 priv->tx_ring_size = ering->tx_pending; in bcm_enet_set_ringparam()
1551 struct bcm_enet_priv *priv; in bcm_enet_get_pauseparam() local
1553 priv = netdev_priv(dev); in bcm_enet_get_pauseparam()
1554 ecmd->autoneg = priv->pause_auto; in bcm_enet_get_pauseparam()
1555 ecmd->rx_pause = priv->pause_rx; in bcm_enet_get_pauseparam()
1556 ecmd->tx_pause = priv->pause_tx; in bcm_enet_get_pauseparam()
1562 struct bcm_enet_priv *priv; in bcm_enet_set_pauseparam() local
1564 priv = netdev_priv(dev); in bcm_enet_set_pauseparam()
1566 if (priv->has_phy) { in bcm_enet_set_pauseparam()
1579 priv->pause_auto = ecmd->autoneg; in bcm_enet_set_pauseparam()
1580 priv->pause_rx = ecmd->rx_pause; in bcm_enet_set_pauseparam()
1581 priv->pause_tx = ecmd->tx_pause; in bcm_enet_set_pauseparam()
1603 struct bcm_enet_priv *priv; in bcm_enet_ioctl() local
1605 priv = netdev_priv(dev); in bcm_enet_ioctl()
1606 if (priv->has_phy) { in bcm_enet_ioctl()
1628 struct bcm_enet_priv *priv = netdev_priv(dev); in bcm_enet_change_mtu() local
1643 priv->hw_mtu = actual_mtu; in bcm_enet_change_mtu()
1649 priv->rx_buf_size = ALIGN(actual_mtu + ETH_FCS_LEN, in bcm_enet_change_mtu()
1650 priv->dma_maxburst * 4); in bcm_enet_change_mtu()
1652 priv->rx_frag_size = SKB_DATA_ALIGN(priv->rx_buf_offset + priv->rx_buf_size) + in bcm_enet_change_mtu()
1662 static void bcm_enet_hw_preinit(struct bcm_enet_priv *priv) in bcm_enet_hw_preinit() argument
1668 bcm_enet_disable_mac(priv); in bcm_enet_hw_preinit()
1672 enet_writel(priv, val, ENET_CTL_REG); in bcm_enet_hw_preinit()
1677 val = enet_readl(priv, ENET_CTL_REG); in bcm_enet_hw_preinit()
1684 val = enet_readl(priv, ENET_CTL_REG); in bcm_enet_hw_preinit()
1685 if (priv->use_external_mii) in bcm_enet_hw_preinit()
1689 enet_writel(priv, val, ENET_CTL_REG); in bcm_enet_hw_preinit()
1692 enet_writel(priv, (0x1f << ENET_MIISC_MDCFREQDIV_SHIFT) | in bcm_enet_hw_preinit()
1696 val = enet_readl(priv, ENET_MIBCTL_REG); in bcm_enet_hw_preinit()
1698 enet_writel(priv, val, ENET_MIBCTL_REG); in bcm_enet_hw_preinit()
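
bcm_enet_hw_preinit() also shows the reset-and-poll idiom: disable the MAC, write a soft-reset bit into ENET_CTL_REG, then re-read the register until the bit self-clears (bounded), before configuring internal/external MII and the MIB control register. The exact bit name is elided from the hits above, so SRESET below is a stand-in; a runnable mock with a simulated self-clearing bit:

    #include <stdint.h>
    #include <stdio.h>

    #define SRESET 0x1u

    static uint32_t ctl_reg;
    static int clear_after = 3;         /* simulate hardware latency */

    static uint32_t mock_read_ctl(void)
    {
        if (clear_after > 0 && --clear_after == 0)
            ctl_reg &= ~SRESET;         /* hardware clears the bit when done */
        return ctl_reg;
    }

    static int mock_soft_reset(void)
    {
        int limit = 1000;

        ctl_reg |= SRESET;              /* start the reset */
        while (limit-- > 0) {
            if (!(mock_read_ctl() & SRESET))
                return 0;               /* reset complete */
        }
        return -1;                      /* never came back */
    }

    int main(void)
    {
        printf("reset: %s\n", mock_soft_reset() ? "timeout" : "ok");
        return 0;
    }
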
1716 struct bcm_enet_priv *priv; in bcm_enet_probe() local
1732 dev = alloc_etherdev(sizeof(*priv)); in bcm_enet_probe()
1735 priv = netdev_priv(dev); in bcm_enet_probe()
1737 priv->enet_is_sw = false; in bcm_enet_probe()
1738 priv->dma_maxburst = BCMENET_DMA_MAXBURST; in bcm_enet_probe()
1739 priv->rx_buf_offset = NET_SKB_PAD; in bcm_enet_probe()
1745 priv->base = devm_platform_ioremap_resource(pdev, 0); in bcm_enet_probe()
1746 if (IS_ERR(priv->base)) { in bcm_enet_probe()
1747 ret = PTR_ERR(priv->base); in bcm_enet_probe()
1751 dev->irq = priv->irq = irq; in bcm_enet_probe()
1752 priv->irq_rx = irq_rx; in bcm_enet_probe()
1753 priv->irq_tx = irq_tx; in bcm_enet_probe()
1755 priv->mac_clk = devm_clk_get(&pdev->dev, "enet"); in bcm_enet_probe()
1756 if (IS_ERR(priv->mac_clk)) { in bcm_enet_probe()
1757 ret = PTR_ERR(priv->mac_clk); in bcm_enet_probe()
1760 ret = clk_prepare_enable(priv->mac_clk); in bcm_enet_probe()
1765 priv->rx_ring_size = BCMENET_DEF_RX_DESC; in bcm_enet_probe()
1766 priv->tx_ring_size = BCMENET_DEF_TX_DESC; in bcm_enet_probe()
1771 priv->has_phy = pd->has_phy; in bcm_enet_probe()
1772 priv->phy_id = pd->phy_id; in bcm_enet_probe()
1773 priv->has_phy_interrupt = pd->has_phy_interrupt; in bcm_enet_probe()
1774 priv->phy_interrupt = pd->phy_interrupt; in bcm_enet_probe()
1775 priv->use_external_mii = !pd->use_internal_phy; in bcm_enet_probe()
1776 priv->pause_auto = pd->pause_auto; in bcm_enet_probe()
1777 priv->pause_rx = pd->pause_rx; in bcm_enet_probe()
1778 priv->pause_tx = pd->pause_tx; in bcm_enet_probe()
1779 priv->force_duplex_full = pd->force_duplex_full; in bcm_enet_probe()
1780 priv->force_speed_100 = pd->force_speed_100; in bcm_enet_probe()
1781 priv->dma_chan_en_mask = pd->dma_chan_en_mask; in bcm_enet_probe()
1782 priv->dma_chan_int_mask = pd->dma_chan_int_mask; in bcm_enet_probe()
1783 priv->dma_chan_width = pd->dma_chan_width; in bcm_enet_probe()
1784 priv->dma_has_sram = pd->dma_has_sram; in bcm_enet_probe()
1785 priv->dma_desc_shift = pd->dma_desc_shift; in bcm_enet_probe()
1786 priv->rx_chan = pd->rx_chan; in bcm_enet_probe()
1787 priv->tx_chan = pd->tx_chan; in bcm_enet_probe()
1790 if (priv->has_phy && !priv->use_external_mii) { in bcm_enet_probe()
1792 priv->phy_clk = devm_clk_get(&pdev->dev, "ephy"); in bcm_enet_probe()
1793 if (IS_ERR(priv->phy_clk)) { in bcm_enet_probe()
1794 ret = PTR_ERR(priv->phy_clk); in bcm_enet_probe()
1795 priv->phy_clk = NULL; in bcm_enet_probe()
1798 ret = clk_prepare_enable(priv->phy_clk); in bcm_enet_probe()
1804 bcm_enet_hw_preinit(priv); in bcm_enet_probe()
1807 if (priv->has_phy) { in bcm_enet_probe()
1809 priv->mii_bus = mdiobus_alloc(); in bcm_enet_probe()
1810 if (!priv->mii_bus) { in bcm_enet_probe()
1815 bus = priv->mii_bus; in bcm_enet_probe()
1818 bus->priv = priv; in bcm_enet_probe()
1826 bus->phy_mask = ~(1 << priv->phy_id); in bcm_enet_probe()
1828 if (priv->has_phy_interrupt) in bcm_enet_probe()
1829 bus->irq[priv->phy_id] = priv->phy_interrupt; in bcm_enet_probe()
1847 spin_lock_init(&priv->rx_lock); in bcm_enet_probe()
1850 timer_setup(&priv->rx_timeout, bcm_enet_refill_rx_timer, 0); in bcm_enet_probe()
1853 mutex_init(&priv->mib_update_lock); in bcm_enet_probe()
1854 INIT_WORK(&priv->mib_update_task, bcm_enet_update_mib_counters_defer); in bcm_enet_probe()
1858 enet_writel(priv, 0, ENET_MIB_REG(i)); in bcm_enet_probe()
1862 netif_napi_add_weight(dev, &priv->napi, bcm_enet_poll, 16); in bcm_enet_probe()
1876 priv->pdev = pdev; in bcm_enet_probe()
1877 priv->net_dev = dev; in bcm_enet_probe()
1882 if (priv->mii_bus) in bcm_enet_probe()
1883 mdiobus_unregister(priv->mii_bus); in bcm_enet_probe()
1886 if (priv->mii_bus) in bcm_enet_probe()
1887 mdiobus_free(priv->mii_bus); in bcm_enet_probe()
1891 enet_writel(priv, 0, ENET_MIISC_REG); in bcm_enet_probe()
1892 clk_disable_unprepare(priv->phy_clk); in bcm_enet_probe()
1895 clk_disable_unprepare(priv->mac_clk); in bcm_enet_probe()
1907 struct bcm_enet_priv *priv; in bcm_enet_remove() local
1912 priv = netdev_priv(dev); in bcm_enet_remove()
1916 enet_writel(priv, 0, ENET_MIISC_REG); in bcm_enet_remove()
1918 if (priv->has_phy) { in bcm_enet_remove()
1919 mdiobus_unregister(priv->mii_bus); in bcm_enet_remove()
1920 mdiobus_free(priv->mii_bus); in bcm_enet_remove()
1931 clk_disable_unprepare(priv->phy_clk); in bcm_enet_remove()
1932 clk_disable_unprepare(priv->mac_clk); in bcm_enet_remove()
1948 static int bcmenet_sw_mdio_read(struct bcm_enet_priv *priv, in bcmenet_sw_mdio_read() argument
1954 spin_lock_bh(&priv->enetsw_mdio_lock); in bcmenet_sw_mdio_read()
1955 enetsw_writel(priv, 0, ENETSW_MDIOC_REG); in bcmenet_sw_mdio_read()
1964 enetsw_writel(priv, reg, ENETSW_MDIOC_REG); in bcmenet_sw_mdio_read()
1966 ret = enetsw_readw(priv, ENETSW_MDIOD_REG); in bcmenet_sw_mdio_read()
1967 spin_unlock_bh(&priv->enetsw_mdio_lock); in bcmenet_sw_mdio_read()
1971 static void bcmenet_sw_mdio_write(struct bcm_enet_priv *priv, in bcmenet_sw_mdio_write() argument
1977 spin_lock_bh(&priv->enetsw_mdio_lock); in bcmenet_sw_mdio_write()
1978 enetsw_writel(priv, 0, ENETSW_MDIOC_REG); in bcmenet_sw_mdio_write()
1989 enetsw_writel(priv, reg, ENETSW_MDIOC_REG); in bcmenet_sw_mdio_write()
1991 spin_unlock_bh(&priv->enetsw_mdio_lock); in bcmenet_sw_mdio_write()
2004 struct bcm_enet_priv *priv = from_timer(priv, t, swphy_poll); in swphy_poll_timer() local
2007 for (i = 0; i < priv->num_ports; i++) { in swphy_poll_timer()
2013 port = &priv->used_ports[i]; in swphy_poll_timer()
2022 val = bcmenet_sw_mdio_read(priv, external_phy, in swphy_poll_timer()
2029 if (!(up ^ priv->sw_port_link[i])) in swphy_poll_timer()
2032 priv->sw_port_link[i] = up; in swphy_poll_timer()
2036 dev_info(&priv->pdev->dev, "link DOWN on %s\n", in swphy_poll_timer()
2038 enetsw_writeb(priv, ENETSW_PORTOV_ENABLE_MASK, in swphy_poll_timer()
2040 enetsw_writeb(priv, ENETSW_PTCTRL_RXDIS_MASK | in swphy_poll_timer()
2046 advertise = bcmenet_sw_mdio_read(priv, external_phy, in swphy_poll_timer()
2049 lpa = bcmenet_sw_mdio_read(priv, external_phy, port->phy_id, in swphy_poll_timer()
2062 advertise = bcmenet_sw_mdio_read(priv, external_phy, in swphy_poll_timer()
2065 lpa = bcmenet_sw_mdio_read(priv, external_phy, in swphy_poll_timer()
2075 dev_info(&priv->pdev->dev, in swphy_poll_timer()
2089 enetsw_writeb(priv, override, ENETSW_PORTOV_REG(i)); in swphy_poll_timer()
2090 enetsw_writeb(priv, 0, ENETSW_PTCTRL_REG(i)); in swphy_poll_timer()
2093 priv->swphy_poll.expires = jiffies + HZ; in swphy_poll_timer()
2094 add_timer(&priv->swphy_poll); in swphy_poll_timer()
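
Because the internal switch ports have no PHY interrupt, swphy_poll_timer() resolves the link by hand every second: read the status register for link state, then AND the local advertisement with the link-partner ability (MII_ADVERTISE/MII_LPA in mii.h terms; the register constants are elided from the hits above) and pick the best common mode, finally programming the port override register. A runnable mock of the resolution step, using the standard mii.h advertisement bit values:

    #include <stdint.h>
    #include <stdio.h>

    /* Values as in linux/mii.h */
    #define ADVERTISE_100FULL 0x0100
    #define ADVERTISE_100HALF 0x0080
    #define ADVERTISE_10FULL  0x0040

    static void mock_resolve(uint16_t advertise, uint16_t lpa,
                             int *speed, int *full_duplex)
    {
        uint16_t common = advertise & lpa;  /* modes both ends can do */

        if (common & ADVERTISE_100FULL) {
            *speed = 100; *full_duplex = 1;
        } else if (common & ADVERTISE_100HALF) {
            *speed = 100; *full_duplex = 0;
        } else if (common & ADVERTISE_10FULL) {
            *speed = 10;  *full_duplex = 1;
        } else {
            *speed = 10;  *full_duplex = 0; /* 10/half is the floor */
        }
    }

    int main(void)
    {
        int speed, duplex;

        mock_resolve(0x01e0, 0x0140, &speed, &duplex);
        printf("%dMbit/s %s duplex\n", speed, duplex ? "full" : "half");
        return 0;
    }
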
2102 struct bcm_enet_priv *priv; in bcm_enetsw_open() local
2109 priv = netdev_priv(dev); in bcm_enetsw_open()
2110 kdev = &priv->pdev->dev; in bcm_enetsw_open()
2113 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->rx_chan); in bcm_enetsw_open()
2114 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->tx_chan); in bcm_enetsw_open()
2116 ret = request_irq(priv->irq_rx, bcm_enet_isr_dma, in bcm_enetsw_open()
2121 if (priv->irq_tx != -1) { in bcm_enetsw_open()
2122 ret = request_irq(priv->irq_tx, bcm_enet_isr_dma, in bcm_enetsw_open()
2129 size = priv->rx_ring_size * sizeof(struct bcm_enet_desc); in bcm_enetsw_open()
2130 p = dma_alloc_coherent(kdev, size, &priv->rx_desc_dma, GFP_KERNEL); in bcm_enetsw_open()
2137 priv->rx_desc_alloc_size = size; in bcm_enetsw_open()
2138 priv->rx_desc_cpu = p; in bcm_enetsw_open()
2141 size = priv->tx_ring_size * sizeof(struct bcm_enet_desc); in bcm_enetsw_open()
2142 p = dma_alloc_coherent(kdev, size, &priv->tx_desc_dma, GFP_KERNEL); in bcm_enetsw_open()
2149 priv->tx_desc_alloc_size = size; in bcm_enetsw_open()
2150 priv->tx_desc_cpu = p; in bcm_enetsw_open()
2152 priv->tx_skb = kcalloc(priv->tx_ring_size, sizeof(struct sk_buff *), in bcm_enetsw_open()
2154 if (!priv->tx_skb) { in bcm_enetsw_open()
2160 priv->tx_desc_count = priv->tx_ring_size; in bcm_enetsw_open()
2161 priv->tx_dirty_desc = 0; in bcm_enetsw_open()
2162 priv->tx_curr_desc = 0; in bcm_enetsw_open()
2163 spin_lock_init(&priv->tx_lock); in bcm_enetsw_open()
2166 priv->rx_buf = kcalloc(priv->rx_ring_size, sizeof(void *), in bcm_enetsw_open()
2168 if (!priv->rx_buf) { in bcm_enetsw_open()
2174 priv->rx_desc_count = 0; in bcm_enetsw_open()
2175 priv->rx_dirty_desc = 0; in bcm_enetsw_open()
2176 priv->rx_curr_desc = 0; in bcm_enetsw_open()
2179 for (i = 0; i < priv->num_ports; i++) { in bcm_enetsw_open()
2180 enetsw_writeb(priv, ENETSW_PORTOV_ENABLE_MASK, in bcm_enetsw_open()
2182 enetsw_writeb(priv, ENETSW_PTCTRL_RXDIS_MASK | in bcm_enetsw_open()
2186 priv->sw_port_link[i] = 0; in bcm_enetsw_open()
2190 val = enetsw_readb(priv, ENETSW_GMCR_REG); in bcm_enetsw_open()
2192 enetsw_writeb(priv, val, ENETSW_GMCR_REG); in bcm_enetsw_open()
2195 enetsw_writeb(priv, val, ENETSW_GMCR_REG); in bcm_enetsw_open()
2199 val = enetsw_readb(priv, ENETSW_IMPOV_REG); in bcm_enetsw_open()
2201 enetsw_writeb(priv, val, ENETSW_IMPOV_REG); in bcm_enetsw_open()
2204 val = enetsw_readb(priv, ENETSW_SWMODE_REG); in bcm_enetsw_open()
2206 enetsw_writeb(priv, val, ENETSW_SWMODE_REG); in bcm_enetsw_open()
2209 enetsw_writel(priv, 0x1ff, ENETSW_JMBCTL_PORT_REG); in bcm_enetsw_open()
2210 enetsw_writew(priv, 9728, ENETSW_JMBCTL_MAXSIZE_REG); in bcm_enetsw_open()
2213 enet_dma_writel(priv, ENETDMA_BUFALLOC_FORCE_MASK | 0, in bcm_enetsw_open()
2214 ENETDMA_BUFALLOC_REG(priv->rx_chan)); in bcm_enetsw_open()
2223 enet_dmas_writel(priv, priv->rx_desc_dma, in bcm_enetsw_open()
2224 ENETDMAS_RSTART_REG, priv->rx_chan); in bcm_enetsw_open()
2225 enet_dmas_writel(priv, priv->tx_desc_dma, in bcm_enetsw_open()
2226 ENETDMAS_RSTART_REG, priv->tx_chan); in bcm_enetsw_open()
2229 enet_dmas_writel(priv, 0, ENETDMAS_SRAM2_REG, priv->rx_chan); in bcm_enetsw_open()
2230 enet_dmas_writel(priv, 0, ENETDMAS_SRAM2_REG, priv->tx_chan); in bcm_enetsw_open()
2231 enet_dmas_writel(priv, 0, ENETDMAS_SRAM3_REG, priv->rx_chan); in bcm_enetsw_open()
2232 enet_dmas_writel(priv, 0, ENETDMAS_SRAM3_REG, priv->tx_chan); in bcm_enetsw_open()
2233 enet_dmas_writel(priv, 0, ENETDMAS_SRAM4_REG, priv->rx_chan); in bcm_enetsw_open()
2234 enet_dmas_writel(priv, 0, ENETDMAS_SRAM4_REG, priv->tx_chan); in bcm_enetsw_open()
2237 enet_dmac_writel(priv, priv->dma_maxburst, in bcm_enetsw_open()
2238 ENETDMAC_MAXBURST, priv->rx_chan); in bcm_enetsw_open()
2239 enet_dmac_writel(priv, priv->dma_maxburst, in bcm_enetsw_open()
2240 ENETDMAC_MAXBURST, priv->tx_chan); in bcm_enetsw_open()
2243 val = priv->rx_ring_size / 3; in bcm_enetsw_open()
2244 enet_dma_writel(priv, val, ENETDMA_FLOWCL_REG(priv->rx_chan)); in bcm_enetsw_open()
2245 val = (priv->rx_ring_size * 2) / 3; in bcm_enetsw_open()
2246 enet_dma_writel(priv, val, ENETDMA_FLOWCH_REG(priv->rx_chan)); in bcm_enetsw_open()
2252 enet_dma_writel(priv, ENETDMA_CFG_EN_MASK, ENETDMA_CFG_REG); in bcm_enetsw_open()
2253 enet_dmac_writel(priv, ENETDMAC_CHANCFG_EN_MASK, in bcm_enetsw_open()
2254 ENETDMAC_CHANCFG, priv->rx_chan); in bcm_enetsw_open()
2257 enet_dmac_writel(priv, ENETDMAC_IR_PKTDONE_MASK, in bcm_enetsw_open()
2258 ENETDMAC_IR, priv->rx_chan); in bcm_enetsw_open()
2259 enet_dmac_writel(priv, ENETDMAC_IR_PKTDONE_MASK, in bcm_enetsw_open()
2260 ENETDMAC_IR, priv->tx_chan); in bcm_enetsw_open()
2263 napi_enable(&priv->napi); in bcm_enetsw_open()
2265 enet_dmac_writel(priv, ENETDMAC_IR_PKTDONE_MASK, in bcm_enetsw_open()
2266 ENETDMAC_IRMASK, priv->rx_chan); in bcm_enetsw_open()
2267 enet_dmac_writel(priv, ENETDMAC_IR_PKTDONE_MASK, in bcm_enetsw_open()
2268 ENETDMAC_IRMASK, priv->tx_chan); in bcm_enetsw_open()
2274 for (i = 0; i < priv->num_ports; i++) { in bcm_enetsw_open()
2277 port = &priv->used_ports[i]; in bcm_enetsw_open()
2306 enetsw_writeb(priv, override, ENETSW_PORTOV_REG(i)); in bcm_enetsw_open()
2307 enetsw_writeb(priv, 0, ENETSW_PTCTRL_REG(i)); in bcm_enetsw_open()
2311 timer_setup(&priv->swphy_poll, swphy_poll_timer, 0); in bcm_enetsw_open()
2312 mod_timer(&priv->swphy_poll, jiffies); in bcm_enetsw_open()
2316 bcm_enet_free_rx_buf_ring(kdev, priv); in bcm_enetsw_open()
2319 kfree(priv->tx_skb); in bcm_enetsw_open()
2322 dma_free_coherent(kdev, priv->tx_desc_alloc_size, in bcm_enetsw_open()
2323 priv->tx_desc_cpu, priv->tx_desc_dma); in bcm_enetsw_open()
2326 dma_free_coherent(kdev, priv->rx_desc_alloc_size, in bcm_enetsw_open()
2327 priv->rx_desc_cpu, priv->rx_desc_dma); in bcm_enetsw_open()
2330 if (priv->irq_tx != -1) in bcm_enetsw_open()
2331 free_irq(priv->irq_tx, dev); in bcm_enetsw_open()
2334 free_irq(priv->irq_rx, dev); in bcm_enetsw_open()
2343 struct bcm_enet_priv *priv; in bcm_enetsw_stop() local
2346 priv = netdev_priv(dev); in bcm_enetsw_stop()
2347 kdev = &priv->pdev->dev; in bcm_enetsw_stop()
2349 del_timer_sync(&priv->swphy_poll); in bcm_enetsw_stop()
2351 napi_disable(&priv->napi); in bcm_enetsw_stop()
2352 del_timer_sync(&priv->rx_timeout); in bcm_enetsw_stop()
2355 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->rx_chan); in bcm_enetsw_stop()
2356 enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->tx_chan); in bcm_enetsw_stop()
2359 bcm_enet_disable_dma(priv, priv->tx_chan); in bcm_enetsw_stop()
2360 bcm_enet_disable_dma(priv, priv->rx_chan); in bcm_enetsw_stop()
2366 bcm_enet_free_rx_buf_ring(kdev, priv); in bcm_enetsw_stop()
2369 kfree(priv->tx_skb); in bcm_enetsw_stop()
2370 dma_free_coherent(kdev, priv->rx_desc_alloc_size, in bcm_enetsw_stop()
2371 priv->rx_desc_cpu, priv->rx_desc_dma); in bcm_enetsw_stop()
2372 dma_free_coherent(kdev, priv->tx_desc_alloc_size, in bcm_enetsw_stop()
2373 priv->tx_desc_cpu, priv->tx_desc_dma); in bcm_enetsw_stop()
2374 if (priv->irq_tx != -1) in bcm_enetsw_stop()
2375 free_irq(priv->irq_tx, dev); in bcm_enetsw_stop()
2376 free_irq(priv->irq_rx, dev); in bcm_enetsw_stop()
2389 static int bcm_enetsw_phy_is_external(struct bcm_enet_priv *priv, int phy_id) in bcm_enetsw_phy_is_external() argument
2393 for (i = 0; i < priv->num_ports; ++i) { in bcm_enetsw_phy_is_external()
2394 if (!priv->used_ports[i].used) in bcm_enetsw_phy_is_external()
2396 if (priv->used_ports[i].phy_id == phy_id) in bcm_enetsw_phy_is_external()
2411 struct bcm_enet_priv *priv; in bcm_enetsw_mii_mdio_read() local
2413 priv = netdev_priv(dev); in bcm_enetsw_mii_mdio_read()
2414 return bcmenet_sw_mdio_read(priv, in bcm_enetsw_mii_mdio_read()
2415 bcm_enetsw_phy_is_external(priv, phy_id), in bcm_enetsw_mii_mdio_read()
2426 struct bcm_enet_priv *priv; in bcm_enetsw_mii_mdio_write() local
2428 priv = netdev_priv(dev); in bcm_enetsw_mii_mdio_write()
2429 bcmenet_sw_mdio_write(priv, bcm_enetsw_phy_is_external(priv, phy_id), in bcm_enetsw_mii_mdio_write()
2541 struct bcm_enet_priv *priv; in bcm_enetsw_get_ethtool_stats() local
2544 priv = netdev_priv(netdev); in bcm_enetsw_get_ethtool_stats()
2558 lo = enetsw_readl(priv, ENETSW_MIB_REG(reg)); in bcm_enetsw_get_ethtool_stats()
2559 p = (char *)priv + s->stat_offset; in bcm_enetsw_get_ethtool_stats()
2562 hi = enetsw_readl(priv, ENETSW_MIB_REG(reg + 1)); in bcm_enetsw_get_ethtool_stats()
2578 p = (char *)priv + s->stat_offset; in bcm_enetsw_get_ethtool_stats()
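
The switch MIB counters can be wider than one register: for 64-bit statistics the code reads the low word, then the adjacent high word (ENETSW_MIB_REG(reg) and reg + 1), and stores the combined value at stat_offset inside priv. The combining line itself is elided from the hits above, so the shift-and-OR below is the presumed reconstruction, shown as a runnable mock with simulated register reads:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t lo = 0x89abcdefu;      /* enetsw_readl(priv, ENETSW_MIB_REG(reg)) */
        uint32_t hi = 0x01234567u;      /* enetsw_readl(priv, ENETSW_MIB_REG(reg + 1)) */
        uint64_t val = ((uint64_t)hi << 32) | lo;

        printf("0x%016llx\n", (unsigned long long)val);
        return 0;
    }
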
2591 struct bcm_enet_priv *priv; in bcm_enetsw_get_ringparam() local
2593 priv = netdev_priv(dev); in bcm_enetsw_get_ringparam()
2600 ering->rx_pending = priv->rx_ring_size; in bcm_enetsw_get_ringparam()
2601 ering->tx_pending = priv->tx_ring_size; in bcm_enetsw_get_ringparam()
2610 struct bcm_enet_priv *priv; in bcm_enetsw_set_ringparam() local
2613 priv = netdev_priv(dev); in bcm_enetsw_set_ringparam()
2621 priv->rx_ring_size = ering->rx_pending; in bcm_enetsw_set_ringparam()
2622 priv->tx_ring_size = ering->tx_pending; in bcm_enetsw_set_ringparam()
2646 struct bcm_enet_priv *priv; in bcm_enetsw_probe() local
2661 dev = alloc_etherdev(sizeof(*priv)); in bcm_enetsw_probe()
2664 priv = netdev_priv(dev); in bcm_enetsw_probe()
2667 priv->enet_is_sw = true; in bcm_enetsw_probe()
2668 priv->irq_rx = irq_rx; in bcm_enetsw_probe()
2669 priv->irq_tx = irq_tx; in bcm_enetsw_probe()
2670 priv->rx_ring_size = BCMENET_DEF_RX_DESC; in bcm_enetsw_probe()
2671 priv->tx_ring_size = BCMENET_DEF_TX_DESC; in bcm_enetsw_probe()
2672 priv->dma_maxburst = BCMENETSW_DMA_MAXBURST; in bcm_enetsw_probe()
2673 priv->rx_buf_offset = NET_SKB_PAD + NET_IP_ALIGN; in bcm_enetsw_probe()
2678 memcpy(priv->used_ports, pd->used_ports, in bcm_enetsw_probe()
2680 priv->num_ports = pd->num_ports; in bcm_enetsw_probe()
2681 priv->dma_has_sram = pd->dma_has_sram; in bcm_enetsw_probe()
2682 priv->dma_chan_en_mask = pd->dma_chan_en_mask; in bcm_enetsw_probe()
2683 priv->dma_chan_int_mask = pd->dma_chan_int_mask; in bcm_enetsw_probe()
2684 priv->dma_chan_width = pd->dma_chan_width; in bcm_enetsw_probe()
2691 priv->base = devm_ioremap_resource(&pdev->dev, res_mem); in bcm_enetsw_probe()
2692 if (IS_ERR(priv->base)) { in bcm_enetsw_probe()
2693 ret = PTR_ERR(priv->base); in bcm_enetsw_probe()
2697 priv->mac_clk = devm_clk_get(&pdev->dev, "enetsw"); in bcm_enetsw_probe()
2698 if (IS_ERR(priv->mac_clk)) { in bcm_enetsw_probe()
2699 ret = PTR_ERR(priv->mac_clk); in bcm_enetsw_probe()
2702 ret = clk_prepare_enable(priv->mac_clk); in bcm_enetsw_probe()
2706 priv->rx_chan = 0; in bcm_enetsw_probe()
2707 priv->tx_chan = 1; in bcm_enetsw_probe()
2708 spin_lock_init(&priv->rx_lock); in bcm_enetsw_probe()
2711 timer_setup(&priv->rx_timeout, bcm_enet_refill_rx_timer, 0); in bcm_enetsw_probe()
2715 netif_napi_add_weight(dev, &priv->napi, bcm_enet_poll, 16); in bcm_enetsw_probe()
2719 spin_lock_init(&priv->enetsw_mdio_lock); in bcm_enetsw_probe()
2727 priv->pdev = pdev; in bcm_enetsw_probe()
2728 priv->net_dev = dev; in bcm_enetsw_probe()
2733 clk_disable_unprepare(priv->mac_clk); in bcm_enetsw_probe()
2743 struct bcm_enet_priv *priv; in bcm_enetsw_remove() local
2748 priv = netdev_priv(dev); in bcm_enetsw_remove()
2751 clk_disable_unprepare(priv->mac_clk); in bcm_enetsw_remove()