Lines Matching refs:alx
71 static int alx_refill_rx_ring(struct alx_priv *alx, gfp_t gfp) in alx_refill_rx_ring() argument
73 struct alx_rx_queue *rxq = alx->qnapi[0]->rxq; in alx_refill_rx_ring()
80 if (++next == alx->rx_ringsz) in alx_refill_rx_ring()
96 skb = __netdev_alloc_skb(alx->dev, alx->rxbuf_size + 64, gfp); in alx_refill_rx_ring()
103 dma = dma_map_single(&alx->hw.pdev->dev, in alx_refill_rx_ring()
104 skb->data, alx->rxbuf_size, in alx_refill_rx_ring()
106 if (dma_mapping_error(&alx->hw.pdev->dev, dma)) { in alx_refill_rx_ring()
120 dma_unmap_len_set(cur_buf, size, alx->rxbuf_size); in alx_refill_rx_ring()
125 if (++next == alx->rx_ringsz) in alx_refill_rx_ring()
135 alx_write_mem16(&alx->hw, ALX_RFD_PIDX, cur); in alx_refill_rx_ring()
141 static struct alx_tx_queue *alx_tx_queue_mapping(struct alx_priv *alx, in alx_tx_queue_mapping() argument
146 if (r_idx >= alx->num_txq) in alx_tx_queue_mapping()
147 r_idx = r_idx % alx->num_txq; in alx_tx_queue_mapping()
149 return alx->qnapi[r_idx]->txq; in alx_tx_queue_mapping()
166 struct alx_priv *alx; in alx_clean_tx_irq() local
172 alx = netdev_priv(txq->netdev); in alx_clean_tx_irq()
176 hw_read_idx = alx_read_mem16(&alx->hw, txq->c_reg); in alx_clean_tx_irq()
199 if (netif_tx_queue_stopped(tx_queue) && netif_carrier_ok(alx->dev) && in alx_clean_tx_irq()
206 static void alx_schedule_link_check(struct alx_priv *alx) in alx_schedule_link_check() argument
208 schedule_work(&alx->link_check_wk); in alx_schedule_link_check()
211 static void alx_schedule_reset(struct alx_priv *alx) in alx_schedule_reset() argument
213 schedule_work(&alx->reset_wk); in alx_schedule_reset()
218 struct alx_priv *alx; in alx_clean_rx_irq() local
225 alx = netdev_priv(rxq->netdev); in alx_clean_rx_irq()
237 alx_schedule_reset(alx); in alx_clean_rx_irq()
263 if (alx->dev->features & NETIF_F_RXCSUM && in alx_clean_rx_irq()
287 rfd_cleaned -= alx_refill_rx_ring(alx, GFP_ATOMIC); in alx_clean_rx_irq()
291 alx_refill_rx_ring(alx, GFP_ATOMIC); in alx_clean_rx_irq()
299 struct alx_priv *alx = np->alx; in alx_poll() local
300 struct alx_hw *hw = &alx->hw; in alx_poll()
316 if (alx->hw.pdev->msix_enabled) { in alx_poll()
319 spin_lock_irqsave(&alx->irq_lock, flags); in alx_poll()
320 alx->int_mask |= ALX_ISR_TX_Q0 | ALX_ISR_RX_Q0; in alx_poll()
321 alx_write_mem32(hw, ALX_IMR, alx->int_mask); in alx_poll()
322 spin_unlock_irqrestore(&alx->irq_lock, flags); in alx_poll()
330 static bool alx_intr_handle_misc(struct alx_priv *alx, u32 intr) in alx_intr_handle_misc() argument
332 struct alx_hw *hw = &alx->hw; in alx_intr_handle_misc()
335 netif_warn(alx, hw, alx->dev, in alx_intr_handle_misc()
337 alx_schedule_reset(alx); in alx_intr_handle_misc()
342 netdev_warn(alx->dev, "alert interrupt: 0x%x\n", intr); in alx_intr_handle_misc()
349 alx->int_mask &= ~ALX_ISR_PHY; in alx_intr_handle_misc()
350 alx_write_mem32(hw, ALX_IMR, alx->int_mask); in alx_intr_handle_misc()
351 alx_schedule_link_check(alx); in alx_intr_handle_misc()
357 static irqreturn_t alx_intr_handle(struct alx_priv *alx, u32 intr) in alx_intr_handle() argument
359 struct alx_hw *hw = &alx->hw; in alx_intr_handle()
361 spin_lock(&alx->irq_lock); in alx_intr_handle()
365 intr &= alx->int_mask; in alx_intr_handle()
367 if (alx_intr_handle_misc(alx, intr)) in alx_intr_handle()
371 napi_schedule(&alx->qnapi[0]->napi); in alx_intr_handle()
373 alx->int_mask &= ~ALX_ISR_ALL_QUEUES; in alx_intr_handle()
374 alx_write_mem32(hw, ALX_IMR, alx->int_mask); in alx_intr_handle()
380 spin_unlock(&alx->irq_lock); in alx_intr_handle()
387 struct alx_hw *hw = &np->alx->hw; in alx_intr_msix_ring()
401 struct alx_priv *alx = data; in alx_intr_msix_misc() local
402 struct alx_hw *hw = &alx->hw; in alx_intr_msix_misc()
410 intr &= (alx->int_mask & ~ALX_ISR_ALL_QUEUES); in alx_intr_msix_misc()
412 if (alx_intr_handle_misc(alx, intr)) in alx_intr_msix_misc()
426 struct alx_priv *alx = data; in alx_intr_msi() local
428 return alx_intr_handle(alx, alx_read_mem32(&alx->hw, ALX_ISR)); in alx_intr_msi()
433 struct alx_priv *alx = data; in alx_intr_legacy() local
434 struct alx_hw *hw = &alx->hw; in alx_intr_legacy()
439 if (intr & ALX_ISR_DIS || !(intr & alx->int_mask)) in alx_intr_legacy()
442 return alx_intr_handle(alx, intr); in alx_intr_legacy()
450 static void alx_init_ring_ptrs(struct alx_priv *alx) in alx_init_ring_ptrs() argument
452 struct alx_hw *hw = &alx->hw; in alx_init_ring_ptrs()
453 u32 addr_hi = ((u64)alx->descmem.dma) >> 32; in alx_init_ring_ptrs()
457 for (i = 0; i < alx->num_napi; i++) { in alx_init_ring_ptrs()
458 np = alx->qnapi[i]; in alx_init_ring_ptrs()
477 alx_write_mem32(hw, ALX_TPD_RING_SZ, alx->tx_ringsz); in alx_init_ring_ptrs()
480 alx_write_mem32(hw, ALX_RRD_RING_SZ, alx->rx_ringsz); in alx_init_ring_ptrs()
481 alx_write_mem32(hw, ALX_RFD_RING_SZ, alx->rx_ringsz); in alx_init_ring_ptrs()
482 alx_write_mem32(hw, ALX_RFD_BUF_SZ, alx->rxbuf_size); in alx_init_ring_ptrs()
533 static void alx_free_buffers(struct alx_priv *alx) in alx_free_buffers() argument
537 for (i = 0; i < alx->num_txq; i++) in alx_free_buffers()
538 if (alx->qnapi[i] && alx->qnapi[i]->txq) in alx_free_buffers()
539 alx_free_txring_buf(alx->qnapi[i]->txq); in alx_free_buffers()
541 if (alx->qnapi[0] && alx->qnapi[0]->rxq) in alx_free_buffers()
542 alx_free_rxring_buf(alx->qnapi[0]->rxq); in alx_free_buffers()
545 static int alx_reinit_rings(struct alx_priv *alx) in alx_reinit_rings() argument
547 alx_free_buffers(alx); in alx_reinit_rings()
549 alx_init_ring_ptrs(alx); in alx_reinit_rings()
551 if (!alx_refill_rx_ring(alx, GFP_KERNEL)) in alx_reinit_rings()
570 struct alx_priv *alx = netdev_priv(netdev); in __alx_set_rx_mode() local
571 struct alx_hw *hw = &alx->hw; in __alx_set_rx_mode()
599 struct alx_priv *alx = netdev_priv(netdev); in alx_set_mac_address() local
600 struct alx_hw *hw = &alx->hw; in alx_set_mac_address()
616 static int alx_alloc_tx_ring(struct alx_priv *alx, struct alx_tx_queue *txq, in alx_alloc_tx_ring() argument
623 txq->tpd = alx->descmem.virt + offset; in alx_alloc_tx_ring()
624 txq->tpd_dma = alx->descmem.dma + offset; in alx_alloc_tx_ring()
630 static int alx_alloc_rx_ring(struct alx_priv *alx, struct alx_rx_queue *rxq, in alx_alloc_rx_ring() argument
637 rxq->rrd = alx->descmem.virt + offset; in alx_alloc_rx_ring()
638 rxq->rrd_dma = alx->descmem.dma + offset; in alx_alloc_rx_ring()
641 rxq->rfd = alx->descmem.virt + offset; in alx_alloc_rx_ring()
642 rxq->rfd_dma = alx->descmem.dma + offset; in alx_alloc_rx_ring()
648 static int alx_alloc_rings(struct alx_priv *alx) in alx_alloc_rings() argument
658 alx->descmem.size = sizeof(struct alx_txd) * alx->tx_ringsz * in alx_alloc_rings()
659 alx->num_txq + in alx_alloc_rings()
660 sizeof(struct alx_rrd) * alx->rx_ringsz + in alx_alloc_rings()
661 sizeof(struct alx_rfd) * alx->rx_ringsz; in alx_alloc_rings()
662 alx->descmem.virt = dma_alloc_coherent(&alx->hw.pdev->dev, in alx_alloc_rings()
663 alx->descmem.size, in alx_alloc_rings()
664 &alx->descmem.dma, GFP_KERNEL); in alx_alloc_rings()
665 if (!alx->descmem.virt) in alx_alloc_rings()
672 for (i = 0; i < alx->num_txq; i++) { in alx_alloc_rings()
673 offset = alx_alloc_tx_ring(alx, alx->qnapi[i]->txq, offset); in alx_alloc_rings()
675 netdev_err(alx->dev, "Allocation of tx buffer failed!\n"); in alx_alloc_rings()
680 offset = alx_alloc_rx_ring(alx, alx->qnapi[0]->rxq, offset); in alx_alloc_rings()
682 netdev_err(alx->dev, "Allocation of rx buffer failed!\n"); in alx_alloc_rings()
689 static void alx_free_rings(struct alx_priv *alx) in alx_free_rings() argument
693 alx_free_buffers(alx); in alx_free_rings()
695 for (i = 0; i < alx->num_txq; i++) in alx_free_rings()
696 if (alx->qnapi[i] && alx->qnapi[i]->txq) in alx_free_rings()
697 kfree(alx->qnapi[i]->txq->bufs); in alx_free_rings()
699 if (alx->qnapi[0] && alx->qnapi[0]->rxq) in alx_free_rings()
700 kfree(alx->qnapi[0]->rxq->bufs); in alx_free_rings()
702 if (alx->descmem.virt) in alx_free_rings()
703 dma_free_coherent(&alx->hw.pdev->dev, in alx_free_rings()
704 alx->descmem.size, in alx_free_rings()
705 alx->descmem.virt, in alx_free_rings()
706 alx->descmem.dma); in alx_free_rings()
709 static void alx_free_napis(struct alx_priv *alx) in alx_free_napis() argument
714 for (i = 0; i < alx->num_napi; i++) { in alx_free_napis()
715 np = alx->qnapi[i]; in alx_free_napis()
723 alx->qnapi[i] = NULL; in alx_free_napis()
738 static int alx_alloc_napis(struct alx_priv *alx) in alx_alloc_napis() argument
745 alx->int_mask &= ~ALX_ISR_ALL_QUEUES; in alx_alloc_napis()
748 for (i = 0; i < alx->num_napi; i++) { in alx_alloc_napis()
753 np->alx = alx; in alx_alloc_napis()
754 netif_napi_add(alx->dev, &np->napi, alx_poll); in alx_alloc_napis()
755 alx->qnapi[i] = np; in alx_alloc_napis()
759 for (i = 0; i < alx->num_txq; i++) { in alx_alloc_napis()
760 np = alx->qnapi[i]; in alx_alloc_napis()
769 txq->count = alx->tx_ringsz; in alx_alloc_napis()
770 txq->netdev = alx->dev; in alx_alloc_napis()
771 txq->dev = &alx->hw.pdev->dev; in alx_alloc_napis()
773 alx->int_mask |= tx_vect_mask[i]; in alx_alloc_napis()
777 np = alx->qnapi[0]; in alx_alloc_napis()
783 rxq->np = alx->qnapi[0]; in alx_alloc_napis()
785 rxq->count = alx->rx_ringsz; in alx_alloc_napis()
786 rxq->netdev = alx->dev; in alx_alloc_napis()
787 rxq->dev = &alx->hw.pdev->dev; in alx_alloc_napis()
789 alx->int_mask |= rx_vect_mask[0]; in alx_alloc_napis()
794 netdev_err(alx->dev, "error allocating internal structures\n"); in alx_alloc_napis()
795 alx_free_napis(alx); in alx_alloc_napis()
806 static void alx_config_vector_mapping(struct alx_priv *alx) in alx_config_vector_mapping() argument
808 struct alx_hw *hw = &alx->hw; in alx_config_vector_mapping()
812 if (alx->hw.pdev->msix_enabled) { in alx_config_vector_mapping()
814 for (i = 0, vector = 1; i < alx->num_txq; i++, vector++) { in alx_config_vector_mapping()
829 static int alx_enable_msix(struct alx_priv *alx) in alx_enable_msix() argument
837 err = pci_alloc_irq_vectors(alx->hw.pdev, num_vec, num_vec, in alx_enable_msix()
840 netdev_warn(alx->dev, "Enabling MSI-X interrupts failed!\n"); in alx_enable_msix()
844 alx->num_vec = num_vec; in alx_enable_msix()
845 alx->num_napi = num_vec - 1; in alx_enable_msix()
846 alx->num_txq = num_txq; in alx_enable_msix()
847 alx->num_rxq = num_rxq; in alx_enable_msix()
852 static int alx_request_msix(struct alx_priv *alx) in alx_request_msix() argument
854 struct net_device *netdev = alx->dev; in alx_request_msix()
857 err = request_irq(pci_irq_vector(alx->hw.pdev, 0), alx_intr_msix_misc, in alx_request_msix()
858 0, netdev->name, alx); in alx_request_msix()
862 for (i = 0; i < alx->num_napi; i++) { in alx_request_msix()
863 struct alx_napi *np = alx->qnapi[i]; in alx_request_msix()
880 err = request_irq(pci_irq_vector(alx->hw.pdev, vector), in alx_request_msix()
888 free_irq(pci_irq_vector(alx->hw.pdev, free_vector++), alx); in alx_request_msix()
892 free_irq(pci_irq_vector(alx->hw.pdev,free_vector++), in alx_request_msix()
893 alx->qnapi[i]); in alx_request_msix()
899 static int alx_init_intr(struct alx_priv *alx) in alx_init_intr() argument
903 ret = pci_alloc_irq_vectors(alx->hw.pdev, 1, 1, in alx_init_intr()
908 alx->num_vec = 1; in alx_init_intr()
909 alx->num_napi = 1; in alx_init_intr()
910 alx->num_txq = 1; in alx_init_intr()
911 alx->num_rxq = 1; in alx_init_intr()
915 static void alx_irq_enable(struct alx_priv *alx) in alx_irq_enable() argument
917 struct alx_hw *hw = &alx->hw; in alx_irq_enable()
922 alx_write_mem32(hw, ALX_IMR, alx->int_mask); in alx_irq_enable()
925 if (alx->hw.pdev->msix_enabled) { in alx_irq_enable()
927 for (i = 0; i < alx->num_vec; i++) in alx_irq_enable()
932 static void alx_irq_disable(struct alx_priv *alx) in alx_irq_disable() argument
934 struct alx_hw *hw = &alx->hw; in alx_irq_disable()
941 if (alx->hw.pdev->msix_enabled) { in alx_irq_disable()
942 for (i = 0; i < alx->num_vec; i++) { in alx_irq_disable()
944 synchronize_irq(pci_irq_vector(alx->hw.pdev, i)); in alx_irq_disable()
947 synchronize_irq(pci_irq_vector(alx->hw.pdev, 0)); in alx_irq_disable()
951 static int alx_realloc_resources(struct alx_priv *alx) in alx_realloc_resources() argument
955 alx_free_rings(alx); in alx_realloc_resources()
956 alx_free_napis(alx); in alx_realloc_resources()
957 pci_free_irq_vectors(alx->hw.pdev); in alx_realloc_resources()
959 err = alx_init_intr(alx); in alx_realloc_resources()
963 err = alx_alloc_napis(alx); in alx_realloc_resources()
967 err = alx_alloc_rings(alx); in alx_realloc_resources()
974 static int alx_request_irq(struct alx_priv *alx) in alx_request_irq() argument
976 struct pci_dev *pdev = alx->hw.pdev; in alx_request_irq()
977 struct alx_hw *hw = &alx->hw; in alx_request_irq()
983 if (alx->hw.pdev->msix_enabled) { in alx_request_irq()
985 err = alx_request_msix(alx); in alx_request_irq()
990 err = alx_realloc_resources(alx); in alx_request_irq()
995 if (alx->hw.pdev->msi_enabled) { in alx_request_irq()
999 alx->dev->name, alx); in alx_request_irq()
1004 pci_free_irq_vectors(alx->hw.pdev); in alx_request_irq()
1009 alx->dev->name, alx); in alx_request_irq()
1012 alx_config_vector_mapping(alx); in alx_request_irq()
1014 netdev_err(alx->dev, "IRQ registration failed!\n"); in alx_request_irq()
1018 static void alx_free_irq(struct alx_priv *alx) in alx_free_irq() argument
1020 struct pci_dev *pdev = alx->hw.pdev; in alx_free_irq()
1023 free_irq(pci_irq_vector(pdev, 0), alx); in alx_free_irq()
1024 if (alx->hw.pdev->msix_enabled) { in alx_free_irq()
1025 for (i = 0; i < alx->num_napi; i++) in alx_free_irq()
1026 free_irq(pci_irq_vector(pdev, i + 1), alx->qnapi[i]); in alx_free_irq()
1032 static int alx_identify_hw(struct alx_priv *alx) in alx_identify_hw() argument
1034 struct alx_hw *hw = &alx->hw; in alx_identify_hw()
1045 static int alx_init_sw(struct alx_priv *alx) in alx_init_sw() argument
1047 struct pci_dev *pdev = alx->hw.pdev; in alx_init_sw()
1048 struct alx_hw *hw = &alx->hw; in alx_init_sw()
1051 err = alx_identify_hw(alx); in alx_init_sw()
1057 alx->hw.lnk_patch = in alx_init_sw()
1064 hw->mtu = alx->dev->mtu; in alx_init_sw()
1065 alx->rxbuf_size = ALX_MAX_FRAME_LEN(hw->mtu); in alx_init_sw()
1067 alx->dev->min_mtu = 34; in alx_init_sw()
1068 alx->dev->max_mtu = ALX_MAX_FRAME_LEN(ALX_MAX_FRAME_SIZE); in alx_init_sw()
1069 alx->tx_ringsz = 256; in alx_init_sw()
1070 alx->rx_ringsz = 512; in alx_init_sw()
1072 alx->int_mask = ALX_ISR_MISC; in alx_init_sw()
1074 hw->ith_tpd = alx->tx_ringsz / 3; in alx_init_sw()
1093 mutex_init(&alx->mtx); in alx_init_sw()
1108 static void alx_netif_stop(struct alx_priv *alx) in alx_netif_stop() argument
1112 netif_trans_update(alx->dev); in alx_netif_stop()
1113 if (netif_carrier_ok(alx->dev)) { in alx_netif_stop()
1114 netif_carrier_off(alx->dev); in alx_netif_stop()
1115 netif_tx_disable(alx->dev); in alx_netif_stop()
1116 for (i = 0; i < alx->num_napi; i++) in alx_netif_stop()
1117 napi_disable(&alx->qnapi[i]->napi); in alx_netif_stop()
1121 static void alx_halt(struct alx_priv *alx) in alx_halt() argument
1123 struct alx_hw *hw = &alx->hw; in alx_halt()
1125 lockdep_assert_held(&alx->mtx); in alx_halt()
1127 alx_netif_stop(alx); in alx_halt()
1135 alx_irq_disable(alx); in alx_halt()
1136 alx_free_buffers(alx); in alx_halt()
1139 static void alx_configure(struct alx_priv *alx) in alx_configure() argument
1141 struct alx_hw *hw = &alx->hw; in alx_configure()
1145 __alx_set_rx_mode(alx->dev); in alx_configure()
1150 static void alx_activate(struct alx_priv *alx) in alx_activate() argument
1152 lockdep_assert_held(&alx->mtx); in alx_activate()
1155 alx_reinit_rings(alx); in alx_activate()
1156 alx_configure(alx); in alx_activate()
1159 alx_write_mem32(&alx->hw, ALX_ISR, ~(u32)ALX_ISR_DIS); in alx_activate()
1161 alx_irq_enable(alx); in alx_activate()
1163 alx_schedule_link_check(alx); in alx_activate()
1166 static void alx_reinit(struct alx_priv *alx) in alx_reinit() argument
1168 lockdep_assert_held(&alx->mtx); in alx_reinit()
1170 alx_halt(alx); in alx_reinit()
1171 alx_activate(alx); in alx_reinit()
1176 struct alx_priv *alx = netdev_priv(netdev); in alx_change_mtu() local
1180 alx->hw.mtu = mtu; in alx_change_mtu()
1181 alx->rxbuf_size = max(max_frame, ALX_DEF_RXBUF_SIZE); in alx_change_mtu()
1184 mutex_lock(&alx->mtx); in alx_change_mtu()
1185 alx_reinit(alx); in alx_change_mtu()
1186 mutex_unlock(&alx->mtx); in alx_change_mtu()
1191 static void alx_netif_start(struct alx_priv *alx) in alx_netif_start() argument
1195 netif_tx_wake_all_queues(alx->dev); in alx_netif_start()
1196 for (i = 0; i < alx->num_napi; i++) in alx_netif_start()
1197 napi_enable(&alx->qnapi[i]->napi); in alx_netif_start()
1198 netif_carrier_on(alx->dev); in alx_netif_start()
1201 static int __alx_open(struct alx_priv *alx, bool resume) in __alx_open() argument
1205 err = alx_enable_msix(alx); in __alx_open()
1207 err = alx_init_intr(alx); in __alx_open()
1213 netif_carrier_off(alx->dev); in __alx_open()
1215 err = alx_alloc_napis(alx); in __alx_open()
1219 err = alx_alloc_rings(alx); in __alx_open()
1223 alx_configure(alx); in __alx_open()
1225 err = alx_request_irq(alx); in __alx_open()
1233 alx_reinit_rings(alx); in __alx_open()
1235 netif_set_real_num_tx_queues(alx->dev, alx->num_txq); in __alx_open()
1236 netif_set_real_num_rx_queues(alx->dev, alx->num_rxq); in __alx_open()
1239 alx_write_mem32(&alx->hw, ALX_ISR, ~(u32)ALX_ISR_DIS); in __alx_open()
1241 alx_irq_enable(alx); in __alx_open()
1244 netif_tx_start_all_queues(alx->dev); in __alx_open()
1246 alx_schedule_link_check(alx); in __alx_open()
1250 alx_free_rings(alx); in __alx_open()
1251 alx_free_napis(alx); in __alx_open()
1253 pci_free_irq_vectors(alx->hw.pdev); in __alx_open()
1257 static void __alx_stop(struct alx_priv *alx) in __alx_stop() argument
1259 lockdep_assert_held(&alx->mtx); in __alx_stop()
1261 alx_free_irq(alx); in __alx_stop()
1263 cancel_work_sync(&alx->link_check_wk); in __alx_stop()
1264 cancel_work_sync(&alx->reset_wk); in __alx_stop()
1266 alx_halt(alx); in __alx_stop()
1267 alx_free_rings(alx); in __alx_stop()
1268 alx_free_napis(alx); in __alx_stop()
1289 static void alx_check_link(struct alx_priv *alx) in alx_check_link() argument
1291 struct alx_hw *hw = &alx->hw; in alx_check_link()
1296 lockdep_assert_held(&alx->mtx); in alx_check_link()
1308 spin_lock_irqsave(&alx->irq_lock, flags); in alx_check_link()
1309 alx->int_mask |= ALX_ISR_PHY; in alx_check_link()
1310 alx_write_mem32(hw, ALX_IMR, alx->int_mask); in alx_check_link()
1311 spin_unlock_irqrestore(&alx->irq_lock, flags); in alx_check_link()
1317 netif_info(alx, link, alx->dev, in alx_check_link()
1324 alx_netif_start(alx); in alx_check_link()
1327 alx_netif_stop(alx); in alx_check_link()
1328 netif_info(alx, link, alx->dev, "Link Down\n"); in alx_check_link()
1332 alx_irq_disable(alx); in alx_check_link()
1335 err = alx_reinit_rings(alx); in alx_check_link()
1338 alx_configure(alx); in alx_check_link()
1341 alx_irq_enable(alx); in alx_check_link()
1347 alx_schedule_reset(alx); in alx_check_link()
1352 struct alx_priv *alx = netdev_priv(netdev); in alx_open() local
1355 mutex_lock(&alx->mtx); in alx_open()
1356 ret = __alx_open(alx, false); in alx_open()
1357 mutex_unlock(&alx->mtx); in alx_open()
1364 struct alx_priv *alx = netdev_priv(netdev); in alx_stop() local
1366 mutex_lock(&alx->mtx); in alx_stop()
1367 __alx_stop(alx); in alx_stop()
1368 mutex_unlock(&alx->mtx); in alx_stop()
1375 struct alx_priv *alx; in alx_link_check() local
1377 alx = container_of(work, struct alx_priv, link_check_wk); in alx_link_check()
1379 mutex_lock(&alx->mtx); in alx_link_check()
1380 alx_check_link(alx); in alx_link_check()
1381 mutex_unlock(&alx->mtx); in alx_link_check()
1386 struct alx_priv *alx = container_of(work, struct alx_priv, reset_wk); in alx_reset() local
1388 mutex_lock(&alx->mtx); in alx_reset()
1389 alx_reinit(alx); in alx_reset()
1390 mutex_unlock(&alx->mtx); in alx_reset()
1534 struct alx_priv *alx; in alx_start_xmit_ring() local
1538 alx = netdev_priv(txq->netdev); in alx_start_xmit_ring()
1561 alx_write_mem16(&alx->hw, txq->p_reg, txq->write_idx); in alx_start_xmit_ring()
1576 struct alx_priv *alx = netdev_priv(netdev); in alx_start_xmit() local
1577 return alx_start_xmit_ring(skb, alx_tx_queue_mapping(alx, skb)); in alx_start_xmit()
1582 struct alx_priv *alx = netdev_priv(dev); in alx_tx_timeout() local
1584 alx_schedule_reset(alx); in alx_tx_timeout()
1590 struct alx_priv *alx = netdev_priv(netdev); in alx_mdio_read() local
1591 struct alx_hw *hw = &alx->hw; in alx_mdio_read()
1611 struct alx_priv *alx = netdev_priv(netdev); in alx_mdio_write() local
1612 struct alx_hw *hw = &alx->hw; in alx_mdio_write()
1625 struct alx_priv *alx = netdev_priv(netdev); in alx_ioctl() local
1630 return mdio_mii_ioctl(&alx->hw.mdio, if_mii(ifr), cmd); in alx_ioctl()
1636 struct alx_priv *alx = netdev_priv(netdev); in alx_poll_controller() local
1639 if (alx->hw.pdev->msix_enabled) { in alx_poll_controller()
1640 alx_intr_msix_misc(0, alx); in alx_poll_controller()
1641 for (i = 0; i < alx->num_txq; i++) in alx_poll_controller()
1642 alx_intr_msix_ring(0, alx->qnapi[i]); in alx_poll_controller()
1643 } else if (alx->hw.pdev->msi_enabled) in alx_poll_controller()
1644 alx_intr_msi(0, alx); in alx_poll_controller()
1646 alx_intr_legacy(0, alx); in alx_poll_controller()
1653 struct alx_priv *alx = netdev_priv(dev); in alx_get_stats64() local
1654 struct alx_hw_stats *hw_stats = &alx->hw.stats; in alx_get_stats64()
1656 spin_lock(&alx->stats_lock); in alx_get_stats64()
1658 alx_update_hw_stats(&alx->hw); in alx_get_stats64()
1694 spin_unlock(&alx->stats_lock); in alx_get_stats64()
1717 struct alx_priv *alx; in alx_probe() local
1756 netdev = alloc_etherdev_mqs(sizeof(*alx), in alx_probe()
1764 alx = netdev_priv(netdev); in alx_probe()
1765 spin_lock_init(&alx->hw.mdio_lock); in alx_probe()
1766 spin_lock_init(&alx->irq_lock); in alx_probe()
1767 spin_lock_init(&alx->stats_lock); in alx_probe()
1768 alx->dev = netdev; in alx_probe()
1769 alx->hw.pdev = pdev; in alx_probe()
1770 alx->msg_enable = NETIF_MSG_LINK | NETIF_MSG_HW | NETIF_MSG_IFUP | in alx_probe()
1772 hw = &alx->hw; in alx_probe()
1773 pci_set_drvdata(pdev, alx); in alx_probe()
1790 err = alx_init_sw(alx); in alx_probe()
1796 mutex_lock(&alx->mtx); in alx_probe()
1854 mutex_unlock(&alx->mtx); in alx_probe()
1856 INIT_WORK(&alx->link_check_wk, alx_link_check); in alx_probe()
1857 INIT_WORK(&alx->reset_wk, alx_reset); in alx_probe()
1873 mutex_unlock(&alx->mtx); in alx_probe()
1887 struct alx_priv *alx = pci_get_drvdata(pdev); in alx_remove() local
1888 struct alx_hw *hw = &alx->hw; in alx_remove()
1893 unregister_netdev(alx->dev); in alx_remove()
1899 mutex_destroy(&alx->mtx); in alx_remove()
1901 free_netdev(alx->dev); in alx_remove()
1906 struct alx_priv *alx = dev_get_drvdata(dev); in alx_suspend() local
1908 if (!netif_running(alx->dev)) in alx_suspend()
1912 netif_device_detach(alx->dev); in alx_suspend()
1914 mutex_lock(&alx->mtx); in alx_suspend()
1915 __alx_stop(alx); in alx_suspend()
1916 mutex_unlock(&alx->mtx); in alx_suspend()
1924 struct alx_priv *alx = dev_get_drvdata(dev); in alx_resume() local
1925 struct alx_hw *hw = &alx->hw; in alx_resume()
1929 mutex_lock(&alx->mtx); in alx_resume()
1932 if (!netif_running(alx->dev)) { in alx_resume()
1937 err = __alx_open(alx, true); in alx_resume()
1941 netif_device_attach(alx->dev); in alx_resume()
1944 mutex_unlock(&alx->mtx); in alx_resume()
1954 struct alx_priv *alx = pci_get_drvdata(pdev); in alx_pci_error_detected() local
1955 struct net_device *netdev = alx->dev; in alx_pci_error_detected()
1960 mutex_lock(&alx->mtx); in alx_pci_error_detected()
1964 alx_halt(alx); in alx_pci_error_detected()
1972 mutex_unlock(&alx->mtx); in alx_pci_error_detected()
1979 struct alx_priv *alx = pci_get_drvdata(pdev); in alx_pci_error_slot_reset() local
1980 struct alx_hw *hw = &alx->hw; in alx_pci_error_slot_reset()
1985 mutex_lock(&alx->mtx); in alx_pci_error_slot_reset()
1998 mutex_unlock(&alx->mtx); in alx_pci_error_slot_reset()
2005 struct alx_priv *alx = pci_get_drvdata(pdev); in alx_pci_error_resume() local
2006 struct net_device *netdev = alx->dev; in alx_pci_error_resume()
2010 mutex_lock(&alx->mtx); in alx_pci_error_resume()
2013 alx_activate(alx); in alx_pci_error_resume()
2017 mutex_unlock(&alx->mtx); in alx_pci_error_resume()