Lines Matching refs:rx_info

396 struct rx_info {  struct
417 struct rx_info rx_info; argument
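
For orientation, a minimal sketch of what struct rx_info appears to contain, reconstructed purely from the member accesses listed below rather than copied from the driver; the field order, the exact types, and the u16/unsigned choices are assumptions:

    /* Inferred sketch only; assumes <linux/spinlock.h>, <linux/skbuff.h>,
     * <linux/types.h>.  NR_RX_DESC is the driver's ring size; every type
     * here is a guess based on how the member is used in the lines below. */
    struct rx_info {
            spinlock_t      lock;              /* taken in rx_refill() and setup/teardown    */
            int             up;                /* non-zero while the RX path is enabled      */
            unsigned long   idle;              /* bit 0 tested by kick_rx() (line 461)       */

            struct sk_buff  *skbs[NR_RX_DESC]; /* skb backing each hardware descriptor       */

            __le32          *next_rx_desc;     /* prefetched in the IRQ path (line 1422)     */
            u16             next_rx;           /* consumer index                             */
            u16             next_empty;        /* producer index                             */
            unsigned        nr_used;           /* shown only in the debug print (line 518)   */

            __le32          *descs;            /* CPU view of the descriptor ring            */
            dma_addr_t      phy_descs;         /* bus address written to RXDP (line 724)     */
    };

The consumer/producer split between next_rx and next_empty is what the nr_rx_empty() arithmetic at line 499 depends on.
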
461 if (test_and_clear_bit(0, &dev->rx_info.idle)) { in kick_rx()
463 writel(dev->rx_info.phy_descs + in kick_rx()
464 (4 * DESC_SIZE * dev->rx_info.next_rx), in kick_rx()
466 if (dev->rx_info.next_rx == dev->rx_info.next_empty) in kick_rx()
499 #define nr_rx_empty(dev) ((NR_RX_DESC-2 + dev->rx_info.next_rx - dev->rx_info.next_empty) % NR_RX_DESC)
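
The macro at line 499 measures free ring slots with modular arithmetic, and the -2 appears to act as a small guard band so the reported count never reaches the full ring size. A standalone sanity check of that arithmetic, with the macro rewritten to take the two indices as parameters and NR_RX_DESC assumed to be 64 purely for illustration:

    #include <stdio.h>

    #define NR_RX_DESC 64
    #define nr_rx_empty(next_rx, next_empty) \
            ((NR_RX_DESC - 2 + (next_rx) - (next_empty)) % NR_RX_DESC)

    int main(void)
    {
            /* Freshly initialised ring: both indices at 0. */
            printf("%d\n", nr_rx_empty(0, 0));   /* 62 = full ring minus the guard band */

            /* Ten buffers posted, none consumed yet. */
            printf("%d\n", nr_rx_empty(0, 10));  /* 52 */

            /* Producer index has wrapped past the consumer index. */
            printf("%d\n", nr_rx_empty(60, 5));  /* 53 */
            return 0;
    }

ns83820_rx_kick() (line 807) schedules a refill once this count climbs above NR_RX_DESC*3/4, i.e. once more than three quarters of the slots are waiting for fresh buffers.
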
507 next_empty = dev->rx_info.next_empty; in ns83820_add_rx_skb()
517 dev->rx_info.next_empty, in ns83820_add_rx_skb()
518 dev->rx_info.nr_used, in ns83820_add_rx_skb()
519 dev->rx_info.next_rx in ns83820_add_rx_skb()
523 sg = dev->rx_info.descs + (next_empty * DESC_SIZE); in ns83820_add_rx_skb()
524 BUG_ON(NULL != dev->rx_info.skbs[next_empty]); in ns83820_add_rx_skb()
525 dev->rx_info.skbs[next_empty] = skb; in ns83820_add_rx_skb()
527 dev->rx_info.next_empty = (next_empty + 1) % NR_RX_DESC; in ns83820_add_rx_skb()
533 if (likely(next_empty != dev->rx_info.next_rx)) in ns83820_add_rx_skb()
534 …dev->rx_info.descs[((NR_RX_DESC + next_empty - 1) % NR_RX_DESC) * DESC_SIZE] = cpu_to_le32(dev->rx… in ns83820_add_rx_skb()
550 spin_lock_irqsave(&dev->rx_info.lock, flags); in rx_refill()
562 spin_lock_irqsave(&dev->rx_info.lock, flags); in rx_refill()
565 spin_unlock_irqrestore(&dev->rx_info.lock, flags); in rx_refill()
572 spin_unlock_irqrestore(&dev->rx_info.lock, flags); in rx_refill()
589 if (dev->rx_info.up) in queue_refill()
595 build_rx_desc(dev, dev->rx_info.descs + (DESC_SIZE * i), 0, 0, CMDSTS_OWN, 0); in clear_rx_desc()
715 dev->rx_info.idle = 1; in ns83820_setup_rx()
716 dev->rx_info.next_rx = 0; in ns83820_setup_rx()
717 dev->rx_info.next_rx_desc = dev->rx_info.descs; in ns83820_setup_rx()
718 dev->rx_info.next_empty = 0; in ns83820_setup_rx()
724 writel(dev->rx_info.phy_descs, dev->base + RXDP); in ns83820_setup_rx()
730 spin_lock_irq(&dev->rx_info.lock); in ns83820_setup_rx()
737 dev->rx_info.up = 1; in ns83820_setup_rx()
760 spin_unlock_irq(&dev->rx_info.lock); in ns83820_setup_rx()
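
Read together, the ns83820_setup_rx() fragments above give the RX bring-up order: reset the software indices, hand the ring's bus address to the chip, then raise rx_info.up with the lock held. A hypothetical condensation follows; the per-descriptor initialisation, register programming and refill that happen between these steps are omitted, since this search did not match those lines:

    /* Sketch only, not the driver's actual function body. */
    dev->rx_info.idle = 1;                             /* ring starts idle           */
    dev->rx_info.next_rx = 0;                          /* consumer index             */
    dev->rx_info.next_rx_desc = dev->rx_info.descs;    /* next descriptor to inspect */
    dev->rx_info.next_empty = 0;                       /* producer index             */

    writel(dev->rx_info.phy_descs, dev->base + RXDP);  /* point the NIC at the ring  */

    spin_lock_irq(&dev->rx_info.lock);                 /* lock, local IRQs off       */
    dev->rx_info.up = 1;                               /* receive path is now live   */
    /* ... remaining enable steps ... */
    spin_unlock_irq(&dev->rx_info.lock);
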
779 dev->rx_info.up = 0; in ns83820_cleanup_rx()
790 struct sk_buff *skb = dev->rx_info.skbs[i]; in ns83820_cleanup_rx()
791 dev->rx_info.skbs[i] = NULL; in ns83820_cleanup_rx()
801 if (dev->rx_info.up) { in ns83820_rx_kick()
807 if (dev->rx_info.up && nr_rx_empty(dev) > NR_RX_DESC*3/4) in ns83820_rx_kick()
811 if (dev->rx_info.idle) in ns83820_rx_kick()
821 struct rx_info *info = &dev->rx_info;
832 (long)(dev->rx_info.phy_descs),
833 (int)dev->rx_info.next_rx,
834 (dev->rx_info.descs + (DESC_SIZE * dev->rx_info.next_rx)),
835 (int)dev->rx_info.next_empty,
836 (dev->rx_info.descs + (DESC_SIZE * dev->rx_info.next_empty))
1416 dev->rx_info.idle = 1;
1422 prefetch(dev->rx_info.next_rx_desc);
1447 if ((ISR_RXRCMP & isr) && dev->rx_info.up)
1534 dev->rx_info.up = 0;
1924 spin_lock_init(&dev->rx_info.lock);
1946 dev->rx_info.descs = dma_alloc_coherent(&pci_dev->dev,
1948 &dev->rx_info.phy_descs, GFP_KERNEL);
1950 if (!dev->base || !dev->tx_descs || !dev->rx_info.descs)
1955 dev->rx_info.descs, (long)dev->rx_info.phy_descs);
2193 dev->rx_info.descs, dev->rx_info.phy_descs);
2217 dev->rx_info.descs, dev->rx_info.phy_descs);
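
The last few references show the descriptor ring's lifetime: dma_alloc_coherent() in the probe path fills in both the CPU pointer (rx_info.descs) and the bus address (rx_info.phy_descs), and the same pair is handed back on the error and teardown paths. A generic sketch of that pairing; RING_BYTES and the error label are placeholders, since the driver's real size expression and labels are on lines this search did not match:

    #include <linux/dma-mapping.h>

    /* Allocation: one coherent block holding the RX descriptor ring. */
    dev->rx_info.descs = dma_alloc_coherent(&pci_dev->dev, RING_BYTES,
                                            &dev->rx_info.phy_descs, GFP_KERNEL);
    if (!dev->rx_info.descs)
            goto out_free;                  /* hypothetical error label */

    /* Teardown / error path: must pass back the same size, CPU pointer
     * and DMA handle that the allocation returned. */
    dma_free_coherent(&pci_dev->dev, RING_BYTES,
                      dev->rx_info.descs, dev->rx_info.phy_descs);
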