Lines Matching +full:num +full:- +full:irqs
1 // SPDX-License-Identifier: GPL-2.0+
27 * Copyright (C) 2003-2005 PLX Technology, Inc.
28 * Copyright (C) 2014 Ricardo Ribalda - Qtechnology/AS
39 #include <linux/dma-mapping.h>
87 EP_INFO("ep-a",
89 EP_INFO("ep-b",
91 EP_INFO("ep-c",
93 EP_INFO("ep-d",
95 EP_INFO("ep-e",
97 EP_INFO("ep-f",
99 EP_INFO("ep-g",
101 EP_INFO("ep-h",
126 /* mode 0 == ep-{a,b,c,d} 1K fifo each
127 * mode 1 == ep-{a,b} 2K fifo each, ep-{c,d} unavailable
128 * mode 2 == ep-a 2K fifo, ep-{b,c} 1K each, ep-d unavailable
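The fifo_mode values above carve up the same 4 KB of data-endpoint FIFO RAM; set_fifo_mode(), whose matching lines appear further down, applies the chosen mode by writing it into dev->regs->fifoctl. A hypothetical summary table, derived only from the comment above and not part of the driver:

	/* Hypothetical summary of the fifo_mode allocation described above;
	 * the driver itself programs this through the fifoctl register in
	 * set_fifo_mode(), it does not keep a table like this.
	 */
	static const unsigned int fifo_kbytes[3][4] = {	/* ep-a, ep-b, ep-c, ep-d */
		[0] = { 1, 1, 1, 1 },	/* mode 0: 1K each */
		[1] = { 2, 2, 0, 0 },	/* mode 1: 2K each, ep-c/ep-d unavailable */
		[2] = { 2, 1, 1, 0 },	/* mode 2: ep-a 2K, ep-b/ep-c 1K, ep-d unavailable */
	};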
135 /* enable_suspend -- When enabled, the driver will respond to
138 * self-powered devices
167 /*-------------------------------------------------------------------------*/
170 u32 tmp = readl(&ep->dev->regs->pciirqenb0); in enable_pciirqenb()
172 if (ep->dev->quirks & PLX_LEGACY) in enable_pciirqenb()
173 tmp |= BIT(ep->num); in enable_pciirqenb()
175 tmp |= BIT(ep_bit[ep->num]); in enable_pciirqenb()
176 writel(tmp, &ep->dev->regs->pciirqenb0); in enable_pciirqenb()
194 if (!_ep || !desc || ep->desc || _ep->name == ep0name || in net2280_enable()
195 desc->bDescriptorType != USB_DT_ENDPOINT) { in net2280_enable()
197 return -EINVAL; in net2280_enable()
199 dev = ep->dev; in net2280_enable()
200 if (!dev->driver || dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_enable()
201 ret = -ESHUTDOWN; in net2280_enable()
206 if ((desc->bEndpointAddress & 0x0f) == EP_DONTUSE) { in net2280_enable()
207 ret = -EDOM; in net2280_enable()
211 if (dev->quirks & PLX_PCIE) { in net2280_enable()
212 if ((desc->bEndpointAddress & 0x0f) >= 0x0c) { in net2280_enable()
213 ret = -EDOM; in net2280_enable()
216 ep->is_in = !!usb_endpoint_dir_in(desc); in net2280_enable()
217 if (dev->enhanced_mode && ep->is_in && ep_key[ep->num]) { in net2280_enable()
218 ret = -EINVAL; in net2280_enable()
223 /* sanity check ep-e/ep-f since their fifos are small */ in net2280_enable()
225 if (ep->num > 4 && max > 64 && (dev->quirks & PLX_LEGACY)) { in net2280_enable()
226 ret = -ERANGE; in net2280_enable()
230 spin_lock_irqsave(&dev->lock, flags); in net2280_enable()
231 _ep->maxpacket = max; in net2280_enable()
232 ep->desc = desc; in net2280_enable()
235 ep->stopped = 0; in net2280_enable()
236 ep->wedged = 0; in net2280_enable()
237 ep->out_overflow = 0; in net2280_enable()
239 /* set speed-dependent max packet; may kick in high bandwidth */ in net2280_enable()
243 writel(BIT(FIFO_FLUSH), &ep->regs->ep_stat); in net2280_enable()
245 if ((dev->quirks & PLX_PCIE) && dev->enhanced_mode) { in net2280_enable()
246 tmp = readl(&ep->cfg->ep_cfg); in net2280_enable()
249 ret = -EINVAL; in net2280_enable()
250 spin_unlock_irqrestore(&dev->lock, flags); in net2280_enable()
253 if (ep->is_in) in net2280_enable()
258 type = (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK); in net2280_enable()
261 if (dev->chiprev == 0100 && in net2280_enable()
262 dev->gadget.speed == USB_SPEED_HIGH && in net2280_enable()
263 !(desc->bEndpointAddress & USB_DIR_IN)) in net2280_enable()
265 &ep->regs->ep_rsp); in net2280_enable()
268 if ((dev->gadget.speed == USB_SPEED_SUPER && max != 1024) || in net2280_enable()
269 (dev->gadget.speed == USB_SPEED_HIGH && max != 512) || in net2280_enable()
270 (dev->gadget.speed == USB_SPEED_FULL && max > 64)) { in net2280_enable()
271 spin_unlock_irqrestore(&dev->lock, flags); in net2280_enable()
272 ret = -ERANGE; in net2280_enable()
276 ep->is_iso = (type == USB_ENDPOINT_XFER_ISOC); in net2280_enable()
278 if (dev->quirks & PLX_LEGACY) { in net2280_enable()
280 tmp |= desc->bEndpointAddress; in net2280_enable()
284 ep->is_in = (tmp & USB_DIR_IN) != 0; in net2280_enable()
287 if (dev->enhanced_mode && ep->is_in) { in net2280_enable()
293 tmp |= (ep->is_in << ENDPOINT_DIRECTION); in net2280_enable()
297 if (!dev->enhanced_mode) in net2280_enable()
299 tmp |= (ep->ep.maxburst << MAX_BURST_SIZE); in net2280_enable()
306 if (!ep->is_in) in net2280_enable()
307 writel(BIT(SET_NAK_OUT_PACKETS), &ep->regs->ep_rsp); in net2280_enable()
308 else if (!(dev->quirks & PLX_2280)) { in net2280_enable()
313 BIT(CLEAR_NAK_OUT_PACKETS_MODE), &ep->regs->ep_rsp); in net2280_enable()
316 if (dev->quirks & PLX_PCIE) in net2280_enable()
318 writel(tmp, &ep->cfg->ep_cfg); in net2280_enable()
320 /* enable irqs */ in net2280_enable()
321 if (!ep->dma) { /* pio, per-packet */ in net2280_enable()
326 if (dev->quirks & PLX_2280) in net2280_enable()
327 tmp |= readl(&ep->regs->ep_irqenb); in net2280_enable()
328 writel(tmp, &ep->regs->ep_irqenb); in net2280_enable()
329 } else { /* dma, per-request */ in net2280_enable()
330 tmp = BIT((8 + ep->num)); /* completion */ in net2280_enable()
331 tmp |= readl(&dev->regs->pciirqenb1); in net2280_enable()
332 writel(tmp, &dev->regs->pciirqenb1); in net2280_enable()
335 * advance the queue; do it pio-style, by hand. in net2280_enable()
338 if ((desc->bEndpointAddress & USB_DIR_IN) == 0) { in net2280_enable()
340 writel(tmp, &ep->regs->ep_irqenb); in net2280_enable()
346 tmp = desc->bEndpointAddress; in net2280_enable()
347 ep_dbg(dev, "enabled %s (ep%d%s-%s) %s max %04x\n", in net2280_enable()
348 _ep->name, tmp & 0x0f, DIR_STRING(tmp), in net2280_enable()
349 type_string(desc->bmAttributes), in net2280_enable()
350 ep->dma ? "dma" : "pio", max); in net2280_enable()
353 spin_unlock_irqrestore(&dev->lock, flags); in net2280_enable()
357 dev_err(&ep->dev->pdev->dev, "%s: error=%d\n", __func__, ret); in net2280_enable()
371 return -ENODEV; in handshake()
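For context, handshake() (whose -ENODEV return is matched above) is the driver's register-poll helper, used for example as handshake(&dma->dmactl, BIT(DMA_ENABLE), 0, 50) in spin_stop_dma() below. A minimal sketch of its contract, simplified and not a verbatim copy of the source:

	/* Simplified sketch: poll the register until the masked bits reach the
	 * expected value, giving up after roughly 'usec' microseconds.  A read
	 * of all ones means the PCI device has vanished, hence -ENODEV.
	 */
	static int handshake(u32 __iomem *ptr, u32 mask, u32 done, int usec)
	{
		u32 result;

		do {
			result = readl(ptr);
			if (result == ~(u32)0)		/* device unplugged */
				return -ENODEV;
			if ((result & mask) == done)
				return 0;
			udelay(1);
		} while (--usec > 0);

		return -ETIMEDOUT;
	}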
383 ep->desc = NULL; in ep_reset_228x()
384 INIT_LIST_HEAD(&ep->queue); in ep_reset_228x()
386 usb_ep_set_maxpacket_limit(&ep->ep, ~0); in ep_reset_228x()
387 ep->ep.ops = &net2280_ep_ops; in ep_reset_228x()
389 /* disable the dma, irqs, endpoint... */ in ep_reset_228x()
390 if (ep->dma) { in ep_reset_228x()
391 writel(0, &ep->dma->dmactl); in ep_reset_228x()
395 &ep->dma->dmastat); in ep_reset_228x()
397 tmp = readl(&regs->pciirqenb0); in ep_reset_228x()
398 tmp &= ~BIT(ep->num); in ep_reset_228x()
399 writel(tmp, &regs->pciirqenb0); in ep_reset_228x()
401 tmp = readl(&regs->pciirqenb1); in ep_reset_228x()
402 tmp &= ~BIT((8 + ep->num)); /* completion */ in ep_reset_228x()
403 writel(tmp, &regs->pciirqenb1); in ep_reset_228x()
405 writel(0, &ep->regs->ep_irqenb); in ep_reset_228x()
410 if (!ep->is_in || (ep->dev->quirks & PLX_2280)) { in ep_reset_228x()
423 if (ep->num != 0) { in ep_reset_228x()
427 writel(tmp, &ep->regs->ep_rsp); in ep_reset_228x()
430 if (ep->dev->quirks & PLX_2280) in ep_reset_228x()
449 &ep->regs->ep_stat); in ep_reset_228x()
459 ep->desc = NULL; in ep_reset_338x()
460 INIT_LIST_HEAD(&ep->queue); in ep_reset_338x()
462 usb_ep_set_maxpacket_limit(&ep->ep, ~0); in ep_reset_338x()
463 ep->ep.ops = &net2280_ep_ops; in ep_reset_338x()
465 /* disable the dma, irqs, endpoint... */ in ep_reset_338x()
466 if (ep->dma) { in ep_reset_338x()
467 writel(0, &ep->dma->dmactl); in ep_reset_338x()
473 &ep->dma->dmastat); in ep_reset_338x()
475 dmastat = readl(&ep->dma->dmastat); in ep_reset_338x()
477 ep_warn(ep->dev, "The dmastat return = %x!!\n", in ep_reset_338x()
479 writel(0x5a, &ep->dma->dmastat); in ep_reset_338x()
482 tmp = readl(&regs->pciirqenb0); in ep_reset_338x()
483 tmp &= ~BIT(ep_bit[ep->num]); in ep_reset_338x()
484 writel(tmp, &regs->pciirqenb0); in ep_reset_338x()
486 if (ep->num < 5) { in ep_reset_338x()
487 tmp = readl(&regs->pciirqenb1); in ep_reset_338x()
488 tmp &= ~BIT((8 + ep->num)); /* completion */ in ep_reset_338x()
489 writel(tmp, &regs->pciirqenb1); in ep_reset_338x()
492 writel(0, &ep->regs->ep_irqenb); in ep_reset_338x()
500 BIT(DATA_IN_TOKEN_INTERRUPT), &ep->regs->ep_stat); in ep_reset_338x()
502 tmp = readl(&ep->cfg->ep_cfg); in ep_reset_338x()
503 if (ep->is_in) in ep_reset_338x()
507 writel(tmp, &ep->cfg->ep_cfg); in ep_reset_338x()
518 if (!_ep || _ep->name == ep0name) { in net2280_disable()
520 return -EINVAL; in net2280_disable()
522 spin_lock_irqsave(&ep->dev->lock, flags); in net2280_disable()
525 if (ep->dev->quirks & PLX_PCIE) in net2280_disable()
526 ep_reset_338x(ep->dev->regs, ep); in net2280_disable()
528 ep_reset_228x(ep->dev->regs, ep); in net2280_disable()
530 ep_vdbg(ep->dev, "disabled %s %s\n", in net2280_disable()
531 ep->dma ? "dma" : "pio", _ep->name); in net2280_disable()
534 (void)readl(&ep->cfg->ep_cfg); in net2280_disable()
536 if (!ep->dma && ep->num >= 1 && ep->num <= 4) in net2280_disable()
537 ep->dma = &ep->dev->dma[ep->num - 1]; in net2280_disable()
539 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_disable()
543 /*-------------------------------------------------------------------------*/
561 INIT_LIST_HEAD(&req->queue); in net2280_alloc_request()
564 if (ep->dma) { in net2280_alloc_request()
567 td = dma_pool_alloc(ep->dev->requests, gfp_flags, in net2280_alloc_request()
568 &req->td_dma); in net2280_alloc_request()
573 td->dmacount = 0; /* not VALID */ in net2280_alloc_request()
574 td->dmadesc = td->dmaaddr; in net2280_alloc_request()
575 req->td = td; in net2280_alloc_request()
577 return &req->req; in net2280_alloc_request()
587 dev_err(&ep->dev->pdev->dev, "%s: Invalid ep=%p or req=%p\n", in net2280_free_request()
593 WARN_ON(!list_empty(&req->queue)); in net2280_free_request()
594 if (req->td) in net2280_free_request()
595 dma_pool_free(ep->dev->requests, req->td, req->td_dma); in net2280_free_request()
599 /*-------------------------------------------------------------------------*/
604 * NOTE: pio with ep-a..ep-d could stuff multiple packets into the fifo
606 * one packet. ep-a..ep-d should use dma instead.
610 struct net2280_ep_regs __iomem *regs = ep->regs; in write_fifo()
618 buf = req->buf + req->actual; in write_fifo()
620 total = req->length - req->actual; in write_fifo()
627 count = ep->ep.maxpacket; in write_fifo()
631 ep_vdbg(ep->dev, "write %s fifo (IN) %d bytes%s req %p\n", in write_fifo()
632 ep->ep.name, count, in write_fifo()
633 (count != ep->ep.maxpacket) ? " (short)" : "", in write_fifo()
642 writel(tmp, &regs->ep_data); in write_fifo()
644 count -= 4; in write_fifo()
651 if (count || total < ep->ep.maxpacket) { in write_fifo()
655 writel(tmp, &regs->ep_data); in write_fifo()
673 statp = &ep->regs->ep_stat; in out_flush()
677 ep_dbg(ep->dev, "%s %s %08x !NAK\n", in out_flush()
678 ep->ep.name, __func__, tmp); in out_flush()
679 writel(BIT(SET_NAK_OUT_PACKETS), &ep->regs->ep_rsp); in out_flush()
691 ep->dev->gadget.speed == USB_SPEED_FULL) { in out_flush()
705 * for ep-a..ep-d this will read multiple packets out when they
710 struct net2280_ep_regs __iomem *regs = ep->regs; in read_fifo()
711 u8 *buf = req->req.buf + req->req.actual; in read_fifo()
718 if (ep->dev->chiprev == 0x0100 && in read_fifo()
719 ep->dev->gadget.speed == USB_SPEED_FULL) { in read_fifo()
721 tmp = readl(&ep->regs->ep_stat); in read_fifo()
735 count = readl(&regs->ep_avail); in read_fifo()
738 tmp = readl(&ep->regs->ep_stat); in read_fifo()
739 count = readl(&regs->ep_avail); in read_fifo()
745 tmp = req->req.length - req->req.actual; in read_fifo()
748 if ((tmp % ep->ep.maxpacket) != 0) { in read_fifo()
749 ep_err(ep->dev, in read_fifo()
751 ep->ep.name, count, tmp); in read_fifo()
752 req->req.status = -EOVERFLOW; in read_fifo()
760 req->req.actual += count; in read_fifo()
762 is_short = (count == 0) || ((count % ep->ep.maxpacket) != 0); in read_fifo()
764 ep_vdbg(ep->dev, "read %s fifo (OUT) %d bytes%s%s%s req %p %d/%d\n", in read_fifo()
765 ep->ep.name, count, is_short ? " (short)" : "", in read_fifo()
767 req, req->req.actual, req->req.length); in read_fifo()
770 tmp = readl(&regs->ep_data); in read_fifo()
774 count -= 4; in read_fifo()
777 tmp = readl(&regs->ep_data); in read_fifo()
782 } while (--count); in read_fifo()
787 writel(BIT(CLEAR_NAK_OUT_PACKETS), &ep->regs->ep_rsp); in read_fifo()
788 (void) readl(&ep->regs->ep_rsp); in read_fifo()
791 return is_short || req->req.actual == req->req.length; in read_fifo()
798 struct net2280_dma *td = req->td; in fill_dma_desc()
799 u32 dmacount = req->req.length; in fill_dma_desc()
803 * in case of overruns on max-size packets, we can't in fill_dma_desc()
806 if (ep->is_in) in fill_dma_desc()
808 if ((!ep->is_in && (dmacount % ep->ep.maxpacket) != 0) || in fill_dma_desc()
809 !(ep->dev->quirks & PLX_2280)) in fill_dma_desc()
812 req->valid = valid; in fill_dma_desc()
817 /* td->dmadesc = previously set by caller */ in fill_dma_desc()
818 td->dmaaddr = cpu_to_le32 (req->req.dma); in fill_dma_desc()
820 /* 2280 may be polling VALID_BIT through ep->dma->dmadesc */ in fill_dma_desc()
822 td->dmacount = cpu_to_le32(dmacount); in fill_dma_desc()
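The td->dmadesc / td->dmaaddr / td->dmacount stores above fill the in-memory descriptor that the DMA engine walks; a sketch of its layout, with the field order assumed from net2280.h:

	/* Assumed layout of struct net2280_dma (see net2280.h); the chip reads
	 * this descriptor directly from memory, so all fields are little-endian.
	 */
	struct net2280_dma {
		__le32	dmacount;	/* byte count plus VALID_BIT/end-of-chain flags */
		__le32	dmaaddr;	/* bus address of the data buffer (req->req.dma) */
		__le32	dmadesc;	/* bus address of the next descriptor */
		__le32	_reserved;
	} __aligned(16);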
838 handshake(&dma->dmactl, BIT(DMA_ENABLE), 0, 50); in spin_stop_dma()
843 writel(readl(&dma->dmactl) & ~BIT(DMA_ENABLE), &dma->dmactl); in stop_dma()
849 struct net2280_dma_regs __iomem *dma = ep->dma; in start_queue()
850 unsigned int tmp = BIT(VALID_BIT) | (ep->is_in << DMA_DIRECTION); in start_queue()
852 if (!(ep->dev->quirks & PLX_2280)) in start_queue()
855 writel(tmp, &dma->dmacount); in start_queue()
856 writel(readl(&dma->dmastat), &dma->dmastat); in start_queue()
858 writel(td_dma, &dma->dmadesc); in start_queue()
859 if (ep->dev->quirks & PLX_PCIE) in start_queue()
861 writel(dmactl, &dma->dmactl); in start_queue()
864 (void) readl(&ep->dev->pci->pcimstctl); in start_queue()
866 writel(BIT(DMA_START), &dma->dmastat); in start_queue()
872 struct net2280_dma_regs __iomem *dma = ep->dma; in start_dma()
877 WARN_ON(readl(&dma->dmactl) & BIT(DMA_ENABLE)); in start_dma()
878 writel(0, &ep->dma->dmactl); in start_dma()
881 if (!ep->is_in && (readl(&ep->regs->ep_stat) & in start_dma()
884 &ep->regs->ep_stat); in start_dma()
886 tmp = readl(&ep->regs->ep_avail); in start_dma()
888 writel(readl(&dma->dmastat), &dma->dmastat); in start_dma()
891 writel(req->req.dma, &dma->dmaaddr); in start_dma()
892 tmp = min(tmp, req->req.length); in start_dma()
895 req->td->dmacount = cpu_to_le32(req->req.length - tmp); in start_dma()
897 &dma->dmacount); in start_dma()
898 req->td->dmadesc = 0; in start_dma()
899 req->valid = 1; in start_dma()
901 writel(BIT(DMA_ENABLE), &dma->dmactl); in start_dma()
902 writel(BIT(DMA_START), &dma->dmastat); in start_dma()
914 if (ep->is_in) { in start_dma()
915 if (likely((req->req.length % ep->ep.maxpacket) || in start_dma()
916 req->req.zero)){ in start_dma()
918 ep->in_fifo_validate = 1; in start_dma()
920 ep->in_fifo_validate = 0; in start_dma()
923 /* init req->td, pointing to the current dummy */ in start_dma()
924 req->td->dmadesc = cpu_to_le32 (ep->td_dma); in start_dma()
927 req->td->dmacount |= cpu_to_le32(BIT(END_OF_CHAIN)); in start_dma()
929 start_queue(ep, tmp, req->td_dma); in start_dma()
936 swap(ep->dummy, req->td); in queue_dma()
937 swap(ep->td_dma, req->td_dma); in queue_dma()
939 req->td->dmadesc = cpu_to_le32 (ep->td_dma); in queue_dma()
948 unsigned stopped = ep->stopped; in done()
950 list_del_init(&req->queue); in done()
952 if (req->req.status == -EINPROGRESS) in done()
953 req->req.status = status; in done()
955 status = req->req.status; in done()
957 dev = ep->dev; in done()
958 if (ep->dma) in done()
959 usb_gadget_unmap_request(&dev->gadget, &req->req, ep->is_in); in done()
961 if (status && status != -ESHUTDOWN) in done()
963 ep->ep.name, &req->req, status, in done()
964 req->req.actual, req->req.length); in done()
967 ep->stopped = 1; in done()
968 spin_unlock(&dev->lock); in done()
969 usb_gadget_giveback_request(&ep->ep, &req->req); in done()
970 spin_lock(&dev->lock); in done()
971 ep->stopped = stopped; in done()
974 /*-------------------------------------------------------------------------*/
985 /* we always require a cpu-view buffer, so that we can in net2280_queue()
989 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_queue()
990 pr_err("%s: Invalid ep=%p or ep->desc\n", __func__, _ep); in net2280_queue()
991 return -EINVAL; in net2280_queue()
994 if (!_req || !_req->complete || !_req->buf || in net2280_queue()
995 !list_empty(&req->queue)) { in net2280_queue()
996 ret = -EINVAL; in net2280_queue()
999 if (_req->length > (~0 & DMA_BYTE_COUNT_MASK)) { in net2280_queue()
1000 ret = -EDOM; in net2280_queue()
1003 dev = ep->dev; in net2280_queue()
1004 if (!dev->driver || dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_queue()
1005 ret = -ESHUTDOWN; in net2280_queue()
1010 if (ep->dma && _req->length == 0) { in net2280_queue()
1011 ret = -EOPNOTSUPP; in net2280_queue()
1016 if (ep->dma) { in net2280_queue()
1017 ret = usb_gadget_map_request(&dev->gadget, _req, in net2280_queue()
1018 ep->is_in); in net2280_queue()
1024 _ep->name, _req, _req->length, _req->buf); in net2280_queue()
1026 spin_lock_irqsave(&dev->lock, flags); in net2280_queue()
1028 _req->status = -EINPROGRESS; in net2280_queue()
1029 _req->actual = 0; in net2280_queue()
1032 if (list_empty(&ep->queue) && !ep->stopped && in net2280_queue()
1033 !((dev->quirks & PLX_PCIE) && ep->dma && in net2280_queue()
1034 (readl(&ep->regs->ep_rsp) & BIT(CLEAR_ENDPOINT_HALT)))) { in net2280_queue()
1037 if (ep->dma) in net2280_queue()
1041 if (ep->num == 0 && _req->length == 0) { in net2280_queue()
1044 ep_vdbg(dev, "%s status ack\n", ep->ep.name); in net2280_queue()
1049 if (ep->is_in) in net2280_queue()
1055 s = readl(&ep->regs->ep_stat); in net2280_queue()
1057 /* note: _req->short_not_ok is in net2280_queue()
1060 * _req->status doesn't change for in net2280_queue()
1061 * short reads (only _req->actual) in net2280_queue()
1064 ep->num == 0) { in net2280_queue()
1070 ep->num != 0) { in net2280_queue()
1074 s = readl(&ep->regs->ep_stat); in net2280_queue()
1080 &ep->regs->ep_rsp); in net2280_queue()
1084 } else if (ep->dma) { in net2280_queue()
1087 if (ep->is_in) { in net2280_queue()
1090 /* preventing magic zlps is per-engine state, not in net2280_queue()
1091 * per-transfer; irq logic must recover hiccups. in net2280_queue()
1093 expect = likely(req->req.zero || in net2280_queue()
1094 (req->req.length % ep->ep.maxpacket)); in net2280_queue()
1095 if (expect != ep->in_fifo_validate) in net2280_queue()
1102 ep->responded = 1; in net2280_queue()
1104 list_add_tail(&req->queue, &ep->queue); in net2280_queue()
1106 spin_unlock_irqrestore(&dev->lock, flags); in net2280_queue()
1112 dev_err(&ep->dev->pdev->dev, "%s: error=%d\n", __func__, ret); in net2280_queue()
1120 req->req.actual = req->req.length - (DMA_BYTE_COUNT_MASK & dmacount); in dma_done()
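As a worked example of the line above: a 512-byte request whose final descriptor still reports a 64-byte residue in DMA_BYTE_COUNT_MASK completes with req->req.actual = 512 - 64 = 448 bytes transferred.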
1131 while (!list_empty(&ep->queue)) { in scan_dma_completions()
1135 req = list_entry(ep->queue.next, in scan_dma_completions()
1137 if (!req->valid) in scan_dma_completions()
1140 req_dma_count = le32_to_cpup(&req->td->dmacount); in scan_dma_completions()
1144 /* SHORT_PACKET_TRANSFERRED_INTERRUPT handles "usb-short" in scan_dma_completions()
1146 * all non-abort DMA completions. in scan_dma_completions()
1148 if (unlikely(req->td->dmadesc == 0)) { in scan_dma_completions()
1150 u32 const ep_dmacount = readl(&ep->dma->dmacount); in scan_dma_completions()
1158 } else if (!ep->is_in && in scan_dma_completions()
1159 (req->req.length % ep->ep.maxpacket) && in scan_dma_completions()
1160 !(ep->dev->quirks & PLX_PCIE)) { in scan_dma_completions()
1162 u32 const ep_stat = readl(&ep->regs->ep_stat); in scan_dma_completions()
1168 ep_warn(ep->dev, "%s lost packet sync!\n", in scan_dma_completions()
1169 ep->ep.name); in scan_dma_completions()
1170 req->req.status = -EOVERFLOW; in scan_dma_completions()
1172 u32 const ep_avail = readl(&ep->regs->ep_avail); in scan_dma_completions()
1175 ep->out_overflow = 1; in scan_dma_completions()
1176 ep_dbg(ep->dev, in scan_dma_completions()
1178 ep->ep.name, ep_avail, in scan_dma_completions()
1179 req->req.length); in scan_dma_completions()
1180 req->req.status = -EOVERFLOW; in scan_dma_completions()
1195 if (ep->stopped) in restart_dma()
1197 req = list_entry(ep->queue.next, struct net2280_request, queue); in restart_dma()
1205 if (likely(!list_empty(&ep->queue))) { in abort_dma()
1207 writel(BIT(DMA_ABORT), &ep->dma->dmastat); in abort_dma()
1208 spin_stop_dma(ep->dma); in abort_dma()
1210 stop_dma(ep->dma); in abort_dma()
1220 ep->stopped = 1; in nuke()
1221 if (ep->dma) in nuke()
1223 while (!list_empty(&ep->queue)) { in nuke()
1224 req = list_entry(ep->queue.next, in nuke()
1227 done(ep, req, -ESHUTDOWN); in nuke()
1242 if (!_ep || (!ep->desc && ep->num != 0) || !_req) { in net2280_dequeue()
1243 pr_err("%s: Invalid ep=%p or ep->desc or req=%p\n", in net2280_dequeue()
1245 return -EINVAL; in net2280_dequeue()
1248 spin_lock_irqsave(&ep->dev->lock, flags); in net2280_dequeue()
1249 stopped = ep->stopped; in net2280_dequeue()
1253 ep->stopped = 1; in net2280_dequeue()
1254 if (ep->dma) { in net2280_dequeue()
1255 dmactl = readl(&ep->dma->dmactl); in net2280_dequeue()
1257 stop_dma(ep->dma); in net2280_dequeue()
1262 list_for_each_entry(iter, &ep->queue, queue) { in net2280_dequeue()
1263 if (&iter->req != _req) in net2280_dequeue()
1269 ep->stopped = stopped; in net2280_dequeue()
1270 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_dequeue()
1271 ep_dbg(ep->dev, "%s: Request mismatch\n", __func__); in net2280_dequeue()
1272 return -EINVAL; in net2280_dequeue()
1276 if (ep->queue.next == &req->queue) { in net2280_dequeue()
1277 if (ep->dma) { in net2280_dequeue()
1278 ep_dbg(ep->dev, "unlink (%s) dma\n", _ep->name); in net2280_dequeue()
1279 _req->status = -ECONNRESET; in net2280_dequeue()
1281 if (likely(ep->queue.next == &req->queue)) { in net2280_dequeue()
1282 /* NOTE: misreports single-transfer mode*/ in net2280_dequeue()
1283 req->td->dmacount = 0; /* invalidate */ in net2280_dequeue()
1285 readl(&ep->dma->dmacount), in net2280_dequeue()
1286 -ECONNRESET); in net2280_dequeue()
1289 ep_dbg(ep->dev, "unlink (%s) pio\n", _ep->name); in net2280_dequeue()
1290 done(ep, req, -ECONNRESET); in net2280_dequeue()
1296 done(ep, req, -ECONNRESET); in net2280_dequeue()
1297 ep->stopped = stopped; in net2280_dequeue()
1299 if (ep->dma) { in net2280_dequeue()
1301 if (list_empty(&ep->queue)) in net2280_dequeue()
1302 stop_dma(ep->dma); in net2280_dequeue()
1303 else if (!ep->stopped) { in net2280_dequeue()
1306 writel(dmactl, &ep->dma->dmactl); in net2280_dequeue()
1308 start_dma(ep, list_entry(ep->queue.next, in net2280_dequeue()
1313 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_dequeue()
1317 /*-------------------------------------------------------------------------*/
1329 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_set_halt_and_wedge()
1330 pr_err("%s: Invalid ep=%p or ep->desc\n", __func__, _ep); in net2280_set_halt_and_wedge()
1331 return -EINVAL; in net2280_set_halt_and_wedge()
1333 if (!ep->dev->driver || ep->dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_set_halt_and_wedge()
1334 retval = -ESHUTDOWN; in net2280_set_halt_and_wedge()
1337 if (ep->desc /* not ep0 */ && (ep->desc->bmAttributes & 0x03) in net2280_set_halt_and_wedge()
1339 retval = -EINVAL; in net2280_set_halt_and_wedge()
1343 spin_lock_irqsave(&ep->dev->lock, flags); in net2280_set_halt_and_wedge()
1344 if (!list_empty(&ep->queue)) { in net2280_set_halt_and_wedge()
1345 retval = -EAGAIN; in net2280_set_halt_and_wedge()
1347 } else if (ep->is_in && value && net2280_fifo_status(_ep) != 0) { in net2280_set_halt_and_wedge()
1348 retval = -EAGAIN; in net2280_set_halt_and_wedge()
1351 ep_vdbg(ep->dev, "%s %s %s\n", _ep->name, in net2280_set_halt_and_wedge()
1356 if (ep->num == 0) in net2280_set_halt_and_wedge()
1357 ep->dev->protocol_stall = 1; in net2280_set_halt_and_wedge()
1361 ep->wedged = 1; in net2280_set_halt_and_wedge()
1364 if (ep->dev->quirks & PLX_PCIE && in net2280_set_halt_and_wedge()
1365 !list_empty(&ep->queue) && ep->td_dma) in net2280_set_halt_and_wedge()
1367 ep->wedged = 0; in net2280_set_halt_and_wedge()
1369 (void) readl(&ep->regs->ep_rsp); in net2280_set_halt_and_wedge()
1371 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_set_halt_and_wedge()
1376 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_set_halt_and_wedge()
1378 dev_err(&ep->dev->pdev->dev, "%s: error=%d\n", __func__, retval); in net2280_set_halt_and_wedge()
1389 if (!_ep || _ep->name == ep0name) { in net2280_set_wedge()
1391 return -EINVAL; in net2280_set_wedge()
1402 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_fifo_status()
1403 pr_err("%s: Invalid ep=%p or ep->desc\n", __func__, _ep); in net2280_fifo_status()
1404 return -ENODEV; in net2280_fifo_status()
1406 if (!ep->dev->driver || ep->dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_fifo_status()
1407 dev_err(&ep->dev->pdev->dev, in net2280_fifo_status()
1409 __func__, ep->dev->driver, ep->dev->gadget.speed); in net2280_fifo_status()
1410 return -ESHUTDOWN; in net2280_fifo_status()
1413 avail = readl(&ep->regs->ep_avail) & (BIT(12) - 1); in net2280_fifo_status()
1414 if (avail > ep->fifo_size) { in net2280_fifo_status()
1415 dev_err(&ep->dev->pdev->dev, "%s: Fifo overflow\n", __func__); in net2280_fifo_status()
1416 return -EOVERFLOW; in net2280_fifo_status()
1418 if (ep->is_in) in net2280_fifo_status()
1419 avail = ep->fifo_size - avail; in net2280_fifo_status()
1428 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_fifo_flush()
1429 pr_err("%s: Invalid ep=%p or ep->desc\n", __func__, _ep); in net2280_fifo_flush()
1432 if (!ep->dev->driver || ep->dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_fifo_flush()
1433 dev_err(&ep->dev->pdev->dev, in net2280_fifo_flush()
1435 __func__, ep->dev->driver, ep->dev->gadget.speed); in net2280_fifo_flush()
1439 writel(BIT(FIFO_FLUSH), &ep->regs->ep_stat); in net2280_fifo_flush()
1440 (void) readl(&ep->regs->ep_rsp); in net2280_fifo_flush()
1459 /*-------------------------------------------------------------------------*/
1468 return -ENODEV; in net2280_get_frame()
1470 spin_lock_irqsave(&dev->lock, flags); in net2280_get_frame()
1471 retval = get_idx_reg(dev->regs, REG_FRAME) & 0x03ff; in net2280_get_frame()
1472 spin_unlock_irqrestore(&dev->lock, flags); in net2280_get_frame()
1486 spin_lock_irqsave(&dev->lock, flags); in net2280_wakeup()
1487 tmp = readl(&dev->usb->usbctl); in net2280_wakeup()
1489 writel(BIT(GENERATE_RESUME), &dev->usb->usbstat); in net2280_wakeup()
1490 spin_unlock_irqrestore(&dev->lock, flags); in net2280_wakeup()
1506 spin_lock_irqsave(&dev->lock, flags); in net2280_set_selfpowered()
1507 tmp = readl(&dev->usb->usbctl); in net2280_set_selfpowered()
1510 _gadget->is_selfpowered = 1; in net2280_set_selfpowered()
1513 _gadget->is_selfpowered = 0; in net2280_set_selfpowered()
1515 writel(tmp, &dev->usb->usbctl); in net2280_set_selfpowered()
1516 spin_unlock_irqrestore(&dev->lock, flags); in net2280_set_selfpowered()
1528 return -ENODEV; in net2280_pullup()
1531 spin_lock_irqsave(&dev->lock, flags); in net2280_pullup()
1532 tmp = readl(&dev->usb->usbctl); in net2280_pullup()
1533 dev->softconnect = (is_on != 0); in net2280_pullup()
1536 writel(tmp | BIT(USB_DETECT_ENABLE), &dev->usb->usbctl); in net2280_pullup()
1538 writel(tmp & ~BIT(USB_DETECT_ENABLE), &dev->usb->usbctl); in net2280_pullup()
1542 spin_unlock_irqrestore(&dev->lock, flags); in net2280_pullup()
1555 /* ep-e, ep-f are PIO with only 64 byte fifos */ in net2280_match_ep()
1556 ep = gadget_find_ep_by_name(_gadget, "ep-e"); in net2280_match_ep()
1559 ep = gadget_find_ep_by_name(_gadget, "ep-f"); in net2280_match_ep()
1628 /*-------------------------------------------------------------------------*/
1643 if (!dev->driver || !dev->driver->function || in function_show()
1644 strlen(dev->driver->function) > PAGE_SIZE) in function_show()
1646 return scnprintf(buf, PAGE_SIZE, "%s\n", dev->driver->function); in function_show()
1664 spin_lock_irqsave(&dev->lock, flags); in registers_show()
1666 if (dev->driver) in registers_show()
1667 s = dev->driver->driver.name; in registers_show()
1677 driver_name, dev->chiprev, in registers_show()
1678 readl(&dev->regs->devinit), in registers_show()
1679 readl(&dev->regs->fifoctl), in registers_show()
1681 readl(&dev->regs->pciirqenb0), in registers_show()
1682 readl(&dev->regs->pciirqenb1), in registers_show()
1683 readl(&dev->regs->irqstat0), in registers_show()
1684 readl(&dev->regs->irqstat1)); in registers_show()
1685 size -= t; in registers_show()
1689 t1 = readl(&dev->usb->usbctl); in registers_show()
1690 t2 = readl(&dev->usb->usbstat); in registers_show()
1694 else if (dev->gadget.speed == USB_SPEED_UNKNOWN) in registers_show()
1704 readl(&dev->usb->stdrsp), t1, t2, in registers_show()
1705 readl(&dev->usb->ouraddr), s); in registers_show()
1706 size -= t; in registers_show()
1714 for (i = 0; i < dev->n_ep; i++) { in registers_show()
1717 ep = &dev->ep[i]; in registers_show()
1718 if (i && !ep->desc) in registers_show()
1721 t1 = readl(&ep->cfg->ep_cfg); in registers_show()
1722 t2 = readl(&ep->regs->ep_rsp) & 0xff; in registers_show()
1726 ep->ep.name, t1, t2, in registers_show()
1743 readl(&ep->regs->ep_irqenb)); in registers_show()
1744 size -= t; in registers_show()
1749 "(ep%d%s-%s)%s\n", in registers_show()
1750 readl(&ep->regs->ep_stat), in registers_show()
1751 readl(&ep->regs->ep_avail), in registers_show()
1754 ep->stopped ? "*" : ""); in registers_show()
1755 size -= t; in registers_show()
1758 if (!ep->dma) in registers_show()
1764 readl(&ep->dma->dmactl), in registers_show()
1765 readl(&ep->dma->dmastat), in registers_show()
1766 readl(&ep->dma->dmacount), in registers_show()
1767 readl(&ep->dma->dmaaddr), in registers_show()
1768 readl(&ep->dma->dmadesc)); in registers_show()
1769 size -= t; in registers_show()
1778 size -= t; in registers_show()
1780 for (i = 0; i < dev->n_ep; i++) { in registers_show()
1783 ep = &dev->ep[i]; in registers_show()
1784 if (i && !ep->irqs) in registers_show()
1786 t = scnprintf(next, size, " %s/%lu", ep->ep.name, ep->irqs); in registers_show()
1787 size -= t; in registers_show()
1792 size -= t; in registers_show()
1795 spin_unlock_irqrestore(&dev->lock, flags); in registers_show()
1797 return PAGE_SIZE - size; in registers_show()
1813 spin_lock_irqsave(&dev->lock, flags); in queues_show()
1815 for (i = 0; i < dev->n_ep; i++) { in queues_show()
1816 struct net2280_ep *ep = &dev->ep[i]; in queues_show()
1823 d = ep->desc; in queues_show()
1826 t = d->bEndpointAddress; in queues_show()
1828 "\n%s (ep%d%s-%s) max %04x %s fifo %d\n", in queues_show()
1829 ep->ep.name, t & USB_ENDPOINT_NUMBER_MASK, in queues_show()
1831 type_string(d->bmAttributes), in queues_show()
1833 ep->dma ? "dma" : "pio", ep->fifo_size in queues_show()
1837 ep->is_in ? "in" : "out"); in queues_show()
1840 size -= t; in queues_show()
1843 if (list_empty(&ep->queue)) { in queues_show()
1847 size -= t; in queues_show()
1851 list_for_each_entry(req, &ep->queue, queue) { in queues_show()
1852 if (ep->dma && req->td_dma == readl(&ep->dma->dmadesc)) in queues_show()
1856 &req->req, req->req.actual, in queues_show()
1857 req->req.length, req->req.buf, in queues_show()
1858 readl(&ep->dma->dmacount)); in queues_show()
1862 &req->req, req->req.actual, in queues_show()
1863 req->req.length, req->req.buf); in queues_show()
1866 size -= t; in queues_show()
1869 if (ep->dma) { in queues_show()
1872 td = req->td; in queues_show()
1875 (u32) req->td_dma, in queues_show()
1876 le32_to_cpu(td->dmacount), in queues_show()
1877 le32_to_cpu(td->dmaaddr), in queues_show()
1878 le32_to_cpu(td->dmadesc)); in queues_show()
1881 size -= t; in queues_show()
1888 spin_unlock_irqrestore(&dev->lock, flags); in queues_show()
1889 return PAGE_SIZE - size; in queues_show()
1901 /*-------------------------------------------------------------------------*/
1903 /* another driver-specific mode might be a request type doing dma
1910 writel((0xffff << PCI_BASE2_RANGE) | mode, &dev->regs->fifoctl); in set_fifo_mode()
1912 /* always ep-{a,b,e,f} ... maybe not ep-c or ep-d */ in set_fifo_mode()
1913 INIT_LIST_HEAD(&dev->gadget.ep_list); in set_fifo_mode()
1914 list_add_tail(&dev->ep[1].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1915 list_add_tail(&dev->ep[2].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1918 list_add_tail(&dev->ep[3].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1919 list_add_tail(&dev->ep[4].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1920 dev->ep[1].fifo_size = dev->ep[2].fifo_size = 1024; in set_fifo_mode()
1923 dev->ep[1].fifo_size = dev->ep[2].fifo_size = 2048; in set_fifo_mode()
1926 list_add_tail(&dev->ep[3].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1927 dev->ep[1].fifo_size = 2048; in set_fifo_mode()
1928 dev->ep[2].fifo_size = 1024; in set_fifo_mode()
1931 /* fifo sizes for ep0, ep-c, ep-d, ep-e, and ep-f never change */ in set_fifo_mode()
1932 list_add_tail(&dev->ep[5].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1933 list_add_tail(&dev->ep[6].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1940 * - This phase undoes the earlier phase of the Defect 7374 workaround, in defect7374_disable_data_eps()
1949 ep = &dev->ep[i]; in defect7374_disable_data_eps()
1950 writel(i, &ep->cfg->ep_cfg); in defect7374_disable_data_eps()
1955 writel(0, &dev->dep[i].dep_cfg); in defect7374_disable_data_eps()
1959 tmp_reg = readl(&dev->plregs->pl_ep_ctrl); in defect7374_disable_data_eps()
1960 writel(((tmp_reg & ~0x1f) | ep_sel), &dev->plregs->pl_ep_ctrl); in defect7374_disable_data_eps()
1967 tmp_reg = readl(&dev->plregs->pl_ep_cfg_4); in defect7374_disable_data_eps()
1969 writel(tmp_reg, &dev->plregs->pl_ep_cfg_4); in defect7374_disable_data_eps()
1970 tmp_reg = readl(&dev->plregs->pl_ep_ctrl); in defect7374_disable_data_eps()
1972 writel(tmp_reg, &dev->plregs->pl_ep_ctrl); in defect7374_disable_data_eps()
1983 scratch = get_idx_reg(dev->regs, SCRATCH); in defect7374_enable_data_eps_zero()
1991 ep_warn(dev, "It will operate on cold-reboot and SS connect"); in defect7374_enable_data_eps_zero()
1996 ((dev->enhanced_mode) ? in defect7374_enable_data_eps_zero()
2001 writel(tmp, &dev->ep[i].cfg->ep_cfg); in defect7374_enable_data_eps_zero()
2005 writel(tmp, &dev->dep[1].dep_cfg); in defect7374_enable_data_eps_zero()
2006 writel(tmp, &dev->dep[3].dep_cfg); in defect7374_enable_data_eps_zero()
2007 writel(tmp, &dev->dep[4].dep_cfg); in defect7374_enable_data_eps_zero()
2008 writel(tmp, &dev->dep[5].dep_cfg); in defect7374_enable_data_eps_zero()
2014 tmp_reg = readl(&dev->plregs->pl_ep_ctrl); in defect7374_enable_data_eps_zero()
2016 &dev->plregs->pl_ep_ctrl); in defect7374_enable_data_eps_zero()
2020 (readl(&dev->plregs->pl_ep_ctrl) | in defect7374_enable_data_eps_zero()
2022 writel(tmp, &dev->plregs->pl_ep_ctrl); in defect7374_enable_data_eps_zero()
2030 tmp = (readl(&dev->plregs->pl_ep_cfg_4) | in defect7374_enable_data_eps_zero()
2032 writel(tmp, &dev->plregs->pl_ep_cfg_4); in defect7374_enable_data_eps_zero()
2034 tmp = readl(&dev->plregs->pl_ep_ctrl) & in defect7374_enable_data_eps_zero()
2036 writel(tmp, &dev->plregs->pl_ep_ctrl); in defect7374_enable_data_eps_zero()
2041 * - Tip: Connection speed is known upon the first in defect7374_enable_data_eps_zero()
2044 set_idx_reg(dev->regs, SCRATCH, scratch); in defect7374_enable_data_eps_zero()
2049 * - one bus driver, initted first;
2050 * - one function driver, initted second
2061 dev->gadget.speed = USB_SPEED_UNKNOWN; in usb_reset_228x()
2062 (void) readl(&dev->usb->usbctl); in usb_reset_228x()
2066 /* disable automatic responses, and irqs */ in usb_reset_228x()
2067 writel(0, &dev->usb->stdrsp); in usb_reset_228x()
2068 writel(0, &dev->regs->pciirqenb0); in usb_reset_228x()
2069 writel(0, &dev->regs->pciirqenb1); in usb_reset_228x()
2073 struct net2280_ep *ep = &dev->ep[tmp + 1]; in usb_reset_228x()
2074 if (ep->dma) in usb_reset_228x()
2078 writel(~0, &dev->regs->irqstat0), in usb_reset_228x()
2079 writel(~(u32)BIT(SUSPEND_REQUEST_INTERRUPT), &dev->regs->irqstat1), in usb_reset_228x()
2082 tmp = readl(&dev->regs->devinit) | in usb_reset_228x()
2087 writel(tmp, &dev->regs->devinit); in usb_reset_228x()
2097 dev->gadget.speed = USB_SPEED_UNKNOWN; in usb_reset_338x()
2098 (void)readl(&dev->usb->usbctl); in usb_reset_338x()
2102 if (dev->bug7734_patched) { in usb_reset_338x()
2103 /* disable automatic responses, and irqs */ in usb_reset_338x()
2104 writel(0, &dev->usb->stdrsp); in usb_reset_338x()
2105 writel(0, &dev->regs->pciirqenb0); in usb_reset_338x()
2106 writel(0, &dev->regs->pciirqenb1); in usb_reset_338x()
2111 struct net2280_ep *ep = &dev->ep[tmp + 1]; in usb_reset_338x()
2114 if (ep->dma) { in usb_reset_338x()
2117 dma = &dev->dma[tmp]; in usb_reset_338x()
2118 writel(BIT(DMA_ABORT), &dma->dmastat); in usb_reset_338x()
2119 writel(0, &dma->dmactl); in usb_reset_338x()
2123 writel(~0, &dev->regs->irqstat0), writel(~0, &dev->regs->irqstat1); in usb_reset_338x()
2125 if (dev->bug7734_patched) { in usb_reset_338x()
2127 tmp = readl(&dev->regs->devinit) | in usb_reset_338x()
2133 writel(tmp, &dev->regs->devinit); in usb_reset_338x()
2136 /* always ep-{1,2,3,4} ... maybe not ep-3 or ep-4 */ in usb_reset_338x()
2137 INIT_LIST_HEAD(&dev->gadget.ep_list); in usb_reset_338x()
2139 for (tmp = 1; tmp < dev->n_ep; tmp++) in usb_reset_338x()
2140 list_add_tail(&dev->ep[tmp].ep.ep_list, &dev->gadget.ep_list); in usb_reset_338x()
2146 if (dev->quirks & PLX_LEGACY) in usb_reset()
2157 struct net2280_ep *ep = &dev->ep[tmp]; in usb_reinit_228x()
2159 ep->ep.name = ep_info_dft[tmp].name; in usb_reinit_228x()
2160 ep->ep.caps = ep_info_dft[tmp].caps; in usb_reinit_228x()
2161 ep->dev = dev; in usb_reinit_228x()
2162 ep->num = tmp; in usb_reinit_228x()
2165 ep->fifo_size = 1024; in usb_reinit_228x()
2166 ep->dma = &dev->dma[tmp - 1]; in usb_reinit_228x()
2168 ep->fifo_size = 64; in usb_reinit_228x()
2169 ep->regs = &dev->epregs[tmp]; in usb_reinit_228x()
2170 ep->cfg = &dev->epregs[tmp]; in usb_reinit_228x()
2171 ep_reset_228x(dev->regs, ep); in usb_reinit_228x()
2173 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, 64); in usb_reinit_228x()
2174 usb_ep_set_maxpacket_limit(&dev->ep[5].ep, 64); in usb_reinit_228x()
2175 usb_ep_set_maxpacket_limit(&dev->ep[6].ep, 64); in usb_reinit_228x()
2177 dev->gadget.ep0 = &dev->ep[0].ep; in usb_reinit_228x()
2178 dev->ep[0].stopped = 0; in usb_reinit_228x()
2179 INIT_LIST_HEAD(&dev->gadget.ep0->ep_list); in usb_reinit_228x()
2185 writel(EP_DONTUSE, &dev->dep[tmp].dep_cfg); in usb_reinit_228x()
2197 for (i = 0; i < dev->n_ep; i++) { in usb_reinit_338x()
2198 struct net2280_ep *ep = &dev->ep[i]; in usb_reinit_338x()
2200 ep->ep.name = dev->enhanced_mode ? ep_info_adv[i].name : in usb_reinit_338x()
2202 ep->ep.caps = dev->enhanced_mode ? ep_info_adv[i].caps : in usb_reinit_338x()
2204 ep->dev = dev; in usb_reinit_338x()
2205 ep->num = i; in usb_reinit_338x()
2208 ep->dma = &dev->dma[i - 1]; in usb_reinit_338x()
2210 if (dev->enhanced_mode) { in usb_reinit_338x()
2211 ep->cfg = &dev->epregs[ne[i]]; in usb_reinit_338x()
2217 writel(ne[i], &ep->cfg->ep_cfg); in usb_reinit_338x()
2218 ep->regs = (struct net2280_ep_regs __iomem *) in usb_reinit_338x()
2219 (((void __iomem *)&dev->epregs[ne[i]]) + in usb_reinit_338x()
2222 ep->cfg = &dev->epregs[i]; in usb_reinit_338x()
2223 ep->regs = &dev->epregs[i]; in usb_reinit_338x()
2226 ep->fifo_size = (i != 0) ? 2048 : 512; in usb_reinit_338x()
2228 ep_reset_338x(dev->regs, ep); in usb_reinit_338x()
2230 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, 512); in usb_reinit_338x()
2232 dev->gadget.ep0 = &dev->ep[0].ep; in usb_reinit_338x()
2233 dev->ep[0].stopped = 0; in usb_reinit_338x()
2236 if (dev->bug7734_patched) { in usb_reinit_338x()
2237 tmp = readl(&dev->usb_ext->usbctl2) & in usb_reinit_338x()
2239 writel(tmp, &dev->usb_ext->usbctl2); in usb_reinit_338x()
2243 val = readl(&dev->llregs->ll_lfps_5); in usb_reinit_338x()
2246 writel(val, &dev->llregs->ll_lfps_5); in usb_reinit_338x()
2248 val = readl(&dev->llregs->ll_lfps_6); in usb_reinit_338x()
2251 writel(val, &dev->llregs->ll_lfps_6); in usb_reinit_338x()
2258 val = readl(&dev->llregs->ll_tsn_counters_2); in usb_reinit_338x()
2261 writel(val, &dev->llregs->ll_tsn_counters_2); in usb_reinit_338x()
2263 val = readl(&dev->llregs->ll_tsn_counters_3); in usb_reinit_338x()
2266 writel(val, &dev->llregs->ll_tsn_counters_3); in usb_reinit_338x()
2270 * Handshake Signaling for Device-Initiated U1 Exit is too short. in usb_reinit_338x()
2274 val = readl(&dev->llregs->ll_lfps_timers_2); in usb_reinit_338x()
2276 &dev->llregs->ll_lfps_timers_2); in usb_reinit_338x()
2280 * - On SS connections, setting Recovery Idle to Recover Fmw improves in usb_reinit_338x()
2282 * - It is safe to set for all connection speeds; all chip revisions. in usb_reinit_338x()
2283 * - R-M-W to leave other bits undisturbed. in usb_reinit_338x()
2284 * - Reference PLX TT-7372 in usb_reinit_338x()
2286 val = readl(&dev->llregs->ll_tsn_chicken_bit); in usb_reinit_338x()
2288 writel(val, &dev->llregs->ll_tsn_chicken_bit); in usb_reinit_338x()
2290 INIT_LIST_HEAD(&dev->gadget.ep0->ep_list); in usb_reinit_338x()
2293 writel(0x0D, &dev->dep[0].dep_cfg); in usb_reinit_338x()
2294 writel(0x0D, &dev->dep[1].dep_cfg); in usb_reinit_338x()
2295 writel(0x0E, &dev->dep[2].dep_cfg); in usb_reinit_338x()
2296 writel(0x0E, &dev->dep[3].dep_cfg); in usb_reinit_338x()
2297 writel(0x0F, &dev->dep[4].dep_cfg); in usb_reinit_338x()
2298 writel(0x0C, &dev->dep[5].dep_cfg); in usb_reinit_338x()
2303 if (dev->quirks & PLX_LEGACY) in usb_reinit()
2313 &dev->epregs[0].ep_rsp); in ep0_start_228x()
2326 &dev->usb->stdrsp); in ep0_start_228x()
2330 (dev->softconnect << USB_DETECT_ENABLE) | in ep0_start_228x()
2332 &dev->usb->usbctl); in ep0_start_228x()
2334 /* enable irqs so we can see ep0 and general operation */ in ep0_start_228x()
2337 &dev->regs->pciirqenb0); in ep0_start_228x()
2345 &dev->regs->pciirqenb1); in ep0_start_228x()
2348 (void) readl(&dev->usb->usbctl); in ep0_start_228x()
2354 if (dev->bug7734_patched) in ep0_start_338x()
2357 &dev->epregs[0].ep_rsp); in ep0_start_338x()
2371 &dev->usb->stdrsp); in ep0_start_338x()
2372 dev->wakeup_enable = 1; in ep0_start_338x()
2374 (dev->softconnect << USB_DETECT_ENABLE) | in ep0_start_338x()
2376 &dev->usb->usbctl); in ep0_start_338x()
2378 /* enable irqs so we can see ep0 and general operation */ in ep0_start_338x()
2381 &dev->regs->pciirqenb0); in ep0_start_338x()
2386 &dev->regs->pciirqenb1); in ep0_start_338x()
2389 (void)readl(&dev->usb->usbctl); in ep0_start_338x()
2394 if (dev->quirks & PLX_LEGACY) in ep0_start()
2401 * non-control requests. then usb traffic follows until a
2413 * (dev->usb->xcvrdiag & FORCE_FULL_SPEED_MODE) in net2280_start()
2416 if (!driver || driver->max_speed < USB_SPEED_HIGH || in net2280_start()
2417 !driver->setup) in net2280_start()
2418 return -EINVAL; in net2280_start()
2422 for (i = 0; i < dev->n_ep; i++) in net2280_start()
2423 dev->ep[i].irqs = 0; in net2280_start()
2426 dev->driver = driver; in net2280_start()
2428 retval = device_create_file(&dev->pdev->dev, &dev_attr_function); in net2280_start()
2431 retval = device_create_file(&dev->pdev->dev, &dev_attr_queues); in net2280_start()
2440 if ((dev->quirks & PLX_PCIE) && !dev->bug7734_patched) in net2280_start()
2449 device_remove_file(&dev->pdev->dev, &dev_attr_function); in net2280_start()
2451 dev->driver = NULL; in net2280_start()
2460 if (dev->gadget.speed == USB_SPEED_UNKNOWN) in stop_activity()
2467 for (i = 0; i < dev->n_ep; i++) in stop_activity()
2468 nuke(&dev->ep[i]); in stop_activity()
2471 if (dev->async_callbacks && driver) { in stop_activity()
2472 spin_unlock(&dev->lock); in stop_activity()
2473 driver->disconnect(&dev->gadget); in stop_activity()
2474 spin_lock(&dev->lock); in stop_activity()
2487 spin_lock_irqsave(&dev->lock, flags); in net2280_stop()
2489 spin_unlock_irqrestore(&dev->lock, flags); in net2280_stop()
2493 device_remove_file(&dev->pdev->dev, &dev_attr_function); in net2280_stop()
2494 device_remove_file(&dev->pdev->dev, &dev_attr_queues); in net2280_stop()
2496 dev->driver = NULL; in net2280_stop()
2505 spin_lock_irq(&dev->lock); in net2280_async_callbacks()
2506 dev->async_callbacks = enable; in net2280_async_callbacks()
2507 spin_unlock_irq(&dev->lock); in net2280_async_callbacks()
2510 /*-------------------------------------------------------------------------*/
2512 /* handle ep0, ep-e, ep-f with 64 byte packets: packet per irq.
2513 * also works for dma-capable endpoints, in pio mode or just
2520 /* 0 error, 1 mid-data, 2 done */ in handle_ep_small()
2523 if (!list_empty(&ep->queue)) in handle_ep_small()
2524 req = list_entry(ep->queue.next, in handle_ep_small()
2530 t = readl(&ep->regs->ep_stat); in handle_ep_small()
2531 ep->irqs++; in handle_ep_small()
2533 ep_vdbg(ep->dev, "%s ack ep_stat %08x, req %p\n", in handle_ep_small()
2534 ep->ep.name, t, req ? &req->req : NULL); in handle_ep_small()
2536 if (!ep->is_in || (ep->dev->quirks & PLX_2280)) in handle_ep_small()
2537 writel(t & ~BIT(NAK_OUT_PACKETS), &ep->regs->ep_stat); in handle_ep_small()
2540 writel(t, &ep->regs->ep_stat); in handle_ep_small()
2542 /* for ep0, monitor token irqs to catch data stage length errors in handle_ep_small()
2552 if (unlikely(ep->num == 0)) { in handle_ep_small()
2553 if (ep->is_in) { in handle_ep_small()
2556 if (ep->dev->protocol_stall) { in handle_ep_small()
2557 ep->stopped = 1; in handle_ep_small()
2565 if (ep->dev->protocol_stall) { in handle_ep_small()
2566 ep->stopped = 1; in handle_ep_small()
2569 } else if (ep->responded && in handle_ep_small()
2570 !req && !ep->stopped) in handle_ep_small()
2576 if (ep->dev->protocol_stall) { in handle_ep_small()
2577 ep->stopped = 1; in handle_ep_small()
2584 req->req.actual == req->req.length) || in handle_ep_small()
2585 (ep->responded && !req)) { in handle_ep_small()
2586 ep->dev->protocol_stall = 1; in handle_ep_small()
2588 ep->stopped = 1; in handle_ep_small()
2590 done(ep, req, -EOVERFLOW); in handle_ep_small()
2600 if (likely(ep->dma)) { in handle_ep_small()
2603 int stopped = ep->stopped; in handle_ep_small()
2612 ep->stopped = 1; in handle_ep_small()
2613 for (count = 0; ; t = readl(&ep->regs->ep_stat)) { in handle_ep_small()
2619 if (unlikely(list_empty(&ep->queue) || in handle_ep_small()
2620 ep->out_overflow)) { in handle_ep_small()
2624 req = list_entry(ep->queue.next, in handle_ep_small()
2631 count = readl(&ep->dma->dmacount); in handle_ep_small()
2633 if (readl(&ep->dma->dmadesc) in handle_ep_small()
2634 != req->td_dma) in handle_ep_small()
2644 readl(&ep->dma->dmadesc) != in handle_ep_small()
2645 req->td_dma && stuck++ > 5) { in handle_ep_small()
2647 &ep->dma->dmacount); in handle_ep_small()
2650 ep_dbg(ep->dev, "%s escape stuck %d, count %u\n", in handle_ep_small()
2651 ep->ep.name, stuck, in handle_ep_small()
2667 writel(BIT(DMA_ABORT), &ep->dma->dmastat); in handle_ep_small()
2668 spin_stop_dma(ep->dma); in handle_ep_small()
2671 req->td->dmacount = 0; in handle_ep_small()
2672 t = readl(&ep->regs->ep_avail); in handle_ep_small()
2674 (ep->out_overflow || t) in handle_ep_small()
2675 ? -EOVERFLOW : 0); in handle_ep_small()
2679 if (unlikely(ep->out_overflow || in handle_ep_small()
2680 (ep->dev->chiprev == 0x0100 && in handle_ep_small()
2681 ep->dev->gadget.speed in handle_ep_small()
2684 ep->out_overflow = 0; in handle_ep_small()
2688 ep->stopped = stopped; in handle_ep_small()
2689 if (!list_empty(&ep->queue)) in handle_ep_small()
2692 ep_dbg(ep->dev, "%s dma ep_stat %08x ??\n", in handle_ep_small()
2693 ep->ep.name, t); in handle_ep_small()
2698 if (read_fifo(ep, req) && ep->num != 0) in handle_ep_small()
2705 len = req->req.length - req->req.actual; in handle_ep_small()
2706 if (len > ep->ep.maxpacket) in handle_ep_small()
2707 len = ep->ep.maxpacket; in handle_ep_small()
2708 req->req.actual += len; in handle_ep_small()
2712 if ((req->req.actual == req->req.length) && in handle_ep_small()
2713 (!req->req.zero || len != ep->ep.maxpacket) && ep->num) in handle_ep_small()
2726 if (ep->num == 0) { in handle_ep_small()
2731 if (!ep->stopped) in handle_ep_small()
2735 if (!list_empty(&ep->queue) && !ep->stopped) in handle_ep_small()
2736 req = list_entry(ep->queue.next, in handle_ep_small()
2740 if (req && !ep->is_in) in handle_ep_small()
2748 if (req && !ep->stopped) { in handle_ep_small()
2752 write_fifo(ep, &req->req); in handle_ep_small()
2761 return &dev->ep[0]; in get_ep_by_addr()
2762 list_for_each_entry(ep, &dev->gadget.ep_list, ep.ep_list) { in get_ep_by_addr()
2765 if (!ep->desc) in get_ep_by_addr()
2767 bEndpointAddress = ep->desc->bEndpointAddress; in get_ep_by_addr()
2782 scratch = get_idx_reg(dev->regs, SCRATCH); in defect7374_workaround()
2791 if (!(readl(&dev->usb->usbstat) & BIT(SUPER_SPEED_MODE))) { in defect7374_workaround()
2794 * - Connection must be FS or HS. in defect7374_workaround()
2795 * - This FSM state should allow workaround software to in defect7374_workaround()
2799 dev->bug7734_patched = 1; in defect7374_workaround()
2808 state = readl(&dev->plregs->pl_ep_status_1) in defect7374_workaround()
2813 dev->bug7734_patched = 1; in defect7374_workaround()
2819 * - Wait and try again. in defect7374_workaround()
2838 * Restore data EPs to their pre-workaround settings (disabled, in defect7374_workaround()
2843 set_idx_reg(dev->regs, SCRATCH, scratch); in defect7374_workaround()
2850 struct net2280 *dev = ep->dev; in ep_clear_seqnum()
2854 val = readl(&dev->plregs->pl_ep_ctrl) & ~0x1f; in ep_clear_seqnum()
2855 val |= ep_pl[ep->num]; in ep_clear_seqnum()
2856 writel(val, &dev->plregs->pl_ep_ctrl); in ep_clear_seqnum()
2858 writel(val, &dev->plregs->pl_ep_ctrl); in ep_clear_seqnum()
2876 dev->addressed_state = !w_value; in handle_stat0_irqs_superspeed()
2882 status = dev->wakeup_enable ? 0x02 : 0x00; in handle_stat0_irqs_superspeed()
2883 if (dev->gadget.is_selfpowered) in handle_stat0_irqs_superspeed()
2885 status |= (dev->u1_enable << 2 | dev->u2_enable << 3 | in handle_stat0_irqs_superspeed()
2886 dev->ltm_enable << 4); in handle_stat0_irqs_superspeed()
2887 writel(0, &dev->epregs[0].ep_irqenb); in handle_stat0_irqs_superspeed()
2889 writel((__force u32) status, &dev->epregs[0].ep_data); in handle_stat0_irqs_superspeed()
2897 status = readl(&e->regs->ep_rsp) & in handle_stat0_irqs_superspeed()
2899 writel(0, &dev->epregs[0].ep_irqenb); in handle_stat0_irqs_superspeed()
2901 writel((__force u32) status, &dev->epregs[0].ep_data); in handle_stat0_irqs_superspeed()
2913 if (!dev->addressed_state) { in handle_stat0_irqs_superspeed()
2916 dev->u1_enable = 0; in handle_stat0_irqs_superspeed()
2917 writel(readl(&dev->usb_ext->usbctl2) & in handle_stat0_irqs_superspeed()
2919 &dev->usb_ext->usbctl2); in handle_stat0_irqs_superspeed()
2924 dev->u2_enable = 0; in handle_stat0_irqs_superspeed()
2925 writel(readl(&dev->usb_ext->usbctl2) & in handle_stat0_irqs_superspeed()
2927 &dev->usb_ext->usbctl2); in handle_stat0_irqs_superspeed()
2932 dev->ltm_enable = 0; in handle_stat0_irqs_superspeed()
2933 writel(readl(&dev->usb_ext->usbctl2) & in handle_stat0_irqs_superspeed()
2935 &dev->usb_ext->usbctl2); in handle_stat0_irqs_superspeed()
2944 dev->wakeup_enable = 0; in handle_stat0_irqs_superspeed()
2945 writel(readl(&dev->usb->usbctl) & in handle_stat0_irqs_superspeed()
2947 &dev->usb->usbctl); in handle_stat0_irqs_superspeed()
2959 ep_vdbg(dev, "%s clear halt\n", e->ep.name); in handle_stat0_irqs_superspeed()
2966 if (!list_empty(&e->queue) && e->td_dma) in handle_stat0_irqs_superspeed()
2969 ep->stopped = 1; in handle_stat0_irqs_superspeed()
2979 if (!dev->addressed_state) { in handle_stat0_irqs_superspeed()
2982 dev->u1_enable = 1; in handle_stat0_irqs_superspeed()
2983 writel(readl(&dev->usb_ext->usbctl2) | in handle_stat0_irqs_superspeed()
2985 &dev->usb_ext->usbctl2); in handle_stat0_irqs_superspeed()
2990 dev->u2_enable = 1; in handle_stat0_irqs_superspeed()
2991 writel(readl(&dev->usb_ext->usbctl2) | in handle_stat0_irqs_superspeed()
2993 &dev->usb_ext->usbctl2); in handle_stat0_irqs_superspeed()
2998 dev->ltm_enable = 1; in handle_stat0_irqs_superspeed()
2999 writel(readl(&dev->usb_ext->usbctl2) | in handle_stat0_irqs_superspeed()
3001 &dev->usb_ext->usbctl2); in handle_stat0_irqs_superspeed()
3010 dev->wakeup_enable = 1; in handle_stat0_irqs_superspeed()
3011 writel(readl(&dev->usb->usbctl) | in handle_stat0_irqs_superspeed()
3013 &dev->usb->usbctl); in handle_stat0_irqs_superspeed()
3023 ep->stopped = 1; in handle_stat0_irqs_superspeed()
3024 if (ep->num == 0) in handle_stat0_irqs_superspeed()
3025 ep->dev->protocol_stall = 1; in handle_stat0_irqs_superspeed()
3027 if (ep->dma) in handle_stat0_irqs_superspeed()
3045 readl(&ep->cfg->ep_cfg)); in handle_stat0_irqs_superspeed()
3047 ep->responded = 0; in handle_stat0_irqs_superspeed()
3048 if (dev->async_callbacks) { in handle_stat0_irqs_superspeed()
3049 spin_unlock(&dev->lock); in handle_stat0_irqs_superspeed()
3050 tmp = dev->driver->setup(&dev->gadget, &r); in handle_stat0_irqs_superspeed()
3051 spin_lock(&dev->lock); in handle_stat0_irqs_superspeed()
3058 dev->protocol_stall = 1; in handle_stat0_irqs_superspeed()
3088 handle_ep_small(&dev->ep[index]); in usb338x_handle_ep_intr()
3095 u32 num, scratch; in handle_stat0_irqs() local
3112 if (dev->gadget.speed == USB_SPEED_UNKNOWN) { in handle_stat0_irqs()
3113 u32 val = readl(&dev->usb->usbstat); in handle_stat0_irqs()
3115 dev->gadget.speed = USB_SPEED_SUPER; in handle_stat0_irqs()
3116 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, in handle_stat0_irqs()
3119 dev->gadget.speed = USB_SPEED_HIGH; in handle_stat0_irqs()
3120 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, in handle_stat0_irqs()
3123 dev->gadget.speed = USB_SPEED_FULL; in handle_stat0_irqs()
3124 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, in handle_stat0_irqs()
3127 net2280_led_speed(dev, dev->gadget.speed); in handle_stat0_irqs()
3129 usb_speed_string(dev->gadget.speed)); in handle_stat0_irqs()
3132 ep = &dev->ep[0]; in handle_stat0_irqs()
3133 ep->irqs++; in handle_stat0_irqs()
3137 while (!list_empty(&ep->queue)) { in handle_stat0_irqs()
3138 req = list_entry(ep->queue.next, in handle_stat0_irqs()
3140 done(ep, req, (req->req.actual == req->req.length) in handle_stat0_irqs()
3141 ? 0 : -EPROTO); in handle_stat0_irqs()
3143 ep->stopped = 0; in handle_stat0_irqs()
3144 dev->protocol_stall = 0; in handle_stat0_irqs()
3145 if (!(dev->quirks & PLX_PCIE)) { in handle_stat0_irqs()
3146 if (ep->dev->quirks & PLX_2280) in handle_stat0_irqs()
3164 &ep->regs->ep_stat); in handle_stat0_irqs()
3166 u.raw[0] = readl(&dev->usb->setup0123); in handle_stat0_irqs()
3167 u.raw[1] = readl(&dev->usb->setup4567); in handle_stat0_irqs()
3172 if ((dev->quirks & PLX_PCIE) && !dev->bug7734_patched) in handle_stat0_irqs()
3182 writel(BIT(SETUP_PACKET_INTERRUPT), &dev->regs->irqstat0); in handle_stat0_irqs()
3188 * that'll mean a lot less irqs for some drivers. in handle_stat0_irqs()
3190 ep->is_in = (u.r.bRequestType & USB_DIR_IN) != 0; in handle_stat0_irqs()
3191 if (ep->is_in) { in handle_stat0_irqs()
3200 writel(scratch, &dev->epregs[0].ep_irqenb); in handle_stat0_irqs()
3205 ep->responded = 1; in handle_stat0_irqs()
3207 if (dev->gadget.speed == USB_SPEED_SUPER) { in handle_stat0_irqs()
3224 if (readl(&e->regs->ep_rsp) & BIT(SET_ENDPOINT_HALT)) in handle_stat0_irqs()
3230 writel(0, &dev->epregs[0].ep_irqenb); in handle_stat0_irqs()
3232 writel((__force u32)status, &dev->epregs[0].ep_data); in handle_stat0_irqs()
3234 ep_vdbg(dev, "%s stat %02x\n", ep->ep.name, status); in handle_stat0_irqs()
3249 if (e->wedged) { in handle_stat0_irqs()
3251 ep->ep.name); in handle_stat0_irqs()
3253 ep_vdbg(dev, "%s clear halt\n", e->ep.name); in handle_stat0_irqs()
3255 if ((ep->dev->quirks & PLX_PCIE) && in handle_stat0_irqs()
3256 !list_empty(&e->queue) && e->td_dma) in handle_stat0_irqs()
3274 if (e->ep.name == ep0name) in handle_stat0_irqs()
3277 if ((dev->quirks & PLX_PCIE) && e->dma) in handle_stat0_irqs()
3280 ep_vdbg(dev, "%s set halt\n", ep->ep.name); in handle_stat0_irqs()
3290 readl(&ep->cfg->ep_cfg)); in handle_stat0_irqs()
3291 ep->responded = 0; in handle_stat0_irqs()
3292 if (dev->async_callbacks) { in handle_stat0_irqs()
3293 spin_unlock(&dev->lock); in handle_stat0_irqs()
3294 tmp = dev->driver->setup(&dev->gadget, &u.r); in handle_stat0_irqs()
3295 spin_lock(&dev->lock); in handle_stat0_irqs()
3304 dev->protocol_stall = 1; in handle_stat0_irqs()
3318 if ((dev->quirks & PLX_PCIE) && dev->enhanced_mode) { in handle_stat0_irqs()
3331 for (num = 0; scratch; num++) { in handle_stat0_irqs()
3335 t = BIT(num); in handle_stat0_irqs()
3340 ep = &dev->ep[num]; in handle_stat0_irqs()
3359 __releases(dev->lock) in handle_stat1_irqs()
3360 __acquires(dev->lock) in handle_stat1_irqs()
3363 u32 tmp, num, mask, scratch; in handle_stat1_irqs() local
3382 writel(tmp, &dev->regs->irqstat1); in handle_stat1_irqs()
3383 if (dev->gadget.speed != USB_SPEED_UNKNOWN) { in handle_stat1_irqs()
3385 (readl(&dev->usb->usbctl) & in handle_stat1_irqs()
3389 dev->driver->driver.name); in handle_stat1_irqs()
3391 (readl(&dev->usb->usbstat) & mask) in handle_stat1_irqs()
3395 dev->driver->driver.name); in handle_stat1_irqs()
3399 stop_activity(dev, dev->driver); in handle_stat1_irqs()
3401 if (dev->async_callbacks) { in handle_stat1_irqs()
3402 spin_unlock(&dev->lock); in handle_stat1_irqs()
3404 usb_gadget_udc_reset(&dev->gadget, dev->driver); in handle_stat1_irqs()
3406 (dev->driver->disconnect)(&dev->gadget); in handle_stat1_irqs()
3407 spin_lock(&dev->lock); in handle_stat1_irqs()
3426 writel(tmp, &dev->regs->irqstat1); in handle_stat1_irqs()
3427 spin_unlock(&dev->lock); in handle_stat1_irqs()
3429 if (dev->async_callbacks && dev->driver->suspend) in handle_stat1_irqs()
3430 dev->driver->suspend(&dev->gadget); in handle_stat1_irqs()
3434 if (dev->async_callbacks && dev->driver->resume) in handle_stat1_irqs()
3435 dev->driver->resume(&dev->gadget); in handle_stat1_irqs()
3438 spin_lock(&dev->lock); in handle_stat1_irqs()
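
A pattern that recurs in these handlers is dropping the UDC spinlock around calls up into the bound gadget driver (setup, disconnect, suspend, resume), guarded by dev->async_callbacks. A kernel-context sketch of that shape, assuming nothing beyond what the matches above show; my_dev and notify_suspend() are illustrative names.

#include <linux/spinlock.h>
#include <linux/types.h>

struct my_dev {
	spinlock_t lock;
	bool async_callbacks;
	void (*suspend)(void *ctx);	/* stand-in for dev->driver->suspend */
	void *ctx;
};

/* called with dev->lock held; returns with it held again */
static void notify_suspend(struct my_dev *dev)
{
	if (dev->async_callbacks && dev->suspend) {
		spin_unlock(&dev->lock);	/* callback may call back into the UDC */
		dev->suspend(dev->ctx);
		spin_lock(&dev->lock);
	}
}
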
3442 /* clear any other status/irqs */ in handle_stat1_irqs()
3444 writel(stat, &dev->regs->irqstat1); in handle_stat1_irqs()
3447 if (dev->quirks & PLX_2280) in handle_stat1_irqs()
3462 /* DMA status, for ep-{a,b,c,d} */ in handle_stat1_irqs()
3466 for (num = 0; scratch; num++) { in handle_stat1_irqs()
3469 tmp = BIT(num); in handle_stat1_irqs()
3474 ep = &dev->ep[num + 1]; in handle_stat1_irqs()
3475 dma = ep->dma; in handle_stat1_irqs()
3481 tmp = readl(&dma->dmastat); in handle_stat1_irqs()
3482 writel(tmp, &dma->dmastat); in handle_stat1_irqs()
3485 if (dev->quirks & PLX_PCIE) { in handle_stat1_irqs()
3486 u32 r_dmacount = readl(&dma->dmacount); in handle_stat1_irqs()
3487 if (!ep->is_in && (r_dmacount & 0x00FFFFFF) && in handle_stat1_irqs()
3493 ep_dbg(ep->dev, "%s no xact done? %08x\n", in handle_stat1_irqs()
3494 ep->ep.name, tmp); in handle_stat1_irqs()
3497 stop_dma(ep->dma); in handle_stat1_irqs()
3502 * less than req->length. NAK_OUT_PACKETS may be set, in handle_stat1_irqs()
3507 * precisely (like PIO does) needs per-packet irqs in handle_stat1_irqs()
3512 if (!list_empty(&ep->queue)) { in handle_stat1_irqs()
3513 tmp = readl(&dma->dmactl); in handle_stat1_irqs()
3516 ep->irqs++; in handle_stat1_irqs()
3528 stop_activity(dev, dev->driver); in handle_stat1_irqs()
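
The dmastat read followed by an immediate write-back earlier in handle_stat1_irqs() is an acknowledge-what-you-saw pattern in the usual write-one-to-clear style: only the events the handler observed are cleared, so anything that latches afterwards stays pending for the next interrupt. A small user-space sketch under that assumption, with reg_read()/reg_write() standing in for readl()/writel().

#include <stdint.h>
#include <stdio.h>

static uint32_t dmastat = 0x30;		/* pretend two events are latched */

static uint32_t reg_read(volatile uint32_t *r)
{
	return *r;
}

static void reg_write(uint32_t val, volatile uint32_t *r)
{
	*r &= ~val;			/* write-one-to-clear behaviour */
}

int main(void)
{
	uint32_t tmp = reg_read(&dmastat);

	reg_write(tmp, &dmastat);	/* ack exactly what was observed */
	printf("handled %#x, still pending %#x\n",
	       (unsigned int)tmp, (unsigned int)reg_read(&dmastat));
	return 0;
}
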
3542 if ((dev->quirks & PLX_LEGACY) && in net2280_irq()
3543 (!(readl(&dev->regs->irqstat0) & BIT(INTA_ASSERTED)))) in net2280_irq()
3546 spin_lock(&dev->lock); in net2280_irq()
3549 handle_stat1_irqs(dev, readl(&dev->regs->irqstat1)); in net2280_irq()
3552 handle_stat0_irqs(dev, readl(&dev->regs->irqstat0)); in net2280_irq()
3554 if (dev->quirks & PLX_PCIE) { in net2280_irq()
3555 /* re-enable interrupt to trigger any possible new interrupt */ in net2280_irq()
3556 u32 pciirqenb1 = readl(&dev->regs->pciirqenb1); in net2280_irq()
3557 writel(pciirqenb1 & 0x7FFFFFFF, &dev->regs->pciirqenb1); in net2280_irq()
3558 writel(pciirqenb1, &dev->regs->pciirqenb1); in net2280_irq()
3561 spin_unlock(&dev->lock); in net2280_irq()
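
Two things stand out in the net2280_irq() matches: legacy parts bail out with IRQ_NONE unless the chip reports INTA actually asserted (cheap sharing of the interrupt line), and PLX_PCIE parts briefly drop and restore the top bit of pciirqenb1 so any event that raced the handler raises a fresh interrupt. A user-space sketch of both ideas; the bit position, the enable value, and fake_isr() are illustrative only, and the plain stores below stand in for writel() to a device register.

#include <stdint.h>
#include <stdio.h>

#define INTA_ASSERTED	(1u << 25)		/* placeholder bit position */

enum irqreturn { IRQ_NONE, IRQ_HANDLED };

static uint32_t pciirqenb1 = 0x80000f0f;	/* pretend master-enable word */

static enum irqreturn fake_isr(uint32_t irqstat0)
{
	uint32_t enb;

	if (!(irqstat0 & INTA_ASSERTED))
		return IRQ_NONE;	/* the shared line fired for someone else */

	/* ... service stat1 sources, then stat0 sources ... */

	/* drop and restore the top enable bit so a racing event retriggers */
	enb = pciirqenb1;
	pciirqenb1 = enb & 0x7FFFFFFF;
	pciirqenb1 = enb;

	return IRQ_HANDLED;
}

int main(void)
{
	printf("foreign interrupt -> %d\n", fake_isr(0));
	printf("our interrupt     -> %d\n", fake_isr(INTA_ASSERTED));
	return 0;
}
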
3566 /*-------------------------------------------------------------------------*/
3581 if (dev->added) in net2280_remove()
3582 usb_del_gadget(&dev->gadget); in net2280_remove()
3584 BUG_ON(dev->driver); in net2280_remove()
3587 if (dev->requests) { in net2280_remove()
3590 if (!dev->ep[i].dummy) in net2280_remove()
3592 dma_pool_free(dev->requests, dev->ep[i].dummy, in net2280_remove()
3593 dev->ep[i].td_dma); in net2280_remove()
3595 dma_pool_destroy(dev->requests); in net2280_remove()
3597 if (dev->got_irq) in net2280_remove()
3598 free_irq(pdev->irq, dev); in net2280_remove()
3599 if (dev->quirks & PLX_PCIE) in net2280_remove()
3601 if (dev->regs) { in net2280_remove()
3603 iounmap(dev->regs); in net2280_remove()
3605 if (dev->region) in net2280_remove()
3608 if (dev->enabled) in net2280_remove()
3610 device_remove_file(&pdev->dev, &dev_attr_registers); in net2280_remove()
3613 usb_put_gadget(&dev->gadget); in net2280_remove()
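
net2280_remove() releases resources in roughly the reverse of the order probe acquired them, and each release is guarded by a flag (got_irq, region, enabled, ...) that probe sets only after the corresponding acquisition succeeded, so the same path serves both normal removal and cleanup after a partial probe. A compressed user-space sketch of that guarded-teardown shape; the struct and the printed names are illustrative, not driver symbols.

#include <stdbool.h>
#include <stdio.h>

struct fake_dev {
	bool got_irq;
	bool region;
	bool enabled;
};

/* release only what probe actually acquired, in reverse order */
static void fake_remove(struct fake_dev *dev)
{
	if (dev->got_irq)
		printf("free_irq()\n");
	if (dev->region)
		printf("release_mem_region()\n");
	if (dev->enabled)
		printf("pci_disable_device()\n");
}

int main(void)
{
	/* as if probe failed after grabbing the irq but before the region */
	struct fake_dev dev = { .got_irq = true, .region = false, .enabled = true };

	fake_remove(&dev);
	return 0;
}
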
3630 retval = -ENOMEM; in net2280_probe()
3635 usb_initialize_gadget(&pdev->dev, &dev->gadget, gadget_release); in net2280_probe()
3636 spin_lock_init(&dev->lock); in net2280_probe()
3637 dev->quirks = id->driver_data; in net2280_probe()
3638 dev->pdev = pdev; in net2280_probe()
3639 dev->gadget.ops = &net2280_ops; in net2280_probe()
3640 dev->gadget.max_speed = (dev->quirks & PLX_SUPERSPEED) ? in net2280_probe()
3644 dev->gadget.name = driver_name; in net2280_probe()
3648 retval = -ENODEV; in net2280_probe()
3651 dev->enabled = 1; in net2280_probe()
3661 retval = -EBUSY; in net2280_probe()
3664 dev->region = 1; in net2280_probe()
3673 retval = -EFAULT; in net2280_probe()
3676 dev->regs = (struct net2280_regs __iomem *) base; in net2280_probe()
3677 dev->usb = (struct net2280_usb_regs __iomem *) (base + 0x0080); in net2280_probe()
3678 dev->pci = (struct net2280_pci_regs __iomem *) (base + 0x0100); in net2280_probe()
3679 dev->dma = (struct net2280_dma_regs __iomem *) (base + 0x0180); in net2280_probe()
3680 dev->dep = (struct net2280_dep_regs __iomem *) (base + 0x0200); in net2280_probe()
3681 dev->epregs = (struct net2280_ep_regs __iomem *) (base + 0x0300); in net2280_probe()
3683 if (dev->quirks & PLX_PCIE) { in net2280_probe()
3686 dev->usb_ext = (struct usb338x_usb_ext_regs __iomem *) in net2280_probe()
3688 dev->llregs = (struct usb338x_ll_regs __iomem *) in net2280_probe()
3690 dev->plregs = (struct usb338x_pl_regs __iomem *) in net2280_probe()
3692 usbstat = readl(&dev->usb->usbstat); in net2280_probe()
3693 dev->enhanced_mode = !!(usbstat & BIT(11)); in net2280_probe()
3694 dev->n_ep = (dev->enhanced_mode) ? 9 : 5; in net2280_probe()
3696 fsmvalue = get_idx_reg(dev->regs, SCRATCH) & in net2280_probe()
3700 dev->bug7734_patched = 1; in net2280_probe()
3701 writel(0, &dev->usb->usbctl); in net2280_probe()
3703 dev->bug7734_patched = 0; in net2280_probe()
3705 dev->enhanced_mode = 0; in net2280_probe()
3706 dev->n_ep = 7; in net2280_probe()
3708 writel(0, &dev->usb->usbctl); in net2280_probe()
3715 if (!pdev->irq) { in net2280_probe()
3717 retval = -ENODEV; in net2280_probe()
3721 if (dev->quirks & PLX_PCIE) in net2280_probe()
3725 if (request_irq(pdev->irq, net2280_irq, IRQF_SHARED, in net2280_probe()
3727 ep_err(dev, "request interrupt %d failed\n", pdev->irq); in net2280_probe()
3728 retval = -EBUSY; in net2280_probe()
3731 dev->got_irq = 1; in net2280_probe()
3735 dev->requests = dma_pool_create("requests", &pdev->dev, in net2280_probe()
3738 0 /* or page-crossing issues */); in net2280_probe()
3739 if (!dev->requests) { in net2280_probe()
3741 retval = -ENOMEM; in net2280_probe()
3747 td = dma_pool_alloc(dev->requests, GFP_KERNEL, in net2280_probe()
3748 &dev->ep[i].td_dma); in net2280_probe()
3751 retval = -ENOMEM; in net2280_probe()
3754 td->dmacount = 0; /* not VALID */ in net2280_probe()
3755 td->dmadesc = td->dmaaddr; in net2280_probe()
3756 dev->ep[i].dummy = td; in net2280_probe()
3759 /* enable lower-overhead pci memory bursts during DMA */ in net2280_probe()
3760 if (dev->quirks & PLX_LEGACY) in net2280_probe()
3768 &dev->pci->pcimstctl); in net2280_probe()
3774 dev->chiprev = get_idx_reg(dev->regs, REG_CHIPREV) & 0xffff; in net2280_probe()
3779 pdev->irq, base, dev->chiprev); in net2280_probe()
3781 dev->enhanced_mode ? "enhanced mode" : "legacy mode"); in net2280_probe()
3782 retval = device_create_file(&pdev->dev, &dev_attr_registers); in net2280_probe()
3786 retval = usb_add_gadget(&dev->gadget); in net2280_probe()
3789 dev->added = 1; in net2280_probe()
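
Probe's descriptor setup above uses the kernel dma_pool API: one pool of fixed-size DMA-coherent blocks (no alignment or boundary restrictions, matching the zero arguments shown), from which each endpoint gets a "dummy" descriptor whose dmacount is left invalid and whose chain pointer refers to itself. A kernel-context sketch under those assumptions; my_desc and my_setup_pool() are illustrative names, not driver symbols, and the descriptor layout is only shaped like the real one.

#include <linux/dmapool.h>
#include <linux/device.h>
#include <linux/gfp.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <asm/byteorder.h>

struct my_desc {		/* shaped like a small four-word descriptor */
	__le32 dmacount;
	__le32 dmaaddr;
	__le32 dmadesc;
	__le32 _reserved;
};

static int my_setup_pool(struct device *dev, struct dma_pool **pool,
			 struct my_desc **td, dma_addr_t *td_dma)
{
	/* fixed element size, no alignment or page-crossing restrictions */
	*pool = dma_pool_create("requests", dev, sizeof(struct my_desc), 0, 0);
	if (!*pool)
		return -ENOMEM;

	*td = dma_pool_alloc(*pool, GFP_KERNEL, td_dma);
	if (!*td) {
		dma_pool_destroy(*pool);
		*pool = NULL;
		return -ENOMEM;
	}

	(*td)->dmacount = 0;	/* descriptor not VALID until a transfer uses it */
	(*td)->dmadesc = cpu_to_le32((u32)*td_dma);	/* chain points at itself */
	return 0;
}

Teardown is the mirror image: dma_pool_free() each descriptor back into the pool, then dma_pool_destroy() the pool itself, as the remove path above does.
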
3801 * generating IRQs across the upcoming reboot.
3808 /* disable IRQs */ in net2280_shutdown()
3809 writel(0, &dev->regs->pciirqenb0); in net2280_shutdown()
3810 writel(0, &dev->regs->pciirqenb1); in net2280_shutdown()
3813 writel(0, &dev->usb->usbctl); in net2280_shutdown()
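
The shutdown hook above just silences the hardware: both PCI interrupt-enable words are cleared so nothing raises an IRQ across the reboot, and usbctl is zeroed so the host sees the device leave the bus. A kernel-context sketch of that shape; the register layout and all names here are placeholders, not the net2280 map.

#include <linux/pci.h>
#include <linux/io.h>
#include <linux/types.h>

struct my_regs {	/* placeholder layout, not the net2280 register map */
	u32 irqenb0;
	u32 irqenb1;
	u32 usbctl;
};

struct my_dev {
	struct my_regs __iomem *regs;
};

/* wired up as the .shutdown member of the driver's struct pci_driver */
static void my_shutdown(struct pci_dev *pdev)
{
	struct my_dev *dev = pci_get_drvdata(pdev);

	writel(0, &dev->regs->irqenb0);	/* no interrupts across the reboot */
	writel(0, &dev->regs->irqenb1);
	writel(0, &dev->regs->usbctl);	/* stop presenting ourselves to the host */
}
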
3818 /*-------------------------------------------------------------------------*/