Lines Matching +full:disable +full:- +full:hibernation +full:- +full:mode

1 // SPDX-License-Identifier: GPL-2.0
11 * S3C USB2.0 High-speed / OTG driver
19 #include <linux/dma-mapping.h>
65 return hsotg->eps_in[ep_index]; in index_to_ep()
67 return hsotg->eps_out[ep_index]; in index_to_ep()
74 * using_dma - return the DMA status of the driver.
94 return hsotg->params.g_dma; in using_dma()
98 * using_desc_dma - return the descriptor DMA status of the driver.
105 return hsotg->params.g_dma_desc; in using_desc_dma()
109 * dwc2_gadget_incr_frame_num - Increments the targeted frame number.
117 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_incr_frame_num()
120 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_incr_frame_num()
123 hs_ep->target_frame += hs_ep->interval; in dwc2_gadget_incr_frame_num()
124 if (hs_ep->target_frame > limit) { in dwc2_gadget_incr_frame_num()
125 hs_ep->frame_overrun = true; in dwc2_gadget_incr_frame_num()
126 hs_ep->target_frame &= limit; in dwc2_gadget_incr_frame_num()
128 hs_ep->frame_overrun = false; in dwc2_gadget_incr_frame_num()
133 * dwc2_gadget_dec_frame_num_by_one - Decrements the targeted frame number
138 * descriptor frame number field value. For service interval mode frame
144 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_dec_frame_num_by_one()
147 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_dec_frame_num_by_one()
150 if (hs_ep->target_frame) in dwc2_gadget_dec_frame_num_by_one()
151 hs_ep->target_frame -= 1; in dwc2_gadget_dec_frame_num_by_one()
153 hs_ep->target_frame = limit; in dwc2_gadget_dec_frame_num_by_one()
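The two helpers above advance and rewind an isochronous endpoint's target (micro)frame while keeping it inside the frame-counter field. A minimal sketch of that wrap logic, assuming limit is the speed-dependent frame-number mask (the exact constant is not visible in these fragments):

static void incr_target_frame_sketch(u32 *target_frame, bool *frame_overrun,
				     u32 interval, u32 limit)
{
	*target_frame += interval;
	if (*target_frame > limit) {
		*frame_overrun = true;
		*target_frame &= limit;	/* wrap into the frame-number field */
	} else {
		*frame_overrun = false;
	}
}

static void dec_target_frame_sketch(u32 *target_frame, u32 limit)
{
	if (*target_frame)
		*target_frame -= 1;
	else
		*target_frame = limit;	/* step back across the wrap point */
}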
157 * dwc2_hsotg_en_gsint - enable one or more of the general interrupt
169 dev_dbg(hsotg->dev, "gsintmsk now 0x%08x\n", new_gsintmsk); in dwc2_hsotg_en_gsint()
175 * dwc2_hsotg_disable_gsint - disable one or more of the general interrupt
191 * dwc2_hsotg_ctrl_epint - enable/disable an endpoint irq
222 * dwc2_hsotg_tx_fifo_count - return count of TX FIFOs in device mode
228 if (hsotg->hw_params.en_multiple_tx_fifo) in dwc2_hsotg_tx_fifo_count()
229 /* In dedicated FIFO mode we need count of IN EPs */ in dwc2_hsotg_tx_fifo_count()
230 return hsotg->hw_params.num_dev_in_eps; in dwc2_hsotg_tx_fifo_count()
232 /* In shared FIFO mode we need count of Periodic IN EPs */ in dwc2_hsotg_tx_fifo_count()
233 return hsotg->hw_params.num_dev_perio_in_ep; in dwc2_hsotg_tx_fifo_count()
237 * dwc2_hsotg_tx_fifo_total_depth - return total FIFO depth available for
238 * device mode TX FIFOs
248 np_tx_fifo_size = min_t(u32, hsotg->hw_params.dev_nperio_tx_fifo_size, in dwc2_hsotg_tx_fifo_total_depth()
249 hsotg->params.g_np_tx_fifo_size); in dwc2_hsotg_tx_fifo_total_depth()
252 tx_addr_max = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
254 addr = hsotg->params.g_rx_fifo_size + np_tx_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
258 return tx_addr_max - addr; in dwc2_hsotg_tx_fifo_total_depth()
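A minimal sketch of the arithmetic visible above: the depth left for device-mode TX FIFOs is the total FIFO RAM minus the RX FIFO and non-periodic TX FIFO already carved out of it. Parameter names are illustrative, not the driver's own fields:

static u32 tx_fifo_total_depth_sketch(u32 total_fifo_size, u32 rx_fifo_size,
				      u32 np_tx_fifo_size)
{
	u32 addr = rx_fifo_size + np_tx_fifo_size;	/* space already allocated */

	return total_fifo_size - addr;			/* depth left for TX FIFOs */
}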
262 * dwc2_gadget_wkup_alert_handler - Handler for WKUP_ALERT interrupt
277 dev_dbg(hsotg->dev, "%s: Wkup_Alert_Int\n", __func__); in dwc2_gadget_wkup_alert_handler()
284 * dwc2_hsotg_tx_fifo_average_depth - returns average depth of device mode
305 * dwc2_hsotg_init_fifo - initialise non-periodic FIFOs
315 u32 *txfsz = hsotg->params.g_tx_fifo_size; in dwc2_hsotg_init_fifo()
318 WARN_ON(hsotg->fifo_map); in dwc2_hsotg_init_fifo()
319 hsotg->fifo_map = 0; in dwc2_hsotg_init_fifo()
322 dwc2_writel(hsotg, hsotg->params.g_rx_fifo_size, GRXFSIZ); in dwc2_hsotg_init_fifo()
323 dwc2_writel(hsotg, (hsotg->params.g_rx_fifo_size << in dwc2_hsotg_init_fifo()
325 (hsotg->params.g_np_tx_fifo_size << FIFOSIZE_DEPTH_SHIFT), in dwc2_hsotg_init_fifo()
336 addr = hsotg->params.g_rx_fifo_size + hsotg->params.g_np_tx_fifo_size; in dwc2_hsotg_init_fifo()
348 WARN_ONCE(addr + txfsz[ep] > hsotg->fifo_mem, in dwc2_hsotg_init_fifo()
356 dwc2_writel(hsotg, hsotg->hw_params.total_fifo_size | in dwc2_hsotg_init_fifo()
375 if (--timeout == 0) { in dwc2_hsotg_init_fifo()
376 dev_err(hsotg->dev, in dwc2_hsotg_init_fifo()
385 dev_dbg(hsotg->dev, "FIFOs reset, timeout at %d\n", timeout); in dwc2_hsotg_init_fifo()
389 * dwc2_hsotg_ep_alloc_request - allocate USB request structure
404 INIT_LIST_HEAD(&req->queue); in dwc2_hsotg_ep_alloc_request()
406 return &req->req; in dwc2_hsotg_ep_alloc_request()
410 * is_ep_periodic - return true if the endpoint is in periodic mode.
413 * Returns true if the endpoint is in periodic mode, meaning it is being
418 return hs_ep->periodic; in is_ep_periodic()
422 * dwc2_hsotg_unmap_dma - unmap the DMA memory being used for the request
434 struct usb_request *req = &hs_req->req; in dwc2_hsotg_unmap_dma()
436 usb_gadget_unmap_request(&hsotg->gadget, req, hs_ep->map_dir); in dwc2_hsotg_unmap_dma()
440 * dwc2_gadget_alloc_ctrl_desc_chains - allocate DMA descriptor chains
449 hsotg->setup_desc[0] = in dwc2_gadget_alloc_ctrl_desc_chains()
450 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
452 &hsotg->setup_desc_dma[0], in dwc2_gadget_alloc_ctrl_desc_chains()
454 if (!hsotg->setup_desc[0]) in dwc2_gadget_alloc_ctrl_desc_chains()
457 hsotg->setup_desc[1] = in dwc2_gadget_alloc_ctrl_desc_chains()
458 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
460 &hsotg->setup_desc_dma[1], in dwc2_gadget_alloc_ctrl_desc_chains()
462 if (!hsotg->setup_desc[1]) in dwc2_gadget_alloc_ctrl_desc_chains()
465 hsotg->ctrl_in_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
466 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
468 &hsotg->ctrl_in_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
470 if (!hsotg->ctrl_in_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
473 hsotg->ctrl_out_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
474 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
476 &hsotg->ctrl_out_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
478 if (!hsotg->ctrl_out_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
484 return -ENOMEM; in dwc2_gadget_alloc_ctrl_desc_chains()
488 * dwc2_hsotg_write_fifo - write packet Data to the TxFIFO
499 * otherwise -ENOSPC is returned if the FIFO space was used up.
509 int buf_pos = hs_req->req.actual; in dwc2_hsotg_write_fifo()
510 int to_write = hs_ep->size_loaded; in dwc2_hsotg_write_fifo()
516 to_write -= (buf_pos - hs_ep->last_load); in dwc2_hsotg_write_fifo()
522 if (periodic && !hsotg->dedicated_fifos) { in dwc2_hsotg_write_fifo()
523 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_write_fifo()
538 if (hs_ep->fifo_load != 0) { in dwc2_hsotg_write_fifo()
540 return -ENOSPC; in dwc2_hsotg_write_fifo()
543 dev_dbg(hsotg->dev, "%s: left=%d, load=%d, fifo=%d, size %d\n", in dwc2_hsotg_write_fifo()
545 hs_ep->size_loaded, hs_ep->fifo_load, hs_ep->fifo_size); in dwc2_hsotg_write_fifo()
548 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_write_fifo()
551 can_write = hs_ep->fifo_load - size_done; in dwc2_hsotg_write_fifo()
552 dev_dbg(hsotg->dev, "%s: => can_write1=%d\n", in dwc2_hsotg_write_fifo()
555 can_write = hs_ep->fifo_size - can_write; in dwc2_hsotg_write_fifo()
556 dev_dbg(hsotg->dev, "%s: => can_write2=%d\n", in dwc2_hsotg_write_fifo()
561 return -ENOSPC; in dwc2_hsotg_write_fifo()
563 } else if (hsotg->dedicated_fifos && hs_ep->index != 0) { in dwc2_hsotg_write_fifo()
565 DTXFSTS(hs_ep->fifo_index)); in dwc2_hsotg_write_fifo()
571 dev_dbg(hsotg->dev, in dwc2_hsotg_write_fifo()
576 return -ENOSPC; in dwc2_hsotg_write_fifo()
583 max_transfer = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_write_fifo()
585 dev_dbg(hsotg->dev, "%s: GNPTXSTS=%08x, can=%d, to=%d, max_transfer %d\n", in dwc2_hsotg_write_fifo()
589 * limit to 512 bytes of data, it seems at least on the non-periodic in dwc2_hsotg_write_fifo()
597 * limit the write to one max-packet size worth of data, but allow in dwc2_hsotg_write_fifo()
605 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
626 to_write -= pkt_round; in dwc2_hsotg_write_fifo()
634 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
640 dev_dbg(hsotg->dev, "write %d/%d, can_write %d, done %d\n", in dwc2_hsotg_write_fifo()
641 to_write, hs_req->req.length, can_write, buf_pos); in dwc2_hsotg_write_fifo()
644 return -ENOSPC; in dwc2_hsotg_write_fifo()
646 hs_req->req.actual = buf_pos + to_write; in dwc2_hsotg_write_fifo()
647 hs_ep->total_data += to_write; in dwc2_hsotg_write_fifo()
650 hs_ep->fifo_load += to_write; in dwc2_hsotg_write_fifo()
653 data = hs_req->req.buf + buf_pos; in dwc2_hsotg_write_fifo()
655 dwc2_writel_rep(hsotg, EPFIFO(hs_ep->index), data, to_write); in dwc2_hsotg_write_fifo()
657 return (to_write >= can_write) ? -ENOSPC : 0; in dwc2_hsotg_write_fifo()
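dwc2_hsotg_write_fifo() above tracks how much of a shared periodic FIFO is still occupied before loading more data. A hedged sketch of that accounting, using hypothetical parameters in place of the endpoint state fields:

static int fifo_space_sketch(u32 fifo_size, u32 fifo_load,
			     u32 size_loaded, u32 size_left)
{
	u32 size_done = size_loaded - size_left;	/* already drained by the core */
	u32 pending = fifo_load - size_done;		/* loaded but still in the FIFO */

	if (pending >= fifo_size)
		return -ENOSPC;				/* no room for another load yet */

	return fifo_size - pending;			/* bytes that may be written now */
}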
661 * get_ep_limit - get the maximum data length for this endpoint
669 int index = hs_ep->index; in get_ep_limit()
678 if (hs_ep->dir_in) in get_ep_limit()
685 maxpkt--; in get_ep_limit()
686 maxsize--; in get_ep_limit()
693 if ((maxpkt * hs_ep->ep.maxpacket) < maxsize) in get_ep_limit()
694 maxsize = maxpkt * hs_ep->ep.maxpacket; in get_ep_limit()
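get_ep_limit() above clamps a transfer to what the endpoint's transfer-size register can describe. A small sketch of that clamp, with maxsize and maxpkt standing in for the register field limits shown in the fragments:

static unsigned int ep_limit_sketch(unsigned int maxsize, unsigned int maxpkt,
				    unsigned int ep_maxpacket)
{
	/* the packet-count field may be the tighter of the two limits */
	if (maxpkt * ep_maxpacket < maxsize)
		maxsize = maxpkt * ep_maxpacket;

	return maxsize;
}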
700 * dwc2_hsotg_read_frameno - read current frame number
717 * dwc2_gadget_get_chain_limit - get the maximum data payload value of the
727 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_chain_limit()
728 int is_isoc = hs_ep->isochronous; in dwc2_gadget_get_chain_limit()
730 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_chain_limit()
731 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_chain_limit()
734 maxsize = (hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_LIMIT : in dwc2_gadget_get_chain_limit()
741 if (hs_ep->index) in dwc2_gadget_get_chain_limit()
749 * dwc2_gadget_get_desc_params - get DMA descriptor parameters.
756 * Control out - MPS,
757 * Isochronous - descriptor rx/tx bytes bitfield limit,
758 * Control In/Bulk/Interrupt - multiple of mps. This will allow to not
760 * Interrupt OUT - if mps not multiple of 4 then a single packet corresponds
767 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_desc_params()
768 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_desc_params()
769 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_desc_params()
772 if (!hs_ep->index && !dir_in) { in dwc2_gadget_get_desc_params()
775 } else if (hs_ep->isochronous) { in dwc2_gadget_get_desc_params()
788 desc_size -= desc_size % mps; in dwc2_gadget_get_desc_params()
792 if (hs_ep->index) in dwc2_gadget_get_desc_params()
807 int dir_in = hs_ep->dir_in; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
808 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
816 hs_ep->desc_count = (len / maxsize) + in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
819 hs_ep->desc_count = 1; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
821 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
822 (*desc)->status = 0; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
823 (*desc)->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
827 if (!hs_ep->index && !dir_in) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
828 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
830 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
832 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
834 len -= maxsize; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
838 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
841 (*desc)->status |= (len % mps) ? DEV_DMA_SHORT : in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
842 ((hs_ep->send_zlp && true_last) ? in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
845 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
847 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
850 (*desc)->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
851 (*desc)->status |= (DEV_DMA_BUFF_STS_HREADY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
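The descriptor-chain fill above first works out how many descriptors the buffer needs. A sketch of that count, assuming maxsize is the per-descriptor payload limit returned by dwc2_gadget_get_desc_params():

static unsigned int nonisoc_desc_count_sketch(unsigned int len, unsigned int maxsize)
{
	if (!len)
		return 1;	/* a zero-length transfer still takes one descriptor */

	return len / maxsize + (len % maxsize ? 1 : 0);
}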
858 * dwc2_gadget_config_nonisoc_xfer_ddma - prepare non ISOC DMA desc chain.
872 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_config_nonisoc_xfer_ddma()
877 if (hs_ep->req) in dwc2_gadget_config_nonisoc_xfer_ddma()
878 ureq = &hs_ep->req->req; in dwc2_gadget_config_nonisoc_xfer_ddma()
880 /* non-DMA sg buffer */ in dwc2_gadget_config_nonisoc_xfer_ddma()
881 if (!ureq || !ureq->num_sgs) { in dwc2_gadget_config_nonisoc_xfer_ddma()
888 for_each_sg(ureq->sg, sg, ureq->num_mapped_sgs, i) { in dwc2_gadget_config_nonisoc_xfer_ddma()
890 sg_dma_address(sg) + sg->offset, sg_dma_len(sg), in dwc2_gadget_config_nonisoc_xfer_ddma()
891 (i == (ureq->num_mapped_sgs - 1))); in dwc2_gadget_config_nonisoc_xfer_ddma()
892 desc_count += hs_ep->desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
895 hs_ep->desc_count = desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
899 * dwc2_gadget_fill_isoc_desc - fills next isochronous descriptor in chain.
913 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_fill_isoc_desc()
920 index = hs_ep->next_desc; in dwc2_gadget_fill_isoc_desc()
921 desc = &hs_ep->desc_list[index]; in dwc2_gadget_fill_isoc_desc()
924 if ((desc->status >> DEV_DMA_BUFF_STS_SHIFT) == in dwc2_gadget_fill_isoc_desc()
926 dev_dbg(hsotg->dev, "%s: desc chain full\n", __func__); in dwc2_gadget_fill_isoc_desc()
931 if (hs_ep->next_desc) in dwc2_gadget_fill_isoc_desc()
932 hs_ep->desc_list[index - 1].status &= ~DEV_DMA_L; in dwc2_gadget_fill_isoc_desc()
934 dev_dbg(hsotg->dev, "%s: Filling ep %d, dir %s isoc desc # %d\n", in dwc2_gadget_fill_isoc_desc()
935 __func__, hs_ep->index, hs_ep->dir_in ? "in" : "out", index); in dwc2_gadget_fill_isoc_desc()
937 desc->status = 0; in dwc2_gadget_fill_isoc_desc()
938 desc->status |= (DEV_DMA_BUFF_STS_HBUSY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
940 desc->buf = dma_buff; in dwc2_gadget_fill_isoc_desc()
941 desc->status |= (DEV_DMA_L | DEV_DMA_IOC | in dwc2_gadget_fill_isoc_desc()
944 if (hs_ep->dir_in) { in dwc2_gadget_fill_isoc_desc()
946 pid = DIV_ROUND_UP(len, hs_ep->ep.maxpacket); in dwc2_gadget_fill_isoc_desc()
949 desc->status |= ((pid << DEV_DMA_ISOC_PID_SHIFT) & in dwc2_gadget_fill_isoc_desc()
951 ((len % hs_ep->ep.maxpacket) ? in dwc2_gadget_fill_isoc_desc()
953 ((hs_ep->target_frame << in dwc2_gadget_fill_isoc_desc()
958 desc->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_isoc_desc()
959 desc->status |= (DEV_DMA_BUFF_STS_HREADY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
962 if (hs_ep->dir_in) in dwc2_gadget_fill_isoc_desc()
966 hs_ep->next_desc++; in dwc2_gadget_fill_isoc_desc()
967 if (hs_ep->next_desc >= MAX_DMA_DESC_NUM_HS_ISOC) in dwc2_gadget_fill_isoc_desc()
968 hs_ep->next_desc = 0; in dwc2_gadget_fill_isoc_desc()
974 * dwc2_gadget_start_isoc_ddma - start isochronous transfer in DDMA
982 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_isoc_ddma()
984 int index = hs_ep->index; in dwc2_gadget_start_isoc_ddma()
992 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_start_isoc_ddma()
993 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_start_isoc_ddma()
994 dev_dbg(hsotg->dev, "%s: No requests in queue\n", __func__); in dwc2_gadget_start_isoc_ddma()
1000 desc = &hs_ep->desc_list[i]; in dwc2_gadget_start_isoc_ddma()
1001 desc->status = 0; in dwc2_gadget_start_isoc_ddma()
1002 desc->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_start_isoc_ddma()
1006 hs_ep->next_desc = 0; in dwc2_gadget_start_isoc_ddma()
1007 list_for_each_entry_safe(hs_req, treq, &hs_ep->queue, queue) { in dwc2_gadget_start_isoc_ddma()
1008 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_gadget_start_isoc_ddma()
1010 if (hs_req->req.num_sgs) { in dwc2_gadget_start_isoc_ddma()
1011 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_gadget_start_isoc_ddma()
1012 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_gadget_start_isoc_ddma()
1015 hs_req->req.length); in dwc2_gadget_start_isoc_ddma()
1020 hs_ep->compl_desc = 0; in dwc2_gadget_start_isoc_ddma()
1021 depctl = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_gadget_start_isoc_ddma()
1022 dma_reg = hs_ep->dir_in ? DIEPDMA(index) : DOEPDMA(index); in dwc2_gadget_start_isoc_ddma()
1025 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_gadget_start_isoc_ddma()
1039 * dwc2_hsotg_start_req - start a USB request from an endpoint's queue
1053 struct usb_request *ureq = &hs_req->req; in dwc2_hsotg_start_req()
1054 int index = hs_ep->index; in dwc2_hsotg_start_req()
1055 int dir_in = hs_ep->dir_in; in dwc2_hsotg_start_req()
1066 if (hs_ep->req && !continuing) { in dwc2_hsotg_start_req()
1067 dev_err(hsotg->dev, "%s: active request\n", __func__); in dwc2_hsotg_start_req()
1070 } else if (hs_ep->req != hs_req && continuing) { in dwc2_hsotg_start_req()
1071 dev_err(hsotg->dev, in dwc2_hsotg_start_req()
1082 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x, ep %d, dir %s\n", in dwc2_hsotg_start_req()
1084 hs_ep->dir_in ? "in" : "out"); in dwc2_hsotg_start_req()
1090 dev_warn(hsotg->dev, "%s: ep%d is stalled\n", __func__, index); in dwc2_hsotg_start_req()
1094 length = ureq->length - ureq->actual; in dwc2_hsotg_start_req()
1095 dev_dbg(hsotg->dev, "ureq->length:%d ureq->actual:%d\n", in dwc2_hsotg_start_req()
1096 ureq->length, ureq->actual); in dwc2_hsotg_start_req()
1104 int round = maxreq % hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1106 dev_dbg(hsotg->dev, "%s: length %d, max-req %d, r %d\n", in dwc2_hsotg_start_req()
1111 maxreq -= round; in dwc2_hsotg_start_req()
1117 packets = DIV_ROUND_UP(length, hs_ep->ep.maxpacket); in dwc2_hsotg_start_req()
1122 if (hs_ep->isochronous) in dwc2_hsotg_start_req()
1133 if (dir_in && ureq->zero && !continuing) { in dwc2_hsotg_start_req()
1135 if ((ureq->length >= hs_ep->ep.maxpacket) && in dwc2_hsotg_start_req()
1136 !(ureq->length % hs_ep->ep.maxpacket)) in dwc2_hsotg_start_req()
1137 hs_ep->send_zlp = 1; in dwc2_hsotg_start_req()
1143 dev_dbg(hsotg->dev, "%s: %d@%d/%d, 0x%08x => 0x%08x\n", in dwc2_hsotg_start_req()
1144 __func__, packets, length, ureq->length, epsize, epsize_reg); in dwc2_hsotg_start_req()
1147 hs_ep->req = hs_req; in dwc2_hsotg_start_req()
1151 u32 mps = hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1153 /* Adjust length: EP0 - MPS, other OUT EPs - multiple of MPS */ in dwc2_hsotg_start_req()
1158 length += (mps - (length % mps)); in dwc2_hsotg_start_req()
1162 offset = ureq->actual; in dwc2_hsotg_start_req()
1165 dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, ureq->dma + offset, in dwc2_hsotg_start_req()
1169 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1171 dev_dbg(hsotg->dev, "%s: %08x pad => 0x%08x\n", in dwc2_hsotg_start_req()
1172 __func__, (u32)hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1183 dwc2_writel(hsotg, ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1185 dev_dbg(hsotg->dev, "%s: %pad => 0x%08x\n", in dwc2_hsotg_start_req()
1186 __func__, &ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1190 if (hs_ep->isochronous) { in dwc2_hsotg_start_req()
1192 if (hs_ep->interval == 1) { in dwc2_hsotg_start_req()
1193 if (hs_ep->target_frame & 0x1) in dwc2_hsotg_start_req()
1200 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_hsotg_start_req()
1201 hs_req->req.actual = 0; in dwc2_hsotg_start_req()
1202 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_hsotg_start_req()
1209 dev_dbg(hsotg->dev, "ep0 state:%d\n", hsotg->ep0_state); in dwc2_hsotg_start_req()
1212 if (!(index == 0 && hsotg->ep0_state == DWC2_EP0_SETUP)) in dwc2_hsotg_start_req()
1215 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_start_req()
1223 hs_ep->size_loaded = length; in dwc2_hsotg_start_req()
1224 hs_ep->last_load = ureq->actual; in dwc2_hsotg_start_req()
1227 /* set these anyway, we may need them for non-periodic in */ in dwc2_hsotg_start_req()
1228 hs_ep->fifo_load = 0; in dwc2_hsotg_start_req()
1240 dev_dbg(hsotg->dev, in dwc2_hsotg_start_req()
1244 dev_dbg(hsotg->dev, "%s: DXEPCTL=0x%08x\n", in dwc2_hsotg_start_req()
1248 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 1); in dwc2_hsotg_start_req()
1252 * dwc2_hsotg_map_dma - map the DMA memory being used for the request
1269 hs_ep->map_dir = hs_ep->dir_in; in dwc2_hsotg_map_dma()
1270 ret = usb_gadget_map_request(&hsotg->gadget, req, hs_ep->dir_in); in dwc2_hsotg_map_dma()
1277 dev_err(hsotg->dev, "%s: failed to map buffer %p, %d bytes\n", in dwc2_hsotg_map_dma()
1278 __func__, req->buf, req->length); in dwc2_hsotg_map_dma()
1280 return -EIO; in dwc2_hsotg_map_dma()
1287 void *req_buf = hs_req->req.buf; in dwc2_hsotg_handle_unaligned_buf_start()
1293 WARN_ON(hs_req->saved_req_buf); in dwc2_hsotg_handle_unaligned_buf_start()
1295 dev_dbg(hsotg->dev, "%s: %s: buf=%p length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_start()
1296 hs_ep->ep.name, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
1298 hs_req->req.buf = kmalloc(hs_req->req.length, GFP_ATOMIC); in dwc2_hsotg_handle_unaligned_buf_start()
1299 if (!hs_req->req.buf) { in dwc2_hsotg_handle_unaligned_buf_start()
1300 hs_req->req.buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1301 dev_err(hsotg->dev, in dwc2_hsotg_handle_unaligned_buf_start()
1304 return -ENOMEM; in dwc2_hsotg_handle_unaligned_buf_start()
1308 hs_req->saved_req_buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1310 if (hs_ep->dir_in) in dwc2_hsotg_handle_unaligned_buf_start()
1311 memcpy(hs_req->req.buf, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
1321 if (!using_dma(hsotg) || !hs_req->saved_req_buf) in dwc2_hsotg_handle_unaligned_buf_complete()
1324 dev_dbg(hsotg->dev, "%s: %s: status=%d actual-length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_complete()
1325 hs_ep->ep.name, hs_req->req.status, hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1328 if (!hs_ep->dir_in && !hs_req->req.status) in dwc2_hsotg_handle_unaligned_buf_complete()
1329 memcpy(hs_req->saved_req_buf, hs_req->req.buf, in dwc2_hsotg_handle_unaligned_buf_complete()
1330 hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1333 kfree(hs_req->req.buf); in dwc2_hsotg_handle_unaligned_buf_complete()
1335 hs_req->req.buf = hs_req->saved_req_buf; in dwc2_hsotg_handle_unaligned_buf_complete()
1336 hs_req->saved_req_buf = NULL; in dwc2_hsotg_handle_unaligned_buf_complete()
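The two unaligned-buffer helpers above implement a bounce-buffer pattern for DMA: copy IN data into an allocated scratch buffer before the transfer, and copy OUT data back to the caller's buffer on completion. A simplified sketch of the start-side step (the real code also checks DMA mode and buffer alignment first):

static void *unaligned_buf_start_sketch(void *req_buf, size_t len, bool dir_in)
{
	void *bounce = kmalloc(len, GFP_ATOMIC);	/* DMA-friendly scratch buffer */

	if (!bounce)
		return NULL;				/* fall back to the original buffer */

	if (dir_in)
		memcpy(bounce, req_buf, len);		/* data headed to the host */

	return bounce;
}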
1340 * dwc2_gadget_target_frame_elapsed - Checks target frame
1348 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_target_frame_elapsed()
1349 u32 target_frame = hs_ep->target_frame; in dwc2_gadget_target_frame_elapsed()
1350 u32 current_frame = hsotg->frame_number; in dwc2_gadget_target_frame_elapsed()
1351 bool frame_overrun = hs_ep->frame_overrun; in dwc2_gadget_target_frame_elapsed()
1354 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_target_frame_elapsed()
1361 ((current_frame - target_frame) < limit / 2)) in dwc2_gadget_target_frame_elapsed()
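A sketch of the elapsed-frame test hinted at above, assuming the usual wrap handling: without an overrun the target has elapsed once the current frame reaches it, and after an overrun the comparison is only trusted within half the counter range so a wrapped counter is not misread:

static bool target_frame_elapsed_sketch(u32 current_frame, u32 target_frame,
					bool frame_overrun, u32 limit)
{
	if (!frame_overrun)
		return current_frame >= target_frame;

	return current_frame >= target_frame &&
	       (current_frame - target_frame) < limit / 2;
}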
1368 * dwc2_gadget_set_ep0_desc_chain - Set EP's desc chain pointers
1378 switch (hsotg->ep0_state) { in dwc2_gadget_set_ep0_desc_chain()
1381 hs_ep->desc_list = hsotg->setup_desc[0]; in dwc2_gadget_set_ep0_desc_chain()
1382 hs_ep->desc_list_dma = hsotg->setup_desc_dma[0]; in dwc2_gadget_set_ep0_desc_chain()
1386 hs_ep->desc_list = hsotg->ctrl_in_desc; in dwc2_gadget_set_ep0_desc_chain()
1387 hs_ep->desc_list_dma = hsotg->ctrl_in_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1390 hs_ep->desc_list = hsotg->ctrl_out_desc; in dwc2_gadget_set_ep0_desc_chain()
1391 hs_ep->desc_list_dma = hsotg->ctrl_out_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1394 dev_err(hsotg->dev, "invalid EP 0 state in queue %d\n", in dwc2_gadget_set_ep0_desc_chain()
1395 hsotg->ep0_state); in dwc2_gadget_set_ep0_desc_chain()
1396 return -EINVAL; in dwc2_gadget_set_ep0_desc_chain()
1407 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue()
1414 dev_dbg(hs->dev, "%s: req %p: %d@%p, noi=%d, zero=%d, snok=%d\n", in dwc2_hsotg_ep_queue()
1415 ep->name, req, req->length, req->buf, req->no_interrupt, in dwc2_hsotg_ep_queue()
1416 req->zero, req->short_not_ok); in dwc2_hsotg_ep_queue()
1418 if (hs->lx_state == DWC2_L1) { in dwc2_hsotg_ep_queue()
1423 if (hs->lx_state != DWC2_L0) { in dwc2_hsotg_ep_queue()
1424 dev_dbg(hs->dev, "%s: submit request only in active state\n", in dwc2_hsotg_ep_queue()
1426 return -EAGAIN; in dwc2_hsotg_ep_queue()
1430 INIT_LIST_HEAD(&hs_req->queue); in dwc2_hsotg_ep_queue()
1431 req->actual = 0; in dwc2_hsotg_ep_queue()
1432 req->status = -EINPROGRESS; in dwc2_hsotg_ep_queue()
1435 if (hs_ep->isochronous && in dwc2_hsotg_ep_queue()
1436 req->length > (hs_ep->mc * hs_ep->ep.maxpacket)) { in dwc2_hsotg_ep_queue()
1437 dev_err(hs->dev, "req length > maxpacket*mc\n"); in dwc2_hsotg_ep_queue()
1438 return -EINVAL; in dwc2_hsotg_ep_queue()
1441 /* In DDMA mode for ISOC's don't queue request if length greater in dwc2_hsotg_ep_queue()
1444 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1446 if (hs_ep->dir_in && req->length > maxsize) { in dwc2_hsotg_ep_queue()
1447 dev_err(hs->dev, "wrong length %d (maxsize=%d)\n", in dwc2_hsotg_ep_queue()
1448 req->length, maxsize); in dwc2_hsotg_ep_queue()
1449 return -EINVAL; in dwc2_hsotg_ep_queue()
1452 if (!hs_ep->dir_in && req->length > hs_ep->ep.maxpacket) { in dwc2_hsotg_ep_queue()
1453 dev_err(hs->dev, "ISOC OUT: wrong length %d (mps=%d)\n", in dwc2_hsotg_ep_queue()
1454 req->length, hs_ep->ep.maxpacket); in dwc2_hsotg_ep_queue()
1455 return -EINVAL; in dwc2_hsotg_ep_queue()
1470 if (using_desc_dma(hs) && !hs_ep->index) { in dwc2_hsotg_ep_queue()
1476 first = list_empty(&hs_ep->queue); in dwc2_hsotg_ep_queue()
1477 list_add_tail(&hs_req->queue, &hs_ep->queue); in dwc2_hsotg_ep_queue()
1480 * Handle DDMA isochronous transfers separately - just add new entry in dwc2_hsotg_ep_queue()
1485 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1486 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) { in dwc2_hsotg_ep_queue()
1487 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_hsotg_ep_queue()
1489 if (hs_req->req.num_sgs) { in dwc2_hsotg_ep_queue()
1490 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_hsotg_ep_queue()
1491 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_hsotg_ep_queue()
1494 hs_req->req.length); in dwc2_hsotg_ep_queue()
1500 if (!hs_ep->index && !req->length && !hs_ep->dir_in && in dwc2_hsotg_ep_queue()
1501 hs->ep0_state == DWC2_EP0_DATA_OUT) in dwc2_hsotg_ep_queue()
1502 hs_ep->dir_in = 1; in dwc2_hsotg_ep_queue()
1505 if (!hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1511 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1517 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1520 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) in dwc2_hsotg_ep_queue()
1530 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue_lock()
1534 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1536 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1550 * dwc2_hsotg_complete_oursetup - setup completion callback
1561 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_oursetup()
1563 dev_dbg(hsotg->dev, "%s: ep %p, req %p\n", __func__, ep, req); in dwc2_hsotg_complete_oursetup()
1569 * ep_from_windex - convert control wIndex value to endpoint
1585 if (idx > hsotg->num_of_eps) in ep_from_windex()
1592 * dwc2_hsotg_set_test_mode - Enable usb Test Modes
1594 * @testmode: requested usb test mode
1595 * Enable usb Test Mode requested by the Host.
1611 return -EINVAL; in dwc2_hsotg_set_test_mode()
1618 * dwc2_hsotg_send_reply - send reply to control request
1635 dev_dbg(hsotg->dev, "%s: buff %p, len %d\n", __func__, buff, length); in dwc2_hsotg_send_reply()
1637 req = dwc2_hsotg_ep_alloc_request(&ep->ep, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1638 hsotg->ep0_reply = req; in dwc2_hsotg_send_reply()
1640 dev_warn(hsotg->dev, "%s: cannot alloc req\n", __func__); in dwc2_hsotg_send_reply()
1641 return -ENOMEM; in dwc2_hsotg_send_reply()
1644 req->buf = hsotg->ep0_buff; in dwc2_hsotg_send_reply()
1645 req->length = length; in dwc2_hsotg_send_reply()
1650 req->zero = 0; in dwc2_hsotg_send_reply()
1651 req->complete = dwc2_hsotg_complete_oursetup; in dwc2_hsotg_send_reply()
1654 memcpy(req->buf, buff, length); in dwc2_hsotg_send_reply()
1656 ret = dwc2_hsotg_ep_queue(&ep->ep, req, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1658 dev_warn(hsotg->dev, "%s: cannot queue req\n", __func__); in dwc2_hsotg_send_reply()
1666 * dwc2_hsotg_process_req_status - process request GET_STATUS
1673 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_status()
1679 dev_dbg(hsotg->dev, "%s: USB_REQ_GET_STATUS\n", __func__); in dwc2_hsotg_process_req_status()
1681 if (!ep0->dir_in) { in dwc2_hsotg_process_req_status()
1682 dev_warn(hsotg->dev, "%s: direction out?\n", __func__); in dwc2_hsotg_process_req_status()
1683 return -EINVAL; in dwc2_hsotg_process_req_status()
1686 switch (ctrl->bRequestType & USB_RECIP_MASK) { in dwc2_hsotg_process_req_status()
1688 status = hsotg->gadget.is_selfpowered << in dwc2_hsotg_process_req_status()
1690 status |= hsotg->remote_wakeup_allowed << in dwc2_hsotg_process_req_status()
1701 ep = ep_from_windex(hsotg, le16_to_cpu(ctrl->wIndex)); in dwc2_hsotg_process_req_status()
1703 return -ENOENT; in dwc2_hsotg_process_req_status()
1705 reply = cpu_to_le16(ep->halted ? 1 : 0); in dwc2_hsotg_process_req_status()
1712 if (le16_to_cpu(ctrl->wLength) != 2) in dwc2_hsotg_process_req_status()
1713 return -EINVAL; in dwc2_hsotg_process_req_status()
1717 dev_err(hsotg->dev, "%s: failed to send reply\n", __func__); in dwc2_hsotg_process_req_status()
1727 * get_ep_head - return the first request on the endpoint
1734 return list_first_entry_or_null(&hs_ep->queue, struct dwc2_hsotg_req, in get_ep_head()
1739 * dwc2_gadget_start_next_request - Starts next request from ep queue
1742 * If queue is empty and EP is ISOC-OUT - unmasks OUTTKNEPDIS which is masked
1748 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_next_request()
1749 int dir_in = hs_ep->dir_in; in dwc2_gadget_start_next_request()
1752 if (!list_empty(&hs_ep->queue)) { in dwc2_gadget_start_next_request()
1757 if (!hs_ep->isochronous) in dwc2_gadget_start_next_request()
1761 dev_dbg(hsotg->dev, "%s: No more ISOC-IN requests\n", in dwc2_gadget_start_next_request()
1764 dev_dbg(hsotg->dev, "%s: No more ISOC-OUT requests\n", in dwc2_gadget_start_next_request()
1770 * dwc2_hsotg_process_req_feature - process request {SET,CLEAR}_FEATURE
1777 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_feature()
1779 bool set = (ctrl->bRequest == USB_REQ_SET_FEATURE); in dwc2_hsotg_process_req_feature()
1787 dev_dbg(hsotg->dev, "%s: %s_FEATURE\n", in dwc2_hsotg_process_req_feature()
1790 wValue = le16_to_cpu(ctrl->wValue); in dwc2_hsotg_process_req_feature()
1791 wIndex = le16_to_cpu(ctrl->wIndex); in dwc2_hsotg_process_req_feature()
1792 recip = ctrl->bRequestType & USB_RECIP_MASK; in dwc2_hsotg_process_req_feature()
1799 hsotg->remote_wakeup_allowed = 1; in dwc2_hsotg_process_req_feature()
1801 hsotg->remote_wakeup_allowed = 0; in dwc2_hsotg_process_req_feature()
1806 return -EINVAL; in dwc2_hsotg_process_req_feature()
1808 return -EINVAL; in dwc2_hsotg_process_req_feature()
1810 hsotg->test_mode = wIndex >> 8; in dwc2_hsotg_process_req_feature()
1813 return -ENOENT; in dwc2_hsotg_process_req_feature()
1818 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1827 dev_dbg(hsotg->dev, "%s: no endpoint for 0x%04x\n", in dwc2_hsotg_process_req_feature()
1829 return -ENOENT; in dwc2_hsotg_process_req_feature()
1834 halted = ep->halted; in dwc2_hsotg_process_req_feature()
1836 if (!ep->wedged) in dwc2_hsotg_process_req_feature()
1837 dwc2_hsotg_ep_sethalt(&ep->ep, set, true); in dwc2_hsotg_process_req_feature()
1841 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1856 if (ep->req) { in dwc2_hsotg_process_req_feature()
1857 hs_req = ep->req; in dwc2_hsotg_process_req_feature()
1858 ep->req = NULL; in dwc2_hsotg_process_req_feature()
1859 list_del_init(&hs_req->queue); in dwc2_hsotg_process_req_feature()
1860 if (hs_req->req.complete) { in dwc2_hsotg_process_req_feature()
1861 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1863 &ep->ep, &hs_req->req); in dwc2_hsotg_process_req_feature()
1864 spin_lock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1869 if (!ep->req) in dwc2_hsotg_process_req_feature()
1876 return -ENOENT; in dwc2_hsotg_process_req_feature()
1880 return -ENOENT; in dwc2_hsotg_process_req_feature()
1888 * dwc2_hsotg_stall_ep0 - stall ep0
1895 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_stall_ep0()
1899 dev_dbg(hsotg->dev, "ep0 stall (dir=%d)\n", ep0->dir_in); in dwc2_hsotg_stall_ep0()
1900 reg = (ep0->dir_in) ? DIEPCTL0 : DOEPCTL0; in dwc2_hsotg_stall_ep0()
1912 dev_dbg(hsotg->dev, in dwc2_hsotg_stall_ep0()
1924 * dwc2_hsotg_process_control - process a control request
1935 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_control()
1939 dev_dbg(hsotg->dev, in dwc2_hsotg_process_control()
1941 ctrl->bRequestType, ctrl->bRequest, ctrl->wValue, in dwc2_hsotg_process_control()
1942 ctrl->wIndex, ctrl->wLength); in dwc2_hsotg_process_control()
1944 if (ctrl->wLength == 0) { in dwc2_hsotg_process_control()
1945 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1946 hsotg->ep0_state = DWC2_EP0_STATUS_IN; in dwc2_hsotg_process_control()
1947 } else if (ctrl->bRequestType & USB_DIR_IN) { in dwc2_hsotg_process_control()
1948 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1949 hsotg->ep0_state = DWC2_EP0_DATA_IN; in dwc2_hsotg_process_control()
1951 ep0->dir_in = 0; in dwc2_hsotg_process_control()
1952 hsotg->ep0_state = DWC2_EP0_DATA_OUT; in dwc2_hsotg_process_control()
1955 if ((ctrl->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) { in dwc2_hsotg_process_control()
1956 switch (ctrl->bRequest) { in dwc2_hsotg_process_control()
1958 hsotg->connected = 1; in dwc2_hsotg_process_control()
1961 dcfg |= (le16_to_cpu(ctrl->wValue) << in dwc2_hsotg_process_control()
1965 dev_info(hsotg->dev, "new address %d\n", ctrl->wValue); in dwc2_hsotg_process_control()
1983 if (ret == 0 && hsotg->driver) { in dwc2_hsotg_process_control()
1984 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_control()
1985 ret = hsotg->driver->setup(&hsotg->gadget, ctrl); in dwc2_hsotg_process_control()
1986 spin_lock(&hsotg->lock); in dwc2_hsotg_process_control()
1988 dev_dbg(hsotg->dev, "driver->setup() ret %d\n", ret); in dwc2_hsotg_process_control()
1991 hsotg->delayed_status = false; in dwc2_hsotg_process_control()
1993 hsotg->delayed_status = true; in dwc2_hsotg_process_control()
2005 * dwc2_hsotg_complete_setup - completion of a setup transfer
2016 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_setup()
2018 if (req->status < 0) { in dwc2_hsotg_complete_setup()
2019 dev_dbg(hsotg->dev, "%s: failed %d\n", __func__, req->status); in dwc2_hsotg_complete_setup()
2023 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2024 if (req->actual == 0) in dwc2_hsotg_complete_setup()
2027 dwc2_hsotg_process_control(hsotg, req->buf); in dwc2_hsotg_complete_setup()
2028 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2032 * dwc2_hsotg_enqueue_setup - start a request for EP0 packets
2040 struct usb_request *req = hsotg->ctrl_req; in dwc2_hsotg_enqueue_setup()
2044 dev_dbg(hsotg->dev, "%s: queueing setup request\n", __func__); in dwc2_hsotg_enqueue_setup()
2046 req->zero = 0; in dwc2_hsotg_enqueue_setup()
2047 req->length = 8; in dwc2_hsotg_enqueue_setup()
2048 req->buf = hsotg->ctrl_buff; in dwc2_hsotg_enqueue_setup()
2049 req->complete = dwc2_hsotg_complete_setup; in dwc2_hsotg_enqueue_setup()
2051 if (!list_empty(&hs_req->queue)) { in dwc2_hsotg_enqueue_setup()
2052 dev_dbg(hsotg->dev, "%s already queued???\n", __func__); in dwc2_hsotg_enqueue_setup()
2056 hsotg->eps_out[0]->dir_in = 0; in dwc2_hsotg_enqueue_setup()
2057 hsotg->eps_out[0]->send_zlp = 0; in dwc2_hsotg_enqueue_setup()
2058 hsotg->ep0_state = DWC2_EP0_SETUP; in dwc2_hsotg_enqueue_setup()
2060 ret = dwc2_hsotg_ep_queue(&hsotg->eps_out[0]->ep, req, GFP_ATOMIC); in dwc2_hsotg_enqueue_setup()
2062 dev_err(hsotg->dev, "%s: failed queue (%d)\n", __func__, ret); in dwc2_hsotg_enqueue_setup()
2074 u8 index = hs_ep->index; in dwc2_hsotg_program_zlp()
2075 u32 epctl_reg = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_hsotg_program_zlp()
2076 u32 epsiz_reg = hs_ep->dir_in ? DIEPTSIZ(index) : DOEPTSIZ(index); in dwc2_hsotg_program_zlp()
2078 if (hs_ep->dir_in) in dwc2_hsotg_program_zlp()
2079 dev_dbg(hsotg->dev, "Sending zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2082 dev_dbg(hsotg->dev, "Receiving zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2086 dma_addr_t dma = hs_ep->desc_list_dma; in dwc2_hsotg_program_zlp()
2106 * dwc2_hsotg_complete_request - complete a request given to us
2124 dev_dbg(hsotg->dev, "%s: nothing to complete?\n", __func__); in dwc2_hsotg_complete_request()
2128 dev_dbg(hsotg->dev, "complete: ep %p %s, req %p, %d => %p\n", in dwc2_hsotg_complete_request()
2129 hs_ep, hs_ep->ep.name, hs_req, result, hs_req->req.complete); in dwc2_hsotg_complete_request()
2136 if (hs_req->req.status == -EINPROGRESS) in dwc2_hsotg_complete_request()
2137 hs_req->req.status = result; in dwc2_hsotg_complete_request()
2144 hs_ep->req = NULL; in dwc2_hsotg_complete_request()
2145 list_del_init(&hs_req->queue); in dwc2_hsotg_complete_request()
2152 if (hs_req->req.complete) { in dwc2_hsotg_complete_request()
2153 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_request()
2154 usb_gadget_giveback_request(&hs_ep->ep, &hs_req->req); in dwc2_hsotg_complete_request()
2155 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_request()
2159 if (using_desc_dma(hsotg) && hs_ep->isochronous) in dwc2_hsotg_complete_request()
2168 if (!hs_ep->req && result >= 0) in dwc2_hsotg_complete_request()
2173 * dwc2_gadget_complete_isoc_request_ddma - complete an isoc request in DDMA
2183 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_complete_isoc_request_ddma()
2189 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
2197 dev_warn(hsotg->dev, "%s: ISOC EP queue empty\n", __func__); in dwc2_gadget_complete_isoc_request_ddma()
2200 ureq = &hs_req->req; in dwc2_gadget_complete_isoc_request_ddma()
2205 mask = hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_MASK : in dwc2_gadget_complete_isoc_request_ddma()
2207 ureq->actual = ureq->length - ((desc_sts & mask) >> in dwc2_gadget_complete_isoc_request_ddma()
2213 if (!hs_ep->dir_in && ureq->length & 0x3) in dwc2_gadget_complete_isoc_request_ddma()
2214 ureq->actual += 4 - (ureq->length & 0x3); in dwc2_gadget_complete_isoc_request_ddma()
2217 ureq->frame_number = in dwc2_gadget_complete_isoc_request_ddma()
2224 hs_ep->compl_desc++; in dwc2_gadget_complete_isoc_request_ddma()
2225 if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1)) in dwc2_gadget_complete_isoc_request_ddma()
2226 hs_ep->compl_desc = 0; in dwc2_gadget_complete_isoc_request_ddma()
2227 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
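The completion handler above recovers the transferred length from the descriptor status, which holds the bytes remaining rather than the bytes done. A sketch of that calculation, with the mask/shift parameters standing in for the DEV_DMA_ISOC_*_NBYTES fields:

static u32 isoc_ddma_actual_sketch(u32 desc_sts, u32 nbytes_mask, u32 nbytes_shift,
				   u32 length, bool dir_in)
{
	u32 actual = length - ((desc_sts & nbytes_mask) >> nbytes_shift);

	/* OUT lengths not a multiple of 4 are rounded by the core; compensate */
	if (!dir_in && (length & 0x3))
		actual += 4 - (length & 0x3);

	return actual;
}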
2232 * dwc2_gadget_handle_isoc_bna - handle BNA interrupt for ISOC.
2242 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_isoc_bna()
2244 if (!hs_ep->dir_in) in dwc2_gadget_handle_isoc_bna()
2248 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_handle_isoc_bna()
2249 hs_ep->next_desc = 0; in dwc2_gadget_handle_isoc_bna()
2250 hs_ep->compl_desc = 0; in dwc2_gadget_handle_isoc_bna()
2254 * dwc2_hsotg_rx_data - receive data from the FIFO for an endpoint
2265 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[ep_idx]; in dwc2_hsotg_rx_data()
2266 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_rx_data()
2275 dev_dbg(hsotg->dev, in dwc2_hsotg_rx_data()
2287 read_ptr = hs_req->req.actual; in dwc2_hsotg_rx_data()
2288 max_req = hs_req->req.length - read_ptr; in dwc2_hsotg_rx_data()
2290 dev_dbg(hsotg->dev, "%s: read %d/%d, done %d/%d\n", in dwc2_hsotg_rx_data()
2291 __func__, to_read, max_req, read_ptr, hs_req->req.length); in dwc2_hsotg_rx_data()
2303 hs_ep->total_data += to_read; in dwc2_hsotg_rx_data()
2304 hs_req->req.actual += to_read; in dwc2_hsotg_rx_data()
2308 * note, we might over-write the buffer end by 3 bytes depending on in dwc2_hsotg_rx_data()
2312 hs_req->req.buf + read_ptr, to_read); in dwc2_hsotg_rx_data()
2316 * dwc2_hsotg_ep0_zlp - send/receive zero-length packet on control endpoint
2320 * Generate a zero-length IN packet request for terminating a SETUP
2330 hsotg->eps_out[0]->dir_in = dir_in; in dwc2_hsotg_ep0_zlp()
2331 hsotg->ep0_state = dir_in ? DWC2_EP0_STATUS_IN : DWC2_EP0_STATUS_OUT; in dwc2_hsotg_ep0_zlp()
2333 dwc2_hsotg_program_zlp(hsotg, hsotg->eps_out[0]); in dwc2_hsotg_ep0_zlp()
2337 * dwc2_gadget_get_xfersize_ddma - get transferred bytes amount from desc
2338 * @hs_ep: The endpoint on which the transfer went
2345 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_xfersize_ddma()
2346 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_get_xfersize_ddma()
2349 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_get_xfersize_ddma()
2352 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_xfersize_ddma()
2353 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_xfersize_ddma()
2356 return -EINVAL; in dwc2_gadget_get_xfersize_ddma()
2359 if (hs_ep->index) in dwc2_gadget_get_xfersize_ddma()
2361 bytes_rem_correction = 4 - (mps % 4); in dwc2_gadget_get_xfersize_ddma()
2363 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_get_xfersize_ddma()
2364 status = desc->status; in dwc2_gadget_get_xfersize_ddma()
2366 bytes_rem -= bytes_rem_correction; in dwc2_gadget_get_xfersize_ddma()
2369 dev_err(hsotg->dev, "descriptor %d closed with %x\n", in dwc2_gadget_get_xfersize_ddma()
2382 * dwc2_hsotg_handle_outdone - handle receiving OutDone/SetupDone from RXFIFO
2393 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[epnum]; in dwc2_hsotg_handle_outdone()
2394 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_handle_outdone()
2395 struct usb_request *req = &hs_req->req; in dwc2_hsotg_handle_outdone()
2400 dev_dbg(hsotg->dev, "%s: no request active\n", __func__); in dwc2_hsotg_handle_outdone()
2404 if (epnum == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_OUT) { in dwc2_hsotg_handle_outdone()
2405 dev_dbg(hsotg->dev, "zlp packet received\n"); in dwc2_hsotg_handle_outdone()
2426 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_handle_outdone()
2427 size_done += hs_ep->last_load; in dwc2_hsotg_handle_outdone()
2429 req->actual = size_done; in dwc2_hsotg_handle_outdone()
2433 if (req->actual < req->length && size_left == 0) { in dwc2_hsotg_handle_outdone()
2438 if (req->actual < req->length && req->short_not_ok) { in dwc2_hsotg_handle_outdone()
2439 dev_dbg(hsotg->dev, "%s: got %d/%d (short not ok) => error\n", in dwc2_hsotg_handle_outdone()
2440 __func__, req->actual, req->length); in dwc2_hsotg_handle_outdone()
2443 * todo - what should we return here? there's no one else in dwc2_hsotg_handle_outdone()
2450 hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_handle_outdone()
2452 if (!hsotg->delayed_status) in dwc2_hsotg_handle_outdone()
2457 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_handle_outdone()
2458 req->frame_number = hs_ep->target_frame; in dwc2_hsotg_handle_outdone()
2466 * dwc2_hsotg_handle_rx - RX FIFO has data
2494 dev_dbg(hsotg->dev, "%s: GRXSTSP=0x%08x (%d@%d)\n", in dwc2_hsotg_handle_rx()
2499 dev_dbg(hsotg->dev, "GLOBALOUTNAK\n"); in dwc2_hsotg_handle_rx()
2503 dev_dbg(hsotg->dev, "OutDone (Frame=0x%08x)\n", in dwc2_hsotg_handle_rx()
2511 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2520 if (hsotg->ep0_state == DWC2_EP0_SETUP) in dwc2_hsotg_handle_rx()
2529 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2534 WARN_ON(hsotg->ep0_state != DWC2_EP0_SETUP); in dwc2_hsotg_handle_rx()
2540 dev_warn(hsotg->dev, "%s: unknown status %08x\n", in dwc2_hsotg_handle_rx()
2549 * dwc2_hsotg_ep0_mps - turn max packet size into register setting
2567 return (u32)-1; in dwc2_hsotg_ep0_mps()
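dwc2_hsotg_ep0_mps() above maps EP0's max packet size onto the DIEPCTL0/DOEPCTL0 MPS field. A sketch assuming the usual DWC2 encoding of that two-bit field (64, 32, 16 and 8 bytes); anything else is rejected with (u32)-1 as in the fragment shown:

static u32 ep0_mps_sketch(unsigned int mps)
{
	switch (mps) {
	case 64: return 0;		/* D0EPCTL_MPS_64 */
	case 32: return 1;		/* D0EPCTL_MPS_32 */
	case 16: return 2;		/* D0EPCTL_MPS_16 */
	case 8:  return 3;		/* D0EPCTL_MPS_8 */
	default: return (u32)-1;	/* unsupported EP0 max packet size */
	}
}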
2571 * dwc2_hsotg_set_ep_maxpacket - set endpoint's max-packet field
2599 hs_ep->ep.maxpacket = mps_bytes; in dwc2_hsotg_set_ep_maxpacket()
2600 hs_ep->mc = 1; in dwc2_hsotg_set_ep_maxpacket()
2604 hs_ep->mc = mc; in dwc2_hsotg_set_ep_maxpacket()
2607 hs_ep->ep.maxpacket = mps; in dwc2_hsotg_set_ep_maxpacket()
2625 dev_err(hsotg->dev, "ep%d: bad mps of %d\n", ep, mps); in dwc2_hsotg_set_ep_maxpacket()
2629 * dwc2_hsotg_txfifo_flush - flush Tx FIFO
2640 dev_warn(hsotg->dev, "%s: timeout flushing fifo GRSTCTL_TXFFLSH\n", in dwc2_hsotg_txfifo_flush()
2645 * dwc2_hsotg_trytx - check to see if anything needs transmitting
2655 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_trytx()
2657 if (!hs_ep->dir_in || !hs_req) { in dwc2_hsotg_trytx()
2659 * if request is not enqueued, we disable interrupts in dwc2_hsotg_trytx()
2662 if (hs_ep->index != 0) in dwc2_hsotg_trytx()
2663 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, in dwc2_hsotg_trytx()
2664 hs_ep->dir_in, 0); in dwc2_hsotg_trytx()
2668 if (hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_trytx()
2669 dev_dbg(hsotg->dev, "trying to write more for ep%d\n", in dwc2_hsotg_trytx()
2670 hs_ep->index); in dwc2_hsotg_trytx()
2678 * dwc2_hsotg_complete_in - complete IN transfer
2688 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_complete_in()
2689 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_complete_in()
2693 dev_dbg(hsotg->dev, "XferCompl but no req\n"); in dwc2_hsotg_complete_in()
2698 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_IN) { in dwc2_hsotg_complete_in()
2699 dev_dbg(hsotg->dev, "zlp packet sent\n"); in dwc2_hsotg_complete_in()
2705 hs_ep->dir_in = 0; in dwc2_hsotg_complete_in()
2708 if (hsotg->test_mode) { in dwc2_hsotg_complete_in()
2711 ret = dwc2_hsotg_set_test_mode(hsotg, hsotg->test_mode); in dwc2_hsotg_complete_in()
2713 dev_dbg(hsotg->dev, "Invalid Test #%d\n", in dwc2_hsotg_complete_in()
2714 hsotg->test_mode); in dwc2_hsotg_complete_in()
2735 dev_err(hsotg->dev, "error parsing DDMA results %d\n", in dwc2_hsotg_complete_in()
2741 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_complete_in()
2742 size_done += hs_ep->last_load; in dwc2_hsotg_complete_in()
2744 if (hs_req->req.actual != size_done) in dwc2_hsotg_complete_in()
2745 dev_dbg(hsotg->dev, "%s: adjusting size done %d => %d\n", in dwc2_hsotg_complete_in()
2746 __func__, hs_req->req.actual, size_done); in dwc2_hsotg_complete_in()
2748 hs_req->req.actual = size_done; in dwc2_hsotg_complete_in()
2749 dev_dbg(hsotg->dev, "req->length:%d req->actual:%d req->zero:%d\n", in dwc2_hsotg_complete_in()
2750 hs_req->req.length, hs_req->req.actual, hs_req->req.zero); in dwc2_hsotg_complete_in()
2752 if (!size_left && hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_complete_in()
2753 dev_dbg(hsotg->dev, "%s trying more for req...\n", __func__); in dwc2_hsotg_complete_in()
2759 if (hs_ep->send_zlp) { in dwc2_hsotg_complete_in()
2760 hs_ep->send_zlp = 0; in dwc2_hsotg_complete_in()
2768 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_DATA_IN) { in dwc2_hsotg_complete_in()
2775 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_complete_in()
2776 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_hsotg_complete_in()
2784 * dwc2_gadget_read_ep_interrupts - reads interrupts for given ep
2787 * @dir_in: Endpoint direction 1-in 0-out.
2812 * dwc2_gadget_handle_ep_disabled - handle DXEPINT_EPDISBLD
2821 * For ISOC-OUT endpoints completes expired requests. If there is remaining
2826 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_ep_disabled()
2828 unsigned char idx = hs_ep->index; in dwc2_gadget_handle_ep_disabled()
2829 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_ep_disabled()
2833 dev_dbg(hsotg->dev, "%s: EPDisbld\n", __func__); in dwc2_gadget_handle_ep_disabled()
2838 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_ep_disabled()
2854 if (!hs_ep->isochronous) in dwc2_gadget_handle_ep_disabled()
2857 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_handle_ep_disabled()
2858 dev_dbg(hsotg->dev, "%s: complete_ep 0x%p, ep->queue empty!\n", in dwc2_gadget_handle_ep_disabled()
2866 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_gadget_handle_ep_disabled()
2867 hs_req->req.actual = 0; in dwc2_gadget_handle_ep_disabled()
2869 -ENODATA); in dwc2_gadget_handle_ep_disabled()
2873 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_ep_disabled()
2878 * dwc2_gadget_handle_out_token_ep_disabled - handle DXEPINT_OUTTKNEPDIS
2881 * This is the starting point for an ISOC-OUT transfer; synchronization is done with
2885 * HW generates OUTTKNEPDIS - out token is received while EP is disabled. Upon
2890 struct dwc2_hsotg *hsotg = ep->parent; in dwc2_gadget_handle_out_token_ep_disabled()
2892 int dir_in = ep->dir_in; in dwc2_gadget_handle_out_token_ep_disabled()
2894 if (dir_in || !ep->isochronous) in dwc2_gadget_handle_out_token_ep_disabled()
2898 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2900 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2906 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2909 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2910 if (ep->interval > 1) { in dwc2_gadget_handle_out_token_ep_disabled()
2911 ctrl = dwc2_readl(hsotg, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
2912 if (ep->target_frame & 0x1) in dwc2_gadget_handle_out_token_ep_disabled()
2917 dwc2_writel(hsotg, ctrl, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
2924 hs_req->req.frame_number = ep->target_frame; in dwc2_gadget_handle_out_token_ep_disabled()
2925 hs_req->req.actual = 0; in dwc2_gadget_handle_out_token_ep_disabled()
2926 dwc2_hsotg_complete_request(hsotg, ep, hs_req, -ENODATA); in dwc2_gadget_handle_out_token_ep_disabled()
2931 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_out_token_ep_disabled()
2934 if (!ep->req) in dwc2_gadget_handle_out_token_ep_disabled()
2943 * dwc2_gadget_handle_nak - handle NAK interrupt
2946 * This is the starting point for an ISOC-IN transfer; synchronization is done with
2951 * and 'NAK'. NAK interrupt for ISOC-IN means that token has arrived and ZLP was
2958 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_nak()
2960 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_nak()
2963 if (!dir_in || !hs_ep->isochronous) in dwc2_gadget_handle_nak()
2966 if (hs_ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_nak()
2969 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2972 /* In service interval mode target_frame must in dwc2_gadget_handle_nak()
2975 if (hsotg->params.service_interval) { in dwc2_gadget_handle_nak()
2979 hs_ep->target_frame &= ~hs_ep->interval + 1; in dwc2_gadget_handle_nak()
2992 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2993 if (hs_ep->interval > 1) { in dwc2_gadget_handle_nak()
2995 DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
2996 if (hs_ep->target_frame & 0x1) in dwc2_gadget_handle_nak()
3001 dwc2_writel(hsotg, ctrl, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
3008 ctrl = dwc2_readl(hsotg, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
3012 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_nak()
3017 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_gadget_handle_nak()
3018 hs_req->req.actual = 0; in dwc2_gadget_handle_nak()
3019 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_gadget_handle_nak()
3024 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_nak()
3027 if (!hs_ep->req) in dwc2_gadget_handle_nak()
3032 * dwc2_hsotg_epint - handle an in/out endpoint interrupt
3054 dev_err(hsotg->dev, "%s:Interrupt for unconfigured ep%d(%s)\n", in dwc2_hsotg_epint()
3059 dev_dbg(hsotg->dev, "%s: ep%d(%s) DxEPINT=0x%08x\n", in dwc2_hsotg_epint()
3072 if (using_desc_dma(hsotg) && idx == 0 && !hs_ep->dir_in && in dwc2_hsotg_epint()
3073 hsotg->ep0_state == DWC2_EP0_SETUP && !(ints & DXEPINT_SETUP)) in dwc2_hsotg_epint()
3077 dev_dbg(hsotg->dev, in dwc2_hsotg_epint()
3083 if (using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_epint()
3089 * if operating slave mode in dwc2_hsotg_epint()
3091 if (!hs_ep->isochronous || !(ints & DXEPINT_NAKINTRPT)) in dwc2_hsotg_epint()
3094 if (idx == 0 && !hs_ep->req) in dwc2_hsotg_epint()
3101 if (!hs_ep->isochronous || !(ints & DXEPINT_OUTTKNEPDIS)) in dwc2_hsotg_epint()
3116 dev_dbg(hsotg->dev, "%s: AHBErr\n", __func__); in dwc2_hsotg_epint()
3119 dev_dbg(hsotg->dev, "%s: Setup/Timeout\n", __func__); in dwc2_hsotg_epint()
3124 * setup packet. In non-DMA mode we'd get this in dwc2_hsotg_epint()
3137 dev_dbg(hsotg->dev, "%s: StsPhseRcvd\n", __func__); in dwc2_hsotg_epint()
3140 if (hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_epint()
3143 if (!hsotg->delayed_status) in dwc2_hsotg_epint()
3161 dev_dbg(hsotg->dev, "%s: B2BSetup/INEPNakEff\n", __func__); in dwc2_hsotg_epint()
3164 dev_dbg(hsotg->dev, "%s: BNA interrupt\n", __func__); in dwc2_hsotg_epint()
3165 if (hs_ep->isochronous) in dwc2_hsotg_epint()
3169 if (dir_in && !hs_ep->isochronous) { in dwc2_hsotg_epint()
3172 dev_dbg(hsotg->dev, "%s: ep%d: INTknTXFEmpMsk\n", in dwc2_hsotg_epint()
3178 dev_warn(hsotg->dev, "%s: ep%d: INTknEP\n", in dwc2_hsotg_epint()
3183 if (hsotg->dedicated_fifos && in dwc2_hsotg_epint()
3185 dev_dbg(hsotg->dev, "%s: ep%d: TxFIFOEmpty\n", in dwc2_hsotg_epint()
3194 * dwc2_hsotg_irq_enumdone - Handle EnumDone interrupt (enumeration done)
3211 dev_dbg(hsotg->dev, "EnumDone (DSTS=0x%08x)\n", dsts); in dwc2_hsotg_irq_enumdone()
3223 hsotg->gadget.speed = USB_SPEED_FULL; in dwc2_hsotg_irq_enumdone()
3229 hsotg->gadget.speed = USB_SPEED_HIGH; in dwc2_hsotg_irq_enumdone()
3235 hsotg->gadget.speed = USB_SPEED_LOW; in dwc2_hsotg_irq_enumdone()
3245 dev_info(hsotg->dev, "new device is %s\n", in dwc2_hsotg_irq_enumdone()
3246 usb_speed_string(hsotg->gadget.speed)); in dwc2_hsotg_irq_enumdone()
3258 for (i = 1; i < hsotg->num_of_eps; i++) { in dwc2_hsotg_irq_enumdone()
3259 if (hsotg->eps_in[i]) in dwc2_hsotg_irq_enumdone()
3262 if (hsotg->eps_out[i]) in dwc2_hsotg_irq_enumdone()
3272 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_irq_enumdone()
3278 * kill_all_requests - remove all requests from the endpoint's queue
3292 ep->req = NULL; in kill_all_requests()
3294 while (!list_empty(&ep->queue)) { in kill_all_requests()
3300 if (!hsotg->dedicated_fifos) in kill_all_requests()
3302 size = (dwc2_readl(hsotg, DTXFSTS(ep->fifo_index)) & 0xffff) * 4; in kill_all_requests()
3303 if (size < ep->fifo_size) in kill_all_requests()
3304 dwc2_hsotg_txfifo_flush(hsotg, ep->fifo_index); in kill_all_requests()
3308 * dwc2_hsotg_disconnect - disconnect service
3319 if (!hsotg->connected) in dwc2_hsotg_disconnect()
3322 hsotg->connected = 0; in dwc2_hsotg_disconnect()
3323 hsotg->test_mode = 0; in dwc2_hsotg_disconnect()
3326 for (ep = 0; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_disconnect()
3327 if (hsotg->eps_in[ep]) in dwc2_hsotg_disconnect()
3328 kill_all_requests(hsotg, hsotg->eps_in[ep], in dwc2_hsotg_disconnect()
3329 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3330 if (hsotg->eps_out[ep]) in dwc2_hsotg_disconnect()
3331 kill_all_requests(hsotg, hsotg->eps_out[ep], in dwc2_hsotg_disconnect()
3332 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3336 hsotg->lx_state = DWC2_L3; in dwc2_hsotg_disconnect()
3338 usb_gadget_set_state(&hsotg->gadget, USB_STATE_NOTATTACHED); in dwc2_hsotg_disconnect()
3342 * dwc2_hsotg_irq_fifoempty - TX FIFO empty interrupt handler
3352 for (epno = 0; epno < hsotg->num_of_eps; epno++) { in dwc2_hsotg_irq_fifoempty()
3358 if (!ep->dir_in) in dwc2_hsotg_irq_fifoempty()
3361 if ((periodic && !ep->periodic) || in dwc2_hsotg_irq_fifoempty()
3362 (!periodic && ep->periodic)) in dwc2_hsotg_irq_fifoempty()
3378 * dwc2_hsotg_core_init_disconnected - issue softreset to the core
3394 kill_all_requests(hsotg, hsotg->eps_out[0], -ECONNRESET); in dwc2_hsotg_core_init_disconnected()
3401 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_core_init_disconnected()
3402 if (hsotg->eps_in[ep]) in dwc2_hsotg_core_init_disconnected()
3403 dwc2_hsotg_ep_disable(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3404 if (hsotg->eps_out[ep]) in dwc2_hsotg_core_init_disconnected()
3405 dwc2_hsotg_ep_disable(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3429 if (hsotg->params.eusb2_disc) in dwc2_hsotg_core_init_disconnected()
3435 switch (hsotg->params.speed) { in dwc2_hsotg_core_init_disconnected()
3440 if (hsotg->params.phy_type == DWC2_PHY_TYPE_PARAM_FS) in dwc2_hsotg_core_init_disconnected()
3449 if (hsotg->params.ipg_isoc_en) in dwc2_hsotg_core_init_disconnected()
3469 if (!hsotg->params.external_id_pin_ctl) in dwc2_hsotg_core_init_disconnected()
3476 hsotg->params.ahbcfg, in dwc2_hsotg_core_init_disconnected()
3479 /* Set DDMA mode support in the core if needed */ in dwc2_hsotg_core_init_disconnected()
3484 dwc2_writel(hsotg, ((hsotg->dedicated_fifos) ? in dwc2_hsotg_core_init_disconnected()
3491 * If INTknTXFEmpMsk is enabled, it's important to disable ep interrupts in dwc2_hsotg_core_init_disconnected()
3496 dwc2_writel(hsotg, ((hsotg->dedicated_fifos && !using_dma(hsotg)) ? in dwc2_hsotg_core_init_disconnected()
3503 * don't need XferCompl, we get that from RXFIFO in slave mode. In in dwc2_hsotg_core_init_disconnected()
3504 * DMA mode we may need this and StsPhseRcvd. in dwc2_hsotg_core_init_disconnected()
3518 /* Enable Service Interval mode if supported */ in dwc2_hsotg_core_init_disconnected()
3519 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3524 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3532 * Enable the RXFIFO when in slave mode, as this is how we collect in dwc2_hsotg_core_init_disconnected()
3533 * the data. In DMA mode, we get events from the FIFO but also in dwc2_hsotg_core_init_disconnected()
3549 dev_dbg(hsotg->dev, "DCTL=0x%08x\n", dwc2_readl(hsotg, DCTL)); in dwc2_hsotg_core_init_disconnected()
3560 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3566 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3579 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3582 /* must be at least 3ms to allow bus to see disconnect */ in dwc2_hsotg_core_init_disconnected()
3585 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_core_init_disconnected()
3589 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3596 /* set the soft-disconnect bit */ in dwc2_hsotg_core_disconnect()
3602 /* remove the soft-disconnect and let's go */ in dwc2_hsotg_core_connect()
3603 if (!hsotg->role_sw || (dwc2_readl(hsotg, GOTGCTL) & GOTGCTL_BSESVLD)) in dwc2_hsotg_core_connect()
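/*
 * Minimal sketch, assuming the dwc2_set_bit()/dwc2_clear_bit() register
 * helpers: soft connect/disconnect is just the DCTL.SftDiscon bit, with
 * a short delay on disconnect so the host is guaranteed to notice (see
 * the "at least 3ms" note above).  This is not a verbatim copy of the
 * core connect/disconnect helpers.
 */
static void dwc2_example_soft_connect(struct dwc2_hsotg *hsotg, bool connect)
{
	if (connect) {
		dwc2_clear_bit(hsotg, DCTL, DCTL_SFTDISCON);
	} else {
		dwc2_set_bit(hsotg, DCTL, DCTL_SFTDISCON);
		mdelay(3);	/* let the bus see the disconnect */
	}
}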
3608 * dwc2_gadget_handle_incomplete_isoc_in - handle incomplete ISO IN Interrupt.
3613 * - Corrupted IN Token for ISOC EP.
3614 * - Packet not complete in FIFO.
3617 * - Determine the EP
3618 * - Disable EP; when the 'Endpoint Disabled' interrupt is received, flush the FIFO
3627 dev_dbg(hsotg->dev, "Incomplete isoc in interrupt received:\n"); in dwc2_gadget_handle_incomplete_isoc_in()
3631 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_in()
3632 hs_ep = hsotg->eps_in[idx]; in dwc2_gadget_handle_incomplete_isoc_in()
3634 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_in()
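/*
 * Sketch of the per-endpoint step hinted at in the comment above
 * (assumption, not the verbatim loop body): once a matching ISOC IN
 * endpoint is found, the usual recovery is to set SNAK and EPDIS in its
 * DIEPCTL so the following "Endpoint Disabled" interrupt can flush the
 * TX FIFO.
 */
static void dwc2_example_kick_incomplete_isoc_in(struct dwc2_hsotg *hsotg,
						 int idx)
{
	u32 epctrl = dwc2_readl(hsotg, DIEPCTL(idx));

	if (epctrl & DXEPCTL_EPENA) {
		epctrl |= DXEPCTL_SNAK | DXEPCTL_EPDIS;
		dwc2_writel(hsotg, epctrl, DIEPCTL(idx));
	}
}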
3651 * dwc2_gadget_handle_incomplete_isoc_out - handle incomplete ISO OUT Interrupt
3656 * - Corrupted OUT Token for ISOC EP.
3657 * - Packet not complete in FIFO.
3660 * - Determine the EP
3661 * - Set DCTL_SGOUTNAK and unmask GOUTNAKEFF if target frame elapsed.
3672 dev_dbg(hsotg->dev, "%s: GINTSTS_INCOMPL_SOOUT\n", __func__); in dwc2_gadget_handle_incomplete_isoc_out()
3677 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_out()
3678 hs_ep = hsotg->eps_out[idx]; in dwc2_gadget_handle_incomplete_isoc_out()
3680 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_out()
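/*
 * Sketch of the recovery step named above for the OUT direction
 * (assumption about the exact helpers used): request a global OUT NAK
 * via DCTL and unmask GOUTNAKEFF so the handler can disable the
 * affected endpoint once the NAK takes effect.
 */
static void dwc2_example_request_global_out_nak(struct dwc2_hsotg *hsotg)
{
	dwc2_set_bit(hsotg, DCTL, DCTL_SGOUTNAK);
	dwc2_hsotg_en_gsint(hsotg, GINTSTS_GOUTNAKEFF);
}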
3704 * dwc2_hsotg_irq - handle device interrupt
3718 spin_lock(&hsotg->lock); in dwc2_hsotg_irq()
3723 dev_dbg(hsotg->dev, "%s: %08x %08x (%08x) retry %d\n", in dwc2_hsotg_irq()
3729 dev_dbg(hsotg->dev, "%s: USBRstDet\n", __func__); in dwc2_hsotg_irq()
3734 if (hsotg->in_ppd && hsotg->lx_state == DWC2_L2) in dwc2_hsotg_irq()
3737 /* Exit gadget mode clock gating. */ in dwc2_hsotg_irq()
3738 if (hsotg->params.power_down == in dwc2_hsotg_irq()
3739 DWC2_POWER_DOWN_PARAM_NONE && hsotg->bus_suspended && in dwc2_hsotg_irq()
3740 !hsotg->params.no_clock_gating) in dwc2_hsotg_irq()
3743 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_irq()
3748 u32 connected = hsotg->connected; in dwc2_hsotg_irq()
3750 dev_dbg(hsotg->dev, "%s: USBRst\n", __func__); in dwc2_hsotg_irq()
3751 dev_dbg(hsotg->dev, "GNPTXSTS=%08x\n", in dwc2_hsotg_irq()
3782 dev_dbg(hsotg->dev, "%s: daint=%08x\n", __func__, daint); in dwc2_hsotg_irq()
3784 for (ep = 0; ep < hsotg->num_of_eps && daint_out; in dwc2_hsotg_irq()
3790 for (ep = 0; ep < hsotg->num_of_eps && daint_in; in dwc2_hsotg_irq()
3800 dev_dbg(hsotg->dev, "NPTxFEmp\n"); in dwc2_hsotg_irq()
3803 * Disable the interrupt to stop it happening again in dwc2_hsotg_irq()
3805 * it needs re-enabling in dwc2_hsotg_irq()
3813 dev_dbg(hsotg->dev, "PTxFEmp\n"); in dwc2_hsotg_irq()
3823 * note, since GINTSTS_RxFLvl doubles as FIFO-not-empty, in dwc2_hsotg_irq()
3832 dev_dbg(hsotg->dev, "GINTSTS_ErlySusp\n"); in dwc2_hsotg_irq()
3837 * these next two seem to crop up occasionally, causing the core in dwc2_hsotg_irq()
3856 dev_dbg(hsotg->dev, "GOUTNakEff triggered\n"); in dwc2_hsotg_irq()
3857 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_irq()
3858 hs_ep = hsotg->eps_out[idx]; in dwc2_hsotg_irq()
3866 if ((epctrl & DXEPCTL_EPENA) && hs_ep->isochronous) { in dwc2_hsotg_irq()
3873 /* Non-ISOC EPs */ in dwc2_hsotg_irq()
3874 if (hs_ep->halted) { in dwc2_hsotg_irq()
3887 dev_info(hsotg->dev, "GINNakEff triggered\n"); in dwc2_hsotg_irq()
3905 if (gintsts & IRQ_RETRY_MASK && --retry_count > 0) in dwc2_hsotg_irq()
3909 if (hsotg->params.service_interval) in dwc2_hsotg_irq()
3912 spin_unlock(&hsotg->lock); in dwc2_hsotg_irq()
3923 epctrl_reg = hs_ep->dir_in ? DIEPCTL(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3924 DOEPCTL(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3925 epint_reg = hs_ep->dir_in ? DIEPINT(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3926 DOEPINT(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3928 dev_dbg(hsotg->dev, "%s: stopping transfer on %s\n", __func__, in dwc2_hsotg_ep_stop_xfr()
3929 hs_ep->name); in dwc2_hsotg_ep_stop_xfr()
3931 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3932 if (hsotg->dedicated_fifos || hs_ep->periodic) { in dwc2_hsotg_ep_stop_xfr()
3937 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3945 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3960 dev_warn(hsotg->dev, "%s: timeout GINTSTS.RXFLVL\n", in dwc2_hsotg_ep_stop_xfr()
3974 dev_warn(hsotg->dev, "%s: timeout GINTSTS.GOUTNAKEFF\n", in dwc2_hsotg_ep_stop_xfr()
3978 /* Disable ep */ in dwc2_hsotg_ep_stop_xfr()
3983 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3989 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3992 if (hsotg->dedicated_fifos || hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
3993 fifo_index = hs_ep->fifo_index; in dwc2_hsotg_ep_stop_xfr()
4001 if (!hsotg->dedicated_fifos && !hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
4011 * dwc2_hsotg_ep_enable - enable the given endpoint
4021 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_enable()
4023 unsigned int index = hs_ep->index; in dwc2_hsotg_ep_enable()
4035 dev_dbg(hsotg->dev, in dwc2_hsotg_ep_enable()
4037 __func__, ep->name, desc->bEndpointAddress, desc->bmAttributes, in dwc2_hsotg_ep_enable()
4038 desc->wMaxPacketSize, desc->bInterval); in dwc2_hsotg_ep_enable()
4042 dev_err(hsotg->dev, "%s: called for EP 0\n", __func__); in dwc2_hsotg_ep_enable()
4043 return -EINVAL; in dwc2_hsotg_ep_enable()
4046 dir_in = (desc->bEndpointAddress & USB_ENDPOINT_DIR_MASK) ? 1 : 0; in dwc2_hsotg_ep_enable()
4047 if (dir_in != hs_ep->dir_in) { in dwc2_hsotg_ep_enable()
4048 dev_err(hsotg->dev, "%s: direction mismatch!\n", __func__); in dwc2_hsotg_ep_enable()
4049 return -EINVAL; in dwc2_hsotg_ep_enable()
4052 ep_type = desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK; in dwc2_hsotg_ep_enable()
4058 dir_in && desc->bInterval > 10) { in dwc2_hsotg_ep_enable()
4059 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4061 return -EINVAL; in dwc2_hsotg_ep_enable()
4067 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4069 return -EINVAL; in dwc2_hsotg_ep_enable()
4077 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x from 0x%08x\n", in dwc2_hsotg_ep_enable()
4085 /* Allocate DMA descriptor chain for non-ctrl endpoints */ in dwc2_hsotg_ep_enable()
4086 if (using_desc_dma(hsotg) && !hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4087 hs_ep->desc_list = dmam_alloc_coherent(hsotg->dev, in dwc2_hsotg_ep_enable()
4089 &hs_ep->desc_list_dma, GFP_ATOMIC); in dwc2_hsotg_ep_enable()
4090 if (!hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4091 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4096 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4108 dwc2_hsotg_set_ep_maxpacket(hsotg, hs_ep->index, mps, mc, dir_in); in dwc2_hsotg_ep_enable()
4110 /* default, set to non-periodic */ in dwc2_hsotg_ep_enable()
4111 hs_ep->isochronous = 0; in dwc2_hsotg_ep_enable()
4112 hs_ep->periodic = 0; in dwc2_hsotg_ep_enable()
4113 hs_ep->halted = 0; in dwc2_hsotg_ep_enable()
4114 hs_ep->wedged = 0; in dwc2_hsotg_ep_enable()
4115 hs_ep->interval = desc->bInterval; in dwc2_hsotg_ep_enable()
4121 hs_ep->isochronous = 1; in dwc2_hsotg_ep_enable()
4122 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4123 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_hsotg_ep_enable()
4124 hs_ep->next_desc = 0; in dwc2_hsotg_ep_enable()
4125 hs_ep->compl_desc = 0; in dwc2_hsotg_ep_enable()
4127 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4145 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4147 if (hsotg->gadget.speed == USB_SPEED_HIGH) in dwc2_hsotg_ep_enable()
4148 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4160 * a unique tx-fifo even if it is non-periodic. in dwc2_hsotg_ep_enable()
4162 if (dir_in && hsotg->dedicated_fifos) { in dwc2_hsotg_ep_enable()
4167 size = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_ep_enable()
4169 if (hsotg->fifo_map & (1 << i)) in dwc2_hsotg_ep_enable()
4182 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4184 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4188 hsotg->fifo_map |= 1 << fifo_index; in dwc2_hsotg_ep_enable()
4190 hs_ep->fifo_index = fifo_index; in dwc2_hsotg_ep_enable()
4191 hs_ep->fifo_size = fifo_size; in dwc2_hsotg_ep_enable()
4195 if (index && !hs_ep->isochronous) in dwc2_hsotg_ep_enable()
4198 /* WA for Full speed ISOC IN in DDMA mode. in dwc2_hsotg_ep_enable()
4204 if (hsotg->gadget.speed == USB_SPEED_FULL && in dwc2_hsotg_ep_enable()
4205 hs_ep->isochronous && dir_in) { in dwc2_hsotg_ep_enable()
4219 dev_dbg(hsotg->dev, "%s: write DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4223 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4230 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4233 if (ret && using_desc_dma(hsotg) && hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4234 dmam_free_coherent(hsotg->dev, desc_num * in dwc2_hsotg_ep_enable()
4236 hs_ep->desc_list, hs_ep->desc_list_dma); in dwc2_hsotg_ep_enable()
4237 hs_ep->desc_list = NULL; in dwc2_hsotg_ep_enable()
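/*
 * Usage sketch from the function-driver side (not code from this file;
 * example_start_ep() and its arguments are hypothetical): the endpoint
 * ops defined here are reached through the usb_ep API.  ep->desc must
 * point at the descriptor chosen for the current speed before
 * usb_ep_enable() is called, which dwc2_hsotg_ep_enable() then
 * validates as seen above.
 */
static int example_start_ep(struct usb_ep *ep,
			    struct usb_endpoint_descriptor *desc)
{
	int ret;

	ep->desc = desc;		/* descriptor matched for the current speed */
	ret = usb_ep_enable(ep);	/* ends up in dwc2_hsotg_ep_enable() */
	if (ret)
		return ret;

	/* ... queue requests, do I/O ... */

	return usb_ep_disable(ep);	/* dwc2_hsotg_ep_disable_lock() */
}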
4244 * dwc2_hsotg_ep_disable - disable given endpoint
4245 * @ep: The endpoint to disable.
4250 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable()
4251 int dir_in = hs_ep->dir_in; in dwc2_hsotg_ep_disable()
4252 int index = hs_ep->index; in dwc2_hsotg_ep_disable()
4256 dev_dbg(hsotg->dev, "%s(ep %p)\n", __func__, ep); in dwc2_hsotg_ep_disable()
4258 if (ep == &hsotg->eps_out[0]->ep) { in dwc2_hsotg_ep_disable()
4259 dev_err(hsotg->dev, "%s: called for ep0\n", __func__); in dwc2_hsotg_ep_disable()
4260 return -EINVAL; in dwc2_hsotg_ep_disable()
4263 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_ep_disable()
4264 dev_err(hsotg->dev, "%s: called in host mode?\n", __func__); in dwc2_hsotg_ep_disable()
4265 return -EINVAL; in dwc2_hsotg_ep_disable()
4279 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_ep_disable()
4282 /* disable endpoint interrupts */ in dwc2_hsotg_ep_disable()
4283 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 0); in dwc2_hsotg_ep_disable()
4286 kill_all_requests(hsotg, hs_ep, -ESHUTDOWN); in dwc2_hsotg_ep_disable()
4288 hsotg->fifo_map &= ~(1 << hs_ep->fifo_index); in dwc2_hsotg_ep_disable()
4289 hs_ep->fifo_index = 0; in dwc2_hsotg_ep_disable()
4290 hs_ep->fifo_size = 0; in dwc2_hsotg_ep_disable()
4298 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable_lock()
4302 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4304 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4309 * on_list - check request is on the given endpoint
4317 list_for_each_entry_safe(req, treq, &ep->queue, queue) { in on_list()
4326 * dwc2_hsotg_ep_dequeue - dequeue the given request from an endpoint
4334 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_dequeue()
4337 dev_dbg(hs->dev, "ep_dequeue(%p,%p)\n", ep, req); in dwc2_hsotg_ep_dequeue()
4339 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4342 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4343 return -EINVAL; in dwc2_hsotg_ep_dequeue()
4347 if (req == &hs_ep->req->req) in dwc2_hsotg_ep_dequeue()
4350 dwc2_hsotg_complete_request(hs, hs_ep, hs_req, -ECONNRESET); in dwc2_hsotg_ep_dequeue()
4351 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
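/*
 * Sketch of the queue/dequeue pairing that reaches the dequeue op above
 * (illustrative only; example_complete() and the buffer handling are
 * assumptions): a dequeued request is completed with -ECONNRESET, as
 * dwc2_hsotg_ep_dequeue() shows.
 */
static void example_complete(struct usb_ep *ep, struct usb_request *req)
{
	/* req->status is -ECONNRESET when the request was dequeued */
}

static int example_cancel_io(struct usb_ep *ep, void *buf, unsigned int len)
{
	struct usb_request *req;
	int ret;

	req = usb_ep_alloc_request(ep, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	req->buf = buf;
	req->length = len;
	req->complete = example_complete;

	ret = usb_ep_queue(ep, req, GFP_KERNEL);
	if (ret) {
		usb_ep_free_request(ep, req);
		return ret;
	}

	/* Abort it again; the completion callback runs with -ECONNRESET. */
	usb_ep_dequeue(ep, req);
	return 0;
}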
4357 * dwc2_gadget_ep_set_wedge - set wedge on a given endpoint
4364 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_gadget_ep_set_wedge()
4369 spin_lock_irqsave(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4370 hs_ep->wedged = 1; in dwc2_gadget_ep_set_wedge()
4372 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4378 * dwc2_hsotg_ep_sethalt - set halt on a given endpoint
4381 * @now: If true, stall the endpoint now. Otherwise return -EAGAIN if
4390 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt()
4391 int index = hs_ep->index; in dwc2_hsotg_ep_sethalt()
4396 dev_info(hs->dev, "%s(ep %p %s, %d)\n", __func__, ep, ep->name, value); in dwc2_hsotg_ep_sethalt()
4402 dev_warn(hs->dev, in dwc2_hsotg_ep_sethalt()
4407 if (hs_ep->isochronous) { in dwc2_hsotg_ep_sethalt()
4408 dev_err(hs->dev, "%s is Isochronous Endpoint\n", ep->name); in dwc2_hsotg_ep_sethalt()
4409 return -EINVAL; in dwc2_hsotg_ep_sethalt()
4412 if (!now && value && !list_empty(&hs_ep->queue)) { in dwc2_hsotg_ep_sethalt()
4413 dev_dbg(hs->dev, "%s request is pending, cannot halt\n", in dwc2_hsotg_ep_sethalt()
4414 ep->name); in dwc2_hsotg_ep_sethalt()
4415 return -EAGAIN; in dwc2_hsotg_ep_sethalt()
4418 if (hs_ep->dir_in) { in dwc2_hsotg_ep_sethalt()
4428 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4448 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4457 hs_ep->halted = value; in dwc2_hsotg_ep_sethalt()
4462 * dwc2_hsotg_ep_sethalt_lock - set halt on a given endpoint with lock held
4469 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt_lock()
4473 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
4475 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
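/*
 * Function-driver view of the halt/wedge ops above (usage sketch; the
 * endpoint is assumed to be a non-ISOC, already-enabled ep):
 * usb_ep_set_halt() may return -EAGAIN while requests are still queued,
 * matching the "request is pending" path in dwc2_hsotg_ep_sethalt(),
 * and a wedged endpoint stays stalled until the function driver itself
 * clears the condition.
 */
static void example_stall_ep(struct usb_ep *ep, bool wedge)
{
	if (wedge) {
		usb_ep_set_wedge(ep);
	} else if (usb_ep_set_halt(ep) == -EAGAIN) {
		pr_debug("halt deferred, requests still pending\n");
	}
}

static void example_unstall_ep(struct usb_ep *ep)
{
	usb_ep_clear_halt(ep);
}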
4482 .disable = dwc2_hsotg_ep_disable_lock,
4493 * dwc2_hsotg_init - initialize the usb core
4515 dev_dbg(hsotg->dev, "GRXFSIZ=0x%08x, GNPTXFSIZ=0x%08x\n", in dwc2_hsotg_init()
4526 * dwc2_hsotg_udc_start - prepare the udc for work
4542 return -ENODEV; in dwc2_hsotg_udc_start()
4546 dev_err(hsotg->dev, "%s: no driver\n", __func__); in dwc2_hsotg_udc_start()
4547 return -EINVAL; in dwc2_hsotg_udc_start()
4550 if (driver->max_speed < USB_SPEED_FULL) in dwc2_hsotg_udc_start()
4551 dev_err(hsotg->dev, "%s: bad speed\n", __func__); in dwc2_hsotg_udc_start()
4553 if (!driver->setup) { in dwc2_hsotg_udc_start()
4554 dev_err(hsotg->dev, "%s: missing entry points\n", __func__); in dwc2_hsotg_udc_start()
4555 return -EINVAL; in dwc2_hsotg_udc_start()
4558 WARN_ON(hsotg->driver); in dwc2_hsotg_udc_start()
4560 hsotg->driver = driver; in dwc2_hsotg_udc_start()
4561 hsotg->gadget.dev.of_node = hsotg->dev->of_node; in dwc2_hsotg_udc_start()
4562 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_start()
4564 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) { in dwc2_hsotg_udc_start()
4570 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_start()
4571 otg_set_peripheral(hsotg->uphy->otg, &hsotg->gadget); in dwc2_hsotg_udc_start()
4573 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4579 hsotg->enabled = 0; in dwc2_hsotg_udc_start()
4580 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4582 gadget->sg_supported = using_desc_dma(hsotg); in dwc2_hsotg_udc_start()
4583 dev_info(hsotg->dev, "bound driver %s\n", driver->driver.name); in dwc2_hsotg_udc_start()
4588 hsotg->driver = NULL; in dwc2_hsotg_udc_start()
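/*
 * Hedged sketch of what ->udc_start() above expects from the gadget
 * driver it is handed: at least full-speed support and a setup()
 * callback.  This skeleton is illustrative; the callback bodies and
 * the "example" names are assumptions, not a driver from the tree.
 */
static int example_bind(struct usb_gadget *gadget,
			struct usb_gadget_driver *driver)
{
	return 0;
}

static void example_unbind(struct usb_gadget *gadget)
{
}

static int example_setup(struct usb_gadget *gadget,
			 const struct usb_ctrlrequest *ctrl)
{
	return -EOPNOTSUPP;	/* stall anything we do not handle */
}

static struct usb_gadget_driver example_gadget_driver = {
	.function	= "example",
	.max_speed	= USB_SPEED_HIGH,
	.bind		= example_bind,
	.unbind		= example_unbind,
	.setup		= example_setup,
};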
4593 * dwc2_hsotg_udc_stop - stop the udc
4605 return -ENODEV; in dwc2_hsotg_udc_stop()
4608 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_udc_stop()
4609 if (hsotg->eps_in[ep]) in dwc2_hsotg_udc_stop()
4610 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_udc_stop()
4611 if (hsotg->eps_out[ep]) in dwc2_hsotg_udc_stop()
4612 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_udc_stop()
4615 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4617 hsotg->driver = NULL; in dwc2_hsotg_udc_stop()
4618 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_stop()
4619 hsotg->enabled = 0; in dwc2_hsotg_udc_stop()
4621 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4623 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_stop()
4624 otg_set_peripheral(hsotg->uphy->otg, NULL); in dwc2_hsotg_udc_stop()
4626 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_hsotg_udc_stop()
4633 * dwc2_hsotg_gadget_getframe - read the frame number
4644 * dwc2_hsotg_set_selfpowered - set if device is self/bus powered
4646 * @is_selfpowered: Whether the device is self-powered
4656 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4657 gadget->is_selfpowered = !!is_selfpowered; in dwc2_hsotg_set_selfpowered()
4658 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4664 * dwc2_hsotg_pullup - connect/disconnect the USB PHY
4675 dev_dbg(hsotg->dev, "%s: is_on: %d op_state: %d\n", __func__, is_on, in dwc2_hsotg_pullup()
4676 hsotg->op_state); in dwc2_hsotg_pullup()
4678 /* Don't modify pullup state while in host mode */ in dwc2_hsotg_pullup()
4679 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_pullup()
4680 hsotg->enabled = is_on; in dwc2_hsotg_pullup()
4684 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_pullup()
4686 hsotg->enabled = 1; in dwc2_hsotg_pullup()
4688 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_pullup()
4694 hsotg->enabled = 0; in dwc2_hsotg_pullup()
4697 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_pullup()
4698 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_pullup()
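/*
 * Sketch of how the pullup op above is normally reached (assumption: a
 * bound function driver toggles D+ through the UDC core rather than
 * calling into dwc2 directly).
 */
static void example_toggle_pullup(struct usb_gadget *gadget, bool on)
{
	if (on)
		usb_gadget_connect(gadget);	/* -> dwc2_hsotg_pullup(gadget, 1) */
	else
		usb_gadget_disconnect(gadget);	/* -> dwc2_hsotg_pullup(gadget, 0) */
}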
4708 dev_dbg(hsotg->dev, "%s: is_active: %d\n", __func__, is_active); in dwc2_hsotg_vbus_session()
4709 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4713 * that state before being initialized / de-initialized in dwc2_hsotg_vbus_session()
4715 if (hsotg->lx_state == DWC2_L2 && hsotg->in_ppd) in dwc2_hsotg_vbus_session()
4723 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_hsotg_vbus_session()
4726 if (hsotg->enabled) { in dwc2_hsotg_vbus_session()
4727 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_vbus_session()
4736 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4741 * dwc2_hsotg_vbus_draw - report bMaxPower field
4751 if (IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_vbus_draw()
4752 return -ENOTSUPP; in dwc2_hsotg_vbus_draw()
4753 return usb_phy_set_power(hsotg->uphy, mA); in dwc2_hsotg_vbus_draw()
4761 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4764 hsotg->params.speed = DWC2_SPEED_PARAM_HIGH; in dwc2_gadget_set_speed()
4767 hsotg->params.speed = DWC2_SPEED_PARAM_FULL; in dwc2_gadget_set_speed()
4770 hsotg->params.speed = DWC2_SPEED_PARAM_LOW; in dwc2_gadget_set_speed()
4773 dev_err(hsotg->dev, "invalid speed (%d)\n", speed); in dwc2_gadget_set_speed()
4775 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4790 * dwc2_hsotg_initep - initialise a single endpoint
4814 hs_ep->dir_in = dir_in; in dwc2_hsotg_initep()
4815 hs_ep->index = epnum; in dwc2_hsotg_initep()
4817 snprintf(hs_ep->name, sizeof(hs_ep->name), "ep%d%s", epnum, dir); in dwc2_hsotg_initep()
4819 INIT_LIST_HEAD(&hs_ep->queue); in dwc2_hsotg_initep()
4820 INIT_LIST_HEAD(&hs_ep->ep.ep_list); in dwc2_hsotg_initep()
4824 list_add_tail(&hs_ep->ep.ep_list, &hsotg->gadget.ep_list); in dwc2_hsotg_initep()
4826 hs_ep->parent = hsotg; in dwc2_hsotg_initep()
4827 hs_ep->ep.name = hs_ep->name; in dwc2_hsotg_initep()
4829 if (hsotg->params.speed == DWC2_SPEED_PARAM_LOW) in dwc2_hsotg_initep()
4830 usb_ep_set_maxpacket_limit(&hs_ep->ep, 8); in dwc2_hsotg_initep()
4832 usb_ep_set_maxpacket_limit(&hs_ep->ep, in dwc2_hsotg_initep()
4834 hs_ep->ep.ops = &dwc2_hsotg_ep_ops; in dwc2_hsotg_initep()
4837 hs_ep->ep.caps.type_control = true; in dwc2_hsotg_initep()
4839 if (hsotg->params.speed != DWC2_SPEED_PARAM_LOW) { in dwc2_hsotg_initep()
4840 hs_ep->ep.caps.type_iso = true; in dwc2_hsotg_initep()
4841 hs_ep->ep.caps.type_bulk = true; in dwc2_hsotg_initep()
4843 hs_ep->ep.caps.type_int = true; in dwc2_hsotg_initep()
4847 hs_ep->ep.caps.dir_in = true; in dwc2_hsotg_initep()
4849 hs_ep->ep.caps.dir_out = true; in dwc2_hsotg_initep()
4852 * if we're using dma, we need to set the next-endpoint pointer in dwc2_hsotg_initep()
4867 * dwc2_hsotg_hw_cfg - read HW configuration registers
4880 hsotg->num_of_eps = hsotg->hw_params.num_dev_ep; in dwc2_hsotg_hw_cfg()
4883 hsotg->num_of_eps++; in dwc2_hsotg_hw_cfg()
4885 hsotg->eps_in[0] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4888 if (!hsotg->eps_in[0]) in dwc2_hsotg_hw_cfg()
4889 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4891 hsotg->eps_out[0] = hsotg->eps_in[0]; in dwc2_hsotg_hw_cfg()
4893 cfg = hsotg->hw_params.dev_ep_dirs; in dwc2_hsotg_hw_cfg()
4894 for (i = 1, cfg >>= 2; i < hsotg->num_of_eps; i++, cfg >>= 2) { in dwc2_hsotg_hw_cfg()
4898 hsotg->eps_in[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4900 if (!hsotg->eps_in[i]) in dwc2_hsotg_hw_cfg()
4901 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4905 hsotg->eps_out[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4907 if (!hsotg->eps_out[i]) in dwc2_hsotg_hw_cfg()
4908 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4912 hsotg->fifo_mem = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_hw_cfg()
4913 hsotg->dedicated_fifos = hsotg->hw_params.en_multiple_tx_fifo; in dwc2_hsotg_hw_cfg()
4915 dev_info(hsotg->dev, "EPs: %d, %s fifos, %d entries in SPRAM\n", in dwc2_hsotg_hw_cfg()
4916 hsotg->num_of_eps, in dwc2_hsotg_hw_cfg()
4917 hsotg->dedicated_fifos ? "dedicated" : "shared", in dwc2_hsotg_hw_cfg()
4918 hsotg->fifo_mem); in dwc2_hsotg_hw_cfg()
4923 * dwc2_hsotg_dump - dump state of the udc
4930 struct device *dev = hsotg->dev; in dwc2_hsotg_dump()
4946 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4953 for (idx = 0; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4955 "ep%d-in: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", idx, in dwc2_hsotg_dump()
4962 "ep%d-out: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", in dwc2_hsotg_dump()
4974 * dwc2_gadget_init - init function for gadget
4980 struct device *dev = hsotg->dev; in dwc2_gadget_init()
4986 hsotg->params.g_np_tx_fifo_size); in dwc2_gadget_init()
4987 dev_dbg(dev, "RXFIFO size: %d\n", hsotg->params.g_rx_fifo_size); in dwc2_gadget_init()
4989 switch (hsotg->params.speed) { in dwc2_gadget_init()
4991 hsotg->gadget.max_speed = USB_SPEED_LOW; in dwc2_gadget_init()
4994 hsotg->gadget.max_speed = USB_SPEED_FULL; in dwc2_gadget_init()
4997 hsotg->gadget.max_speed = USB_SPEED_HIGH; in dwc2_gadget_init()
5001 hsotg->gadget.ops = &dwc2_hsotg_gadget_ops; in dwc2_gadget_init()
5002 hsotg->gadget.name = dev_name(dev); in dwc2_gadget_init()
5003 hsotg->gadget.otg_caps = &hsotg->params.otg_caps; in dwc2_gadget_init()
5004 hsotg->remote_wakeup_allowed = 0; in dwc2_gadget_init()
5006 if (hsotg->params.lpm) in dwc2_gadget_init()
5007 hsotg->gadget.lpm_capable = true; in dwc2_gadget_init()
5009 if (hsotg->dr_mode == USB_DR_MODE_OTG) in dwc2_gadget_init()
5010 hsotg->gadget.is_otg = 1; in dwc2_gadget_init()
5011 else if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_gadget_init()
5012 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_gadget_init()
5016 dev_err(hsotg->dev, "Hardware configuration failed: %d\n", ret); in dwc2_gadget_init()
5020 hsotg->ctrl_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
5022 if (!hsotg->ctrl_buff) in dwc2_gadget_init()
5023 return -ENOMEM; in dwc2_gadget_init()
5025 hsotg->ep0_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
5027 if (!hsotg->ep0_buff) in dwc2_gadget_init()
5028 return -ENOMEM; in dwc2_gadget_init()
5036 ret = devm_request_irq(hsotg->dev, hsotg->irq, dwc2_hsotg_irq, in dwc2_gadget_init()
5037 IRQF_SHARED, dev_name(hsotg->dev), hsotg); in dwc2_gadget_init()
5043 /* hsotg->num_of_eps holds number of EPs other than ep0 */ in dwc2_gadget_init()
5045 if (hsotg->num_of_eps == 0) { in dwc2_gadget_init()
5047 return -EINVAL; in dwc2_gadget_init()
5052 INIT_LIST_HEAD(&hsotg->gadget.ep_list); in dwc2_gadget_init()
5053 hsotg->gadget.ep0 = &hsotg->eps_out[0]->ep; in dwc2_gadget_init()
5057 hsotg->ctrl_req = dwc2_hsotg_ep_alloc_request(&hsotg->eps_out[0]->ep, in dwc2_gadget_init()
5059 if (!hsotg->ctrl_req) { in dwc2_gadget_init()
5061 return -ENOMEM; in dwc2_gadget_init()
5065 for (epnum = 0; epnum < hsotg->num_of_eps; epnum++) { in dwc2_gadget_init()
5066 if (hsotg->eps_in[epnum]) in dwc2_gadget_init()
5067 dwc2_hsotg_initep(hsotg, hsotg->eps_in[epnum], in dwc2_gadget_init()
5069 if (hsotg->eps_out[epnum]) in dwc2_gadget_init()
5070 dwc2_hsotg_initep(hsotg, hsotg->eps_out[epnum], in dwc2_gadget_init()
5080 * dwc2_hsotg_remove - remove function for hsotg driver
5086 usb_del_gadget_udc(&hsotg->gadget); in dwc2_hsotg_remove()
5087 dwc2_hsotg_ep_free_request(&hsotg->eps_out[0]->ep, hsotg->ctrl_req); in dwc2_hsotg_remove()
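/*
 * Pairing sketch (assumption about the call site, which lives outside
 * this file in the dwc2 probe path): the gadget initialised by
 * dwc2_gadget_init() is registered with the UDC core, and
 * dwc2_hsotg_remove() above undoes it with usb_del_gadget_udc().
 */
static int example_register_udc(struct dwc2_hsotg *hsotg)
{
	return usb_add_gadget_udc(hsotg->dev, &hsotg->gadget);
}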
5096 if (hsotg->lx_state != DWC2_L0) in dwc2_hsotg_suspend()
5099 if (hsotg->driver) { in dwc2_hsotg_suspend()
5102 dev_info(hsotg->dev, "suspending usb gadget %s\n", in dwc2_hsotg_suspend()
5103 hsotg->driver->driver.name); in dwc2_hsotg_suspend()
5105 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5106 if (hsotg->enabled) in dwc2_hsotg_suspend()
5109 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_suspend()
5110 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5112 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_suspend()
5113 if (hsotg->eps_in[ep]) in dwc2_hsotg_suspend()
5114 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_suspend()
5115 if (hsotg->eps_out[ep]) in dwc2_hsotg_suspend()
5116 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_suspend()
5127 if (hsotg->lx_state == DWC2_L2) in dwc2_hsotg_resume()
5130 if (hsotg->driver) { in dwc2_hsotg_resume()
5131 dev_info(hsotg->dev, "resuming usb gadget %s\n", in dwc2_hsotg_resume()
5132 hsotg->driver->driver.name); in dwc2_hsotg_resume()
5134 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_resume()
5136 if (hsotg->enabled) { in dwc2_hsotg_resume()
5137 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_resume()
5141 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_resume()
5148 * dwc2_backup_device_registers() - Backup controller device registers.
5159 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_backup_device_registers()
5162 dr = &hsotg->dr_backup; in dwc2_backup_device_registers()
5164 dr->dcfg = dwc2_readl(hsotg, DCFG); in dwc2_backup_device_registers()
5165 dr->dctl = dwc2_readl(hsotg, DCTL); in dwc2_backup_device_registers()
5166 dr->daintmsk = dwc2_readl(hsotg, DAINTMSK); in dwc2_backup_device_registers()
5167 dr->diepmsk = dwc2_readl(hsotg, DIEPMSK); in dwc2_backup_device_registers()
5168 dr->doepmsk = dwc2_readl(hsotg, DOEPMSK); in dwc2_backup_device_registers()
5170 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_backup_device_registers()
5172 dr->diepctl[i] = dwc2_readl(hsotg, DIEPCTL(i)); in dwc2_backup_device_registers()
5175 if (dr->diepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5176 dr->diepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5178 dr->diepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
5180 dr->dieptsiz[i] = dwc2_readl(hsotg, DIEPTSIZ(i)); in dwc2_backup_device_registers()
5181 dr->diepdma[i] = dwc2_readl(hsotg, DIEPDMA(i)); in dwc2_backup_device_registers()
5184 dr->doepctl[i] = dwc2_readl(hsotg, DOEPCTL(i)); in dwc2_backup_device_registers()
5187 if (dr->doepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5188 dr->doepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5190 dr->doepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
5192 dr->doeptsiz[i] = dwc2_readl(hsotg, DOEPTSIZ(i)); in dwc2_backup_device_registers()
5193 dr->doepdma[i] = dwc2_readl(hsotg, DOEPDMA(i)); in dwc2_backup_device_registers()
5194 dr->dtxfsiz[i] = dwc2_readl(hsotg, DPTXFSIZN(i)); in dwc2_backup_device_registers()
5196 dr->valid = true; in dwc2_backup_device_registers()
5201 * dwc2_restore_device_registers() - Restore controller device registers.
5215 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_restore_device_registers()
5218 dr = &hsotg->dr_backup; in dwc2_restore_device_registers()
5219 if (!dr->valid) { in dwc2_restore_device_registers()
5220 dev_err(hsotg->dev, "%s: no device registers to restore\n", in dwc2_restore_device_registers()
5222 return -EINVAL; in dwc2_restore_device_registers()
5224 dr->valid = false; in dwc2_restore_device_registers()
5227 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_restore_device_registers()
5229 dwc2_writel(hsotg, dr->daintmsk, DAINTMSK); in dwc2_restore_device_registers()
5230 dwc2_writel(hsotg, dr->diepmsk, DIEPMSK); in dwc2_restore_device_registers()
5231 dwc2_writel(hsotg, dr->doepmsk, DOEPMSK); in dwc2_restore_device_registers()
5233 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_restore_device_registers()
5235 dwc2_writel(hsotg, dr->dieptsiz[i], DIEPTSIZ(i)); in dwc2_restore_device_registers()
5236 dwc2_writel(hsotg, dr->diepdma[i], DIEPDMA(i)); in dwc2_restore_device_registers()
5237 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5238 /* WA for enabled IN EPs in DDMA mode: on entering in dwc2_restore_device_registers()
5239 * hibernation a wrong value is read and saved from DIEPDMAx, in dwc2_restore_device_registers()
5240 * and as a result a BNA interrupt is asserted on hibernation exit in dwc2_restore_device_registers()
5244 (dr->diepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5245 dr->diepdma[i] = hsotg->eps_in[i]->desc_list_dma; in dwc2_restore_device_registers()
5246 dwc2_writel(hsotg, dr->dtxfsiz[i], DPTXFSIZN(i)); in dwc2_restore_device_registers()
5247 dwc2_writel(hsotg, dr->diepctl[i], DIEPCTL(i)); in dwc2_restore_device_registers()
5249 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5250 /* WA for enabled OUT EPs in DDMA mode: on entering in dwc2_restore_device_registers()
5251 * hibernation a wrong value is read and saved from DOEPDMAx, in dwc2_restore_device_registers()
5252 * and as a result a BNA interrupt is asserted on hibernation exit in dwc2_restore_device_registers()
5256 (dr->doepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5257 dr->doepdma[i] = hsotg->eps_out[i]->desc_list_dma; in dwc2_restore_device_registers()
5258 dwc2_writel(hsotg, dr->doepdma[i], DOEPDMA(i)); in dwc2_restore_device_registers()
5259 dwc2_writel(hsotg, dr->doepctl[i], DOEPCTL(i)); in dwc2_restore_device_registers()
5266 * dwc2_gadget_init_lpm - Configure the core to support LPM in device mode
5275 if (!hsotg->params.lpm) in dwc2_gadget_init_lpm()
5279 val |= hsotg->params.hird_threshold_en ? GLPMCFG_HIRD_THRES_EN : 0; in dwc2_gadget_init_lpm()
5280 val |= hsotg->params.lpm_clock_gating ? GLPMCFG_ENBLSLPM : 0; in dwc2_gadget_init_lpm()
5281 val |= hsotg->params.hird_threshold << GLPMCFG_HIRD_THRES_SHIFT; in dwc2_gadget_init_lpm()
5282 val |= hsotg->params.besl ? GLPMCFG_ENBESL : 0; in dwc2_gadget_init_lpm()
5286 dev_dbg(hsotg->dev, "GLPMCFG=0x%08x\n", dwc2_readl(hsotg, GLPMCFG)); in dwc2_gadget_init_lpm()
5289 if (hsotg->params.service_interval) in dwc2_gadget_init_lpm()
5294 * dwc2_gadget_program_ref_clk - Program GREFCLK register in device mode
5304 val |= hsotg->params.ref_clk_per << GREFCLK_REFCLKPER_SHIFT; in dwc2_gadget_program_ref_clk()
5305 val |= hsotg->params.sof_cnt_wkup_alert << in dwc2_gadget_program_ref_clk()
5309 dev_dbg(hsotg->dev, "GREFCLK=0x%08x\n", dwc2_readl(hsotg, GREFCLK)); in dwc2_gadget_program_ref_clk()
5313 * dwc2_gadget_enter_hibernation() - Put controller in Hibernation.
5317 * Return non-zero if failed to enter hibernation.
5327 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_hibernation()
5328 dev_dbg(hsotg->dev, "Start of hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5331 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_hibernation()
5337 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_hibernation()
5363 /* Set flag to indicate that we are in hibernation */ in dwc2_gadget_enter_hibernation()
5364 hsotg->hibernated = 1; in dwc2_gadget_enter_hibernation()
5372 /* Unmask device mode interrupts in GPWRDN */ in dwc2_gadget_enter_hibernation()
5393 hsotg->gr_backup.gpwrdn = dwc2_readl(hsotg, GPWRDN); in dwc2_gadget_enter_hibernation()
5394 dev_dbg(hsotg->dev, "Hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5401 * This function is for exiting Device mode hibernation on host-initiated
5402 * resume/reset or device-initiated remote wakeup.
5408 * Return non-zero if failed to exit from hibernation.
5420 gr = &hsotg->gr_backup; in dwc2_gadget_exit_hibernation()
5421 dr = &hsotg->dr_backup; in dwc2_gadget_exit_hibernation()
5423 if (!hsotg->hibernated) { in dwc2_gadget_exit_hibernation()
5424 dev_dbg(hsotg->dev, "Already exited from Hibernation\n"); in dwc2_gadget_exit_hibernation()
5427 dev_dbg(hsotg->dev, in dwc2_gadget_exit_hibernation()
5438 /* De-assert Restore */ in dwc2_gadget_exit_hibernation()
5451 dwc2_writel(hsotg, gr->gusbcfg, GUSBCFG); in dwc2_gadget_exit_hibernation()
5452 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_hibernation()
5453 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_gadget_exit_hibernation()
5464 /* De-assert Wakeup Logic */ in dwc2_gadget_exit_hibernation()
5472 dwc2_writel(hsotg, dr->dctl | DCTL_RMTWKUPSIG, DCTL); in dwc2_gadget_exit_hibernation()
5488 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_hibernation()
5496 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_hibernation()
5508 hsotg->hibernated = 0; in dwc2_gadget_exit_hibernation()
5509 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_hibernation()
5510 dev_dbg(hsotg->dev, "Hibernation recovery completes here\n"); in dwc2_gadget_exit_hibernation()
5516 * dwc2_gadget_enter_partial_power_down() - Put controller in partial
5521 * Return: non-zero if failed to enter device partial power down.
5523 * This function is for entering device mode partial power down.
5530 dev_dbg(hsotg->dev, "Entering device partial power down started.\n"); in dwc2_gadget_enter_partial_power_down()
5535 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_partial_power_down()
5542 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_partial_power_down()
5568 hsotg->in_ppd = 1; in dwc2_gadget_enter_partial_power_down()
5569 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_partial_power_down()
5571 dev_dbg(hsotg->dev, "Entering device partial power down completed.\n"); in dwc2_gadget_enter_partial_power_down()
5577 * dwc2_gadget_exit_partial_power_down() - Exit controller from device partial
5583 * Return: non-zero if failed to exit device partial power down.
5585 * This function is for exiting from device mode partial power down.
5595 dr = &hsotg->dr_backup; in dwc2_gadget_exit_partial_power_down()
5597 dev_dbg(hsotg->dev, "Exiting device partial Power Down started.\n"); in dwc2_gadget_exit_partial_power_down()
5615 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_partial_power_down()
5620 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_partial_power_down()
5624 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_partial_power_down()
5630 /* Set the Power-On Programming done bit */ in dwc2_gadget_exit_partial_power_down()
5636 hsotg->in_ppd = 0; in dwc2_gadget_exit_partial_power_down()
5637 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_partial_power_down()
5639 dev_dbg(hsotg->dev, "Exiting device partial Power Down completed.\n"); in dwc2_gadget_exit_partial_power_down()
5644 * dwc2_gadget_enter_clock_gating() - Put controller in clock gating.
5648 * This function cannot fail and has no return value.
5650 * This function is for entering device mode clock gating.
5656 dev_dbg(hsotg->dev, "Entering device clock gating.\n"); in dwc2_gadget_enter_clock_gating()
5670 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_clock_gating()
5671 hsotg->bus_suspended = true; in dwc2_gadget_enter_clock_gating()
5675 * dwc2_gadget_exit_clock_gating() - Exit controller from device clock gating.
5680 * This function is for exiting from device mode clock gating.
5687 dev_dbg(hsotg->dev, "Exiting device clock gating.\n"); in dwc2_gadget_exit_clock_gating()
5710 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_clock_gating()
5711 hsotg->bus_suspended = false; in dwc2_gadget_exit_clock_gating()
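/*
 * Hedged sketch of how the three low-power paths in this file are
 * typically selected on a device-mode bus suspend, keyed off
 * params.power_down.  The dispatch below is an assumption modelled on
 * the enter/exit helpers above, not a copy of the real suspend
 * interrupt handler.
 */
static void dwc2_example_enter_low_power(struct dwc2_hsotg *hsotg)
{
	switch (hsotg->params.power_down) {
	case DWC2_POWER_DOWN_PARAM_PARTIAL:
		dwc2_gadget_enter_partial_power_down(hsotg);
		break;
	case DWC2_POWER_DOWN_PARAM_HIBERNATION:
		dwc2_gadget_enter_hibernation(hsotg);
		break;
	case DWC2_POWER_DOWN_PARAM_NONE:
	default:
		if (!hsotg->params.no_clock_gating)
			dwc2_gadget_enter_clock_gating(hsotg);
		break;
	}
}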