Lines Matching refs:ep
165 static void ep_clear_seqnum(struct net2280_ep *ep);
171 static inline void enable_pciirqenb(struct net2280_ep *ep) in enable_pciirqenb() argument
173 u32 tmp = readl(&ep->dev->regs->pciirqenb0); in enable_pciirqenb()
175 if (ep->dev->quirks & PLX_LEGACY) in enable_pciirqenb()
176 tmp |= BIT(ep->num); in enable_pciirqenb()
178 tmp |= BIT(ep_bit[ep->num]); in enable_pciirqenb()
179 writel(tmp, &ep->dev->regs->pciirqenb0); in enable_pciirqenb()
188 struct net2280_ep *ep; in net2280_enable() local
196 ep = container_of(_ep, struct net2280_ep, ep); in net2280_enable()
197 if (!_ep || !desc || ep->desc || _ep->name == ep0name || in net2280_enable()
202 dev = ep->dev; in net2280_enable()
219 ep->is_in = !!usb_endpoint_dir_in(desc); in net2280_enable()
220 if (dev->enhanced_mode && ep->is_in && ep_key[ep->num]) { in net2280_enable()
228 if (ep->num > 4 && max > 64 && (dev->quirks & PLX_LEGACY)) { in net2280_enable()
235 ep->desc = desc; in net2280_enable()
238 ep->stopped = 0; in net2280_enable()
239 ep->wedged = 0; in net2280_enable()
240 ep->out_overflow = 0; in net2280_enable()
243 set_max_speed(ep, max); in net2280_enable()
246 writel(BIT(FIFO_FLUSH), &ep->regs->ep_stat); in net2280_enable()
249 tmp = readl(&ep->cfg->ep_cfg); in net2280_enable()
256 if (ep->is_in) in net2280_enable()
268 &ep->regs->ep_rsp); in net2280_enable()
279 ep->is_iso = (type == USB_ENDPOINT_XFER_ISOC); in net2280_enable()
287 ep->is_in = (tmp & USB_DIR_IN) != 0; in net2280_enable()
290 if (dev->enhanced_mode && ep->is_in) { in net2280_enable()
296 tmp |= (ep->is_in << ENDPOINT_DIRECTION); in net2280_enable()
302 tmp |= (ep->ep.maxburst << MAX_BURST_SIZE); in net2280_enable()
309 if (!ep->is_in) in net2280_enable()
310 writel(BIT(SET_NAK_OUT_PACKETS), &ep->regs->ep_rsp); in net2280_enable()
316 BIT(CLEAR_NAK_OUT_PACKETS_MODE), &ep->regs->ep_rsp); in net2280_enable()
320 ep_clear_seqnum(ep); in net2280_enable()
321 writel(tmp, &ep->cfg->ep_cfg); in net2280_enable()
324 if (!ep->dma) { /* pio, per-packet */ in net2280_enable()
325 enable_pciirqenb(ep); in net2280_enable()
330 tmp |= readl(&ep->regs->ep_irqenb); in net2280_enable()
331 writel(tmp, &ep->regs->ep_irqenb); in net2280_enable()
333 tmp = BIT((8 + ep->num)); /* completion */ in net2280_enable()
343 writel(tmp, &ep->regs->ep_irqenb); in net2280_enable()
345 enable_pciirqenb(ep); in net2280_enable()
353 ep->dma ? "dma" : "pio", max); in net2280_enable()
360 dev_err(&ep->dev->pdev->dev, "%s: error=%d\n", __func__, ret); in net2280_enable()
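For orientation, net2280_enable() is the driver's backend for the gadget core's usb_ep_enable(). A minimal sketch of the function-driver side follows; the example_ names and the bulk-IN descriptor values are illustrative assumptions, not code from this file:

/* Sketch: a gadget function driver enabling a bulk-IN endpoint.
 * ep->desc must point at a valid descriptor before usb_ep_enable(),
 * which then reaches net2280_enable() through the endpoint ops. */
#include <linux/usb/ch9.h>
#include <linux/usb/gadget.h>

static struct usb_endpoint_descriptor example_bulk_in_desc = {
	.bLength		= USB_DT_ENDPOINT_SIZE,
	.bDescriptorType	= USB_DT_ENDPOINT,
	.bEndpointAddress	= USB_DIR_IN,
	.bmAttributes		= USB_ENDPOINT_XFER_BULK,
	.wMaxPacketSize		= cpu_to_le16(512),	/* high-speed bulk */
};

static int example_enable_ep(struct usb_ep *ep)
{
	int ret;

	ep->desc = &example_bulk_in_desc;
	ret = usb_ep_enable(ep);
	if (ret)
		pr_err("usb_ep_enable(%s) failed: %d\n", ep->name, ret);
	return ret;
}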
384 struct net2280_ep *ep) in ep_reset_228x() argument
388 ep->desc = NULL; in ep_reset_228x()
389 INIT_LIST_HEAD(&ep->queue); in ep_reset_228x()
391 usb_ep_set_maxpacket_limit(&ep->ep, ~0); in ep_reset_228x()
392 ep->ep.ops = &net2280_ep_ops; in ep_reset_228x()
395 if (ep->dma) { in ep_reset_228x()
396 writel(0, &ep->dma->dmactl); in ep_reset_228x()
400 &ep->dma->dmastat); in ep_reset_228x()
403 tmp &= ~BIT(ep->num); in ep_reset_228x()
407 tmp &= ~BIT((8 + ep->num)); /* completion */ in ep_reset_228x()
410 writel(0, &ep->regs->ep_irqenb); in ep_reset_228x()
415 if (!ep->is_in || (ep->dev->quirks & PLX_2280)) { in ep_reset_228x()
428 if (ep->num != 0) { in ep_reset_228x()
432 writel(tmp, &ep->regs->ep_rsp); in ep_reset_228x()
435 if (ep->dev->quirks & PLX_2280) in ep_reset_228x()
454 &ep->regs->ep_stat); in ep_reset_228x()
460 struct net2280_ep *ep) in ep_reset_338x() argument
464 ep->desc = NULL; in ep_reset_338x()
465 INIT_LIST_HEAD(&ep->queue); in ep_reset_338x()
467 usb_ep_set_maxpacket_limit(&ep->ep, ~0); in ep_reset_338x()
468 ep->ep.ops = &net2280_ep_ops; in ep_reset_338x()
471 if (ep->dma) { in ep_reset_338x()
472 writel(0, &ep->dma->dmactl); in ep_reset_338x()
478 &ep->dma->dmastat); in ep_reset_338x()
480 dmastat = readl(&ep->dma->dmastat); in ep_reset_338x()
482 ep_warn(ep->dev, "The dmastat return = %x!!\n", in ep_reset_338x()
484 writel(0x5a, &ep->dma->dmastat); in ep_reset_338x()
488 tmp &= ~BIT(ep_bit[ep->num]); in ep_reset_338x()
491 if (ep->num < 5) { in ep_reset_338x()
493 tmp &= ~BIT((8 + ep->num)); /* completion */ in ep_reset_338x()
497 writel(0, &ep->regs->ep_irqenb); in ep_reset_338x()
505 BIT(DATA_IN_TOKEN_INTERRUPT), &ep->regs->ep_stat); in ep_reset_338x()
507 tmp = readl(&ep->cfg->ep_cfg); in ep_reset_338x()
508 if (ep->is_in) in ep_reset_338x()
512 writel(tmp, &ep->cfg->ep_cfg); in ep_reset_338x()
519 struct net2280_ep *ep; in net2280_disable() local
522 ep = container_of(_ep, struct net2280_ep, ep); in net2280_disable()
523 if (!_ep || !ep->desc || _ep->name == ep0name) { in net2280_disable()
527 spin_lock_irqsave(&ep->dev->lock, flags); in net2280_disable()
528 nuke(ep); in net2280_disable()
530 if (ep->dev->quirks & PLX_SUPERSPEED) in net2280_disable()
531 ep_reset_338x(ep->dev->regs, ep); in net2280_disable()
533 ep_reset_228x(ep->dev->regs, ep); in net2280_disable()
535 ep_vdbg(ep->dev, "disabled %s %s\n", in net2280_disable()
536 ep->dma ? "dma" : "pio", _ep->name); in net2280_disable()
539 (void)readl(&ep->cfg->ep_cfg); in net2280_disable()
541 if (!ep->dma && ep->num >= 1 && ep->num <= 4) in net2280_disable()
542 ep->dma = &ep->dev->dma[ep->num - 1]; in net2280_disable()
544 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_disable()
553 struct net2280_ep *ep; in net2280_alloc_request() local
560 ep = container_of(_ep, struct net2280_ep, ep); in net2280_alloc_request()
569 if (ep->dma) { in net2280_alloc_request()
572 td = pci_pool_alloc(ep->dev->requests, gfp_flags, in net2280_alloc_request()
587 struct net2280_ep *ep; in net2280_free_request() local
590 ep = container_of(_ep, struct net2280_ep, ep); in net2280_free_request()
592 dev_err(&ep->dev->pdev->dev, "%s: Invalid ep=%p or req=%p\n", in net2280_free_request()
600 pci_pool_free(ep->dev->requests, req->td, req->td_dma); in net2280_free_request()
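net2280_alloc_request() and net2280_free_request() sit behind the generic usb_ep_alloc_request()/usb_ep_free_request() calls. A sketch of typical caller-side allocation and teardown, with the example_ names and buffer size as assumptions:

/* Sketch: allocate a request plus data buffer, and free both again.
 * On a DMA-capable net2280 endpoint the alloc path also takes a
 * dummy descriptor from the driver's pci_pool, as shown above. */
#include <linux/slab.h>
#include <linux/usb/gadget.h>

static struct usb_request *example_alloc_req(struct usb_ep *ep, unsigned int len)
{
	struct usb_request *req;

	req = usb_ep_alloc_request(ep, GFP_KERNEL);
	if (!req)
		return NULL;

	req->length = len;
	req->buf = kmalloc(len, GFP_KERNEL);
	if (!req->buf) {
		usb_ep_free_request(ep, req);
		return NULL;
	}
	return req;
}

static void example_free_req(struct usb_ep *ep, struct usb_request *req)
{
	kfree(req->buf);
	usb_ep_free_request(ep, req);
}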
613 static void write_fifo(struct net2280_ep *ep, struct usb_request *req) in write_fifo() argument
615 struct net2280_ep_regs __iomem *regs = ep->regs; in write_fifo()
632 count = ep->ep.maxpacket; in write_fifo()
636 ep_vdbg(ep->dev, "write %s fifo (IN) %d bytes%s req %p\n", in write_fifo()
637 ep->ep.name, count, in write_fifo()
638 (count != ep->ep.maxpacket) ? " (short)" : "", in write_fifo()
656 if (count || total < ep->ep.maxpacket) { in write_fifo()
659 set_fifo_bytecount(ep, count & 0x03); in write_fifo()
673 static void out_flush(struct net2280_ep *ep) in out_flush() argument
678 statp = &ep->regs->ep_stat; in out_flush()
682 ep_dbg(ep->dev, "%s %s %08x !NAK\n", in out_flush()
683 ep->ep.name, __func__, tmp); in out_flush()
684 writel(BIT(SET_NAK_OUT_PACKETS), &ep->regs->ep_rsp); in out_flush()
696 ep->dev->gadget.speed == USB_SPEED_FULL) { in out_flush()
713 static int read_fifo(struct net2280_ep *ep, struct net2280_request *req) in read_fifo() argument
715 struct net2280_ep_regs __iomem *regs = ep->regs; in read_fifo()
723 if (ep->dev->chiprev == 0x0100 && in read_fifo()
724 ep->dev->gadget.speed == USB_SPEED_FULL) { in read_fifo()
726 tmp = readl(&ep->regs->ep_stat); in read_fifo()
730 start_out_naking(ep); in read_fifo()
743 tmp = readl(&ep->regs->ep_stat); in read_fifo()
753 if ((tmp % ep->ep.maxpacket) != 0) { in read_fifo()
754 ep_err(ep->dev, in read_fifo()
756 ep->ep.name, count, tmp); in read_fifo()
767 is_short = (count == 0) || ((count % ep->ep.maxpacket) != 0); in read_fifo()
769 ep_vdbg(ep->dev, "read %s fifo (OUT) %d bytes%s%s%s req %p %d/%d\n", in read_fifo()
770 ep->ep.name, count, is_short ? " (short)" : "", in read_fifo()
790 out_flush(ep); in read_fifo()
792 writel(BIT(CLEAR_NAK_OUT_PACKETS), &ep->regs->ep_rsp); in read_fifo()
793 (void) readl(&ep->regs->ep_rsp); in read_fifo()
801 static void fill_dma_desc(struct net2280_ep *ep, in fill_dma_desc() argument
812 if (ep->is_in) in fill_dma_desc()
814 if ((!ep->is_in && (dmacount % ep->ep.maxpacket) != 0) || in fill_dma_desc()
815 !(ep->dev->quirks & PLX_2280)) in fill_dma_desc()
853 static void start_queue(struct net2280_ep *ep, u32 dmactl, u32 td_dma) in start_queue() argument
855 struct net2280_dma_regs __iomem *dma = ep->dma; in start_queue()
856 unsigned int tmp = BIT(VALID_BIT) | (ep->is_in << DMA_DIRECTION); in start_queue()
858 if (!(ep->dev->quirks & PLX_2280)) in start_queue()
865 if (ep->dev->quirks & PLX_SUPERSPEED) in start_queue()
870 (void) readl(&ep->dev->pci->pcimstctl); in start_queue()
874 if (!ep->is_in) in start_queue()
875 stop_out_naking(ep); in start_queue()
878 static void start_dma(struct net2280_ep *ep, struct net2280_request *req) in start_dma() argument
881 struct net2280_dma_regs __iomem *dma = ep->dma; in start_dma()
887 writel(0, &ep->dma->dmactl); in start_dma()
890 if (!ep->is_in && (readl(&ep->regs->ep_stat) & in start_dma()
893 &ep->regs->ep_stat); in start_dma()
895 tmp = readl(&ep->regs->ep_avail); in start_dma()
922 if (ep->is_in) { in start_dma()
923 if (likely((req->req.length % ep->ep.maxpacket) || in start_dma()
926 ep->in_fifo_validate = 1; in start_dma()
928 ep->in_fifo_validate = 0; in start_dma()
932 req->td->dmadesc = cpu_to_le32 (ep->td_dma); in start_dma()
933 fill_dma_desc(ep, req, 1); in start_dma()
937 start_queue(ep, tmp, req->td_dma); in start_dma()
941 queue_dma(struct net2280_ep *ep, struct net2280_request *req, int valid) in queue_dma() argument
947 end = ep->dummy; in queue_dma()
948 ep->dummy = req->td; in queue_dma()
951 tmp = ep->td_dma; in queue_dma()
952 ep->td_dma = req->td_dma; in queue_dma()
955 end->dmadesc = cpu_to_le32 (ep->td_dma); in queue_dma()
957 fill_dma_desc(ep, req, valid); in queue_dma()
961 done(struct net2280_ep *ep, struct net2280_request *req, int status) in done() argument
964 unsigned stopped = ep->stopped; in done()
973 dev = ep->dev; in done()
974 if (ep->dma) in done()
975 usb_gadget_unmap_request(&dev->gadget, &req->req, ep->is_in); in done()
979 ep->ep.name, &req->req, status, in done()
983 ep->stopped = 1; in done()
985 usb_gadget_giveback_request(&ep->ep, &req->req); in done()
987 ep->stopped = stopped; in done()
996 struct net2280_ep *ep; in net2280_queue() local
1004 ep = container_of(_ep, struct net2280_ep, ep); in net2280_queue()
1005 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_queue()
1019 dev = ep->dev; in net2280_queue()
1026 if (ep->dma && _req->length == 0) { in net2280_queue()
1032 if (ep->dma) { in net2280_queue()
1034 ep->is_in); in net2280_queue()
1048 if (list_empty(&ep->queue) && !ep->stopped && in net2280_queue()
1049 !((dev->quirks & PLX_SUPERSPEED) && ep->dma && in net2280_queue()
1050 (readl(&ep->regs->ep_rsp) & BIT(CLEAR_ENDPOINT_HALT)))) { in net2280_queue()
1053 if (ep->dma) in net2280_queue()
1054 start_dma(ep, req); in net2280_queue()
1057 if (ep->num == 0 && _req->length == 0) { in net2280_queue()
1058 allow_status(ep); in net2280_queue()
1059 done(ep, req, 0); in net2280_queue()
1060 ep_vdbg(dev, "%s status ack\n", ep->ep.name); in net2280_queue()
1065 if (ep->is_in) in net2280_queue()
1066 write_fifo(ep, _req); in net2280_queue()
1067 else if (list_empty(&ep->queue)) { in net2280_queue()
1071 s = readl(&ep->regs->ep_stat); in net2280_queue()
1079 if (read_fifo(ep, req) && in net2280_queue()
1080 ep->num == 0) { in net2280_queue()
1081 done(ep, req, 0); in net2280_queue()
1082 allow_status(ep); in net2280_queue()
1085 } else if (read_fifo(ep, req) && in net2280_queue()
1086 ep->num != 0) { in net2280_queue()
1087 done(ep, req, 0); in net2280_queue()
1090 s = readl(&ep->regs->ep_stat); in net2280_queue()
1096 &ep->regs->ep_rsp); in net2280_queue()
1100 } else if (ep->dma) { in net2280_queue()
1103 if (ep->is_in) { in net2280_queue()
1110 (req->req.length % ep->ep.maxpacket)); in net2280_queue()
1111 if (expect != ep->in_fifo_validate) in net2280_queue()
1114 queue_dma(ep, req, valid); in net2280_queue()
1118 ep->responded = 1; in net2280_queue()
1120 list_add_tail(&req->queue, &ep->queue); in net2280_queue()
1128 dev_err(&ep->dev->pdev->dev, "%s: error=%d\n", __func__, ret); in net2280_queue()
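net2280_queue() is the driver's usb_ep_queue() backend. A minimal sketch of submitting a request with a completion callback (example_ names assumed); when the endpoint queue is empty, the code above starts DMA or PIO immediately:

/* Sketch: submit a prepared request; the callback runs when the
 * transfer completes, is flushed by nuke(), or is dequeued. */
#include <linux/usb/gadget.h>

static void example_complete(struct usb_ep *ep, struct usb_request *req)
{
	if (req->status)
		pr_debug("%s: ended with status %d\n", ep->name, req->status);
	else
		pr_debug("%s: moved %u of %u bytes\n",
			 ep->name, req->actual, req->length);
}

static int example_submit(struct usb_ep *ep, struct usb_request *req)
{
	req->complete = example_complete;
	req->zero = 0;		/* no extra zero-length packet */
	return usb_ep_queue(ep, req, GFP_ATOMIC);
}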
1133 dma_done(struct net2280_ep *ep, struct net2280_request *req, u32 dmacount, in dma_done() argument
1137 done(ep, req, status); in dma_done()
1140 static void scan_dma_completions(struct net2280_ep *ep) in scan_dma_completions() argument
1145 while (!list_empty(&ep->queue)) { in scan_dma_completions()
1149 req = list_entry(ep->queue.next, in scan_dma_completions()
1164 tmp = readl(&ep->dma->dmacount); in scan_dma_completions()
1168 dma_done(ep, req, tmp, 0); in scan_dma_completions()
1170 } else if (!ep->is_in && in scan_dma_completions()
1171 (req->req.length % ep->ep.maxpacket) && in scan_dma_completions()
1172 !(ep->dev->quirks & PLX_SUPERSPEED)) { in scan_dma_completions()
1174 tmp = readl(&ep->regs->ep_stat); in scan_dma_completions()
1180 ep_warn(ep->dev, "%s lost packet sync!\n", in scan_dma_completions()
1181 ep->ep.name); in scan_dma_completions()
1184 tmp = readl(&ep->regs->ep_avail); in scan_dma_completions()
1187 ep->out_overflow = 1; in scan_dma_completions()
1188 ep_dbg(ep->dev, in scan_dma_completions()
1190 ep->ep.name, tmp, in scan_dma_completions()
1196 dma_done(ep, req, tmp, 0); in scan_dma_completions()
1200 static void restart_dma(struct net2280_ep *ep) in restart_dma() argument
1204 if (ep->stopped) in restart_dma()
1206 req = list_entry(ep->queue.next, struct net2280_request, queue); in restart_dma()
1208 start_dma(ep, req); in restart_dma()
1211 static void abort_dma(struct net2280_ep *ep) in abort_dma() argument
1214 if (likely(!list_empty(&ep->queue))) { in abort_dma()
1216 writel(BIT(DMA_ABORT), &ep->dma->dmastat); in abort_dma()
1217 spin_stop_dma(ep->dma); in abort_dma()
1219 stop_dma(ep->dma); in abort_dma()
1220 scan_dma_completions(ep); in abort_dma()
1224 static void nuke(struct net2280_ep *ep) in nuke() argument
1229 ep->stopped = 1; in nuke()
1230 if (ep->dma) in nuke()
1231 abort_dma(ep); in nuke()
1232 while (!list_empty(&ep->queue)) { in nuke()
1233 req = list_entry(ep->queue.next, in nuke()
1236 done(ep, req, -ESHUTDOWN); in nuke()
1243 struct net2280_ep *ep; in net2280_dequeue() local
1249 ep = container_of(_ep, struct net2280_ep, ep); in net2280_dequeue()
1250 if (!_ep || (!ep->desc && ep->num != 0) || !_req) { in net2280_dequeue()
1256 spin_lock_irqsave(&ep->dev->lock, flags); in net2280_dequeue()
1257 stopped = ep->stopped; in net2280_dequeue()
1261 ep->stopped = 1; in net2280_dequeue()
1262 if (ep->dma) { in net2280_dequeue()
1263 dmactl = readl(&ep->dma->dmactl); in net2280_dequeue()
1265 stop_dma(ep->dma); in net2280_dequeue()
1266 scan_dma_completions(ep); in net2280_dequeue()
1270 list_for_each_entry(req, &ep->queue, queue) { in net2280_dequeue()
1275 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_dequeue()
1276 dev_err(&ep->dev->pdev->dev, "%s: Request mismatch\n", in net2280_dequeue()
1282 if (ep->queue.next == &req->queue) { in net2280_dequeue()
1283 if (ep->dma) { in net2280_dequeue()
1284 ep_dbg(ep->dev, "unlink (%s) dma\n", _ep->name); in net2280_dequeue()
1286 abort_dma(ep); in net2280_dequeue()
1287 if (likely(ep->queue.next == &req->queue)) { in net2280_dequeue()
1290 dma_done(ep, req, in net2280_dequeue()
1291 readl(&ep->dma->dmacount), in net2280_dequeue()
1295 ep_dbg(ep->dev, "unlink (%s) pio\n", _ep->name); in net2280_dequeue()
1296 done(ep, req, -ECONNRESET); in net2280_dequeue()
1302 done(ep, req, -ECONNRESET); in net2280_dequeue()
1303 ep->stopped = stopped; in net2280_dequeue()
1305 if (ep->dma) { in net2280_dequeue()
1307 if (list_empty(&ep->queue)) in net2280_dequeue()
1308 stop_dma(ep->dma); in net2280_dequeue()
1309 else if (!ep->stopped) { in net2280_dequeue()
1312 writel(dmactl, &ep->dma->dmactl); in net2280_dequeue()
1314 start_dma(ep, list_entry(ep->queue.next, in net2280_dequeue()
1319 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_dequeue()
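net2280_dequeue() backs usb_ep_dequeue(). A short caller-side sketch of cancelling an outstanding request (example_ name assumed); the completion callback then sees -ECONNRESET, which is the status done() is called with above:

/* Sketch: cancel a queued request, e.g. on timeout or disconnect. */
static void example_cancel(struct usb_ep *ep, struct usb_request *req)
{
	int ret;

	ret = usb_ep_dequeue(ep, req);
	if (ret)
		pr_debug("%s: request %p already completed or not queued (%d)\n",
			 ep->name, req, ret);
}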
1330 struct net2280_ep *ep; in net2280_set_halt_and_wedge() local
1334 ep = container_of(_ep, struct net2280_ep, ep); in net2280_set_halt_and_wedge()
1335 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_set_halt_and_wedge()
1339 if (!ep->dev->driver || ep->dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_set_halt_and_wedge()
1343 if (ep->desc /* not ep0 */ && (ep->desc->bmAttributes & 0x03) in net2280_set_halt_and_wedge()
1349 spin_lock_irqsave(&ep->dev->lock, flags); in net2280_set_halt_and_wedge()
1350 if (!list_empty(&ep->queue)) { in net2280_set_halt_and_wedge()
1353 } else if (ep->is_in && value && net2280_fifo_status(_ep) != 0) { in net2280_set_halt_and_wedge()
1357 ep_vdbg(ep->dev, "%s %s %s\n", _ep->name, in net2280_set_halt_and_wedge()
1362 if (ep->num == 0) in net2280_set_halt_and_wedge()
1363 ep->dev->protocol_stall = 1; in net2280_set_halt_and_wedge()
1365 set_halt(ep); in net2280_set_halt_and_wedge()
1367 ep->wedged = 1; in net2280_set_halt_and_wedge()
1369 clear_halt(ep); in net2280_set_halt_and_wedge()
1370 if (ep->dev->quirks & PLX_SUPERSPEED && in net2280_set_halt_and_wedge()
1371 !list_empty(&ep->queue) && ep->td_dma) in net2280_set_halt_and_wedge()
1372 restart_dma(ep); in net2280_set_halt_and_wedge()
1373 ep->wedged = 0; in net2280_set_halt_and_wedge()
1375 (void) readl(&ep->regs->ep_rsp); in net2280_set_halt_and_wedge()
1377 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_set_halt_and_wedge()
1382 spin_unlock_irqrestore(&ep->dev->lock, flags); in net2280_set_halt_and_wedge()
1384 dev_err(&ep->dev->pdev->dev, "%s: error=%d\n", __func__, retval); in net2280_set_halt_and_wedge()
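net2280_set_halt_and_wedge() is reached through usb_ep_set_halt(), usb_ep_clear_halt() and usb_ep_set_wedge(). A hedged sketch of the caller side (example_ names assumed):

/* Sketch: stall an endpoint on a protocol error and clear it later.
 * The halt is refused while requests are still queued or an IN FIFO
 * holds data, per the list_empty()/fifo_status checks above. */
#include <linux/usb/gadget.h>

static void example_stall(struct usb_ep *ep)
{
	int ret;

	ret = usb_ep_set_halt(ep);
	if (ret)
		pr_debug("%s: halt not set (%d)\n", ep->name, ret);
}

static void example_unstall(struct usb_ep *ep)
{
	usb_ep_clear_halt(ep);
}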
1404 struct net2280_ep *ep; in net2280_fifo_status() local
1407 ep = container_of(_ep, struct net2280_ep, ep); in net2280_fifo_status()
1408 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_fifo_status()
1412 if (!ep->dev->driver || ep->dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_fifo_status()
1413 dev_err(&ep->dev->pdev->dev, in net2280_fifo_status()
1415 __func__, ep->dev->driver, ep->dev->gadget.speed); in net2280_fifo_status()
1419 avail = readl(&ep->regs->ep_avail) & (BIT(12) - 1); in net2280_fifo_status()
1420 if (avail > ep->fifo_size) { in net2280_fifo_status()
1421 dev_err(&ep->dev->pdev->dev, "%s: Fifo overflow\n", __func__); in net2280_fifo_status()
1424 if (ep->is_in) in net2280_fifo_status()
1425 avail = ep->fifo_size - avail; in net2280_fifo_status()
1431 struct net2280_ep *ep; in net2280_fifo_flush() local
1433 ep = container_of(_ep, struct net2280_ep, ep); in net2280_fifo_flush()
1434 if (!_ep || (!ep->desc && ep->num != 0)) { in net2280_fifo_flush()
1438 if (!ep->dev->driver || ep->dev->gadget.speed == USB_SPEED_UNKNOWN) { in net2280_fifo_flush()
1439 dev_err(&ep->dev->pdev->dev, in net2280_fifo_flush()
1441 __func__, ep->dev->driver, ep->dev->gadget.speed); in net2280_fifo_flush()
1445 writel(BIT(FIFO_FLUSH), &ep->regs->ep_stat); in net2280_fifo_flush()
1446 (void) readl(&ep->regs->ep_rsp); in net2280_fifo_flush()
1558 struct usb_ep *ep; in net2280_match_ep() local
1562 ep = gadget_find_ep_by_name(_gadget, "ep-e"); in net2280_match_ep()
1563 if (ep && usb_gadget_ep_match_desc(_gadget, ep, desc, ep_comp)) in net2280_match_ep()
1564 return ep; in net2280_match_ep()
1565 ep = gadget_find_ep_by_name(_gadget, "ep-f"); in net2280_match_ep()
1566 if (ep && usb_gadget_ep_match_desc(_gadget, ep, desc, ep_comp)) in net2280_match_ep()
1567 return ep; in net2280_match_ep()
1573 ep = gadget_find_ep_by_name(_gadget, name); in net2280_match_ep()
1574 if (ep && usb_gadget_ep_match_desc(_gadget, ep, desc, ep_comp)) in net2280_match_ep()
1575 return ep; in net2280_match_ep()
1681 struct net2280_ep *ep; in registers_show() local
1683 ep = &dev->ep[i]; in registers_show()
1684 if (i && !ep->desc) in registers_show()
1687 t1 = readl(&ep->cfg->ep_cfg); in registers_show()
1688 t2 = readl(&ep->regs->ep_rsp) & 0xff; in registers_show()
1692 ep->ep.name, t1, t2, in registers_show()
1709 readl(&ep->regs->ep_irqenb)); in registers_show()
1716 readl(&ep->regs->ep_stat), in registers_show()
1717 readl(&ep->regs->ep_avail), in registers_show()
1720 ep->stopped ? "*" : ""); in registers_show()
1724 if (!ep->dma) in registers_show()
1730 readl(&ep->dma->dmactl), in registers_show()
1731 readl(&ep->dma->dmastat), in registers_show()
1732 readl(&ep->dma->dmacount), in registers_show()
1733 readl(&ep->dma->dmaaddr), in registers_show()
1734 readl(&ep->dma->dmadesc)); in registers_show()
1747 struct net2280_ep *ep; in registers_show() local
1749 ep = &dev->ep[i]; in registers_show()
1750 if (i && !ep->irqs) in registers_show()
1752 t = scnprintf(next, size, " %s/%lu", ep->ep.name, ep->irqs); in registers_show()
1782 struct net2280_ep *ep = &dev->ep[i]; in queues_show() local
1789 d = ep->desc; in queues_show()
1795 ep->ep.name, t & USB_ENDPOINT_NUMBER_MASK, in queues_show()
1799 ep->dma ? "dma" : "pio", ep->fifo_size in queues_show()
1803 ep->is_in ? "in" : "out"); in queues_show()
1809 if (list_empty(&ep->queue)) { in queues_show()
1817 list_for_each_entry(req, &ep->queue, queue) { in queues_show()
1818 if (ep->dma && req->td_dma == readl(&ep->dma->dmadesc)) in queues_show()
1824 readl(&ep->dma->dmacount)); in queues_show()
1835 if (ep->dma) { in queues_show()
1880 list_add_tail(&dev->ep[1].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1881 list_add_tail(&dev->ep[2].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1884 list_add_tail(&dev->ep[3].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1885 list_add_tail(&dev->ep[4].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1886 dev->ep[1].fifo_size = dev->ep[2].fifo_size = 1024; in set_fifo_mode()
1889 dev->ep[1].fifo_size = dev->ep[2].fifo_size = 2048; in set_fifo_mode()
1892 list_add_tail(&dev->ep[3].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1893 dev->ep[1].fifo_size = 2048; in set_fifo_mode()
1894 dev->ep[2].fifo_size = 1024; in set_fifo_mode()
1898 list_add_tail(&dev->ep[5].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1899 list_add_tail(&dev->ep[6].ep.ep_list, &dev->gadget.ep_list); in set_fifo_mode()
1909 struct net2280_ep *ep; in defect7374_disable_data_eps() local
1915 ep = &dev->ep[i]; in defect7374_disable_data_eps()
1916 writel(i, &ep->cfg->ep_cfg); in defect7374_disable_data_eps()
1967 writel(tmp, &dev->ep[i].cfg->ep_cfg); in defect7374_enable_data_eps_zero()
2039 struct net2280_ep *ep = &dev->ep[tmp + 1]; in usb_reset_228x() local
2040 if (ep->dma) in usb_reset_228x()
2041 abort_dma(ep); in usb_reset_228x()
2077 struct net2280_ep *ep = &dev->ep[tmp + 1]; in usb_reset_338x() local
2080 if (ep->dma) { in usb_reset_338x()
2081 abort_dma(ep); in usb_reset_338x()
2106 list_add_tail(&dev->ep[tmp].ep.ep_list, &dev->gadget.ep_list); in usb_reset_338x()
2123 struct net2280_ep *ep = &dev->ep[tmp]; in usb_reinit_228x() local
2125 ep->ep.name = ep_info_dft[tmp].name; in usb_reinit_228x()
2126 ep->ep.caps = ep_info_dft[tmp].caps; in usb_reinit_228x()
2127 ep->dev = dev; in usb_reinit_228x()
2128 ep->num = tmp; in usb_reinit_228x()
2131 ep->fifo_size = 1024; in usb_reinit_228x()
2132 ep->dma = &dev->dma[tmp - 1]; in usb_reinit_228x()
2134 ep->fifo_size = 64; in usb_reinit_228x()
2135 ep->regs = &dev->epregs[tmp]; in usb_reinit_228x()
2136 ep->cfg = &dev->epregs[tmp]; in usb_reinit_228x()
2137 ep_reset_228x(dev->regs, ep); in usb_reinit_228x()
2139 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, 64); in usb_reinit_228x()
2140 usb_ep_set_maxpacket_limit(&dev->ep[5].ep, 64); in usb_reinit_228x()
2141 usb_ep_set_maxpacket_limit(&dev->ep[6].ep, 64); in usb_reinit_228x()
2143 dev->gadget.ep0 = &dev->ep[0].ep; in usb_reinit_228x()
2144 dev->ep[0].stopped = 0; in usb_reinit_228x()
2164 struct net2280_ep *ep = &dev->ep[i]; in usb_reinit_338x() local
2166 ep->ep.name = dev->enhanced_mode ? ep_info_adv[i].name : in usb_reinit_338x()
2168 ep->ep.caps = dev->enhanced_mode ? ep_info_adv[i].caps : in usb_reinit_338x()
2170 ep->dev = dev; in usb_reinit_338x()
2171 ep->num = i; in usb_reinit_338x()
2174 ep->dma = &dev->dma[i - 1]; in usb_reinit_338x()
2177 ep->cfg = &dev->epregs[ne[i]]; in usb_reinit_338x()
2183 writel(ne[i], &ep->cfg->ep_cfg); in usb_reinit_338x()
2184 ep->regs = (struct net2280_ep_regs __iomem *) in usb_reinit_338x()
2188 ep->cfg = &dev->epregs[i]; in usb_reinit_338x()
2189 ep->regs = &dev->epregs[i]; in usb_reinit_338x()
2192 ep->fifo_size = (i != 0) ? 2048 : 512; in usb_reinit_338x()
2194 ep_reset_338x(dev->regs, ep); in usb_reinit_338x()
2196 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, 512); in usb_reinit_338x()
2198 dev->gadget.ep0 = &dev->ep[0].ep; in usb_reinit_338x()
2199 dev->ep[0].stopped = 0; in usb_reinit_338x()
2379 dev->ep[i].irqs = 0; in net2280_start()
2425 nuke(&dev->ep[i]); in stop_activity()
2464 static void handle_ep_small(struct net2280_ep *ep) in handle_ep_small() argument
2471 if (!list_empty(&ep->queue)) in handle_ep_small()
2472 req = list_entry(ep->queue.next, in handle_ep_small()
2478 t = readl(&ep->regs->ep_stat); in handle_ep_small()
2479 ep->irqs++; in handle_ep_small()
2481 ep_vdbg(ep->dev, "%s ack ep_stat %08x, req %p\n", in handle_ep_small()
2482 ep->ep.name, t, req ? &req->req : NULL); in handle_ep_small()
2484 if (!ep->is_in || (ep->dev->quirks & PLX_2280)) in handle_ep_small()
2485 writel(t & ~BIT(NAK_OUT_PACKETS), &ep->regs->ep_stat); in handle_ep_small()
2488 writel(t, &ep->regs->ep_stat); in handle_ep_small()
2500 if (unlikely(ep->num == 0)) { in handle_ep_small()
2501 if (ep->is_in) { in handle_ep_small()
2504 if (ep->dev->protocol_stall) { in handle_ep_small()
2505 ep->stopped = 1; in handle_ep_small()
2506 set_halt(ep); in handle_ep_small()
2509 allow_status(ep); in handle_ep_small()
2513 if (ep->dev->protocol_stall) { in handle_ep_small()
2514 ep->stopped = 1; in handle_ep_small()
2515 set_halt(ep); in handle_ep_small()
2517 } else if (ep->responded && in handle_ep_small()
2518 !req && !ep->stopped) in handle_ep_small()
2519 write_fifo(ep, NULL); in handle_ep_small()
2524 if (ep->dev->protocol_stall) { in handle_ep_small()
2525 ep->stopped = 1; in handle_ep_small()
2526 set_halt(ep); in handle_ep_small()
2533 (ep->responded && !req)) { in handle_ep_small()
2534 ep->dev->protocol_stall = 1; in handle_ep_small()
2535 set_halt(ep); in handle_ep_small()
2536 ep->stopped = 1; in handle_ep_small()
2538 done(ep, req, -EOVERFLOW); in handle_ep_small()
2548 if (likely(ep->dma)) { in handle_ep_small()
2551 int stopped = ep->stopped; in handle_ep_small()
2557 ep->stopped = 1; in handle_ep_small()
2558 for (count = 0; ; t = readl(&ep->regs->ep_stat)) { in handle_ep_small()
2563 scan_dma_completions(ep); in handle_ep_small()
2564 if (unlikely(list_empty(&ep->queue) || in handle_ep_small()
2565 ep->out_overflow)) { in handle_ep_small()
2569 req = list_entry(ep->queue.next, in handle_ep_small()
2576 count = readl(&ep->dma->dmacount); in handle_ep_small()
2578 if (readl(&ep->dma->dmadesc) in handle_ep_small()
2587 writel(BIT(DMA_ABORT), &ep->dma->dmastat); in handle_ep_small()
2588 spin_stop_dma(ep->dma); in handle_ep_small()
2592 t = readl(&ep->regs->ep_avail); in handle_ep_small()
2593 dma_done(ep, req, count, in handle_ep_small()
2594 (ep->out_overflow || t) in handle_ep_small()
2599 if (unlikely(ep->out_overflow || in handle_ep_small()
2600 (ep->dev->chiprev == 0x0100 && in handle_ep_small()
2601 ep->dev->gadget.speed in handle_ep_small()
2603 out_flush(ep); in handle_ep_small()
2604 ep->out_overflow = 0; in handle_ep_small()
2608 ep->stopped = stopped; in handle_ep_small()
2609 if (!list_empty(&ep->queue)) in handle_ep_small()
2610 restart_dma(ep); in handle_ep_small()
2612 ep_dbg(ep->dev, "%s dma ep_stat %08x ??\n", in handle_ep_small()
2613 ep->ep.name, t); in handle_ep_small()
2618 if (read_fifo(ep, req) && ep->num != 0) in handle_ep_small()
2626 if (len > ep->ep.maxpacket) in handle_ep_small()
2627 len = ep->ep.maxpacket; in handle_ep_small()
2633 (!req->req.zero || len != ep->ep.maxpacket) && ep->num) in handle_ep_small()
2643 done(ep, req, 0); in handle_ep_small()
2646 if (ep->num == 0) { in handle_ep_small()
2651 if (!ep->stopped) in handle_ep_small()
2652 allow_status(ep); in handle_ep_small()
2655 if (!list_empty(&ep->queue) && !ep->stopped) in handle_ep_small()
2656 req = list_entry(ep->queue.next, in handle_ep_small()
2660 if (req && !ep->is_in) in handle_ep_small()
2661 stop_out_naking(ep); in handle_ep_small()
2668 if (req && !ep->stopped) { in handle_ep_small()
2672 write_fifo(ep, &req->req); in handle_ep_small()
2678 struct net2280_ep *ep; in get_ep_by_addr() local
2681 return &dev->ep[0]; in get_ep_by_addr()
2682 list_for_each_entry(ep, &dev->gadget.ep_list, ep.ep_list) { in get_ep_by_addr()
2685 if (!ep->desc) in get_ep_by_addr()
2687 bEndpointAddress = ep->desc->bEndpointAddress; in get_ep_by_addr()
2691 return ep; in get_ep_by_addr()
2770 static void ep_clear_seqnum(struct net2280_ep *ep) in ep_clear_seqnum() argument
2772 struct net2280 *dev = ep->dev; in ep_clear_seqnum()
2777 val |= ep_pl[ep->num]; in ep_clear_seqnum()
2786 struct net2280_ep *ep, struct usb_ctrlrequest r) in handle_stat0_irqs_superspeed() argument
2811 set_fifo_bytecount(ep, sizeof(status)); in handle_stat0_irqs_superspeed()
2813 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2823 set_fifo_bytecount(ep, sizeof(status)); in handle_stat0_irqs_superspeed()
2825 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2843 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2851 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2859 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2871 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2882 ep_vdbg(dev, "%s clear halt\n", e->ep.name); in handle_stat0_irqs_superspeed()
2891 allow_status(ep); in handle_stat0_irqs_superspeed()
2892 ep->stopped = 1; in handle_stat0_irqs_superspeed()
2909 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2917 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2925 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2937 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2946 ep->stopped = 1; in handle_stat0_irqs_superspeed()
2947 if (ep->num == 0) in handle_stat0_irqs_superspeed()
2948 ep->dev->protocol_stall = 1; in handle_stat0_irqs_superspeed()
2950 if (ep->dma) in handle_stat0_irqs_superspeed()
2951 abort_dma(ep); in handle_stat0_irqs_superspeed()
2952 set_halt(ep); in handle_stat0_irqs_superspeed()
2954 allow_status_338x(ep); in handle_stat0_irqs_superspeed()
2968 readl(&ep->cfg->ep_cfg)); in handle_stat0_irqs_superspeed()
2970 ep->responded = 0; in handle_stat0_irqs_superspeed()
2981 set_halt(ep); in handle_stat0_irqs_superspeed()
3009 handle_ep_small(&dev->ep[index]); in usb338x_handle_ep_intr()
3015 struct net2280_ep *ep; in handle_stat0_irqs() local
3037 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, in handle_stat0_irqs()
3041 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, in handle_stat0_irqs()
3045 usb_ep_set_maxpacket_limit(&dev->ep[0].ep, in handle_stat0_irqs()
3053 ep = &dev->ep[0]; in handle_stat0_irqs()
3054 ep->irqs++; in handle_stat0_irqs()
3058 while (!list_empty(&ep->queue)) { in handle_stat0_irqs()
3059 req = list_entry(ep->queue.next, in handle_stat0_irqs()
3061 done(ep, req, (req->req.actual == req->req.length) in handle_stat0_irqs()
3064 ep->stopped = 0; in handle_stat0_irqs()
3067 if (ep->dev->quirks & PLX_2280) in handle_stat0_irqs()
3085 &ep->regs->ep_stat); in handle_stat0_irqs()
3111 ep->is_in = (u.r.bRequestType & USB_DIR_IN) != 0; in handle_stat0_irqs()
3112 if (ep->is_in) { in handle_stat0_irqs()
3116 stop_out_naking(ep); in handle_stat0_irqs()
3126 ep->responded = 1; in handle_stat0_irqs()
3129 handle_stat0_irqs_superspeed(dev, ep, u.r); in handle_stat0_irqs()
3152 set_fifo_bytecount(ep, w_length); in handle_stat0_irqs()
3154 allow_status(ep); in handle_stat0_irqs()
3155 ep_vdbg(dev, "%s stat %02x\n", ep->ep.name, status); in handle_stat0_irqs()
3172 ep->ep.name); in handle_stat0_irqs()
3174 ep_vdbg(dev, "%s clear halt\n", e->ep.name); in handle_stat0_irqs()
3176 if ((ep->dev->quirks & PLX_SUPERSPEED) && in handle_stat0_irqs()
3180 allow_status(ep); in handle_stat0_irqs()
3195 if (e->ep.name == ep0name) in handle_stat0_irqs()
3200 allow_status(ep); in handle_stat0_irqs()
3201 ep_vdbg(dev, "%s set halt\n", ep->ep.name); in handle_stat0_irqs()
3211 readl(&ep->cfg->ep_cfg)); in handle_stat0_irqs()
3212 ep->responded = 0; in handle_stat0_irqs()
3259 ep = &dev->ep[num]; in handle_stat0_irqs()
3260 handle_ep_small(ep); in handle_stat0_irqs()
3281 struct net2280_ep *ep; in handle_stat1_irqs() local
3391 ep = &dev->ep[num + 1]; in handle_stat1_irqs()
3392 dma = ep->dma; in handle_stat1_irqs()
3404 if (!ep->is_in && (r_dmacount & 0x00FFFFFF) && in handle_stat1_irqs()
3410 ep_dbg(ep->dev, "%s no xact done? %08x\n", in handle_stat1_irqs()
3411 ep->ep.name, tmp); in handle_stat1_irqs()
3414 stop_dma(ep->dma); in handle_stat1_irqs()
3426 scan_dma_completions(ep); in handle_stat1_irqs()
3429 if (!list_empty(&ep->queue)) { in handle_stat1_irqs()
3431 restart_dma(ep); in handle_stat1_irqs()
3433 ep->irqs++; in handle_stat1_irqs()
3507 if (!dev->ep[i].dummy) in net2280_remove()
3509 pci_pool_free(dev->requests, dev->ep[i].dummy, in net2280_remove()
3510 dev->ep[i].td_dma); in net2280_remove()
3667 &dev->ep[i].td_dma); in net2280_probe()
3675 dev->ep[i].dummy = td; in net2280_probe()