This source file includes the following definitions:
- our_req
- our_ep
- to_hsotg
- dwc2_set_bit
- dwc2_clear_bit
- index_to_ep
- using_dma
- using_desc_dma
- dwc2_gadget_incr_frame_num
- dwc2_gadget_dec_frame_num_by_one
- dwc2_hsotg_en_gsint
- dwc2_hsotg_disable_gsint
- dwc2_hsotg_ctrl_epint
- dwc2_hsotg_tx_fifo_count
- dwc2_hsotg_tx_fifo_total_depth
- dwc2_gadget_wkup_alert_handler
- dwc2_hsotg_tx_fifo_average_depth
- dwc2_hsotg_init_fifo
- dwc2_hsotg_ep_alloc_request
- is_ep_periodic
- dwc2_hsotg_unmap_dma
- dwc2_gadget_alloc_ctrl_desc_chains
- dwc2_hsotg_write_fifo
- get_ep_limit
- dwc2_hsotg_read_frameno
- dwc2_gadget_get_chain_limit
- dwc2_gadget_get_desc_params
- dwc2_gadget_fill_nonisoc_xfer_ddma_one
- dwc2_gadget_config_nonisoc_xfer_ddma
- dwc2_gadget_fill_isoc_desc
- dwc2_gadget_start_isoc_ddma
- dwc2_hsotg_start_req
- dwc2_hsotg_map_dma
- dwc2_hsotg_handle_unaligned_buf_start
- dwc2_hsotg_handle_unaligned_buf_complete
- dwc2_gadget_target_frame_elapsed
- dwc2_gadget_set_ep0_desc_chain
- dwc2_hsotg_ep_queue
- dwc2_hsotg_ep_queue_lock
- dwc2_hsotg_ep_free_request
- dwc2_hsotg_complete_oursetup
- ep_from_windex
- dwc2_hsotg_set_test_mode
- dwc2_hsotg_send_reply
- dwc2_hsotg_process_req_status
- get_ep_head
- dwc2_gadget_start_next_request
- dwc2_hsotg_process_req_feature
- dwc2_hsotg_stall_ep0
- dwc2_hsotg_process_control
- dwc2_hsotg_complete_setup
- dwc2_hsotg_enqueue_setup
- dwc2_hsotg_program_zlp
- dwc2_hsotg_complete_request
- dwc2_gadget_complete_isoc_request_ddma
- dwc2_gadget_handle_isoc_bna
- dwc2_hsotg_rx_data
- dwc2_hsotg_ep0_zlp
- dwc2_hsotg_change_ep_iso_parity
- dwc2_gadget_get_xfersize_ddma
- dwc2_hsotg_handle_outdone
- dwc2_hsotg_handle_rx
- dwc2_hsotg_ep0_mps
- dwc2_hsotg_set_ep_maxpacket
- dwc2_hsotg_txfifo_flush
- dwc2_hsotg_trytx
- dwc2_hsotg_complete_in
- dwc2_gadget_read_ep_interrupts
- dwc2_gadget_handle_ep_disabled
- dwc2_gadget_handle_out_token_ep_disabled
- dwc2_gadget_handle_nak
- dwc2_hsotg_epint
- dwc2_hsotg_irq_enumdone
- kill_all_requests
- dwc2_hsotg_disconnect
- dwc2_hsotg_irq_fifoempty
- dwc2_hsotg_core_init_disconnected
- dwc2_hsotg_core_disconnect
- dwc2_hsotg_core_connect
- dwc2_gadget_handle_incomplete_isoc_in
- dwc2_gadget_handle_incomplete_isoc_out
- dwc2_hsotg_irq
- dwc2_hsotg_ep_stop_xfr
- dwc2_hsotg_ep_enable
- dwc2_hsotg_ep_disable
- dwc2_hsotg_ep_disable_lock
- on_list
- dwc2_hsotg_ep_dequeue
- dwc2_hsotg_ep_sethalt
- dwc2_hsotg_ep_sethalt_lock
- dwc2_hsotg_init
- dwc2_hsotg_udc_start
- dwc2_hsotg_udc_stop
- dwc2_hsotg_gadget_getframe
- dwc2_hsotg_pullup
- dwc2_hsotg_vbus_session
- dwc2_hsotg_vbus_draw
- dwc2_hsotg_initep
- dwc2_hsotg_hw_cfg
- dwc2_hsotg_dump
- dwc2_gadget_init
- dwc2_hsotg_remove
- dwc2_hsotg_suspend
- dwc2_hsotg_resume
- dwc2_backup_device_registers
- dwc2_restore_device_registers
- dwc2_gadget_init_lpm
- dwc2_gadget_program_ref_clk
- dwc2_gadget_enter_hibernation
- dwc2_gadget_exit_hibernation
14 #include <linux/kernel.h>
15 #include <linux/module.h>
16 #include <linux/spinlock.h>
17 #include <linux/interrupt.h>
18 #include <linux/platform_device.h>
19 #include <linux/dma-mapping.h>
20 #include <linux/mutex.h>
21 #include <linux/seq_file.h>
22 #include <linux/delay.h>
23 #include <linux/io.h>
24 #include <linux/slab.h>
25 #include <linux/of_platform.h>
26
27 #include <linux/usb/ch9.h>
28 #include <linux/usb/gadget.h>
29 #include <linux/usb/phy.h>
30 #include <linux/usb/composite.h>
31
32
33 #include "core.h"
34 #include "hw.h"
35
36
37 static inline struct dwc2_hsotg_req *our_req(struct usb_request *req)
38 {
39 return container_of(req, struct dwc2_hsotg_req, req);
40 }
41
42 static inline struct dwc2_hsotg_ep *our_ep(struct usb_ep *ep)
43 {
44 return container_of(ep, struct dwc2_hsotg_ep, ep);
45 }
46
47 static inline struct dwc2_hsotg *to_hsotg(struct usb_gadget *gadget)
48 {
49 return container_of(gadget, struct dwc2_hsotg, gadget);
50 }
51
52 static inline void dwc2_set_bit(struct dwc2_hsotg *hsotg, u32 offset, u32 val)
53 {
54 dwc2_writel(hsotg, dwc2_readl(hsotg, offset) | val, offset);
55 }
56
57 static inline void dwc2_clear_bit(struct dwc2_hsotg *hsotg, u32 offset, u32 val)
58 {
59 dwc2_writel(hsotg, dwc2_readl(hsotg, offset) & ~val, offset);
60 }
61
62 static inline struct dwc2_hsotg_ep *index_to_ep(struct dwc2_hsotg *hsotg,
63 u32 ep_index, u32 dir_in)
64 {
65 if (dir_in)
66 return hsotg->eps_in[ep_index];
67 else
68 return hsotg->eps_out[ep_index];
69 }
70
71
72 static void dwc2_hsotg_dump(struct dwc2_hsotg *hsotg);
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
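/**
 * using_dma - return the DMA status of the driver.
 * @hsotg: The driver state.
 *
 * Return true if the controller is configured for buffer DMA
 * (the g_dma core parameter).
 */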
93 static inline bool using_dma(struct dwc2_hsotg *hsotg)
94 {
95 return hsotg->params.g_dma;
96 }
97
98
99
100
101
102
103
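/**
 * using_desc_dma - return the descriptor DMA status of the driver.
 * @hsotg: The driver state.
 *
 * Return true if descriptor DMA mode is enabled (the g_dma_desc
 * core parameter).
 */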
104 static inline bool using_desc_dma(struct dwc2_hsotg *hsotg)
105 {
106 return hsotg->params.g_dma_desc;
107 }
108
109
110
111
112
113
114
115
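/**
 * dwc2_gadget_incr_frame_num - increment the targeted (micro)frame number
 * @hs_ep: The endpoint to advance.
 *
 * Adds the endpoint interval to target_frame; when the result exceeds
 * DSTS_SOFFN_LIMIT the value wraps and frame_overrun is set so callers
 * can account for the frame-counter rollover.
 */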
116 static inline void dwc2_gadget_incr_frame_num(struct dwc2_hsotg_ep *hs_ep)
117 {
118 hs_ep->target_frame += hs_ep->interval;
119 if (hs_ep->target_frame > DSTS_SOFFN_LIMIT) {
120 hs_ep->frame_overrun = true;
121 hs_ep->target_frame &= DSTS_SOFFN_LIMIT;
122 } else {
123 hs_ep->frame_overrun = false;
124 }
125 }
126
127
128
129
130
131
132
133
134
135
136
137 static inline void dwc2_gadget_dec_frame_num_by_one(struct dwc2_hsotg_ep *hs_ep)
138 {
139 if (hs_ep->target_frame)
140 hs_ep->target_frame -= 1;
141 else
142 hs_ep->target_frame = DSTS_SOFFN_LIMIT;
143 }
144
145
146
147
148
149
150 static void dwc2_hsotg_en_gsint(struct dwc2_hsotg *hsotg, u32 ints)
151 {
152 u32 gsintmsk = dwc2_readl(hsotg, GINTMSK);
153 u32 new_gsintmsk;
154
155 new_gsintmsk = gsintmsk | ints;
156
157 if (new_gsintmsk != gsintmsk) {
158 dev_dbg(hsotg->dev, "gsintmsk now 0x%08x\n", new_gsintmsk);
159 dwc2_writel(hsotg, new_gsintmsk, GINTMSK);
160 }
161 }
162
163
164
165
166
167
168 static void dwc2_hsotg_disable_gsint(struct dwc2_hsotg *hsotg, u32 ints)
169 {
170 u32 gsintmsk = dwc2_readl(hsotg, GINTMSK);
171 u32 new_gsintmsk;
172
173 new_gsintmsk = gsintmsk & ~ints;
174
175 if (new_gsintmsk != gsintmsk)
176 dwc2_writel(hsotg, new_gsintmsk, GINTMSK);
177 }
178
179
180
181
182
183
184
185
186
187
188
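/**
 * dwc2_hsotg_ctrl_epint - enable/disable an individual endpoint interrupt
 * @hsotg: The device state.
 * @ep: The endpoint index.
 * @dir_in: True if this is an IN endpoint, false for OUT.
 * @en: Non-zero to enable the interrupt, zero to disable it.
 *
 * Sets or clears the endpoint's bit in DAINTMSK (OUT endpoints use the
 * upper 16 bits) with local interrupts disabled around the
 * read-modify-write.
 */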
189 static void dwc2_hsotg_ctrl_epint(struct dwc2_hsotg *hsotg,
190 unsigned int ep, unsigned int dir_in,
191 unsigned int en)
192 {
193 unsigned long flags;
194 u32 bit = 1 << ep;
195 u32 daint;
196
197 if (!dir_in)
198 bit <<= 16;
199
200 local_irq_save(flags);
201 daint = dwc2_readl(hsotg, DAINTMSK);
202 if (en)
203 daint |= bit;
204 else
205 daint &= ~bit;
206 dwc2_writel(hsotg, daint, DAINTMSK);
207 local_irq_restore(flags);
208 }
209
210
211
212
213
214
215 int dwc2_hsotg_tx_fifo_count(struct dwc2_hsotg *hsotg)
216 {
217 if (hsotg->hw_params.en_multiple_tx_fifo)
218
219 return hsotg->hw_params.num_dev_in_eps;
220 else
221
222 return hsotg->hw_params.num_dev_perio_in_ep;
223 }
224
225
226
227
228
229
230
231 int dwc2_hsotg_tx_fifo_total_depth(struct dwc2_hsotg *hsotg)
232 {
233 int addr;
234 int tx_addr_max;
235 u32 np_tx_fifo_size;
236
237 np_tx_fifo_size = min_t(u32, hsotg->hw_params.dev_nperio_tx_fifo_size,
238 hsotg->params.g_np_tx_fifo_size);
239
240
241 tx_addr_max = hsotg->hw_params.total_fifo_size;
242
243 addr = hsotg->params.g_rx_fifo_size + np_tx_fifo_size;
244 if (tx_addr_max <= addr)
245 return 0;
246
247 return tx_addr_max - addr;
248 }
249
250
251
252
253
254
255
256 static void dwc2_gadget_wkup_alert_handler(struct dwc2_hsotg *hsotg)
257 {
258 u32 gintsts2;
259 u32 gintmsk2;
260
261 gintsts2 = dwc2_readl(hsotg, GINTSTS2);
262 gintmsk2 = dwc2_readl(hsotg, GINTMSK2);
263
264 if (gintsts2 & GINTSTS2_WKUP_ALERT_INT) {
265 dev_dbg(hsotg->dev, "%s: Wkup_Alert_Int\n", __func__);
266 dwc2_set_bit(hsotg, GINTSTS2, GINTSTS2_WKUP_ALERT_INT);
267 dwc2_set_bit(hsotg, DCTL, DCTL_RMTWKUPSIG);
268 }
269 }
270
271
272
273
274
275
276
277 int dwc2_hsotg_tx_fifo_average_depth(struct dwc2_hsotg *hsotg)
278 {
279 int tx_fifo_count;
280 int tx_fifo_depth;
281
282 tx_fifo_depth = dwc2_hsotg_tx_fifo_total_depth(hsotg);
283
284 tx_fifo_count = dwc2_hsotg_tx_fifo_count(hsotg);
285
286 if (!tx_fifo_count)
287 return tx_fifo_depth;
288 else
289 return tx_fifo_depth / tx_fifo_count;
290 }
291
292
293
294
295
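/**
 * dwc2_hsotg_init_fifo - initialise the device FIFO layout
 * @hsotg: The device instance.
 *
 * Programs the RX FIFO, the non-periodic TX FIFO and the per-endpoint
 * TX FIFOs from the configured sizes, updates GDFIFOCFG, then flushes
 * all TX FIFOs and the RX FIFO, polling GRSTCTL until the flush
 * completes (or the timeout expires).
 */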
296 static void dwc2_hsotg_init_fifo(struct dwc2_hsotg *hsotg)
297 {
298 unsigned int ep;
299 unsigned int addr;
300 int timeout;
301
302 u32 val;
303 u32 *txfsz = hsotg->params.g_tx_fifo_size;
304
305
306 WARN_ON(hsotg->fifo_map);
307 hsotg->fifo_map = 0;
308
309
310 dwc2_writel(hsotg, hsotg->params.g_rx_fifo_size, GRXFSIZ);
311 dwc2_writel(hsotg, (hsotg->params.g_rx_fifo_size <<
312 FIFOSIZE_STARTADDR_SHIFT) |
313 (hsotg->params.g_np_tx_fifo_size << FIFOSIZE_DEPTH_SHIFT),
314 GNPTXFSIZ);
315
316
317
318
319
320
321
322
323
324 addr = hsotg->params.g_rx_fifo_size + hsotg->params.g_np_tx_fifo_size;
325
326
327
328
329
330
331 for (ep = 1; ep < MAX_EPS_CHANNELS; ep++) {
332 if (!txfsz[ep])
333 continue;
334 val = addr;
335 val |= txfsz[ep] << FIFOSIZE_DEPTH_SHIFT;
336 WARN_ONCE(addr + txfsz[ep] > hsotg->fifo_mem,
337 "insufficient fifo memory");
338 addr += txfsz[ep];
339
340 dwc2_writel(hsotg, val, DPTXFSIZN(ep));
341 val = dwc2_readl(hsotg, DPTXFSIZN(ep));
342 }
343
344 dwc2_writel(hsotg, hsotg->hw_params.total_fifo_size |
345 addr << GDFIFOCFG_EPINFOBASE_SHIFT,
346 GDFIFOCFG);
347
348
349
350
351
352 dwc2_writel(hsotg, GRSTCTL_TXFNUM(0x10) | GRSTCTL_TXFFLSH |
353 GRSTCTL_RXFFLSH, GRSTCTL);
354
355
356 timeout = 100;
357 while (1) {
358 val = dwc2_readl(hsotg, GRSTCTL);
359
360 if ((val & (GRSTCTL_TXFFLSH | GRSTCTL_RXFFLSH)) == 0)
361 break;
362
363 if (--timeout == 0) {
364 dev_err(hsotg->dev,
365 "%s: timeout flushing fifos (GRSTCTL=%08x)\n",
366 __func__, val);
367 break;
368 }
369
370 udelay(1);
371 }
372
373 dev_dbg(hsotg->dev, "FIFOs reset, timeout at %d\n", timeout);
374 }
375
376
377
378
379
380
381
382
383 static struct usb_request *dwc2_hsotg_ep_alloc_request(struct usb_ep *ep,
384 gfp_t flags)
385 {
386 struct dwc2_hsotg_req *req;
387
388 req = kzalloc(sizeof(*req), flags);
389 if (!req)
390 return NULL;
391
392 INIT_LIST_HEAD(&req->queue);
393
394 return &req->req;
395 }
396
397
398
399
400
401
402
403
404 static inline int is_ep_periodic(struct dwc2_hsotg_ep *hs_ep)
405 {
406 return hs_ep->periodic;
407 }
408
409
410
411
412
413
414
415
416
417
418 static void dwc2_hsotg_unmap_dma(struct dwc2_hsotg *hsotg,
419 struct dwc2_hsotg_ep *hs_ep,
420 struct dwc2_hsotg_req *hs_req)
421 {
422 struct usb_request *req = &hs_req->req;
423
424 usb_gadget_unmap_request(&hsotg->gadget, req, hs_ep->dir_in);
425 }
426
427
428
429
430
431
432
433
434
435 static int dwc2_gadget_alloc_ctrl_desc_chains(struct dwc2_hsotg *hsotg)
436 {
437 hsotg->setup_desc[0] =
438 dmam_alloc_coherent(hsotg->dev,
439 sizeof(struct dwc2_dma_desc),
440 &hsotg->setup_desc_dma[0],
441 GFP_KERNEL);
442 if (!hsotg->setup_desc[0])
443 goto fail;
444
445 hsotg->setup_desc[1] =
446 dmam_alloc_coherent(hsotg->dev,
447 sizeof(struct dwc2_dma_desc),
448 &hsotg->setup_desc_dma[1],
449 GFP_KERNEL);
450 if (!hsotg->setup_desc[1])
451 goto fail;
452
453 hsotg->ctrl_in_desc =
454 dmam_alloc_coherent(hsotg->dev,
455 sizeof(struct dwc2_dma_desc),
456 &hsotg->ctrl_in_desc_dma,
457 GFP_KERNEL);
458 if (!hsotg->ctrl_in_desc)
459 goto fail;
460
461 hsotg->ctrl_out_desc =
462 dmam_alloc_coherent(hsotg->dev,
463 sizeof(struct dwc2_dma_desc),
464 &hsotg->ctrl_out_desc_dma,
465 GFP_KERNEL);
466 if (!hsotg->ctrl_out_desc)
467 goto fail;
468
469 return 0;
470
471 fail:
472 return -ENOMEM;
473 }
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
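/**
 * dwc2_hsotg_write_fifo - write packet data into the TX FIFO
 * @hsotg: The controller state.
 * @hs_ep: The endpoint we're writing for.
 * @hs_req: The request whose data is being written.
 *
 * Called when the TX FIFO has space for new data: works out how much
 * can be written (the periodic, dedicated-FIFO and shared non-periodic
 * cases differ), rounds partial packets down, and re-arms the relevant
 * FIFO-empty interrupt when more data remains.
 *
 * Returns 0 when nothing more needs to be done for now, or -ENOSPC
 * when the FIFO or request queue ran out of space.
 */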
491 static int dwc2_hsotg_write_fifo(struct dwc2_hsotg *hsotg,
492 struct dwc2_hsotg_ep *hs_ep,
493 struct dwc2_hsotg_req *hs_req)
494 {
495 bool periodic = is_ep_periodic(hs_ep);
496 u32 gnptxsts = dwc2_readl(hsotg, GNPTXSTS);
497 int buf_pos = hs_req->req.actual;
498 int to_write = hs_ep->size_loaded;
499 void *data;
500 int can_write;
501 int pkt_round;
502 int max_transfer;
503
504 to_write -= (buf_pos - hs_ep->last_load);
505
506
507 if (to_write == 0)
508 return 0;
509
510 if (periodic && !hsotg->dedicated_fifos) {
511 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index));
512 int size_left;
513 int size_done;
514
515
516
517
518
519
520 size_left = DXEPTSIZ_XFERSIZE_GET(epsize);
521
522
523
524
525
526 if (hs_ep->fifo_load != 0) {
527 dwc2_hsotg_en_gsint(hsotg, GINTSTS_PTXFEMP);
528 return -ENOSPC;
529 }
530
531 dev_dbg(hsotg->dev, "%s: left=%d, load=%d, fifo=%d, size %d\n",
532 __func__, size_left,
533 hs_ep->size_loaded, hs_ep->fifo_load, hs_ep->fifo_size);
534
535
536 size_done = hs_ep->size_loaded - size_left;
537
538
539 can_write = hs_ep->fifo_load - size_done;
540 dev_dbg(hsotg->dev, "%s: => can_write1=%d\n",
541 __func__, can_write);
542
543 can_write = hs_ep->fifo_size - can_write;
544 dev_dbg(hsotg->dev, "%s: => can_write2=%d\n",
545 __func__, can_write);
546
547 if (can_write <= 0) {
548 dwc2_hsotg_en_gsint(hsotg, GINTSTS_PTXFEMP);
549 return -ENOSPC;
550 }
551 } else if (hsotg->dedicated_fifos && hs_ep->index != 0) {
552 can_write = dwc2_readl(hsotg,
553 DTXFSTS(hs_ep->fifo_index));
554
555 can_write &= 0xffff;
556 can_write *= 4;
557 } else {
558 if (GNPTXSTS_NP_TXQ_SPC_AVAIL_GET(gnptxsts) == 0) {
559 dev_dbg(hsotg->dev,
560 "%s: no queue slots available (0x%08x)\n",
561 __func__, gnptxsts);
562
563 dwc2_hsotg_en_gsint(hsotg, GINTSTS_NPTXFEMP);
564 return -ENOSPC;
565 }
566
567 can_write = GNPTXSTS_NP_TXF_SPC_AVAIL_GET(gnptxsts);
568 can_write *= 4;
569 }
570
571 max_transfer = hs_ep->ep.maxpacket * hs_ep->mc;
572
573 dev_dbg(hsotg->dev, "%s: GNPTXSTS=%08x, can=%d, to=%d, max_transfer %d\n",
574 __func__, gnptxsts, can_write, to_write, max_transfer);
575
576
577
578
579
580
581 if (can_write > 512 && !periodic)
582 can_write = 512;
583
584
585
586
587
588
589 if (to_write > max_transfer) {
590 to_write = max_transfer;
591
592
593 if (!hsotg->dedicated_fifos)
594 dwc2_hsotg_en_gsint(hsotg,
595 periodic ? GINTSTS_PTXFEMP :
596 GINTSTS_NPTXFEMP);
597 }
598
599
600
601 if (to_write > can_write) {
602 to_write = can_write;
603 pkt_round = to_write % max_transfer;
604
605
606
607
608
609
610
611
612
613 if (pkt_round)
614 to_write -= pkt_round;
615
616
617
618
619
620
621
622 if (!hsotg->dedicated_fifos)
623 dwc2_hsotg_en_gsint(hsotg,
624 periodic ? GINTSTS_PTXFEMP :
625 GINTSTS_NPTXFEMP);
626 }
627
628 dev_dbg(hsotg->dev, "write %d/%d, can_write %d, done %d\n",
629 to_write, hs_req->req.length, can_write, buf_pos);
630
631 if (to_write <= 0)
632 return -ENOSPC;
633
634 hs_req->req.actual = buf_pos + to_write;
635 hs_ep->total_data += to_write;
636
637 if (periodic)
638 hs_ep->fifo_load += to_write;
639
640 to_write = DIV_ROUND_UP(to_write, 4);
641 data = hs_req->req.buf + buf_pos;
642
643 dwc2_writel_rep(hsotg, EPFIFO(hs_ep->index), data, to_write);
644
645 return (to_write >= can_write) ? -ENOSPC : 0;
646 }
647
648
649
650
651
652
653
654
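/**
 * get_ep_limit - get the maximum transfer length for an endpoint
 * @hs_ep: The endpoint.
 *
 * Returns the largest amount of data that can be programmed in one go,
 * limited by the DxEPTSIZ transfer-size and packet-count fields and by
 * the endpoint's maxpacket size (endpoint 0 has much tighter limits).
 */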
655 static unsigned int get_ep_limit(struct dwc2_hsotg_ep *hs_ep)
656 {
657 int index = hs_ep->index;
658 unsigned int maxsize;
659 unsigned int maxpkt;
660
661 if (index != 0) {
662 maxsize = DXEPTSIZ_XFERSIZE_LIMIT + 1;
663 maxpkt = DXEPTSIZ_PKTCNT_LIMIT + 1;
664 } else {
665 maxsize = 64 + 64;
666 if (hs_ep->dir_in)
667 maxpkt = DIEPTSIZ0_PKTCNT_LIMIT + 1;
668 else
669 maxpkt = 2;
670 }
671
672
673 maxpkt--;
674 maxsize--;
675
676
677
678
679
680
681 if ((maxpkt * hs_ep->ep.maxpacket) < maxsize)
682 maxsize = maxpkt * hs_ep->ep.maxpacket;
683
684 return maxsize;
685 }
686
687
688
689
690
691
692
693 static u32 dwc2_hsotg_read_frameno(struct dwc2_hsotg *hsotg)
694 {
695 u32 dsts;
696
697 dsts = dwc2_readl(hsotg, DSTS);
698 dsts &= DSTS_SOFFN_MASK;
699 dsts >>= DSTS_SOFFN_SHIFT;
700
701 return dsts;
702 }
703
704
705
706
707
708
709
710
711
712
713 static unsigned int dwc2_gadget_get_chain_limit(struct dwc2_hsotg_ep *hs_ep)
714 {
715 int is_isoc = hs_ep->isochronous;
716 unsigned int maxsize;
717
718 if (is_isoc)
719 maxsize = (hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_LIMIT :
720 DEV_DMA_ISOC_RX_NBYTES_LIMIT) *
721 MAX_DMA_DESC_NUM_HS_ISOC;
722 else
723 maxsize = DEV_DMA_NBYTES_LIMIT * MAX_DMA_DESC_NUM_GENERIC;
724
725 return maxsize;
726 }
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743 static u32 dwc2_gadget_get_desc_params(struct dwc2_hsotg_ep *hs_ep, u32 *mask)
744 {
745 u32 mps = hs_ep->ep.maxpacket;
746 int dir_in = hs_ep->dir_in;
747 u32 desc_size = 0;
748
749 if (!hs_ep->index && !dir_in) {
750 desc_size = mps;
751 *mask = DEV_DMA_NBYTES_MASK;
752 } else if (hs_ep->isochronous) {
753 if (dir_in) {
754 desc_size = DEV_DMA_ISOC_TX_NBYTES_LIMIT;
755 *mask = DEV_DMA_ISOC_TX_NBYTES_MASK;
756 } else {
757 desc_size = DEV_DMA_ISOC_RX_NBYTES_LIMIT;
758 *mask = DEV_DMA_ISOC_RX_NBYTES_MASK;
759 }
760 } else {
761 desc_size = DEV_DMA_NBYTES_LIMIT;
762 *mask = DEV_DMA_NBYTES_MASK;
763
764
765 desc_size -= desc_size % mps;
766 }
767
768 return desc_size;
769 }
770
771 static void dwc2_gadget_fill_nonisoc_xfer_ddma_one(struct dwc2_hsotg_ep *hs_ep,
772 struct dwc2_dma_desc **desc,
773 dma_addr_t dma_buff,
774 unsigned int len,
775 bool true_last)
776 {
777 int dir_in = hs_ep->dir_in;
778 u32 mps = hs_ep->ep.maxpacket;
779 u32 maxsize = 0;
780 u32 offset = 0;
781 u32 mask = 0;
782 int i;
783
784 maxsize = dwc2_gadget_get_desc_params(hs_ep, &mask);
785
786 hs_ep->desc_count = (len / maxsize) +
787 ((len % maxsize) ? 1 : 0);
788 if (len == 0)
789 hs_ep->desc_count = 1;
790
791 for (i = 0; i < hs_ep->desc_count; ++i) {
792 (*desc)->status = 0;
793 (*desc)->status |= (DEV_DMA_BUFF_STS_HBUSY
794 << DEV_DMA_BUFF_STS_SHIFT);
795
796 if (len > maxsize) {
797 if (!hs_ep->index && !dir_in)
798 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC);
799
800 (*desc)->status |=
801 maxsize << DEV_DMA_NBYTES_SHIFT & mask;
802 (*desc)->buf = dma_buff + offset;
803
804 len -= maxsize;
805 offset += maxsize;
806 } else {
807 if (true_last)
808 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC);
809
810 if (dir_in)
811 (*desc)->status |= (len % mps) ? DEV_DMA_SHORT :
812 ((hs_ep->send_zlp && true_last) ?
813 DEV_DMA_SHORT : 0);
814
815 (*desc)->status |=
816 len << DEV_DMA_NBYTES_SHIFT & mask;
817 (*desc)->buf = dma_buff + offset;
818 }
819
820 (*desc)->status &= ~DEV_DMA_BUFF_STS_MASK;
821 (*desc)->status |= (DEV_DMA_BUFF_STS_HREADY
822 << DEV_DMA_BUFF_STS_SHIFT);
823 (*desc)++;
824 }
825 }
826
827
828
829
830
831
832
833
834
835
836
837 static void dwc2_gadget_config_nonisoc_xfer_ddma(struct dwc2_hsotg_ep *hs_ep,
838 dma_addr_t dma_buff,
839 unsigned int len)
840 {
841 struct usb_request *ureq = NULL;
842 struct dwc2_dma_desc *desc = hs_ep->desc_list;
843 struct scatterlist *sg;
844 int i;
845 u8 desc_count = 0;
846
847 if (hs_ep->req)
848 ureq = &hs_ep->req->req;
849
850
851 if (!ureq || !ureq->num_sgs) {
852 dwc2_gadget_fill_nonisoc_xfer_ddma_one(hs_ep, &desc,
853 dma_buff, len, true);
854 return;
855 }
856
857
858 for_each_sg(ureq->sg, sg, ureq->num_sgs, i) {
859 dwc2_gadget_fill_nonisoc_xfer_ddma_one(hs_ep, &desc,
860 sg_dma_address(sg) + sg->offset, sg_dma_len(sg),
861 sg_is_last(sg));
862 desc_count += hs_ep->desc_count;
863 }
864
865 hs_ep->desc_count = desc_count;
866 }
867
868
869
870
871
872
873
874
875
876
877
878
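/**
 * dwc2_gadget_fill_isoc_desc - fill one isochronous DMA descriptor
 * @hs_ep: The isochronous endpoint.
 * @dma_buff: DMA address of the request buffer.
 * @len: Transfer length in bytes.
 *
 * Fills the next free descriptor in the endpoint's chain, clears the L
 * bit of the previous descriptor so the chain keeps growing, and marks
 * the new descriptor host-ready. For IN endpoints the PID and target
 * frame number are also programmed.
 *
 * Returns 0 on success, or 1 if the descriptor chain is full.
 */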
879 static int dwc2_gadget_fill_isoc_desc(struct dwc2_hsotg_ep *hs_ep,
880 dma_addr_t dma_buff, unsigned int len)
881 {
882 struct dwc2_dma_desc *desc;
883 struct dwc2_hsotg *hsotg = hs_ep->parent;
884 u32 index;
885 u32 maxsize = 0;
886 u32 mask = 0;
887 u8 pid = 0;
888
889 maxsize = dwc2_gadget_get_desc_params(hs_ep, &mask);
890
891 index = hs_ep->next_desc;
892 desc = &hs_ep->desc_list[index];
893
894
895 if ((desc->status >> DEV_DMA_BUFF_STS_SHIFT) ==
896 DEV_DMA_BUFF_STS_HREADY) {
897 dev_dbg(hsotg->dev, "%s: desc chain full\n", __func__);
898 return 1;
899 }
900
901
902 if (hs_ep->next_desc)
903 hs_ep->desc_list[index - 1].status &= ~DEV_DMA_L;
904
905 dev_dbg(hsotg->dev, "%s: Filling ep %d, dir %s isoc desc # %d\n",
906 __func__, hs_ep->index, hs_ep->dir_in ? "in" : "out", index);
907
908 desc->status = 0;
909 desc->status |= (DEV_DMA_BUFF_STS_HBUSY << DEV_DMA_BUFF_STS_SHIFT);
910
911 desc->buf = dma_buff;
912 desc->status |= (DEV_DMA_L | DEV_DMA_IOC |
913 ((len << DEV_DMA_NBYTES_SHIFT) & mask));
914
915 if (hs_ep->dir_in) {
916 if (len)
917 pid = DIV_ROUND_UP(len, hs_ep->ep.maxpacket);
918 else
919 pid = 1;
920 desc->status |= ((pid << DEV_DMA_ISOC_PID_SHIFT) &
921 DEV_DMA_ISOC_PID_MASK) |
922 ((len % hs_ep->ep.maxpacket) ?
923 DEV_DMA_SHORT : 0) |
924 ((hs_ep->target_frame <<
925 DEV_DMA_ISOC_FRNUM_SHIFT) &
926 DEV_DMA_ISOC_FRNUM_MASK);
927 }
928
929 desc->status &= ~DEV_DMA_BUFF_STS_MASK;
930 desc->status |= (DEV_DMA_BUFF_STS_HREADY << DEV_DMA_BUFF_STS_SHIFT);
931
932
933 if (hs_ep->dir_in)
934 dwc2_gadget_incr_frame_num(hs_ep);
935
936
937 hs_ep->next_desc++;
938 if (hs_ep->next_desc >= MAX_DMA_DESC_NUM_HS_ISOC)
939 hs_ep->next_desc = 0;
940
941 return 0;
942 }
943
944
945
946
947
948
949
950
951 static void dwc2_gadget_start_isoc_ddma(struct dwc2_hsotg_ep *hs_ep)
952 {
953 struct dwc2_hsotg *hsotg = hs_ep->parent;
954 struct dwc2_hsotg_req *hs_req, *treq;
955 int index = hs_ep->index;
956 int ret;
957 int i;
958 u32 dma_reg;
959 u32 depctl;
960 u32 ctrl;
961 struct dwc2_dma_desc *desc;
962
963 if (list_empty(&hs_ep->queue)) {
964 hs_ep->target_frame = TARGET_FRAME_INITIAL;
965 dev_dbg(hsotg->dev, "%s: No requests in queue\n", __func__);
966 return;
967 }
968
969
970 for (i = 0; i < MAX_DMA_DESC_NUM_HS_ISOC; i++) {
971 desc = &hs_ep->desc_list[i];
972 desc->status = 0;
973 desc->status |= (DEV_DMA_BUFF_STS_HBUSY
974 << DEV_DMA_BUFF_STS_SHIFT);
975 }
976
977 hs_ep->next_desc = 0;
978 list_for_each_entry_safe(hs_req, treq, &hs_ep->queue, queue) {
979 dma_addr_t dma_addr = hs_req->req.dma;
980
981 if (hs_req->req.num_sgs) {
982 WARN_ON(hs_req->req.num_sgs > 1);
983 dma_addr = sg_dma_address(hs_req->req.sg);
984 }
985 ret = dwc2_gadget_fill_isoc_desc(hs_ep, dma_addr,
986 hs_req->req.length);
987 if (ret)
988 break;
989 }
990
991 hs_ep->compl_desc = 0;
992 depctl = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index);
993 dma_reg = hs_ep->dir_in ? DIEPDMA(index) : DOEPDMA(index);
994
995
996 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg);
997
998 ctrl = dwc2_readl(hsotg, depctl);
999 ctrl |= DXEPCTL_EPENA | DXEPCTL_CNAK;
1000 dwc2_writel(hsotg, ctrl, depctl);
1001 }
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
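/**
 * dwc2_hsotg_start_req - start a USB request on an endpoint
 * @hsotg: The device state.
 * @hs_ep: The endpoint to process the request for.
 * @hs_req: The request to start.
 * @continuing: True if this is a continuation of the current request.
 *
 * Limits the transfer to what the hardware can take in one go, programs
 * the size/packet-count register or the DDMA descriptor chain, sets the
 * DMA address (or pre-loads the FIFO in slave mode), then enables the
 * endpoint and unmasks its interrupt.
 */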
1013 static void dwc2_hsotg_start_req(struct dwc2_hsotg *hsotg,
1014 struct dwc2_hsotg_ep *hs_ep,
1015 struct dwc2_hsotg_req *hs_req,
1016 bool continuing)
1017 {
1018 struct usb_request *ureq = &hs_req->req;
1019 int index = hs_ep->index;
1020 int dir_in = hs_ep->dir_in;
1021 u32 epctrl_reg;
1022 u32 epsize_reg;
1023 u32 epsize;
1024 u32 ctrl;
1025 unsigned int length;
1026 unsigned int packets;
1027 unsigned int maxreq;
1028 unsigned int dma_reg;
1029
1030 if (index != 0) {
1031 if (hs_ep->req && !continuing) {
1032 dev_err(hsotg->dev, "%s: active request\n", __func__);
1033 WARN_ON(1);
1034 return;
1035 } else if (hs_ep->req != hs_req && continuing) {
1036 dev_err(hsotg->dev,
1037 "%s: continue different req\n", __func__);
1038 WARN_ON(1);
1039 return;
1040 }
1041 }
1042
1043 dma_reg = dir_in ? DIEPDMA(index) : DOEPDMA(index);
1044 epctrl_reg = dir_in ? DIEPCTL(index) : DOEPCTL(index);
1045 epsize_reg = dir_in ? DIEPTSIZ(index) : DOEPTSIZ(index);
1046
1047 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x, ep %d, dir %s\n",
1048 __func__, dwc2_readl(hsotg, epctrl_reg), index,
1049 hs_ep->dir_in ? "in" : "out");
1050
1051
1052 ctrl = dwc2_readl(hsotg, epctrl_reg);
1053
1054 if (index && ctrl & DXEPCTL_STALL) {
1055 dev_warn(hsotg->dev, "%s: ep%d is stalled\n", __func__, index);
1056 return;
1057 }
1058
1059 length = ureq->length - ureq->actual;
1060 dev_dbg(hsotg->dev, "ureq->length:%d ureq->actual:%d\n",
1061 ureq->length, ureq->actual);
1062
1063 if (!using_desc_dma(hsotg))
1064 maxreq = get_ep_limit(hs_ep);
1065 else
1066 maxreq = dwc2_gadget_get_chain_limit(hs_ep);
1067
1068 if (length > maxreq) {
1069 int round = maxreq % hs_ep->ep.maxpacket;
1070
1071 dev_dbg(hsotg->dev, "%s: length %d, max-req %d, r %d\n",
1072 __func__, length, maxreq, round);
1073
1074
1075 if (round)
1076 maxreq -= round;
1077
1078 length = maxreq;
1079 }
1080
1081 if (length)
1082 packets = DIV_ROUND_UP(length, hs_ep->ep.maxpacket);
1083 else
1084 packets = 1;
1085
1086 if (dir_in && index != 0)
1087 if (hs_ep->isochronous)
1088 epsize = DXEPTSIZ_MC(packets);
1089 else
1090 epsize = DXEPTSIZ_MC(1);
1091 else
1092 epsize = 0;
1093
1094
1095
1096
1097
1098 if (dir_in && ureq->zero && !continuing) {
1099
1100 if ((ureq->length >= hs_ep->ep.maxpacket) &&
1101 !(ureq->length % hs_ep->ep.maxpacket))
1102 hs_ep->send_zlp = 1;
1103 }
1104
1105 epsize |= DXEPTSIZ_PKTCNT(packets);
1106 epsize |= DXEPTSIZ_XFERSIZE(length);
1107
1108 dev_dbg(hsotg->dev, "%s: %d@%d/%d, 0x%08x => 0x%08x\n",
1109 __func__, packets, length, ureq->length, epsize, epsize_reg);
1110
1111
1112 hs_ep->req = hs_req;
1113
1114 if (using_desc_dma(hsotg)) {
1115 u32 offset = 0;
1116 u32 mps = hs_ep->ep.maxpacket;
1117
1118
1119 if (!dir_in) {
1120 if (!index)
1121 length = mps;
1122 else if (length % mps)
1123 length += (mps - (length % mps));
1124 }
1125
1126
1127
1128
1129
1130
1131 if (!index && hsotg->ep0_state == DWC2_EP0_DATA_OUT &&
1132 continuing)
1133 offset = ureq->actual;
1134
1135
1136 dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, ureq->dma + offset,
1137 length);
1138
1139
1140 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg);
1141
1142 dev_dbg(hsotg->dev, "%s: %08x pad => 0x%08x\n",
1143 __func__, (u32)hs_ep->desc_list_dma, dma_reg);
1144 } else {
1145
1146 dwc2_writel(hsotg, epsize, epsize_reg);
1147
1148 if (using_dma(hsotg) && !continuing && (length != 0)) {
1149
1150
1151
1152
1153
1154 dwc2_writel(hsotg, ureq->dma, dma_reg);
1155
1156 dev_dbg(hsotg->dev, "%s: %pad => 0x%08x\n",
1157 __func__, &ureq->dma, dma_reg);
1158 }
1159 }
1160
1161 if (hs_ep->isochronous && hs_ep->interval == 1) {
1162 hs_ep->target_frame = dwc2_hsotg_read_frameno(hsotg);
1163 dwc2_gadget_incr_frame_num(hs_ep);
1164
1165 if (hs_ep->target_frame & 0x1)
1166 ctrl |= DXEPCTL_SETODDFR;
1167 else
1168 ctrl |= DXEPCTL_SETEVENFR;
1169 }
1170
1171 ctrl |= DXEPCTL_EPENA;
1172
1173 dev_dbg(hsotg->dev, "ep0 state:%d\n", hsotg->ep0_state);
1174
1175
1176 if (!(index == 0 && hsotg->ep0_state == DWC2_EP0_SETUP))
1177 ctrl |= DXEPCTL_CNAK;
1178
1179 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl);
1180 dwc2_writel(hsotg, ctrl, epctrl_reg);
1181
1182
1183
1184
1185
1186
1187 hs_ep->size_loaded = length;
1188 hs_ep->last_load = ureq->actual;
1189
1190 if (dir_in && !using_dma(hsotg)) {
1191
1192 hs_ep->fifo_load = 0;
1193
1194 dwc2_hsotg_write_fifo(hsotg, hs_ep, hs_req);
1195 }
1196
1197
1198
1199
1200
1201
1202
1203 if (!(dwc2_readl(hsotg, epctrl_reg) & DXEPCTL_EPENA))
1204 dev_dbg(hsotg->dev,
1205 "ep%d: failed to become enabled (DXEPCTL=0x%08x)?\n",
1206 index, dwc2_readl(hsotg, epctrl_reg));
1207
1208 dev_dbg(hsotg->dev, "%s: DXEPCTL=0x%08x\n",
1209 __func__, dwc2_readl(hsotg, epctrl_reg));
1210
1211
1212 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 1);
1213 }
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227 static int dwc2_hsotg_map_dma(struct dwc2_hsotg *hsotg,
1228 struct dwc2_hsotg_ep *hs_ep,
1229 struct usb_request *req)
1230 {
1231 int ret;
1232
1233 ret = usb_gadget_map_request(&hsotg->gadget, req, hs_ep->dir_in);
1234 if (ret)
1235 goto dma_error;
1236
1237 return 0;
1238
1239 dma_error:
1240 dev_err(hsotg->dev, "%s: failed to map buffer %p, %d bytes\n",
1241 __func__, req->buf, req->length);
1242
1243 return -EIO;
1244 }
1245
1246 static int dwc2_hsotg_handle_unaligned_buf_start(struct dwc2_hsotg *hsotg,
1247 struct dwc2_hsotg_ep *hs_ep,
1248 struct dwc2_hsotg_req *hs_req)
1249 {
1250 void *req_buf = hs_req->req.buf;
1251
1252
1253 if (!using_dma(hsotg) || !((long)req_buf & 3))
1254 return 0;
1255
1256 WARN_ON(hs_req->saved_req_buf);
1257
1258 dev_dbg(hsotg->dev, "%s: %s: buf=%p length=%d\n", __func__,
1259 hs_ep->ep.name, req_buf, hs_req->req.length);
1260
1261 hs_req->req.buf = kmalloc(hs_req->req.length, GFP_ATOMIC);
1262 if (!hs_req->req.buf) {
1263 hs_req->req.buf = req_buf;
1264 dev_err(hsotg->dev,
1265 "%s: unable to allocate memory for bounce buffer\n",
1266 __func__);
1267 return -ENOMEM;
1268 }
1269
1270
1271 hs_req->saved_req_buf = req_buf;
1272
1273 if (hs_ep->dir_in)
1274 memcpy(hs_req->req.buf, req_buf, hs_req->req.length);
1275 return 0;
1276 }
1277
1278 static void
1279 dwc2_hsotg_handle_unaligned_buf_complete(struct dwc2_hsotg *hsotg,
1280 struct dwc2_hsotg_ep *hs_ep,
1281 struct dwc2_hsotg_req *hs_req)
1282 {
1283
1284 if (!using_dma(hsotg) || !hs_req->saved_req_buf)
1285 return;
1286
1287 dev_dbg(hsotg->dev, "%s: %s: status=%d actual-length=%d\n", __func__,
1288 hs_ep->ep.name, hs_req->req.status, hs_req->req.actual);
1289
1290
1291 if (!hs_ep->dir_in && !hs_req->req.status)
1292 memcpy(hs_req->saved_req_buf, hs_req->req.buf,
1293 hs_req->req.actual);
1294
1295
1296 kfree(hs_req->req.buf);
1297
1298 hs_req->req.buf = hs_req->saved_req_buf;
1299 hs_req->saved_req_buf = NULL;
1300 }
1301
1302
1303
1304
1305
1306
1307
1308
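/**
 * dwc2_gadget_target_frame_elapsed - check if the target frame has passed
 * @hs_ep: The endpoint to check.
 *
 * Compares the endpoint's target (micro)frame with the frame number last
 * read from the core, taking a pending frame-counter wrap (frame_overrun)
 * into account. Returns true if the target frame has already elapsed.
 */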
1309 static bool dwc2_gadget_target_frame_elapsed(struct dwc2_hsotg_ep *hs_ep)
1310 {
1311 struct dwc2_hsotg *hsotg = hs_ep->parent;
1312 u32 target_frame = hs_ep->target_frame;
1313 u32 current_frame = hsotg->frame_number;
1314 bool frame_overrun = hs_ep->frame_overrun;
1315
1316 if (!frame_overrun && current_frame >= target_frame)
1317 return true;
1318
1319 if (frame_overrun && current_frame >= target_frame &&
1320 ((current_frame - target_frame) < DSTS_SOFFN_LIMIT / 2))
1321 return true;
1322
1323 return false;
1324 }
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334 static int dwc2_gadget_set_ep0_desc_chain(struct dwc2_hsotg *hsotg,
1335 struct dwc2_hsotg_ep *hs_ep)
1336 {
1337 switch (hsotg->ep0_state) {
1338 case DWC2_EP0_SETUP:
1339 case DWC2_EP0_STATUS_OUT:
1340 hs_ep->desc_list = hsotg->setup_desc[0];
1341 hs_ep->desc_list_dma = hsotg->setup_desc_dma[0];
1342 break;
1343 case DWC2_EP0_DATA_IN:
1344 case DWC2_EP0_STATUS_IN:
1345 hs_ep->desc_list = hsotg->ctrl_in_desc;
1346 hs_ep->desc_list_dma = hsotg->ctrl_in_desc_dma;
1347 break;
1348 case DWC2_EP0_DATA_OUT:
1349 hs_ep->desc_list = hsotg->ctrl_out_desc;
1350 hs_ep->desc_list_dma = hsotg->ctrl_out_desc_dma;
1351 break;
1352 default:
1353 dev_err(hsotg->dev, "invalid EP 0 state in queue %d\n",
1354 hsotg->ep0_state);
1355 return -EINVAL;
1356 }
1357
1358 return 0;
1359 }
1360
1361 static int dwc2_hsotg_ep_queue(struct usb_ep *ep, struct usb_request *req,
1362 gfp_t gfp_flags)
1363 {
1364 struct dwc2_hsotg_req *hs_req = our_req(req);
1365 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
1366 struct dwc2_hsotg *hs = hs_ep->parent;
1367 bool first;
1368 int ret;
1369 u32 maxsize = 0;
1370 u32 mask = 0;
1371
1372
1373 dev_dbg(hs->dev, "%s: req %p: %d@%p, noi=%d, zero=%d, snok=%d\n",
1374 ep->name, req, req->length, req->buf, req->no_interrupt,
1375 req->zero, req->short_not_ok);
1376
1377
1378 if (hs->lx_state != DWC2_L0) {
1379 dev_dbg(hs->dev, "%s: submit request only in active state\n",
1380 __func__);
1381 return -EAGAIN;
1382 }
1383
1384
1385 INIT_LIST_HEAD(&hs_req->queue);
1386 req->actual = 0;
1387 req->status = -EINPROGRESS;
1388
1389
1390 if (hs_ep->isochronous &&
1391 req->length > (hs_ep->mc * hs_ep->ep.maxpacket)) {
1392 dev_err(hs->dev, "req length > maxpacket*mc\n");
1393 return -EINVAL;
1394 }
1395
1396
1397
1398
1399 if (using_desc_dma(hs) && hs_ep->isochronous) {
1400 maxsize = dwc2_gadget_get_desc_params(hs_ep, &mask);
1401 if (hs_ep->dir_in && req->length > maxsize) {
1402 dev_err(hs->dev, "wrong length %d (maxsize=%d)\n",
1403 req->length, maxsize);
1404 return -EINVAL;
1405 }
1406
1407 if (!hs_ep->dir_in && req->length > hs_ep->ep.maxpacket) {
1408 dev_err(hs->dev, "ISOC OUT: wrong length %d (mps=%d)\n",
1409 req->length, hs_ep->ep.maxpacket);
1410 return -EINVAL;
1411 }
1412 }
1413
1414 ret = dwc2_hsotg_handle_unaligned_buf_start(hs, hs_ep, hs_req);
1415 if (ret)
1416 return ret;
1417
1418
1419 if (using_dma(hs)) {
1420 ret = dwc2_hsotg_map_dma(hs, hs_ep, req);
1421 if (ret)
1422 return ret;
1423 }
1424
1425 if (using_desc_dma(hs) && !hs_ep->index) {
1426 ret = dwc2_gadget_set_ep0_desc_chain(hs, hs_ep);
1427 if (ret)
1428 return ret;
1429 }
1430
1431 first = list_empty(&hs_ep->queue);
1432 list_add_tail(&hs_req->queue, &hs_ep->queue);
1433
1434
1435
1436
1437
1438
1439
1440 if (using_desc_dma(hs) && hs_ep->isochronous) {
1441 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) {
1442 dma_addr_t dma_addr = hs_req->req.dma;
1443
1444 if (hs_req->req.num_sgs) {
1445 WARN_ON(hs_req->req.num_sgs > 1);
1446 dma_addr = sg_dma_address(hs_req->req.sg);
1447 }
1448 dwc2_gadget_fill_isoc_desc(hs_ep, dma_addr,
1449 hs_req->req.length);
1450 }
1451 return 0;
1452 }
1453
1454
1455 if (!hs_ep->index && !req->length && !hs_ep->dir_in &&
1456 hs->ep0_state == DWC2_EP0_DATA_OUT)
1457 hs_ep->dir_in = 1;
1458
1459 if (first) {
1460 if (!hs_ep->isochronous) {
1461 dwc2_hsotg_start_req(hs, hs_ep, hs_req, false);
1462 return 0;
1463 }
1464
1465
1466 hs->frame_number = dwc2_hsotg_read_frameno(hs);
1467 while (dwc2_gadget_target_frame_elapsed(hs_ep)) {
1468 dwc2_gadget_incr_frame_num(hs_ep);
1469
1470
1471
1472 hs->frame_number = dwc2_hsotg_read_frameno(hs);
1473 }
1474
1475 if (hs_ep->target_frame != TARGET_FRAME_INITIAL)
1476 dwc2_hsotg_start_req(hs, hs_ep, hs_req, false);
1477 }
1478 return 0;
1479 }
1480
1481 static int dwc2_hsotg_ep_queue_lock(struct usb_ep *ep, struct usb_request *req,
1482 gfp_t gfp_flags)
1483 {
1484 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
1485 struct dwc2_hsotg *hs = hs_ep->parent;
1486 unsigned long flags = 0;
1487 int ret = 0;
1488
1489 spin_lock_irqsave(&hs->lock, flags);
1490 ret = dwc2_hsotg_ep_queue(ep, req, gfp_flags);
1491 spin_unlock_irqrestore(&hs->lock, flags);
1492
1493 return ret;
1494 }
1495
1496 static void dwc2_hsotg_ep_free_request(struct usb_ep *ep,
1497 struct usb_request *req)
1498 {
1499 struct dwc2_hsotg_req *hs_req = our_req(req);
1500
1501 kfree(hs_req);
1502 }
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512 static void dwc2_hsotg_complete_oursetup(struct usb_ep *ep,
1513 struct usb_request *req)
1514 {
1515 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
1516 struct dwc2_hsotg *hsotg = hs_ep->parent;
1517
1518 dev_dbg(hsotg->dev, "%s: ep %p, req %p\n", __func__, ep, req);
1519
1520 dwc2_hsotg_ep_free_request(ep, req);
1521 }
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531 static struct dwc2_hsotg_ep *ep_from_windex(struct dwc2_hsotg *hsotg,
1532 u32 windex)
1533 {
1534 struct dwc2_hsotg_ep *ep;
1535 int dir = (windex & USB_DIR_IN) ? 1 : 0;
1536 int idx = windex & 0x7F;
1537
1538 if (windex >= 0x100)
1539 return NULL;
1540
1541 if (idx > hsotg->num_of_eps)
1542 return NULL;
1543
1544 ep = index_to_ep(hsotg, idx, dir);
1545
1546 if (idx && ep->dir_in != dir)
1547 return NULL;
1548
1549 return ep;
1550 }
1551
1552
1553
1554
1555
1556
1557
1558 int dwc2_hsotg_set_test_mode(struct dwc2_hsotg *hsotg, int testmode)
1559 {
1560 int dctl = dwc2_readl(hsotg, DCTL);
1561
1562 dctl &= ~DCTL_TSTCTL_MASK;
1563 switch (testmode) {
1564 case TEST_J:
1565 case TEST_K:
1566 case TEST_SE0_NAK:
1567 case TEST_PACKET:
1568 case TEST_FORCE_EN:
1569 dctl |= testmode << DCTL_TSTCTL_SHIFT;
1570 break;
1571 default:
1572 return -EINVAL;
1573 }
1574 dwc2_writel(hsotg, dctl, DCTL);
1575 return 0;
1576 }
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588 static int dwc2_hsotg_send_reply(struct dwc2_hsotg *hsotg,
1589 struct dwc2_hsotg_ep *ep,
1590 void *buff,
1591 int length)
1592 {
1593 struct usb_request *req;
1594 int ret;
1595
1596 dev_dbg(hsotg->dev, "%s: buff %p, len %d\n", __func__, buff, length);
1597
1598 req = dwc2_hsotg_ep_alloc_request(&ep->ep, GFP_ATOMIC);
1599 hsotg->ep0_reply = req;
1600 if (!req) {
1601 dev_warn(hsotg->dev, "%s: cannot alloc req\n", __func__);
1602 return -ENOMEM;
1603 }
1604
1605 req->buf = hsotg->ep0_buff;
1606 req->length = length;
1607
1608
1609
1610
1611 req->zero = 0;
1612 req->complete = dwc2_hsotg_complete_oursetup;
1613
1614 if (length)
1615 memcpy(req->buf, buff, length);
1616
1617 ret = dwc2_hsotg_ep_queue(&ep->ep, req, GFP_ATOMIC);
1618 if (ret) {
1619 dev_warn(hsotg->dev, "%s: cannot queue req\n", __func__);
1620 return ret;
1621 }
1622
1623 return 0;
1624 }
1625
1626
1627
1628
1629
1630
1631 static int dwc2_hsotg_process_req_status(struct dwc2_hsotg *hsotg,
1632 struct usb_ctrlrequest *ctrl)
1633 {
1634 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0];
1635 struct dwc2_hsotg_ep *ep;
1636 __le16 reply;
1637 u16 status;
1638 int ret;
1639
1640 dev_dbg(hsotg->dev, "%s: USB_REQ_GET_STATUS\n", __func__);
1641
1642 if (!ep0->dir_in) {
1643 dev_warn(hsotg->dev, "%s: direction out?\n", __func__);
1644 return -EINVAL;
1645 }
1646
1647 switch (ctrl->bRequestType & USB_RECIP_MASK) {
1648 case USB_RECIP_DEVICE:
1649 status = 1 << USB_DEVICE_SELF_POWERED;
1650 status |= hsotg->remote_wakeup_allowed <<
1651 USB_DEVICE_REMOTE_WAKEUP;
1652 reply = cpu_to_le16(status);
1653 break;
1654
1655 case USB_RECIP_INTERFACE:
1656
1657 reply = cpu_to_le16(0);
1658 break;
1659
1660 case USB_RECIP_ENDPOINT:
1661 ep = ep_from_windex(hsotg, le16_to_cpu(ctrl->wIndex));
1662 if (!ep)
1663 return -ENOENT;
1664
1665 reply = cpu_to_le16(ep->halted ? 1 : 0);
1666 break;
1667
1668 default:
1669 return 0;
1670 }
1671
1672 if (le16_to_cpu(ctrl->wLength) != 2)
1673 return -EINVAL;
1674
1675 ret = dwc2_hsotg_send_reply(hsotg, ep0, &reply, 2);
1676 if (ret) {
1677 dev_err(hsotg->dev, "%s: failed to send reply\n", __func__);
1678 return ret;
1679 }
1680
1681 return 1;
1682 }
1683
1684 static int dwc2_hsotg_ep_sethalt(struct usb_ep *ep, int value, bool now);
1685
1686
1687
1688
1689
1690
1691
1692 static struct dwc2_hsotg_req *get_ep_head(struct dwc2_hsotg_ep *hs_ep)
1693 {
1694 return list_first_entry_or_null(&hs_ep->queue, struct dwc2_hsotg_req,
1695 queue);
1696 }
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706 static void dwc2_gadget_start_next_request(struct dwc2_hsotg_ep *hs_ep)
1707 {
1708 u32 mask;
1709 struct dwc2_hsotg *hsotg = hs_ep->parent;
1710 int dir_in = hs_ep->dir_in;
1711 struct dwc2_hsotg_req *hs_req;
1712 u32 epmsk_reg = dir_in ? DIEPMSK : DOEPMSK;
1713
1714 if (!list_empty(&hs_ep->queue)) {
1715 hs_req = get_ep_head(hs_ep);
1716 dwc2_hsotg_start_req(hsotg, hs_ep, hs_req, false);
1717 return;
1718 }
1719 if (!hs_ep->isochronous)
1720 return;
1721
1722 if (dir_in) {
1723 dev_dbg(hsotg->dev, "%s: No more ISOC-IN requests\n",
1724 __func__);
1725 } else {
1726 dev_dbg(hsotg->dev, "%s: No more ISOC-OUT requests\n",
1727 __func__);
1728 mask = dwc2_readl(hsotg, epmsk_reg);
1729 mask |= DOEPMSK_OUTTKNEPDISMSK;
1730 dwc2_writel(hsotg, mask, epmsk_reg);
1731 }
1732 }
1733
1734
1735
1736
1737
1738
1739 static int dwc2_hsotg_process_req_feature(struct dwc2_hsotg *hsotg,
1740 struct usb_ctrlrequest *ctrl)
1741 {
1742 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0];
1743 struct dwc2_hsotg_req *hs_req;
1744 bool set = (ctrl->bRequest == USB_REQ_SET_FEATURE);
1745 struct dwc2_hsotg_ep *ep;
1746 int ret;
1747 bool halted;
1748 u32 recip;
1749 u32 wValue;
1750 u32 wIndex;
1751
1752 dev_dbg(hsotg->dev, "%s: %s_FEATURE\n",
1753 __func__, set ? "SET" : "CLEAR");
1754
1755 wValue = le16_to_cpu(ctrl->wValue);
1756 wIndex = le16_to_cpu(ctrl->wIndex);
1757 recip = ctrl->bRequestType & USB_RECIP_MASK;
1758
1759 switch (recip) {
1760 case USB_RECIP_DEVICE:
1761 switch (wValue) {
1762 case USB_DEVICE_REMOTE_WAKEUP:
1763 if (set)
1764 hsotg->remote_wakeup_allowed = 1;
1765 else
1766 hsotg->remote_wakeup_allowed = 0;
1767 break;
1768
1769 case USB_DEVICE_TEST_MODE:
1770 if ((wIndex & 0xff) != 0)
1771 return -EINVAL;
1772 if (!set)
1773 return -EINVAL;
1774
1775 hsotg->test_mode = wIndex >> 8;
1776 break;
1777 default:
1778 return -ENOENT;
1779 }
1780
1781 ret = dwc2_hsotg_send_reply(hsotg, ep0, NULL, 0);
1782 if (ret) {
1783 dev_err(hsotg->dev,
1784 "%s: failed to send reply\n", __func__);
1785 return ret;
1786 }
1787 break;
1788
1789 case USB_RECIP_ENDPOINT:
1790 ep = ep_from_windex(hsotg, wIndex);
1791 if (!ep) {
1792 dev_dbg(hsotg->dev, "%s: no endpoint for 0x%04x\n",
1793 __func__, wIndex);
1794 return -ENOENT;
1795 }
1796
1797 switch (wValue) {
1798 case USB_ENDPOINT_HALT:
1799 halted = ep->halted;
1800
1801 dwc2_hsotg_ep_sethalt(&ep->ep, set, true);
1802
1803 ret = dwc2_hsotg_send_reply(hsotg, ep0, NULL, 0);
1804 if (ret) {
1805 dev_err(hsotg->dev,
1806 "%s: failed to send reply\n", __func__);
1807 return ret;
1808 }
1809
1810
1811
1812
1813
1814
1815 if (!set && halted) {
1816
1817
1818
1819
1820 if (ep->req) {
1821 hs_req = ep->req;
1822 ep->req = NULL;
1823 list_del_init(&hs_req->queue);
1824 if (hs_req->req.complete) {
1825 spin_unlock(&hsotg->lock);
1826 usb_gadget_giveback_request(
1827 &ep->ep, &hs_req->req);
1828 spin_lock(&hsotg->lock);
1829 }
1830 }
1831
1832
1833 if (!ep->req)
1834 dwc2_gadget_start_next_request(ep);
1835 }
1836
1837 break;
1838
1839 default:
1840 return -ENOENT;
1841 }
1842 break;
1843 default:
1844 return -ENOENT;
1845 }
1846 return 1;
1847 }
1848
1849 static void dwc2_hsotg_enqueue_setup(struct dwc2_hsotg *hsotg);
1850
1851
1852
1853
1854
1855
1856
1857 static void dwc2_hsotg_stall_ep0(struct dwc2_hsotg *hsotg)
1858 {
1859 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0];
1860 u32 reg;
1861 u32 ctrl;
1862
1863 dev_dbg(hsotg->dev, "ep0 stall (dir=%d)\n", ep0->dir_in);
1864 reg = (ep0->dir_in) ? DIEPCTL0 : DOEPCTL0;
1865
1866
1867
1868
1869
1870
1871 ctrl = dwc2_readl(hsotg, reg);
1872 ctrl |= DXEPCTL_STALL;
1873 ctrl |= DXEPCTL_CNAK;
1874 dwc2_writel(hsotg, ctrl, reg);
1875
1876 dev_dbg(hsotg->dev,
1877 "written DXEPCTL=0x%08x to %08x (DXEPCTL=0x%08x)\n",
1878 ctrl, reg, dwc2_readl(hsotg, reg));
1879
1880
1881
1882
1883
1884 dwc2_hsotg_enqueue_setup(hsotg);
1885 }
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
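/**
 * dwc2_hsotg_process_control - process a control request
 * @hsotg: The device state.
 * @ctrl: The control request received.
 *
 * A SETUP packet has been received on endpoint 0. Work out the EP0
 * direction and state, handle standard SET_ADDRESS, GET_STATUS and
 * SET/CLEAR_FEATURE requests locally, hand everything else to the
 * gadget driver's setup() callback, and stall EP0 if the request
 * could not be handled.
 */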
1896 static void dwc2_hsotg_process_control(struct dwc2_hsotg *hsotg,
1897 struct usb_ctrlrequest *ctrl)
1898 {
1899 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0];
1900 int ret = 0;
1901 u32 dcfg;
1902
1903 dev_dbg(hsotg->dev,
1904 "ctrl Type=%02x, Req=%02x, V=%04x, I=%04x, L=%04x\n",
1905 ctrl->bRequestType, ctrl->bRequest, ctrl->wValue,
1906 ctrl->wIndex, ctrl->wLength);
1907
1908 if (ctrl->wLength == 0) {
1909 ep0->dir_in = 1;
1910 hsotg->ep0_state = DWC2_EP0_STATUS_IN;
1911 } else if (ctrl->bRequestType & USB_DIR_IN) {
1912 ep0->dir_in = 1;
1913 hsotg->ep0_state = DWC2_EP0_DATA_IN;
1914 } else {
1915 ep0->dir_in = 0;
1916 hsotg->ep0_state = DWC2_EP0_DATA_OUT;
1917 }
1918
1919 if ((ctrl->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) {
1920 switch (ctrl->bRequest) {
1921 case USB_REQ_SET_ADDRESS:
1922 hsotg->connected = 1;
1923 dcfg = dwc2_readl(hsotg, DCFG);
1924 dcfg &= ~DCFG_DEVADDR_MASK;
1925 dcfg |= (le16_to_cpu(ctrl->wValue) <<
1926 DCFG_DEVADDR_SHIFT) & DCFG_DEVADDR_MASK;
1927 dwc2_writel(hsotg, dcfg, DCFG);
1928
1929 dev_info(hsotg->dev, "new address %d\n", ctrl->wValue);
1930
1931 ret = dwc2_hsotg_send_reply(hsotg, ep0, NULL, 0);
1932 return;
1933
1934 case USB_REQ_GET_STATUS:
1935 ret = dwc2_hsotg_process_req_status(hsotg, ctrl);
1936 break;
1937
1938 case USB_REQ_CLEAR_FEATURE:
1939 case USB_REQ_SET_FEATURE:
1940 ret = dwc2_hsotg_process_req_feature(hsotg, ctrl);
1941 break;
1942 }
1943 }
1944
1945
1946
1947 if (ret == 0 && hsotg->driver) {
1948 spin_unlock(&hsotg->lock);
1949 ret = hsotg->driver->setup(&hsotg->gadget, ctrl);
1950 spin_lock(&hsotg->lock);
1951 if (ret < 0)
1952 dev_dbg(hsotg->dev, "driver->setup() ret %d\n", ret);
1953 }
1954
1955 hsotg->delayed_status = false;
1956 if (ret == USB_GADGET_DELAYED_STATUS)
1957 hsotg->delayed_status = true;
1958
1959
1960
1961
1962
1963
1964 if (ret < 0)
1965 dwc2_hsotg_stall_ep0(hsotg);
1966 }
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976 static void dwc2_hsotg_complete_setup(struct usb_ep *ep,
1977 struct usb_request *req)
1978 {
1979 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
1980 struct dwc2_hsotg *hsotg = hs_ep->parent;
1981
1982 if (req->status < 0) {
1983 dev_dbg(hsotg->dev, "%s: failed %d\n", __func__, req->status);
1984 return;
1985 }
1986
1987 spin_lock(&hsotg->lock);
1988 if (req->actual == 0)
1989 dwc2_hsotg_enqueue_setup(hsotg);
1990 else
1991 dwc2_hsotg_process_control(hsotg, req->buf);
1992 spin_unlock(&hsotg->lock);
1993 }
1994
1995
1996
1997
1998
1999
2000
2001
2002 static void dwc2_hsotg_enqueue_setup(struct dwc2_hsotg *hsotg)
2003 {
2004 struct usb_request *req = hsotg->ctrl_req;
2005 struct dwc2_hsotg_req *hs_req = our_req(req);
2006 int ret;
2007
2008 dev_dbg(hsotg->dev, "%s: queueing setup request\n", __func__);
2009
2010 req->zero = 0;
2011 req->length = 8;
2012 req->buf = hsotg->ctrl_buff;
2013 req->complete = dwc2_hsotg_complete_setup;
2014
2015 if (!list_empty(&hs_req->queue)) {
2016 dev_dbg(hsotg->dev, "%s already queued???\n", __func__);
2017 return;
2018 }
2019
2020 hsotg->eps_out[0]->dir_in = 0;
2021 hsotg->eps_out[0]->send_zlp = 0;
2022 hsotg->ep0_state = DWC2_EP0_SETUP;
2023
2024 ret = dwc2_hsotg_ep_queue(&hsotg->eps_out[0]->ep, req, GFP_ATOMIC);
2025 if (ret < 0) {
2026 dev_err(hsotg->dev, "%s: failed queue (%d)\n", __func__, ret);
2027
2028
2029
2030
2031 }
2032 }
2033
2034 static void dwc2_hsotg_program_zlp(struct dwc2_hsotg *hsotg,
2035 struct dwc2_hsotg_ep *hs_ep)
2036 {
2037 u32 ctrl;
2038 u8 index = hs_ep->index;
2039 u32 epctl_reg = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index);
2040 u32 epsiz_reg = hs_ep->dir_in ? DIEPTSIZ(index) : DOEPTSIZ(index);
2041
2042 if (hs_ep->dir_in)
2043 dev_dbg(hsotg->dev, "Sending zero-length packet on ep%d\n",
2044 index);
2045 else
2046 dev_dbg(hsotg->dev, "Receiving zero-length packet on ep%d\n",
2047 index);
2048 if (using_desc_dma(hsotg)) {
2049
2050 dma_addr_t dma = hs_ep->desc_list_dma;
2051
2052 if (!index)
2053 dwc2_gadget_set_ep0_desc_chain(hsotg, hs_ep);
2054
2055 dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, dma, 0);
2056 } else {
2057 dwc2_writel(hsotg, DXEPTSIZ_MC(1) | DXEPTSIZ_PKTCNT(1) |
2058 DXEPTSIZ_XFERSIZE(0),
2059 epsiz_reg);
2060 }
2061
2062 ctrl = dwc2_readl(hsotg, epctl_reg);
2063 ctrl |= DXEPCTL_CNAK;
2064 ctrl |= DXEPCTL_EPENA;
2065 ctrl |= DXEPCTL_USBACTEP;
2066 dwc2_writel(hsotg, ctrl, epctl_reg);
2067 }
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
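/**
 * dwc2_hsotg_complete_request - complete a request given to us
 * @hsotg: The device state.
 * @hs_ep: The endpoint the request was on.
 * @hs_req: The request being completed.
 * @result: The result code (0 => Ok, otherwise errno)
 *
 * Records the result, unmaps any DMA mapping, removes the request from
 * the endpoint queue and calls the gadget's completion callback with
 * the controller lock dropped. If the endpoint is now idle (and this is
 * not a DDMA isochronous endpoint) the next queued request is started.
 */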
2082 static void dwc2_hsotg_complete_request(struct dwc2_hsotg *hsotg,
2083 struct dwc2_hsotg_ep *hs_ep,
2084 struct dwc2_hsotg_req *hs_req,
2085 int result)
2086 {
2087 if (!hs_req) {
2088 dev_dbg(hsotg->dev, "%s: nothing to complete?\n", __func__);
2089 return;
2090 }
2091
2092 dev_dbg(hsotg->dev, "complete: ep %p %s, req %p, %d => %p\n",
2093 hs_ep, hs_ep->ep.name, hs_req, result, hs_req->req.complete);
2094
2095
2096
2097
2098
2099
2100 if (hs_req->req.status == -EINPROGRESS)
2101 hs_req->req.status = result;
2102
2103 if (using_dma(hsotg))
2104 dwc2_hsotg_unmap_dma(hsotg, hs_ep, hs_req);
2105
2106 dwc2_hsotg_handle_unaligned_buf_complete(hsotg, hs_ep, hs_req);
2107
2108 hs_ep->req = NULL;
2109 list_del_init(&hs_req->queue);
2110
2111
2112
2113
2114
2115
2116 if (hs_req->req.complete) {
2117 spin_unlock(&hsotg->lock);
2118 usb_gadget_giveback_request(&hs_ep->ep, &hs_req->req);
2119 spin_lock(&hsotg->lock);
2120 }
2121
2122
2123 if (using_desc_dma(hsotg) && hs_ep->isochronous)
2124 return;
2125
2126
2127
2128
2129
2130
2131
2132 if (!hs_ep->req && result >= 0)
2133 dwc2_gadget_start_next_request(hs_ep);
2134 }
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145 static void dwc2_gadget_complete_isoc_request_ddma(struct dwc2_hsotg_ep *hs_ep)
2146 {
2147 struct dwc2_hsotg *hsotg = hs_ep->parent;
2148 struct dwc2_hsotg_req *hs_req;
2149 struct usb_request *ureq;
2150 u32 desc_sts;
2151 u32 mask;
2152
2153 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;
2154
2155
2156 while ((desc_sts & DEV_DMA_BUFF_STS_MASK) >>
2157 DEV_DMA_BUFF_STS_SHIFT == DEV_DMA_BUFF_STS_DMADONE) {
2158
2159 hs_req = get_ep_head(hs_ep);
2160 if (!hs_req) {
2161 dev_warn(hsotg->dev, "%s: ISOC EP queue empty\n", __func__);
2162 return;
2163 }
2164 ureq = &hs_req->req;
2165
2166
2167 if ((desc_sts & DEV_DMA_STS_MASK) >> DEV_DMA_STS_SHIFT ==
2168 DEV_DMA_STS_SUCC) {
2169 mask = hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_MASK :
2170 DEV_DMA_ISOC_RX_NBYTES_MASK;
2171 ureq->actual = ureq->length - ((desc_sts & mask) >>
2172 DEV_DMA_ISOC_NBYTES_SHIFT);
2173
2174
2175
2176
2177 if (!hs_ep->dir_in && ureq->length & 0x3)
2178 ureq->actual += 4 - (ureq->length & 0x3);
2179
2180
2181 ureq->frame_number =
2182 (desc_sts & DEV_DMA_ISOC_FRNUM_MASK) >>
2183 DEV_DMA_ISOC_FRNUM_SHIFT;
2184 }
2185
2186 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, 0);
2187
2188 hs_ep->compl_desc++;
2189 if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1))
2190 hs_ep->compl_desc = 0;
2191 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;
2192 }
2193 }
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203
2204 static void dwc2_gadget_handle_isoc_bna(struct dwc2_hsotg_ep *hs_ep)
2205 {
2206 struct dwc2_hsotg *hsotg = hs_ep->parent;
2207
2208 if (!hs_ep->dir_in)
2209 dwc2_flush_rx_fifo(hsotg);
2210 dwc2_hsotg_complete_request(hsotg, hs_ep, get_ep_head(hs_ep), 0);
2211
2212 hs_ep->target_frame = TARGET_FRAME_INITIAL;
2213 hs_ep->next_desc = 0;
2214 hs_ep->compl_desc = 0;
2215 }
2216
2217
2218
2219
2220
2221
2222
2223
2224
2225
2226
2227 static void dwc2_hsotg_rx_data(struct dwc2_hsotg *hsotg, int ep_idx, int size)
2228 {
2229 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[ep_idx];
2230 struct dwc2_hsotg_req *hs_req = hs_ep->req;
2231 int to_read;
2232 int max_req;
2233 int read_ptr;
2234
2235 if (!hs_req) {
2236 u32 epctl = dwc2_readl(hsotg, DOEPCTL(ep_idx));
2237 int ptr;
2238
2239 dev_dbg(hsotg->dev,
2240 "%s: FIFO %d bytes on ep%d but no req (DXEPCTl=0x%08x)\n",
2241 __func__, size, ep_idx, epctl);
2242
2243
2244 for (ptr = 0; ptr < size; ptr += 4)
2245 (void)dwc2_readl(hsotg, EPFIFO(ep_idx));
2246
2247 return;
2248 }
2249
2250 to_read = size;
2251 read_ptr = hs_req->req.actual;
2252 max_req = hs_req->req.length - read_ptr;
2253
2254 dev_dbg(hsotg->dev, "%s: read %d/%d, done %d/%d\n",
2255 __func__, to_read, max_req, read_ptr, hs_req->req.length);
2256
2257 if (to_read > max_req) {
2258
2259
2260
2261
2262
2263
2264 WARN_ON_ONCE(1);
2265 }
2266
2267 hs_ep->total_data += to_read;
2268 hs_req->req.actual += to_read;
2269 to_read = DIV_ROUND_UP(to_read, 4);
2270
2271
2272
2273
2274
2275 dwc2_readl_rep(hsotg, EPFIFO(ep_idx),
2276 hs_req->req.buf + read_ptr, to_read);
2277 }
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289
2290
2291 static void dwc2_hsotg_ep0_zlp(struct dwc2_hsotg *hsotg, bool dir_in)
2292 {
2293
2294 hsotg->eps_out[0]->dir_in = dir_in;
2295 hsotg->ep0_state = dir_in ? DWC2_EP0_STATUS_IN : DWC2_EP0_STATUS_OUT;
2296
2297 dwc2_hsotg_program_zlp(hsotg, hsotg->eps_out[0]);
2298 }
2299
2300 static void dwc2_hsotg_change_ep_iso_parity(struct dwc2_hsotg *hsotg,
2301 u32 epctl_reg)
2302 {
2303 u32 ctrl;
2304
2305 ctrl = dwc2_readl(hsotg, epctl_reg);
2306 if (ctrl & DXEPCTL_EOFRNUM)
2307 ctrl |= DXEPCTL_SETEVENFR;
2308 else
2309 ctrl |= DXEPCTL_SETODDFR;
2310 dwc2_writel(hsotg, ctrl, epctl_reg);
2311 }
2312
2313
2314
2315
2316
2317
2318
2319
2320 static unsigned int dwc2_gadget_get_xfersize_ddma(struct dwc2_hsotg_ep *hs_ep)
2321 {
2322 struct dwc2_hsotg *hsotg = hs_ep->parent;
2323 unsigned int bytes_rem = 0;
2324 struct dwc2_dma_desc *desc = hs_ep->desc_list;
2325 int i;
2326 u32 status;
2327
2328 if (!desc)
2329 return -EINVAL;
2330
2331 for (i = 0; i < hs_ep->desc_count; ++i) {
2332 status = desc->status;
2333 bytes_rem += status & DEV_DMA_NBYTES_MASK;
2334
2335 if (status & DEV_DMA_STS_MASK)
2336 dev_err(hsotg->dev, "descriptor %d closed with %x\n",
2337 i, status & DEV_DMA_STS_MASK);
2338 desc++;
2339 }
2340
2341 return bytes_rem;
2342 }
2343
2344
2345
2346
2347
2348
2349
2350
2351
2352
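/**
 * dwc2_hsotg_handle_outdone - handle the end of an OUT transfer
 * @hsotg: The device instance.
 * @epnum: The endpoint the transfer was on.
 *
 * An OUT transfer has finished: work out how much data was actually
 * received, restart the transfer if more data is expected, send the
 * EP0 status-in ZLP where required, and complete the request.
 */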
2353 static void dwc2_hsotg_handle_outdone(struct dwc2_hsotg *hsotg, int epnum)
2354 {
2355 u32 epsize = dwc2_readl(hsotg, DOEPTSIZ(epnum));
2356 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[epnum];
2357 struct dwc2_hsotg_req *hs_req = hs_ep->req;
2358 struct usb_request *req = &hs_req->req;
2359 unsigned int size_left = DXEPTSIZ_XFERSIZE_GET(epsize);
2360 int result = 0;
2361
2362 if (!hs_req) {
2363 dev_dbg(hsotg->dev, "%s: no request active\n", __func__);
2364 return;
2365 }
2366
2367 if (epnum == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_OUT) {
2368 dev_dbg(hsotg->dev, "zlp packet received\n");
2369 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, 0);
2370 dwc2_hsotg_enqueue_setup(hsotg);
2371 return;
2372 }
2373
2374 if (using_desc_dma(hsotg))
2375 size_left = dwc2_gadget_get_xfersize_ddma(hs_ep);
2376
2377 if (using_dma(hsotg)) {
2378 unsigned int size_done;
2379
2380
2381
2382
2383
2384
2385
2386
2387
2388
2389 size_done = hs_ep->size_loaded - size_left;
2390 size_done += hs_ep->last_load;
2391
2392 req->actual = size_done;
2393 }
2394
2395
2396 if (req->actual < req->length && size_left == 0) {
2397 dwc2_hsotg_start_req(hsotg, hs_ep, hs_req, true);
2398 return;
2399 }
2400
2401 if (req->actual < req->length && req->short_not_ok) {
2402 dev_dbg(hsotg->dev, "%s: got %d/%d (short not ok) => error\n",
2403 __func__, req->actual, req->length);
2404
2405
2406
2407
2408
2409 }
2410
2411
2412 if (!using_desc_dma(hsotg) && epnum == 0 &&
2413 hsotg->ep0_state == DWC2_EP0_DATA_OUT) {
2414
2415 if (!hsotg->delayed_status)
2416 dwc2_hsotg_ep0_zlp(hsotg, true);
2417 }
2418
2419
2420
2421
2422
2423 if (!using_dma(hsotg)) {
2424 if (hs_ep->isochronous && hs_ep->interval == 1)
2425 dwc2_hsotg_change_ep_iso_parity(hsotg, DOEPCTL(epnum));
2426 else if (hs_ep->isochronous && hs_ep->interval > 1)
2427 dwc2_gadget_incr_frame_num(hs_ep);
2428 }
2429
2430
2431 if (!using_desc_dma(hsotg) && hs_ep->isochronous)
2432 req->frame_number = hsotg->frame_number;
2433
2434 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, result);
2435 }
2436
2437
2438
2439
2440
2441
2442
2443
2444
2445
2446
2447
2448
2449
2450
2451
2452
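/**
 * dwc2_hsotg_handle_rx - handle the RX FIFO status (slave mode)
 * @hsotg: The device instance.
 *
 * Pops one status entry from GRXSTSP and acts on it: reads OUT or SETUP
 * data into the current request, or processes the OutDone/SetupDone
 * events. Only used when the controller is not in DMA mode.
 */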
2453 static void dwc2_hsotg_handle_rx(struct dwc2_hsotg *hsotg)
2454 {
2455 u32 grxstsr = dwc2_readl(hsotg, GRXSTSP);
2456 u32 epnum, status, size;
2457
2458 WARN_ON(using_dma(hsotg));
2459
2460 epnum = grxstsr & GRXSTS_EPNUM_MASK;
2461 status = grxstsr & GRXSTS_PKTSTS_MASK;
2462
2463 size = grxstsr & GRXSTS_BYTECNT_MASK;
2464 size >>= GRXSTS_BYTECNT_SHIFT;
2465
2466 dev_dbg(hsotg->dev, "%s: GRXSTSP=0x%08x (%d@%d)\n",
2467 __func__, grxstsr, size, epnum);
2468
2469 switch ((status & GRXSTS_PKTSTS_MASK) >> GRXSTS_PKTSTS_SHIFT) {
2470 case GRXSTS_PKTSTS_GLOBALOUTNAK:
2471 dev_dbg(hsotg->dev, "GLOBALOUTNAK\n");
2472 break;
2473
2474 case GRXSTS_PKTSTS_OUTDONE:
2475 dev_dbg(hsotg->dev, "OutDone (Frame=0x%08x)\n",
2476 dwc2_hsotg_read_frameno(hsotg));
2477
2478 if (!using_dma(hsotg))
2479 dwc2_hsotg_handle_outdone(hsotg, epnum);
2480 break;
2481
2482 case GRXSTS_PKTSTS_SETUPDONE:
2483 dev_dbg(hsotg->dev,
2484 "SetupDone (Frame=0x%08x, DOPEPCTL=0x%08x)\n",
2485 dwc2_hsotg_read_frameno(hsotg),
2486 dwc2_readl(hsotg, DOEPCTL(0)));
2487
2488
2489
2490
2491
2492 if (hsotg->ep0_state == DWC2_EP0_SETUP)
2493 dwc2_hsotg_handle_outdone(hsotg, epnum);
2494 break;
2495
2496 case GRXSTS_PKTSTS_OUTRX:
2497 dwc2_hsotg_rx_data(hsotg, epnum, size);
2498 break;
2499
2500 case GRXSTS_PKTSTS_SETUPRX:
2501 dev_dbg(hsotg->dev,
2502 "SetupRX (Frame=0x%08x, DOPEPCTL=0x%08x)\n",
2503 dwc2_hsotg_read_frameno(hsotg),
2504 dwc2_readl(hsotg, DOEPCTL(0)));
2505
2506 WARN_ON(hsotg->ep0_state != DWC2_EP0_SETUP);
2507
2508 dwc2_hsotg_rx_data(hsotg, epnum, size);
2509 break;
2510
2511 default:
2512 dev_warn(hsotg->dev, "%s: unknown status %08x\n",
2513 __func__, grxstsr);
2514
2515 dwc2_hsotg_dump(hsotg);
2516 break;
2517 }
2518 }
2519
2520
2521
2522
2523
2524 static u32 dwc2_hsotg_ep0_mps(unsigned int mps)
2525 {
2526 switch (mps) {
2527 case 64:
2528 return D0EPCTL_MPS_64;
2529 case 32:
2530 return D0EPCTL_MPS_32;
2531 case 16:
2532 return D0EPCTL_MPS_16;
2533 case 8:
2534 return D0EPCTL_MPS_8;
2535 }
2536
2537
2538 WARN_ON(1);
2539 return (u32)-1;
2540 }
2541
2542
2543
2544
2545
2546
2547
2548
2549
2550
2551
2552
2553 static void dwc2_hsotg_set_ep_maxpacket(struct dwc2_hsotg *hsotg,
2554 unsigned int ep, unsigned int mps,
2555 unsigned int mc, unsigned int dir_in)
2556 {
2557 struct dwc2_hsotg_ep *hs_ep;
2558 u32 reg;
2559
2560 hs_ep = index_to_ep(hsotg, ep, dir_in);
2561 if (!hs_ep)
2562 return;
2563
2564 if (ep == 0) {
2565 u32 mps_bytes = mps;
2566
2567
2568 mps = dwc2_hsotg_ep0_mps(mps_bytes);
2569 if (mps > 3)
2570 goto bad_mps;
2571 hs_ep->ep.maxpacket = mps_bytes;
2572 hs_ep->mc = 1;
2573 } else {
2574 if (mps > 1024)
2575 goto bad_mps;
2576 hs_ep->mc = mc;
2577 if (mc > 3)
2578 goto bad_mps;
2579 hs_ep->ep.maxpacket = mps;
2580 }
2581
2582 if (dir_in) {
2583 reg = dwc2_readl(hsotg, DIEPCTL(ep));
2584 reg &= ~DXEPCTL_MPS_MASK;
2585 reg |= mps;
2586 dwc2_writel(hsotg, reg, DIEPCTL(ep));
2587 } else {
2588 reg = dwc2_readl(hsotg, DOEPCTL(ep));
2589 reg &= ~DXEPCTL_MPS_MASK;
2590 reg |= mps;
2591 dwc2_writel(hsotg, reg, DOEPCTL(ep));
2592 }
2593
2594 return;
2595
2596 bad_mps:
2597 dev_err(hsotg->dev, "ep%d: bad mps of %d\n", ep, mps);
2598 }
2599
2600
2601
2602
2603
2604
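/*
 * dwc2_hsotg_txfifo_flush - flush a TX FIFO
 * @hsotg: The driver state
 * @idx: The index of the FIFO to flush
 *
 * Issue a flush for the given TX FIFO via GRSTCTL and wait for the
 * flush bit to clear, warning on timeout.
 */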
2605 static void dwc2_hsotg_txfifo_flush(struct dwc2_hsotg *hsotg, unsigned int idx)
2606 {
2607 dwc2_writel(hsotg, GRSTCTL_TXFNUM(idx) | GRSTCTL_TXFFLSH,
2608 GRSTCTL);
2609
2610
2611 if (dwc2_hsotg_wait_bit_clear(hsotg, GRSTCTL, GRSTCTL_TXFFLSH, 100))
2612 dev_warn(hsotg->dev, "%s: timeout flushing fifo GRSTCTL_TXFFLSH\n",
2613 __func__);
2614 }
2615
2616
2617
2618
2619
2620
2621
2622
2623
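/*
 * dwc2_hsotg_trytx - check to see if anything needs transmitting
 * @hsotg: The driver state
 * @hs_ep: The driver endpoint to check
 *
 * Check to see if there is a request that has data to send, and if so,
 * make an attempt to write data into the FIFO. If the endpoint has no
 * current request (or is not an IN endpoint), its endpoint interrupt is
 * disabled again, except for EP0.
 */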
2624 static int dwc2_hsotg_trytx(struct dwc2_hsotg *hsotg,
2625 struct dwc2_hsotg_ep *hs_ep)
2626 {
2627 struct dwc2_hsotg_req *hs_req = hs_ep->req;
2628
2629 if (!hs_ep->dir_in || !hs_req) {
2630
2631
2632
2633
2634 if (hs_ep->index != 0)
2635 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index,
2636 hs_ep->dir_in, 0);
2637 return 0;
2638 }
2639
2640 if (hs_req->req.actual < hs_req->req.length) {
2641 dev_dbg(hsotg->dev, "trying to write more for ep%d\n",
2642 hs_ep->index);
2643 return dwc2_hsotg_write_fifo(hsotg, hs_ep, hs_req);
2644 }
2645
2646 return 0;
2647 }
2648
2649
2650
2651
2652
2653
2654
2655
2656
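/*
 * dwc2_hsotg_complete_in - complete IN transfer
 * @hsotg: The device state
 * @hs_ep: The endpoint that has just completed
 *
 * An IN transfer has finished: work out how much data was actually
 * sent, restart the transfer if more data remains, program a ZLP if
 * one is required, and otherwise complete the request (or advance the
 * EP0 state machine).
 */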
2657 static void dwc2_hsotg_complete_in(struct dwc2_hsotg *hsotg,
2658 struct dwc2_hsotg_ep *hs_ep)
2659 {
2660 struct dwc2_hsotg_req *hs_req = hs_ep->req;
2661 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index));
2662 int size_left, size_done;
2663
2664 if (!hs_req) {
2665 dev_dbg(hsotg->dev, "XferCompl but no req\n");
2666 return;
2667 }
2668
2669
2670 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_IN) {
2671 dev_dbg(hsotg->dev, "zlp packet sent\n");
2672
2673
2674
2675
2676
2677 hs_ep->dir_in = 0;
2678
2679 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, 0);
2680 if (hsotg->test_mode) {
2681 int ret;
2682
2683 ret = dwc2_hsotg_set_test_mode(hsotg, hsotg->test_mode);
2684 if (ret < 0) {
2685 dev_dbg(hsotg->dev, "Invalid Test #%d\n",
2686 hsotg->test_mode);
2687 dwc2_hsotg_stall_ep0(hsotg);
2688 return;
2689 }
2690 }
2691 dwc2_hsotg_enqueue_setup(hsotg);
2692 return;
2693 }
2694
2695
2696
2697
2698
2699
2700
2701
2702
2703
2704 if (using_desc_dma(hsotg)) {
2705 size_left = dwc2_gadget_get_xfersize_ddma(hs_ep);
2706 if (size_left < 0)
2707 dev_err(hsotg->dev, "error parsing DDMA results %d\n",
2708 size_left);
2709 } else {
2710 size_left = DXEPTSIZ_XFERSIZE_GET(epsize);
2711 }
2712
2713 size_done = hs_ep->size_loaded - size_left;
2714 size_done += hs_ep->last_load;
2715
2716 if (hs_req->req.actual != size_done)
2717 dev_dbg(hsotg->dev, "%s: adjusting size done %d => %d\n",
2718 __func__, hs_req->req.actual, size_done);
2719
2720 hs_req->req.actual = size_done;
2721 dev_dbg(hsotg->dev, "req->length:%d req->actual:%d req->zero:%d\n",
2722 hs_req->req.length, hs_req->req.actual, hs_req->req.zero);
2723
2724 if (!size_left && hs_req->req.actual < hs_req->req.length) {
2725 dev_dbg(hsotg->dev, "%s trying more for req...\n", __func__);
2726 dwc2_hsotg_start_req(hsotg, hs_ep, hs_req, true);
2727 return;
2728 }
2729
2730
2731 if (hs_ep->send_zlp) {
2732 dwc2_hsotg_program_zlp(hsotg, hs_ep);
2733 hs_ep->send_zlp = 0;
2734
2735 return;
2736 }
2737
2738 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_DATA_IN) {
2739
2740 dwc2_hsotg_ep0_zlp(hsotg, false);
2741 return;
2742 }
2743
2744 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, 0);
2745 }
2746
2747
2748
2749
2750
2751
2752
2753
2754
2755
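/*
 * dwc2_gadget_read_ep_interrupts - read interrupts for a given endpoint
 * @hsotg: The device state
 * @idx: Index of the endpoint
 * @dir_in: Endpoint direction, 1 for IN, 0 for OUT
 *
 * Return the pending DxEPINT bits for the endpoint, masked by the
 * corresponding DIEPMSK/DOEPMSK value; the TX FIFO empty bit (gated by
 * DIEPEMPMSK) and the setup-received bit are always allowed through.
 */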
2756 static u32 dwc2_gadget_read_ep_interrupts(struct dwc2_hsotg *hsotg,
2757 unsigned int idx, int dir_in)
2758 {
2759 u32 epmsk_reg = dir_in ? DIEPMSK : DOEPMSK;
2760 u32 epint_reg = dir_in ? DIEPINT(idx) : DOEPINT(idx);
2761 u32 ints;
2762 u32 mask;
2763 u32 diepempmsk;
2764
2765 mask = dwc2_readl(hsotg, epmsk_reg);
2766 diepempmsk = dwc2_readl(hsotg, DIEPEMPMSK);
2767 mask |= ((diepempmsk >> idx) & 0x1) ? DIEPMSK_TXFIFOEMPTY : 0;
2768 mask |= DXEPINT_SETUP_RCVD;
2769
2770 ints = dwc2_readl(hsotg, epint_reg);
2771 ints &= mask;
2772 return ints;
2773 }
2774
2775
2776
2777
2778
2779
2780
2781
2782
2783
2784
2785
2786
2787
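/*
 * dwc2_gadget_handle_ep_disabled - handle the EPDisbld interrupt
 * @hs_ep: The endpoint on which the interrupt was asserted
 *
 * The endpoint has been disabled at the application's request. For an
 * IN endpoint the TX FIFO is flushed; an isochronous IN transfer is
 * completed, and a stalled bulk endpoint has the non-periodic IN NAK
 * cleared. For an isochronous OUT endpoint the global OUT NAK is
 * cleared, requests whose frames have already elapsed are completed
 * with -ENODATA and the next request is started.
 */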
2788 static void dwc2_gadget_handle_ep_disabled(struct dwc2_hsotg_ep *hs_ep)
2789 {
2790 struct dwc2_hsotg *hsotg = hs_ep->parent;
2791 struct dwc2_hsotg_req *hs_req;
2792 unsigned char idx = hs_ep->index;
2793 int dir_in = hs_ep->dir_in;
2794 u32 epctl_reg = dir_in ? DIEPCTL(idx) : DOEPCTL(idx);
2795 int dctl = dwc2_readl(hsotg, DCTL);
2796
2797 dev_dbg(hsotg->dev, "%s: EPDisbld\n", __func__);
2798
2799 if (dir_in) {
2800 int epctl = dwc2_readl(hsotg, epctl_reg);
2801
2802 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index);
2803
2804 if (hs_ep->isochronous) {
2805 dwc2_hsotg_complete_in(hsotg, hs_ep);
2806 return;
2807 }
2808
2809 if ((epctl & DXEPCTL_STALL) && (epctl & DXEPCTL_EPTYPE_BULK)) {
2810 int dctl = dwc2_readl(hsotg, DCTL);
2811
2812 dctl |= DCTL_CGNPINNAK;
2813 dwc2_writel(hsotg, dctl, DCTL);
2814 }
2815 return;
2816 }
2817
2818 if (dctl & DCTL_GOUTNAKSTS) {
2819 dctl |= DCTL_CGOUTNAK;
2820 dwc2_writel(hsotg, dctl, DCTL);
2821 }
2822
2823 if (!hs_ep->isochronous)
2824 return;
2825
2826 if (list_empty(&hs_ep->queue)) {
2827 dev_dbg(hsotg->dev, "%s: complete_ep 0x%p, ep->queue empty!\n",
2828 __func__, hs_ep);
2829 return;
2830 }
2831
2832 do {
2833 hs_req = get_ep_head(hs_ep);
2834 if (hs_req)
2835 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req,
2836 -ENODATA);
2837 dwc2_gadget_incr_frame_num(hs_ep);
2838
2839 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg);
2840 } while (dwc2_gadget_target_frame_elapsed(hs_ep));
2841
2842 dwc2_gadget_start_next_request(hs_ep);
2843 }
2844
2845
2846
2847
2848
2849
2850
2851
2852
2853
2854
2855
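/*
 * dwc2_gadget_handle_out_token_ep_disabled - handle an OUT token on a
 * disabled endpoint (OUTTKNEPDIS)
 * @ep: The endpoint on which the interrupt was asserted
 *
 * Used to start an isochronous OUT endpoint: the first OUT token
 * arrived while the endpoint was still disabled, so latch the current
 * frame number as the target frame. In descriptor-DMA mode the ISOC
 * descriptor chain is started; otherwise the even/odd frame bit is
 * programmed, the next request is started and the interrupt is masked
 * again.
 */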
2856 static void dwc2_gadget_handle_out_token_ep_disabled(struct dwc2_hsotg_ep *ep)
2857 {
2858 struct dwc2_hsotg *hsotg = ep->parent;
2859 int dir_in = ep->dir_in;
2860 u32 doepmsk;
2861
2862 if (dir_in || !ep->isochronous)
2863 return;
2864
2865 if (using_desc_dma(hsotg)) {
2866 if (ep->target_frame == TARGET_FRAME_INITIAL) {
2867
2868 ep->target_frame = hsotg->frame_number;
2869 dwc2_gadget_start_isoc_ddma(ep);
2870 }
2871 return;
2872 }
2873
2874 if (ep->interval > 1 &&
2875 ep->target_frame == TARGET_FRAME_INITIAL) {
2876 u32 ctrl;
2877
2878 ep->target_frame = hsotg->frame_number;
2879 dwc2_gadget_incr_frame_num(ep);
2880
2881 ctrl = dwc2_readl(hsotg, DOEPCTL(ep->index));
2882 if (ep->target_frame & 0x1)
2883 ctrl |= DXEPCTL_SETODDFR;
2884 else
2885 ctrl |= DXEPCTL_SETEVENFR;
2886
2887 dwc2_writel(hsotg, ctrl, DOEPCTL(ep->index));
2888 }
2889
2890 dwc2_gadget_start_next_request(ep);
2891 doepmsk = dwc2_readl(hsotg, DOEPMSK);
2892 doepmsk &= ~DOEPMSK_OUTTKNEPDISMSK;
2893 dwc2_writel(hsotg, doepmsk, DOEPMSK);
2894 }
2895
2896
2897
2898
2899
2900
2901
2902
2903
2904
2905
2906
2907
2908
2909
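/*
 * dwc2_gadget_handle_nak - handle a NAK interrupt
 * @hs_ep: The endpoint on which the interrupt was asserted
 *
 * Used to start an isochronous IN endpoint: a NAK was seen before any
 * data was loaded, so take the current frame number as the initial
 * target frame. In descriptor-DMA mode the ISOC descriptor chain is
 * started (aligned to the service interval if that feature is
 * enabled); otherwise the even/odd frame bit is programmed and the
 * request at the head of the queue is completed so the next one can be
 * loaded for the right (micro)frame.
 */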
2910 static void dwc2_gadget_handle_nak(struct dwc2_hsotg_ep *hs_ep)
2911 {
2912 struct dwc2_hsotg *hsotg = hs_ep->parent;
2913 int dir_in = hs_ep->dir_in;
2914
2915 if (!dir_in || !hs_ep->isochronous)
2916 return;
2917
2918 if (hs_ep->target_frame == TARGET_FRAME_INITIAL) {
2919
2920 if (using_desc_dma(hsotg)) {
2921 hs_ep->target_frame = hsotg->frame_number;
2922 dwc2_gadget_incr_frame_num(hs_ep);
2923
2924
2925
2926
2927 if (hsotg->params.service_interval) {
2928 /* Align the target frame down to a multiple of the
2929  * interval: ~interval + 1 == -interval, which clears the
2930  * low-order frame bits for power-of-two intervals. */
2931 hs_ep->target_frame &= ~hs_ep->interval + 1;
2932
2933
2934
2935
2936 dwc2_gadget_incr_frame_num(hs_ep);
2937 dwc2_gadget_dec_frame_num_by_one(hs_ep);
2938 }
2939
2940 dwc2_gadget_start_isoc_ddma(hs_ep);
2941 return;
2942 }
2943
2944 hs_ep->target_frame = hsotg->frame_number;
2945 if (hs_ep->interval > 1) {
2946 u32 ctrl = dwc2_readl(hsotg,
2947 DIEPCTL(hs_ep->index));
2948 if (hs_ep->target_frame & 0x1)
2949 ctrl |= DXEPCTL_SETODDFR;
2950 else
2951 ctrl |= DXEPCTL_SETEVENFR;
2952
2953 dwc2_writel(hsotg, ctrl, DIEPCTL(hs_ep->index));
2954 }
2955
2956 dwc2_hsotg_complete_request(hsotg, hs_ep,
2957 get_ep_head(hs_ep), 0);
2958 }
2959
2960 if (!using_desc_dma(hsotg))
2961 dwc2_gadget_incr_frame_num(hs_ep);
2962 }
2963
2964
2965
2966
2967
2968
2969
2970
2971
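/*
 * dwc2_hsotg_epint - handle an IN/OUT endpoint interrupt
 * @hsotg: The driver state
 * @idx: The index for the endpoint (0..15)
 * @dir_in: Set if this is an IN endpoint
 *
 * Process and clear any interrupt pending for an individual endpoint.
 */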
2972 static void dwc2_hsotg_epint(struct dwc2_hsotg *hsotg, unsigned int idx,
2973 int dir_in)
2974 {
2975 struct dwc2_hsotg_ep *hs_ep = index_to_ep(hsotg, idx, dir_in);
2976 u32 epint_reg = dir_in ? DIEPINT(idx) : DOEPINT(idx);
2977 u32 epctl_reg = dir_in ? DIEPCTL(idx) : DOEPCTL(idx);
2978 u32 epsiz_reg = dir_in ? DIEPTSIZ(idx) : DOEPTSIZ(idx);
2979 u32 ints;
2980 u32 ctrl;
2981
2982 ints = dwc2_gadget_read_ep_interrupts(hsotg, idx, dir_in);
2983 ctrl = dwc2_readl(hsotg, epctl_reg);
2984
2985
2986 dwc2_writel(hsotg, ints, epint_reg);
2987
2988 if (!hs_ep) {
2989 dev_err(hsotg->dev, "%s:Interrupt for unconfigured ep%d(%s)\n",
2990 __func__, idx, dir_in ? "in" : "out");
2991 return;
2992 }
2993
2994 dev_dbg(hsotg->dev, "%s: ep%d(%s) DxEPINT=0x%08x\n",
2995 __func__, idx, dir_in ? "in" : "out", ints);
2996
2997
2998 if (idx == 0 && (ints & (DXEPINT_SETUP | DXEPINT_SETUP_RCVD)))
2999 ints &= ~DXEPINT_XFERCOMPL;
3000
3001
3002
3003
3004
3005
3006
3007 if (using_desc_dma(hsotg) && idx == 0 && !hs_ep->dir_in &&
3008 hsotg->ep0_state == DWC2_EP0_SETUP && !(ints & DXEPINT_SETUP))
3009 ints &= ~DXEPINT_XFERCOMPL;
3010
3011 if (ints & DXEPINT_XFERCOMPL) {
3012 dev_dbg(hsotg->dev,
3013 "%s: XferCompl: DxEPCTL=0x%08x, DXEPTSIZ=%08x\n",
3014 __func__, dwc2_readl(hsotg, epctl_reg),
3015 dwc2_readl(hsotg, epsiz_reg));
3016
3017
3018 if (using_desc_dma(hsotg) && hs_ep->isochronous) {
3019
3020 if (!(ints & DXEPINT_BNAINTR))
3021 dwc2_gadget_complete_isoc_request_ddma(hs_ep);
3022 } else if (dir_in) {
3023
3024
3025
3026
3027
3028 if (hs_ep->isochronous && hs_ep->interval > 1)
3029 dwc2_gadget_incr_frame_num(hs_ep);
3030
3031 dwc2_hsotg_complete_in(hsotg, hs_ep);
3032 if (ints & DXEPINT_NAKINTRPT)
3033 ints &= ~DXEPINT_NAKINTRPT;
3034
3035 if (idx == 0 && !hs_ep->req)
3036 dwc2_hsotg_enqueue_setup(hsotg);
3037 } else if (using_dma(hsotg)) {
3038
3039
3040
3041
3042 if (hs_ep->isochronous && hs_ep->interval > 1)
3043 dwc2_gadget_incr_frame_num(hs_ep);
3044
3045 dwc2_hsotg_handle_outdone(hsotg, idx);
3046 }
3047 }
3048
3049 if (ints & DXEPINT_EPDISBLD)
3050 dwc2_gadget_handle_ep_disabled(hs_ep);
3051
3052 if (ints & DXEPINT_OUTTKNEPDIS)
3053 dwc2_gadget_handle_out_token_ep_disabled(hs_ep);
3054
3055 if (ints & DXEPINT_NAKINTRPT)
3056 dwc2_gadget_handle_nak(hs_ep);
3057
3058 if (ints & DXEPINT_AHBERR)
3059 dev_dbg(hsotg->dev, "%s: AHBErr\n", __func__);
3060
3061 if (ints & DXEPINT_SETUP) {
3062 dev_dbg(hsotg->dev, "%s: Setup/Timeout\n", __func__);
3063
3064 if (using_dma(hsotg) && idx == 0) {
3065
3066
3067
3068
3069
3070
3071
3072 if (dir_in)
3073 WARN_ON_ONCE(1);
3074 else
3075 dwc2_hsotg_handle_outdone(hsotg, 0);
3076 }
3077 }
3078
3079 if (ints & DXEPINT_STSPHSERCVD) {
3080 dev_dbg(hsotg->dev, "%s: StsPhseRcvd\n", __func__);
3081
3082
3083 if (hsotg->ep0_state == DWC2_EP0_DATA_OUT) {
3084
3085 if (using_desc_dma(hsotg)) {
3086 if (!hsotg->delayed_status)
3087 dwc2_hsotg_ep0_zlp(hsotg, true);
3088 else
3089
3090
3091
3092
3093
3094
3095
3096 dwc2_set_bit(hsotg, DIEPCTL(0),
3097 DXEPCTL_CNAK);
3098 }
3099 }
3100
3101 }
3102
3103 if (ints & DXEPINT_BACK2BACKSETUP)
3104 dev_dbg(hsotg->dev, "%s: B2BSetup/INEPNakEff\n", __func__);
3105
3106 if (ints & DXEPINT_BNAINTR) {
3107 dev_dbg(hsotg->dev, "%s: BNA interrupt\n", __func__);
3108 if (hs_ep->isochronous)
3109 dwc2_gadget_handle_isoc_bna(hs_ep);
3110 }
3111
3112 if (dir_in && !hs_ep->isochronous) {
3113
3114 if (ints & DXEPINT_INTKNTXFEMP) {
3115 dev_dbg(hsotg->dev, "%s: ep%d: INTknTXFEmpMsk\n",
3116 __func__, idx);
3117 }
3118
3119
3120 if (ints & DXEPINT_INTKNEPMIS) {
3121 dev_warn(hsotg->dev, "%s: ep%d: INTknEP\n",
3122 __func__, idx);
3123 }
3124
3125
3126 if (hsotg->dedicated_fifos &&
3127 ints & DXEPINT_TXFEMP) {
3128 dev_dbg(hsotg->dev, "%s: ep%d: TxFIFOEmpty\n",
3129 __func__, idx);
3130 if (!using_dma(hsotg))
3131 dwc2_hsotg_trytx(hsotg, hs_ep);
3132 }
3133 }
3134 }
3135
3136
3137
3138
3139
3140
3141
3142
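/*
 * dwc2_hsotg_irq_enumdone - handle the EnumDone interrupt
 * @hsotg: The device state
 *
 * The device has finished being enumerated: read the connection speed
 * from DSTS, set the gadget speed and the endpoint maximum packet
 * sizes accordingly, then queue a setup request on EP0.
 */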
3143 static void dwc2_hsotg_irq_enumdone(struct dwc2_hsotg *hsotg)
3144 {
3145 u32 dsts = dwc2_readl(hsotg, DSTS);
3146 int ep0_mps = 0, ep_mps = 8;
3147
3148
3149
3150
3151
3152
3153
3154 dev_dbg(hsotg->dev, "EnumDone (DSTS=0x%08x)\n", dsts);
3155
3156
3157
3158
3159
3160
3161
3162
3163 switch ((dsts & DSTS_ENUMSPD_MASK) >> DSTS_ENUMSPD_SHIFT) {
3164 case DSTS_ENUMSPD_FS:
3165 case DSTS_ENUMSPD_FS48:
3166 hsotg->gadget.speed = USB_SPEED_FULL;
3167 ep0_mps = EP0_MPS_LIMIT;
3168 ep_mps = 1023;
3169 break;
3170
3171 case DSTS_ENUMSPD_HS:
3172 hsotg->gadget.speed = USB_SPEED_HIGH;
3173 ep0_mps = EP0_MPS_LIMIT;
3174 ep_mps = 1024;
3175 break;
3176
3177 case DSTS_ENUMSPD_LS:
3178 hsotg->gadget.speed = USB_SPEED_LOW;
3179 ep0_mps = 8;
3180 ep_mps = 8;
3181
3182
3183
3184
3185
3186 break;
3187 }
3188 dev_info(hsotg->dev, "new device is %s\n",
3189 usb_speed_string(hsotg->gadget.speed));
3190
3191
3192
3193
3194
3195
3196 if (ep0_mps) {
3197 int i;
3198
3199 dwc2_hsotg_set_ep_maxpacket(hsotg, 0, ep0_mps, 0, 1);
3200 dwc2_hsotg_set_ep_maxpacket(hsotg, 0, ep0_mps, 0, 0);
3201 for (i = 1; i < hsotg->num_of_eps; i++) {
3202 if (hsotg->eps_in[i])
3203 dwc2_hsotg_set_ep_maxpacket(hsotg, i, ep_mps,
3204 0, 1);
3205 if (hsotg->eps_out[i])
3206 dwc2_hsotg_set_ep_maxpacket(hsotg, i, ep_mps,
3207 0, 0);
3208 }
3209 }
3210
3211
3212
3213 dwc2_hsotg_enqueue_setup(hsotg);
3214
3215 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n",
3216 dwc2_readl(hsotg, DIEPCTL0),
3217 dwc2_readl(hsotg, DOEPCTL0));
3218 }
3219
3220
3221
3222
3223
3224
3225
3226
3227
3228
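/*
 * kill_all_requests - remove all requests from the endpoint's queue
 * @hsotg: The device state
 * @ep: The endpoint the requests may be on
 * @result: The result code to use
 *
 * Complete every request queued on the endpoint with the given result
 * code, and flush the endpoint's dedicated TX FIFO if it is not empty.
 */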
3229 static void kill_all_requests(struct dwc2_hsotg *hsotg,
3230 struct dwc2_hsotg_ep *ep,
3231 int result)
3232 {
3233 unsigned int size;
3234
3235 ep->req = NULL;
3236
3237 while (!list_empty(&ep->queue)) {
3238 struct dwc2_hsotg_req *req = get_ep_head(ep);
3239
3240 dwc2_hsotg_complete_request(hsotg, ep, req, result);
3241 }
3242
3243 if (!hsotg->dedicated_fifos)
3244 return;
3245 size = (dwc2_readl(hsotg, DTXFSTS(ep->fifo_index)) & 0xffff) * 4;
3246 if (size < ep->fifo_size)
3247 dwc2_hsotg_txfifo_flush(hsotg, ep->fifo_index);
3248 }
3249
3250
3251
3252
3253
3254
3255
3256
3257
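/*
 * dwc2_hsotg_disconnect - disconnect service
 * @hsotg: The device state
 *
 * The device has been disconnected: remove all current transactions
 * with -ESHUTDOWN, notify the gadget driver and mark the gadget as
 * not attached.
 */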
3258 void dwc2_hsotg_disconnect(struct dwc2_hsotg *hsotg)
3259 {
3260 unsigned int ep;
3261
3262 if (!hsotg->connected)
3263 return;
3264
3265 hsotg->connected = 0;
3266 hsotg->test_mode = 0;
3267
3268
3269 for (ep = 0; ep < hsotg->num_of_eps; ep++) {
3270 if (hsotg->eps_in[ep])
3271 kill_all_requests(hsotg, hsotg->eps_in[ep],
3272 -ESHUTDOWN);
3273 if (hsotg->eps_out[ep])
3274 kill_all_requests(hsotg, hsotg->eps_out[ep],
3275 -ESHUTDOWN);
3276 }
3277
3278 call_gadget(hsotg, disconnect);
3279 hsotg->lx_state = DWC2_L3;
3280
3281 usb_gadget_set_state(&hsotg->gadget, USB_STATE_NOTATTACHED);
3282 }
3283
3284
3285
3286
3287
3288
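/*
 * dwc2_hsotg_irq_fifoempty - TX FIFO empty interrupt handler
 * @hsotg: The device state
 * @periodic: True if this is a periodic FIFO interrupt
 *
 * Walk the IN endpoints of the matching periodic/non-periodic class
 * and try to write more data into their FIFOs.
 */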
3289 static void dwc2_hsotg_irq_fifoempty(struct dwc2_hsotg *hsotg, bool periodic)
3290 {
3291 struct dwc2_hsotg_ep *ep;
3292 int epno, ret;
3293
3294
3295 for (epno = 0; epno < hsotg->num_of_eps; epno++) {
3296 ep = index_to_ep(hsotg, epno, 1);
3297
3298 if (!ep)
3299 continue;
3300
3301 if (!ep->dir_in)
3302 continue;
3303
3304 if ((periodic && !ep->periodic) ||
3305 (!periodic && ep->periodic))
3306 continue;
3307
3308 ret = dwc2_hsotg_trytx(hsotg, ep);
3309 if (ret < 0)
3310 break;
3311 }
3312 }
3313
3314
3315 #define IRQ_RETRY_MASK (GINTSTS_NPTXFEMP | \
3316 GINTSTS_PTXFEMP | \
3317 GINTSTS_RXFLVL)
3318
3319 static int dwc2_hsotg_ep_disable(struct usb_ep *ep);
3320
3321
3322
3323
3324
3325
3326
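/*
 * dwc2_hsotg_core_init_disconnected - (re)initialize the core while
 * soft-disconnected
 * @hsotg: The device state
 * @is_usb_reset: True if called from the USB reset handler
 *
 * Reset the core (unless this is a USB reset, in which case the
 * endpoints are simply disabled), set up the PHY, FIFOs, device speed
 * and interrupt masks, and prepare EP0 to receive the first setup
 * packet.
 */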
3327 void dwc2_hsotg_core_init_disconnected(struct dwc2_hsotg *hsotg,
3328 bool is_usb_reset)
3329 {
3330 u32 intmsk;
3331 u32 val;
3332 u32 usbcfg;
3333 u32 dcfg = 0;
3334 int ep;
3335
3336
3337 kill_all_requests(hsotg, hsotg->eps_out[0], -ECONNRESET);
3338
3339 if (!is_usb_reset) {
3340 if (dwc2_core_reset(hsotg, true))
3341 return;
3342 } else {
3343
3344 for (ep = 1; ep < hsotg->num_of_eps; ep++) {
3345 if (hsotg->eps_in[ep])
3346 dwc2_hsotg_ep_disable(&hsotg->eps_in[ep]->ep);
3347 if (hsotg->eps_out[ep])
3348 dwc2_hsotg_ep_disable(&hsotg->eps_out[ep]->ep);
3349 }
3350 }
3351
3352
3353
3354
3355
3356
3357
3358 usbcfg = dwc2_readl(hsotg, GUSBCFG);
3359 usbcfg &= ~GUSBCFG_TOUTCAL_MASK;
3360 usbcfg |= GUSBCFG_TOUTCAL(7);
3361
3362
3363 usbcfg &= ~(GUSBCFG_SRPCAP | GUSBCFG_HNPCAP);
3364 dwc2_writel(hsotg, usbcfg, GUSBCFG);
3365
3366 dwc2_phy_init(hsotg, true);
3367
3368 dwc2_hsotg_init_fifo(hsotg);
3369
3370 if (!is_usb_reset)
3371 dwc2_set_bit(hsotg, DCTL, DCTL_SFTDISCON);
3372
3373 dcfg |= DCFG_EPMISCNT(1);
3374
3375 switch (hsotg->params.speed) {
3376 case DWC2_SPEED_PARAM_LOW:
3377 dcfg |= DCFG_DEVSPD_LS;
3378 break;
3379 case DWC2_SPEED_PARAM_FULL:
3380 if (hsotg->params.phy_type == DWC2_PHY_TYPE_PARAM_FS)
3381 dcfg |= DCFG_DEVSPD_FS48;
3382 else
3383 dcfg |= DCFG_DEVSPD_FS;
3384 break;
3385 default:
3386 dcfg |= DCFG_DEVSPD_HS;
3387 }
3388
3389 if (hsotg->params.ipg_isoc_en)
3390 dcfg |= DCFG_IPG_ISOC_SUPPORDED;
3391
3392 dwc2_writel(hsotg, dcfg, DCFG);
3393
3394
3395 dwc2_writel(hsotg, 0xffffffff, GOTGINT);
3396
3397
3398 dwc2_writel(hsotg, 0xffffffff, GINTSTS);
3399 intmsk = GINTSTS_ERLYSUSP | GINTSTS_SESSREQINT |
3400 GINTSTS_GOUTNAKEFF | GINTSTS_GINNAKEFF |
3401 GINTSTS_USBRST | GINTSTS_RESETDET |
3402 GINTSTS_ENUMDONE | GINTSTS_OTGINT |
3403 GINTSTS_USBSUSP | GINTSTS_WKUPINT |
3404 GINTSTS_LPMTRANRCVD;
3405
3406 if (!using_desc_dma(hsotg))
3407 intmsk |= GINTSTS_INCOMPL_SOIN | GINTSTS_INCOMPL_SOOUT;
3408
3409 if (!hsotg->params.external_id_pin_ctl)
3410 intmsk |= GINTSTS_CONIDSTSCHNG;
3411
3412 dwc2_writel(hsotg, intmsk, GINTMSK);
3413
3414 if (using_dma(hsotg)) {
3415 dwc2_writel(hsotg, GAHBCFG_GLBL_INTR_EN | GAHBCFG_DMA_EN |
3416 hsotg->params.ahbcfg,
3417 GAHBCFG);
3418
3419
3420 if (using_desc_dma(hsotg))
3421 dwc2_set_bit(hsotg, DCFG, DCFG_DESCDMA_EN);
3422
3423 } else {
3424 dwc2_writel(hsotg, ((hsotg->dedicated_fifos) ?
3425 (GAHBCFG_NP_TXF_EMP_LVL |
3426 GAHBCFG_P_TXF_EMP_LVL) : 0) |
3427 GAHBCFG_GLBL_INTR_EN, GAHBCFG);
3428 }
3429
3430
3431
3432
3433
3434
3435
3436 dwc2_writel(hsotg, ((hsotg->dedicated_fifos && !using_dma(hsotg)) ?
3437 DIEPMSK_TXFIFOEMPTY | DIEPMSK_INTKNTXFEMPMSK : 0) |
3438 DIEPMSK_EPDISBLDMSK | DIEPMSK_XFERCOMPLMSK |
3439 DIEPMSK_TIMEOUTMSK | DIEPMSK_AHBERRMSK,
3440 DIEPMSK);
3441
3442
3443
3444
3445
3446 dwc2_writel(hsotg, (using_dma(hsotg) ? (DIEPMSK_XFERCOMPLMSK |
3447 DOEPMSK_STSPHSERCVDMSK) : 0) |
3448 DOEPMSK_EPDISBLDMSK | DOEPMSK_AHBERRMSK |
3449 DOEPMSK_SETUPMSK,
3450 DOEPMSK);
3451
3452
3453 if (using_desc_dma(hsotg)) {
3454 dwc2_set_bit(hsotg, DOEPMSK, DOEPMSK_BNAMSK);
3455 dwc2_set_bit(hsotg, DIEPMSK, DIEPMSK_BNAININTRMSK);
3456 }
3457
3458
3459 if (using_desc_dma(hsotg) && hsotg->params.service_interval)
3460 dwc2_set_bit(hsotg, DCTL, DCTL_SERVICE_INTERVAL_SUPPORTED);
3461
3462 dwc2_writel(hsotg, 0, DAINTMSK);
3463
3464 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n",
3465 dwc2_readl(hsotg, DIEPCTL0),
3466 dwc2_readl(hsotg, DOEPCTL0));
3467
3468
3469 dwc2_hsotg_en_gsint(hsotg, GINTSTS_OEPINT | GINTSTS_IEPINT);
3470
3471
3472
3473
3474
3475
3476 if (!using_dma(hsotg))
3477 dwc2_hsotg_en_gsint(hsotg, GINTSTS_RXFLVL);
3478
3479
3480 dwc2_hsotg_ctrl_epint(hsotg, 0, 0, 1);
3481 dwc2_hsotg_ctrl_epint(hsotg, 0, 1, 1);
3482
3483 if (!is_usb_reset) {
3484 dwc2_set_bit(hsotg, DCTL, DCTL_PWRONPRGDONE);
3485 udelay(10);
3486 dwc2_clear_bit(hsotg, DCTL, DCTL_PWRONPRGDONE);
3487 }
3488
3489 dev_dbg(hsotg->dev, "DCTL=0x%08x\n", dwc2_readl(hsotg, DCTL));
3490
3491
3492
3493
3494
3495
3496
3497 dwc2_writel(hsotg, DXEPTSIZ_MC(1) | DXEPTSIZ_PKTCNT(1) |
3498 DXEPTSIZ_XFERSIZE(8), DOEPTSIZ0);
3499
3500 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) |
3501 DXEPCTL_CNAK | DXEPCTL_EPENA |
3502 DXEPCTL_USBACTEP,
3503 DOEPCTL0);
3504
3505
3506 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) |
3507 DXEPCTL_USBACTEP, DIEPCTL0);
3508
3509
3510 val = DCTL_CGOUTNAK | DCTL_CGNPINNAK;
3511 if (!is_usb_reset)
3512 val |= DCTL_SFTDISCON;
3513 dwc2_set_bit(hsotg, DCTL, val);
3514
3515
3516 dwc2_gadget_init_lpm(hsotg);
3517
3518
3519 if (using_desc_dma(hsotg) && hsotg->params.service_interval)
3520 dwc2_gadget_program_ref_clk(hsotg);
3521
3522
3523 mdelay(3);
3524
3525 hsotg->lx_state = DWC2_L0;
3526
3527 dwc2_hsotg_enqueue_setup(hsotg);
3528
3529 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n",
3530 dwc2_readl(hsotg, DIEPCTL0),
3531 dwc2_readl(hsotg, DOEPCTL0));
3532 }
3533
3534 static void dwc2_hsotg_core_disconnect(struct dwc2_hsotg *hsotg)
3535 {
3536
3537 dwc2_set_bit(hsotg, DCTL, DCTL_SFTDISCON);
3538 }
3539
3540 void dwc2_hsotg_core_connect(struct dwc2_hsotg *hsotg)
3541 {
3542
3543 dwc2_clear_bit(hsotg, DCTL, DCTL_SFTDISCON);
3544 }
3545
3546
3547
3548
3549
3550
3551
3552
3553
3554
3555
3556
3557
3558
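/*
 * dwc2_gadget_handle_incomplete_isoc_in - handle incomplete ISO IN
 * @hsotg: The device state
 *
 * An IN isochronous transfer did not complete in its (micro)frame:
 * disable every enabled, unmasked ISO IN endpoint whose target frame
 * has elapsed so it can be restarted, then clear the interrupt.
 */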
3559 static void dwc2_gadget_handle_incomplete_isoc_in(struct dwc2_hsotg *hsotg)
3560 {
3561 struct dwc2_hsotg_ep *hs_ep;
3562 u32 epctrl;
3563 u32 daintmsk;
3564 u32 idx;
3565
3566 dev_dbg(hsotg->dev, "Incomplete isoc in interrupt received:\n");
3567
3568 daintmsk = dwc2_readl(hsotg, DAINTMSK);
3569
3570 for (idx = 1; idx < hsotg->num_of_eps; idx++) {
3571 hs_ep = hsotg->eps_in[idx];
3572
3573 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous)
3574 continue;
3575
3576 epctrl = dwc2_readl(hsotg, DIEPCTL(idx));
3577 if ((epctrl & DXEPCTL_EPENA) &&
3578 dwc2_gadget_target_frame_elapsed(hs_ep)) {
3579 epctrl |= DXEPCTL_SNAK;
3580 epctrl |= DXEPCTL_EPDIS;
3581 dwc2_writel(hsotg, epctrl, DIEPCTL(idx));
3582 }
3583 }
3584
3585
3586 dwc2_writel(hsotg, GINTSTS_INCOMPL_SOIN, GINTSTS);
3587 }
3588
3589
3590
3591
3592
3593
3594
3595
3596
3597
3598
3599
3600
3601
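/*
 * dwc2_gadget_handle_incomplete_isoc_out - handle incomplete ISO OUT
 * @hsotg: The device state
 *
 * An OUT isochronous transfer did not complete in its (micro)frame:
 * for an enabled, unmasked ISO OUT endpoint whose target frame has
 * elapsed, unmask GOUTNAKEFF and request a global OUT NAK so the
 * endpoint can be disabled and restarted, then clear the interrupt.
 */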
3602 static void dwc2_gadget_handle_incomplete_isoc_out(struct dwc2_hsotg *hsotg)
3603 {
3604 u32 gintsts;
3605 u32 gintmsk;
3606 u32 daintmsk;
3607 u32 epctrl;
3608 struct dwc2_hsotg_ep *hs_ep;
3609 int idx;
3610
3611 dev_dbg(hsotg->dev, "%s: GINTSTS_INCOMPL_SOOUT\n", __func__);
3612
3613 daintmsk = dwc2_readl(hsotg, DAINTMSK);
3614 daintmsk >>= DAINT_OUTEP_SHIFT;
3615
3616 for (idx = 1; idx < hsotg->num_of_eps; idx++) {
3617 hs_ep = hsotg->eps_out[idx];
3618
3619 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous)
3620 continue;
3621
3622 epctrl = dwc2_readl(hsotg, DOEPCTL(idx));
3623 if ((epctrl & DXEPCTL_EPENA) &&
3624 dwc2_gadget_target_frame_elapsed(hs_ep)) {
3625
3626 gintmsk = dwc2_readl(hsotg, GINTMSK);
3627 gintmsk |= GINTSTS_GOUTNAKEFF;
3628 dwc2_writel(hsotg, gintmsk, GINTMSK);
3629
3630 gintsts = dwc2_readl(hsotg, GINTSTS);
3631 if (!(gintsts & GINTSTS_GOUTNAKEFF)) {
3632 dwc2_set_bit(hsotg, DCTL, DCTL_SGOUTNAK);
3633 break;
3634 }
3635 }
3636 }
3637
3638
3639 dwc2_writel(hsotg, GINTSTS_INCOMPL_SOOUT, GINTSTS);
3640 }
3641
3642
3643
3644
3645
3646
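/*
 * dwc2_hsotg_irq - handle the device interrupt
 * @irq: The IRQ number triggered
 * @pw: The pw value passed when the handler was registered (the
 *      dwc2_hsotg state)
 *
 * Runs under the hsotg spinlock; returns IRQ_NONE when the controller
 * is in host mode. FIFO-related interrupts are retried a limited
 * number of times before the handler returns.
 */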
3647 static irqreturn_t dwc2_hsotg_irq(int irq, void *pw)
3648 {
3649 struct dwc2_hsotg *hsotg = pw;
3650 int retry_count = 8;
3651 u32 gintsts;
3652 u32 gintmsk;
3653
3654 if (!dwc2_is_device_mode(hsotg))
3655 return IRQ_NONE;
3656
3657 spin_lock(&hsotg->lock);
3658 irq_retry:
3659 gintsts = dwc2_readl(hsotg, GINTSTS);
3660 gintmsk = dwc2_readl(hsotg, GINTMSK);
3661
3662 dev_dbg(hsotg->dev, "%s: %08x %08x (%08x) retry %d\n",
3663 __func__, gintsts, gintsts & gintmsk, gintmsk, retry_count);
3664
3665 gintsts &= gintmsk;
3666
3667 if (gintsts & GINTSTS_RESETDET) {
3668 dev_dbg(hsotg->dev, "%s: USBRstDet\n", __func__);
3669
3670 dwc2_writel(hsotg, GINTSTS_RESETDET, GINTSTS);
3671
3672
3673 if (hsotg->lx_state == DWC2_L2) {
3674 dwc2_exit_partial_power_down(hsotg, true);
3675 hsotg->lx_state = DWC2_L0;
3676 }
3677 }
3678
3679 if (gintsts & (GINTSTS_USBRST | GINTSTS_RESETDET)) {
3680 u32 usb_status = dwc2_readl(hsotg, GOTGCTL);
3681 u32 connected = hsotg->connected;
3682
3683 dev_dbg(hsotg->dev, "%s: USBRst\n", __func__);
3684 dev_dbg(hsotg->dev, "GNPTXSTS=%08x\n",
3685 dwc2_readl(hsotg, GNPTXSTS));
3686
3687 dwc2_writel(hsotg, GINTSTS_USBRST, GINTSTS);
3688
3689
3690 dwc2_hsotg_disconnect(hsotg);
3691
3692
3693 dwc2_clear_bit(hsotg, DCFG, DCFG_DEVADDR_MASK);
3694
3695 if (usb_status & GOTGCTL_BSESVLD && connected)
3696 dwc2_hsotg_core_init_disconnected(hsotg, true);
3697 }
3698
3699 if (gintsts & GINTSTS_ENUMDONE) {
3700 dwc2_writel(hsotg, GINTSTS_ENUMDONE, GINTSTS);
3701
3702 dwc2_hsotg_irq_enumdone(hsotg);
3703 }
3704
3705 if (gintsts & (GINTSTS_OEPINT | GINTSTS_IEPINT)) {
3706 u32 daint = dwc2_readl(hsotg, DAINT);
3707 u32 daintmsk = dwc2_readl(hsotg, DAINTMSK);
3708 u32 daint_out, daint_in;
3709 int ep;
3710
3711 daint &= daintmsk;
3712 daint_out = daint >> DAINT_OUTEP_SHIFT;
3713 daint_in = daint & ~(daint_out << DAINT_OUTEP_SHIFT);
3714
3715 dev_dbg(hsotg->dev, "%s: daint=%08x\n", __func__, daint);
3716
3717 for (ep = 0; ep < hsotg->num_of_eps && daint_out;
3718 ep++, daint_out >>= 1) {
3719 if (daint_out & 1)
3720 dwc2_hsotg_epint(hsotg, ep, 0);
3721 }
3722
3723 for (ep = 0; ep < hsotg->num_of_eps && daint_in;
3724 ep++, daint_in >>= 1) {
3725 if (daint_in & 1)
3726 dwc2_hsotg_epint(hsotg, ep, 1);
3727 }
3728 }
3729
3730
3731
3732 if (gintsts & GINTSTS_NPTXFEMP) {
3733 dev_dbg(hsotg->dev, "NPTxFEmp\n");
3734
3735
3736
3737
3738
3739
3740
3741 dwc2_hsotg_disable_gsint(hsotg, GINTSTS_NPTXFEMP);
3742 dwc2_hsotg_irq_fifoempty(hsotg, false);
3743 }
3744
3745 if (gintsts & GINTSTS_PTXFEMP) {
3746 dev_dbg(hsotg->dev, "PTxFEmp\n");
3747
3748
3749
3750 dwc2_hsotg_disable_gsint(hsotg, GINTSTS_PTXFEMP);
3751 dwc2_hsotg_irq_fifoempty(hsotg, true);
3752 }
3753
3754 if (gintsts & GINTSTS_RXFLVL) {
3755
3756
3757
3758
3759
3760
3761 dwc2_hsotg_handle_rx(hsotg);
3762 }
3763
3764 if (gintsts & GINTSTS_ERLYSUSP) {
3765 dev_dbg(hsotg->dev, "GINTSTS_ErlySusp\n");
3766 dwc2_writel(hsotg, GINTSTS_ERLYSUSP, GINTSTS);
3767 }
3768
3769
3770
3771
3772
3773
3774
3775 if (gintsts & GINTSTS_GOUTNAKEFF) {
3776 u8 idx;
3777 u32 epctrl;
3778 u32 gintmsk;
3779 u32 daintmsk;
3780 struct dwc2_hsotg_ep *hs_ep;
3781
3782 daintmsk = dwc2_readl(hsotg, DAINTMSK);
3783 daintmsk >>= DAINT_OUTEP_SHIFT;
3784
3785 gintmsk = dwc2_readl(hsotg, GINTMSK);
3786 gintmsk &= ~GINTSTS_GOUTNAKEFF;
3787 dwc2_writel(hsotg, gintmsk, GINTMSK);
3788
3789 dev_dbg(hsotg->dev, "GOUTNakEff triggered\n");
3790 for (idx = 1; idx < hsotg->num_of_eps; idx++) {
3791 hs_ep = hsotg->eps_out[idx];
3792
3793 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous)
3794 continue;
3795
3796 epctrl = dwc2_readl(hsotg, DOEPCTL(idx));
3797
3798 if (epctrl & DXEPCTL_EPENA) {
3799 epctrl |= DXEPCTL_SNAK;
3800 epctrl |= DXEPCTL_EPDIS;
3801 dwc2_writel(hsotg, epctrl, DOEPCTL(idx));
3802 }
3803 }
3804
3805
3806 }
3807
3808 if (gintsts & GINTSTS_GINNAKEFF) {
3809 dev_info(hsotg->dev, "GINNakEff triggered\n");
3810
3811 dwc2_set_bit(hsotg, DCTL, DCTL_CGNPINNAK);
3812
3813 dwc2_hsotg_dump(hsotg);
3814 }
3815
3816 if (gintsts & GINTSTS_INCOMPL_SOIN)
3817 dwc2_gadget_handle_incomplete_isoc_in(hsotg);
3818
3819 if (gintsts & GINTSTS_INCOMPL_SOOUT)
3820 dwc2_gadget_handle_incomplete_isoc_out(hsotg);
3821
3822
3823
3824
3825
3826
3827 if (gintsts & IRQ_RETRY_MASK && --retry_count > 0)
3828 goto irq_retry;
3829
3830
3831 if (hsotg->params.service_interval)
3832 dwc2_gadget_wkup_alert_handler(hsotg);
3833
3834 spin_unlock(&hsotg->lock);
3835
3836 return IRQ_HANDLED;
3837 }
3838
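/*
 * dwc2_hsotg_ep_stop_xfr - stop any transfer in progress on an endpoint
 * @hsotg: The device state
 * @hs_ep: The endpoint to stop
 *
 * Set the appropriate (endpoint or global) NAK, wait for it to take
 * effect, then disable the endpoint. For IN endpoints the TX FIFO is
 * flushed afterwards; for OUT endpoints the global OUT NAK is cleared.
 */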
3839 static void dwc2_hsotg_ep_stop_xfr(struct dwc2_hsotg *hsotg,
3840 struct dwc2_hsotg_ep *hs_ep)
3841 {
3842 u32 epctrl_reg;
3843 u32 epint_reg;
3844
3845 epctrl_reg = hs_ep->dir_in ? DIEPCTL(hs_ep->index) :
3846 DOEPCTL(hs_ep->index);
3847 epint_reg = hs_ep->dir_in ? DIEPINT(hs_ep->index) :
3848 DOEPINT(hs_ep->index);
3849
3850 dev_dbg(hsotg->dev, "%s: stopping transfer on %s\n", __func__,
3851 hs_ep->name);
3852
3853 if (hs_ep->dir_in) {
3854 if (hsotg->dedicated_fifos || hs_ep->periodic) {
3855 dwc2_set_bit(hsotg, epctrl_reg, DXEPCTL_SNAK);
3856
3857 if (dwc2_hsotg_wait_bit_set(hsotg, epint_reg,
3858 DXEPINT_INEPNAKEFF, 100))
3859 dev_warn(hsotg->dev,
3860 "%s: timeout DIEPINT.NAKEFF\n",
3861 __func__);
3862 } else {
3863 dwc2_set_bit(hsotg, DCTL, DCTL_SGNPINNAK);
3864
3865 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS,
3866 GINTSTS_GINNAKEFF, 100))
3867 dev_warn(hsotg->dev,
3868 "%s: timeout GINTSTS.GINNAKEFF\n",
3869 __func__);
3870 }
3871 } else {
3872 if (!(dwc2_readl(hsotg, GINTSTS) & GINTSTS_GOUTNAKEFF))
3873 dwc2_set_bit(hsotg, DCTL, DCTL_SGOUTNAK);
3874
3875
3876 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS,
3877 GINTSTS_GOUTNAKEFF, 100))
3878 dev_warn(hsotg->dev, "%s: timeout GINTSTS.GOUTNAKEFF\n",
3879 __func__);
3880 }
3881
3882
3883 dwc2_set_bit(hsotg, epctrl_reg, DXEPCTL_EPDIS | DXEPCTL_SNAK);
3884
3885
3886 if (dwc2_hsotg_wait_bit_set(hsotg, epint_reg, DXEPINT_EPDISBLD, 100))
3887 dev_warn(hsotg->dev,
3888 "%s: timeout DOEPCTL.EPDisable\n", __func__);
3889
3890
3891 dwc2_set_bit(hsotg, epint_reg, DXEPINT_EPDISBLD);
3892
3893 if (hs_ep->dir_in) {
3894 unsigned short fifo_index;
3895
3896 if (hsotg->dedicated_fifos || hs_ep->periodic)
3897 fifo_index = hs_ep->fifo_index;
3898 else
3899 fifo_index = 0;
3900
3901
3902 dwc2_flush_tx_fifo(hsotg, fifo_index);
3903
3904
3905 if (!hsotg->dedicated_fifos && !hs_ep->periodic)
3906 dwc2_set_bit(hsotg, DCTL, DCTL_CGNPINNAK);
3907
3908 } else {
3909
3910 dwc2_set_bit(hsotg, DCTL, DCTL_CGOUTNAK);
3911 }
3912 }
3913
3914
3915
3916
3917
3918
3919
3920
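/*
 * dwc2_hsotg_ep_enable - enable the given endpoint
 * @ep: The USB endpoint to configure
 * @desc: The USB endpoint descriptor to configure with
 *
 * Called from the USB gadget core via usb_ep_enable(): program the
 * endpoint type and maximum packet size, assign a suitable TX FIFO for
 * IN endpoints on dedicated-FIFO cores, then enable the endpoint's
 * interrupt.
 */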
3921 static int dwc2_hsotg_ep_enable(struct usb_ep *ep,
3922 const struct usb_endpoint_descriptor *desc)
3923 {
3924 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
3925 struct dwc2_hsotg *hsotg = hs_ep->parent;
3926 unsigned long flags;
3927 unsigned int index = hs_ep->index;
3928 u32 epctrl_reg;
3929 u32 epctrl;
3930 u32 mps;
3931 u32 mc;
3932 u32 mask;
3933 unsigned int dir_in;
3934 unsigned int i, val, size;
3935 int ret = 0;
3936 unsigned char ep_type;
3937 int desc_num;
3938
3939 dev_dbg(hsotg->dev,
3940 "%s: ep %s: a 0x%02x, attr 0x%02x, mps 0x%04x, intr %d\n",
3941 __func__, ep->name, desc->bEndpointAddress, desc->bmAttributes,
3942 desc->wMaxPacketSize, desc->bInterval);
3943
3944
3945 if (index == 0) {
3946 dev_err(hsotg->dev, "%s: called for EP 0\n", __func__);
3947 return -EINVAL;
3948 }
3949
3950 dir_in = (desc->bEndpointAddress & USB_ENDPOINT_DIR_MASK) ? 1 : 0;
3951 if (dir_in != hs_ep->dir_in) {
3952 dev_err(hsotg->dev, "%s: direction mismatch!\n", __func__);
3953 return -EINVAL;
3954 }
3955
3956 ep_type = desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK;
3957 mps = usb_endpoint_maxp(desc);
3958 mc = usb_endpoint_maxp_mult(desc);
3959
3960
3961 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC &&
3962 dir_in && desc->bInterval > 10) {
3963 dev_err(hsotg->dev,
3964 "%s: ISOC IN, DDMA: bInterval>10 not supported!\n", __func__);
3965 return -EINVAL;
3966 }
3967
3968
3969 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC &&
3970 !dir_in && mc > 1) {
3971 dev_err(hsotg->dev,
3972 "%s: ISOC OUT, DDMA: HB not supported!\n", __func__);
3973 return -EINVAL;
3974 }
3975
3976
3977
3978 epctrl_reg = dir_in ? DIEPCTL(index) : DOEPCTL(index);
3979 epctrl = dwc2_readl(hsotg, epctrl_reg);
3980
3981 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x from 0x%08x\n",
3982 __func__, epctrl, epctrl_reg);
3983
3984 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC)
3985 desc_num = MAX_DMA_DESC_NUM_HS_ISOC;
3986 else
3987 desc_num = MAX_DMA_DESC_NUM_GENERIC;
3988
3989
3990 if (using_desc_dma(hsotg) && !hs_ep->desc_list) {
3991 hs_ep->desc_list = dmam_alloc_coherent(hsotg->dev,
3992 desc_num * sizeof(struct dwc2_dma_desc),
3993 &hs_ep->desc_list_dma, GFP_ATOMIC);
3994 if (!hs_ep->desc_list) {
3995 ret = -ENOMEM;
3996 goto error2;
3997 }
3998 }
3999
4000 spin_lock_irqsave(&hsotg->lock, flags);
4001
4002 epctrl &= ~(DXEPCTL_EPTYPE_MASK | DXEPCTL_MPS_MASK);
4003 epctrl |= DXEPCTL_MPS(mps);
4004
4005
4006
4007
4008
4009 epctrl |= DXEPCTL_USBACTEP;
4010
4011
4012 dwc2_hsotg_set_ep_maxpacket(hsotg, hs_ep->index, mps, mc, dir_in);
4013
4014
4015 hs_ep->isochronous = 0;
4016 hs_ep->periodic = 0;
4017 hs_ep->halted = 0;
4018 hs_ep->interval = desc->bInterval;
4019
4020 switch (ep_type) {
4021 case USB_ENDPOINT_XFER_ISOC:
4022 epctrl |= DXEPCTL_EPTYPE_ISO;
4023 epctrl |= DXEPCTL_SETEVENFR;
4024 hs_ep->isochronous = 1;
4025 hs_ep->interval = 1 << (desc->bInterval - 1);
4026 hs_ep->target_frame = TARGET_FRAME_INITIAL;
4027 hs_ep->next_desc = 0;
4028 hs_ep->compl_desc = 0;
4029 if (dir_in) {
4030 hs_ep->periodic = 1;
4031 mask = dwc2_readl(hsotg, DIEPMSK);
4032 mask |= DIEPMSK_NAKMSK;
4033 dwc2_writel(hsotg, mask, DIEPMSK);
4034 } else {
4035 mask = dwc2_readl(hsotg, DOEPMSK);
4036 mask |= DOEPMSK_OUTTKNEPDISMSK;
4037 dwc2_writel(hsotg, mask, DOEPMSK);
4038 }
4039 break;
4040
4041 case USB_ENDPOINT_XFER_BULK:
4042 epctrl |= DXEPCTL_EPTYPE_BULK;
4043 break;
4044
4045 case USB_ENDPOINT_XFER_INT:
4046 if (dir_in)
4047 hs_ep->periodic = 1;
4048
4049 if (hsotg->gadget.speed == USB_SPEED_HIGH)
4050 hs_ep->interval = 1 << (desc->bInterval - 1);
4051
4052 epctrl |= DXEPCTL_EPTYPE_INTERRUPT;
4053 break;
4054
4055 case USB_ENDPOINT_XFER_CONTROL:
4056 epctrl |= DXEPCTL_EPTYPE_CONTROL;
4057 break;
4058 }
4059
4060
4061
4062
4063
4064 if (dir_in && hsotg->dedicated_fifos) {
4065 unsigned fifo_count = dwc2_hsotg_tx_fifo_count(hsotg);
4066 u32 fifo_index = 0;
4067 u32 fifo_size = UINT_MAX;
4068
4069 size = hs_ep->ep.maxpacket * hs_ep->mc;
4070 for (i = 1; i <= fifo_count; ++i) {
4071 if (hsotg->fifo_map & (1 << i))
4072 continue;
4073 val = dwc2_readl(hsotg, DPTXFSIZN(i));
4074 val = (val >> FIFOSIZE_DEPTH_SHIFT) * 4;
4075 if (val < size)
4076 continue;
4077
4078 if (val < fifo_size) {
4079 fifo_size = val;
4080 fifo_index = i;
4081 }
4082 }
4083 if (!fifo_index) {
4084 dev_err(hsotg->dev,
4085 "%s: No suitable fifo found\n", __func__);
4086 ret = -ENOMEM;
4087 goto error1;
4088 }
4089 epctrl &= ~(DXEPCTL_TXFNUM_LIMIT << DXEPCTL_TXFNUM_SHIFT);
4090 hsotg->fifo_map |= 1 << fifo_index;
4091 epctrl |= DXEPCTL_TXFNUM(fifo_index);
4092 hs_ep->fifo_index = fifo_index;
4093 hs_ep->fifo_size = fifo_size;
4094 }
4095
4096
4097 if (index && !hs_ep->isochronous)
4098 epctrl |= DXEPCTL_SETD0PID;
4099
4100
4101
4102
4103
4104
4105
4106 if (hsotg->gadget.speed == USB_SPEED_FULL &&
4107 hs_ep->isochronous && dir_in) {
4108
4109
4110
4111
4112 u32 gsnpsid = dwc2_readl(hsotg, GSNPSID);
4113
4114 if ((gsnpsid >= DWC2_CORE_REV_2_72a &&
4115 gsnpsid <= DWC2_CORE_REV_4_00a) ||
4116 gsnpsid == DWC2_FS_IOT_REV_1_00a ||
4117 gsnpsid == DWC2_HS_IOT_REV_1_00a)
4118 epctrl |= DXEPCTL_CNAK;
4119 }
4120
4121 dev_dbg(hsotg->dev, "%s: write DxEPCTL=0x%08x\n",
4122 __func__, epctrl);
4123
4124 dwc2_writel(hsotg, epctrl, epctrl_reg);
4125 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x\n",
4126 __func__, dwc2_readl(hsotg, epctrl_reg));
4127
4128
4129 dwc2_hsotg_ctrl_epint(hsotg, index, dir_in, 1);
4130
4131 error1:
4132 spin_unlock_irqrestore(&hsotg->lock, flags);
4133
4134 error2:
4135 if (ret && using_desc_dma(hsotg) && hs_ep->desc_list) {
4136 dmam_free_coherent(hsotg->dev, desc_num *
4137 sizeof(struct dwc2_dma_desc),
4138 hs_ep->desc_list, hs_ep->desc_list_dma);
4139 hs_ep->desc_list = NULL;
4140 }
4141
4142 return ret;
4143 }
4144
4145
4146
4147
4148
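/*
 * dwc2_hsotg_ep_disable - disable the given endpoint
 * @ep: The endpoint to disable (must not be EP0)
 *
 * Stop any transfer in progress, deactivate the endpoint, disable its
 * interrupt, complete all queued requests with -ESHUTDOWN and release
 * its dedicated TX FIFO. The _lock variant below takes the hsotg lock
 * around this call.
 */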
4149 static int dwc2_hsotg_ep_disable(struct usb_ep *ep)
4150 {
4151 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
4152 struct dwc2_hsotg *hsotg = hs_ep->parent;
4153 int dir_in = hs_ep->dir_in;
4154 int index = hs_ep->index;
4155 u32 epctrl_reg;
4156 u32 ctrl;
4157
4158 dev_dbg(hsotg->dev, "%s(ep %p)\n", __func__, ep);
4159
4160 if (ep == &hsotg->eps_out[0]->ep) {
4161 dev_err(hsotg->dev, "%s: called for ep0\n", __func__);
4162 return -EINVAL;
4163 }
4164
4165 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) {
4166 dev_err(hsotg->dev, "%s: called in host mode?\n", __func__);
4167 return -EINVAL;
4168 }
4169
4170 epctrl_reg = dir_in ? DIEPCTL(index) : DOEPCTL(index);
4171
4172 ctrl = dwc2_readl(hsotg, epctrl_reg);
4173
4174 if (ctrl & DXEPCTL_EPENA)
4175 dwc2_hsotg_ep_stop_xfr(hsotg, hs_ep);
4176
4177 ctrl &= ~DXEPCTL_EPENA;
4178 ctrl &= ~DXEPCTL_USBACTEP;
4179 ctrl |= DXEPCTL_SNAK;
4180
4181 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl);
4182 dwc2_writel(hsotg, ctrl, epctrl_reg);
4183
4184
4185 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 0);
4186
4187
4188 kill_all_requests(hsotg, hs_ep, -ESHUTDOWN);
4189
4190 hsotg->fifo_map &= ~(1 << hs_ep->fifo_index);
4191 hs_ep->fifo_index = 0;
4192 hs_ep->fifo_size = 0;
4193
4194 return 0;
4195 }
4196
4197 static int dwc2_hsotg_ep_disable_lock(struct usb_ep *ep)
4198 {
4199 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
4200 struct dwc2_hsotg *hsotg = hs_ep->parent;
4201 unsigned long flags;
4202 int ret;
4203
4204 spin_lock_irqsave(&hsotg->lock, flags);
4205 ret = dwc2_hsotg_ep_disable(ep);
4206 spin_unlock_irqrestore(&hsotg->lock, flags);
4207 return ret;
4208 }
4209
4210
4211
4212
4213
4214
4215 static bool on_list(struct dwc2_hsotg_ep *ep, struct dwc2_hsotg_req *test)
4216 {
4217 struct dwc2_hsotg_req *req, *treq;
4218
4219 list_for_each_entry_safe(req, treq, &ep->queue, queue) {
4220 if (req == test)
4221 return true;
4222 }
4223
4224 return false;
4225 }
4226
4227
4228
4229
4230
4231
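/*
 * dwc2_hsotg_ep_dequeue - remove a request from an endpoint's queue
 * @ep: The endpoint the request was queued on
 * @req: The request to remove
 *
 * If the request is the one currently being processed, the transfer is
 * stopped first; the request is then completed with -ECONNRESET.
 */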
4232 static int dwc2_hsotg_ep_dequeue(struct usb_ep *ep, struct usb_request *req)
4233 {
4234 struct dwc2_hsotg_req *hs_req = our_req(req);
4235 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
4236 struct dwc2_hsotg *hs = hs_ep->parent;
4237 unsigned long flags;
4238
4239 dev_dbg(hs->dev, "ep_dequeue(%p,%p)\n", ep, req);
4240
4241 spin_lock_irqsave(&hs->lock, flags);
4242
4243 if (!on_list(hs_ep, hs_req)) {
4244 spin_unlock_irqrestore(&hs->lock, flags);
4245 return -EINVAL;
4246 }
4247
4248
4249 if (req == &hs_ep->req->req)
4250 dwc2_hsotg_ep_stop_xfr(hs, hs_ep);
4251
4252 dwc2_hsotg_complete_request(hs, hs_ep, hs_req, -ECONNRESET);
4253 spin_unlock_irqrestore(&hs->lock, flags);
4254
4255 return 0;
4256 }
4257
4258
4259
4260
4261
4262
4263
4264
4265
4266
4267
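/*
 * dwc2_hsotg_ep_sethalt - set/clear the halt (stall) condition
 * @ep: The endpoint to halt
 * @value: Non-zero to set the halt, zero to clear it
 * @now: If true, apply the halt even if requests are still queued
 *
 * EP0 can only be stalled (not un-halted) and isochronous endpoints
 * cannot be halted at all. Clearing the halt on a bulk or interrupt
 * endpoint also resets its data toggle to DATA0.
 */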
4268 static int dwc2_hsotg_ep_sethalt(struct usb_ep *ep, int value, bool now)
4269 {
4270 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
4271 struct dwc2_hsotg *hs = hs_ep->parent;
4272 int index = hs_ep->index;
4273 u32 epreg;
4274 u32 epctl;
4275 u32 xfertype;
4276
4277 dev_info(hs->dev, "%s(ep %p %s, %d)\n", __func__, ep, ep->name, value);
4278
4279 if (index == 0) {
4280 if (value)
4281 dwc2_hsotg_stall_ep0(hs);
4282 else
4283 dev_warn(hs->dev,
4284 "%s: can't clear halt on ep0\n", __func__);
4285 return 0;
4286 }
4287
4288 if (hs_ep->isochronous) {
4289 dev_err(hs->dev, "%s is an Isochronous Endpoint\n", ep->name);
4290 return -EINVAL;
4291 }
4292
4293 if (!now && value && !list_empty(&hs_ep->queue)) {
4294 dev_dbg(hs->dev, "%s request is pending, cannot halt\n",
4295 ep->name);
4296 return -EAGAIN;
4297 }
4298
4299 if (hs_ep->dir_in) {
4300 epreg = DIEPCTL(index);
4301 epctl = dwc2_readl(hs, epreg);
4302
4303 if (value) {
4304 epctl |= DXEPCTL_STALL | DXEPCTL_SNAK;
4305 if (epctl & DXEPCTL_EPENA)
4306 epctl |= DXEPCTL_EPDIS;
4307 } else {
4308 epctl &= ~DXEPCTL_STALL;
4309 xfertype = epctl & DXEPCTL_EPTYPE_MASK;
4310 if (xfertype == DXEPCTL_EPTYPE_BULK ||
4311 xfertype == DXEPCTL_EPTYPE_INTERRUPT)
4312 epctl |= DXEPCTL_SETD0PID;
4313 }
4314 dwc2_writel(hs, epctl, epreg);
4315 } else {
4316 epreg = DOEPCTL(index);
4317 epctl = dwc2_readl(hs, epreg);
4318
4319 if (value) {
4320 epctl |= DXEPCTL_STALL;
4321 } else {
4322 epctl &= ~DXEPCTL_STALL;
4323 xfertype = epctl & DXEPCTL_EPTYPE_MASK;
4324 if (xfertype == DXEPCTL_EPTYPE_BULK ||
4325 xfertype == DXEPCTL_EPTYPE_INTERRUPT)
4326 epctl |= DXEPCTL_SETD0PID;
4327 }
4328 dwc2_writel(hs, epctl, epreg);
4329 }
4330
4331 hs_ep->halted = value;
4332
4333 return 0;
4334 }
4335
4336
4337
4338
4339
4340
4341 static int dwc2_hsotg_ep_sethalt_lock(struct usb_ep *ep, int value)
4342 {
4343 struct dwc2_hsotg_ep *hs_ep = our_ep(ep);
4344 struct dwc2_hsotg *hs = hs_ep->parent;
4345 unsigned long flags = 0;
4346 int ret = 0;
4347
4348 spin_lock_irqsave(&hs->lock, flags);
4349 ret = dwc2_hsotg_ep_sethalt(ep, value, false);
4350 spin_unlock_irqrestore(&hs->lock, flags);
4351
4352 return ret;
4353 }
4354
4355 static const struct usb_ep_ops dwc2_hsotg_ep_ops = {
4356 .enable = dwc2_hsotg_ep_enable,
4357 .disable = dwc2_hsotg_ep_disable_lock,
4358 .alloc_request = dwc2_hsotg_ep_alloc_request,
4359 .free_request = dwc2_hsotg_ep_free_request,
4360 .queue = dwc2_hsotg_ep_queue_lock,
4361 .dequeue = dwc2_hsotg_ep_dequeue,
4362 .set_halt = dwc2_hsotg_ep_sethalt_lock,
4363
4364 };
4365
4366
4367
4368
4369
4370 static void dwc2_hsotg_init(struct dwc2_hsotg *hsotg)
4371 {
4372
4373
4374 dwc2_writel(hsotg, DIEPMSK_TIMEOUTMSK | DIEPMSK_AHBERRMSK |
4375 DIEPMSK_EPDISBLDMSK | DIEPMSK_XFERCOMPLMSK,
4376 DIEPMSK);
4377
4378 dwc2_writel(hsotg, DOEPMSK_SETUPMSK | DOEPMSK_AHBERRMSK |
4379 DOEPMSK_EPDISBLDMSK | DOEPMSK_XFERCOMPLMSK,
4380 DOEPMSK);
4381
4382 dwc2_writel(hsotg, 0, DAINTMSK);
4383
4384
4385 dwc2_set_bit(hsotg, DCTL, DCTL_SFTDISCON);
4386
4387
4388
4389 dev_dbg(hsotg->dev, "GRXFSIZ=0x%08x, GNPTXFSIZ=0x%08x\n",
4390 dwc2_readl(hsotg, GRXFSIZ),
4391 dwc2_readl(hsotg, GNPTXFSIZ));
4392
4393 dwc2_hsotg_init_fifo(hsotg);
4394
4395 if (using_dma(hsotg))
4396 dwc2_set_bit(hsotg, GAHBCFG, GAHBCFG_DMA_EN);
4397 }
4398
4399
4400
4401
4402
4403
4404
4405
4406
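/*
 * dwc2_hsotg_udc_start - prepare the udc for work
 * @gadget: The usb gadget state
 * @driver: The usb gadget driver
 *
 * Perform initialization to prepare the udc to receive commands from a
 * usb gadget driver: bind the driver, power up the hardware if in
 * peripheral-only mode and initialize the core when it is already in
 * device mode.
 */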
4407 static int dwc2_hsotg_udc_start(struct usb_gadget *gadget,
4408 struct usb_gadget_driver *driver)
4409 {
4410 struct dwc2_hsotg *hsotg = to_hsotg(gadget);
4411 unsigned long flags;
4412 int ret;
4413
4414 if (!hsotg) {
4415 pr_err("%s: called with no device\n", __func__);
4416 return -ENODEV;
4417 }
4418
4419 if (!driver) {
4420 dev_err(hsotg->dev, "%s: no driver\n", __func__);
4421 return -EINVAL;
4422 }
4423
4424 if (driver->max_speed < USB_SPEED_FULL)
4425 dev_err(hsotg->dev, "%s: bad speed\n", __func__);
4426
4427 if (!driver->setup) {
4428 dev_err(hsotg->dev, "%s: missing entry points\n", __func__);
4429 return -EINVAL;
4430 }
4431
4432 WARN_ON(hsotg->driver);
4433
4434 driver->driver.bus = NULL;
4435 hsotg->driver = driver;
4436 hsotg->gadget.dev.of_node = hsotg->dev->of_node;
4437 hsotg->gadget.speed = USB_SPEED_UNKNOWN;
4438
4439 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) {
4440 ret = dwc2_lowlevel_hw_enable(hsotg);
4441 if (ret)
4442 goto err;
4443 }
4444
4445 if (!IS_ERR_OR_NULL(hsotg->uphy))
4446 otg_set_peripheral(hsotg->uphy->otg, &hsotg->gadget);
4447
4448 spin_lock_irqsave(&hsotg->lock, flags);
4449 if (dwc2_hw_is_device(hsotg)) {
4450 dwc2_hsotg_init(hsotg);
4451 dwc2_hsotg_core_init_disconnected(hsotg, false);
4452 }
4453
4454 hsotg->enabled = 0;
4455 spin_unlock_irqrestore(&hsotg->lock, flags);
4456
4457 gadget->sg_supported = using_desc_dma(hsotg);
4458 dev_info(hsotg->dev, "bound driver %s\n", driver->driver.name);
4459
4460 return 0;
4461
4462 err:
4463 hsotg->driver = NULL;
4464 return ret;
4465 }
4466
4467
4468
4469
4470
4471
4472
4473 static int dwc2_hsotg_udc_stop(struct usb_gadget *gadget)
4474 {
4475 struct dwc2_hsotg *hsotg = to_hsotg(gadget);
4476 unsigned long flags = 0;
4477 int ep;
4478
4479 if (!hsotg)
4480 return -ENODEV;
4481
4482
4483 for (ep = 1; ep < hsotg->num_of_eps; ep++) {
4484 if (hsotg->eps_in[ep])
4485 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep);
4486 if (hsotg->eps_out[ep])
4487 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep);
4488 }
4489
4490 spin_lock_irqsave(&hsotg->lock, flags);
4491
4492 hsotg->driver = NULL;
4493 hsotg->gadget.speed = USB_SPEED_UNKNOWN;
4494 hsotg->enabled = 0;
4495
4496 spin_unlock_irqrestore(&hsotg->lock, flags);
4497
4498 if (!IS_ERR_OR_NULL(hsotg->uphy))
4499 otg_set_peripheral(hsotg->uphy->otg, NULL);
4500
4501 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL)
4502 dwc2_lowlevel_hw_disable(hsotg);
4503
4504 return 0;
4505 }
4506
4507
4508
4509
4510
4511
4512
4513 static int dwc2_hsotg_gadget_getframe(struct usb_gadget *gadget)
4514 {
4515 return dwc2_hsotg_read_frameno(to_hsotg(gadget));
4516 }
4517
4518
4519
4520
4521
4522
4523
4524
4525 static int dwc2_hsotg_pullup(struct usb_gadget *gadget, int is_on)
4526 {
4527 struct dwc2_hsotg *hsotg = to_hsotg(gadget);
4528 unsigned long flags = 0;
4529
4530 dev_dbg(hsotg->dev, "%s: is_on: %d op_state: %d\n", __func__, is_on,
4531 hsotg->op_state);
4532
4533
4534 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) {
4535 hsotg->enabled = is_on;
4536 return 0;
4537 }
4538
4539 spin_lock_irqsave(&hsotg->lock, flags);
4540 if (is_on) {
4541 hsotg->enabled = 1;
4542 dwc2_hsotg_core_init_disconnected(hsotg, false);
4543
4544 dwc2_enable_acg(hsotg);
4545 dwc2_hsotg_core_connect(hsotg);
4546 } else {
4547 dwc2_hsotg_core_disconnect(hsotg);
4548 dwc2_hsotg_disconnect(hsotg);
4549 hsotg->enabled = 0;
4550 }
4551
4552 hsotg->gadget.speed = USB_SPEED_UNKNOWN;
4553 spin_unlock_irqrestore(&hsotg->lock, flags);
4554
4555 return 0;
4556 }
4557
4558 static int dwc2_hsotg_vbus_session(struct usb_gadget *gadget, int is_active)
4559 {
4560 struct dwc2_hsotg *hsotg = to_hsotg(gadget);
4561 unsigned long flags;
4562
4563 dev_dbg(hsotg->dev, "%s: is_active: %d\n", __func__, is_active);
4564 spin_lock_irqsave(&hsotg->lock, flags);
4565
4566
4567
4568
4569
4570 if (hsotg->lx_state == DWC2_L2)
4571 dwc2_exit_partial_power_down(hsotg, false);
4572
4573 if (is_active) {
4574 hsotg->op_state = OTG_STATE_B_PERIPHERAL;
4575
4576 dwc2_hsotg_core_init_disconnected(hsotg, false);
4577 if (hsotg->enabled) {
4578
4579 dwc2_enable_acg(hsotg);
4580 dwc2_hsotg_core_connect(hsotg);
4581 }
4582 } else {
4583 dwc2_hsotg_core_disconnect(hsotg);
4584 dwc2_hsotg_disconnect(hsotg);
4585 }
4586
4587 spin_unlock_irqrestore(&hsotg->lock, flags);
4588 return 0;
4589 }
4590
4591
4592
4593
4594
4595
4596
4597
4598 static int dwc2_hsotg_vbus_draw(struct usb_gadget *gadget, unsigned int mA)
4599 {
4600 struct dwc2_hsotg *hsotg = to_hsotg(gadget);
4601
4602 if (IS_ERR_OR_NULL(hsotg->uphy))
4603 return -ENOTSUPP;
4604 return usb_phy_set_power(hsotg->uphy, mA);
4605 }
4606
4607 static const struct usb_gadget_ops dwc2_hsotg_gadget_ops = {
4608 .get_frame = dwc2_hsotg_gadget_getframe,
4609 .udc_start = dwc2_hsotg_udc_start,
4610 .udc_stop = dwc2_hsotg_udc_stop,
4611 .pullup = dwc2_hsotg_pullup,
4612 .vbus_session = dwc2_hsotg_vbus_session,
4613 .vbus_draw = dwc2_hsotg_vbus_draw,
4614 };
4615
4616
4617
4618
4619
4620
4621
4622
4623
4624
4625
4626
4627 static void dwc2_hsotg_initep(struct dwc2_hsotg *hsotg,
4628 struct dwc2_hsotg_ep *hs_ep,
4629 int epnum,
4630 bool dir_in)
4631 {
4632 char *dir;
4633
4634 if (epnum == 0)
4635 dir = "";
4636 else if (dir_in)
4637 dir = "in";
4638 else
4639 dir = "out";
4640
4641 hs_ep->dir_in = dir_in;
4642 hs_ep->index = epnum;
4643
4644 snprintf(hs_ep->name, sizeof(hs_ep->name), "ep%d%s", epnum, dir);
4645
4646 INIT_LIST_HEAD(&hs_ep->queue);
4647 INIT_LIST_HEAD(&hs_ep->ep.ep_list);
4648
4649
4650 if (epnum)
4651 list_add_tail(&hs_ep->ep.ep_list, &hsotg->gadget.ep_list);
4652
4653 hs_ep->parent = hsotg;
4654 hs_ep->ep.name = hs_ep->name;
4655
4656 if (hsotg->params.speed == DWC2_SPEED_PARAM_LOW)
4657 usb_ep_set_maxpacket_limit(&hs_ep->ep, 8);
4658 else
4659 usb_ep_set_maxpacket_limit(&hs_ep->ep,
4660 epnum ? 1024 : EP0_MPS_LIMIT);
4661 hs_ep->ep.ops = &dwc2_hsotg_ep_ops;
4662
4663 if (epnum == 0) {
4664 hs_ep->ep.caps.type_control = true;
4665 } else {
4666 if (hsotg->params.speed != DWC2_SPEED_PARAM_LOW) {
4667 hs_ep->ep.caps.type_iso = true;
4668 hs_ep->ep.caps.type_bulk = true;
4669 }
4670 hs_ep->ep.caps.type_int = true;
4671 }
4672
4673 if (dir_in)
4674 hs_ep->ep.caps.dir_in = true;
4675 else
4676 hs_ep->ep.caps.dir_out = true;
4677
4678
4679
4680
4681
4682
4683 if (using_dma(hsotg)) {
4684 u32 next = DXEPCTL_NEXTEP((epnum + 1) % 15);
4685
4686 if (dir_in)
4687 dwc2_writel(hsotg, next, DIEPCTL(epnum));
4688 else
4689 dwc2_writel(hsotg, next, DOEPCTL(epnum));
4690 }
4691 }
4692
4693
4694
4695
4696
4697
4698
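/*
 * dwc2_hsotg_hw_cfg - read the hardware configuration
 * @hsotg: Programming view of the DWC_otg controller
 *
 * Read the endpoint count and direction configuration from the
 * hardware parameters, allocate the per-endpoint state (EP0 shares
 * one structure for both directions) and record the FIFO layout.
 */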
4699 static int dwc2_hsotg_hw_cfg(struct dwc2_hsotg *hsotg)
4700 {
4701 u32 cfg;
4702 u32 ep_type;
4703 u32 i;
4704
4705
4706
4707 hsotg->num_of_eps = hsotg->hw_params.num_dev_ep;
4708
4709
4710 hsotg->num_of_eps++;
4711
4712 hsotg->eps_in[0] = devm_kzalloc(hsotg->dev,
4713 sizeof(struct dwc2_hsotg_ep),
4714 GFP_KERNEL);
4715 if (!hsotg->eps_in[0])
4716 return -ENOMEM;
4717
4718 hsotg->eps_out[0] = hsotg->eps_in[0];
4719
4720 cfg = hsotg->hw_params.dev_ep_dirs;
4721 for (i = 1, cfg >>= 2; i < hsotg->num_of_eps; i++, cfg >>= 2) {
4722 ep_type = cfg & 3;
4723
4724 if (!(ep_type & 2)) {
4725 hsotg->eps_in[i] = devm_kzalloc(hsotg->dev,
4726 sizeof(struct dwc2_hsotg_ep), GFP_KERNEL);
4727 if (!hsotg->eps_in[i])
4728 return -ENOMEM;
4729 }
4730
4731 if (!(ep_type & 1)) {
4732 hsotg->eps_out[i] = devm_kzalloc(hsotg->dev,
4733 sizeof(struct dwc2_hsotg_ep), GFP_KERNEL);
4734 if (!hsotg->eps_out[i])
4735 return -ENOMEM;
4736 }
4737 }
4738
4739 hsotg->fifo_mem = hsotg->hw_params.total_fifo_size;
4740 hsotg->dedicated_fifos = hsotg->hw_params.en_multiple_tx_fifo;
4741
4742 dev_info(hsotg->dev, "EPs: %d, %s fifos, %d entries in SPRAM\n",
4743 hsotg->num_of_eps,
4744 hsotg->dedicated_fifos ? "dedicated" : "shared",
4745 hsotg->fifo_mem);
4746 return 0;
4747 }
4748
4749
4750
4751
4752
4753
4754 static void dwc2_hsotg_dump(struct dwc2_hsotg *hsotg)
4755 {
4756 #ifdef DEBUG
4757 struct device *dev = hsotg->dev;
4758 u32 val;
4759 int idx;
4760
4761 dev_info(dev, "DCFG=0x%08x, DCTL=0x%08x, DIEPMSK=%08x\n",
4762 dwc2_readl(hsotg, DCFG), dwc2_readl(hsotg, DCTL),
4763 dwc2_readl(hsotg, DIEPMSK));
4764
4765 dev_info(dev, "GAHBCFG=0x%08x, GHWCFG1=0x%08x\n",
4766 dwc2_readl(hsotg, GAHBCFG), dwc2_readl(hsotg, GHWCFG1));
4767
4768 dev_info(dev, "GRXFSIZ=0x%08x, GNPTXFSIZ=0x%08x\n",
4769 dwc2_readl(hsotg, GRXFSIZ), dwc2_readl(hsotg, GNPTXFSIZ));
4770
4771
4772
4773 for (idx = 1; idx < hsotg->num_of_eps; idx++) {
4774 val = dwc2_readl(hsotg, DPTXFSIZN(idx));
4775 dev_info(dev, "DPTx[%d] FSize=%d, StAddr=0x%08x\n", idx,
4776 val >> FIFOSIZE_DEPTH_SHIFT,
4777 val & FIFOSIZE_STARTADDR_MASK);
4778 }
4779
4780 for (idx = 0; idx < hsotg->num_of_eps; idx++) {
4781 dev_info(dev,
4782 "ep%d-in: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", idx,
4783 dwc2_readl(hsotg, DIEPCTL(idx)),
4784 dwc2_readl(hsotg, DIEPTSIZ(idx)),
4785 dwc2_readl(hsotg, DIEPDMA(idx)));
4786
4787 val = dwc2_readl(hsotg, DOEPCTL(idx));
4788 dev_info(dev,
4789 "ep%d-out: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n",
4790 idx, dwc2_readl(hsotg, DOEPCTL(idx)),
4791 dwc2_readl(hsotg, DOEPTSIZ(idx)),
4792 dwc2_readl(hsotg, DOEPDMA(idx)));
4793 }
4794
4795 dev_info(dev, "DVBUSDIS=0x%08x, DVBUSPULSE=%08x\n",
4796 dwc2_readl(hsotg, DVBUSDIS), dwc2_readl(hsotg, DVBUSPULSE));
4797 #endif
4798 }
4799
4800
4801
4802
4803
4804
4805 int dwc2_gadget_init(struct dwc2_hsotg *hsotg)
4806 {
4807 struct device *dev = hsotg->dev;
4808 int epnum;
4809 int ret;
4810
4811
4812 dev_dbg(dev, "NonPeriodic TXFIFO size: %d\n",
4813 hsotg->params.g_np_tx_fifo_size);
4814 dev_dbg(dev, "RXFIFO size: %d\n", hsotg->params.g_rx_fifo_size);
4815
4816 hsotg->gadget.max_speed = USB_SPEED_HIGH;
4817 hsotg->gadget.ops = &dwc2_hsotg_gadget_ops;
4818 hsotg->gadget.name = dev_name(dev);
4819 hsotg->remote_wakeup_allowed = 0;
4820
4821 if (hsotg->params.lpm)
4822 hsotg->gadget.lpm_capable = true;
4823
4824 if (hsotg->dr_mode == USB_DR_MODE_OTG)
4825 hsotg->gadget.is_otg = 1;
4826 else if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL)
4827 hsotg->op_state = OTG_STATE_B_PERIPHERAL;
4828
4829 ret = dwc2_hsotg_hw_cfg(hsotg);
4830 if (ret) {
4831 dev_err(hsotg->dev, "Hardware configuration failed: %d\n", ret);
4832 return ret;
4833 }
4834
4835 hsotg->ctrl_buff = devm_kzalloc(hsotg->dev,
4836 DWC2_CTRL_BUFF_SIZE, GFP_KERNEL);
4837 if (!hsotg->ctrl_buff)
4838 return -ENOMEM;
4839
4840 hsotg->ep0_buff = devm_kzalloc(hsotg->dev,
4841 DWC2_CTRL_BUFF_SIZE, GFP_KERNEL);
4842 if (!hsotg->ep0_buff)
4843 return -ENOMEM;
4844
4845 if (using_desc_dma(hsotg)) {
4846 ret = dwc2_gadget_alloc_ctrl_desc_chains(hsotg);
4847 if (ret < 0)
4848 return ret;
4849 }
4850
4851 ret = devm_request_irq(hsotg->dev, hsotg->irq, dwc2_hsotg_irq,
4852 IRQF_SHARED, dev_name(hsotg->dev), hsotg);
4853 if (ret < 0) {
4854 dev_err(dev, "cannot claim IRQ for gadget\n");
4855 return ret;
4856 }
4857
4858
4859
4860 if (hsotg->num_of_eps == 0) {
4861 dev_err(dev, "wrong number of EPs (zero)\n");
4862 return -EINVAL;
4863 }
4864
4865
4866
4867 INIT_LIST_HEAD(&hsotg->gadget.ep_list);
4868 hsotg->gadget.ep0 = &hsotg->eps_out[0]->ep;
4869
4870
4871
4872 hsotg->ctrl_req = dwc2_hsotg_ep_alloc_request(&hsotg->eps_out[0]->ep,
4873 GFP_KERNEL);
4874 if (!hsotg->ctrl_req) {
4875 dev_err(dev, "failed to allocate ctrl req\n");
4876 return -ENOMEM;
4877 }
4878
4879
4880 for (epnum = 0; epnum < hsotg->num_of_eps; epnum++) {
4881 if (hsotg->eps_in[epnum])
4882 dwc2_hsotg_initep(hsotg, hsotg->eps_in[epnum],
4883 epnum, 1);
4884 if (hsotg->eps_out[epnum])
4885 dwc2_hsotg_initep(hsotg, hsotg->eps_out[epnum],
4886 epnum, 0);
4887 }
4888
4889 ret = usb_add_gadget_udc(dev, &hsotg->gadget);
4890 if (ret) {
4891 dwc2_hsotg_ep_free_request(&hsotg->eps_out[0]->ep,
4892 hsotg->ctrl_req);
4893 return ret;
4894 }
4895 dwc2_hsotg_dump(hsotg);
4896
4897 return 0;
4898 }
4899
4900
4901
4902
4903
4904
4905 int dwc2_hsotg_remove(struct dwc2_hsotg *hsotg)
4906 {
4907 usb_del_gadget_udc(&hsotg->gadget);
4908 dwc2_hsotg_ep_free_request(&hsotg->eps_out[0]->ep, hsotg->ctrl_req);
4909
4910 return 0;
4911 }
4912
4913 int dwc2_hsotg_suspend(struct dwc2_hsotg *hsotg)
4914 {
4915 unsigned long flags;
4916
4917 if (hsotg->lx_state != DWC2_L0)
4918 return 0;
4919
4920 if (hsotg->driver) {
4921 int ep;
4922
4923 dev_info(hsotg->dev, "suspending usb gadget %s\n",
4924 hsotg->driver->driver.name);
4925
4926 spin_lock_irqsave(&hsotg->lock, flags);
4927 if (hsotg->enabled)
4928 dwc2_hsotg_core_disconnect(hsotg);
4929 dwc2_hsotg_disconnect(hsotg);
4930 hsotg->gadget.speed = USB_SPEED_UNKNOWN;
4931 spin_unlock_irqrestore(&hsotg->lock, flags);
4932
4933 for (ep = 0; ep < hsotg->num_of_eps; ep++) {
4934 if (hsotg->eps_in[ep])
4935 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep);
4936 if (hsotg->eps_out[ep])
4937 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep);
4938 }
4939 }
4940
4941 return 0;
4942 }
4943
4944 int dwc2_hsotg_resume(struct dwc2_hsotg *hsotg)
4945 {
4946 unsigned long flags;
4947
4948 if (hsotg->lx_state == DWC2_L2)
4949 return 0;
4950
4951 if (hsotg->driver) {
4952 dev_info(hsotg->dev, "resuming usb gadget %s\n",
4953 hsotg->driver->driver.name);
4954
4955 spin_lock_irqsave(&hsotg->lock, flags);
4956 dwc2_hsotg_core_init_disconnected(hsotg, false);
4957 if (hsotg->enabled) {
4958
4959 dwc2_enable_acg(hsotg);
4960 dwc2_hsotg_core_connect(hsotg);
4961 }
4962 spin_unlock_irqrestore(&hsotg->lock, flags);
4963 }
4964
4965 return 0;
4966 }
4967
4968
4969
4970
4971
4972
4973
4974
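/*
 * dwc2_backup_device_registers - back up the device-mode registers
 * @hsotg: Programming view of the DWC_otg controller
 *
 * Save DCFG, DCTL, the interrupt masks and the per-endpoint control,
 * size and DMA registers so they can be restored after the controller
 * loses power while suspended. The current data PID of each endpoint
 * is converted into a SETD0PID/SETD1PID request so it is reprogrammed
 * correctly on restore.
 */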
4975 int dwc2_backup_device_registers(struct dwc2_hsotg *hsotg)
4976 {
4977 struct dwc2_dregs_backup *dr;
4978 int i;
4979
4980 dev_dbg(hsotg->dev, "%s\n", __func__);
4981
4982
4983 dr = &hsotg->dr_backup;
4984
4985 dr->dcfg = dwc2_readl(hsotg, DCFG);
4986 dr->dctl = dwc2_readl(hsotg, DCTL);
4987 dr->daintmsk = dwc2_readl(hsotg, DAINTMSK);
4988 dr->diepmsk = dwc2_readl(hsotg, DIEPMSK);
4989 dr->doepmsk = dwc2_readl(hsotg, DOEPMSK);
4990
4991 for (i = 0; i < hsotg->num_of_eps; i++) {
4992
4993 dr->diepctl[i] = dwc2_readl(hsotg, DIEPCTL(i));
4994
4995
4996 if (dr->diepctl[i] & DXEPCTL_DPID)
4997 dr->diepctl[i] |= DXEPCTL_SETD1PID;
4998 else
4999 dr->diepctl[i] |= DXEPCTL_SETD0PID;
5000
5001 dr->dieptsiz[i] = dwc2_readl(hsotg, DIEPTSIZ(i));
5002 dr->diepdma[i] = dwc2_readl(hsotg, DIEPDMA(i));
5003
5004
5005 dr->doepctl[i] = dwc2_readl(hsotg, DOEPCTL(i));
5006
5007
5008 if (dr->doepctl[i] & DXEPCTL_DPID)
5009 dr->doepctl[i] |= DXEPCTL_SETD1PID;
5010 else
5011 dr->doepctl[i] |= DXEPCTL_SETD0PID;
5012
5013 dr->doeptsiz[i] = dwc2_readl(hsotg, DOEPTSIZ(i));
5014 dr->doepdma[i] = dwc2_readl(hsotg, DOEPDMA(i));
5015 dr->dtxfsiz[i] = dwc2_readl(hsotg, DPTXFSIZN(i));
5016 }
5017 dr->valid = true;
5018 return 0;
5019 }
5020
5021
5022
5023
5024
5025
5026
5027
5028
5029
5030
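/*
 * dwc2_restore_device_registers - restore the device-mode registers
 * @hsotg: Programming view of the DWC_otg controller
 * @remote_wakeup: Non-zero if the restore is part of a remote wakeup
 *
 * Restore the registers saved by dwc2_backup_device_registers(). DCTL
 * is only rewritten when not resuming via remote wakeup, and for
 * descriptor-DMA operation the DMA address of an enabled endpoint is
 * pointed back at its descriptor list.
 */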
5031 int dwc2_restore_device_registers(struct dwc2_hsotg *hsotg, int remote_wakeup)
5032 {
5033 struct dwc2_dregs_backup *dr;
5034 int i;
5035
5036 dev_dbg(hsotg->dev, "%s\n", __func__);
5037
5038
5039 dr = &hsotg->dr_backup;
5040 if (!dr->valid) {
5041 dev_err(hsotg->dev, "%s: no device registers to restore\n",
5042 __func__);
5043 return -EINVAL;
5044 }
5045 dr->valid = false;
5046
5047 if (!remote_wakeup)
5048 dwc2_writel(hsotg, dr->dctl, DCTL);
5049
5050 dwc2_writel(hsotg, dr->daintmsk, DAINTMSK);
5051 dwc2_writel(hsotg, dr->diepmsk, DIEPMSK);
5052 dwc2_writel(hsotg, dr->doepmsk, DOEPMSK);
5053
5054 for (i = 0; i < hsotg->num_of_eps; i++) {
5055
5056 dwc2_writel(hsotg, dr->dieptsiz[i], DIEPTSIZ(i));
5057 dwc2_writel(hsotg, dr->diepdma[i], DIEPDMA(i));
5058 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i));
5059
5060
5061
5062
5063
5064 if (hsotg->params.g_dma_desc &&
5065 (dr->diepctl[i] & DXEPCTL_EPENA))
5066 dr->diepdma[i] = hsotg->eps_in[i]->desc_list_dma;
5067 dwc2_writel(hsotg, dr->dtxfsiz[i], DPTXFSIZN(i));
5068 dwc2_writel(hsotg, dr->diepctl[i], DIEPCTL(i));
5069
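/* Restore OUT endpoint registers */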
5070 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i));
5075
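/* Same descriptor DMA handling for enabled OUT endpoints */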
5076 if (hsotg->params.g_dma_desc &&
5077 (dr->doepctl[i] & DXEPCTL_EPENA))
5078 dr->doepdma[i] = hsotg->eps_out[i]->desc_list_dma;
5079 dwc2_writel(hsotg, dr->doepdma[i], DOEPDMA(i));
5080 dwc2_writel(hsotg, dr->doepctl[i], DOEPCTL(i));
5081 }
5082
5083 return 0;
5084 }
5091
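/**
 * dwc2_gadget_init_lpm() - Configure Link Power Management (LPM) support
 * in device mode according to the core parameters.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */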
5092 void dwc2_gadget_init_lpm(struct dwc2_hsotg *hsotg)
5093 {
5094 u32 val;
5095
5096 if (!hsotg->params.lpm)
5097 return;
5098
5099 val = GLPMCFG_LPMCAP | GLPMCFG_APPL1RES;
5100 val |= hsotg->params.hird_threshold_en ? GLPMCFG_HIRD_THRES_EN : 0;
5101 val |= hsotg->params.lpm_clock_gating ? GLPMCFG_ENBLSLPM : 0;
5102 val |= hsotg->params.hird_threshold << GLPMCFG_HIRD_THRES_SHIFT;
5103 val |= hsotg->params.besl ? GLPMCFG_ENBESL : 0;
5104 val |= GLPMCFG_LPM_REJECT_CTRL_CONTROL;
5105 val |= GLPMCFG_LPM_ACCEPT_CTRL_ISOC;
5106 dwc2_writel(hsotg, val, GLPMCFG);
5107 dev_dbg(hsotg->dev, "GLPMCFG=0x%08x\n", dwc2_readl(hsotg, GLPMCFG));
5108
5109
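/* Unmask the WKUP_ALERT interrupt when service interval mode is used */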
5110 if (hsotg->params.service_interval)
5111 dwc2_set_bit(hsotg, GINTMSK2, GINTMSK2_WKUP_ALERT_INT_MSK);
5112 }
5119
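/**
 * dwc2_gadget_program_ref_clk() - Program the GREFCLK register with the
 * reference clock period and SOF count wakeup alert values.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */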
5120 void dwc2_gadget_program_ref_clk(struct dwc2_hsotg *hsotg)
5121 {
5122 u32 val = 0;
5123
5124 val |= GREFCLK_REF_CLK_MODE;
5125 val |= hsotg->params.ref_clk_per << GREFCLK_REFCLKPER_SHIFT;
5126 val |= hsotg->params.sof_cnt_wkup_alert <<
5127 GREFCLK_SOF_CNT_WKUP_ALERT_SHIFT;
5128
5129 dwc2_writel(hsotg, val, GREFCLK);
5130 dev_dbg(hsotg->dev, "GREFCLK=0x%08x\n", dwc2_readl(hsotg, GREFCLK));
5131 }
5139
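/**
 * dwc2_gadget_enter_hibernation() - Put the controller into hibernation
 * in device mode, backing up registers and powering down the core.
 *
 * @hsotg: Programming view of the DWC_otg controller
 *
 * Return: 0 on success, a negative error code if the register backup fails.
 */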
5140 int dwc2_gadget_enter_hibernation(struct dwc2_hsotg *hsotg)
5141 {
5142 u32 gpwrdn;
5143 int ret = 0;
5144
5145
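/* Change to the L2 (suspend) state */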
5146 hsotg->lx_state = DWC2_L2;
5147 dev_dbg(hsotg->dev, "Start of hibernation completed\n");
5148 ret = dwc2_backup_global_registers(hsotg);
5149 if (ret) {
5150 dev_err(hsotg->dev, "%s: failed to backup global registers\n",
5151 __func__);
5152 return ret;
5153 }
5154 ret = dwc2_backup_device_registers(hsotg);
5155 if (ret) {
5156 dev_err(hsotg->dev, "%s: failed to backup device registers\n",
5157 __func__);
5158 return ret;
5159 }
5160
5161 gpwrdn = GPWRDN_PWRDNRSTN;
5162 gpwrdn |= GPWRDN_PMUACTV;
5163 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5164 udelay(10);
5165
5166
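/* Note that the controller is now hibernated */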
5167 hsotg->hibernated = 1;
5168
5169
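/* Route interrupts to the PMU (wakeup logic) */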
5170 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5171 gpwrdn |= GPWRDN_PMUINTSEL;
5172 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5173 udelay(10);
5174
5175
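/* Unmask reset-detect, line-state-change and status-change wakeup interrupts */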
5176 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5177 gpwrdn |= GPWRDN_RST_DET_MSK;
5178 gpwrdn |= GPWRDN_LNSTSCHG_MSK;
5179 gpwrdn |= GPWRDN_STS_CHGINT_MSK;
5180 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5181 udelay(10);
5182
5183
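/* Enable the power-down clamp */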
5184 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5185 gpwrdn |= GPWRDN_PWRDNCLMP;
5186 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5187 udelay(10);
5188
5189
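/* Switch off core power */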
5190 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5191 gpwrdn |= GPWRDN_PWRDNSWTCH;
5192 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5193 udelay(10);
5194
5195
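/* Keep a copy of GPWRDN for later use by the wakeup interrupt handling */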
5196 hsotg->gr_backup.gpwrdn = dwc2_readl(hsotg, GPWRDN);
5197 dev_dbg(hsotg->dev, "Hibernation completed\n");
5198
5199 return ret;
5200 }
5212
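/**
 * dwc2_gadget_exit_hibernation() - Bring the controller out of device-mode
 * hibernation on host-initiated resume/reset or device-initiated remote
 * wakeup, restoring the previously backed-up registers.
 *
 * @hsotg: Programming view of the DWC_otg controller
 * @rem_wakeup: Non-zero if the resume was initiated by device remote wakeup.
 * @reset: Non-zero if the resume was initiated by a bus reset.
 *
 * Return: 0 on success, 1 if the controller was not hibernated, or a
 * negative error code if register restore fails.
 */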
5213 int dwc2_gadget_exit_hibernation(struct dwc2_hsotg *hsotg,
5214 int rem_wakeup, int reset)
5215 {
5216 u32 pcgcctl;
5217 u32 gpwrdn;
5218 u32 dctl;
5219 int ret = 0;
5220 struct dwc2_gregs_backup *gr;
5221 struct dwc2_dregs_backup *dr;
5222
5223 gr = &hsotg->gr_backup;
5224 dr = &hsotg->dr_backup;
5225
5226 if (!hsotg->hibernated) {
5227 dev_dbg(hsotg->dev, "Already exited from Hibernation\n");
5228 return 1;
5229 }
5230 dev_dbg(hsotg->dev,
5231 "%s: called with rem_wakeup = %d reset = %d\n",
5232 __func__, rem_wakeup, reset);
5233
5234 dwc2_hib_restore_common(hsotg, rem_wakeup, 0);
5235
5236 if (!reset) {
5237
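/* Clear all pending interrupts */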
5238 dwc2_writel(hsotg, 0xffffffff, GINTSTS);
5239 }
5240
5241
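/* De-assert the restore signal */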
5242 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5243 gpwrdn &= ~GPWRDN_RESTORE;
5244 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5245 udelay(10);
5246
5247 if (!rem_wakeup) {
5248 pcgcctl = dwc2_readl(hsotg, PCGCTL);
5249 pcgcctl &= ~PCGCTL_RSTPDWNMODULE;
5250 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5251 }
5252
5253
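/* Restore GUSBCFG, DCFG and DCTL */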
5254 dwc2_writel(hsotg, gr->gusbcfg, GUSBCFG);
5255 dwc2_writel(hsotg, dr->dcfg, DCFG);
5256 dwc2_writel(hsotg, dr->dctl, DCTL);
5257
5258
5259 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5260 gpwrdn &= ~GPWRDN_PMUACTV;
5261 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5262
5263 if (rem_wakeup) {
5264 udelay(10);
5265
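/* Start remote wakeup signalling on the bus */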
5266 dwc2_writel(hsotg, dr->dctl | DCTL_RMTWKUPSIG, DCTL);
5267 } else {
5268 udelay(50);
5269
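/* Set the power-on programming done bit */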
5270 dctl = dwc2_readl(hsotg, DCTL);
5271 dctl |= DCTL_PWRONPRGDONE;
5272 dwc2_writel(hsotg, dctl, DCTL);
5273 }
5274
5275 mdelay(2);
5276
5277 dwc2_writel(hsotg, 0xffffffff, GINTSTS);
5278
5279
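/* Restore global registers */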
5280 ret = dwc2_restore_global_registers(hsotg);
5281 if (ret) {
5282 dev_err(hsotg->dev, "%s: failed to restore registers\n",
5283 __func__);
5284 return ret;
5285 }
5286
5287
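/* Restore device registers */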
5288 ret = dwc2_restore_device_registers(hsotg, rem_wakeup);
5289 if (ret) {
5290 dev_err(hsotg->dev, "%s: failed to restore device registers\n",
5291 __func__);
5292 return ret;
5293 }
5294
5295 if (rem_wakeup) {
5296 mdelay(10);
5297 dctl = dwc2_readl(hsotg, DCTL);
5298 dctl &= ~DCTL_RMTWKUPSIG;
5299 dwc2_writel(hsotg, dctl, DCTL);
5300 }
5301
5302 hsotg->hibernated = 0;
5303 hsotg->lx_state = DWC2_L0;
5304 dev_dbg(hsotg->dev, "Hibernation recovery completed\n");
5305
5306 return ret;
5307 }