Lines matching refs: musb_ep
57 struct musb *musb, struct musb_ep *musb_ep) in map_dma_buffer() argument
64 if (!is_dma_capable() || !musb_ep->dma) in map_dma_buffer()
72 compatible = dma->is_compatible(musb_ep->dma, in map_dma_buffer()
73 musb_ep->packet_sz, request->request.buf, in map_dma_buffer()
110 struct musb_ep *musb_ep = request->ep; in unmap_dma_buffer() local
112 if (!is_buffer_mapped(request) || !musb_ep->dma) in unmap_dma_buffer()
147 struct musb_ep *ep, in musb_g_giveback()
190 static void nuke(struct musb_ep *ep, const int status) in nuke()
241 static inline int max_ep_writesize(struct musb *musb, struct musb_ep *ep) in max_ep_writesize()
259 struct musb_ep *musb_ep; in txstate() local
265 musb_ep = req->ep; in txstate()
268 if (!musb_ep->desc) { in txstate()
270 musb_ep->end_point.name); in txstate()
275 if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) { in txstate()
284 fifo_count = min(max_ep_writesize(musb, musb_ep), in txstate()
289 musb_ep->end_point.name, csr); in txstate()
295 musb_ep->end_point.name, csr); in txstate()
300 epnum, musb_ep->packet_sz, fifo_count, in txstate()
310 musb_ep->dma->max_len); in txstate()
318 if (request_size < musb_ep->packet_sz) in txstate()
319 musb_ep->dma->desired_mode = 0; in txstate()
321 musb_ep->dma->desired_mode = 1; in txstate()
324 musb_ep->dma, musb_ep->packet_sz, in txstate()
325 musb_ep->dma->desired_mode, in txstate()
328 if (musb_ep->dma->desired_mode == 0) { in txstate()
356 if (!musb_ep->hb_mult || in txstate()
357 (musb_ep->hb_mult && in txstate()
359 musb_ep->type))) in txstate()
394 musb_ep->dma, musb_ep->packet_sz, in txstate()
399 c->channel_release(musb_ep->dma); in txstate()
400 musb_ep->dma = NULL; in txstate()
407 musb_ep->dma, musb_ep->packet_sz, in txstate()
421 musb_write_fifo(musb_ep->hw_ep, fifo_count, in txstate()
431 musb_ep->end_point.name, use_dma ? "dma" : "pio", in txstate()
448 struct musb_ep *musb_ep = &musb->endpoints[epnum].ep_in; in musb_g_tx() local
453 req = next_request(musb_ep); in musb_g_tx()
457 dev_dbg(musb->controller, "<== %s, txcsr %04x\n", musb_ep->end_point.name, csr); in musb_g_tx()
459 dma = is_dma_capable() ? musb_ep->dma : NULL; in musb_g_tx()
486 dev_dbg(musb->controller, "%s dma still busy?\n", musb_ep->end_point.name); in musb_g_tx()
501 request->actual += musb_ep->dma->actual_len; in musb_g_tx()
503 epnum, csr, musb_ep->dma->actual_len, request); in musb_g_tx()
511 && (request->length % musb_ep->packet_sz == 0) in musb_g_tx()
516 (musb_ep->packet_sz - 1)))) in musb_g_tx()
533 musb_g_giveback(musb_ep, request, 0); in musb_g_tx()
543 req = musb_ep->desc ? next_request(musb_ep) : NULL; in musb_g_tx()
546 musb_ep->end_point.name); in musb_g_tx()
564 struct musb_ep *musb_ep; in rxstate() local
573 musb_ep = &hw_ep->ep_in; in rxstate()
575 musb_ep = &hw_ep->ep_out; in rxstate()
577 fifo_count = musb_ep->packet_sz; in rxstate()
580 if (!musb_ep->desc) { in rxstate()
582 musb_ep->end_point.name); in rxstate()
587 if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) { in rxstate()
594 musb_ep->end_point.name, csr); in rxstate()
600 struct dma_channel *channel = musb_ep->dma; in rxstate()
608 musb_ep->packet_sz, in rxstate()
634 if (request->short_not_ok && fifo_count == musb_ep->packet_sz) in rxstate()
648 channel = musb_ep->dma; in rxstate()
691 musb_ep->dma->desired_mode = 1; in rxstate()
693 if (!musb_ep->hb_mult && in rxstate()
694 musb_ep->hw_ep->rx_double_buffered) in rxstate()
701 musb_ep->dma->desired_mode = 0; in rxstate()
706 musb_ep->packet_sz, in rxstate()
724 channel = musb_ep->dma; in rxstate()
727 if (fifo_count < musb_ep->packet_sz) in rxstate()
746 if (transfer_size <= musb_ep->packet_sz) { in rxstate()
747 musb_ep->dma->desired_mode = 0; in rxstate()
749 musb_ep->dma->desired_mode = 1; in rxstate()
756 musb_ep->packet_sz, in rxstate()
768 musb_ep->end_point.name, in rxstate()
770 musb_ep->packet_sz); in rxstate()
777 struct dma_channel *channel = musb_ep->dma; in rxstate()
782 musb_ep->packet_sz, in rxstate()
806 musb_read_fifo(musb_ep->hw_ep, fifo_count, (u8 *) in rxstate()
823 fifo_count < musb_ep->packet_sz) in rxstate()
824 musb_g_giveback(musb_ep, request, 0); in rxstate()
836 struct musb_ep *musb_ep; in musb_g_rx() local
842 musb_ep = &hw_ep->ep_in; in musb_g_rx()
844 musb_ep = &hw_ep->ep_out; in musb_g_rx()
848 req = next_request(musb_ep); in musb_g_rx()
855 dma = is_dma_capable() ? musb_ep->dma : NULL; in musb_g_rx()
857 dev_dbg(musb->controller, "<== %s, rxcsr %04x%s %p\n", musb_ep->end_point.name, in musb_g_rx()
872 dev_dbg(musb->controller, "%s iso overrun on %p\n", musb_ep->name, request); in musb_g_rx()
878 dev_dbg(musb->controller, "%s, incomprx\n", musb_ep->end_point.name); in musb_g_rx()
884 musb_ep->end_point.name, csr); in musb_g_rx()
895 request->actual += musb_ep->dma->actual_len; in musb_g_rx()
900 musb_ep->dma->actual_len, request); in musb_g_rx()
907 & (musb_ep->packet_sz - 1))) { in musb_g_rx()
915 && (musb_ep->dma->actual_len in musb_g_rx()
916 == musb_ep->packet_sz)) { in musb_g_rx()
927 musb_g_giveback(musb_ep, request, 0); in musb_g_rx()
938 req = next_request(musb_ep); in musb_g_rx()
956 struct musb_ep *musb_ep; in musb_gadget_enable() local
969 musb_ep = to_musb_ep(ep); in musb_gadget_enable()
970 hw_ep = musb_ep->hw_ep; in musb_gadget_enable()
972 musb = musb_ep->musb; in musb_gadget_enable()
974 epnum = musb_ep->current_epnum; in musb_gadget_enable()
978 if (musb_ep->desc) { in musb_gadget_enable()
982 musb_ep->type = usb_endpoint_type(desc); in musb_gadget_enable()
1002 musb_ep->hb_mult = (tmp >> 11) & 3; in musb_gadget_enable()
1004 musb_ep->hb_mult = 0; in musb_gadget_enable()
1007 musb_ep->packet_sz = tmp & 0x7ff; in musb_gadget_enable()
1008 tmp = musb_ep->packet_sz * (musb_ep->hb_mult + 1); in musb_gadget_enable()
1017 musb_ep->is_in = 1; in musb_gadget_enable()
1018 if (!musb_ep->is_in) in musb_gadget_enable()
1038 if (can_bulk_split(musb, musb_ep->type)) in musb_gadget_enable()
1039 musb_ep->hb_mult = (hw_ep->max_packet_sz_tx / in musb_gadget_enable()
1040 musb_ep->packet_sz) - 1; in musb_gadget_enable()
1041 musb_writew(regs, MUSB_TXMAXP, musb_ep->packet_sz in musb_gadget_enable()
1042 | (musb_ep->hb_mult << 11)); in musb_gadget_enable()
1049 if (musb_ep->type == USB_ENDPOINT_XFER_ISOC) in musb_gadget_enable()
1060 musb_ep->is_in = 0; in musb_gadget_enable()
1061 if (musb_ep->is_in) in musb_gadget_enable()
1081 musb_writew(regs, MUSB_RXMAXP, musb_ep->packet_sz in musb_gadget_enable()
1082 | (musb_ep->hb_mult << 11)); in musb_gadget_enable()
1092 if (musb_ep->type == USB_ENDPOINT_XFER_ISOC) in musb_gadget_enable()
1094 else if (musb_ep->type == USB_ENDPOINT_XFER_INT) in musb_gadget_enable()
1108 musb_ep->dma = c->channel_alloc(c, hw_ep, in musb_gadget_enable()
1111 musb_ep->dma = NULL; in musb_gadget_enable()
1113 musb_ep->desc = desc; in musb_gadget_enable()
1114 musb_ep->busy = 0; in musb_gadget_enable()
1115 musb_ep->wedged = 0; in musb_gadget_enable()
1119 musb_driver_name, musb_ep->end_point.name, in musb_gadget_enable()
1120 ({ char *s; switch (musb_ep->type) { in musb_gadget_enable()
1125 musb_ep->is_in ? "IN" : "OUT", in musb_gadget_enable()
1126 musb_ep->dma ? "dma, " : "", in musb_gadget_enable()
1127 musb_ep->packet_sz); in musb_gadget_enable()
1144 struct musb_ep *musb_ep; in musb_gadget_disable() local
1148 musb_ep = to_musb_ep(ep); in musb_gadget_disable()
1149 musb = musb_ep->musb; in musb_gadget_disable()
1150 epnum = musb_ep->current_epnum; in musb_gadget_disable()
1157 if (musb_ep->is_in) { in musb_gadget_disable()
1167 musb_ep->desc = NULL; in musb_gadget_disable()
1168 musb_ep->end_point.desc = NULL; in musb_gadget_disable()
1171 nuke(musb_ep, -ESHUTDOWN); in musb_gadget_disable()
1177 dev_dbg(musb->controller, "%s\n", musb_ep->end_point.name); in musb_gadget_disable()
1188 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_alloc_request() local
1189 struct musb *musb = musb_ep->musb; in musb_alloc_request()
1199 request->epnum = musb_ep->current_epnum; in musb_alloc_request()
1200 request->ep = musb_ep; in musb_alloc_request()
1242 struct musb_ep *musb_ep; in musb_gadget_queue() local
1253 musb_ep = to_musb_ep(ep); in musb_gadget_queue()
1254 musb = musb_ep->musb; in musb_gadget_queue()
1259 if (request->ep != musb_ep) in musb_gadget_queue()
1267 request->epnum = musb_ep->current_epnum; in musb_gadget_queue()
1268 request->tx = musb_ep->is_in; in musb_gadget_queue()
1270 map_dma_buffer(request, musb, musb_ep); in musb_gadget_queue()
1275 if (!musb_ep->desc) { in musb_gadget_queue()
1284 list_add_tail(&request->list, &musb_ep->req_list); in musb_gadget_queue()
1287 if (!musb_ep->busy && &request->list == musb_ep->req_list.next) in musb_gadget_queue()
1297 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_dequeue() local
1302 struct musb *musb = musb_ep->musb; in musb_gadget_dequeue()
1304 if (!ep || !request || to_musb_request(request)->ep != musb_ep) in musb_gadget_dequeue()
1309 list_for_each_entry(r, &musb_ep->req_list, list) { in musb_gadget_dequeue()
1320 if (musb_ep->req_list.next != &req->list || musb_ep->busy) in musb_gadget_dequeue()
1321 musb_g_giveback(musb_ep, request, -ECONNRESET); in musb_gadget_dequeue()
1324 else if (is_dma_capable() && musb_ep->dma) { in musb_gadget_dequeue()
1327 musb_ep_select(musb->mregs, musb_ep->current_epnum); in musb_gadget_dequeue()
1329 status = c->channel_abort(musb_ep->dma); in musb_gadget_dequeue()
1333 musb_g_giveback(musb_ep, request, -ECONNRESET); in musb_gadget_dequeue()
1338 musb_g_giveback(musb_ep, request, -ECONNRESET); in musb_gadget_dequeue()
1354 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_set_halt() local
1355 u8 epnum = musb_ep->current_epnum; in musb_gadget_set_halt()
1356 struct musb *musb = musb_ep->musb; in musb_gadget_set_halt()
1370 if ((USB_ENDPOINT_XFER_ISOC == musb_ep->type)) { in musb_gadget_set_halt()
1377 request = next_request(musb_ep); in musb_gadget_set_halt()
1386 if (musb_ep->is_in) { in musb_gadget_set_halt()
1395 musb_ep->wedged = 0; in musb_gadget_set_halt()
1399 if (musb_ep->is_in) { in musb_gadget_set_halt()
1424 if (!musb_ep->busy && !value && request) { in musb_gadget_set_halt()
1439 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_set_wedge() local
1444 musb_ep->wedged = 1; in musb_gadget_set_wedge()
1451 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_fifo_status() local
1452 void __iomem *epio = musb_ep->hw_ep->regs; in musb_gadget_fifo_status()
1455 if (musb_ep->desc && !musb_ep->is_in) { in musb_gadget_fifo_status()
1456 struct musb *musb = musb_ep->musb; in musb_gadget_fifo_status()
1457 int epnum = musb_ep->current_epnum; in musb_gadget_fifo_status()
1474 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_fifo_flush() local
1475 struct musb *musb = musb_ep->musb; in musb_gadget_fifo_flush()
1476 u8 epnum = musb_ep->current_epnum; in musb_gadget_fifo_flush()
1490 if (musb_ep->is_in) { in musb_gadget_fifo_flush()
1708 init_peripheral_ep(struct musb *musb, struct musb_ep *ep, u8 epnum, int is_in) in init_peripheral_ep()
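For orientation, the member accesses collected above imply the rough shape of struct musb_ep. The sketch below is reconstructed only from those references; exact field types, widths, and ordering are assumptions, not the authoritative definition in the driver's header.

/*
 * Partial sketch of struct musb_ep, inferred from the accesses listed
 * above (types and ordering assumed, not copied from musb_gadget.h).
 */
struct musb_ep {
	struct usb_ep		end_point;	/* end_point.name, end_point.desc */
	char			name[12];	/* iso-overrun message in musb_g_rx() */
	struct musb_hw_ep	*hw_ep;		/* hw_ep->regs, hw_ep->rx_double_buffered, ... */
	struct musb		*musb;		/* owning controller */
	u8			current_epnum;	/* hardware endpoint number */

	u8			type;		/* usb_endpoint_type(desc) in musb_gadget_enable() */
	u8			is_in;		/* IN (TX) vs. OUT (RX) direction */
	u16			packet_sz;	/* wMaxPacketSize & 0x7ff */
	const struct usb_endpoint_descriptor	*desc;	/* NULL while disabled */
	struct dma_channel	*dma;		/* from c->channel_alloc(), may be NULL */
	struct list_head	req_list;	/* queued struct musb_request entries */

	u16			hb_mult;	/* high-bandwidth multiplier, (tmp >> 11) & 3 */
	u8			wedged;		/* set by musb_gadget_set_wedge() */
	u8			busy;		/* req_list must not be advanced while set */
};

The enable/disable/queue/dequeue/set_halt/set_wedge/fifo_status/fifo_flush functions in the listing are the driver's struct usb_ep_ops callbacks. A plausible wiring is sketched below; the .free_request handler does not touch musb_ep and therefore does not appear in the references above, so its name here is an assumption based on the usual alloc/free pairing.

static const struct usb_ep_ops musb_ep_ops = {
	.enable		= musb_gadget_enable,
	.disable	= musb_gadget_disable,
	.alloc_request	= musb_alloc_request,
	.free_request	= musb_free_request,	/* assumed: no musb_ep reference, hence absent above */
	.queue		= musb_gadget_queue,
	.dequeue	= musb_gadget_dequeue,
	.set_halt	= musb_gadget_set_halt,
	.set_wedge	= musb_gadget_set_wedge,
	.fifo_status	= musb_gadget_fifo_status,
	.fifo_flush	= musb_gadget_fifo_flush,
};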