Lines Matching refs:musb_ep
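
The references below appear to come from drivers/usb/musb/musb_gadget.c, and every hit touches a field of struct musb_ep. As a reading aid, here is a minimal sketch of those fields, reconstructed purely from the uses listed in this section; the field types, sizes, ordering, and comments are assumptions, and the authoritative definition lives in drivers/usb/musb/musb_gadget.h:

	/* Sketch only: kernel types (u8, struct usb_ep, ...) as in the usual gadget headers. */
	struct musb_ep {
		struct usb_ep		end_point;	/* ->end_point.name shows up in the debug output */
		char			name[12];	/* ->name (iso overrun message); size is an assumption */
		struct musb_hw_ep	*hw_ep;		/* FIFO access via musb_write_fifo()/musb_read_fifo() */
		struct musb		*musb;		/* back-pointer to the controller */
		u8			current_epnum;	/* endpoint number passed to musb_ep_select() */
		u8			type;		/* USB_ENDPOINT_XFER_* taken from the descriptor */
		u8			is_in;		/* direction, set in musb_gadget_enable() */
		u16			packet_sz;	/* wMaxPacketSize & 0x7ff */
		const struct usb_endpoint_descriptor *desc;	/* NULL while the endpoint is disabled */
		struct dma_channel	*dma;		/* NULL when no DMA channel was allocated */
		struct list_head	req_list;	/* queue of struct musb_request */
		u8			busy;		/* checked before starting a queued request */
		u8			wedged;		/* set by musb_gadget_set_wedge() */
		u8			hb_mult;	/* high-bandwidth multiplier, (wMaxPacketSize >> 11) & 3 */
	};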

57 			struct musb *musb, struct musb_ep *musb_ep)  in map_dma_buffer()  argument
64 if (!is_dma_capable() || !musb_ep->dma) in map_dma_buffer()
72 compatible = dma->is_compatible(musb_ep->dma, in map_dma_buffer()
73 musb_ep->packet_sz, request->request.buf, in map_dma_buffer()
110 struct musb_ep *musb_ep = request->ep; in unmap_dma_buffer() local
112 if (!is_buffer_mapped(request) || !musb_ep->dma) in unmap_dma_buffer()
147 struct musb_ep *ep, in musb_g_giveback()
190 static void nuke(struct musb_ep *ep, const int status) in nuke()
241 static inline int max_ep_writesize(struct musb *musb, struct musb_ep *ep) in max_ep_writesize()
259 struct musb_ep *musb_ep; in txstate() local
265 musb_ep = req->ep; in txstate()
268 if (!musb_ep->desc) { in txstate()
270 musb_ep->end_point.name); in txstate()
275 if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) { in txstate()
284 fifo_count = min(max_ep_writesize(musb, musb_ep), in txstate()
289 musb_ep->end_point.name, csr); in txstate()
295 musb_ep->end_point.name, csr); in txstate()
300 epnum, musb_ep->packet_sz, fifo_count, in txstate()
310 musb_ep->dma->max_len); in txstate()
317 if (request_size < musb_ep->packet_sz) in txstate()
318 musb_ep->dma->desired_mode = 0; in txstate()
320 musb_ep->dma->desired_mode = 1; in txstate()
323 musb_ep->dma, musb_ep->packet_sz, in txstate()
324 musb_ep->dma->desired_mode, in txstate()
327 if (musb_ep->dma->desired_mode == 0) { in txstate()
355 if (!musb_ep->hb_mult || in txstate()
356 (musb_ep->hb_mult && in txstate()
358 musb_ep->type))) in txstate()
392 musb_ep->dma, musb_ep->packet_sz, in txstate()
397 c->channel_release(musb_ep->dma); in txstate()
398 musb_ep->dma = NULL; in txstate()
405 musb_ep->dma, musb_ep->packet_sz, in txstate()
419 musb_write_fifo(musb_ep->hw_ep, fifo_count, in txstate()
429 musb_ep->end_point.name, use_dma ? "dma" : "pio", in txstate()
446 struct musb_ep *musb_ep = &musb->endpoints[epnum].ep_in; in musb_g_tx() local
451 req = next_request(musb_ep); in musb_g_tx()
455 dev_dbg(musb->controller, "<== %s, txcsr %04x\n", musb_ep->end_point.name, csr); in musb_g_tx()
457 dma = is_dma_capable() ? musb_ep->dma : NULL; in musb_g_tx()
484 dev_dbg(musb->controller, "%s dma still busy?\n", musb_ep->end_point.name); in musb_g_tx()
500 request->actual += musb_ep->dma->actual_len; in musb_g_tx()
502 epnum, csr, musb_ep->dma->actual_len, request); in musb_g_tx()
510 && (request->length % musb_ep->packet_sz == 0) in musb_g_tx()
517 (musb_ep->packet_sz - 1))))) in musb_g_tx()
535 musb_g_giveback(musb_ep, request, 0); in musb_g_tx()
545 req = musb_ep->desc ? next_request(musb_ep) : NULL; in musb_g_tx()
548 musb_ep->end_point.name); in musb_g_tx()
566 struct musb_ep *musb_ep; in rxstate() local
575 musb_ep = &hw_ep->ep_in; in rxstate()
577 musb_ep = &hw_ep->ep_out; in rxstate()
579 fifo_count = musb_ep->packet_sz; in rxstate()
582 if (!musb_ep->desc) { in rxstate()
584 musb_ep->end_point.name); in rxstate()
589 if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) { in rxstate()
596 musb_ep->end_point.name, csr); in rxstate()
602 struct dma_channel *channel = musb_ep->dma; in rxstate()
610 musb_ep->packet_sz, in rxstate()
636 if (request->short_not_ok && fifo_count == musb_ep->packet_sz) in rxstate()
652 channel = musb_ep->dma; in rxstate()
695 musb_ep->dma->desired_mode = 1; in rxstate()
697 if (!musb_ep->hb_mult && in rxstate()
698 musb_ep->hw_ep->rx_double_buffered) in rxstate()
705 musb_ep->dma->desired_mode = 0; in rxstate()
710 musb_ep->packet_sz, in rxstate()
728 channel = musb_ep->dma; in rxstate()
731 if (fifo_count < musb_ep->packet_sz) in rxstate()
750 if (transfer_size <= musb_ep->packet_sz) { in rxstate()
751 musb_ep->dma->desired_mode = 0; in rxstate()
753 musb_ep->dma->desired_mode = 1; in rxstate()
760 musb_ep->packet_sz, in rxstate()
771 musb_ep->end_point.name, in rxstate()
773 musb_ep->packet_sz); in rxstate()
779 struct dma_channel *channel = musb_ep->dma; in rxstate()
784 musb_ep->packet_sz, in rxstate()
807 musb_read_fifo(musb_ep->hw_ep, fifo_count, (u8 *) in rxstate()
824 fifo_count < musb_ep->packet_sz) in rxstate()
825 musb_g_giveback(musb_ep, request, 0); in rxstate()
837 struct musb_ep *musb_ep; in musb_g_rx() local
843 musb_ep = &hw_ep->ep_in; in musb_g_rx()
845 musb_ep = &hw_ep->ep_out; in musb_g_rx()
849 req = next_request(musb_ep); in musb_g_rx()
856 dma = is_dma_capable() ? musb_ep->dma : NULL; in musb_g_rx()
858 dev_dbg(musb->controller, "<== %s, rxcsr %04x%s %p\n", musb_ep->end_point.name, in musb_g_rx()
873 dev_dbg(musb->controller, "%s iso overrun on %p\n", musb_ep->name, request); in musb_g_rx()
879 dev_dbg(musb->controller, "%s, incomprx\n", musb_ep->end_point.name); in musb_g_rx()
885 musb_ep->end_point.name, csr); in musb_g_rx()
896 request->actual += musb_ep->dma->actual_len; in musb_g_rx()
901 musb_ep->dma->actual_len, request); in musb_g_rx()
908 & (musb_ep->packet_sz - 1))) { in musb_g_rx()
916 && (musb_ep->dma->actual_len in musb_g_rx()
917 == musb_ep->packet_sz)) { in musb_g_rx()
928 musb_g_giveback(musb_ep, request, 0); in musb_g_rx()
939 req = next_request(musb_ep); in musb_g_rx()
957 struct musb_ep *musb_ep; in musb_gadget_enable() local
970 musb_ep = to_musb_ep(ep); in musb_gadget_enable()
971 hw_ep = musb_ep->hw_ep; in musb_gadget_enable()
973 musb = musb_ep->musb; in musb_gadget_enable()
975 epnum = musb_ep->current_epnum; in musb_gadget_enable()
979 if (musb_ep->desc) { in musb_gadget_enable()
983 musb_ep->type = usb_endpoint_type(desc); in musb_gadget_enable()
1003 musb_ep->hb_mult = (tmp >> 11) & 3; in musb_gadget_enable()
1005 musb_ep->hb_mult = 0; in musb_gadget_enable()
1008 musb_ep->packet_sz = tmp & 0x7ff; in musb_gadget_enable()
1009 tmp = musb_ep->packet_sz * (musb_ep->hb_mult + 1); in musb_gadget_enable()
1018 musb_ep->is_in = 1; in musb_gadget_enable()
1019 if (!musb_ep->is_in) in musb_gadget_enable()
1039 if (can_bulk_split(musb, musb_ep->type)) in musb_gadget_enable()
1040 musb_ep->hb_mult = (hw_ep->max_packet_sz_tx / in musb_gadget_enable()
1041 musb_ep->packet_sz) - 1; in musb_gadget_enable()
1042 musb_writew(regs, MUSB_TXMAXP, musb_ep->packet_sz in musb_gadget_enable()
1043 | (musb_ep->hb_mult << 11)); in musb_gadget_enable()
1050 if (musb_ep->type == USB_ENDPOINT_XFER_ISOC) in musb_gadget_enable()
1061 musb_ep->is_in = 0; in musb_gadget_enable()
1062 if (musb_ep->is_in) in musb_gadget_enable()
1082 musb_writew(regs, MUSB_RXMAXP, musb_ep->packet_sz in musb_gadget_enable()
1083 | (musb_ep->hb_mult << 11)); in musb_gadget_enable()
1093 if (musb_ep->type == USB_ENDPOINT_XFER_ISOC) in musb_gadget_enable()
1095 else if (musb_ep->type == USB_ENDPOINT_XFER_INT) in musb_gadget_enable()
1109 musb_ep->dma = c->channel_alloc(c, hw_ep, in musb_gadget_enable()
1112 musb_ep->dma = NULL; in musb_gadget_enable()
1114 musb_ep->desc = desc; in musb_gadget_enable()
1115 musb_ep->busy = 0; in musb_gadget_enable()
1116 musb_ep->wedged = 0; in musb_gadget_enable()
1120 musb_driver_name, musb_ep->end_point.name, in musb_gadget_enable()
1121 ({ char *s; switch (musb_ep->type) { in musb_gadget_enable()
1126 musb_ep->is_in ? "IN" : "OUT", in musb_gadget_enable()
1127 musb_ep->dma ? "dma, " : "", in musb_gadget_enable()
1128 musb_ep->packet_sz); in musb_gadget_enable()
1145 struct musb_ep *musb_ep; in musb_gadget_disable() local
1149 musb_ep = to_musb_ep(ep); in musb_gadget_disable()
1150 musb = musb_ep->musb; in musb_gadget_disable()
1151 epnum = musb_ep->current_epnum; in musb_gadget_disable()
1158 if (musb_ep->is_in) { in musb_gadget_disable()
1168 musb_ep->desc = NULL; in musb_gadget_disable()
1169 musb_ep->end_point.desc = NULL; in musb_gadget_disable()
1172 nuke(musb_ep, -ESHUTDOWN); in musb_gadget_disable()
1178 dev_dbg(musb->controller, "%s\n", musb_ep->end_point.name); in musb_gadget_disable()
1189 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_alloc_request() local
1190 struct musb *musb = musb_ep->musb; in musb_alloc_request()
1200 request->epnum = musb_ep->current_epnum; in musb_alloc_request()
1201 request->ep = musb_ep; in musb_alloc_request()
1243 struct musb_ep *musb_ep; in musb_gadget_queue() local
1254 musb_ep = to_musb_ep(ep); in musb_gadget_queue()
1255 musb = musb_ep->musb; in musb_gadget_queue()
1260 if (request->ep != musb_ep) in musb_gadget_queue()
1268 request->epnum = musb_ep->current_epnum; in musb_gadget_queue()
1269 request->tx = musb_ep->is_in; in musb_gadget_queue()
1271 map_dma_buffer(request, musb, musb_ep); in musb_gadget_queue()
1276 if (!musb_ep->desc) { in musb_gadget_queue()
1285 list_add_tail(&request->list, &musb_ep->req_list); in musb_gadget_queue()
1288 if (!musb_ep->busy && &request->list == musb_ep->req_list.next) in musb_gadget_queue()
1298 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_dequeue() local
1303 struct musb *musb = musb_ep->musb; in musb_gadget_dequeue()
1305 if (!ep || !request || to_musb_request(request)->ep != musb_ep) in musb_gadget_dequeue()
1310 list_for_each_entry(r, &musb_ep->req_list, list) { in musb_gadget_dequeue()
1321 if (musb_ep->req_list.next != &req->list || musb_ep->busy) in musb_gadget_dequeue()
1322 musb_g_giveback(musb_ep, request, -ECONNRESET); in musb_gadget_dequeue()
1325 else if (is_dma_capable() && musb_ep->dma) { in musb_gadget_dequeue()
1328 musb_ep_select(musb->mregs, musb_ep->current_epnum); in musb_gadget_dequeue()
1330 status = c->channel_abort(musb_ep->dma); in musb_gadget_dequeue()
1334 musb_g_giveback(musb_ep, request, -ECONNRESET); in musb_gadget_dequeue()
1339 musb_g_giveback(musb_ep, request, -ECONNRESET); in musb_gadget_dequeue()
1355 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_set_halt() local
1356 u8 epnum = musb_ep->current_epnum; in musb_gadget_set_halt()
1357 struct musb *musb = musb_ep->musb; in musb_gadget_set_halt()
1371 if ((USB_ENDPOINT_XFER_ISOC == musb_ep->type)) { in musb_gadget_set_halt()
1378 request = next_request(musb_ep); in musb_gadget_set_halt()
1387 if (musb_ep->is_in) { in musb_gadget_set_halt()
1396 musb_ep->wedged = 0; in musb_gadget_set_halt()
1400 if (musb_ep->is_in) { in musb_gadget_set_halt()
1425 if (!musb_ep->busy && !value && request) { in musb_gadget_set_halt()
1440 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_set_wedge() local
1445 musb_ep->wedged = 1; in musb_gadget_set_wedge()
1452 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_fifo_status() local
1453 void __iomem *epio = musb_ep->hw_ep->regs; in musb_gadget_fifo_status()
1456 if (musb_ep->desc && !musb_ep->is_in) { in musb_gadget_fifo_status()
1457 struct musb *musb = musb_ep->musb; in musb_gadget_fifo_status()
1458 int epnum = musb_ep->current_epnum; in musb_gadget_fifo_status()
1475 struct musb_ep *musb_ep = to_musb_ep(ep); in musb_gadget_fifo_flush() local
1476 struct musb *musb = musb_ep->musb; in musb_gadget_fifo_flush()
1477 u8 epnum = musb_ep->current_epnum; in musb_gadget_fifo_flush()
1491 if (musb_ep->is_in) { in musb_gadget_fifo_flush()
1744 init_peripheral_ep(struct musb *musb, struct musb_ep *ep, u8 epnum, int is_in) in init_peripheral_ep()
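
Most of the musb_gadget_* entry points listed above (enable, disable, queue, dequeue, set_halt, fifo_status, fifo_flush) share one shape: recover the musb_ep from the generic usb_ep, pull the controller and endpoint number out of it, select that endpoint, then branch on whether a DMA channel is attached. A hedged sketch of that shape follows; the function name is hypothetical, and the locking details (musb->lock) are assumptions not visible in the listing itself:

	/* Hypothetical illustration, not a function in musb_gadget.c. */
	static int musb_gadget_example_op(struct usb_ep *ep)
	{
		struct musb_ep	*musb_ep = to_musb_ep(ep);	/* wrapper around the generic ep */
		struct musb	*musb = musb_ep->musb;		/* controller back-pointer */
		u8		epnum = musb_ep->current_epnum;
		unsigned long	flags;

		if (!musb_ep->desc)			/* not enabled: nothing to do */
			return -ESHUTDOWN;

		spin_lock_irqsave(&musb->lock, flags);	/* lock name is an assumption */
		musb_ep_select(musb->mregs, epnum);	/* index the per-endpoint registers */

		if (is_dma_capable() && musb_ep->dma) {
			/* DMA path: operate on the channel hanging off musb_ep->dma */
		} else {
			/* PIO path: FIFO access goes through musb_ep->hw_ep */
		}

		spin_unlock_irqrestore(&musb->lock, flags);
		return 0;
	}

The per-transfer helpers above (txstate/rxstate and musb_g_tx/musb_g_rx) follow the same select-then-act pattern, with the DMA-versus-PIO decision keyed off musb_ep->dma and the transfer sizing off musb_ep->packet_sz.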