Lines Matching refs:musb

97 struct musb *hcd_to_musb(struct usb_hcd *hcd)  in hcd_to_musb()
99 return *(struct musb **) hcd->hcd_priv; in hcd_to_musb()
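The two hcd_to_musb() lines above are one half of a pointer-stash pattern; the other half appears near the end of this listing, where .hcd_priv_size is set to sizeof(struct musb *) (line 2615) and musb_host_alloc() writes the musb pointer into hcd->hcd_priv (line 2653). A minimal sketch of that pattern, using hypothetical my_ctrl/my_hcd_to_ctrl/my_host_alloc names rather than the driver's own:

#include <linux/device.h>
#include <linux/usb/hcd.h>

struct my_ctrl;                                         /* stand-in for struct musb */

static const struct hc_driver my_hc_driver = {
        .description    = "my-hcd",
        .hcd_priv_size  = sizeof(struct my_ctrl *),     /* room for one pointer */
        /* ... remaining hc_driver callbacks elided ... */
};

static struct my_ctrl *my_hcd_to_ctrl(struct usb_hcd *hcd)
{
        /* hcd_priv is the per-HCD private area sized by hcd_priv_size above */
        return *(struct my_ctrl **) hcd->hcd_priv;
}

static int my_host_alloc(struct my_ctrl *ctrl, struct device *dev)
{
        struct usb_hcd *hcd = usb_create_hcd(&my_hc_driver, dev, dev_name(dev));

        if (!hcd)
                return -ENOMEM;
        /* stash the controller pointer so my_hcd_to_ctrl() can recover it */
        *(struct my_ctrl **) hcd->hcd_priv = ctrl;
        return 0;
}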
103 static void musb_ep_program(struct musb *musb, u8 epnum,
112 struct musb *musb = ep->musb; in musb_h_tx_flush_fifo() local
121 dev_dbg(musb->controller, "Host TX FIFONOTEMPTY csr: %02x\n", csr); in musb_h_tx_flush_fifo()
209 musb_start_urb(struct musb *musb, int is_in, struct musb_qh *qh) in musb_start_urb() argument
213 void __iomem *mbase = musb->mregs; in musb_start_urb()
231 musb->ep0_stage = MUSB_EP0_START; in musb_start_urb()
247 dev_dbg(musb->controller, "qh %p urb %p dev%d ep%d%s%s, hw_ep %d, %p/%d\n", in musb_start_urb()
260 musb_ep_program(musb, epnum, urb, !is_in, buf, offset, len); in musb_start_urb()
270 dev_dbg(musb->controller, "check whether there's still time for periodic Tx\n"); in musb_start_urb()
284 dev_dbg(musb->controller, "SOF for %d\n", epnum); in musb_start_urb()
292 dev_dbg(musb->controller, "Start TX%d %s\n", epnum, in musb_start_urb()
303 static void musb_giveback(struct musb *musb, struct urb *urb, int status) in musb_giveback() argument
304 __releases(musb->lock) in musb_giveback()
305 __acquires(musb->lock) in musb_giveback()
307 dev_dbg(musb->controller, in musb_giveback()
316 usb_hcd_unlink_urb_from_ep(musb->hcd, urb); in musb_giveback()
317 spin_unlock(&musb->lock); in musb_giveback()
318 usb_hcd_giveback_urb(musb->hcd, urb, status); in musb_giveback()
319 spin_lock(&musb->lock); in musb_giveback()
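Lines 316-319 show the usual host-side completion rule: the URB is unlinked from its endpoint while musb->lock is held, then the lock is dropped across usb_hcd_giveback_urb() because the completion callback may resubmit the URB and re-enter the driver. A hedged sketch of that shape (the struct and function names here are hypothetical, not the driver's):

#include <linux/spinlock.h>
#include <linux/usb/hcd.h>

struct my_ctrl {
        spinlock_t lock;                /* guards the host controller state */
        struct usb_hcd *hcd;
};

/* caller holds ctrl->lock; it is dropped only around the giveback call */
static void my_giveback(struct my_ctrl *ctrl, struct urb *urb, int status)
        __releases(ctrl->lock)
        __acquires(ctrl->lock)
{
        usb_hcd_unlink_urb_from_ep(ctrl->hcd, urb);

        spin_unlock(&ctrl->lock);
        /* the URB completion handler may resubmit and re-enter this driver */
        usb_hcd_giveback_urb(ctrl->hcd, urb, status);
        spin_lock(&ctrl->lock);
}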
349 static void musb_advance_schedule(struct musb *musb, struct urb *urb, in musb_advance_schedule() argument
372 musb_giveback(musb, urb, status); in musb_advance_schedule()
380 struct dma_controller *dma = musb->dma_controller; in musb_advance_schedule()
428 dev_dbg(musb->controller, "... next ep%d %cX urb %p\n", in musb_advance_schedule()
430 musb_start_urb(musb, is_in, qh); in musb_advance_schedule()
457 musb_host_packet_rx(struct musb *musb, struct urb *urb, u8 epnum, u8 iso_err) in musb_host_packet_rx() argument
465 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_packet_rx()
473 dev_dbg(musb->controller, "RX%d count %d, buffer %p len %d/%d\n", epnum, rx_count, in musb_host_packet_rx()
495 dev_dbg(musb->controller, "** OVERFLOW %d into %d\n", rx_count, length); in musb_host_packet_rx()
513 dev_dbg(musb->controller, "** OVERFLOW %d into %d\n", rx_count, length); in musb_host_packet_rx()
558 musb_rx_reinit(struct musb *musb, struct musb_qh *qh, struct musb_hw_ep *ep) in musb_rx_reinit() argument
596 if (musb->is_multipoint) { in musb_rx_reinit()
602 musb_writeb(musb->mregs, MUSB_FADDR, qh->addr_reg); in musb_rx_reinit()
611 if (musb->double_buffer_not_ok) in musb_rx_reinit()
649 can_bulk_split(hw_ep->musb, qh->type))) in musb_tx_dma_program()
696 static void musb_ep_program(struct musb *musb, u8 epnum, in musb_ep_program() argument
703 void __iomem *mbase = musb->mregs; in musb_ep_program()
704 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_ep_program()
711 dev_dbg(musb->controller, "%s hw%d urb %p spd%d dev%d ep%d%s " in musb_ep_program()
730 dma_controller = musb->dma_controller; in musb_ep_program()
755 int_txe = musb->intrtxe; in musb_ep_program()
803 if (musb->is_multipoint) { in musb_ep_program()
814 if (musb->double_buffer_not_ok) { in musb_ep_program()
817 } else if (can_bulk_split(musb, qh->type)) { in musb_ep_program()
830 if (musb->is_multipoint) in musb_ep_program()
835 if (can_bulk_split(musb, qh->type)) in musb_ep_program()
853 dev_err(musb->controller, in musb_ep_program()
878 musb_rx_reinit(musb, qh, hw_ep); in musb_ep_program()
930 dev_dbg(musb->controller, "RXCSR%d := %04x\n", epnum, csr); in musb_ep_program()
939 static void musb_bulk_nak_timeout(struct musb *musb, struct musb_hw_ep *ep, in musb_bulk_nak_timeout() argument
944 void __iomem *mbase = musb->mregs; in musb_bulk_nak_timeout()
959 cur_qh = first_qh(&musb->in_bulk); in musb_bulk_nak_timeout()
969 cur_qh = first_qh(&musb->out_bulk); in musb_bulk_nak_timeout()
975 musb->dma_controller->channel_abort(dma); in musb_bulk_nak_timeout()
983 list_move_tail(&cur_qh->ring, &musb->in_bulk); in musb_bulk_nak_timeout()
986 next_qh = first_qh(&musb->in_bulk); in musb_bulk_nak_timeout()
992 list_move_tail(&cur_qh->ring, &musb->out_bulk); in musb_bulk_nak_timeout()
995 next_qh = first_qh(&musb->out_bulk); in musb_bulk_nak_timeout()
1000 musb_start_urb(musb, is_in, next_qh); in musb_bulk_nak_timeout()
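The musb_bulk_nak_timeout() lines above (939-1000) implement a simple round-robin for bulk qhs that share one hardware endpoint: abort any DMA in flight, rotate the timed-out qh to the tail of the shared in_bulk/out_bulk list, and start whichever qh is now at the head. A sketch of just the rotation step, with hypothetical my_qh/my_first_qh helpers standing in for the driver's qh handling and first_qh():

#include <linux/list.h>

struct my_qh {
        struct list_head ring;          /* link on the shared bulk list */
};

static struct my_qh *my_first_qh(struct list_head *q)
{
        if (list_empty(q))
                return NULL;
        return list_first_entry(q, struct my_qh, ring);
}

static struct my_qh *my_rotate_bulk(struct list_head *bulk_list)
{
        struct my_qh *cur = my_first_qh(bulk_list);

        if (!cur)
                return NULL;
        /* the NAKing transfer goes to the back of the queue ... */
        list_move_tail(&cur->ring, bulk_list);
        /* ... and whichever qh is now first gets the hardware endpoint */
        return my_first_qh(bulk_list);
}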
1008 static bool musb_h_ep0_continue(struct musb *musb, u16 len, struct urb *urb) in musb_h_ep0_continue() argument
1013 struct musb_hw_ep *hw_ep = musb->control_ep; in musb_h_ep0_continue()
1017 switch (musb->ep0_stage) { in musb_h_ep0_continue()
1040 dev_dbg(musb->controller, "start no-DATA\n"); in musb_h_ep0_continue()
1043 dev_dbg(musb->controller, "start IN-DATA\n"); in musb_h_ep0_continue()
1044 musb->ep0_stage = MUSB_EP0_IN; in musb_h_ep0_continue()
1048 dev_dbg(musb->controller, "start OUT-DATA\n"); in musb_h_ep0_continue()
1049 musb->ep0_stage = MUSB_EP0_OUT; in musb_h_ep0_continue()
1060 dev_dbg(musb->controller, "Sending %d byte%s to ep0 fifo %p\n", in musb_h_ep0_continue()
1071 ERR("bogus ep0 stage %d\n", musb->ep0_stage); in musb_h_ep0_continue()
1084 irqreturn_t musb_h_ep0_irq(struct musb *musb) in musb_h_ep0_irq() argument
1089 void __iomem *mbase = musb->mregs; in musb_h_ep0_irq()
1090 struct musb_hw_ep *hw_ep = musb->control_ep; in musb_h_ep0_irq()
1105 dev_dbg(musb->controller, "<== csr0 %04x, qh %p, count %d, urb %p, stage %d\n", in musb_h_ep0_irq()
1106 csr, qh, len, urb, musb->ep0_stage); in musb_h_ep0_irq()
1109 if (MUSB_EP0_STATUS == musb->ep0_stage) { in musb_h_ep0_irq()
1116 dev_dbg(musb->controller, "STALLING ENDPOINT\n"); in musb_h_ep0_irq()
1120 dev_dbg(musb->controller, "no response, csr0 %04x\n", csr); in musb_h_ep0_irq()
1124 dev_dbg(musb->controller, "control NAK timeout\n"); in musb_h_ep0_irq()
1139 dev_dbg(musb->controller, "aborting\n"); in musb_h_ep0_irq()
1172 if (musb_h_ep0_continue(musb, len, urb)) { in musb_h_ep0_irq()
1174 csr = (MUSB_EP0_IN == musb->ep0_stage) in musb_h_ep0_irq()
1190 musb->ep0_stage = MUSB_EP0_STATUS; in musb_h_ep0_irq()
1192 dev_dbg(musb->controller, "ep0 STATUS, csr %04x\n", csr); in musb_h_ep0_irq()
1198 musb->ep0_stage = MUSB_EP0_IDLE; in musb_h_ep0_irq()
1202 musb_advance_schedule(musb, urb, hw_ep, 1); in musb_h_ep0_irq()
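The ep0 lines above trace a small stage machine: musb_start_urb() sets MUSB_EP0_START (line 231), musb_h_ep0_continue() advances to MUSB_EP0_IN or MUSB_EP0_OUT for the data phase (lines 1044, 1049), and musb_h_ep0_irq() moves to MUSB_EP0_STATUS and finally back to MUSB_EP0_IDLE before advancing the schedule (lines 1190, 1198). A compressed, illustrative transition function follows; the enum layout and helper are assumptions for the sketch, only the stage names come from the listing:

#include <linux/types.h>

/* illustrative only: stage names mirror the listing, layout is assumed */
enum ep0_stage { EP0_IDLE, EP0_START, EP0_IN, EP0_OUT, EP0_STATUS };

static enum ep0_stage ep0_next_stage(enum ep0_stage cur, bool is_in,
                                     bool has_data, bool data_done)
{
        switch (cur) {
        case EP0_START:                         /* SETUP packet acked */
                if (!has_data)
                        return EP0_STATUS;      /* the "start no-DATA" case */
                return is_in ? EP0_IN : EP0_OUT;
        case EP0_IN:
        case EP0_OUT:
                return data_done ? EP0_STATUS : cur;
        case EP0_STATUS:                        /* status phase acked */
                return EP0_IDLE;
        default:
                return EP0_IDLE;
        }
}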
1225 void musb_host_tx(struct musb *musb, u8 epnum) in musb_host_tx() argument
1232 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_tx()
1237 void __iomem *mbase = musb->mregs; in musb_host_tx()
1246 dev_dbg(musb->controller, "extra TX%d ready, csr %04x\n", epnum, tx_csr); in musb_host_tx()
1252 dev_dbg(musb->controller, "OUT/TX%d end, csr %04x%s\n", epnum, tx_csr, in musb_host_tx()
1258 dev_dbg(musb->controller, "TX end %d stall\n", epnum); in musb_host_tx()
1265 dev_dbg(musb->controller, "TX 3strikes on ep=%d\n", epnum); in musb_host_tx()
1271 && !list_is_singular(&musb->out_bulk)) { in musb_host_tx()
1272 dev_dbg(musb->controller, in musb_host_tx()
1274 musb_bulk_nak_timeout(musb, hw_ep, 0); in musb_host_tx()
1276 dev_dbg(musb->controller, in musb_host_tx()
1298 musb->dma_controller->channel_abort(dma); in musb_host_tx()
1323 dev_dbg(musb->controller, "extra TX%d ready, csr %04x\n", epnum, tx_csr); in musb_host_tx()
1382 dev_dbg(musb->controller, "DMA complete but packet still in FIFO, " in musb_host_tx()
1439 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_OUT); in musb_host_tx()
1442 if (musb_tx_dma_program(musb->dma_controller, hw_ep, qh, urb, in musb_host_tx()
1449 dev_dbg(musb->controller, "not complete, but DMA enabled?\n"); in musb_host_tx()
1463 usb_hcd_unmap_urb_for_dma(musb->hcd, urb); in musb_host_tx()
1475 dev_err(musb->controller, "error: sg list empty\n"); in musb_host_tx()
1545 void musb_host_rx(struct musb *musb, u8 epnum) in musb_host_rx() argument
1548 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_rx()
1552 void __iomem *mbase = musb->mregs; in musb_host_rx()
1576 dev_dbg(musb->controller, "BOGUS RX%d ready, csr %04x, count %d\n", epnum, val, in musb_host_rx()
1584 dev_dbg(musb->controller, "<== hw %d rxcsr %04x, urb actual %d (+dma %zu)\n", in musb_host_rx()
1591 dev_dbg(musb->controller, "RX end %d STALL\n", epnum); in musb_host_rx()
1597 dev_dbg(musb->controller, "end %d RX proto error\n", epnum); in musb_host_rx()
1605 dev_dbg(musb->controller, "RX end %d NAK timeout\n", epnum); in musb_host_rx()
1617 && !list_is_singular(&musb->in_bulk)) { in musb_host_rx()
1618 musb_bulk_nak_timeout(musb, hw_ep, 1); in musb_host_rx()
1628 dev_dbg(musb->controller, "RX end %d ISO data error\n", epnum); in musb_host_rx()
1633 dev_dbg(musb->controller, "end %d high bandwidth incomplete ISO packet RX\n", in musb_host_rx()
1643 musb->dma_controller->channel_abort(dma); in musb_host_rx()
1674 musb->dma_controller->channel_abort(dma); in musb_host_rx()
1679 dev_dbg(musb->controller, "RXCSR%d %04x, reqpkt, len %zu%s\n", epnum, rx_csr, in musb_host_rx()
1719 c = musb->dma_controller; in musb_host_rx()
1751 dev_dbg(musb->controller, "ep %d dma %s, rxcsr %04x, rxcount %d\n", epnum, in musb_host_rx()
1785 dev_dbg(musb->controller, "RX%d count %d, buffer 0x%llx len %d/%d\n", in musb_host_rx()
1792 c = musb->dma_controller; in musb_host_rx()
1809 dev_dbg(musb->controller, "** OVERFLOW %d into %d\n",\ in musb_host_rx()
1900 usb_hcd_unmap_urb_for_dma(musb->hcd, urb); in musb_host_rx()
1914 dev_err(musb->controller, "error: sg list empty\n"); in musb_host_rx()
1923 done = musb_host_packet_rx(musb, urb, epnum, in musb_host_rx()
1931 done = musb_host_packet_rx(musb, urb, in musb_host_rx()
1934 dev_dbg(musb->controller, "read %spacket\n", done ? "last " : ""); in musb_host_rx()
1947 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_IN); in musb_host_rx()
1957 struct musb *musb, in musb_schedule() argument
1972 head = &musb->control; in musb_schedule()
1973 hw_ep = musb->control_ep; in musb_schedule()
1989 for (epnum = 1, hw_ep = musb->endpoints + 1; in musb_schedule()
1990 epnum < musb->nr_endpoints; in musb_schedule()
1997 if (hw_ep == musb->bulk_ep) in musb_schedule()
2020 hw_ep = musb->endpoints + epnum; in musb_schedule()
2034 hw_ep = musb->bulk_ep; in musb_schedule()
2036 head = &musb->in_bulk; in musb_schedule()
2038 head = &musb->out_bulk; in musb_schedule()
2057 hw_ep = musb->endpoints + best_end; in musb_schedule()
2058 dev_dbg(musb->controller, "qh %p periodic slot %d\n", qh, best_end); in musb_schedule()
2068 musb_start_urb(musb, is_in, qh); in musb_schedule()
2078 struct musb *musb = hcd_to_musb(hcd); in musb_urb_enqueue() local
2087 if (!is_host_active(musb) || !musb->is_active) in musb_urb_enqueue()
2090 spin_lock_irqsave(&musb->lock, flags); in musb_urb_enqueue()
2095 spin_unlock_irqrestore(&musb->lock, flags); in musb_urb_enqueue()
2116 spin_lock_irqsave(&musb->lock, flags); in musb_urb_enqueue()
2118 spin_unlock_irqrestore(&musb->lock, flags); in musb_urb_enqueue()
2139 ok = (usb_pipein(urb->pipe) && musb->hb_iso_rx) in musb_urb_enqueue()
2140 || (usb_pipeout(urb->pipe) && musb->hb_iso_tx); in musb_urb_enqueue()
2203 if (musb->is_multipoint) { in musb_urb_enqueue()
2225 spin_lock_irqsave(&musb->lock, flags); in musb_urb_enqueue()
2234 ret = musb_schedule(musb, qh, in musb_urb_enqueue()
2243 spin_unlock_irqrestore(&musb->lock, flags); in musb_urb_enqueue()
2247 spin_lock_irqsave(&musb->lock, flags); in musb_urb_enqueue()
2249 spin_unlock_irqrestore(&musb->lock, flags); in musb_urb_enqueue()
2264 struct musb *musb = ep->musb; in musb_cleanup_urb() local
2267 void __iomem *regs = ep->musb->mregs; in musb_cleanup_urb()
2279 status = ep->musb->dma_controller->channel_abort(dma); in musb_cleanup_urb()
2280 dev_dbg(musb->controller, in musb_cleanup_urb()
2315 musb_advance_schedule(ep->musb, urb, ep, is_in); in musb_cleanup_urb()
2321 struct musb *musb = hcd_to_musb(hcd); in musb_urb_dequeue() local
2327 dev_dbg(musb->controller, "urb=%p, dev%d ep%d%s\n", urb, in musb_urb_dequeue()
2332 spin_lock_irqsave(&musb->lock, flags); in musb_urb_dequeue()
2359 musb_giveback(musb, urb, 0); in musb_urb_dequeue()
2373 spin_unlock_irqrestore(&musb->lock, flags); in musb_urb_dequeue()
2383 struct musb *musb = hcd_to_musb(hcd); in musb_h_disable() local
2387 spin_lock_irqsave(&musb->lock, flags); in musb_h_disable()
2413 musb_advance_schedule(musb, urb, qh->hw_ep, is_in); in musb_h_disable()
2421 musb_giveback(musb, next_urb(qh), -ESHUTDOWN); in musb_h_disable()
2428 spin_unlock_irqrestore(&musb->lock, flags); in musb_h_disable()
2433 struct musb *musb = hcd_to_musb(hcd); in musb_h_get_frame_number() local
2435 return musb_readw(musb->mregs, MUSB_FRAME); in musb_h_get_frame_number()
2440 struct musb *musb = hcd_to_musb(hcd); in musb_h_start() local
2446 musb->port1_status = 0; in musb_h_start()
2458 struct musb *musb = hcd_to_musb(hcd); in musb_bus_suspend() local
2461 musb_port_suspend(musb, true); in musb_bus_suspend()
2463 if (!is_host_active(musb)) in musb_bus_suspend()
2466 switch (musb->xceiv->otg->state) { in musb_bus_suspend()
2474 devctl = musb_readb(musb->mregs, MUSB_DEVCTL); in musb_bus_suspend()
2476 musb->xceiv->otg->state = OTG_STATE_A_WAIT_BCON; in musb_bus_suspend()
2482 if (musb->is_active) { in musb_bus_suspend()
2484 usb_otg_state_string(musb->xceiv->otg->state)); in musb_bus_suspend()
2492 struct musb *musb = hcd_to_musb(hcd); in musb_bus_resume() local
2494 if (musb->config && in musb_bus_resume()
2495 musb->config->host_port_deassert_reset_at_resume) in musb_bus_resume()
2496 musb_port_reset(musb, false); in musb_bus_resume()
2575 struct musb *musb = hcd_to_musb(hcd); in musb_map_urb_for_dma() local
2584 if (musb->hwvers < MUSB_HWVERS_1800) in musb_map_urb_for_dma()
2600 struct musb *musb = hcd_to_musb(hcd); in musb_unmap_urb_for_dma() local
2605 if (musb->hwvers < MUSB_HWVERS_1800) in musb_unmap_urb_for_dma()
2615 .hcd_priv_size = sizeof(struct musb *),
2644 int musb_host_alloc(struct musb *musb) in musb_host_alloc() argument
2646 struct device *dev = musb->controller; in musb_host_alloc()
2649 musb->hcd = usb_create_hcd(&musb_hc_driver, dev, dev_name(dev)); in musb_host_alloc()
2650 if (!musb->hcd) in musb_host_alloc()
2653 *musb->hcd->hcd_priv = (unsigned long) musb; in musb_host_alloc()
2654 musb->hcd->self.uses_pio_for_control = 1; in musb_host_alloc()
2655 musb->hcd->uses_new_polling = 1; in musb_host_alloc()
2656 musb->hcd->has_tt = 1; in musb_host_alloc()
2661 void musb_host_cleanup(struct musb *musb) in musb_host_cleanup() argument
2663 if (musb->port_mode == MUSB_PORT_MODE_GADGET) in musb_host_cleanup()
2665 usb_remove_hcd(musb->hcd); in musb_host_cleanup()
2668 void musb_host_free(struct musb *musb) in musb_host_free() argument
2670 usb_put_hcd(musb->hcd); in musb_host_free()
2673 int musb_host_setup(struct musb *musb, int power_budget) in musb_host_setup() argument
2676 struct usb_hcd *hcd = musb->hcd; in musb_host_setup()
2678 MUSB_HST_MODE(musb); in musb_host_setup()
2679 musb->xceiv->otg->default_a = 1; in musb_host_setup()
2680 musb->xceiv->otg->state = OTG_STATE_A_IDLE; in musb_host_setup()
2682 otg_set_host(musb->xceiv->otg, &hcd->self); in musb_host_setup()
2684 musb->xceiv->otg->host = &hcd->self; in musb_host_setup()
2695 void musb_host_resume_root_hub(struct musb *musb) in musb_host_resume_root_hub() argument
2697 usb_hcd_resume_root_hub(musb->hcd); in musb_host_resume_root_hub()
2700 void musb_host_poke_root_hub(struct musb *musb) in musb_host_poke_root_hub() argument
2702 MUSB_HST_MODE(musb); in musb_host_poke_root_hub()
2703 if (musb->hcd->status_urb) in musb_host_poke_root_hub()
2704 usb_hcd_poll_rh_status(musb->hcd); in musb_host_poke_root_hub()
2706 usb_hcd_resume_root_hub(musb->hcd); in musb_host_poke_root_hub()