Lines Matching refs:hw_ep
227 struct musb_hw_ep *hw_ep = qh->hw_ep; in musb_start_urb() local
230 int epnum = hw_ep->epnum; in musb_start_urb()
269 musb_ep_set_qh(hw_ep, is_in, qh); in musb_start_urb()
303 hw_ep->tx_channel ? "dma" : "pio"); in musb_start_urb()
305 if (!hw_ep->tx_channel) in musb_start_urb()
306 musb_h_tx_start(hw_ep); in musb_start_urb()
308 musb_h_tx_dma_start(hw_ep); in musb_start_urb()
336 void __iomem *epio = qh->hw_ep->regs; in musb_save_toggle()
360 struct musb_hw_ep *hw_ep, int is_in) in musb_advance_schedule() argument
362 struct musb_qh *qh = musb_ep_get_qh(hw_ep, is_in); in musb_advance_schedule()
363 struct musb_hw_ep *ep = qh->hw_ep; in musb_advance_schedule()
439 hw_ep->epnum, is_in ? 'R' : 'T', next_urb(qh)); in musb_advance_schedule()
444 static u16 musb_h_flush_rxfifo(struct musb_hw_ep *hw_ep, u16 csr) in musb_h_flush_rxfifo() argument
456 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_h_flush_rxfifo()
457 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_h_flush_rxfifo()
460 return musb_readw(hw_ep->regs, MUSB_RXCSR); in musb_h_flush_rxfifo()
475 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_packet_rx() local
476 void __iomem *epio = hw_ep->regs; in musb_host_packet_rx()
477 struct musb_qh *qh = hw_ep->in_qh; in musb_host_packet_rx()
542 musb_read_fifo(hw_ep, length, buf); in musb_host_packet_rx()
547 musb_h_flush_rxfifo(hw_ep, csr); in musb_host_packet_rx()
631 struct musb_hw_ep *hw_ep, struct musb_qh *qh, in musb_tx_dma_set_mode_mentor() argument
635 struct dma_channel *channel = hw_ep->tx_channel; in musb_tx_dma_set_mode_mentor()
636 void __iomem *epio = hw_ep->regs; in musb_tx_dma_set_mode_mentor()
658 can_bulk_split(hw_ep->musb, qh->type))) in musb_tx_dma_set_mode_mentor()
672 struct musb_hw_ep *hw_ep, in musb_tx_dma_set_mode_cppi_tusb() argument
679 struct dma_channel *channel = hw_ep->tx_channel; in musb_tx_dma_set_mode_cppi_tusb()
681 if (!is_cppi_enabled(hw_ep->musb) && !tusb_dma_omap(hw_ep->musb)) in musb_tx_dma_set_mode_cppi_tusb()
696 struct musb_hw_ep *hw_ep, struct musb_qh *qh, in musb_tx_dma_program() argument
699 struct dma_channel *channel = hw_ep->tx_channel; in musb_tx_dma_program()
704 if (musb_dma_inventra(hw_ep->musb) || musb_dma_ux500(hw_ep->musb)) in musb_tx_dma_program()
705 res = musb_tx_dma_set_mode_mentor(dma, hw_ep, qh, urb, in musb_tx_dma_program()
708 res = musb_tx_dma_set_mode_cppi_tusb(dma, hw_ep, qh, urb, in musb_tx_dma_program()
723 void __iomem *epio = hw_ep->regs; in musb_tx_dma_program()
727 hw_ep->tx_channel = NULL; in musb_tx_dma_program()
749 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_ep_program() local
750 void __iomem *epio = hw_ep->regs; in musb_ep_program()
751 struct musb_qh *qh = musb_ep_get_qh(hw_ep, !is_out); in musb_ep_program()
771 hw_ep->tx_channel = NULL; in musb_ep_program()
777 dma_channel = is_out ? hw_ep->tx_channel : hw_ep->rx_channel; in musb_ep_program()
780 dma_controller, hw_ep, is_out); in musb_ep_program()
782 hw_ep->tx_channel = dma_channel; in musb_ep_program()
784 hw_ep->rx_channel = dma_channel; in musb_ep_program()
811 if (!hw_ep->tx_double_buffered) in musb_ep_program()
812 musb_h_tx_flush_fifo(hw_ep); in musb_ep_program()
829 if (!hw_ep->tx_double_buffered) { in musb_ep_program()
844 musb_h_ep0_flush_fifo(hw_ep); in musb_ep_program()
861 hw_ep->max_packet_sz_tx); in musb_ep_program()
863 qh->hb_mult = hw_ep->max_packet_sz_tx in musb_ep_program()
881 load_count = min((u32) hw_ep->max_packet_sz_tx, in musb_ep_program()
887 hw_ep, qh, urb, offset, len)) in musb_ep_program()
908 musb_write_fifo(hw_ep, load_count, buf); in musb_ep_program()
912 musb_write_fifo(hw_ep, load_count, buf); in musb_ep_program()
922 if (hw_ep->rx_reinit) { in musb_ep_program()
935 csr = musb_readw(hw_ep->regs, MUSB_RXCSR); in musb_ep_program()
941 hw_ep->epnum, csr); in musb_ep_program()
955 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_ep_program()
956 csr = musb_readw(hw_ep->regs, MUSB_RXCSR); in musb_ep_program()
969 hw_ep->rx_channel = dma_channel = NULL; in musb_ep_program()
976 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_ep_program()
977 csr = musb_readw(hw_ep->regs, MUSB_RXCSR); in musb_ep_program()
1058 struct musb_hw_ep *hw_ep = musb->control_ep; in musb_h_ep0_continue() local
1059 struct musb_qh *qh = hw_ep->in_qh; in musb_h_ep0_continue()
1070 musb_read_fifo(hw_ep, fifo_count, fifo_dest); in musb_h_ep0_continue()
1109 musb_write_fifo(hw_ep, fifo_count, fifo_dest); in musb_h_ep0_continue()
1135 struct musb_hw_ep *hw_ep = musb->control_ep; in musb_h_ep0_irq() local
1136 void __iomem *epio = hw_ep->regs; in musb_h_ep0_irq()
1137 struct musb_qh *qh = hw_ep->in_qh; in musb_h_ep0_irq()
1197 musb_h_ep0_flush_fifo(hw_ep); in musb_h_ep0_irq()
1211 musb_h_ep0_flush_fifo(hw_ep); in musb_h_ep0_irq()
1247 musb_advance_schedule(musb, urb, hw_ep, 1); in musb_h_ep0_irq()
1277 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_tx() local
1278 void __iomem *epio = hw_ep->regs; in musb_host_tx()
1279 struct musb_qh *qh = hw_ep->out_qh; in musb_host_tx()
1296 dma = is_dma_capable() ? hw_ep->tx_channel : NULL; in musb_host_tx()
1319 musb_bulk_nak_timeout(musb, hw_ep, 0); in musb_host_tx()
1349 musb_h_tx_flush_fifo(hw_ep); in musb_host_tx()
1484 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_OUT); in musb_host_tx()
1487 if (musb_tx_dma_program(musb->dma_controller, hw_ep, qh, urb, in musb_host_tx()
1490 musb_h_tx_dma_start(hw_ep); in musb_host_tx()
1527 musb_write_fifo(hw_ep, length, urb->transfer_buffer); in musb_host_tx()
1531 musb_write_fifo(hw_ep, length, urb->transfer_buffer + offset); in musb_host_tx()
1549 struct musb_hw_ep *hw_ep, in musb_rx_dma_iso_cppi41() argument
1554 struct dma_channel *channel = hw_ep->tx_channel; in musb_rx_dma_iso_cppi41()
1555 void __iomem *epio = hw_ep->regs; in musb_rx_dma_iso_cppi41()
1567 musb_writew(hw_ep->regs, MUSB_RXCSR, val); in musb_rx_dma_iso_cppi41()
1576 struct musb_hw_ep *hw_ep, in musb_rx_dma_iso_cppi41() argument
1622 struct musb_hw_ep *hw_ep, in musb_rx_dma_inventra_cppi41() argument
1627 struct dma_channel *channel = hw_ep->rx_channel; in musb_rx_dma_inventra_cppi41()
1628 void __iomem *epio = hw_ep->regs; in musb_rx_dma_inventra_cppi41()
1651 if (musb_dma_cppi41(hw_ep->musb)) in musb_rx_dma_inventra_cppi41()
1652 done = musb_rx_dma_iso_cppi41(dma, hw_ep, qh, in musb_rx_dma_inventra_cppi41()
1692 struct musb_hw_ep *hw_ep, in musb_rx_dma_in_inventra_cppi41() argument
1698 struct musb *musb = hw_ep->musb; in musb_rx_dma_in_inventra_cppi41()
1699 void __iomem *epio = hw_ep->regs; in musb_rx_dma_in_inventra_cppi41()
1700 struct dma_channel *channel = hw_ep->rx_channel; in musb_rx_dma_in_inventra_cppi41()
1745 if (rx_count < hw_ep->max_packet_sz_rx) { in musb_rx_dma_in_inventra_cppi41()
1779 hw_ep->rx_channel = NULL; in musb_rx_dma_in_inventra_cppi41()
1792 struct musb_hw_ep *hw_ep, in musb_rx_dma_inventra_cppi41() argument
1801 struct musb_hw_ep *hw_ep, in musb_rx_dma_in_inventra_cppi41() argument
1818 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_rx() local
1820 void __iomem *epio = hw_ep->regs; in musb_host_rx()
1821 struct musb_qh *qh = hw_ep->in_qh; in musb_host_rx()
1835 dma = is_dma_capable() ? hw_ep->rx_channel : NULL; in musb_host_rx()
1849 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG); in musb_host_rx()
1889 musb_bulk_nak_timeout(musb, hw_ep, 1); in musb_host_rx()
1917 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG); in musb_host_rx()
1965 musb_writew(hw_ep->regs, MUSB_RXCSR, val); in musb_host_rx()
1969 done = musb_rx_dma_inventra_cppi41(c, hw_ep, qh, urb, xfer_len); in musb_host_rx()
1970 dev_dbg(hw_ep->musb->controller, in musb_host_rx()
1998 dev_dbg(hw_ep->musb->controller, in musb_host_rx()
2006 done = musb_rx_dma_in_inventra_cppi41(c, hw_ep, qh, in musb_host_rx()
2066 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_IN); in musb_host_rx()
2083 struct musb_hw_ep *hw_ep = NULL; in musb_schedule() local
2092 hw_ep = musb->control_ep; in musb_schedule()
2108 for (epnum = 1, hw_ep = musb->endpoints + 1; in musb_schedule()
2110 epnum++, hw_ep++) { in musb_schedule()
2113 if (musb_ep_get_qh(hw_ep, is_in) != NULL) in musb_schedule()
2116 if (hw_ep == musb->bulk_ep) in musb_schedule()
2120 diff = hw_ep->max_packet_sz_rx; in musb_schedule()
2122 diff = hw_ep->max_packet_sz_tx; in musb_schedule()
2139 hw_ep = musb->endpoints + epnum; in musb_schedule()
2141 txtype = (musb_readb(hw_ep->regs, MUSB_TXTYPE) in musb_schedule()
2153 hw_ep = musb->bulk_ep; in musb_schedule()
2176 hw_ep = musb->endpoints + best_end; in musb_schedule()
2184 qh->hw_ep = hw_ep; in musb_schedule()
2382 struct musb_hw_ep *ep = qh->hw_ep; in musb_cleanup_urb()
2474 || musb_ep_get_qh(qh->hw_ep, is_in) != qh) { in musb_urb_dequeue()
2516 if (musb_ep_get_qh(qh->hw_ep, is_in) == qh) { in musb_h_disable()
2532 musb_advance_schedule(musb, urb, qh->hw_ep, is_in); in musb_h_disable()
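
The helpers musb_ep_get_qh() and musb_ep_set_qh() recur throughout the listing above (musb_start_urb, musb_advance_schedule, musb_ep_program, musb_schedule, musb_urb_dequeue, musb_h_disable): each hardware endpoint carries one queue head per direction, and the is_in flag selects between hw_ep->in_qh and hw_ep->out_qh. Below is a minimal standalone sketch of that pattern, assuming only the fields visible in the references above (epnum, in_qh, out_qh); it is illustrative, not the driver's actual definition.

#include <stdio.h>

/*
 * Standalone sketch -- NOT the kernel source.  The stand-in types mirror only
 * the fields the call sites above rely on; the helper bodies are a plausible
 * reading of how musb_ep_get_qh()/musb_ep_set_qh() are used in
 * musb_start_urb(), musb_advance_schedule() and musb_schedule().
 */
struct musb_qh { int placeholder; };	/* queue head; opaque in the real driver */

struct musb_hw_ep {
	unsigned char	epnum;		/* hardware endpoint number          */
	struct musb_qh	*in_qh;		/* active queue head, IN  (RX) side  */
	struct musb_qh	*out_qh;	/* active queue head, OUT (TX) side  */
};

/* Pick the queue head for one direction of a shared hardware endpoint. */
static inline struct musb_qh *musb_ep_get_qh(struct musb_hw_ep *ep, int is_in)
{
	return is_in ? ep->in_qh : ep->out_qh;
}

/* Bind (or clear, with qh == NULL) the queue head for one direction. */
static inline void musb_ep_set_qh(struct musb_hw_ep *ep, int is_in,
				  struct musb_qh *qh)
{
	if (is_in)
		ep->in_qh = qh;
	else
		ep->out_qh = qh;
}

int main(void)
{
	static struct musb_qh dummy_qh;		/* placeholder, never dereferenced */
	struct musb_hw_ep hw_ep = { .epnum = 1 };

	musb_ep_set_qh(&hw_ep, 1, &dummy_qh);	/* claim only the IN side */

	printf("ep%d: in_qh %s, out_qh %s\n", hw_ep.epnum,
	       musb_ep_get_qh(&hw_ep, 1) ? "busy" : "free",
	       musb_ep_get_qh(&hw_ep, 0) ? "busy" : "free");
	return 0;
}

The same one-queue-head-per-direction split explains why the listing also shows paired tx_channel/rx_channel and max_packet_sz_tx/max_packet_sz_rx fields being chosen by the is_out/is_in flag in musb_ep_program() and musb_schedule().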