Lines matching refs: ep_ring. All hits are in drivers/usb/host/xhci-ring.c; each entry shows the source line number, the matching code, and the enclosing function, with "local"/"argument" marking how ep_ring is declared there.
443 struct xhci_ring *ep_ring; in xhci_find_new_dequeue_state() local
451 ep_ring = xhci_triad_to_transfer_ring(xhci, slot_id, in xhci_find_new_dequeue_state()
453 if (!ep_ring) { in xhci_find_new_dequeue_state()
474 new_seg = ep_ring->deq_seg; in xhci_find_new_dequeue_state()
475 new_deq = ep_ring->dequeue; in xhci_find_new_dequeue_state()
499 next_trb(xhci, ep_ring, &new_seg, &new_deq); in xhci_find_new_dequeue_state()
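
These first hits show xhci_find_new_dequeue_state() starting from the ring's current deq_seg/dequeue pair and stepping forward with next_trb() until it has passed the stopped TD. A minimal sketch of that segment-aware walk follows; the struct layouts are simplified stand-ins for the kernel's xhci_ring/xhci_segment (the real next_trb() detects link TRBs by type, modeled here as the last slot of each segment), and later sketches in this listing reuse this model:

    #include <stdbool.h>
    #include <stddef.h>

    #define TRBS_PER_SEGMENT 256            /* matches the kernel's segment size */

    struct trb { unsigned int field[4]; };

    struct seg {
        struct trb  trbs[TRBS_PER_SEGMENT];
        struct seg *next;                   /* segments form a circular list */
    };

    struct ring {
        struct seg  *deq_seg, *enq_seg;
        struct trb  *dequeue, *enqueue;
        unsigned int cycle_state;           /* producer cycle bit */
        unsigned int num_trbs_free;
    };

    /* Step one TRB forward, hopping into the next segment at a link TRB
     * (simplified here to "last TRB of the segment"). */
    static void next_trb(struct seg **seg, struct trb **trb)
    {
        if (*trb == &(*seg)->trbs[TRBS_PER_SEGMENT - 1]) {
            *seg = (*seg)->next;
            *trb = (*seg)->trbs;
        } else {
            (*trb)++;
        }
    }

    /* Walk from the current dequeue position to just past 'last_trb' --
     * the shape of the loop ending at line 499 above. */
    static void find_new_dequeue(struct ring *ring, struct trb *last_trb,
                                 struct seg **new_seg, struct trb **new_deq)
    {
        *new_seg = ring->deq_seg;
        *new_deq = ring->dequeue;
        while (*new_deq != last_trb)
            next_trb(new_seg, new_deq);
        next_trb(new_seg, new_deq);         /* one past the stopped TD */
    }
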
531 static void td_to_noop(struct xhci_hcd *xhci, struct xhci_ring *ep_ring, in td_to_noop() argument
539 next_trb(xhci, ep_ring, &cur_seg, &cur_trb)) { in td_to_noop()
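
td_to_noop() takes ep_ring only so next_trb() can follow link TRBs while it rewrites every TRB of a cancelled TD; the same helper is invoked from the stop-endpoint handler at line 706 and the isoc rollback at line 3879. A sketch of the per-TRB rewrite, reusing the model above; the bit layout (type in bits 15:10 of the last control word, cycle in bit 0) follows the xHCI TRB format:

    #define TRB_CYCLE        0x1
    #define TRB_TYPE(t)      ((t) << 10)
    #define TRB_TR_NOOP      8              /* xHCI No Op transfer TRB */

    /* Blank out one TRB, keeping only its cycle bit so the hardware's
     * view of ring ownership is undisturbed, then stamp the no-op type. */
    static void trb_to_noop(struct trb *trb)
    {
        trb->field[0] = 0;
        trb->field[1] = 0;
        trb->field[2] = 0;
        trb->field[3] &= TRB_CYCLE;         /* preserve only the cycle bit */
        trb->field[3] |= TRB_TYPE(TRB_TR_NOOP);
    }
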
640 struct xhci_ring *ep_ring; in xhci_handle_cmd_stop_ep() local
678 ep_ring = xhci_urb_to_transfer_ring(xhci, cur_td->urb); in xhci_handle_cmd_stop_ep()
679 if (!ep_ring) { in xhci_handle_cmd_stop_ep()
706 td_to_noop(xhci, ep_ring, cur_td, false); in xhci_handle_cmd_stop_ep()
901 struct xhci_ring *ep_ring, in update_ring_for_set_deq_completion() argument
908 num_trbs_free_temp = ep_ring->num_trbs_free; in update_ring_for_set_deq_completion()
909 dequeue_temp = ep_ring->dequeue; in update_ring_for_set_deq_completion()
917 if (last_trb(xhci, ep_ring, ep_ring->deq_seg, ep_ring->dequeue)) { in update_ring_for_set_deq_completion()
918 ep_ring->deq_seg = ep_ring->deq_seg->next; in update_ring_for_set_deq_completion()
919 ep_ring->dequeue = ep_ring->deq_seg->trbs; in update_ring_for_set_deq_completion()
922 while (ep_ring->dequeue != dev->eps[ep_index].queued_deq_ptr) { in update_ring_for_set_deq_completion()
924 ep_ring->num_trbs_free++; in update_ring_for_set_deq_completion()
925 ep_ring->dequeue++; in update_ring_for_set_deq_completion()
926 if (last_trb(xhci, ep_ring, ep_ring->deq_seg, in update_ring_for_set_deq_completion()
927 ep_ring->dequeue)) { in update_ring_for_set_deq_completion()
928 if (ep_ring->dequeue == in update_ring_for_set_deq_completion()
931 ep_ring->deq_seg = ep_ring->deq_seg->next; in update_ring_for_set_deq_completion()
932 ep_ring->dequeue = ep_ring->deq_seg->trbs; in update_ring_for_set_deq_completion()
934 if (ep_ring->dequeue == dequeue_temp) { in update_ring_for_set_deq_completion()
942 ep_ring->num_trbs_free = num_trbs_free_temp; in update_ring_for_set_deq_completion()
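
After a successful Set TR Dequeue Pointer command, update_ring_for_set_deq_completion() advances the software dequeue until it matches the pointer the driver handed to hardware (queued_deq_ptr), counting each hop in num_trbs_free; lines 917-919 first hop over a link TRB if the dequeue starts on one, and if the walk laps the whole ring without a match, line 942 restores the saved count. A condensed sketch of that loop, with the hardware pointer passed in directly:

    /* Returns true once the software dequeue reaches hw_deq, false if a
     * full lap of the ring never found it (the counter is then restored). */
    static bool sync_deq_to_hw(struct ring *ring, struct trb *hw_deq)
    {
        unsigned int free_saved = ring->num_trbs_free;
        struct trb *start = ring->dequeue;

        while (ring->dequeue != hw_deq) {
            ring->num_trbs_free++;
            next_trb(&ring->deq_seg, &ring->dequeue);
            if (ring->dequeue == start) {
                ring->num_trbs_free = free_saved;   /* undo the counting */
                return false;
            }
        }
        return true;
    }
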
958 struct xhci_ring *ep_ring; in xhci_handle_cmd_set_deq() local
969 ep_ring = xhci_stream_id_to_ring(dev, ep_index, stream_id); in xhci_handle_cmd_set_deq()
970 if (!ep_ring) { in xhci_handle_cmd_set_deq()
1031 ep_ring, ep_index); in xhci_handle_cmd_set_deq()
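
Both this handler (line 969) and prepare_transfer() (line 2904) resolve the target ring from an (ep_index, stream_id) pair. A sketch of that lookup's semantics: stream id 0 selects the endpoint's ordinary transfer ring, while nonzero ids index the per-endpoint stream array; struct endpoint here is an assumed simplification of the kernel's xhci_virt_ep/stream_info pair:

    struct endpoint {
        struct ring  *ring;             /* used when streams are disabled */
        struct ring **stream_rings;     /* indexed by stream id */
        unsigned int  num_streams;
    };

    /* Shape of xhci_stream_id_to_ring(); callers must handle NULL, as
     * the !ep_ring checks at lines 970 and 2905 do. */
    static struct ring *stream_id_to_ring(struct endpoint *ep,
                                          unsigned int stream_id)
    {
        if (stream_id == 0)
            return ep->ring;
        if (stream_id >= ep->num_streams)
            return NULL;                /* bogus stream id from hardware */
        return ep->stream_rings[stream_id];
    }
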
1806 struct xhci_ring *ep_ring; in finish_td() local
1818 ep_ring = xhci_dma_to_transfer_ring(ep, le64_to_cpu(event->buffer)); in finish_td()
1844 ep_ring->stream_id, td, event_trb); in finish_td()
1847 while (ep_ring->dequeue != td->last_trb) in finish_td()
1848 inc_deq(xhci, ep_ring); in finish_td()
1849 inc_deq(xhci, ep_ring); in finish_td()
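
Lines 1847-1849 are the TD-retirement idiom: advance dequeue up to td->last_trb, then once more so the ring steps past the whole TD; skip_isoc_td() repeats it verbatim at lines 2125-2127. As a sketch (the real inc_deq() also maintains num_trbs_free and the consumer cycle state, elided here):

    /* Retire a completed TD: walk the dequeue pointer over every TRB of
     * the TD, then one extra step to move past last_trb itself. */
    static void retire_td(struct ring *ring, struct trb *last_trb)
    {
        while (ring->dequeue != last_trb)
            next_trb(&ring->deq_seg, &ring->dequeue);
        next_trb(&ring->deq_seg, &ring->dequeue);
    }
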
1901 struct xhci_ring *ep_ring; in process_ctrl_td() local
1910 ep_ring = xhci_dma_to_transfer_ring(ep, le64_to_cpu(event->buffer)); in process_ctrl_td()
1916 if (event_trb == ep_ring->dequeue) { in process_ctrl_td()
1935 if (event_trb == ep_ring->dequeue || event_trb == td->last_trb) in process_ctrl_td()
1944 if (event_trb != ep_ring->dequeue && event_trb != td->last_trb) in process_ctrl_td()
1961 if (event_trb != ep_ring->dequeue && in process_ctrl_td()
1975 if (event_trb != ep_ring->dequeue) { in process_ctrl_td()
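
process_ctrl_td() distinguishes the three stages of a control transfer purely by TRB position, as the comparisons at lines 1916-1975 show: an event on ep_ring->dequeue hit the setup TRB, one on td->last_trb hit the status TRB, and anything in between belongs to the data stage. A sketch of that classification:

    enum ctrl_stage { STAGE_SETUP, STAGE_DATA, STAGE_STATUS };

    /* Mirror of the position tests in process_ctrl_td(). */
    static enum ctrl_stage classify_ctrl_event(const struct ring *ring,
                                               const struct trb *event_trb,
                                               const struct trb *last_trb)
    {
        if (event_trb == ring->dequeue)
            return STAGE_SETUP;         /* first TRB of the TD */
        if (event_trb == last_trb)
            return STAGE_STATUS;        /* final TRB of the TD */
        return STAGE_DATA;
    }
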
2018 struct xhci_ring *ep_ring; in process_isoc_td() local
2028 ep_ring = xhci_dma_to_transfer_ring(ep, le64_to_cpu(event->buffer)); in process_isoc_td()
2085 for (cur_trb = ep_ring->dequeue, in process_isoc_td()
2086 cur_seg = ep_ring->deq_seg; cur_trb != event_trb; in process_isoc_td()
2087 next_trb(xhci, ep_ring, &cur_seg, &cur_trb)) { in process_isoc_td()
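
The loop at lines 2085-2087, like its bulk/interrupt twin at lines 2231-2233, walks from the current dequeue to the TRB that raised the event, accumulating the length of every TRB completed on the way (the kernel versions skip link and no-op TRBs, omitted here). A sketch, with trb_len() reading the transfer-length field (bits 16:0 of the third dword in the xHCI TRB layout):

    static unsigned int trb_len(const struct trb *trb)
    {
        return trb->field[2] & 0x1ffff;     /* TRB transfer length */
    }

    /* Sum the bytes of every TRB that completed before the event TRB. */
    static unsigned int bytes_before_event(const struct ring *ring,
                                           const struct trb *event_trb)
    {
        struct seg *seg = ring->deq_seg;
        struct trb *trb = ring->dequeue;
        unsigned int len = 0;

        for (; trb != event_trb; next_trb(&seg, &trb))
            len += trb_len(trb);
        return len;
    }
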
2108 struct xhci_ring *ep_ring; in skip_isoc_td() local
2113 ep_ring = xhci_dma_to_transfer_ring(ep, le64_to_cpu(event->buffer)); in skip_isoc_td()
2125 while (ep_ring->dequeue != td->last_trb) in skip_isoc_td()
2126 inc_deq(xhci, ep_ring); in skip_isoc_td()
2127 inc_deq(xhci, ep_ring); in skip_isoc_td()
2139 struct xhci_ring *ep_ring; in process_bulk_intr_td() local
2144 ep_ring = xhci_dma_to_transfer_ring(ep, le64_to_cpu(event->buffer)); in process_bulk_intr_td()
2231 for (cur_trb = ep_ring->dequeue, cur_seg = ep_ring->deq_seg; in process_bulk_intr_td()
2233 next_trb(xhci, ep_ring, &cur_seg, &cur_trb)) { in process_bulk_intr_td()
2263 struct xhci_ring *ep_ring; in handle_tx_event() local
2300 ep_ring = xhci_dma_to_transfer_ring(ep, le64_to_cpu(event->buffer)); in handle_tx_event()
2302 if (!ep_ring || in handle_tx_event()
2322 list_for_each(tmp, &ep_ring->td_list) in handle_tx_event()
2387 if (!list_empty(&ep_ring->td_list)) in handle_tx_event()
2395 if (!list_empty(&ep_ring->td_list)) in handle_tx_event()
2433 if (list_empty(&ep_ring->td_list)) { in handle_tx_event()
2467 td = list_entry(ep_ring->td_list.next, struct xhci_td, td_list); in handle_tx_event()
2472 event_seg = trb_in_td(xhci, ep_ring->deq_seg, ep_ring->dequeue, in handle_tx_event()
2497 ep_ring->last_td_was_short) { in handle_tx_event()
2498 ep_ring->last_td_was_short = false; in handle_tx_event()
2508 trb_in_td(xhci, ep_ring->deq_seg, in handle_tx_event()
2509 ep_ring->dequeue, td->last_trb, in handle_tx_event()
2518 ep_ring->last_td_was_short = true; in handle_tx_event()
2520 ep_ring->last_td_was_short = false; in handle_tx_event()
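
handle_tx_event() maps the DMA address reported by the event back to a TD: line 2467 picks the oldest TD on the ring, and trb_in_td() (lines 2472, 2508) scans the segments between the ring's dequeue and that TD's last TRB for one whose TRB array contains the address; lines 2497-2520 layer short-packet handling on top, remembering in last_td_was_short that the previous TD ended early. A simplified per-segment containment test; seg_dma, the segment's bus address, is an assumed parameter (the kernel keeps it in xhci_segment.dma):

    #include <stdint.h>

    typedef uint64_t dma_addr_t;

    /* Does this segment contain the event's DMA address? True when the
     * address falls between the segment's first and last TRB. */
    static bool seg_contains_dma(dma_addr_t seg_dma, dma_addr_t event_dma)
    {
        dma_addr_t last = seg_dma + (TRBS_PER_SEGMENT - 1) * sizeof(struct trb);

        return event_dma >= seg_dma && event_dma <= last;
    }
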
2804 static int prepare_ring(struct xhci_hcd *xhci, struct xhci_ring *ep_ring, in prepare_ring() argument
2838 if (room_on_ring(xhci, ep_ring, num_trbs)) in prepare_ring()
2841 if (ep_ring == xhci->cmd_ring) { in prepare_ring()
2848 num_trbs_needed = num_trbs - ep_ring->num_trbs_free; in prepare_ring()
2849 if (xhci_ring_expansion(xhci, ep_ring, num_trbs_needed, in prepare_ring()
2856 if (enqueue_is_link_trb(ep_ring)) { in prepare_ring()
2857 struct xhci_ring *ring = ep_ring; in prepare_ring()
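
prepare_ring() is the gatekeeper for every enqueue: line 2838 checks for room, lines 2841-2849 grow the ring when it is full (refusing for the command ring, which is never expanded), and lines 2856-2857 chase the enqueue pointer past a link TRB before any TRB is written. A control-flow sketch; ring_has_room() and ring_expand() are assumed helpers standing in for room_on_ring() and xhci_ring_expansion():

    #include <errno.h>

    bool ring_has_room(struct ring *ring, unsigned int num_trbs);
    int  ring_expand(struct ring *ring, unsigned int extra_trbs);

    static int prepare_ring_sketch(struct ring *ring, bool is_cmd_ring,
                                   unsigned int num_trbs)
    {
        while (!ring_has_room(ring, num_trbs)) {
            if (is_cmd_ring)
                return -ENOMEM;         /* the command ring cannot grow */
            if (ring_expand(ring, num_trbs - ring->num_trbs_free))
                return -ENOMEM;         /* segment allocation failed */
        }
        /* The real function then checks enqueue_is_link_trb() and walks
         * enqueue into the next segment before the caller starts
         * queueing. */
        return 0;
    }
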
2901 struct xhci_ring *ep_ring; in prepare_transfer() local
2904 ep_ring = xhci_stream_id_to_ring(xdev, ep_index, stream_id); in prepare_transfer()
2905 if (!ep_ring) { in prepare_transfer()
2911 ret = prepare_ring(xhci, ep_ring, in prepare_transfer()
2931 list_add_tail(&td->td_list, &ep_ring->td_list); in prepare_transfer()
2932 td->start_seg = ep_ring->enq_seg; in prepare_transfer()
2933 td->first_trb = ep_ring->enqueue; in prepare_transfer()
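
Once prepare_ring() succeeds, prepare_transfer() anchors the new TD: line 2931 links it onto the ring's td_list (which handle_tx_event later searches), and lines 2932-2933 record the enqueue position as the TD's start before a single TRB is written. A sketch with a pared-down td struct (the kernel's xhci_td also carries a list_head and a pointer to the URB):

    struct td {
        struct seg *start_seg;      /* segment holding the TD's first TRB */
        struct trb *first_trb;      /* where the TD begins */
        struct trb *last_trb;       /* set once the final TRB is queued */
    };

    /* The bookkeeping at the end of prepare_transfer(). */
    static void record_td_start(const struct ring *ring, struct td *td)
    {
        td->start_seg = ring->enq_seg;
        td->first_trb = ring->enqueue;
    }
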
3087 struct xhci_ring *ep_ring; in queue_bulk_sg_tx() local
3104 ep_ring = xhci_urb_to_transfer_ring(xhci, urb); in queue_bulk_sg_tx()
3105 if (!ep_ring) in queue_bulk_sg_tx()
3141 start_trb = &ep_ring->enqueue->generic; in queue_bulk_sg_tx()
3142 start_cycle = ep_ring->cycle_state; in queue_bulk_sg_tx()
3176 field |= ep_ring->cycle_state; in queue_bulk_sg_tx()
3184 td->last_trb = ep_ring->enqueue; in queue_bulk_sg_tx()
3188 urb_priv->td[1]->last_trb = ep_ring->enqueue; in queue_bulk_sg_tx()
3217 queue_trb(xhci, ep_ring, more_trbs_coming, in queue_bulk_sg_tx()
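
Every queue path snapshots the enqueue TRB and cycle state before writing anything (lines 3141-3142 here, then 3326-3327, 3454-3455, and 3722-3723 below): TRBs after the first are written with the live cycle bit, the first TRB is written with its cycle bit inverted so hardware ignores it, and giveback_first_trb() publishes the whole TD at the end by writing the saved start_cycle into that first TRB. A sketch of that final handoff:

    /* Publish a fully written TD: flipping the first TRB's cycle bit to
     * its correct value hands the entire TD to the hardware at once.
     * The real code issues a write barrier (wmb()) first and then rings
     * the endpoint doorbell. */
    static void giveback_first_trb_sketch(struct trb *start_trb,
                                          unsigned int start_cycle)
    {
        if (start_cycle)
            start_trb->field[3] |= TRB_CYCLE;
        else
            start_trb->field[3] &= ~TRB_CYCLE;
    }
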
3258 struct xhci_ring *ep_ring; in xhci_queue_bulk_tx() local
3277 ep_ring = xhci_urb_to_transfer_ring(xhci, urb); in xhci_queue_bulk_tx()
3278 if (!ep_ring) in xhci_queue_bulk_tx()
3326 start_trb = &ep_ring->enqueue->generic; in xhci_queue_bulk_tx()
3327 start_cycle = ep_ring->cycle_state; in xhci_queue_bulk_tx()
3352 field |= ep_ring->cycle_state; in xhci_queue_bulk_tx()
3360 td->last_trb = ep_ring->enqueue; in xhci_queue_bulk_tx()
3364 urb_priv->td[1]->last_trb = ep_ring->enqueue; in xhci_queue_bulk_tx()
3385 queue_trb(xhci, ep_ring, more_trbs_coming, in xhci_queue_bulk_tx()
3410 struct xhci_ring *ep_ring; in xhci_queue_ctrl_tx() local
3420 ep_ring = xhci_urb_to_transfer_ring(xhci, urb); in xhci_queue_ctrl_tx()
3421 if (!ep_ring) in xhci_queue_ctrl_tx()
3454 start_trb = &ep_ring->enqueue->generic; in xhci_queue_ctrl_tx()
3455 start_cycle = ep_ring->cycle_state; in xhci_queue_ctrl_tx()
3475 queue_trb(xhci, ep_ring, true, in xhci_queue_ctrl_tx()
3501 queue_trb(xhci, ep_ring, true, in xhci_queue_ctrl_tx()
3505 field | ep_ring->cycle_state); in xhci_queue_ctrl_tx()
3509 td->last_trb = ep_ring->enqueue; in xhci_queue_ctrl_tx()
3517 queue_trb(xhci, ep_ring, false, in xhci_queue_ctrl_tx()
3522 field | TRB_IOC | TRB_TYPE(TRB_STATUS) | ep_ring->cycle_state); in xhci_queue_ctrl_tx()
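
xhci_queue_ctrl_tx() emits at most three TRBs: a setup-stage TRB (line 3475), an optional data-stage TRB (lines 3501-3505), and a status-stage TRB with TRB_IOC set (lines 3517-3522) so completion always raises an event. A sketch of the sequence; queue_trb_sketch() is an assumed helper writing one TRB at the enqueue position, and the stage type values are the xHCI-defined ones:

    #define TRB_SETUP   2           /* xHCI setup-stage TRB type */
    #define TRB_DATA    3           /* data-stage TRB type */
    #define TRB_STATUS  4           /* status-stage TRB type */
    #define TRB_IOC     (1 << 5)    /* interrupt on completion */

    void queue_trb_sketch(struct ring *ring, unsigned int type,
                          unsigned int flags);      /* assumed helper */

    /* The three-stage shape of a control transfer. */
    static void queue_ctrl_sketch(struct ring *ring, bool has_data_stage)
    {
        queue_trb_sketch(ring, TRB_SETUP, 0);
        if (has_data_stage)
            queue_trb_sketch(ring, TRB_DATA, 0);
        queue_trb_sketch(ring, TRB_STATUS, TRB_IOC);
    }
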
3698 struct xhci_ring *ep_ring; in xhci_queue_isoc_tx() local
3713 ep_ring = xhci->devs[slot_id]->eps[ep_index].ring; in xhci_queue_isoc_tx()
3722 start_trb = &ep_ring->enqueue->generic; in xhci_queue_isoc_tx()
3723 start_cycle = ep_ring->cycle_state; in xhci_queue_isoc_tx()
3787 field |= ep_ring->cycle_state; in xhci_queue_isoc_tx()
3792 field |= ep_ring->cycle_state; in xhci_queue_isoc_tx()
3807 td->last_trb = ep_ring->enqueue; in xhci_queue_isoc_tx()
3834 queue_trb(xhci, ep_ring, more_trbs_coming, in xhci_queue_isoc_tx()
3877 urb_priv->td[0]->last_trb = ep_ring->enqueue; in xhci_queue_isoc_tx()
3879 td_to_noop(xhci, ep_ring, urb_priv->td[0], true); in xhci_queue_isoc_tx()
3882 ep_ring->enqueue = urb_priv->td[0]->first_trb; in xhci_queue_isoc_tx()
3883 ep_ring->enq_seg = urb_priv->td[0]->start_seg; in xhci_queue_isoc_tx()
3884 ep_ring->cycle_state = start_cycle; in xhci_queue_isoc_tx()
3885 ep_ring->num_trbs_free = ep_ring->num_trbs_free_temp; in xhci_queue_isoc_tx()
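
Lines 3877-3885 are the error path for a partially queued isoc URB: the already-written TRBs are turned back into no-ops via td_to_noop(), and the ring's enqueue segment, pointer, cycle state, and free-TRB count are restored from values captured before queueing began. A sketch of that rewind; ring_snapshot is an assumed container for the saved state:

    struct ring_snapshot {
        struct seg  *enq_seg;
        struct trb  *enqueue;
        unsigned int cycle_state;
        unsigned int num_trbs_free;
    };

    /* Rewind a partially queued TD; its TRBs must already be no-op'ed. */
    static void rollback_enqueue(struct ring *ring,
                                 const struct ring_snapshot *snap)
    {
        ring->enq_seg = snap->enq_seg;          /* back to the TD's start */
        ring->enqueue = snap->enqueue;
        ring->cycle_state = snap->cycle_state;  /* undo any cycle toggles */
        ring->num_trbs_free = snap->num_trbs_free;
    }
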
3901 struct xhci_ring *ep_ring; in xhci_queue_isoc_tx_prepare() local
3913 ep_ring = xdev->eps[ep_index].ring; in xhci_queue_isoc_tx_prepare()
3924 ret = prepare_ring(xhci, ep_ring, le32_to_cpu(ep_ctx->ep_info) & EP_STATE_MASK, in xhci_queue_isoc_tx_prepare()
3955 if (HCC_CFC(xhci->hcc_params) && !list_empty(&ep_ring->td_list)) { in xhci_queue_isoc_tx_prepare()
3989 ep_ring->num_trbs_free_temp = ep_ring->num_trbs_free; in xhci_queue_isoc_tx_prepare()
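
The matching save happens here: line 3989 stashes num_trbs_free in num_trbs_free_temp before any isoc TRB is queued (the enqueue pointer and cycle state are captured inside xhci_queue_isoc_tx itself). The counterpart to the rollback sketch above:

    /* Capture the enqueue-side state that rollback_enqueue() restores. */
    static void snapshot_enqueue(const struct ring *ring,
                                 struct ring_snapshot *snap)
    {
        snap->enq_seg = ring->enq_seg;
        snap->enqueue = ring->enqueue;
        snap->cycle_state = ring->cycle_state;
        snap->num_trbs_free = ring->num_trbs_free;
    }
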