Lines Matching refs:ep_index
327 unsigned int ep_index, in xhci_ring_ep_doorbell() argument
331 struct xhci_virt_ep *ep = &xhci->devs[slot_id]->eps[ep_index]; in xhci_ring_ep_doorbell()
343 writel(DB_VALUE(ep_index, stream_id), db_addr); in xhci_ring_ep_doorbell()
352 unsigned int ep_index) in ring_doorbell_for_active_rings() argument
357 ep = &xhci->devs[slot_id]->eps[ep_index]; in ring_doorbell_for_active_rings()
362 xhci_ring_ep_doorbell(xhci, slot_id, ep_index, 0); in ring_doorbell_for_active_rings()
370 xhci_ring_ep_doorbell(xhci, slot_id, ep_index, in ring_doorbell_for_active_rings()
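The doorbell helpers above (the references appear to come from drivers/usb/host/xhci-ring.c) turn ep_index into a doorbell target. A minimal sketch of that encoding, assuming DB_VALUE() matches its definition in xhci.h; the wrapper name below is illustrative:

    #include <stdint.h>

    /*
     * Doorbell targets are 1-based: target 0 addresses the command ring on
     * the slot-0 doorbell, so an endpoint's target is ep_index + 1. A
     * non-zero stream ID goes into bits 31:16 of the doorbell write.
     */
    #define DB_VALUE(ep, stream)    ((((ep) + 1) & 0xff) | ((stream) << 16))

    static inline uint32_t ep_doorbell_value(unsigned int ep_index,
                                             unsigned int stream_id)
    {
            return DB_VALUE(ep_index, stream_id);
    }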
376 unsigned int slot_id, unsigned int ep_index, in xhci_triad_to_transfer_ring() argument
381 ep = &xhci->devs[slot_id]->eps[ep_index]; in xhci_triad_to_transfer_ring()
390 slot_id, ep_index); in xhci_triad_to_transfer_ring()
401 slot_id, ep_index, in xhci_triad_to_transfer_ring()
437 unsigned int slot_id, unsigned int ep_index, in xhci_find_new_dequeue_state() argument
442 struct xhci_virt_ep *ep = &dev->eps[ep_index]; in xhci_find_new_dequeue_state()
452 ep_index, stream_id); in xhci_find_new_dequeue_state()
470 = xhci_get_ep_ctx(xhci, dev->out_ctx, ep_index); in xhci_find_new_dequeue_state()
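xhci_triad_to_transfer_ring() resolves the (slot_id, ep_index, stream_id) triple into a transfer ring before xhci_find_new_dequeue_state() walks it. A simplified sketch of that lookup; the struct below is illustrative, not the driver's xhci_virt_ep layout:

    #include <stddef.h>

    struct ep_sketch {
            void *default_ring;     /* ring used when the endpoint has no streams */
            void **stream_rings;    /* per-stream rings, indexed by stream_id */
            unsigned int num_streams;
    };

    /* ep_index selects the endpoint within the slot's eps[] array. */
    static inline void *triad_to_ring(struct ep_sketch *eps,
                                      unsigned int ep_index,
                                      unsigned int stream_id)
    {
            struct ep_sketch *ep = &eps[ep_index];

            if (stream_id == 0)
                    return ep->default_ring;
            if (!ep->stream_rings || stream_id >= ep->num_streams)
                    return NULL;    /* the driver warns and bails out here */
            return ep->stream_rings[stream_id];
    }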
639 unsigned int ep_index; in xhci_handle_cmd_stop_ep() local
657 ep_index = TRB_TO_EP_INDEX(le32_to_cpu(trb->generic.field[3])); in xhci_handle_cmd_stop_ep()
658 ep = &xhci->devs[slot_id]->eps[ep_index]; in xhci_handle_cmd_stop_ep()
663 ring_doorbell_for_active_rings(xhci, slot_id, ep_index); in xhci_handle_cmd_stop_ep()
702 xhci_find_new_dequeue_state(xhci, slot_id, ep_index, in xhci_handle_cmd_stop_ep()
720 xhci_queue_new_dequeue_state(xhci, slot_id, ep_index, in xhci_handle_cmd_stop_ep()
725 ring_doorbell_for_active_rings(xhci, slot_id, ep_index); in xhci_handle_cmd_stop_ep()
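xhci_handle_cmd_stop_ep() and the other command-completion handlers recover ep_index from the command TRB itself rather than carrying it separately. A sketch of that decode, assuming TRB_TO_EP_INDEX() matches its xhci.h definition; the wrapper name is illustrative:

    #include <stdint.h>

    /* Endpoint ID sits in bits 20:16 of the TRB's fourth dword, 1-based. */
    #define TRB_TO_EP_INDEX(p)      ((((p) & (0x1f << 16)) >> 16) - 1)

    static inline unsigned int cmd_trb_ep_index(uint32_t trb_field3)
    {
            /* trb_field3 is le32_to_cpu(trb->generic.field[3]) in the driver. */
            return TRB_TO_EP_INDEX(trb_field3);
    }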
772 int slot_id, int ep_index) in xhci_kill_endpoint_urbs() argument
778 ep = &xhci->devs[slot_id]->eps[ep_index]; in xhci_kill_endpoint_urbs()
787 slot_id, ep_index, stream_id + 1); in xhci_kill_endpoint_urbs()
797 slot_id, ep_index); in xhci_kill_endpoint_urbs()
902 unsigned int ep_index) in update_ring_for_set_deq_completion() argument
922 while (ep_ring->dequeue != dev->eps[ep_index].queued_deq_ptr) { in update_ring_for_set_deq_completion()
929 dev->eps[ep_index].queued_deq_ptr) in update_ring_for_set_deq_completion()
956 unsigned int ep_index; in xhci_handle_cmd_set_deq() local
964 ep_index = TRB_TO_EP_INDEX(le32_to_cpu(trb->generic.field[3])); in xhci_handle_cmd_set_deq()
967 ep = &dev->eps[ep_index]; in xhci_handle_cmd_set_deq()
969 ep_ring = xhci_stream_id_to_ring(dev, ep_index, stream_id); in xhci_handle_cmd_set_deq()
977 ep_ctx = xhci_get_ep_ctx(xhci, dev->out_ctx, ep_index); in xhci_handle_cmd_set_deq()
1031 ep_ring, ep_index); in xhci_handle_cmd_set_deq()
1040 dev->eps[ep_index].ep_state &= ~SET_DEQ_PENDING; in xhci_handle_cmd_set_deq()
1041 dev->eps[ep_index].queued_deq_seg = NULL; in xhci_handle_cmd_set_deq()
1042 dev->eps[ep_index].queued_deq_ptr = NULL; in xhci_handle_cmd_set_deq()
1044 ring_doorbell_for_active_rings(xhci, slot_id, ep_index); in xhci_handle_cmd_set_deq()
1050 unsigned int ep_index; in xhci_handle_cmd_reset_ep() local
1052 ep_index = TRB_TO_EP_INDEX(le32_to_cpu(trb->generic.field[3])); in xhci_handle_cmd_reset_ep()
1078 xhci->devs[slot_id]->eps[ep_index].ep_state &= ~EP_HALTED; in xhci_handle_cmd_reset_ep()
1109 unsigned int ep_index; in xhci_handle_cmd_config_ep() local
1131 ep_index = xhci_last_valid_endpoint(add_flags) - 1; in xhci_handle_cmd_config_ep()
1140 ep_index != (unsigned int) -1 && in xhci_handle_cmd_config_ep()
1142 ep_state = virt_dev->eps[ep_index].ep_state; in xhci_handle_cmd_config_ep()
1148 ep_index, ep_state); in xhci_handle_cmd_config_ep()
1150 virt_dev->eps[ep_index].ep_state &= ~EP_HALTED; in xhci_handle_cmd_config_ep()
1151 ring_doorbell_for_active_rings(xhci, slot_id, ep_index); in xhci_handle_cmd_config_ep()
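xhci_handle_cmd_config_ep() derives ep_index from the input context's add flags instead of from a TRB field. A sketch of that arithmetic, assuming xhci_last_valid_endpoint() is fls(add_flags) - 1 as in xhci.h; it also assumes an endpoint bit is set, which the handler's ep_index != -1 check is there to catch:

    #include <stdint.h>

    /*
     * Bit N of the add flags corresponds to device context index (DCI) N:
     * bit 0 is the slot context, bit 1 is EP0, and DCI - 1 is the 0-based
     * ep_index. The highest set bit therefore names the last added endpoint.
     */
    static inline unsigned int last_added_ep_index(uint32_t add_flags)
    {
            unsigned int last_dci = 31 - __builtin_clz(add_flags);  /* fls() - 1 */

            return last_dci - 1;
    }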
1734 unsigned int slot_id, unsigned int ep_index, in xhci_cleanup_halted_endpoint() argument
1738 struct xhci_virt_ep *ep = &xhci->devs[slot_id]->eps[ep_index]; in xhci_cleanup_halted_endpoint()
1747 xhci_queue_reset_ep(xhci, command, slot_id, ep_index); in xhci_cleanup_halted_endpoint()
1748 xhci_cleanup_stalled_ring(xhci, ep_index, td); in xhci_cleanup_halted_endpoint()
1807 int ep_index; in finish_td() local
1816 ep_index = TRB_TO_EP_ID(le32_to_cpu(event->flags)) - 1; in finish_td()
1818 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in finish_td()
1840 xhci_cleanup_halted_endpoint(xhci, slot_id, ep_index, in finish_td()
1900 int ep_index; in process_ctrl_td() local
1906 ep_index = TRB_TO_EP_ID(le32_to_cpu(event->flags)) - 1; in process_ctrl_td()
1908 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in process_ctrl_td()
1940 trb_comp_code, ep_index); in process_ctrl_td()
2229 int ep_index; in handle_tx_event() local
2262 ep_index = TRB_TO_EP_ID(le32_to_cpu(event->flags)) - 1; in handle_tx_event()
2263 ep = &xdev->eps[ep_index]; in handle_tx_event()
2265 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in handle_tx_event()
2352 ep_index); in handle_tx_event()
2360 ep_index); in handle_tx_event()
2404 ep_index); in handle_tx_event()
2467 "comp_code %u\n", ep_index, in handle_tx_event()
2852 unsigned int ep_index, in prepare_transfer() argument
2863 struct xhci_ep_ctx *ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in prepare_transfer()
2865 ep_ring = xhci_stream_id_to_ring(xdev, ep_index, stream_id); in prepare_transfer()
2951 unsigned int ep_index, unsigned int stream_id, int start_cycle, in giveback_first_trb() argument
2963 xhci_ring_ep_doorbell(xhci, slot_id, ep_index, stream_id); in giveback_first_trb()
2973 struct urb *urb, int slot_id, unsigned int ep_index) in xhci_queue_intr_tx() argument
2976 xhci->devs[slot_id]->out_ctx, ep_index); in xhci_queue_intr_tx()
3000 return xhci_queue_bulk_tx(xhci, mem_flags, urb, slot_id, ep_index); in xhci_queue_intr_tx()
3046 struct urb *urb, int slot_id, unsigned int ep_index) in queue_bulk_sg_tx() argument
3075 ep_index, urb->stream_id, in queue_bulk_sg_tx()
3089 ep_index, urb->stream_id, in queue_bulk_sg_tx()
3210 giveback_first_trb(xhci, slot_id, ep_index, urb->stream_id, in queue_bulk_sg_tx()
3217 struct urb *urb, int slot_id, unsigned int ep_index) in xhci_queue_bulk_tx() argument
3236 return queue_bulk_sg_tx(xhci, mem_flags, urb, slot_id, ep_index); in xhci_queue_bulk_tx()
3260 ep_index, urb->stream_id, in xhci_queue_bulk_tx()
3274 ep_index, urb->stream_id, in xhci_queue_bulk_tx()
3362 giveback_first_trb(xhci, slot_id, ep_index, urb->stream_id, in xhci_queue_bulk_tx()
3369 struct urb *urb, int slot_id, unsigned int ep_index) in xhci_queue_ctrl_tx() argument
3402 ep_index, urb->stream_id, in xhci_queue_ctrl_tx()
3485 giveback_first_trb(xhci, slot_id, ep_index, 0, in xhci_queue_ctrl_tx()
3566 struct urb *urb, int slot_id, unsigned int ep_index) in xhci_queue_isoc_tx() argument
3581 ep_ring = xhci->devs[slot_id]->eps[ep_index].ring; in xhci_queue_isoc_tx()
3618 ret = prepare_transfer(xhci, xhci->devs[slot_id], ep_index, in xhci_queue_isoc_tx()
3714 giveback_first_trb(xhci, slot_id, ep_index, urb->stream_id, in xhci_queue_isoc_tx()
3749 struct urb *urb, int slot_id, unsigned int ep_index) in xhci_queue_isoc_tx_prepare() argument
3761 ep_ring = xdev->eps[ep_index].ring; in xhci_queue_isoc_tx_prepare()
3762 ep_ctx = xhci_get_ep_ctx(xhci, xdev->out_ctx, ep_index); in xhci_queue_isoc_tx_prepare()
3807 return xhci_queue_isoc_tx(xhci, mem_flags, urb, slot_id, ep_index); in xhci_queue_isoc_tx_prepare()
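All of the queue_*_tx() paths above receive an ep_index already computed by the caller from the URB's endpoint descriptor. A sketch of that numbering convention, assuming it matches xhci_get_endpoint_index() in xhci.c; the parameter names are illustrative:

    #include <stdbool.h>

    /*
     * ep_index 0 is the default control endpoint; endpoint number N then
     * maps to 2*N - 1 for OUT and 2*N for IN, mirroring the xHCI device
     * context layout (ep_index + 1 == device context index).
     */
    static inline unsigned int ep_to_index(unsigned int ep_num, bool is_in,
                                           bool is_control)
    {
            if (is_control)
                    return ep_num * 2;
            return ep_num * 2 + (is_in ? 1 : 0) - 1;
    }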
3920 int slot_id, unsigned int ep_index, int suspend) in xhci_queue_stop_endpoint() argument
3923 u32 trb_ep_index = EP_ID_FOR_TRB(ep_index); in xhci_queue_stop_endpoint()
3933 unsigned int slot_id, unsigned int ep_index, in xhci_queue_new_dequeue_state() argument
3939 u32 trb_ep_index = EP_ID_FOR_TRB(ep_index); in xhci_queue_new_dequeue_state()
3964 ep = &xhci->devs[slot_id]->eps[ep_index]; in xhci_queue_new_dequeue_state()
4000 int slot_id, unsigned int ep_index) in xhci_queue_reset_ep() argument
4003 u32 trb_ep_index = EP_ID_FOR_TRB(ep_index); in xhci_queue_reset_ep()
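The command-queueing helpers at the end of the listing perform the inverse conversion, packing ep_index back into the Endpoint ID field of a stop-endpoint, set-dequeue or reset-endpoint command TRB. A sketch, assuming EP_ID_FOR_TRB() matches its xhci.h definition:

    #include <stdint.h>

    /* Convert a 0-based ep_index to the 1-based Endpoint ID in bits 20:16. */
    #define EP_ID_FOR_TRB(p)        ((((p) + 1) & 0x1f) << 16)

    static inline uint32_t ep_index_to_trb_field(unsigned int ep_index)
    {
            return EP_ID_FOR_TRB(ep_index);
    }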