Lines Matching refs:rpipe

110 static int __wa_xfer_delayed_run(struct wa_rpipe *rpipe, int *dto_waiting);
237 struct wa_rpipe *rpipe; in wa_check_for_delayed_rpipes() local
241 rpipe = list_first_entry(&wa->rpipe_delayed_list, in wa_check_for_delayed_rpipes()
243 __wa_xfer_delayed_run(rpipe, &dto_waiting); in wa_check_for_delayed_rpipes()
248 le16_to_cpu(rpipe->descr.wRPipeIndex)); in wa_check_for_delayed_rpipes()
249 list_del_init(&rpipe->list_node); in wa_check_for_delayed_rpipes()
256 static void wa_add_delayed_rpipe(struct wahc *wa, struct wa_rpipe *rpipe) in wa_add_delayed_rpipe() argument
262 if (list_empty(&rpipe->list_node)) { in wa_add_delayed_rpipe()
264 __func__, le16_to_cpu(rpipe->descr.wRPipeIndex)); in wa_add_delayed_rpipe()
265 list_add_tail(&rpipe->list_node, &wa->rpipe_delayed_list); in wa_add_delayed_rpipe()
463 struct wa_rpipe *rpipe = xfer->ep->hcpriv; in __wa_xfer_abort_cb() local
486 wa_xfer_delayed_run(rpipe); in __wa_xfer_abort_cb()
513 struct wa_rpipe *rpipe = xfer->ep->hcpriv; in __wa_xfer_abort() local
520 b->cmd.wRPipe = rpipe->descr.wRPipeIndex; in __wa_xfer_abort()
599 struct wa_rpipe *rpipe = xfer->ep->hcpriv; in __wa_xfer_setup_sizes() local
601 switch (rpipe->descr.bmAttribute & 0x3) { in __wa_xfer_setup_sizes()
623 maxpktsize = le16_to_cpu(rpipe->descr.wMaxPacketSize); in __wa_xfer_setup_sizes()
624 xfer->seg_size = le16_to_cpu(rpipe->descr.wBlocks) in __wa_xfer_setup_sizes()
637 if ((rpipe->descr.bmAttribute & 0x3) == USB_ENDPOINT_XFER_ISOC) { in __wa_xfer_setup_sizes()
697 struct wa_rpipe *rpipe = xfer->ep->hcpriv; in __wa_xfer_setup_hdr0() local
703 xfer_hdr0->wRPipe = rpipe->descr.wRPipeIndex; in __wa_xfer_setup_hdr0()
752 struct wa_rpipe *rpipe; in wa_seg_dto_cb() local
846 rpipe = xfer->ep->hcpriv; in wa_seg_dto_cb()
855 rpipe_ready = rpipe_avail_inc(rpipe); in wa_seg_dto_cb()
866 wa_xfer_delayed_run(rpipe); in wa_seg_dto_cb()
889 struct wa_rpipe *rpipe; in wa_seg_iso_pack_desc_cb() local
912 rpipe = xfer->ep->hcpriv; in wa_seg_iso_pack_desc_cb()
924 rpipe_ready = rpipe_avail_inc(rpipe); in wa_seg_iso_pack_desc_cb()
932 wa_xfer_delayed_run(rpipe); in wa_seg_iso_pack_desc_cb()
962 struct wa_rpipe *rpipe; in wa_seg_tr_cb() local
987 rpipe = xfer->ep->hcpriv; in wa_seg_tr_cb()
1002 rpipe_ready = rpipe_avail_inc(rpipe); in wa_seg_tr_cb()
1008 wa_xfer_delayed_run(rpipe); in wa_seg_tr_cb()
1389 static int __wa_seg_submit(struct wa_rpipe *rpipe, struct wa_xfer *xfer, in __wa_seg_submit() argument
1443 rpipe_avail_dec(rpipe); in __wa_seg_submit()
1464 static int __wa_xfer_delayed_run(struct wa_rpipe *rpipe, int *dto_waiting) in __wa_xfer_delayed_run() argument
1467 struct device *dev = &rpipe->wa->usb_iface->dev; in __wa_xfer_delayed_run()
1474 spin_lock_irqsave(&rpipe->seg_lock, flags); in __wa_xfer_delayed_run()
1475 while (atomic_read(&rpipe->segs_available) > 0 in __wa_xfer_delayed_run()
1476 && !list_empty(&rpipe->seg_list) in __wa_xfer_delayed_run()
1477 && (dto_acquired = __wa_dto_try_get(rpipe->wa))) { in __wa_xfer_delayed_run()
1478 seg = list_first_entry(&(rpipe->seg_list), struct wa_seg, in __wa_xfer_delayed_run()
1488 result = __wa_seg_submit(rpipe, xfer, seg, &dto_done); in __wa_xfer_delayed_run()
1491 __wa_dto_put(rpipe->wa); in __wa_xfer_delayed_run()
1494 atomic_read(&rpipe->segs_available), result); in __wa_xfer_delayed_run()
1498 spin_unlock_irqrestore(&rpipe->seg_lock, flags); in __wa_xfer_delayed_run()
1510 spin_lock_irqsave(&rpipe->seg_lock, flags); in __wa_xfer_delayed_run()
1518 if (!dto_acquired && !list_empty(&rpipe->seg_list) in __wa_xfer_delayed_run()
1519 && (atomic_read(&rpipe->segs_available) == in __wa_xfer_delayed_run()
1520 le16_to_cpu(rpipe->descr.wRequests))) in __wa_xfer_delayed_run()
1523 spin_unlock_irqrestore(&rpipe->seg_lock, flags); in __wa_xfer_delayed_run()
1528 static void wa_xfer_delayed_run(struct wa_rpipe *rpipe) in wa_xfer_delayed_run() argument
1531 int dto_done = __wa_xfer_delayed_run(rpipe, &dto_waiting); in wa_xfer_delayed_run()
1542 wa_add_delayed_rpipe(rpipe->wa, rpipe); in wa_xfer_delayed_run()
1544 wa_check_for_delayed_rpipes(rpipe->wa); in wa_xfer_delayed_run()
1562 struct wa_rpipe *rpipe = xfer->ep->hcpriv; in __wa_xfer_submit() local
1563 size_t maxrequests = le16_to_cpu(rpipe->descr.wRequests); in __wa_xfer_submit()
1571 BUG_ON(atomic_read(&rpipe->segs_available) > maxrequests); in __wa_xfer_submit()
1573 spin_lock_irqsave(&rpipe->seg_lock, flags); in __wa_xfer_submit()
1577 available = atomic_read(&rpipe->segs_available); in __wa_xfer_submit()
1578 empty = list_empty(&rpipe->seg_list); in __wa_xfer_submit()
1585 dto_acquired = __wa_dto_try_get(rpipe->wa); in __wa_xfer_submit()
1588 result = __wa_seg_submit(rpipe, xfer, seg, in __wa_xfer_submit()
1594 __wa_dto_put(rpipe->wa); in __wa_xfer_submit()
1607 list_add_tail(&seg->list_node, &rpipe->seg_list); in __wa_xfer_submit()
1616 if (!dto_acquired && !list_empty(&rpipe->seg_list) in __wa_xfer_submit()
1617 && (atomic_read(&rpipe->segs_available) == in __wa_xfer_submit()
1618 le16_to_cpu(rpipe->descr.wRequests))) in __wa_xfer_submit()
1620 spin_unlock_irqrestore(&rpipe->seg_lock, flags); in __wa_xfer_submit()
1623 wa_add_delayed_rpipe(rpipe->wa, rpipe); in __wa_xfer_submit()
1625 wa_check_for_delayed_rpipes(rpipe->wa); in __wa_xfer_submit()
1802 struct wa_rpipe *rpipe; in wa_process_errored_transfers_run() local
1806 rpipe = ep->hcpriv; in wa_process_errored_transfers_run()
1816 wa_xfer_delayed_run(rpipe); in wa_process_errored_transfers_run()
1937 struct wa_rpipe *rpipe; in wa_urb_dequeue() local
1961 rpipe = xfer->ep->hcpriv; in wa_urb_dequeue()
1962 if (rpipe == NULL) { in wa_urb_dequeue()
1992 spin_lock(&rpipe->seg_lock); in wa_urb_dequeue()
2046 rpipe_ready = rpipe_avail_inc(rpipe); in wa_urb_dequeue()
2052 spin_unlock(&rpipe->seg_lock); in wa_urb_dequeue()
2059 wa_xfer_delayed_run(rpipe); in wa_urb_dequeue()
2142 struct wa_rpipe *rpipe = xfer->ep->hcpriv; in wa_complete_remaining_xfer_segs() local
2153 rpipe_avail_inc(rpipe); in wa_complete_remaining_xfer_segs()
2296 struct wa_rpipe *rpipe; in wa_xfer_result_chew() local
2308 rpipe = xfer->ep->hcpriv; in wa_xfer_result_chew()
2365 rpipe_ready = rpipe_avail_inc(rpipe); in wa_xfer_result_chew()
2372 wa_xfer_delayed_run(rpipe); in wa_xfer_result_chew()
2392 rpipe_ready = rpipe_avail_inc(rpipe); in wa_xfer_result_chew()
2415 wa_xfer_delayed_run(rpipe); in wa_xfer_result_chew()
2450 struct wa_rpipe *rpipe; in wa_process_iso_packet_status() local
2474 rpipe = xfer->ep->hcpriv; in wa_process_iso_packet_status()
2563 rpipe_ready = rpipe_avail_inc(rpipe); in wa_process_iso_packet_status()
2574 wa_xfer_delayed_run(rpipe); in wa_process_iso_packet_status()
2601 struct wa_rpipe *rpipe; in wa_buf_in_cb() local
2616 rpipe = xfer->ep->hcpriv; in wa_buf_in_cb()
2678 rpipe_ready = rpipe_avail_inc(rpipe); in wa_buf_in_cb()
2686 wa_xfer_delayed_run(rpipe); in wa_buf_in_cb()
2710 rpipe_ready = rpipe_avail_inc(rpipe); in wa_buf_in_cb()
2720 wa_xfer_delayed_run(rpipe); in wa_buf_in_cb()
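
The densest clusters above, __wa_xfer_delayed_run()/wa_xfer_delayed_run() and
__wa_xfer_submit(), form the submission side: a segment goes out only while the
rpipe still has request slots (segs_available) and the shared DTO resource can
be acquired (__wa_dto_try_get()); otherwise it stays on rpipe->seg_list and the
rpipe may be parked via wa_add_delayed_rpipe() until wa_check_for_delayed_rpipes()
revisits it. The sketch below is a minimal, single-threaded user-space rendering
of that flow, not the driver code: every *_sim identifier is invented for
illustration, and locking, wire-adapter I/O and error handling are omitted.

#include <stdbool.h>
#include <stdio.h>

static bool dto_busy_sim;            /* the single shared DTO resource        */

static bool dto_try_get_sim(void)    /* stands in for __wa_dto_try_get()      */
{
	if (dto_busy_sim)
		return false;
	dto_busy_sim = true;
	return true;
}

static void dto_put_sim(void)        /* stands in for __wa_dto_put()          */
{
	dto_busy_sim = false;
}

/* stands in for __wa_xfer_delayed_run(); returns segments submitted */
static int delayed_run_sim(int *segs_available, int *seg_list_len,
			   bool *dto_waiting)
{
	int submitted = 0;
	bool dto_acquired = true;

	while (*segs_available > 0 && *seg_list_len > 0 &&
	       (dto_acquired = dto_try_get_sim())) {
		(*seg_list_len)--;   /* list_first_entry() + __wa_seg_submit() */
		(*segs_available)--; /* rpipe_avail_dec() in __wa_seg_submit() */
		submitted++;
		dto_put_sim();       /* the driver may hold the DTO until the
				      * data URB completes; released at once
				      * in this simplified sketch             */
	}
	/*
	 * Segments remain queued but the DTO could not be acquired: report
	 * it so the caller can wa_add_delayed_rpipe() this rpipe and revisit
	 * it from wa_check_for_delayed_rpipes() once the DTO frees up.  (The
	 * driver's condition is stricter: it also requires every request
	 * slot on the rpipe to be free.)
	 */
	*dto_waiting = !dto_acquired && *seg_list_len > 0;
	return submitted;
}

int main(void)
{
	int avail, queued, n;
	bool waiting;

	/* DTO free: drain until the rpipe runs out of request slots */
	avail = 2; queued = 5; dto_busy_sim = false;
	n = delayed_run_sim(&avail, &queued, &waiting);
	printf("submitted=%d queued=%d dto_waiting=%d\n", n, queued, waiting);

	/* DTO held elsewhere: nothing submitted, rpipe reports dto_waiting */
	avail = 2; queued = 5; dto_busy_sim = true;
	n = delayed_run_sim(&avail, &queued, &waiting);
	printf("submitted=%d queued=%d dto_waiting=%d\n", n, queued, waiting);
	return 0;
}

The two-level check (per-rpipe slot accounting plus one shared DTO token) is
what appears to let many rpipes share a single outbound data path while each
submits no more than its own wRequests worth of work at a time.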
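
On the completion side, nearly every callback and cleanup path listed
(wa_seg_dto_cb(), wa_seg_iso_pack_desc_cb(), wa_seg_tr_cb(), wa_buf_in_cb(),
wa_xfer_result_chew(), wa_process_iso_packet_status(), wa_urb_dequeue(),
wa_complete_remaining_xfer_segs()) ends the same way: rpipe_avail_inc() returns
the finished segment's request slot and, when that reports the rpipe is usable
again, wa_xfer_delayed_run() is called to push queued work. The sketch below
assumes rpipe_avail_inc()/rpipe_avail_dec() are an atomic increment/decrement
of segs_available whose sign reports readiness; that is an assumption, not
something shown in this listing, and the *_sim names are invented.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct rpipe_sim {
	atomic_int segs_available;   /* free request slots on the rpipe       */
	int        queued;           /* segments parked on the seg_list       */
};

/* stands in for rpipe_avail_dec(): claim a slot when submitting a segment */
static void avail_dec_sim(struct rpipe_sim *rp)
{
	atomic_fetch_sub(&rp->segs_available, 1);
}

/* stands in for rpipe_avail_inc(): free a slot, report rpipe readiness */
static bool avail_inc_sim(struct rpipe_sim *rp)
{
	return atomic_fetch_add(&rp->segs_available, 1) + 1 > 0;
}

/* the tail shared by wa_seg_dto_cb()/wa_seg_tr_cb()/wa_buf_in_cb()/... */
static void seg_completed_sim(struct rpipe_sim *rp)
{
	bool rpipe_ready = avail_inc_sim(rp);

	if (rpipe_ready && rp->queued > 0) {
		/* wa_xfer_delayed_run(rpipe): submit a parked segment */
		rp->queued--;
		avail_dec_sim(rp);
	}
}

int main(void)
{
	/* all slots in use, two segments waiting on the simulated seg_list */
	struct rpipe_sim rp = { .segs_available = 0, .queued = 2 };

	seg_completed_sim(&rp);      /* frees a slot, immediately re-used     */
	seg_completed_sim(&rp);
	printf("queued=%d available=%d\n",
	       rp.queued, atomic_load(&rp.segs_available));
	return 0;
}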