Lines matching refs: qh
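
(The matches below come from the periodic-schedule half of the Linux EHCI host controller driver, drivers/usb/host/ehci-sched.c; qh is the driver's queue head structure for interrupt endpoints. The short notes and code between the match groups are simplified, standalone sketches of each routine's technique, added for orientation; none of them is the kernel's own code.)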

50 return &periodic->qh->qh_next; in periodic_next_shadow()
68 return &periodic->qh->hw->hw_next; in shadow_next_periodic()
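
A periodic-schedule slot can hold any of several structure types behind a type-tagged union, so these two helpers hand back the address of the software "shadow" next pointer (periodic_next_shadow) or the hardware next pointer (shadow_next_periodic) appropriate to whatever occupies the slot. A minimal standalone sketch of the shadow half, with invented names and only two member types:

/* Sketch only: a slot union holds typed pointers, and traversal asks a
 * helper for the address of the next-link inside whichever structure is
 * really there.  "shadow", "next_shadow", and the two-type tag are
 * illustrative stand-ins, not the driver's definitions. */
struct qh_t;
struct itd_t;

union shadow {
    struct qh_t  *qh;
    struct itd_t *itd;
    void         *ptr;
};

struct qh_t  { union shadow qh_next; };
struct itd_t { union shadow itd_next; };

static union shadow *next_shadow(union shadow *here, int is_qh)
{
    return is_qh ? &here->qh->qh_next : &here->itd->itd_next;
}
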
219 struct ehci_qh *qh, int sign) in reserve_release_intr_bandwidth() argument
223 int usecs = qh->ps.usecs; in reserve_release_intr_bandwidth()
224 int c_usecs = qh->ps.c_usecs; in reserve_release_intr_bandwidth()
225 int tt_usecs = qh->ps.tt_usecs; in reserve_release_intr_bandwidth()
228 if (qh->ps.phase == NO_FRAME) /* Bandwidth wasn't reserved */ in reserve_release_intr_bandwidth()
230 start_uf = qh->ps.bw_phase << 3; in reserve_release_intr_bandwidth()
232 bandwidth_dbg(ehci, sign, "intr", &qh->ps); in reserve_release_intr_bandwidth()
241 for (i = start_uf + qh->ps.phase_uf; i < EHCI_BANDWIDTH_SIZE; in reserve_release_intr_bandwidth()
242 i += qh->ps.bw_uperiod) in reserve_release_intr_bandwidth()
246 if (qh->ps.c_usecs) { in reserve_release_intr_bandwidth()
249 i += qh->ps.bw_uperiod) { in reserve_release_intr_bandwidth()
251 if (qh->ps.cs_mask & m) in reserve_release_intr_bandwidth()
259 tt = find_tt(qh->ps.udev); in reserve_release_intr_bandwidth()
261 list_add_tail(&qh->ps.ps_list, &tt->ps_list); in reserve_release_intr_bandwidth()
263 list_del(&qh->ps.ps_list); in reserve_release_intr_bandwidth()
266 i += qh->ps.bw_period) in reserve_release_intr_bandwidth()
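
reserve_release_intr_bandwidth() folds reservation and release into one routine: sign is +1 or -1, and the same loops that charge the start-split, complete-split, and TT microseconds to the bandwidth tables also refund them (the find_tt()/list_add_tail()/list_del() lines keep the per-TT bookkeeping in step). A minimal sketch of the sign pattern, assuming a small per-microframe load table; BW_SIZE and the field names are illustrative:

/* Sketch only: one loop serves both directions.  With sign = +1 the
 * endpoint's load is added at every microframe it will use; with
 * sign = -1 the identical walk subtracts it again. */
#include <stdint.h>

#define BW_SIZE 64                      /* microframes tracked; illustrative */

struct ep_sched {
    unsigned phase_uf;                  /* first microframe reserved */
    unsigned bw_uperiod;                /* period in microframes */
    unsigned usecs;                     /* load per occurrence */
};

static void reserve_release(uint8_t bw[BW_SIZE],
                            const struct ep_sched *ps, int sign)
{
    int delta = sign * (int)ps->usecs;
    unsigned i;

    for (i = ps->phase_uf; i < BW_SIZE; i += ps->bw_uperiod)
        bw[i] += delta;
}
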
467 hw = here.qh->hw; in tt_no_collision()
468 if (same_tt(dev, here.qh->ps.udev)) { in tt_no_collision()
479 here = here.qh->qh_next; in tt_no_collision()
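
tt_no_collision() serves the older TT-scheduling path (used when the per-TT budget in tt_available() is not): it walks each candidate frame's shadow list and refuses a slot if another full/low-speed device behind the same transaction translator already claims overlapping microframes. A standalone sketch of the shape of that check, with illustrative structures:

/* Sketch only: a proposed split transaction collides if any entry
 * already scheduled behind the same TT uses an overlapping microframe
 * mask in a frame the new endpoint would also occupy. */
#include <stdbool.h>

struct sched_entry {
    struct sched_entry *next;
    const void         *tt;             /* which hub/TT the entry uses */
    unsigned            cs_mask;        /* microframes it occupies */
};

static bool no_collision(struct sched_entry *frames[], unsigned nframes,
                         unsigned frame, unsigned period,
                         const void *tt, unsigned mask)
{
    unsigned f;
    struct sched_entry *e;

    for (f = frame; f < nframes; f += period)
        for (e = frames[f]; e; e = e->next)
            if (e->tt == tt && (e->cs_mask & mask))
                return false;           /* conflicting microframes */
    return true;
}
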
545 static void qh_link_periodic(struct ehci_hcd *ehci, struct ehci_qh *qh) in qh_link_periodic() argument
548 unsigned period = qh->ps.period; in qh_link_periodic()
550 dev_dbg(&qh->ps.udev->dev, in qh_link_periodic()
552 period, hc32_to_cpup(ehci, &qh->hw->hw_info2) in qh_link_periodic()
554 qh, qh->ps.phase, qh->ps.usecs, qh->ps.c_usecs); in qh_link_periodic()
560 for (i = qh->ps.phase; i < ehci->periodic_size; i += period) { in qh_link_periodic()
579 while (here.ptr && qh != here.qh) { in qh_link_periodic()
580 if (qh->ps.period > here.qh->ps.period) in qh_link_periodic()
582 prev = &here.qh->qh_next; in qh_link_periodic()
583 hw_p = &here.qh->hw->hw_next; in qh_link_periodic()
587 if (qh != here.qh) { in qh_link_periodic()
588 qh->qh_next = here; in qh_link_periodic()
589 if (here.qh) in qh_link_periodic()
590 qh->hw->hw_next = *hw_p; in qh_link_periodic()
592 prev->qh = qh; in qh_link_periodic()
593 *hw_p = QH_NEXT (ehci, qh->qh_dma); in qh_link_periodic()
596 qh->qh_state = QH_STATE_LINKED; in qh_link_periodic()
597 qh->xacterrs = 0; in qh_link_periodic()
598 qh->exception = 0; in qh_link_periodic()
601 ehci_to_hcd(ehci)->self.bandwidth_allocated += qh->ps.bw_period in qh_link_periodic()
602 ? ((qh->ps.usecs + qh->ps.c_usecs) / qh->ps.bw_period) in qh_link_periodic()
603 : (qh->ps.usecs * 8); in qh_link_periodic()
605 list_add(&qh->intr_node, &ehci->intr_qh_list); in qh_link_periodic()
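
qh_link_periodic() splices the qh into every frame congruent to its phase, keeping each frame's list sorted by descending period ("slow before fast") so long-period entries form a shared tail that all shorter-period lists point into; the qh != here.qh tests skip frames where the qh is already reachable through that shared tail, and every software-pointer update is mirrored in the hardware hw_next links so the controller sees the same order. The software half of the discipline as a standalone sketch, assuming period >= 1 and single-threaded access:

/* Sketch only: sorted insertion by descending period into each frame
 * list the endpoint touches.  The shared-tail invariant means a later
 * frame may already reach the node, hence the *prev != qh guards. */
struct pnode {
    struct pnode *next;
    unsigned      period;               /* frames between executions, >= 1 */
};

static void link_periodic(struct pnode *frames[], unsigned nframes,
                          struct pnode *qh, unsigned phase)
{
    unsigned i;

    for (i = phase; i < nframes; i += qh->period) {
        struct pnode **prev = &frames[i];

        /* stop before the first entry with a shorter period */
        while (*prev && *prev != qh && (*prev)->period >= qh->period)
            prev = &(*prev)->next;
        if (*prev != qh) {              /* not already reachable here */
            qh->next = *prev;
            *prev = qh;
        }
    }
}
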
612 static void qh_unlink_periodic(struct ehci_hcd *ehci, struct ehci_qh *qh) in qh_unlink_periodic() argument
633 period = qh->ps.period ? : 1; in qh_unlink_periodic()
635 for (i = qh->ps.phase; i < ehci->periodic_size; i += period) in qh_unlink_periodic()
636 periodic_unlink (ehci, i, qh); in qh_unlink_periodic()
639 ehci_to_hcd(ehci)->self.bandwidth_allocated -= qh->ps.bw_period in qh_unlink_periodic()
640 ? ((qh->ps.usecs + qh->ps.c_usecs) / qh->ps.bw_period) in qh_unlink_periodic()
641 : (qh->ps.usecs * 8); in qh_unlink_periodic()
643 dev_dbg(&qh->ps.udev->dev, in qh_unlink_periodic()
645 qh->ps.period, in qh_unlink_periodic()
646 hc32_to_cpup(ehci, &qh->hw->hw_info2) & (QH_CMASK | QH_SMASK), in qh_unlink_periodic()
647 qh, qh->ps.phase, qh->ps.usecs, qh->ps.c_usecs); in qh_unlink_periodic()
650 qh->qh_state = QH_STATE_UNLINK; in qh_unlink_periodic()
651 qh->qh_next.ptr = NULL; in qh_unlink_periodic()
653 if (ehci->qh_scan_next == qh) in qh_unlink_periodic()
654 ehci->qh_scan_next = list_entry(qh->intr_node.next, in qh_unlink_periodic()
656 list_del(&qh->intr_node); in qh_unlink_periodic()
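
qh_unlink_periodic() is the mirror image: pull the qh out of each frame list it occupies (period 0 is treated as 1, i.e. every frame), refund the allocated bandwidth, mark the qh QH_STATE_UNLINK, and, before deleting it from intr_qh_list, advance ehci->qh_scan_next if the scan cursor happens to rest on the node being removed. That cursor fix-up in isolation, over an illustrative doubly linked list:

/* Sketch only: when removing a node that a concurrent scan's cursor
 * points at, step the cursor past it first so the walk stays valid. */
struct dlist { struct dlist *next, *prev; };

static void unlink_with_cursor(struct dlist *node, struct dlist **cursor)
{
    if (*cursor == node)
        *cursor = node->next;           /* step past the doomed node */
    node->prev->next = node->next;      /* standard doubly linked unlink */
    node->next->prev = node->prev;
    node->next = node->prev = node;     /* leave the node self-linked */
}
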
659 static void cancel_unlink_wait_intr(struct ehci_hcd *ehci, struct ehci_qh *qh) in cancel_unlink_wait_intr() argument
661 if (qh->qh_state != QH_STATE_LINKED || in cancel_unlink_wait_intr()
662 list_empty(&qh->unlink_node)) in cancel_unlink_wait_intr()
665 list_del_init(&qh->unlink_node); in cancel_unlink_wait_intr()
673 static void start_unlink_intr(struct ehci_hcd *ehci, struct ehci_qh *qh) in start_unlink_intr() argument
676 if (qh->qh_state != QH_STATE_LINKED) in start_unlink_intr()
680 cancel_unlink_wait_intr(ehci, qh); in start_unlink_intr()
682 qh_unlink_periodic (ehci, qh); in start_unlink_intr()
692 qh->unlink_cycle = ehci->intr_unlink_cycle; in start_unlink_intr()
695 list_add_tail(&qh->unlink_node, &ehci->intr_unlink); in start_unlink_intr()
701 else if (ehci->intr_unlink.next == &qh->unlink_node) { in start_unlink_intr()
713 struct ehci_qh *qh) in start_unlink_intr_wait() argument
715 qh->unlink_cycle = ehci->intr_unlink_wait_cycle; in start_unlink_intr_wait()
718 list_add_tail(&qh->unlink_node, &ehci->intr_unlink_wait); in start_unlink_intr_wait()
722 else if (ehci->intr_unlink_wait.next == &qh->unlink_node) { in start_unlink_intr_wait()
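
Together these three routines implement lazy teardown. An interrupt qh that merely went empty is parked on ehci->intr_unlink_wait (start_unlink_intr_wait) rather than unlinked at once, so a promptly resubmitted URB can reclaim the still-linked qh for free (cancel_unlink_wait_intr); only when the wait expires, or an unlink is actually required, does start_unlink_intr remove it from the schedule and queue it on ehci->intr_unlink until the controller has verifiably stopped using it (unlink_cycle). The park/reclaim pair as a minimal sketch with invented names:

/* Sketch only: park an idle-but-linked endpoint; a new transfer
 * arriving before the timer fires simply takes it back. */
#include <stdbool.h>

struct pq {
    struct pq *wait_next;
};

static void park_for_unlink(struct pq **waitlist, struct pq *q)
{
    q->wait_next = *waitlist;
    *waitlist = q;
}

static bool reclaim(struct pq **waitlist, struct pq *q)
{
    for (; *waitlist; waitlist = &(*waitlist)->wait_next)
        if (*waitlist == q) {
            *waitlist = q->wait_next;   /* cancelled: still scheduled */
            return true;
        }
    return false;
}
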
728 static void end_unlink_intr(struct ehci_hcd *ehci, struct ehci_qh *qh) in end_unlink_intr() argument
730 struct ehci_qh_hw *hw = qh->hw; in end_unlink_intr()
733 qh->qh_state = QH_STATE_IDLE; in end_unlink_intr()
736 if (!list_empty(&qh->qtd_list)) in end_unlink_intr()
737 qh_completions(ehci, qh); in end_unlink_intr()
740 if (!list_empty(&qh->qtd_list) && ehci->rh_state == EHCI_RH_RUNNING) { in end_unlink_intr()
741 rc = qh_schedule(ehci, qh); in end_unlink_intr()
743 qh_refresh(ehci, qh); in end_unlink_intr()
744 qh_link_periodic(ehci, qh); in end_unlink_intr()
755 qh, rc); in end_unlink_intr()
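
end_unlink_intr() runs once the hardware is known to have let go: the qh drops to QH_STATE_IDLE, any finished qtds are completed, and if transfers remain while the root hub is running the qh is rescheduled and relinked on the spot (the rc line at 755 reports the rare case where rescheduling fails). The flow's shape as a standalone sketch with stub helpers:

/* Sketch only: complete what finished during the unlink, then relink
 * immediately if the endpoint still has queued work. */
#include <stdbool.h>

enum ep_state { EP_IDLE, EP_LINKED };

struct ep {
    enum ep_state state;
    int           pending;              /* transfers still queued */
};

static int  give_slot(struct ep *ep) { (void)ep; return 0; }  /* stub */
static void relink(struct ep *ep)    { ep->state = EP_LINKED; }

static void end_unlink(struct ep *ep, bool hc_running)
{
    ep->state = EP_IDLE;                /* hardware no longer sees it */
    /* ...complete transfers that finished while unlinking... */
    if (ep->pending && hc_running && give_slot(ep) == 0)
        relink(ep);                     /* back in service without a gap */
}
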
796 struct ehci_qh *qh, in check_intr_schedule() argument
804 if (qh->ps.c_usecs && uframe >= 6) /* FSTN territory? */ in check_intr_schedule()
807 if (!check_period(ehci, frame, uframe, qh->ps.bw_uperiod, qh->ps.usecs)) in check_intr_schedule()
809 if (!qh->ps.c_usecs) { in check_intr_schedule()
816 if (tt_available(ehci, &qh->ps, tt, frame, uframe)) { in check_intr_schedule()
822 qh->ps.bw_uperiod, qh->ps.c_usecs)) in check_intr_schedule()
839 mask = 0x03 << (uframe + qh->gap_uf); in check_intr_schedule()
843 if (tt_no_collision(ehci, qh->ps.bw_period, qh->ps.udev, frame, mask)) { in check_intr_schedule()
844 if (!check_period(ehci, frame, uframe + qh->gap_uf + 1, in check_intr_schedule()
845 qh->ps.bw_uperiod, qh->ps.c_usecs)) in check_intr_schedule()
847 if (!check_period(ehci, frame, uframe + qh->gap_uf, in check_intr_schedule()
848 qh->ps.bw_uperiod, qh->ps.c_usecs)) in check_intr_schedule()
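
check_intr_schedule() decides whether one (frame, uframe) candidate is feasible. High-speed endpoints only need the start-split load to fit at every bw_uperiod microframes; full/low-speed endpoints must also fit the complete-split microseconds, either via the per-TT budget (tt_available()) or, on the older path, via tt_no_collision() plus checks at uframe + gap_uf and uframe + gap_uf + 1; uframe >= 6 is rejected up front because the complete splits would spill across the frame boundary (FSTN territory). A simplified standalone version, assuming EHCI's default budget of 100 periodic microseconds per 125 us microframe:

/* Sketch only: feasibility of one candidate slot against a flat
 * per-microframe load table.  gap_uf and the parameter layout are
 * simplified stand-ins for the driver's state. */
#include <stdbool.h>

#define UFRAMES   64
#define MAX_USECS 100                   /* default periodic budget per uframe */

static bool fits(const unsigned bw[UFRAMES], unsigned start,
                 unsigned uperiod, unsigned usecs)
{
    unsigned i;

    for (i = start; i < UFRAMES; i += uperiod)
        if (bw[i] + usecs > MAX_USECS)
            return false;
    return true;
}

static bool candidate_ok(const unsigned bw[UFRAMES],
                         unsigned frame, unsigned uframe, unsigned uperiod,
                         unsigned ss_usecs, unsigned cs_usecs, unsigned gap_uf)
{
    unsigned base = (frame << 3) + uframe;  /* absolute microframe */

    if (cs_usecs && uframe >= 6)        /* splits would span a frame */
        return false;
    if (!fits(bw, base, uperiod, ss_usecs))
        return false;
    if (!cs_usecs)                      /* high speed: done */
        return true;
    return fits(bw, base + gap_uf,     uperiod, cs_usecs) &&
           fits(bw, base + gap_uf + 1, uperiod, cs_usecs);
}
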
860 static int qh_schedule(struct ehci_hcd *ehci, struct ehci_qh *qh) in qh_schedule() argument
865 struct ehci_qh_hw *hw = qh->hw; in qh_schedule()
871 if (qh->ps.phase != NO_FRAME) { in qh_schedule()
872 ehci_dbg(ehci, "reused qh %p schedule\n", qh); in qh_schedule()
878 tt = find_tt(qh->ps.udev); in qh_schedule()
889 if (qh->ps.bw_period) { in qh_schedule()
893 for (i = qh->ps.bw_period; i > 0; --i) { in qh_schedule()
894 frame = ++ehci->random_frame & (qh->ps.bw_period - 1); in qh_schedule()
897 frame, uframe, qh, &c_mask, tt); in qh_schedule()
905 status = check_intr_schedule(ehci, 0, 0, qh, &c_mask, tt); in qh_schedule()
911 qh->ps.phase = (qh->ps.period ? ehci->random_frame & in qh_schedule()
912 (qh->ps.period - 1) : 0); in qh_schedule()
913 qh->ps.bw_phase = qh->ps.phase & (qh->ps.bw_period - 1); in qh_schedule()
914 qh->ps.phase_uf = uframe; in qh_schedule()
915 qh->ps.cs_mask = qh->ps.period ? in qh_schedule()
921 hw->hw_info2 |= cpu_to_hc32(ehci, qh->ps.cs_mask); in qh_schedule()
922 reserve_release_intr_bandwidth(ehci, qh, 1); in qh_schedule()
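
qh_schedule() picks the phase: for a nonzero bw_period it tries up to bw_period pseudo-randomly chosen frames (++ehci->random_frame spreads unrelated endpoints across the schedule) and scans the microframes within each; period-0 endpoints get the single check at slot (0, 0). On success it records phase, bw_phase, phase_uf, and cs_mask, ORs the S-mask/C-mask into hw_info2, and charges the bandwidth via reserve_release_intr_bandwidth(..., 1). The search loop in isolation, assuming bw_period is a power of two (as in the driver) and a caller-supplied feasibility test:

/* Sketch only: randomized first-fit search over (frame, uframe)
 * candidates; returns 0 and the chosen slot, or -1 if the schedule
 * has no room at this endpoint's period. */
#include <stdbool.h>

static int pick_phase(unsigned bw_period, unsigned *seed,
                      bool (*slot_ok)(unsigned frame, unsigned uframe),
                      unsigned *frame_out, unsigned *uframe_out)
{
    unsigned tries, uframe;

    for (tries = bw_period; tries > 0; --tries) {
        unsigned frame = ++*seed & (bw_period - 1);   /* power of two */

        for (uframe = 0; uframe < 8; ++uframe) {
            if (slot_ok(frame, uframe)) {
                *frame_out  = frame;
                *uframe_out = uframe;
                return 0;
            }
        }
    }
    return -1;                          /* schedule is full at this period */
}
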
936 struct ehci_qh *qh; in intr_submit() local
955 qh = qh_append_tds(ehci, urb, &empty, epnum, &urb->ep->hcpriv); in intr_submit()
956 if (qh == NULL) { in intr_submit()
960 if (qh->qh_state == QH_STATE_IDLE) { in intr_submit()
961 if ((status = qh_schedule (ehci, qh)) != 0) in intr_submit()
966 qh = qh_append_tds(ehci, urb, qtd_list, epnum, &urb->ep->hcpriv); in intr_submit()
967 BUG_ON (qh == NULL); in intr_submit()
970 if (qh->qh_state == QH_STATE_IDLE) { in intr_submit()
971 qh_refresh(ehci, qh); in intr_submit()
972 qh_link_periodic(ehci, qh); in intr_submit()
975 cancel_unlink_wait_intr(ehci, qh); in intr_submit()
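
intr_submit() appends in two passes: a probe with an empty list (qh_append_tds(..., &empty, ...)) finds or creates the qh without committing the transfer, qh_schedule() gives it a slot if it is still QH_STATE_IDLE, and only then are the real qtds appended; finally the qh is linked into the periodic schedule if idle, or has any pending lazy unlink cancelled if already linked, so the fresh URB cannot be torn down underneath. The control flow compressed into a sketch with stub helpers:

/* Sketch only: schedule-before-commit, then link or keep alive. */
enum q_state { Q_IDLE, Q_LINKED };

struct ep_queue {
    enum q_state state;
    int          ntds;                  /* queued transfer descriptors */
};

static int  give_schedule_slot(struct ep_queue *q) { (void)q; return 0; }
static void link_into_schedule(struct ep_queue *q) { q->state = Q_LINKED; }
static void cancel_lazy_unlink(struct ep_queue *q) { (void)q; }

static int submit_intr(struct ep_queue *q)
{
    if (q->state == Q_IDLE) {
        int status = give_schedule_slot(q);     /* phase + bandwidth */
        if (status != 0)
            return status;              /* no room: fail before queuing */
    }
    q->ntds++;                          /* commit the new transfer */
    if (q->state == Q_IDLE)
        link_into_schedule(q);
    else
        cancel_lazy_unlink(q);          /* keep a linked qh alive */
    return 0;
}
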
994 struct ehci_qh *qh; in scan_intr() local
996 list_for_each_entry_safe(qh, ehci->qh_scan_next, &ehci->intr_qh_list, in scan_intr()
1000 if (!list_empty(&qh->qtd_list)) { in scan_intr()
1010 temp = qh_completions(ehci, qh); in scan_intr()
1012 start_unlink_intr(ehci, qh); in scan_intr()
1013 else if (unlikely(list_empty(&qh->qtd_list) && in scan_intr()
1014 qh->qh_state == QH_STATE_LINKED)) in scan_intr()
1015 start_unlink_intr_wait(ehci, qh); in scan_intr()
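
scan_intr() harvests completions across every linked interrupt qh; it iterates through ehci->qh_scan_next precisely so the cursor fix-up in qh_unlink_periodic() keeps the walk safe when a qh removes itself mid-scan, and qhs that merely emptied are handed to start_unlink_intr_wait() for the lazy teardown described above. The loop's shape as a final sketch:

/* Sketch only: cursor-based walk that tolerates removal of the node
 * being processed; empty endpoints are parked rather than destroyed. */
struct sq { struct sq *next; int ntds; };

static void park(struct sq *q) { (void)q; }

static void scan(struct sq *list, struct sq **cursor)
{
    struct sq *q;

    for (q = list; q; q = *cursor) {
        *cursor = q->next;              /* advance before touching q */
        /* ...run completions on q; removals fix *cursor as needed... */
        if (q->ntds == 0)
            park(q);                    /* lazy unlink-wait */
    }
}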