Lines matching refs: urb (struct urb references in the Linux IrDA USB dongle driver, irda-usb.c)
117 static void speed_bulk_callback(struct urb *urb);
118 static void write_bulk_callback(struct urb *urb);
119 static void irda_usb_receive(struct urb *urb);
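These forward declarations (source lines 117-119) are the driver's three URB completion handlers. They use the single-argument completion form, so each handler has to recover its state from urb->context: the driver control block for the speed URB, the sk_buff for the TX and RX URBs. A minimal sketch of that shape, with a hypothetical my_priv standing in for the driver's control block:

    #include <linux/usb.h>

    struct my_priv;                           /* hypothetical driver state */

    static void my_bulk_callback(struct urb *urb)
    {
        struct my_priv *priv = urb->context;  /* whatever was passed as the context
                                               * argument of usb_fill_bulk_urb() */

        if (urb->status)                      /* 0 on success, negative errno otherwise */
            dev_dbg(&urb->dev->dev, "bulk urb failed: %d\n", urb->status);

        (void)priv;                           /* a real handler hands off to priv here */
    }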
303 struct urb *urb; in irda_usb_change_speed_xbofs() local
310 urb = self->speed_urb; in irda_usb_change_speed_xbofs()
311 if (urb->status != 0) { in irda_usb_change_speed_xbofs()
329 usb_fill_bulk_urb(urb, self->usbdev, in irda_usb_change_speed_xbofs()
333 urb->transfer_buffer_length = self->header_length; in irda_usb_change_speed_xbofs()
334 urb->transfer_flags = 0; in irda_usb_change_speed_xbofs()
337 if ((ret = usb_submit_urb(urb, GFP_ATOMIC))) { in irda_usb_change_speed_xbofs()
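Source lines 303-337 show how irda_usb_change_speed_xbofs() reuses one preallocated URB: it peeks at urb->status to make sure the previous request has completed, refills the URB as a bulk OUT transfer that carries only the speed-change header, clears the transfer flags, and submits atomically. A condensed sketch of that pattern; the endpoint number, buffer and function names are hypothetical, and reading urb->status outside the completion handler mirrors the old driver style rather than current best practice:

    #include <linux/usb.h>

    static void speed_done(struct urb *urb)          /* trivial stand-in completion */
    {
        if (urb->status)
            pr_debug("speed urb done, status %d\n", urb->status);
    }

    static int submit_speed_urb(struct usb_device *udev, struct urb *urb,
                                void *header, int header_len, void *ctx)
    {
        int ret;

        if (urb->status != 0)                        /* previous request still pending */
            return -EBUSY;

        usb_fill_bulk_urb(urb, udev,
                          usb_sndbulkpipe(udev, 1 /* hypothetical OUT endpoint */),
                          header, header_len, speed_done, ctx);
        urb->transfer_buffer_length = header_len;    /* the driver fills with a larger
                                                      * buffer, then narrows the length
                                                      * to just the header (line 333) */
        urb->transfer_flags = 0;

        ret = usb_submit_urb(urb, GFP_ATOMIC);       /* may be called in atomic context */
        if (ret)
            dev_err(&udev->dev, "speed urb submit failed: %d\n", ret);
        return ret;
    }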
347 static void speed_bulk_callback(struct urb *urb) in speed_bulk_callback() argument
349 struct irda_usb_cb *self = urb->context; in speed_bulk_callback()
354 IRDA_ASSERT(urb == self->speed_urb, return;); in speed_bulk_callback()
357 if (urb->status != 0) { in speed_bulk_callback()
360 __func__, urb->status, urb->transfer_flags); in speed_bulk_callback()
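speed_bulk_callback() (source lines 347-360) recovers the control block from urb->context, asserts that the completing URB really is self->speed_urb, and logs any non-zero status; on success the driver lets the network stack transmit again. A sketch, with a hypothetical my_priv holding just the field the check needs:

    #include <linux/usb.h>

    struct my_priv { struct urb *speed_urb; };       /* hypothetical subset */

    static void speed_complete(struct urb *urb)
    {
        struct my_priv *self = urb->context;

        if (WARN_ON(urb != self->speed_urb))         /* same idea as the IRDA_ASSERT */
            return;

        switch (urb->status) {
        case 0:                                      /* speed change committed */
            break;
        case -ECONNRESET:                            /* unlinked, e.g. by the timeout path */
        case -ENOENT:
            return;
        default:
            pr_debug("speed urb failed, status %d, flags 0x%04x\n",
                     urb->status, urb->transfer_flags);
            return;
        }
        /* on success the real handler calls netif_wake_queue() here */
    }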
389 struct urb *urb = self->tx_urb; in irda_usb_hard_xmit() local
438 if (urb->status != 0) { in irda_usb_hard_xmit()
468 usb_fill_bulk_urb(urb, self->usbdev, in irda_usb_hard_xmit()
478 urb->transfer_flags = URB_ZERO_PACKET; in irda_usb_hard_xmit()
519 if ((res = usb_submit_urb(urb, GFP_ATOMIC))) { in irda_usb_hard_xmit()
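The transmit path (source lines 389-519) checks that the TX URB is idle, copies the frame into a preallocated transmit buffer behind the IrDA-USB header, and submits it with URB_ZERO_PACKET set so the host controller appends a zero-length packet whenever the transfer is an exact multiple of the endpoint's wMaxPacketSize; without that flag the device could not tell where the frame ends. The sketch below sends the skb data directly instead of bouncing it through a header buffer, to keep it short:

    #include <linux/netdevice.h>
    #include <linux/skbuff.h>
    #include <linux/usb.h>

    static void tx_done(struct urb *urb)
    {
        struct sk_buff *skb = urb->context;

        urb->context = NULL;                         /* TX URB is idle again */
        dev_kfree_skb_any(skb);                      /* completion may run in IRQ context */
    }

    static netdev_tx_t send_frame(struct net_device *dev, struct usb_device *udev,
                                  struct urb *urb, struct sk_buff *skb)
    {
        int res;

        if (urb->status != 0) {                      /* previous TX URB still in use */
            dev_kfree_skb_any(skb);                  /* the driver warns and drops here */
            dev->stats.tx_dropped++;
            return NETDEV_TX_OK;
        }

        usb_fill_bulk_urb(urb, udev,
                          usb_sndbulkpipe(udev, 1 /* hypothetical OUT endpoint */),
                          skb->data, skb->len, tx_done, skb);
        urb->transfer_flags = URB_ZERO_PACKET;       /* terminate exact-multiple frames */

        res = usb_submit_urb(urb, GFP_ATOMIC);       /* ndo_start_xmit runs atomically */
        if (res) {
            dev_kfree_skb_any(skb);
            dev->stats.tx_dropped++;
        }
        return NETDEV_TX_OK;
    }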
546 static void write_bulk_callback(struct urb *urb) in write_bulk_callback() argument
549 struct sk_buff *skb = urb->context; in write_bulk_callback()
555 IRDA_ASSERT(urb == self->tx_urb, return;); in write_bulk_callback()
559 urb->context = NULL; in write_bulk_callback()
562 if (urb->status != 0) { in write_bulk_callback()
565 __func__, urb->status, urb->transfer_flags); in write_bulk_callback()
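write_bulk_callback() (source lines 546-565) keeps the transmitted sk_buff, not the driver struct, in urb->context, so it first recovers the skb, clears the context so the watchdog can see the URB is idle, frees the buffer, and only then interprets urb->status. A sketch of that bookkeeping, assuming (hypothetically) that a pointer back to the driver state travels in skb->cb:

    #include <linux/skbuff.h>
    #include <linux/usb.h>

    struct my_priv;                                  /* hypothetical driver state */
    struct tx_cb { struct my_priv *priv; };          /* hypothetical skb->cb layout */

    static void tx_complete(struct urb *urb)
    {
        struct sk_buff *skb = urb->context;
        struct my_priv *priv = ((struct tx_cb *)skb->cb)->priv;

        urb->context = NULL;                         /* mark the TX URB idle */
        dev_kfree_skb_any(skb);

        if (urb->status != 0) {
            /* the driver only logs here and lets the netdev watchdog recover */
            pr_debug("tx urb failed, status %d, flags 0x%04x\n",
                     urb->status, urb->transfer_flags);
            return;
        }
        /* success: netif_wake_queue() follows once the link is free again */
        (void)priv;
    }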
624 struct urb *urb; in irda_usb_net_timeout() local
642 urb = self->speed_urb; in irda_usb_net_timeout()
643 if (urb->status != 0) { in irda_usb_net_timeout()
645 netdev->name, urb->status, urb->transfer_flags); in irda_usb_net_timeout()
647 switch (urb->status) { in irda_usb_net_timeout()
649 usb_unlink_urb(urb); in irda_usb_net_timeout()
658 urb->status = 0; in irda_usb_net_timeout()
666 urb = self->tx_urb; in irda_usb_net_timeout()
667 if (urb->status != 0) { in irda_usb_net_timeout()
668 struct sk_buff *skb = urb->context; in irda_usb_net_timeout()
671 netdev->name, urb->status, urb->transfer_flags); in irda_usb_net_timeout()
685 switch (urb->status) { in irda_usb_net_timeout()
687 usb_unlink_urb(urb); in irda_usb_net_timeout()
700 urb->context = NULL; in irda_usb_net_timeout()
702 urb->status = 0; in irda_usb_net_timeout()
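The netdev watchdog (source lines 624-702) inspects both the speed URB and the TX URB when a transmit times out. A URB whose status is still -EINPROGRESS is cancelled with usb_unlink_urb(), which is asynchronous: the completion handler runs later with -ECONNRESET and does the cleanup. For any other non-zero status the handler reclaims a pending skb, clears the context, resets the status and wakes the queue so transmission can resume. A reduced per-URB sketch (resetting urb->status by hand is the old-style idiom the driver uses at lines 658 and 702):

    #include <linux/netdevice.h>
    #include <linux/skbuff.h>
    #include <linux/usb.h>

    static void recover_tx_urb(struct net_device *dev, struct urb *urb)
    {
        if (urb->status == 0)
            return;                                  /* URB is idle, nothing stuck */

        switch (urb->status) {
        case -EINPROGRESS:
            usb_unlink_urb(urb);                     /* async cancel; completion will
                                                      * run later with -ECONNRESET */
            break;
        case -ECONNRESET:                            /* already unlinked by us */
        case -ENOENT:
        default:                                     /* play it safe: reclaim and retry */
            if (urb->context) {
                dev_kfree_skb_any(urb->context);     /* skb still hooked to the TX URB */
                urb->context = NULL;
            }
            urb->status = 0;                         /* old-style: make the URB look idle */
            netif_wake_queue(dev);
            break;
        }
    }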
755 static void irda_usb_submit(struct irda_usb_cb *self, struct sk_buff *skb, struct urb *urb) in irda_usb_submit() argument
762 IRDA_ASSERT(urb != NULL, return;); in irda_usb_submit()
769 usb_fill_bulk_urb(urb, self->usbdev, in irda_usb_submit()
773 urb->status = 0; in irda_usb_submit()
776 ret = usb_submit_urb(urb, GFP_ATOMIC); in irda_usb_submit()
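irda_usb_submit() (source lines 755-776) arms one receive URB: the data area of a freshly allocated sk_buff becomes the bulk IN transfer buffer, the skb itself rides along as urb->context, status is cleared and the URB is submitted atomically. A sketch with hypothetical endpoint and size values:

    #include <linux/skbuff.h>
    #include <linux/usb.h>

    #define RX_BUF_LEN 4096                          /* placeholder for the max Rx size */

    static void rx_done(struct urb *urb);            /* receive completion, sketched
                                                      * after the irda_usb_receive lines */

    static int arm_rx_urb(struct usb_device *udev, struct urb *urb,
                          struct sk_buff *skb)
    {
        int ret;

        usb_fill_bulk_urb(urb, udev,
                          usb_rcvbulkpipe(udev, 2 /* hypothetical IN endpoint */),
                          skb->data, RX_BUF_LEN,
                          rx_done, skb);             /* skb travels as urb->context */
        urb->status = 0;                             /* old-style reset before reuse */

        ret = usb_submit_urb(urb, GFP_ATOMIC);
        if (ret)
            pr_warn("rx urb submit failed: %d\n", ret);
        return ret;
    }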
792 static void irda_usb_receive(struct urb *urb) in irda_usb_receive() argument
794 struct sk_buff *skb = (struct sk_buff *) urb->context; in irda_usb_receive()
799 struct urb *next_urb; in irda_usb_receive()
802 pr_debug("%s(), len=%d\n", __func__, urb->actual_length); in irda_usb_receive()
818 if (urb->status != 0) { in irda_usb_receive()
819 switch (urb->status) { in irda_usb_receive()
836 __func__, urb->status, urb->transfer_flags); in irda_usb_receive()
850 self->rx_defer_timer.data = (unsigned long) urb; in irda_usb_receive()
858 if (urb->actual_length <= self->header_length) { in irda_usb_receive()
876 docopy = (urb->actual_length < IRDA_RX_COPY_THRESHOLD); in irda_usb_receive()
880 newskb = dev_alloc_skb(docopy ? urb->actual_length : in irda_usb_receive()
884 newskb = dev_alloc_skb(docopy ? urb->actual_length : in irda_usb_receive()
900 skb_copy_from_linear_data(skb, newskb->data, urb->actual_length); in irda_usb_receive()
914 skb_put(dataskb, urb->actual_length); in irda_usb_receive()
943 urb->context = NULL; in irda_usb_receive()
944 self->idle_rx_urb = urb; in irda_usb_receive()
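The receive completion (source lines 792-944) applies a copy-break: frames shorter than IRDA_RX_COPY_THRESHOLD are copied into a small, freshly allocated skb so the large buffer can be rearmed immediately, while longer frames are passed up as-is and a new full-size skb replaces them. urb->actual_length gives the received byte count, and the IrDA-USB header is stripped before the frame goes to the stack. The real driver then rotates URBs, parking the just-completed one as self->idle_rx_urb and resubmitting the previously idle one; the sketch below simplifies that to rearming a single URB, and the threshold and header length are placeholders:

    #include <linux/skbuff.h>
    #include <linux/usb.h>

    #define RX_BUF_LEN     4096                      /* placeholder max Rx size */
    #define COPY_THRESHOLD 256                       /* placeholder copy-break limit */
    #define HDR_LEN        1                         /* placeholder header length */

    static void rx_done(struct urb *urb)
    {
        struct sk_buff *skb = urb->context;          /* buffer armed at submit time */
        struct sk_buff *newskb, *dataskb;
        unsigned int len = urb->actual_length;

        if (urb->status != 0 || len <= HDR_LEN)
            goto rearm;                              /* error or header-only frame */

        if (len < COPY_THRESHOLD) {
            /* small frame: copy it out, keep the big skb hooked to the URB */
            newskb = dev_alloc_skb(len);
            if (!newskb)
                goto rearm;
            skb_copy_from_linear_data(skb, newskb->data, len);
            dataskb = newskb;                        /* this one goes up the stack */
        } else {
            /* large frame: deliver the original, allocate a fresh Rx buffer */
            newskb = dev_alloc_skb(RX_BUF_LEN);
            if (!newskb)
                goto rearm;
            dataskb = skb;
            skb = newskb;                            /* becomes the next Rx buffer */
        }

        skb_put(dataskb, len);                       /* set the real frame length */
        skb_pull(dataskb, HDR_LEN);                  /* strip the IrDA-USB header */
        /* set dataskb->dev / protocol and hand it to netif_rx() here */

    rearm:
        urb->context = NULL;                         /* URB idle until rearmed with skb */
        /* arm_rx_urb(..., urb, skb) resubmits it, as in the sketch above */
    }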
958 struct urb *urb = (struct urb *) data; in irda_usb_rx_defer_expired() local
959 struct sk_buff *skb = (struct sk_buff *) urb->context; in irda_usb_rx_defer_expired()
962 struct urb *next_urb; in irda_usb_rx_defer_expired()
972 urb->context = NULL; in irda_usb_rx_defer_expired()
973 self->idle_rx_urb = urb; in irda_usb_rx_defer_expired()
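Source lines 850 and 958-973 show the deferred resubmit: when a receive URB completes with an error the driver does not rearm it straight away (resubmitting from inside a failing completion could recurse), it parks the URB pointer in rx_defer_timer.data and lets the timer handler recycle it a few milliseconds later. This is the pre-4.15 timer interface, where the callback takes an unsigned long cookie; a sketch of the arrangement, with the interval as a placeholder and the timer assumed to have been set up with init_timer() at probe time:

    #include <linux/jiffies.h>
    #include <linux/timer.h>
    #include <linux/usb.h>

    static struct timer_list rx_defer_timer;         /* the driver keeps one per device */

    static void rx_defer_expired(unsigned long data)
    {
        struct urb *urb = (struct urb *)data;

        urb->context = NULL;                         /* same recycling as the normal path */
        /* rearm a receive URB here, as in the submit sketch above */
    }

    static void defer_rx_resubmit(struct urb *urb)
    {
        rx_defer_timer.function = rx_defer_expired;
        rx_defer_timer.data = (unsigned long)urb;    /* old timer API: data cookie */
        mod_timer(&rx_defer_timer, jiffies + msecs_to_jiffies(10));
    }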
1261 struct urb *urb = self->rx_urb[i]; in irda_usb_net_close() local
1262 struct sk_buff *skb = (struct sk_buff *) urb->context; in irda_usb_net_close()
1264 usb_kill_urb(urb); in irda_usb_net_close()
1268 urb->context = NULL; in irda_usb_net_close()
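Teardown in irda_usb_net_close() (source lines 1261-1268) uses usb_kill_urb(), which, unlike usb_unlink_urb(), does not return until the URB is retired and its completion handler has finished; only then is it safe to free the sk_buff still hanging off urb->context. A sketch of that loop, with a hypothetical subset of the driver's control block:

    #include <linux/skbuff.h>
    #include <linux/usb.h>

    struct my_priv {                                 /* hypothetical subset of the state */
        struct urb **rx_urb;
        int max_rx_urb;
        struct urb *tx_urb;
        struct urb *speed_urb;
    };

    static void kill_all_urbs(struct my_priv *self)
    {
        int i;

        for (i = 0; i < self->max_rx_urb; i++) {
            struct urb *urb = self->rx_urb[i];
            struct sk_buff *skb = urb->context;

            usb_kill_urb(urb);                       /* synchronous: completion has run */
            if (skb) {
                dev_kfree_skb(skb);                  /* Rx buffer still hooked to the URB */
                urb->context = NULL;
            }
        }
        usb_kill_urb(self->tx_urb);                  /* the close path also stops these */
        usb_kill_urb(self->speed_urb);
    }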
1639 self->rx_urb = kcalloc(self->max_rx_urb, sizeof(struct urb *), in irda_usb_probe()
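Finally, probe (source line 1639) sizes the receive ring at runtime: it kcalloc()s an array of struct urb pointers and then allocates each URB with usb_alloc_urb(). A sketch of that allocation with a matching unwind path (names are hypothetical):

    #include <linux/slab.h>
    #include <linux/usb.h>

    struct my_priv {                                 /* hypothetical subset of the state */
        struct urb **rx_urb;
        int max_rx_urb;
    };

    static int alloc_rx_urbs(struct my_priv *self, int count)
    {
        int i;

        self->max_rx_urb = count;
        self->rx_urb = kcalloc(count, sizeof(struct urb *), GFP_KERNEL);
        if (!self->rx_urb)
            return -ENOMEM;

        for (i = 0; i < count; i++) {
            self->rx_urb[i] = usb_alloc_urb(0, GFP_KERNEL);   /* 0: not isochronous */
            if (!self->rx_urb[i])
                goto err;
        }
        return 0;

    err:
        while (--i >= 0)
            usb_free_urb(self->rx_urb[i]);
        kfree(self->rx_urb);
        self->rx_urb = NULL;
        return -ENOMEM;
    }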