Lines matching refs: self (all hits below are from the IrDA USB dongle driver, irda-usb.c)
109 static void irda_usb_init_qos(struct irda_usb_cb *self) ;
112 static void irda_usb_change_speed_xbofs(struct irda_usb_cb *self);
115 static int irda_usb_open(struct irda_usb_cb *self);
116 static void irda_usb_close(struct irda_usb_cb *self);
148 static void irda_usb_build_header(struct irda_usb_cb *self, in irda_usb_build_header() argument
156 if (self->capability & IUC_STIR421X && in irda_usb_build_header()
157 ((self->new_speed != -1) || (self->new_xbofs != -1))) { in irda_usb_build_header()
162 if (self->new_speed == -1) in irda_usb_build_header()
163 self->new_speed = self->speed ; in irda_usb_build_header()
165 if (self->new_xbofs == -1) in irda_usb_build_header()
166 self->new_xbofs = self->xbofs ; in irda_usb_build_header()
170 if (self->new_speed != -1) { in irda_usb_build_header()
175 if ((self->capability & IUC_SPEED_BUG) && in irda_usb_build_header()
176 (!force) && (self->speed != -1)) { in irda_usb_build_header()
185 __func__, self->new_speed); in irda_usb_build_header()
186 self->speed = self->new_speed; in irda_usb_build_header()
190 switch (self->speed) { in irda_usb_build_header()
218 self->new_xbofs = 0; in irda_usb_build_header()
222 self->new_xbofs = 0; in irda_usb_build_header()
230 if (self->new_xbofs != -1) { in irda_usb_build_header()
232 __func__, self->new_xbofs); in irda_usb_build_header()
233 self->xbofs = self->new_xbofs; in irda_usb_build_header()
237 switch (self->xbofs) { in irda_usb_build_header()
300 static void irda_usb_change_speed_xbofs(struct irda_usb_cb *self) in irda_usb_change_speed_xbofs() argument
307 self->new_speed, self->new_xbofs); in irda_usb_change_speed_xbofs()
310 urb = self->speed_urb; in irda_usb_change_speed_xbofs()
317 frame = self->speed_buff; in irda_usb_change_speed_xbofs()
320 irda_usb_build_header(self, frame, 1); in irda_usb_change_speed_xbofs()
322 if (self->capability & IUC_STIR421X) { in irda_usb_change_speed_xbofs()
329 usb_fill_bulk_urb(urb, self->usbdev, in irda_usb_change_speed_xbofs()
330 usb_sndbulkpipe(self->usbdev, self->bulk_out_ep), in irda_usb_change_speed_xbofs()
332 speed_bulk_callback, self); in irda_usb_change_speed_xbofs()
333 urb->transfer_buffer_length = self->header_length; in irda_usb_change_speed_xbofs()
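The cluster above (file lines 300-333) is where the pending speed/xbofs values held in self are actually pushed to the dongle: irda_usb_build_header() serializes them into self->speed_buff, and the frame goes out on the bulk-out endpoint via self->speed_urb. A minimal sketch of that submit pattern, with struct irda_usb_cb trimmed to the fields referenced here and a placeholder completion handler (both assumptions, not the driver's real layout):

#include <linux/usb.h>

/* Trimmed view of struct irda_usb_cb: only the members this path
 * touches are shown (assumption -- the real structure has many more). */
struct speed_ctx {
	struct usb_device *usbdev;
	struct urb *speed_urb;
	__u8 *speed_buff;	/* preallocated frame holding the header */
	int header_length;	/* USB-IrDA header size for this dongle */
	__u8 bulk_out_ep;
};

static void speed_done(struct urb *urb)
{
	/* The real handler, speed_bulk_callback(), clears new_speed /
	 * new_xbofs and wakes the netif queue (file lines 374-378). */
}

static int submit_speed_frame(struct speed_ctx *ctx)
{
	struct urb *urb = ctx->speed_urb;

	/* irda_usb_build_header() would have filled ctx->speed_buff
	 * from the pending new_speed / new_xbofs values at this point. */
	usb_fill_bulk_urb(urb, ctx->usbdev,
			  usb_sndbulkpipe(ctx->usbdev, ctx->bulk_out_ep),
			  ctx->speed_buff, ctx->header_length,
			  speed_done, ctx);

	return usb_submit_urb(urb, GFP_ATOMIC);
}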
349 struct irda_usb_cb *self = urb->context; in speed_bulk_callback() local
352 IRDA_ASSERT(self != NULL, return;); in speed_bulk_callback()
354 IRDA_ASSERT(urb == self->speed_urb, return;); in speed_bulk_callback()
374 self->new_speed = -1; in speed_bulk_callback()
375 self->new_xbofs = -1; in speed_bulk_callback()
378 netif_wake_queue(self->netdev); in speed_bulk_callback()
388 struct irda_usb_cb *self = netdev_priv(netdev); in irda_usb_hard_xmit() local
389 struct urb *urb = self->tx_urb; in irda_usb_hard_xmit()
400 spin_lock_irqsave(&self->lock, flags); in irda_usb_hard_xmit()
405 if (!self->present) { in irda_usb_hard_xmit()
412 if ((xbofs != self->xbofs) && (xbofs != -1)) { in irda_usb_hard_xmit()
413 self->new_xbofs = xbofs; in irda_usb_hard_xmit()
418 if ((speed != self->speed) && (speed != -1)) { in irda_usb_hard_xmit()
420 self->new_speed = speed; in irda_usb_hard_xmit()
431 irda_usb_change_speed_xbofs(self); in irda_usb_hard_xmit()
443 skb_copy_from_linear_data(skb, self->tx_buff + self->header_length, skb->len); in irda_usb_hard_xmit()
446 if (self->capability & IUC_STIR421X) { in irda_usb_hard_xmit()
448 __u8* frame = self->tx_buff; in irda_usb_hard_xmit()
450 irda_usb_build_header(self, frame, 0); in irda_usb_hard_xmit()
462 irda_usb_build_header(self, self->tx_buff, 0); in irda_usb_hard_xmit()
466 ((struct irda_skb_cb *)skb->cb)->context = self; in irda_usb_hard_xmit()
468 usb_fill_bulk_urb(urb, self->usbdev, in irda_usb_hard_xmit()
469 usb_sndbulkpipe(self->usbdev, self->bulk_out_ep), in irda_usb_hard_xmit()
470 self->tx_buff, skb->len + self->header_length, in irda_usb_hard_xmit()
494 if (self->capability & IUC_NO_TURN) { in irda_usb_hard_xmit()
498 diff = ktime_us_delta(ktime_get(), self->stamp); in irda_usb_hard_xmit()
531 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_hard_xmit()
538 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_hard_xmit()
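File lines 494-498 above hint at why self->stamp exists: when the dongle cannot honour the IrDA minimum turnaround time itself (IUC_NO_TURN), the driver measures how long has passed since the last received frame and burns off whatever is still owed before letting the TX frame go out. A sketch of that computation, with mtt assumed to be the required turnaround in microseconds:

#include <linux/ktime.h>
#include <linux/delay.h>

/* 'stamp' is the ktime recorded when the last frame was received
 * (self->stamp, set in irda_usb_receive()); 'mtt' is the minimum
 * turnaround time in microseconds -- both assumptions here. */
static void wait_min_turnaround(ktime_t stamp, int mtt)
{
	s64 diff = ktime_us_delta(ktime_get(), stamp);

	if (mtt > diff) {
		mtt -= diff;		/* time still owed to the peer */
		if (mtt > 1000)
			mdelay(mtt / 1000);
		else
			udelay(mtt);
	}
}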
550 struct irda_usb_cb *self = ((struct irda_skb_cb *) skb->cb)->context; in write_bulk_callback() local
553 IRDA_ASSERT(self != NULL, return;); in write_bulk_callback()
555 IRDA_ASSERT(urb == self->tx_urb, return;); in write_bulk_callback()
580 spin_lock_irqsave(&self->lock, flags); in write_bulk_callback()
583 if ((!self->netopen) || (!self->present)) { in write_bulk_callback()
585 spin_unlock_irqrestore(&self->lock, flags); in write_bulk_callback()
590 if ((self->new_speed != -1) || (self->new_xbofs != -1)) { in write_bulk_callback()
591 if ((self->new_speed != self->speed) || in write_bulk_callback()
592 (self->new_xbofs != self->xbofs)) { in write_bulk_callback()
596 irda_usb_change_speed_xbofs(self); in write_bulk_callback()
599 self->new_speed = -1; in write_bulk_callback()
600 self->new_xbofs = -1; in write_bulk_callback()
602 netif_wake_queue(self->netdev); in write_bulk_callback()
606 netif_wake_queue(self->netdev); in write_bulk_callback()
608 spin_unlock_irqrestore(&self->lock, flags); in write_bulk_callback()
623 struct irda_usb_cb *self = netdev_priv(netdev); in irda_usb_net_timeout() local
628 IRDA_ASSERT(self != NULL, return;); in irda_usb_net_timeout()
631 spin_lock_irqsave(&self->lock, flags); in irda_usb_net_timeout()
634 if (!self->present) { in irda_usb_net_timeout()
637 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_net_timeout()
642 urb = self->speed_urb; in irda_usb_net_timeout()
659 netif_wake_queue(self->netdev); in irda_usb_net_timeout()
666 urb = self->tx_urb; in irda_usb_net_timeout()
678 if(self->new_speed == -1) in irda_usb_net_timeout()
679 self->new_speed = self->speed; in irda_usb_net_timeout()
680 if(self->new_xbofs == -1) in irda_usb_net_timeout()
681 self->new_xbofs = self->xbofs; in irda_usb_net_timeout()
682 irda_usb_change_speed_xbofs(self); in irda_usb_net_timeout()
703 netif_wake_queue(self->netdev); in irda_usb_net_timeout()
708 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_net_timeout()
755 static void irda_usb_submit(struct irda_usb_cb *self, struct sk_buff *skb, struct urb *urb) in irda_usb_submit() argument
766 cb->context = self; in irda_usb_submit()
769 usb_fill_bulk_urb(urb, self->usbdev, in irda_usb_submit()
770 usb_rcvbulkpipe(self->usbdev, self->bulk_in_ep), in irda_usb_submit()
795 struct irda_usb_cb *self; in irda_usb_receive() local
807 self = (struct irda_usb_cb *) cb->context; in irda_usb_receive()
808 IRDA_ASSERT(self != NULL, return;); in irda_usb_receive()
811 if ((!self->netopen) || (!self->present)) { in irda_usb_receive()
821 self->netdev->stats.rx_crc_errors++; in irda_usb_receive()
834 self->netdev->stats.rx_errors++; in irda_usb_receive()
849 self->rx_defer_timer.function = irda_usb_rx_defer_expired; in irda_usb_receive()
850 self->rx_defer_timer.data = (unsigned long) urb; in irda_usb_receive()
851 mod_timer(&self->rx_defer_timer, jiffies + (10 * HZ / 1000)); in irda_usb_receive()
856 if (urb->actual_length <= self->header_length) { in irda_usb_receive()
866 self->stamp = ktime_get(); in irda_usb_receive()
877 if (self->capability & IUC_STIR421X) in irda_usb_receive()
886 self->netdev->stats.rx_dropped++; in irda_usb_receive()
913 skb_pull(dataskb, self->header_length); in irda_usb_receive()
916 dataskb->dev = self->netdev; in irda_usb_receive()
923 self->netdev->stats.rx_bytes += len; in irda_usb_receive()
924 self->netdev->stats.rx_packets++; in irda_usb_receive()
938 next_urb = self->idle_rx_urb; in irda_usb_receive()
942 self->idle_rx_urb = urb; in irda_usb_receive()
946 irda_usb_submit(self, skb, next_urb); in irda_usb_receive()
958 struct irda_usb_cb *self; in irda_usb_rx_defer_expired() local
965 self = (struct irda_usb_cb *) cb->context; in irda_usb_rx_defer_expired()
966 IRDA_ASSERT(self != NULL, return;); in irda_usb_rx_defer_expired()
969 next_urb = self->idle_rx_urb; in irda_usb_rx_defer_expired()
971 self->idle_rx_urb = urb; in irda_usb_rx_defer_expired()
972 irda_usb_submit(self, skb, next_urb); in irda_usb_rx_defer_expired()
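The receive clusters above revolve around a small URB-recycling scheme: irda_usb_submit() arms a bulk-in URB with a fresh skb, and irda_usb_receive() hands the filled skb up, keeps the just-completed URB as self->idle_rx_urb, and rearms the previously idle one. A stripped-down sketch of the arming step, with the buffer size and context structure as placeholders (assumptions):

#include <linux/usb.h>
#include <linux/skbuff.h>

#define RX_BUF_SIZE 4096	/* stand-in for IRDA_SKB_MAX_MTU */

/* Only the fields this path needs (assumption). */
struct rx_ctx {
	struct usb_device *usbdev;
	__u8 bulk_in_ep;
};

static void rx_done(struct urb *urb)
{
	/* The real completion handler, irda_usb_receive(), passes the
	 * skb to the IrDA stack and swaps in the idle URB. */
}

static void rx_arm(struct rx_ctx *ctx, struct sk_buff *skb, struct urb *urb)
{
	/* The skb travels as the URB context so the completion handler
	 * can find it again (the driver also stashes pointers in skb->cb). */
	usb_fill_bulk_urb(urb, ctx->usbdev,
			  usb_rcvbulkpipe(ctx->usbdev, ctx->bulk_in_ep),
			  skb->data, RX_BUF_SIZE, rx_done, skb);
	urb->status = 0;

	if (usb_submit_urb(urb, GFP_ATOMIC))
		pr_debug("bulk-in URB submit failed\n");
}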
980 static int irda_usb_is_receiving(struct irda_usb_cb *self) in irda_usb_is_receiving() argument
1016 static int stir421x_fw_upload(struct irda_usb_cb *self, in stir421x_fw_upload() argument
1040 ret = usb_bulk_msg(self->usbdev, in stir421x_fw_upload()
1041 usb_sndbulkpipe(self->usbdev, in stir421x_fw_upload()
1042 self->bulk_out_ep), in stir421x_fw_upload()
1064 static int stir421x_patch_device(struct irda_usb_cb *self) in stir421x_patch_device() argument
1078 self->usbdev->descriptor.bcdDevice); in stir421x_patch_device()
1079 ret = request_firmware(&fw, stir421x_fw_name, &self->usbdev->dev); in stir421x_patch_device()
1113 if (self->usbdev->descriptor.bcdDevice == cpu_to_le16(fw_version)) { in stir421x_patch_device()
1129 ret = stir421x_fw_upload(self, &fw->data[i], in stir421x_patch_device()
1160 struct irda_usb_cb *self; in irda_usb_net_open() local
1166 self = netdev_priv(netdev); in irda_usb_net_open()
1167 IRDA_ASSERT(self != NULL, return -1;); in irda_usb_net_open()
1169 spin_lock_irqsave(&self->lock, flags); in irda_usb_net_open()
1171 if(!self->present) { in irda_usb_net_open()
1172 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_net_open()
1177 if(self->needspatch) { in irda_usb_net_open()
1178 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_net_open()
1185 self->speed = -1; in irda_usb_net_open()
1186 self->xbofs = -1; in irda_usb_net_open()
1187 self->new_speed = -1; in irda_usb_net_open()
1188 self->new_xbofs = -1; in irda_usb_net_open()
1192 self->netopen = 1; in irda_usb_net_open()
1193 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_net_open()
1200 sprintf(hwname, "usb#%d", self->usbdev->devnum); in irda_usb_net_open()
1201 self->irlap = irlap_open(netdev, &self->qos, hwname); in irda_usb_net_open()
1202 IRDA_ASSERT(self->irlap != NULL, return -1;); in irda_usb_net_open()
1211 self->idle_rx_urb = self->rx_urb[IU_MAX_ACTIVE_RX_URBS]; in irda_usb_net_open()
1212 self->idle_rx_urb->context = NULL; in irda_usb_net_open()
1224 irda_usb_submit(self, skb, self->rx_urb[i]); in irda_usb_net_open()
1240 struct irda_usb_cb *self; in irda_usb_net_close() local
1244 self = netdev_priv(netdev); in irda_usb_net_close()
1245 IRDA_ASSERT(self != NULL, return -1;); in irda_usb_net_close()
1249 self->netopen = 0; in irda_usb_net_close()
1255 del_timer(&self->rx_defer_timer); in irda_usb_net_close()
1258 for (i = 0; i < self->max_rx_urb; i++) { in irda_usb_net_close()
1259 struct urb *urb = self->rx_urb[i]; in irda_usb_net_close()
1270 usb_kill_urb(self->tx_urb); in irda_usb_net_close()
1271 usb_kill_urb(self->speed_urb); in irda_usb_net_close()
1274 if (self->irlap) in irda_usb_net_close()
1275 irlap_close(self->irlap); in irda_usb_net_close()
1276 self->irlap = NULL; in irda_usb_net_close()
1289 struct irda_usb_cb *self; in irda_usb_net_ioctl() local
1293 self = netdev_priv(dev); in irda_usb_net_ioctl()
1294 IRDA_ASSERT(self != NULL, return -1;); in irda_usb_net_ioctl()
1303 spin_lock_irqsave(&self->lock, flags); in irda_usb_net_ioctl()
1305 if(self->present) { in irda_usb_net_ioctl()
1307 self->new_speed = irq->ifr_baudrate; in irda_usb_net_ioctl()
1308 irda_usb_change_speed_xbofs(self); in irda_usb_net_ioctl()
1310 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_net_ioctl()
1316 if(self->netopen) in irda_usb_net_ioctl()
1317 irda_device_set_media_busy(self->netdev, TRUE); in irda_usb_net_ioctl()
1320 irq->ifr_receiving = irda_usb_is_receiving(self); in irda_usb_net_ioctl()
1342 static inline void irda_usb_init_qos(struct irda_usb_cb *self) in irda_usb_init_qos() argument
1347 desc = self->irda_desc; in irda_usb_init_qos()
1350 irda_init_max_qos_capabilies(&self->qos); in irda_usb_init_qos()
1355 self->qos.baud_rate.bits = le16_to_cpu(desc->wBaudRate); in irda_usb_init_qos()
1356 self->qos.min_turn_time.bits = desc->bmMinTurnaroundTime; in irda_usb_init_qos()
1357 self->qos.additional_bofs.bits = desc->bmAdditionalBOFs; in irda_usb_init_qos()
1358 self->qos.window_size.bits = desc->bmWindowSize; in irda_usb_init_qos()
1359 self->qos.data_size.bits = desc->bmDataSize; in irda_usb_init_qos()
1362 __func__, self->qos.baud_rate.bits, self->qos.data_size.bits, in irda_usb_init_qos()
1363 self->qos.window_size.bits, self->qos.additional_bofs.bits, in irda_usb_init_qos()
1364 self->qos.min_turn_time.bits); in irda_usb_init_qos()
1367 if(self->capability & IUC_SIR_ONLY) in irda_usb_init_qos()
1368 self->qos.baud_rate.bits &= 0x00ff; in irda_usb_init_qos()
1369 if(self->capability & IUC_SMALL_PKT) in irda_usb_init_qos()
1370 self->qos.data_size.bits = 0x07; in irda_usb_init_qos()
1371 if(self->capability & IUC_NO_WINDOW) in irda_usb_init_qos()
1372 self->qos.window_size.bits = 0x01; in irda_usb_init_qos()
1373 if(self->capability & IUC_MAX_WINDOW) in irda_usb_init_qos()
1374 self->qos.window_size.bits = 0x7f; in irda_usb_init_qos()
1375 if(self->capability & IUC_MAX_XBOFS) in irda_usb_init_qos()
1376 self->qos.additional_bofs.bits = 0x01; in irda_usb_init_qos()
1381 self->qos.min_turn_time.bits = qos_mtt_bits; in irda_usb_init_qos()
1387 irda_qos_bits_to_value(&self->qos); in irda_usb_init_qos()
1403 static inline int irda_usb_open(struct irda_usb_cb *self) in irda_usb_open() argument
1405 struct net_device *netdev = self->netdev; in irda_usb_open()
1409 irda_usb_init_qos(self); in irda_usb_open()
1419 static inline void irda_usb_close(struct irda_usb_cb *self) in irda_usb_close() argument
1422 unregister_netdev(self->netdev); in irda_usb_close()
1425 kfree(self->speed_buff); in irda_usb_close()
1426 self->speed_buff = NULL; in irda_usb_close()
1428 kfree(self->tx_buff); in irda_usb_close()
1429 self->tx_buff = NULL; in irda_usb_close()
1451 static inline int irda_usb_parse_endpoints(struct irda_usb_cb *self, struct usb_host_endpoint *endp… in irda_usb_parse_endpoints() argument
1456 self->bulk_in_ep = 0; in irda_usb_parse_endpoints()
1457 self->bulk_out_ep = 0; in irda_usb_parse_endpoints()
1458 self->bulk_int_ep = 0; in irda_usb_parse_endpoints()
1480 self->bulk_in_ep = ep; in irda_usb_parse_endpoints()
1483 self->bulk_out_ep = ep; in irda_usb_parse_endpoints()
1484 self->bulk_out_mtu = psize; in irda_usb_parse_endpoints()
1490 self->bulk_int_ep = ep; in irda_usb_parse_endpoints()
1499 __func__, self->bulk_in_ep, self->bulk_out_ep, in irda_usb_parse_endpoints()
1500 self->bulk_out_mtu, self->bulk_int_ep); in irda_usb_parse_endpoints()
1502 return (self->bulk_in_ep != 0) && (self->bulk_out_ep != 0); in irda_usb_parse_endpoints()
1600 struct irda_usb_cb *self; in irda_usb_probe() local
1615 net = alloc_irdadev(sizeof(*self)); in irda_usb_probe()
1620 self = netdev_priv(net); in irda_usb_probe()
1621 self->netdev = net; in irda_usb_probe()
1622 spin_lock_init(&self->lock); in irda_usb_probe()
1623 init_timer(&self->rx_defer_timer); in irda_usb_probe()
1625 self->capability = id->driver_info; in irda_usb_probe()
1626 self->needspatch = ((self->capability & IUC_STIR421X) != 0); in irda_usb_probe()
1629 if (self->capability & IUC_STIR421X) { in irda_usb_probe()
1630 self->max_rx_urb = IU_SIGMATEL_MAX_RX_URBS; in irda_usb_probe()
1631 self->header_length = USB_IRDA_STIR421X_HEADER; in irda_usb_probe()
1633 self->max_rx_urb = IU_MAX_RX_URBS; in irda_usb_probe()
1634 self->header_length = USB_IRDA_HEADER; in irda_usb_probe()
1637 self->rx_urb = kcalloc(self->max_rx_urb, sizeof(struct urb *), in irda_usb_probe()
1639 if (!self->rx_urb) in irda_usb_probe()
1642 for (i = 0; i < self->max_rx_urb; i++) { in irda_usb_probe()
1643 self->rx_urb[i] = usb_alloc_urb(0, GFP_KERNEL); in irda_usb_probe()
1644 if (!self->rx_urb[i]) { in irda_usb_probe()
1648 self->tx_urb = usb_alloc_urb(0, GFP_KERNEL); in irda_usb_probe()
1649 if (!self->tx_urb) { in irda_usb_probe()
1652 self->speed_urb = usb_alloc_urb(0, GFP_KERNEL); in irda_usb_probe()
1653 if (!self->speed_urb) { in irda_usb_probe()
1689 if(!irda_usb_parse_endpoints(self, interface->endpoint, in irda_usb_probe()
1696 self->usbdev = dev; in irda_usb_probe()
1704 if (self->needspatch) { in irda_usb_probe()
1705 ret = usb_control_msg (self->usbdev, usb_sndctrlpipe (self->usbdev, 0), in irda_usb_probe()
1715 self->irda_desc = irda_desc; in irda_usb_probe()
1716 self->present = 1; in irda_usb_probe()
1717 self->netopen = 0; in irda_usb_probe()
1718 self->usbintf = intf; in irda_usb_probe()
1724 self->speed_buff = kzalloc(IRDA_USB_SPEED_MTU, GFP_KERNEL); in irda_usb_probe()
1725 if (!self->speed_buff) in irda_usb_probe()
1728 self->tx_buff = kzalloc(IRDA_SKB_MAX_MTU + self->header_length, in irda_usb_probe()
1730 if (!self->tx_buff) in irda_usb_probe()
1733 ret = irda_usb_open(self); in irda_usb_probe()
1738 usb_set_intfdata(intf, self); in irda_usb_probe()
1740 if (self->needspatch) { in irda_usb_probe()
1742 ret = stir421x_patch_device(self); in irda_usb_probe()
1743 self->needspatch = (ret < 0); in irda_usb_probe()
1744 if (self->needspatch) { in irda_usb_probe()
1750 irda_desc = irda_usb_find_class_desc (self->usbintf); in irda_usb_probe()
1755 kfree(self->irda_desc); in irda_usb_probe()
1756 self->irda_desc = irda_desc; in irda_usb_probe()
1757 irda_usb_init_qos(self); in irda_usb_probe()
1762 unregister_netdev(self->netdev); in irda_usb_probe()
1764 kfree(self->tx_buff); in irda_usb_probe()
1766 kfree(self->speed_buff); in irda_usb_probe()
1769 usb_free_urb(self->speed_urb); in irda_usb_probe()
1771 usb_free_urb(self->tx_urb); in irda_usb_probe()
1773 for (i = 0; i < self->max_rx_urb; i++) in irda_usb_probe()
1774 usb_free_urb(self->rx_urb[i]); in irda_usb_probe()
1775 kfree(self->rx_urb); in irda_usb_probe()
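The probe cluster above allocates the per-device URB set that the rest of the listing keeps referring to: an array of self->max_rx_urb receive URBs plus one TX and one speed URB, each freed again on the error path. A bare-bones sketch of that allocation and unwind, using placeholder names (assumptions):

#include <linux/usb.h>
#include <linux/slab.h>

/* Allocate 'n' receive URBs plus the pointer array holding them,
 * unwinding on failure the same way the probe error path does. */
static struct urb **alloc_rx_urbs(int n)
{
	struct urb **urbs;
	int i;

	urbs = kcalloc(n, sizeof(struct urb *), GFP_KERNEL);
	if (!urbs)
		return NULL;

	for (i = 0; i < n; i++) {
		urbs[i] = usb_alloc_urb(0, GFP_KERNEL);
		if (!urbs[i])
			goto err;
	}
	return urbs;

err:
	while (--i >= 0)
		usb_free_urb(urbs[i]);
	kfree(urbs);
	return NULL;
}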
1797 struct irda_usb_cb *self = usb_get_intfdata(intf); in irda_usb_disconnect() local
1801 if (!self) in irda_usb_disconnect()
1805 spin_lock_irqsave(&self->lock, flags); in irda_usb_disconnect()
1809 self->present = 0; in irda_usb_disconnect()
1812 del_timer(&self->rx_defer_timer); in irda_usb_disconnect()
1816 spin_unlock_irqrestore(&self->lock, flags); in irda_usb_disconnect()
1819 if((self->netopen) || (self->irlap)) { in irda_usb_disconnect()
1822 netif_stop_queue(self->netdev); in irda_usb_disconnect()
1824 for (i = 0; i < self->max_rx_urb; i++) in irda_usb_disconnect()
1825 usb_kill_urb(self->rx_urb[i]); in irda_usb_disconnect()
1828 usb_kill_urb(self->tx_urb); in irda_usb_disconnect()
1829 usb_kill_urb(self->speed_urb); in irda_usb_disconnect()
1833 irda_usb_close(self); in irda_usb_disconnect()
1835 self->usbdev = NULL; in irda_usb_disconnect()
1836 self->usbintf = NULL; in irda_usb_disconnect()
1839 for (i = 0; i < self->max_rx_urb; i++) in irda_usb_disconnect()
1840 usb_free_urb(self->rx_urb[i]); in irda_usb_disconnect()
1841 kfree(self->rx_urb); in irda_usb_disconnect()
1843 usb_free_urb(self->tx_urb); in irda_usb_disconnect()
1844 usb_free_urb(self->speed_urb); in irda_usb_disconnect()
1847 free_netdev(self->netdev); in irda_usb_disconnect()
1855 struct irda_usb_cb *self = usb_get_intfdata(intf); in irda_usb_suspend() local
1858 netif_device_detach(self->netdev); in irda_usb_suspend()
1860 if (self->tx_urb != NULL) in irda_usb_suspend()
1861 usb_kill_urb(self->tx_urb); in irda_usb_suspend()
1862 if (self->speed_urb != NULL) in irda_usb_suspend()
1863 usb_kill_urb(self->speed_urb); in irda_usb_suspend()
1864 for (i = 0; i < self->max_rx_urb; i++) { in irda_usb_suspend()
1865 if (self->rx_urb[i] != NULL) in irda_usb_suspend()
1866 usb_kill_urb(self->rx_urb[i]); in irda_usb_suspend()
1874 struct irda_usb_cb *self = usb_get_intfdata(intf); in irda_usb_resume() local
1877 for (i = 0; i < self->max_rx_urb; i++) { in irda_usb_resume()
1878 if (self->rx_urb[i] != NULL) in irda_usb_resume()
1879 usb_submit_urb(self->rx_urb[i], GFP_KERNEL); in irda_usb_resume()
1882 netif_device_attach(self->netdev); in irda_usb_resume()
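Finally, the suspend/resume clusters (file lines 1855-1882) show the URB lifecycle held in self being torn down and re-armed around power transitions: detach the net device and kill every outstanding URB on suspend, then resubmit the receive URBs and re-attach on resume. A condensed sketch of that pattern, with the context struct reduced to the fields used (assumption):

#include <linux/usb.h>
#include <linux/netdevice.h>

/* Reduced view of the driver state touched by suspend/resume. */
struct pm_ctx {
	struct net_device *netdev;
	struct urb *tx_urb;
	struct urb *speed_urb;
	struct urb **rx_urb;
	int max_rx_urb;
};

static void sketch_suspend(struct pm_ctx *c)
{
	int i;

	netif_device_detach(c->netdev);		/* stop the stack first */
	usb_kill_urb(c->tx_urb);
	usb_kill_urb(c->speed_urb);
	for (i = 0; i < c->max_rx_urb; i++)
		usb_kill_urb(c->rx_urb[i]);
}

static void sketch_resume(struct pm_ctx *c)
{
	int i;

	for (i = 0; i < c->max_rx_urb; i++)
		usb_submit_urb(c->rx_urb[i], GFP_KERNEL);	/* re-arm RX */
	netif_device_attach(c->netdev);
}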