Lines Matching refs:mcs

132 static int mcs_set_reg(struct mcs_cb *mcs, __u16 reg, __u16 val)  in mcs_set_reg()  argument
134 struct usb_device *dev = mcs->usbdev; in mcs_set_reg()
141 static int mcs_get_reg(struct mcs_cb *mcs, __u16 reg, __u16 * val) in mcs_get_reg() argument
143 struct usb_device *dev = mcs->usbdev; in mcs_get_reg()
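The two accessors above are thin wrappers around USB control transfers on endpoint 0. A minimal sketch of that shape follows, assuming the driver's struct mcs_cb with the usbdev field shown in the listing; the request codes (MCS_WRREQ, MCS_RDREQ), request types (MCS_WR_RTYPE, MCS_RD_RTYPE) and MCS_CTRL_TIMEOUT are illustrative names, not taken from the listing.

/* Sketch: write a 16-bit device register via a vendor control request. */
static int mcs_set_reg_sketch(struct mcs_cb *mcs, __u16 reg, __u16 val)
{
	struct usb_device *dev = mcs->usbdev;

	return usb_control_msg(dev, usb_sndctrlpipe(dev, 0), MCS_WRREQ,
			       MCS_WR_RTYPE, val, reg, NULL, 0,
			       MCS_CTRL_TIMEOUT);
}

/* Sketch: read a 16-bit register; returns the byte count (2 on success)
 * or a negative error. A kmalloc'ed bounce buffer is safer than passing
 * val directly on current kernels. */
static int mcs_get_reg_sketch(struct mcs_cb *mcs, __u16 reg, __u16 *val)
{
	struct usb_device *dev = mcs->usbdev;

	return usb_control_msg(dev, usb_rcvctrlpipe(dev, 0), MCS_RDREQ,
			       MCS_RD_RTYPE, 0, reg, val, 2,
			       MCS_CTRL_TIMEOUT);
}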
158 static inline int mcs_setup_transceiver_vishay(struct mcs_cb *mcs) in mcs_setup_transceiver_vishay() argument
164 ret = mcs_get_reg(mcs, MCS_XCVR_REG, &rval); in mcs_setup_transceiver_vishay()
178 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver_vishay()
183 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver_vishay()
188 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver_vishay()
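The Vishay setup above, like the MCS_MODE_REG handling further down in mcs_setup_transceiver(), is a read-modify-write sequence on a device register built from the two accessors. A hedged sketch of that recurring pattern; the helper name and the set/clear parameters are hypothetical, the real code inlines this with register-specific bits.

static int mcs_rmw_reg_sketch(struct mcs_cb *mcs, __u16 reg,
			      __u16 set, __u16 clear)
{
	__u16 rval;
	int ret;

	ret = mcs_get_reg(mcs, reg, &rval);	/* fetch current value */
	if (ret < 0)
		return ret;

	rval &= ~clear;				/* drop unwanted bits */
	rval |= set;				/* enable requested bits */

	return mcs_set_reg(mcs, reg, rval);	/* write it back */
}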
198 static inline int mcs_setup_transceiver_agilent(struct mcs_cb *mcs) in mcs_setup_transceiver_agilent() argument
205 static inline int mcs_setup_transceiver_sharp(struct mcs_cb *mcs) in mcs_setup_transceiver_sharp() argument
212 static inline int mcs_setup_transceiver(struct mcs_cb *mcs) in mcs_setup_transceiver() argument
223 ret = mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_setup_transceiver()
227 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_setup_transceiver()
232 ret = mcs_set_reg(mcs, MCS_MINRXPW_REG, rval); in mcs_setup_transceiver()
236 ret = mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_setup_transceiver()
241 if(mcs->sir_tweak) in mcs_setup_transceiver()
251 mcs->speed = 9600; in mcs_setup_transceiver()
252 mcs->new_speed = 0; /* no speed change pending */ in mcs_setup_transceiver()
260 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_setup_transceiver()
265 switch (mcs->transceiver_type) { in mcs_setup_transceiver()
267 ret = mcs_setup_transceiver_vishay(mcs); in mcs_setup_transceiver()
271 ret = mcs_setup_transceiver_sharp(mcs); in mcs_setup_transceiver()
275 ret = mcs_setup_transceiver_agilent(mcs); in mcs_setup_transceiver()
280 mcs->transceiver_type); in mcs_setup_transceiver()
289 if (mcs->transceiver_type != MCS_TSC_SHARP) { in mcs_setup_transceiver()
291 ret = mcs_get_reg(mcs, MCS_XCVR_REG, &rval); in mcs_setup_transceiver()
294 if (mcs->receive_mode) in mcs_setup_transceiver()
298 ret = mcs_set_reg(mcs, MCS_XCVR_REG, rval); in mcs_setup_transceiver()
305 ret = mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_setup_transceiver()
311 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_setup_transceiver()
390 static void mcs_unwrap_mir(struct mcs_cb *mcs, __u8 *buf, int len) in mcs_unwrap_mir() argument
403 mcs->netdev->name, new_len); in mcs_unwrap_mir()
404 ++mcs->netdev->stats.rx_errors; in mcs_unwrap_mir()
405 ++mcs->netdev->stats.rx_length_errors; in mcs_unwrap_mir()
414 mcs->netdev->stats.rx_errors++; in mcs_unwrap_mir()
415 mcs->netdev->stats.rx_crc_errors++; in mcs_unwrap_mir()
421 ++mcs->netdev->stats.rx_dropped; in mcs_unwrap_mir()
430 skb->dev = mcs->netdev; in mcs_unwrap_mir()
434 mcs->netdev->stats.rx_packets++; in mcs_unwrap_mir()
435 mcs->netdev->stats.rx_bytes += new_len; in mcs_unwrap_mir()
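mcs_unwrap_mir() above validates the frame length, verifies the 16-bit FCS, and only then copies the payload into a fresh skb and hands it to the stack, updating the RX counters at each outcome. A hedged reconstruction of that flow; irda_calc_crc16(), GOOD_FCS and ETH_P_IRDA come from the old in-kernel IrDA stack, and details such as the error printouts are omitted.

static void mcs_unwrap_mir_sketch(struct mcs_cb *mcs, __u8 *buf, int len)
{
	struct sk_buff *skb;
	__u16 fcs;
	int new_len = len - 2;			/* strip the 16-bit FCS */

	if (unlikely(new_len <= 0)) {
		mcs->netdev->stats.rx_errors++;
		mcs->netdev->stats.rx_length_errors++;
		return;
	}

	fcs = irda_calc_crc16(0xffff, buf, len);	/* runs over payload + FCS */
	if (fcs != GOOD_FCS) {
		mcs->netdev->stats.rx_errors++;
		mcs->netdev->stats.rx_crc_errors++;
		return;
	}

	skb = dev_alloc_skb(new_len + 1);
	if (unlikely(!skb)) {
		++mcs->netdev->stats.rx_dropped;
		return;
	}

	skb_reserve(skb, 1);
	skb_copy_to_linear_data(skb, buf, new_len);
	skb_put(skb, new_len);
	skb_reset_mac_header(skb);
	skb->protocol = htons(ETH_P_IRDA);
	skb->dev = mcs->netdev;

	netif_rx(skb);

	mcs->netdev->stats.rx_packets++;
	mcs->netdev->stats.rx_bytes += new_len;
}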
442 static void mcs_unwrap_fir(struct mcs_cb *mcs, __u8 *buf, int len) in mcs_unwrap_fir() argument
456 mcs->netdev->name, new_len); in mcs_unwrap_fir()
457 ++mcs->netdev->stats.rx_errors; in mcs_unwrap_fir()
458 ++mcs->netdev->stats.rx_length_errors; in mcs_unwrap_fir()
466 mcs->netdev->stats.rx_errors++; in mcs_unwrap_fir()
467 mcs->netdev->stats.rx_crc_errors++; in mcs_unwrap_fir()
473 ++mcs->netdev->stats.rx_dropped; in mcs_unwrap_fir()
482 skb->dev = mcs->netdev; in mcs_unwrap_fir()
486 mcs->netdev->stats.rx_packets++; in mcs_unwrap_fir()
487 mcs->netdev->stats.rx_bytes += new_len; in mcs_unwrap_fir()
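mcs_unwrap_fir() follows the same shape as the MIR path but strips a 4-byte trailer and validates a little-endian CRC-32 rather than the 16-bit FCS. A hedged sketch of just that check, using the standard crc32_le() and get_unaligned_le32() helpers; the rest of the function mirrors the MIR sketch above.

	__u32 fcs;
	int new_len = len - 4;			/* strip the CRC-32 trailer */

	fcs = ~(crc32_le(~0, buf, new_len));
	if (fcs != get_unaligned_le32(buf + new_len)) {
		mcs->netdev->stats.rx_errors++;
		mcs->netdev->stats.rx_crc_errors++;
		return;
	}
	/* ...then allocate, copy and netif_rx() exactly as in the MIR sketch. */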
495 static inline int mcs_setup_urbs(struct mcs_cb *mcs) in mcs_setup_urbs() argument
497 mcs->rx_urb = NULL; in mcs_setup_urbs()
499 mcs->tx_urb = usb_alloc_urb(0, GFP_KERNEL); in mcs_setup_urbs()
500 if (!mcs->tx_urb) in mcs_setup_urbs()
503 mcs->rx_urb = usb_alloc_urb(0, GFP_KERNEL); in mcs_setup_urbs()
504 if (!mcs->rx_urb) { in mcs_setup_urbs()
505 usb_free_urb(mcs->tx_urb); in mcs_setup_urbs()
506 mcs->tx_urb = NULL; in mcs_setup_urbs()
517 static inline int mcs_receive_start(struct mcs_cb *mcs) in mcs_receive_start() argument
519 mcs->rx_buff.in_frame = FALSE; in mcs_receive_start()
520 mcs->rx_buff.state = OUTSIDE_FRAME; in mcs_receive_start()
522 usb_fill_bulk_urb(mcs->rx_urb, mcs->usbdev, in mcs_receive_start()
523 usb_rcvbulkpipe(mcs->usbdev, mcs->ep_in), in mcs_receive_start()
524 mcs->in_buf, 4096, mcs_receive_irq, mcs); in mcs_receive_start()
526 mcs->rx_urb->status = 0; in mcs_receive_start()
527 return usb_submit_urb(mcs->rx_urb, GFP_KERNEL); in mcs_receive_start()
531 static inline int mcs_find_endpoints(struct mcs_cb *mcs, in mcs_find_endpoints() argument
544 mcs->ep_in = ep[i].desc.bEndpointAddress; in mcs_find_endpoints()
546 mcs->ep_out = ep[i].desc.bEndpointAddress; in mcs_find_endpoints()
551 if ((mcs->ep_in != 0) && (mcs->ep_out != 0)) { in mcs_find_endpoints()
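mcs_find_endpoints() walks the interface's endpoint array, records the IN and OUT bulk endpoint addresses, and reports success only when both were found. A minimal sketch of that loop under the argument shapes shown above; usb_endpoint_dir_in() is the standard helper, while the original may test bEndpointAddress directly.

static inline int mcs_find_endpoints_sketch(struct mcs_cb *mcs,
					    struct usb_host_endpoint *ep,
					    int epnum)
{
	int i;

	mcs->ep_in = 0;
	mcs->ep_out = 0;

	for (i = 0; i < epnum; i++) {
		if (usb_endpoint_dir_in(&ep[i].desc))
			mcs->ep_in = ep[i].desc.bEndpointAddress;
		else
			mcs->ep_out = ep[i].desc.bEndpointAddress;
	}

	/* Both directions are required for the bulk data path. */
	return (mcs->ep_in != 0) && (mcs->ep_out != 0);
}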
562 struct mcs_cb *mcs = container_of(work, struct mcs_cb, work); in mcs_speed_work() local
563 struct net_device *netdev = mcs->netdev; in mcs_speed_work()
565 mcs_speed_change(mcs); in mcs_speed_work()
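The speed change is deferred to process context through the work item initialised in mcs_probe(); the handler recovers the driver state with container_of(), reprograms the hardware, and restarts the transmit queue. A short hedged sketch:

static void mcs_speed_work_sketch(struct work_struct *work)
{
	struct mcs_cb *mcs = container_of(work, struct mcs_cb, work);
	struct net_device *netdev = mcs->netdev;

	mcs_speed_change(mcs);		/* apply mcs->new_speed to the chip */
	netif_wake_queue(netdev);	/* xmit was held off during the change */
}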
572 static int mcs_speed_change(struct mcs_cb *mcs) in mcs_speed_change() argument
580 nspeed = mcs_speed_set[(mcs->new_speed >> 8) & 0x0f]; in mcs_speed_change()
583 mcs_get_reg(mcs, MCS_RESV_REG, &rval); in mcs_speed_change()
592 mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_speed_change()
595 if (mcs->new_speed <= 115200) { in mcs_speed_change()
598 if ((rst = (mcs->speed > 115200))) in mcs_speed_change()
599 mcs_set_reg(mcs, MCS_MINRXPW_REG, 0); in mcs_speed_change()
601 } else if (mcs->new_speed <= 1152000) { in mcs_speed_change()
604 if ((rst = !(mcs->speed == 576000 || mcs->speed == 1152000))) in mcs_speed_change()
605 mcs_set_reg(mcs, MCS_MINRXPW_REG, 5); in mcs_speed_change()
610 if ((rst = (mcs->speed != 4000000))) in mcs_speed_change()
611 mcs_set_reg(mcs, MCS_MINRXPW_REG, 5); in mcs_speed_change()
618 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_speed_change()
623 switch (mcs->transceiver_type) { in mcs_speed_change()
625 ret = mcs_setup_transceiver_vishay(mcs); in mcs_speed_change()
629 ret = mcs_setup_transceiver_sharp(mcs); in mcs_speed_change()
633 ret = mcs_setup_transceiver_agilent(mcs); in mcs_speed_change()
639 mcs->transceiver_type); in mcs_speed_change()
644 mcs_get_reg(mcs, MCS_MODE_REG, &rval); in mcs_speed_change()
646 ret = mcs_set_reg(mcs, MCS_MODE_REG, rval); in mcs_speed_change()
648 mcs->speed = mcs->new_speed; in mcs_speed_change()
650 mcs->new_speed = 0; in mcs_speed_change()
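mcs_speed_change() indexes a per-speed table with the IrDA speed bits ((new_speed >> 8) & 0x0f) and then splits the work into three bands: SIR up to 115200 baud, MIR at 576000/1152000, and FIR at 4 Mbit/s, rewriting the minimum RX pulse width only when the band actually changes. A hedged sketch of that classification, written as a fragment over the rval/rst locals, with the mode-register mask name (MCS_FIR) used illustratively:

	if (mcs->new_speed <= 115200) {			/* SIR band */
		rval &= ~MCS_FIR;
		if ((rst = (mcs->speed > 115200)))
			mcs_set_reg(mcs, MCS_MINRXPW_REG, 0);
	} else if (mcs->new_speed <= 1152000) {		/* MIR band */
		rval &= ~MCS_FIR;
		if ((rst = !(mcs->speed == 576000 || mcs->speed == 1152000)))
			mcs_set_reg(mcs, MCS_MINRXPW_REG, 5);
	} else {					/* FIR, 4 Mbit/s */
		rval |= MCS_FIR;
		if ((rst = (mcs->speed != 4000000)))
			mcs_set_reg(mcs, MCS_MINRXPW_REG, 5);
	}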
673 struct mcs_cb *mcs = netdev_priv(netdev); in mcs_net_close() local
678 kfree_skb(mcs->rx_buff.skb); in mcs_net_close()
681 usb_kill_urb(mcs->rx_urb); in mcs_net_close()
682 usb_free_urb(mcs->rx_urb); in mcs_net_close()
683 usb_kill_urb(mcs->tx_urb); in mcs_net_close()
684 usb_free_urb(mcs->tx_urb); in mcs_net_close()
687 if (mcs->irlap) in mcs_net_close()
688 irlap_close(mcs->irlap); in mcs_net_close()
690 mcs->irlap = NULL; in mcs_net_close()
697 struct mcs_cb *mcs = netdev_priv(netdev); in mcs_net_open() local
701 ret = usb_clear_halt(mcs->usbdev, in mcs_net_open()
702 usb_sndbulkpipe(mcs->usbdev, mcs->ep_in)); in mcs_net_open()
705 ret = usb_clear_halt(mcs->usbdev, in mcs_net_open()
706 usb_rcvbulkpipe(mcs->usbdev, mcs->ep_out)); in mcs_net_open()
710 ret = mcs_setup_transceiver(mcs); in mcs_net_open()
717 mcs->receiving = 0; in mcs_net_open()
718 mcs->rx_buff.truesize = IRDA_SKB_MAX_MTU; in mcs_net_open()
719 mcs->rx_buff.skb = dev_alloc_skb(IRDA_SKB_MAX_MTU); in mcs_net_open()
720 if (!mcs->rx_buff.skb) in mcs_net_open()
723 skb_reserve(mcs->rx_buff.skb, 1); in mcs_net_open()
724 mcs->rx_buff.head = mcs->rx_buff.skb->data; in mcs_net_open()
731 sprintf(hwname, "usb#%d", mcs->usbdev->devnum); in mcs_net_open()
732 mcs->irlap = irlap_open(netdev, &mcs->qos, hwname); in mcs_net_open()
733 if (!mcs->irlap) { in mcs_net_open()
738 if (!mcs_setup_urbs(mcs)) in mcs_net_open()
741 ret = mcs_receive_start(mcs); in mcs_net_open()
749 usb_free_urb(mcs->rx_urb); in mcs_net_open()
750 usb_free_urb(mcs->tx_urb); in mcs_net_open()
752 irlap_close(mcs->irlap); in mcs_net_open()
754 kfree_skb(mcs->rx_buff.skb); in mcs_net_open()
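mcs_net_open() builds its state up in stages (halt clears, transceiver setup, receive buffer, IrLAP instance, URBs, then the first bulk-in submit) and releases the pieces in reverse order on failure; the usb_free_urb()/irlap_close()/kfree_skb() lines above are that unwind. A compressed, hedged sketch of the shape, with hypothetical label names:

	ret = -ENOMEM;
	mcs->rx_buff.skb = dev_alloc_skb(IRDA_SKB_MAX_MTU);
	if (!mcs->rx_buff.skb)
		goto error1;

	mcs->irlap = irlap_open(netdev, &mcs->qos, hwname);
	if (!mcs->irlap)
		goto error2;

	if (!mcs_setup_urbs(mcs))
		goto error3;

	ret = mcs_receive_start(mcs);
	if (ret)
		goto error4;

	netif_start_queue(netdev);
	return 0;

error4:
	usb_free_urb(mcs->rx_urb);
	usb_free_urb(mcs->tx_urb);
error3:
	irlap_close(mcs->irlap);
error2:
	kfree_skb(mcs->rx_buff.skb);
error1:
	return ret;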
763 struct mcs_cb *mcs = urb->context; in mcs_receive_irq() local
767 if (!netif_running(mcs->netdev)) in mcs_receive_irq()
780 if(mcs->speed < 576000) { in mcs_receive_irq()
781 async_unwrap_char(mcs->netdev, &mcs->netdev->stats, in mcs_receive_irq()
782 &mcs->rx_buff, 0xc0); in mcs_receive_irq()
785 async_unwrap_char(mcs->netdev, &mcs->netdev->stats, in mcs_receive_irq()
786 &mcs->rx_buff, bytes[i]); in mcs_receive_irq()
788 async_unwrap_char(mcs->netdev, &mcs->netdev->stats, in mcs_receive_irq()
789 &mcs->rx_buff, 0xc1); in mcs_receive_irq()
792 else if(mcs->speed == 576000 || mcs->speed == 1152000) { in mcs_receive_irq()
793 mcs_unwrap_mir(mcs, urb->transfer_buffer, in mcs_receive_irq()
798 mcs_unwrap_fir(mcs, urb->transfer_buffer, in mcs_receive_irq()
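mcs_receive_irq() dispatches on the current speed: below 576000 baud the raw buffer is fed byte by byte through the old IrDA stack's async_unwrap_char(), bracketed by 0xc0/0xc1 because the chip delivers frames without BOF/EOF, while MIR and FIR buffers go to the unwrap helpers above; the handler then re-arms the bulk-in URB. A hedged sketch of the SIR branch and the resubmit tail:

	__u8 *bytes = urb->transfer_buffer;
	int i, ret;

	async_unwrap_char(mcs->netdev, &mcs->netdev->stats, &mcs->rx_buff, 0xc0);
	for (i = 0; i < urb->actual_length; i++)
		async_unwrap_char(mcs->netdev, &mcs->netdev->stats,
				  &mcs->rx_buff, bytes[i]);
	async_unwrap_char(mcs->netdev, &mcs->netdev->stats, &mcs->rx_buff, 0xc1);

	/* Re-arm the bulk-in URB; GFP_ATOMIC because this runs in completion context. */
	ret = usb_submit_urb(urb, GFP_ATOMIC);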
809 struct mcs_cb *mcs = urb->context; in mcs_send_irq() local
810 struct net_device *ndev = mcs->netdev; in mcs_send_irq()
812 if (unlikely(mcs->new_speed)) in mcs_send_irq()
813 schedule_work(&mcs->work); in mcs_send_irq()
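The bulk-out completion handler has only two jobs: if a speed change was latched by mcs_hard_xmit() it punts to the work item, otherwise it wakes the transmit queue so the next frame can be sent. A hedged sketch:

static void mcs_send_irq_sketch(struct urb *urb)
{
	struct mcs_cb *mcs = urb->context;
	struct net_device *ndev = mcs->netdev;

	if (unlikely(mcs->new_speed))
		schedule_work(&mcs->work);	/* change speed in process context */
	else
		netif_wake_queue(ndev);		/* ready for the next frame */
}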
823 struct mcs_cb *mcs; in mcs_hard_xmit() local
828 mcs = netdev_priv(ndev); in mcs_hard_xmit()
830 spin_lock_irqsave(&mcs->lock, flags); in mcs_hard_xmit()
832 mcs->new_speed = irda_get_next_speed(skb); in mcs_hard_xmit()
833 if (likely(mcs->new_speed == mcs->speed)) in mcs_hard_xmit()
834 mcs->new_speed = 0; in mcs_hard_xmit()
837 if(mcs->speed < 576000) { in mcs_hard_xmit()
838 wraplen = mcs_wrap_sir_skb(skb, mcs->out_buf); in mcs_hard_xmit()
841 else if(mcs->speed == 576000 || mcs->speed == 1152000) { in mcs_hard_xmit()
842 wraplen = mcs_wrap_mir_skb(skb, mcs->out_buf); in mcs_hard_xmit()
846 wraplen = mcs_wrap_fir_skb(skb, mcs->out_buf); in mcs_hard_xmit()
848 usb_fill_bulk_urb(mcs->tx_urb, mcs->usbdev, in mcs_hard_xmit()
849 usb_sndbulkpipe(mcs->usbdev, mcs->ep_out), in mcs_hard_xmit()
850 mcs->out_buf, wraplen, mcs_send_irq, mcs); in mcs_hard_xmit()
852 if ((ret = usb_submit_urb(mcs->tx_urb, GFP_ATOMIC))) { in mcs_hard_xmit()
859 mcs->netdev->stats.tx_errors++; in mcs_hard_xmit()
863 mcs->netdev->stats.tx_packets++; in mcs_hard_xmit()
864 mcs->netdev->stats.tx_bytes += skb->len; in mcs_hard_xmit()
868 spin_unlock_irqrestore(&mcs->lock, flags); in mcs_hard_xmit()
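mcs_hard_xmit() runs under the driver spinlock: it latches any pending speed change, wraps the skb into out_buf for the current band, and submits the bulk-out URB; the tail then accounts for the result, frees the skb, and returns NETDEV_TX_OK either way. A hedged sketch of that tail:

	if (ret) {
		/* Submission failed: count the error and keep the queue alive. */
		mcs->netdev->stats.tx_errors++;
		netif_start_queue(ndev);
	} else {
		mcs->netdev->stats.tx_packets++;
		mcs->netdev->stats.tx_bytes += skb->len;
	}

	dev_kfree_skb(skb);
	spin_unlock_irqrestore(&mcs->lock, flags);
	return NETDEV_TX_OK;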
888 struct mcs_cb *mcs; in mcs_probe() local
891 ndev = alloc_irdadev(sizeof(*mcs)); in mcs_probe()
905 mcs = netdev_priv(ndev); in mcs_probe()
906 mcs->usbdev = udev; in mcs_probe()
907 mcs->netdev = ndev; in mcs_probe()
908 spin_lock_init(&mcs->lock); in mcs_probe()
911 irda_init_max_qos_capabilies(&mcs->qos); in mcs_probe()
914 mcs->qos.baud_rate.bits &= in mcs_probe()
919 mcs->qos.min_turn_time.bits &= qos_mtt_bits; in mcs_probe()
920 irda_qos_bits_to_value(&mcs->qos); in mcs_probe()
923 INIT_WORK(&mcs->work, mcs_speed_work); in mcs_probe()
932 ret = mcs_find_endpoints(mcs, intf->cur_altsetting->endpoint, in mcs_probe()
946 mcs->transceiver_type = transceiver_type; in mcs_probe()
947 mcs->sir_tweak = sir_tweak; in mcs_probe()
948 mcs->receive_mode = receive_mode; in mcs_probe()
950 usb_set_intfdata(intf, mcs); in mcs_probe()
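mcs_probe() allocates the IrDA net_device, fills in the private state, QoS bits, work item and endpoints, stores the module parameters, and finally registers the device and attaches it to the interface. A hedged sketch of the registration and its unwind, with hypothetical label names:

	ret = register_netdev(ndev);
	if (ret != 0)
		goto error2;

	usb_set_intfdata(intf, mcs);
	return 0;

error2:
	free_netdev(ndev);
error1:	/* earlier failures jump here */
	return ret;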
963 struct mcs_cb *mcs = usb_get_intfdata(intf); in mcs_disconnect() local
965 if (!mcs) in mcs_disconnect()
968 cancel_work_sync(&mcs->work); in mcs_disconnect()
970 unregister_netdev(mcs->netdev); in mcs_disconnect()
971 free_netdev(mcs->netdev); in mcs_disconnect()