Lines matching refs:bcs (identifier cross-reference for the B-channel state pointer bcs in the HiSax hfcpci driver)
192 hfcpci_sched_event(struct BCState *bcs, int event) in hfcpci_sched_event() argument
194 test_and_set_bit(event, &bcs->event); in hfcpci_sched_event()
195 schedule_work(&bcs->tqueue); in hfcpci_sched_event()
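
The two listed lines are the entire body of the event helper: the event is recorded atomically in bcs->event and the real work is deferred to the BCState work queue, keeping the interrupt path short. A minimal sketch, assuming the HiSax BCState layout (an unsigned long event bitmap and a work_struct tqueue):

    static inline void
    hfcpci_sched_event(struct BCState *bcs, int event)
    {
        /* record the event atomically; several IRQs may race here */
        test_and_set_bit(event, &bcs->event);
        /* defer rx/tx/ack handling to process context */
        schedule_work(&bcs->tqueue);
    }
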
205 if (cs->bcs[0].mode && (cs->bcs[0].channel == channel)) in Sel_BCS()
206 return (&cs->bcs[0]); in Sel_BCS()
207 else if (cs->bcs[1].mode && (cs->bcs[1].channel == channel)) in Sel_BCS()
208 return (&cs->bcs[1]); in Sel_BCS()
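
Sel_BCS resolves a hardware B channel to the BCState that currently owns it; a slot only matches if its mode is non-zero, i.e. the channel has been opened through mode_hfcpci(). The listing stops before the fall-through; returning NULL for an unused channel is assumed here, which is what lets the interrupt handler below detect spurious B-channel interrupts:

    static struct BCState *
    Sel_BCS(struct IsdnCardState *cs, int channel)
    {
        if (cs->bcs[0].mode && (cs->bcs[0].channel == channel))
            return (&cs->bcs[0]);
        else if (cs->bcs[1].mode && (cs->bcs[1].channel == channel))
            return (&cs->bcs[1]);
        else
            return (NULL);    /* assumed: no active BCState on this channel */
    }
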
271 hfcpci_empty_fifo(struct BCState *bcs, bzfifo_type *bz, u_char *bdata, int count) in hfcpci_empty_fifo() argument
275 struct IsdnCardState *cs = bcs->cs; in hfcpci_empty_fifo()
291 bcs->err_inv++; in hfcpci_empty_fifo()
398 hfcpci_empty_fifo_trans(struct BCState *bcs, bzfifo_type *bz, u_char *bdata) in hfcpci_empty_fifo_trans() argument
438 skb_queue_tail(&bcs->rqueue, skb); in hfcpci_empty_fifo_trans()
439 hfcpci_sched_event(bcs, B_RCVBUFREADY); in hfcpci_empty_fifo_trans()
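
Lines 438-439 show the receive hand-off convention used throughout the driver: a filled sk_buff is appended to bcs->rqueue and B_RCVBUFREADY is scheduled, so the frame is passed up to layer 2 from the work queue rather than from interrupt context. A hedged sketch of that tail of the transparent-mode receiver (fcnt stands for the byte count computed from the FIFO pointers; the copy itself is omitted):

    if (!(skb = dev_alloc_skb(fcnt)))
        printk(KERN_WARNING "HFCPCI: receive out of memory\n");
    else {
        /* ... copy fcnt bytes out of the transparent B FIFO into skb ... */
        skb_queue_tail(&bcs->rqueue, skb);          /* hand the data to the bottom half */
        hfcpci_sched_event(bcs, B_RCVBUFREADY);     /* wake the rx work queue */
    }
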
450 main_rec_hfcpci(struct BCState *bcs) in main_rec_hfcpci() argument
452 struct IsdnCardState *cs = bcs->cs; in main_rec_hfcpci()
461 if ((bcs->channel) && (!cs->hw.hfcpci.bswapped)) { in main_rec_hfcpci()
473 debugl1(cs, "rec_data %d blocked", bcs->channel); in main_rec_hfcpci()
479 bcs->channel, bz->f1, bz->f2); in main_rec_hfcpci()
488 bcs->channel, zp->z1, zp->z2, rcnt); in main_rec_hfcpci()
489 if ((skb = hfcpci_empty_fifo(bcs, bz, bdata, rcnt))) { in main_rec_hfcpci()
490 skb_queue_tail(&bcs->rqueue, skb); in main_rec_hfcpci()
491 hfcpci_sched_event(bcs, B_RCVBUFREADY); in main_rec_hfcpci()
505 } else if (bcs->mode == L1_MODE_TRANS) in main_rec_hfcpci()
506 receive = hfcpci_empty_fifo_trans(bcs, bz, bdata); in main_rec_hfcpci()
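
main_rec_hfcpci first maps the logical B channel onto the B1 or B2 receive FIFO; the hw.hfcpci.bswapped flag (line 461) covers the case where the HFC-PCI has the two channels exchanged in hardware. With the FIFO selected, HDLC mode compares the f1/f2 frame counters (line 479) and extracts complete frames via hfcpci_empty_fifo(), while transparent mode drains whatever the z1/z2 pointers expose (lines 505-506). A compressed sketch, assuming the rxbz_*/rxdat_* names from the driver's FIFO layout and omitting the retry/"blocked" handling of line 473:

    if ((bcs->channel) && (!cs->hw.hfcpci.bswapped)) {
        bz = &((fifo_area *) (cs->hw.hfcpci.fifos))->b_chans.rxbz_b2;
        bdata = ((fifo_area *) (cs->hw.hfcpci.fifos))->b_chans.rxdat_b2;
    } else {
        bz = &((fifo_area *) (cs->hw.hfcpci.fifos))->b_chans.rxbz_b1;
        bdata = ((fifo_area *) (cs->hw.hfcpci.fifos))->b_chans.rxdat_b1;
    }
    if (bz->f1 != bz->f2) {
        /* HDLC: at least one complete frame; rcnt comes from zp->z1/zp->z2 */
        if ((skb = hfcpci_empty_fifo(bcs, bz, bdata, rcnt))) {
            skb_queue_tail(&bcs->rqueue, skb);
            hfcpci_sched_event(bcs, B_RCVBUFREADY);
        }
    } else if (bcs->mode == L1_MODE_TRANS)
        receive = hfcpci_empty_fifo_trans(bcs, bz, bdata);
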
588 hfcpci_fill_fifo(struct BCState *bcs) in hfcpci_fill_fifo() argument
590 struct IsdnCardState *cs = bcs->cs; in hfcpci_fill_fifo()
598 if (!bcs->tx_skb) in hfcpci_fill_fifo()
600 if (bcs->tx_skb->len <= 0) in hfcpci_fill_fifo()
603 if ((bcs->channel) && (!cs->hw.hfcpci.bswapped)) { in hfcpci_fill_fifo()
611 if (bcs->mode == L1_MODE_TRANS) { in hfcpci_fill_fifo()
616 bcs->channel, *z1t, *z2t); in hfcpci_fill_fifo()
622 while ((fcnt < 2 * HFCPCI_BTRANS_THRESHOLD) && (bcs->tx_skb)) { in hfcpci_fill_fifo()
623 if (bcs->tx_skb->len < B_FIFO_SIZE - fcnt) { in hfcpci_fill_fifo()
625 count = bcs->tx_skb->len; in hfcpci_fill_fifo()
630 src = bcs->tx_skb->data; /* source pointer */ in hfcpci_fill_fifo()
643 bcs->tx_cnt -= bcs->tx_skb->len; in hfcpci_fill_fifo()
644 fcnt += bcs->tx_skb->len; in hfcpci_fill_fifo()
648 bcs->channel, bcs->tx_skb->len); in hfcpci_fill_fifo()
650 if (test_bit(FLG_LLI_L1WAKEUP, &bcs->st->lli.flag) && in hfcpci_fill_fifo()
651 (PACKET_NOACK != bcs->tx_skb->pkt_type)) { in hfcpci_fill_fifo()
653 spin_lock_irqsave(&bcs->aclock, flags); in hfcpci_fill_fifo()
654 bcs->ackcnt += bcs->tx_skb->len; in hfcpci_fill_fifo()
655 spin_unlock_irqrestore(&bcs->aclock, flags); in hfcpci_fill_fifo()
656 schedule_event(bcs, B_ACKPENDING); in hfcpci_fill_fifo()
659 dev_kfree_skb_any(bcs->tx_skb); in hfcpci_fill_fifo()
660 bcs->tx_skb = skb_dequeue(&bcs->squeue); /* fetch next data */ in hfcpci_fill_fifo()
662 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in hfcpci_fill_fifo()
667 bcs->channel, bz->f1, bz->f2, in hfcpci_fill_fifo()
685 bcs->channel, bcs->tx_skb->len, in hfcpci_fill_fifo()
688 if (count < bcs->tx_skb->len) { in hfcpci_fill_fifo()
693 count = bcs->tx_skb->len; /* get frame len */ in hfcpci_fill_fifo()
699 src = bcs->tx_skb->data; /* source pointer */ in hfcpci_fill_fifo()
712 bcs->tx_cnt -= bcs->tx_skb->len; in hfcpci_fill_fifo()
713 if (test_bit(FLG_LLI_L1WAKEUP, &bcs->st->lli.flag) && in hfcpci_fill_fifo()
714 (PACKET_NOACK != bcs->tx_skb->pkt_type)) { in hfcpci_fill_fifo()
716 spin_lock_irqsave(&bcs->aclock, flags); in hfcpci_fill_fifo()
717 bcs->ackcnt += bcs->tx_skb->len; in hfcpci_fill_fifo()
718 spin_unlock_irqrestore(&bcs->aclock, flags); in hfcpci_fill_fifo()
719 schedule_event(bcs, B_ACKPENDING); in hfcpci_fill_fifo()
725 dev_kfree_skb_any(bcs->tx_skb); in hfcpci_fill_fifo()
726 bcs->tx_skb = NULL; in hfcpci_fill_fifo()
727 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in hfcpci_fill_fifo()
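
hfcpci_fill_fifo contains the transmit path twice: a transparent-mode loop (lines 622-660) that keeps copying queued skbs while FIFO space remains, and an HDLC path (lines 685-727) that sends one frame. Both finish with the same bookkeeping shown in the listing: if layer-1 wakeups are enabled and the packet expects an acknowledgement, the byte count is accumulated under bcs->aclock and B_ACKPENDING is scheduled, then the skb is released. A sketch of the HDLC completion step (the transparent loop differs only in refilling bcs->tx_skb from bcs->squeue, line 660):

    bcs->tx_cnt -= bcs->tx_skb->len;
    if (test_bit(FLG_LLI_L1WAKEUP, &bcs->st->lli.flag) &&
        (PACKET_NOACK != bcs->tx_skb->pkt_type)) {
        u_long flags;

        spin_lock_irqsave(&bcs->aclock, flags);
        bcs->ackcnt += bcs->tx_skb->len;        /* bytes to confirm upstream */
        spin_unlock_irqrestore(&bcs->aclock, flags);
        schedule_event(bcs, B_ACKPENDING);      /* ack is sent from the bottom half */
    }
    dev_kfree_skb_any(bcs->tx_skb);             /* safe from IRQ context */
    bcs->tx_skb = NULL;
    test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
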
933 struct BCState *bcs; in hfcpci_interrupt() local
984 if (!(bcs = Sel_BCS(cs, cs->hw.hfcpci.bswapped ? 1 : 0))) { in hfcpci_interrupt()
988 main_rec_hfcpci(bcs); in hfcpci_interrupt()
993 else if (!(bcs = Sel_BCS(cs, 1))) { in hfcpci_interrupt()
997 main_rec_hfcpci(bcs); in hfcpci_interrupt()
1000 if (!(bcs = Sel_BCS(cs, cs->hw.hfcpci.bswapped ? 1 : 0))) { in hfcpci_interrupt()
1004 if (bcs->tx_skb) { in hfcpci_interrupt()
1006 hfcpci_fill_fifo(bcs); in hfcpci_interrupt()
1009 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfcpci_interrupt()
1011 if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) { in hfcpci_interrupt()
1013 hfcpci_fill_fifo(bcs); in hfcpci_interrupt()
1016 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfcpci_interrupt()
1018 hfcpci_sched_event(bcs, B_XMTBUFREADY); in hfcpci_interrupt()
1024 if (!(bcs = Sel_BCS(cs, 1))) { in hfcpci_interrupt()
1028 if (bcs->tx_skb) { in hfcpci_interrupt()
1030 hfcpci_fill_fifo(bcs); in hfcpci_interrupt()
1033 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfcpci_interrupt()
1035 if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) { in hfcpci_interrupt()
1037 hfcpci_fill_fifo(bcs); in hfcpci_interrupt()
1040 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfcpci_interrupt()
1042 hfcpci_sched_event(bcs, B_XMTBUFREADY); in hfcpci_interrupt()
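
The interrupt handler treats both B channels the same way. A receive interrupt is mapped to its BCState with Sel_BCS() (channels 0/1 are swapped when hw.hfcpci.bswapped is set) and forwarded to main_rec_hfcpci(); a NULL result means a spurious interrupt. A transmit interrupt either refills the FIFO from the pending tx_skb, dequeues the next buffer from bcs->squeue, or, when the queue is empty, schedules B_XMTBUFREADY so the bottom half can report completion to layer 2. A sketch of one transmit branch; the "blocked" messages in the listing suggest an atomic busy guard around the FIFO access, assumed here to be FLG_LOCK_ATOMIC in cs->HW_Flags:

    if (bcs->tx_skb) {
        if (!test_and_set_bit(FLG_LOCK_ATOMIC, &cs->HW_Flags)) {
            hfcpci_fill_fifo(bcs);
            test_and_clear_bit(FLG_LOCK_ATOMIC, &cs->HW_Flags);
        } else
            debugl1(cs, "fill_data %d blocked", bcs->channel);
    } else if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) {
        if (!test_and_set_bit(FLG_LOCK_ATOMIC, &cs->HW_Flags)) {
            hfcpci_fill_fifo(bcs);
            test_and_clear_bit(FLG_LOCK_ATOMIC, &cs->HW_Flags);
        } else
            debugl1(cs, "fill_data %d blocked", bcs->channel);
    } else
        hfcpci_sched_event(bcs, B_XMTBUFREADY);
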
1253 hfcpci_send_data(struct BCState *bcs) in hfcpci_send_data() argument
1255 struct IsdnCardState *cs = bcs->cs; in hfcpci_send_data()
1258 hfcpci_fill_fifo(bcs); in hfcpci_send_data()
1261 debugl1(cs, "send_data %d blocked", bcs->channel); in hfcpci_send_data()
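
hfcpci_send_data is only a guarded wrapper: it tries to push the pending tx_skb into the FIFO and logs "blocked" when the FIFO is already being serviced. It is installed as the card's BC_Send_Data hook, which is what hfcpci_l2l1() below calls for new data. Sketch, with the same assumed FLG_LOCK_ATOMIC guard as above:

    static void
    hfcpci_send_data(struct BCState *bcs)
    {
        struct IsdnCardState *cs = bcs->cs;

        if (!test_and_set_bit(FLG_LOCK_ATOMIC, &cs->HW_Flags)) {
            hfcpci_fill_fifo(bcs);
            test_and_clear_bit(FLG_LOCK_ATOMIC, &cs->HW_Flags);
        } else
            debugl1(cs, "send_data %d blocked", bcs->channel);
    }
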
1268 mode_hfcpci(struct BCState *bcs, int mode, int bc) in mode_hfcpci() argument
1270 struct IsdnCardState *cs = bcs->cs; in mode_hfcpci()
1275 mode, bc, bcs->channel); in mode_hfcpci()
1276 bcs->mode = mode; in mode_hfcpci()
1277 bcs->channel = bc; in mode_hfcpci()
1391 struct BCState *bcs = st->l1.bcs; in hfcpci_l2l1() local
1397 spin_lock_irqsave(&bcs->cs->lock, flags); in hfcpci_l2l1()
1398 if (bcs->tx_skb) { in hfcpci_l2l1()
1399 skb_queue_tail(&bcs->squeue, skb); in hfcpci_l2l1()
1401 bcs->tx_skb = skb; in hfcpci_l2l1()
1403 bcs->cs->BC_Send_Data(bcs); in hfcpci_l2l1()
1405 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfcpci_l2l1()
1408 spin_lock_irqsave(&bcs->cs->lock, flags); in hfcpci_l2l1()
1409 if (bcs->tx_skb) { in hfcpci_l2l1()
1410 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfcpci_l2l1()
1415 bcs->tx_skb = skb; in hfcpci_l2l1()
1416 bcs->cs->BC_Send_Data(bcs); in hfcpci_l2l1()
1417 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfcpci_l2l1()
1420 if (!bcs->tx_skb) { in hfcpci_l2l1()
1427 spin_lock_irqsave(&bcs->cs->lock, flags); in hfcpci_l2l1()
1428 test_and_set_bit(BC_FLG_ACTIV, &bcs->Flag); in hfcpci_l2l1()
1429 mode_hfcpci(bcs, st->l1.mode, st->l1.bc); in hfcpci_l2l1()
1430 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfcpci_l2l1()
1437 spin_lock_irqsave(&bcs->cs->lock, flags); in hfcpci_l2l1()
1438 test_and_clear_bit(BC_FLG_ACTIV, &bcs->Flag); in hfcpci_l2l1()
1439 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in hfcpci_l2l1()
1440 mode_hfcpci(bcs, 0, st->l1.bc); in hfcpci_l2l1()
1441 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfcpci_l2l1()
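
hfcpci_l2l1 is the layer-2-to-layer-1 entry point bound by setstack_2b() below. The listed fragments cover three cases: a data request is either queued on bcs->squeue (a transmit is already in flight) or becomes the new tx_skb and is pushed out through BC_Send_Data, all under bcs->cs->lock; an activate request sets BC_FLG_ACTIV and programs the channel with mode_hfcpci(); the deactivate confirmation clears both flags and returns the channel to mode 0. A condensed sketch, assuming the usual HiSax primitives (PH_DATA/PH_ACTIVATE/PH_DEACTIVATE with REQUEST/CONFIRM) and helpers (l1_msg_b, st->l1.l1l2):

    switch (pr) {
    case (PH_DATA | REQUEST):
        spin_lock_irqsave(&bcs->cs->lock, flags);
        if (bcs->tx_skb)
            skb_queue_tail(&bcs->squeue, skb);      /* transmitter busy: queue */
        else {
            bcs->tx_skb = skb;
            bcs->cs->BC_Send_Data(bcs);             /* -> hfcpci_send_data() */
        }
        spin_unlock_irqrestore(&bcs->cs->lock, flags);
        break;
    case (PH_ACTIVATE | REQUEST):
        spin_lock_irqsave(&bcs->cs->lock, flags);
        test_and_set_bit(BC_FLG_ACTIV, &bcs->Flag);
        mode_hfcpci(bcs, st->l1.mode, st->l1.bc);   /* program the B channel */
        spin_unlock_irqrestore(&bcs->cs->lock, flags);
        l1_msg_b(st, pr, arg);
        break;
    case (PH_DEACTIVATE | CONFIRM):
        spin_lock_irqsave(&bcs->cs->lock, flags);
        test_and_clear_bit(BC_FLG_ACTIV, &bcs->Flag);
        test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
        mode_hfcpci(bcs, 0, st->l1.bc);             /* channel off */
        spin_unlock_irqrestore(&bcs->cs->lock, flags);
        st->l1.l1l2(st, PH_DEACTIVATE | CONFIRM, NULL);
        break;
    }
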
1451 close_hfcpci(struct BCState *bcs) in close_hfcpci() argument
1453 mode_hfcpci(bcs, 0, bcs->channel); in close_hfcpci()
1454 if (test_and_clear_bit(BC_FLG_INIT, &bcs->Flag)) { in close_hfcpci()
1455 skb_queue_purge(&bcs->rqueue); in close_hfcpci()
1456 skb_queue_purge(&bcs->squeue); in close_hfcpci()
1457 if (bcs->tx_skb) { in close_hfcpci()
1458 dev_kfree_skb_any(bcs->tx_skb); in close_hfcpci()
1459 bcs->tx_skb = NULL; in close_hfcpci()
1460 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in close_hfcpci()
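
close_hfcpci is the BC_Close hook; the listed lines are essentially the whole function. It disables the channel and, if it had been initialised, frees every buffered sk_buff including a frame still in flight, so nothing leaks when layer 2 tears the stack down:

    static void
    close_hfcpci(struct BCState *bcs)
    {
        mode_hfcpci(bcs, 0, bcs->channel);          /* disable the B channel */
        if (test_and_clear_bit(BC_FLG_INIT, &bcs->Flag)) {
            skb_queue_purge(&bcs->rqueue);          /* drop undelivered rx frames */
            skb_queue_purge(&bcs->squeue);          /* drop queued tx frames */
            if (bcs->tx_skb) {
                dev_kfree_skb_any(bcs->tx_skb);     /* and the frame in flight */
                bcs->tx_skb = NULL;
                test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
            }
        }
    }
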
1469 open_hfcpcistate(struct IsdnCardState *cs, struct BCState *bcs) in open_hfcpcistate() argument
1471 if (!test_and_set_bit(BC_FLG_INIT, &bcs->Flag)) { in open_hfcpcistate()
1472 skb_queue_head_init(&bcs->rqueue); in open_hfcpcistate()
1473 skb_queue_head_init(&bcs->squeue); in open_hfcpcistate()
1475 bcs->tx_skb = NULL; in open_hfcpcistate()
1476 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in open_hfcpcistate()
1477 bcs->event = 0; in open_hfcpcistate()
1478 bcs->tx_cnt = 0; in open_hfcpcistate()
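
open_hfcpcistate prepares a B channel before a stack is attached: the receive and send queues are initialised only on the first open (BC_FLG_INIT), while the transmit bookkeeping is reset every time. The listing skips the end of the function; returning 0 on success is assumed:

    static int
    open_hfcpcistate(struct IsdnCardState *cs, struct BCState *bcs)
    {
        if (!test_and_set_bit(BC_FLG_INIT, &bcs->Flag)) {
            skb_queue_head_init(&bcs->rqueue);
            skb_queue_head_init(&bcs->squeue);
        }
        bcs->tx_skb = NULL;
        test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag);
        bcs->event = 0;
        bcs->tx_cnt = 0;
        return (0);    /* assumed success return */
    }
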
1486 setstack_2b(struct PStack *st, struct BCState *bcs) in setstack_2b() argument
1488 bcs->channel = st->l1.bc; in setstack_2b()
1489 if (open_hfcpcistate(st->l1.hardware, bcs)) in setstack_2b()
1491 st->l1.bcs = bcs; in setstack_2b()
1494 bcs->st = st; in setstack_2b()
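
setstack_2b is the BC_SetStack hook: it binds a PStack to its BCState, opens the channel state, and wires the layer-2 transmit path to hfcpci_l2l1(). The failure return and the l2l1/setstack_l1_B wiring are not in the listing; they are reconstructed here on the usual HiSax pattern and should be read as an assumption:

    static int
    setstack_2b(struct PStack *st, struct BCState *bcs)
    {
        bcs->channel = st->l1.bc;
        if (open_hfcpcistate(st->l1.hardware, bcs))
            return (-1);                 /* assumed failure path */
        st->l1.bcs = bcs;
        st->l2.l2l1 = hfcpci_l2l1;       /* assumed: bind the l2 -> l1 hook */
        setstack_manager(st);
        bcs->st = st;
        setstack_l1_B(st);
        return (0);
    }
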
1581 cs->bcs[0].BC_SetStack = setstack_2b; in inithfcpci()
1582 cs->bcs[1].BC_SetStack = setstack_2b; in inithfcpci()
1583 cs->bcs[0].BC_Close = close_hfcpci; in inithfcpci()
1584 cs->bcs[1].BC_Close = close_hfcpci; in inithfcpci()
1588 mode_hfcpci(cs->bcs, 0, 0); in inithfcpci()
1589 mode_hfcpci(cs->bcs + 1, 0, 1); in inithfcpci()
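
During card initialisation both BCState slots receive the same hooks and both channels are parked in mode 0 (channel 0 and channel 1 respectively), which closes the loop over the references above: BC_SetStack -> setstack_2b -> hfcpci_l2l1 -> BC_Send_Data -> hfcpci_fill_fifo, and BC_Close -> close_hfcpci. Annotated restatement; the BC_Send_Data assignment itself is assumed to happen elsewhere during card setup and is not part of the listing:

    /* both B channels share the same layer-1 hooks */
    cs->bcs[0].BC_SetStack = setstack_2b;
    cs->bcs[1].BC_SetStack = setstack_2b;
    cs->bcs[0].BC_Close = close_hfcpci;
    cs->bcs[1].BC_Close = close_hfcpci;
    /* park both channels in mode 0 until layer 2 activates them */
    mode_hfcpci(cs->bcs, 0, 0);
    mode_hfcpci(cs->bcs + 1, 0, 1);
    /* assumed (not in the listing): cs->BC_Send_Data = &hfcpci_send_data; */
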