Lines Matching refs:bcs
Cross-reference listing: every source line that references the B-channel state (struct BCState *bcs) in the HiSax HFC 2BDS0 B-channel driver code, annotated with its file line number, the function it appears in, and whether the reference is an argument or a local.
143 GetFreeFifoBytes_B(struct BCState *bcs) in GetFreeFifoBytes_B() argument
147 if (bcs->hw.hfc.f1 == bcs->hw.hfc.f2) in GetFreeFifoBytes_B()
148 return (bcs->cs->hw.hfcD.bfifosize); in GetFreeFifoBytes_B()
149 s = bcs->hw.hfc.send[bcs->hw.hfc.f1] - bcs->hw.hfc.send[bcs->hw.hfc.f2]; in GetFreeFifoBytes_B()
151 s += bcs->cs->hw.hfcD.bfifosize; in GetFreeFifoBytes_B()
152 s = bcs->cs->hw.hfcD.bfifosize - s; in GetFreeFifoBytes_B()
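The GetFreeFifoBytes_B() lines above compute how much room is left in the circular B-channel transmit FIFO from the byte positions saved per F-counter slot (hw.hfc.send[]). A minimal standalone sketch of that arithmetic follows; the declaration of s, the exact wrap-around condition and the final return are assumptions, since only lines 147-152 appear in the listing, and free_fifo_bytes() is an illustrative name, not a driver function.

    /* Standalone model of GetFreeFifoBytes_B(): f1/f2 are the chip's frame
     * counters, z_f1/z_f2 the byte positions saved for them (hw.hfc.send[f1]
     * and hw.hfc.send[f2]), fifosize stands for hw.hfcD.bfifosize. */
    static int free_fifo_bytes(int f1, int f2, int z_f1, int z_f2, int fifosize)
    {
        int s;

        if (f1 == f2)                 /* no frame pending: the whole FIFO is free */
            return fifosize;
        s = z_f1 - z_f2;              /* bytes occupied by pending frames */
        if (s <= 0)                   /* assumed wrap-around correction */
            s += fifosize;
        return fifosize - s;          /* room left for the next frame */
    }

In the driver the result is what hfc_fill_fifo() compares against bcs->tx_skb->len before deciding whether the pending frame fits (line 298).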
183 *hfc_empty_fifo(struct BCState *bcs, int count) in hfc_empty_fifo() argument
187 struct IsdnCardState *cs = bcs->cs; in hfc_empty_fifo()
198 cip = HFCB_FIFO | HFCB_FIFO_OUT | HFCB_REC | HFCB_CHANNEL(bcs->channel); in hfc_empty_fifo()
207 cip = HFCB_FIFO | HFCB_FIFO_OUT | HFCB_REC | HFCB_CHANNEL(bcs->channel); in hfc_empty_fifo()
209 bcs->err_inv++; in hfc_empty_fifo()
219 cip = HFCB_FIFO | HFCB_FIFO_OUT | HFCB_REC | HFCB_CHANNEL(bcs->channel); in hfc_empty_fifo()
229 printk(KERN_WARNING "HFC FIFO channel %d BUSY Error\n", bcs->channel); in hfc_empty_fifo()
241 bcs->channel, chksum, stat); in hfc_empty_fifo()
247 bcs->err_crc++; in hfc_empty_fifo()
255 HFCB_REC | HFCB_CHANNEL(bcs->channel)); in hfc_empty_fifo()
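The hfc_empty_fifo() fragments sketch the receive drain: a cip (chip index pointer) selecting the channel's receive FIFO is rebuilt before each burst of reads, a stuck BUSY condition aborts the frame with a warning (line 229), and a bad frame bumps err_inv or err_crc and drops the data. A heavily simplified standalone model of that flow follows; read_fifo_byte(), drain_frame() and the "last byte is the status byte" convention are assumptions, the real code goes through ReadReg(cs, HFCD_DATA, cip).

    #include <stdlib.h>

    static unsigned char read_fifo_byte(void) { return 0; }   /* stand-in for ReadReg() */

    /* Drain one received frame of 'count' bytes; return a freshly allocated
     * payload buffer, or NULL when the frame has to be dropped (the driver
     * then only bumps bcs->err_inv or bcs->err_crc and carries on). */
    static unsigned char *drain_frame(int count, int *err_crc)
    {
        unsigned char *buf;
        unsigned char stat;
        int i;

        if (count < 1 || !(buf = malloc(count)))
            return NULL;
        for (i = 0; i < count - 1; i++)
            buf[i] = read_fifo_byte();     /* payload bytes */
        stat = read_fifo_byte();           /* assumed trailing status byte */
        if (stat) {                        /* non-zero status: checksum error */
            (*err_crc)++;
            free(buf);
            return NULL;
        }
        return buf;                        /* becomes the skb queued by main_rec_2bds0() */
    }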
261 hfc_fill_fifo(struct BCState *bcs) in hfc_fill_fifo() argument
263 struct IsdnCardState *cs = bcs->cs; in hfc_fill_fifo()
268 if (!bcs->tx_skb) in hfc_fill_fifo()
270 if (bcs->tx_skb->len <= 0) in hfc_fill_fifo()
272 SelFiFo(cs, HFCB_SEND | HFCB_CHANNEL(bcs->channel)); in hfc_fill_fifo()
273 cip = HFCB_FIFO | HFCB_F1 | HFCB_SEND | HFCB_CHANNEL(bcs->channel); in hfc_fill_fifo()
275 bcs->hw.hfc.f1 = ReadReg(cs, HFCD_DATA, cip); in hfc_fill_fifo()
277 cip = HFCB_FIFO | HFCB_F2 | HFCB_SEND | HFCB_CHANNEL(bcs->channel); in hfc_fill_fifo()
279 bcs->hw.hfc.f2 = ReadReg(cs, HFCD_DATA, cip); in hfc_fill_fifo()
280 bcs->hw.hfc.send[bcs->hw.hfc.f1] = ReadZReg(cs, HFCB_FIFO | HFCB_Z1 | HFCB_SEND | HFCB_CHANNEL(bcs->channel)); in hfc_fill_fifo()
283 bcs->channel, bcs->hw.hfc.f1, bcs->hw.hfc.f2, in hfc_fill_fifo()
284 bcs->hw.hfc.send[bcs->hw.hfc.f1]); in hfc_fill_fifo()
285 fcnt = bcs->hw.hfc.f1 - bcs->hw.hfc.f2; in hfc_fill_fifo()
293 count = GetFreeFifoBytes_B(bcs); in hfc_fill_fifo()
296 bcs->channel, bcs->tx_skb->len, in hfc_fill_fifo()
298 if (count < bcs->tx_skb->len) { in hfc_fill_fifo()
303 cip = HFCB_FIFO | HFCB_FIFO_IN | HFCB_SEND | HFCB_CHANNEL(bcs->channel); in hfc_fill_fifo()
307 WriteReg(cs, HFCD_DATA_NODEB, cip, bcs->tx_skb->data[idx++]); in hfc_fill_fifo()
308 while (idx < bcs->tx_skb->len) { in hfc_fill_fifo()
311 WriteReg(cs, HFCD_DATA_NODEB, cip, bcs->tx_skb->data[idx]); in hfc_fill_fifo()
314 if (idx != bcs->tx_skb->len) { in hfc_fill_fifo()
316 printk(KERN_WARNING "HFC S FIFO channel %d BUSY Error\n", bcs->channel); in hfc_fill_fifo()
318 bcs->tx_cnt -= bcs->tx_skb->len; in hfc_fill_fifo()
319 if (test_bit(FLG_LLI_L1WAKEUP, &bcs->st->lli.flag) && in hfc_fill_fifo()
320 (PACKET_NOACK != bcs->tx_skb->pkt_type)) { in hfc_fill_fifo()
322 spin_lock_irqsave(&bcs->aclock, flags); in hfc_fill_fifo()
323 bcs->ackcnt += bcs->tx_skb->len; in hfc_fill_fifo()
324 spin_unlock_irqrestore(&bcs->aclock, flags); in hfc_fill_fifo()
325 schedule_event(bcs, B_ACKPENDING); in hfc_fill_fifo()
327 dev_kfree_skb_any(bcs->tx_skb); in hfc_fill_fifo()
328 bcs->tx_skb = NULL; in hfc_fill_fifo()
332 ReadReg(cs, HFCD_DATA, HFCB_FIFO | HFCB_F1_INC | HFCB_SEND | HFCB_CHANNEL(bcs->channel)); in hfc_fill_fifo()
334 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in hfc_fill_fifo()
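Taken together, the hfc_fill_fifo() lines implement a fit-check-then-copy transmitter: the pending skb is written only when GetFreeFifoBytes_B() reports enough room, the copy is byte-wise through HFCD_DATA_NODEB, and a completed frame is acknowledged (ackcnt under bcs->aclock, B_ACKPENDING), freed and committed to the chip by the F1 increment on line 332. A toy in-memory model of that control flow; try_send_frame(), fifo_free_bytes() and the plain z_write/z_read indices are illustrative stand-ins for the chip's Z registers.

    #include <stdbool.h>

    #define BFIFOSIZE 1024                  /* stands in for hw.hfcD.bfifosize */

    static unsigned char fifo[BFIFOSIZE];
    static int z_write, z_read;             /* toy versions of the chip's Z1/Z2 */

    static int fifo_free_bytes(void)        /* same idea as GetFreeFifoBytes_B() */
    {
        int s = z_write - z_read;
        if (s < 0)
            s += BFIFOSIZE;
        return BFIFOSIZE - s;
    }

    /* Sketch of hfc_fill_fifo(): copy the frame only if it fits, otherwise
     * leave it queued and retry on the next transmit interrupt. */
    static bool try_send_frame(const unsigned char *data, int len)
    {
        int idx;

        if (fifo_free_bytes() < len)
            return false;
        for (idx = 0; idx < len; idx++) {
            fifo[z_write] = data[idx];      /* driver: WriteReg(cs, HFCD_DATA_NODEB, cip, ...) */
            z_write = (z_write + 1) % BFIFOSIZE;
        }
        /* The driver then subtracts len from tx_cnt, adds it to ackcnt under
         * bcs->aclock, schedules B_ACKPENDING, frees the skb, increments F1
         * and clears BC_FLG_BUSY. */
        return true;
    }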
339 hfc_send_data(struct BCState *bcs) in hfc_send_data() argument
341 struct IsdnCardState *cs = bcs->cs; in hfc_send_data()
344 hfc_fill_fifo(bcs); in hfc_send_data()
347 debugl1(cs, "send_data %d blocked", bcs->channel); in hfc_send_data()
351 main_rec_2bds0(struct BCState *bcs) in main_rec_2bds0() argument
353 struct IsdnCardState *cs = bcs->cs; in main_rec_2bds0()
362 debugl1(cs, "rec_data %d blocked", bcs->channel); in main_rec_2bds0()
365 SelFiFo(cs, HFCB_REC | HFCB_CHANNEL(bcs->channel)); in main_rec_2bds0()
366 cip = HFCB_FIFO | HFCB_F1 | HFCB_REC | HFCB_CHANNEL(bcs->channel); in main_rec_2bds0()
369 cip = HFCB_FIFO | HFCB_F2 | HFCB_REC | HFCB_CHANNEL(bcs->channel); in main_rec_2bds0()
375 bcs->channel, f1, f2); in main_rec_2bds0()
376 z1 = ReadZReg(cs, HFCB_FIFO | HFCB_Z1 | HFCB_REC | HFCB_CHANNEL(bcs->channel)); in main_rec_2bds0()
377 z2 = ReadZReg(cs, HFCB_FIFO | HFCB_Z2 | HFCB_REC | HFCB_CHANNEL(bcs->channel)); in main_rec_2bds0()
384 bcs->channel, z1, z2, rcnt); in main_rec_2bds0()
385 if ((skb = hfc_empty_fifo(bcs, rcnt))) { in main_rec_2bds0()
386 skb_queue_tail(&bcs->rqueue, skb); in main_rec_2bds0()
387 schedule_event(bcs, B_RCVBUFREADY); in main_rec_2bds0()
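In main_rec_2bds0() the byte count logged on line 384 is derived from the receive FIFO's Z1/Z2 pointers read on lines 376-377; the calculation itself is not listed. A likely reconstruction under the usual circular-FIFO assumption (pending_rx_bytes() is an illustrative name, and whether the driver adds one extra byte for the status/checksum is not visible here):

    /* Pending receive bytes between the chip's write pointer (z1) and read
     * pointer (z2) of a circular FIFO of bfifosize bytes. */
    static int pending_rx_bytes(int z1, int z2, int bfifosize)
    {
        int rcnt = z1 - z2;
        if (rcnt < 0)
            rcnt += bfifosize;      /* pointer wrapped around */
        return rcnt;
    }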
405 mode_2bs0(struct BCState *bcs, int mode, int bc) in mode_2bs0() argument
407 struct IsdnCardState *cs = bcs->cs; in mode_2bs0()
411 mode, bc, bcs->channel); in mode_2bs0()
412 bcs->mode = mode; in mode_2bs0()
413 bcs->channel = bc; in mode_2bs0()
455 struct BCState *bcs = st->l1.bcs; in hfc_l2l1() local
461 spin_lock_irqsave(&bcs->cs->lock, flags); in hfc_l2l1()
462 if (bcs->tx_skb) { in hfc_l2l1()
463 skb_queue_tail(&bcs->squeue, skb); in hfc_l2l1()
465 bcs->tx_skb = skb; in hfc_l2l1()
467 bcs->cs->BC_Send_Data(bcs); in hfc_l2l1()
469 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfc_l2l1()
472 spin_lock_irqsave(&bcs->cs->lock, flags); in hfc_l2l1()
473 if (bcs->tx_skb) { in hfc_l2l1()
477 bcs->tx_skb = skb; in hfc_l2l1()
478 bcs->cs->BC_Send_Data(bcs); in hfc_l2l1()
480 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfc_l2l1()
483 if (!bcs->tx_skb) { in hfc_l2l1()
490 spin_lock_irqsave(&bcs->cs->lock, flags); in hfc_l2l1()
491 test_and_set_bit(BC_FLG_ACTIV, &bcs->Flag); in hfc_l2l1()
492 mode_2bs0(bcs, st->l1.mode, st->l1.bc); in hfc_l2l1()
493 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfc_l2l1()
500 spin_lock_irqsave(&bcs->cs->lock, flags); in hfc_l2l1()
501 test_and_clear_bit(BC_FLG_ACTIV, &bcs->Flag); in hfc_l2l1()
502 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in hfc_l2l1()
503 mode_2bs0(bcs, 0, st->l1.bc); in hfc_l2l1()
504 spin_unlock_irqrestore(&bcs->cs->lock, flags); in hfc_l2l1()
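The hfc_l2l1() lines show the HiSax layer-2-to-layer-1 entry point: a data request is queued on squeue while a frame is still outstanding, otherwise it becomes tx_skb and BC_Send_Data() is called; activation and deactivation toggle BC_FLG_ACTIV/BC_FLG_BUSY and reprogram the channel via mode_2bs0(); everything happens under the card spinlock. A simplified, self-contained sketch of the data-request branch; the toy types and the plain mutex stand in for struct BCState and spin_lock_irqsave(), they are not the driver's definitions.

    #include <pthread.h>
    #include <stddef.h>

    struct frame { struct frame *next; };

    struct toy_bc {
        pthread_mutex_t lock;               /* stands in for bcs->cs->lock */
        struct frame *tx_skb;               /* frame currently being sent */
        struct frame *sq_head, *sq_tail;    /* stands in for bcs->squeue */
    };

    static void toy_send_data(struct toy_bc *bc) { (void)bc; }  /* cs->BC_Send_Data(bcs) */

    /* Sketch of the data-request branch of hfc_l2l1(). */
    static void ph_data_request(struct toy_bc *bc, struct frame *skb)
    {
        pthread_mutex_lock(&bc->lock);
        if (bc->tx_skb) {                   /* busy: append to the send queue (skb_queue_tail) */
            skb->next = NULL;
            if (bc->sq_tail)
                bc->sq_tail->next = skb;
            else
                bc->sq_head = skb;
            bc->sq_tail = skb;
        } else {                            /* idle: start transmitting right away */
            bc->tx_skb = skb;
            toy_send_data(bc);
        }
        pthread_mutex_unlock(&bc->lock);
    }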
511 close_2bs0(struct BCState *bcs) in close_2bs0() argument
513 mode_2bs0(bcs, 0, bcs->channel); in close_2bs0()
514 if (test_and_clear_bit(BC_FLG_INIT, &bcs->Flag)) { in close_2bs0()
515 skb_queue_purge(&bcs->rqueue); in close_2bs0()
516 skb_queue_purge(&bcs->squeue); in close_2bs0()
517 if (bcs->tx_skb) { in close_2bs0()
518 dev_kfree_skb_any(bcs->tx_skb); in close_2bs0()
519 bcs->tx_skb = NULL; in close_2bs0()
520 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in close_2bs0()
526 open_hfcstate(struct IsdnCardState *cs, struct BCState *bcs) in open_hfcstate() argument
528 if (!test_and_set_bit(BC_FLG_INIT, &bcs->Flag)) { in open_hfcstate()
529 skb_queue_head_init(&bcs->rqueue); in open_hfcstate()
530 skb_queue_head_init(&bcs->squeue); in open_hfcstate()
532 bcs->tx_skb = NULL; in open_hfcstate()
533 test_and_clear_bit(BC_FLG_BUSY, &bcs->Flag); in open_hfcstate()
534 bcs->event = 0; in open_hfcstate()
535 bcs->tx_cnt = 0; in open_hfcstate()
540 setstack_2b(struct PStack *st, struct BCState *bcs) in setstack_2b() argument
542 bcs->channel = st->l1.bc; in setstack_2b()
543 if (open_hfcstate(st->l1.hardware, bcs)) in setstack_2b()
545 st->l1.bcs = bcs; in setstack_2b()
548 bcs->st = st; in setstack_2b()
756 if (cs->bcs[0].mode && (cs->bcs[0].channel == channel)) in Sel_BCS()
757 return (&cs->bcs[0]); in Sel_BCS()
758 else if (cs->bcs[1].mode && (cs->bcs[1].channel == channel)) in Sel_BCS()
759 return (&cs->bcs[1]); in Sel_BCS()
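Sel_BCS() picks whichever of the two B-channel state slots is currently active (mode != 0) on the requested channel. The listing gives both comparisons; the fall-through return for "no match" is not shown and is assumed to be NULL in this standalone sketch (toy_slot/toy_card/sel_bcs are illustrative names):

    #include <stddef.h>

    struct toy_slot { int mode; int channel; };
    struct toy_card { struct toy_slot bcs[2]; };

    static struct toy_slot *sel_bcs(struct toy_card *cs, int channel)
    {
        if (cs->bcs[0].mode && cs->bcs[0].channel == channel)
            return &cs->bcs[0];
        if (cs->bcs[1].mode && cs->bcs[1].channel == channel)
            return &cs->bcs[1];
        return NULL;                /* assumed: caller treats this as "channel not open" */
    }

This matches how the interrupt handler uses the result: a NULL from Sel_BCS() makes it skip the channel entirely (lines 796, 803, 810, 834).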
768 struct BCState *bcs; in hfc2bds0_interrupt() local
796 if (!(bcs = Sel_BCS(cs, 0))) { in hfc2bds0_interrupt()
800 main_rec_2bds0(bcs); in hfc2bds0_interrupt()
803 if (!(bcs = Sel_BCS(cs, 1))) { in hfc2bds0_interrupt()
807 main_rec_2bds0(bcs); in hfc2bds0_interrupt()
810 if (!(bcs = Sel_BCS(cs, 0))) { in hfc2bds0_interrupt()
814 if (bcs->tx_skb) { in hfc2bds0_interrupt()
816 hfc_fill_fifo(bcs); in hfc2bds0_interrupt()
819 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfc2bds0_interrupt()
821 if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) { in hfc2bds0_interrupt()
823 hfc_fill_fifo(bcs); in hfc2bds0_interrupt()
826 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfc2bds0_interrupt()
828 schedule_event(bcs, B_XMTBUFREADY); in hfc2bds0_interrupt()
834 if (!(bcs = Sel_BCS(cs, 1))) { in hfc2bds0_interrupt()
838 if (bcs->tx_skb) { in hfc2bds0_interrupt()
840 hfc_fill_fifo(bcs); in hfc2bds0_interrupt()
843 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfc2bds0_interrupt()
845 if ((bcs->tx_skb = skb_dequeue(&bcs->squeue))) { in hfc2bds0_interrupt()
847 hfc_fill_fifo(bcs); in hfc2bds0_interrupt()
850 debugl1(cs, "fill_data %d blocked", bcs->channel); in hfc2bds0_interrupt()
852 schedule_event(bcs, B_XMTBUFREADY); in hfc2bds0_interrupt()
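The interrupt handler repeats the same transmit block for channel 0 (lines 810-828) and channel 1 (lines 834-852): if a frame is still in flight, top the FIFO up with hfc_fill_fifo(); otherwise pull the next frame off squeue; only when the queue is empty is B_XMTBUFREADY scheduled so the upper layer can refill it. The "fill_data %d blocked" messages suggest a busy guard around hfc_fill_fifo() that the listing does not show. A compact sketch of that per-channel decision (toy names throughout):

    struct txframe { struct txframe *next; };

    struct toy_chan {
        struct txframe *tx_skb;         /* frame currently being transmitted */
        struct txframe *squeue;         /* frames queued behind it */
    };

    static void toy_fill_fifo(struct toy_chan *c) { (void)c; }  /* hfc_fill_fifo(bcs) */
    static void toy_xmt_ready(struct toy_chan *c) { (void)c; }  /* schedule_event(bcs, B_XMTBUFREADY) */

    /* Per-channel transmit handling as in hfc2bds0_interrupt(). */
    static void tx_irq(struct toy_chan *c)
    {
        if (c->tx_skb) {
            toy_fill_fifo(c);           /* continue the current frame */
        } else if (c->squeue) {
            c->tx_skb = c->squeue;      /* start the next queued frame */
            c->squeue = c->squeue->next;
            toy_fill_fifo(c);
        } else {
            toy_xmt_ready(c);           /* nothing left: wake the upper layer */
        }
    }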
1043 if (!cs->bcs[0].hw.hfc.send) in init2bds0()
1044 cs->bcs[0].hw.hfc.send = init_send_hfcd(32); in init2bds0()
1045 if (!cs->bcs[1].hw.hfc.send) in init2bds0()
1046 cs->bcs[1].hw.hfc.send = init_send_hfcd(32); in init2bds0()
1048 cs->bcs[0].BC_SetStack = setstack_2b; in init2bds0()
1049 cs->bcs[1].BC_SetStack = setstack_2b; in init2bds0()
1050 cs->bcs[0].BC_Close = close_2bs0; in init2bds0()
1051 cs->bcs[1].BC_Close = close_2bs0; in init2bds0()
1052 mode_2bs0(cs->bcs, 0, 0); in init2bds0()
1053 mode_2bs0(cs->bcs + 1, 0, 1); in init2bds0()
1059 kfree(cs->bcs[0].hw.hfc.send); in release2bds0()
1060 cs->bcs[0].hw.hfc.send = NULL; in release2bds0()
1061 kfree(cs->bcs[1].hw.hfc.send); in release2bds0()
1062 cs->bcs[1].hw.hfc.send = NULL; in release2bds0()
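init2bds0() and release2bds0() bracket the driver lifetime of these arrays: each B channel gets a 32-entry send[] bookkeeping array allocated once (guarded by the NULL checks on lines 1043 and 1045) and released with kfree() plus a NULL reset so a later re-init allocates again. A small sketch of that allocate-once/free-and-clear pairing; the element type and the toy_init/toy_release names are assumptions, only the pointer handling is visible in the listing.

    #include <stdlib.h>

    #define SEND_SLOTS 32                       /* matches init_send_hfcd(32) above */

    static unsigned int *send_slots[2];         /* one bookkeeping array per B channel */

    static void toy_init(void)
    {
        int i;

        for (i = 0; i < 2; i++)
            if (!send_slots[i])                 /* allocate only once, as in init2bds0() */
                send_slots[i] = calloc(SEND_SLOTS, sizeof(*send_slots[i]));
    }

    static void toy_release(void)
    {
        int i;

        for (i = 0; i < 2; i++) {
            free(send_slots[i]);                /* kfree() in release2bds0() */
            send_slots[i] = NULL;               /* clear so a later init re-allocates */
        }
    }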