Lines matching refs:skb

147 l2up(struct layer2 *l2, u_int prim, struct sk_buff *skb)  in l2up()  argument
153 mISDN_HEAD_PRIM(skb) = prim; in l2up()
154 mISDN_HEAD_ID(skb) = (l2->ch.nr << 16) | l2->ch.addr; in l2up()
155 err = l2->up->send(l2->up, skb); in l2up()
159 dev_kfree_skb(skb); in l2up()
166 struct sk_buff *skb; in l2up_create() local
172 skb = mI_alloc_skb(len, GFP_ATOMIC); in l2up_create()
173 if (!skb) in l2up_create()
175 hh = mISDN_HEAD_P(skb); in l2up_create()
179 memcpy(skb_put(skb, len), arg, len); in l2up_create()
180 err = l2->up->send(l2->up, skb); in l2up_create()
184 dev_kfree_skb(skb); in l2up_create()
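The fragments above (lines 147-184) outline the upward path: stamp the mISDN header, hand the buffer to the upper channel, and free it only if the send fails. A minimal sketch assembled from those fragments, assuming the driver's layer2.h / <linux/mISDNif.h> context; the real functions also check l2->up for NULL and log a warning on error, which is omitted here.

static void
l2up(struct layer2 *l2, u_int prim, struct sk_buff *skb)
{
	int err;

	mISDN_HEAD_PRIM(skb) = prim;			/* stamp primitive */
	mISDN_HEAD_ID(skb) = (l2->ch.nr << 16) | l2->ch.addr;
	err = l2->up->send(l2->up, skb);		/* hand off upward */
	if (err)
		dev_kfree_skb(skb);			/* still ours on error */
}

static int
l2up_create(struct layer2 *l2, u_int prim, int len, void *arg)
{
	struct sk_buff *skb;
	struct mISDNhead *hh;
	int err;

	skb = mI_alloc_skb(len, GFP_ATOMIC);		/* reserves mISDN headroom */
	if (!skb)
		return -ENOMEM;
	hh = mISDN_HEAD_P(skb);
	hh->prim = prim;
	hh->id = (l2->ch.nr << 16) | l2->ch.addr;
	if (len)
		memcpy(skb_put(skb, len), arg, len);	/* copy payload, if any */
	err = l2->up->send(l2->up, skb);
	if (err)
		dev_kfree_skb(skb);			/* send did not consume it */
	return 0;
}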
189 l2down_skb(struct layer2 *l2, struct sk_buff *skb) { in l2down_skb() argument
192 ret = l2->ch.recv(l2->ch.peer, skb); in l2down_skb()
200 l2down_raw(struct layer2 *l2, struct sk_buff *skb) in l2down_raw() argument
202 struct mISDNhead *hh = mISDN_HEAD_P(skb); in l2down_raw()
206 skb_queue_tail(&l2->down_queue, skb); in l2down_raw()
209 l2->down_id = mISDN_HEAD_ID(skb); in l2down_raw()
211 return l2down_skb(l2, skb); in l2down_raw()
215 l2down(struct layer2 *l2, u_int prim, u_int id, struct sk_buff *skb) in l2down() argument
217 struct mISDNhead *hh = mISDN_HEAD_P(skb); in l2down()
221 return l2down_raw(l2, skb); in l2down()
227 struct sk_buff *skb; in l2down_create() local
231 skb = mI_alloc_skb(len, GFP_ATOMIC); in l2down_create()
232 if (!skb) in l2down_create()
234 hh = mISDN_HEAD_P(skb); in l2down_create()
238 memcpy(skb_put(skb, len), arg, len); in l2down_create()
239 err = l2down_raw(l2, skb); in l2down_create()
241 dev_kfree_skb(skb); in l2down_create()
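Lines 189-241 show the mirror-image downward path. A sketch of how it fits together, assuming the driver's FLG_L1_NOTREADY flag and PH_DATA_REQ primitive; only the down_queue/down_id bookkeeping is visible in the fragments, and ph_data_confirm() (lines 246-262) is the piece that later clears the flag and releases the next queued frame. l2down_create() builds a fresh skb the same way l2up_create() does and is not repeated.

static int
l2down_skb(struct layer2 *l2, struct sk_buff *skb)
{
	return l2->ch.recv(l2->ch.peer, skb);	/* hand to the channel below */
}

static int
l2down_raw(struct layer2 *l2, struct sk_buff *skb)
{
	struct mISDNhead *hh = mISDN_HEAD_P(skb);

	if (hh->prim == PH_DATA_REQ) {
		if (test_and_set_bit(FLG_L1_NOTREADY, &l2->flag)) {
			skb_queue_tail(&l2->down_queue, skb);	/* L1 busy: park it */
			return 0;
		}
		l2->down_id = mISDN_HEAD_ID(skb);	/* remember frame in flight */
	}
	return l2down_skb(l2, skb);
}

static int
l2down(struct layer2 *l2, u_int prim, u_int id, struct sk_buff *skb)
{
	struct mISDNhead *hh = mISDN_HEAD_P(skb);

	hh->prim = prim;
	hh->id = id;
	return l2down_raw(l2, skb);
}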
246 ph_data_confirm(struct layer2 *l2, struct mISDNhead *hh, struct sk_buff *skb) { in ph_data_confirm() argument
247 struct sk_buff *nskb = skb; in ph_data_confirm()
262 dev_kfree_skb(skb); in ph_data_confirm()
290 struct sk_buff *skb; in l2_timeout() local
293 skb = mI_alloc_skb(0, GFP_ATOMIC); in l2_timeout()
294 if (!skb) { in l2_timeout()
300 hh = mISDN_HEAD_P(skb); in l2_timeout()
308 l2->ch.st->own.recv(&l2->ch.st->own, skb); in l2_timeout()
423 enqueue_super(struct layer2 *l2, struct sk_buff *skb) in enqueue_super() argument
425 if (l2down(l2, PH_DATA_REQ, l2_newid(l2), skb)) in enqueue_super()
426 dev_kfree_skb(skb); in enqueue_super()
430 enqueue_ui(struct layer2 *l2, struct sk_buff *skb) in enqueue_ui() argument
434 if (l2down(l2, PH_DATA_REQ, l2_newid(l2), skb)) in enqueue_ui()
435 dev_kfree_skb(skb); in enqueue_ui()
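The two enqueue helpers (lines 423-435) spell out the skb ownership rule used throughout the file: l2down() consumes the buffer only when it succeeds, so the caller frees it on a non-zero return. Reassembled from the fragments above; enqueue_ui() follows the same pattern.

static void
enqueue_super(struct layer2 *l2, struct sk_buff *skb)
{
	if (l2down(l2, PH_DATA_REQ, l2_newid(l2), skb))
		dev_kfree_skb(skb);	/* l2down() did not take ownership */
}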
510 iframe_error(struct layer2 *l2, struct sk_buff *skb) in iframe_error() argument
513 int rsp = *skb->data & 0x2; in iframe_error()
520 if (skb->len < i) in iframe_error()
522 if ((skb->len - i) > l2->maxlen) in iframe_error()
528 super_error(struct layer2 *l2, struct sk_buff *skb) in super_error() argument
530 if (skb->len != l2addrsize(l2) + in super_error()
537 unnum_error(struct layer2 *l2, struct sk_buff *skb, int wantrsp) in unnum_error() argument
539 int rsp = (*skb->data & 0x2) >> 1; in unnum_error()
544 if (skb->len != l2addrsize(l2) + 1) in unnum_error()
550 UI_error(struct layer2 *l2, struct sk_buff *skb) in UI_error() argument
552 int rsp = *skb->data & 0x2; in UI_error()
557 if (skb->len > l2->maxlen + l2addrsize(l2) + 1) in UI_error()
563 FRMR_error(struct layer2 *l2, struct sk_buff *skb) in FRMR_error() argument
566 u_char *datap = skb->data + headers; in FRMR_error()
567 int rsp = *skb->data & 0x2; in FRMR_error()
574 if (skb->len < headers + 5) in FRMR_error()
581 if (skb->len < headers + 3) in FRMR_error()
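Lines 510-581 are the per-frame-type validators that run before any FSM event is raised. A sketch of iframe_error() built around the checks visible in the fragments (C/R bit, minimum length, maximum length); the single-character return codes follow the driver's convention of tagging frame errors for later reporting, and super_error(), unnum_error(), UI_error() and FRMR_error() apply the same idea with their own expected lengths.

static int
iframe_error(struct layer2 *l2, struct sk_buff *skb)
{
	u_int i = l2addrsize(l2) +
		  (test_bit(FLG_MOD128, &l2->flag) ? 2 : 1);	/* addr + control */
	int rsp = *skb->data & 0x2;	/* C/R bit of the address field */

	if (test_bit(FLG_ORIG, &l2->flag))
		rsp = !rsp;		/* command/response sense depends on side */
	if (rsp)
		return 'L';		/* I frames must be commands */
	if (skb->len < i)
		return 'N';		/* shorter than address + control */
	if ((skb->len - i) > l2->maxlen)
		return 'O';		/* information field exceeds N201 */
	return 0;			/* frame looks valid */
}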
603 struct sk_buff *skb; in setva() local
618 skb = skb_dequeue(&l2->tmp_queue); in setva()
619 while (skb) { in setva()
620 dev_kfree_skb(skb); in setva()
621 skb = skb_dequeue(&l2->tmp_queue); in setva()
626 send_uframe(struct layer2 *l2, struct sk_buff *skb, u_char cmd, u_char cr) in send_uframe() argument
633 if (skb) in send_uframe()
634 skb_trim(skb, 0); in send_uframe()
636 skb = mI_alloc_skb(i, GFP_ATOMIC); in send_uframe()
637 if (!skb) { in send_uframe()
643 memcpy(skb_put(skb, i), tmp, i); in send_uframe()
644 enqueue_super(l2, skb); in send_uframe()
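send_uframe() (lines 626-644) shows the reuse-or-allocate pattern for U-frame responses: build the address field plus one control octet in a stack buffer, reuse the caller's skb if one was passed in (trimmed to zero), otherwise allocate a fresh one, then push it through enqueue_super(). A sketch assuming the driver's sethdraddr() helper and MAX_L2HEADER_LEN constant; the real function logs allocation failures.

static void
send_uframe(struct layer2 *l2, struct sk_buff *skb, u_char cmd, u_char cr)
{
	u_char tmp[MAX_L2HEADER_LEN];
	int i;

	i = sethdraddr(l2, tmp, cr);	/* LAPD address octets */
	tmp[i++] = cmd;			/* U-frame control (UA, DM, SABME, ...) */
	if (skb)
		skb_trim(skb, 0);	/* reuse the received frame's buffer */
	else {
		skb = mI_alloc_skb(i, GFP_ATOMIC);
		if (!skb)
			return;		/* real code warns about the failed alloc */
	}
	memcpy(skb_put(skb, i), tmp, i);
	enqueue_super(l2, skb);
}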
649 get_PollFlag(struct layer2 *l2, struct sk_buff *skb) in get_PollFlag() argument
651 return skb->data[l2addrsize(l2)] & 0x10; in get_PollFlag()
655 get_PollFlagFree(struct layer2 *l2, struct sk_buff *skb) in get_PollFlagFree() argument
659 PF = get_PollFlag(l2, skb); in get_PollFlagFree()
660 dev_kfree_skb(skb); in get_PollFlagFree()
725 struct sk_buff *skb = arg; in l2_mdl_error_ua() local
728 if (get_PollFlagFree(l2, skb)) in l2_mdl_error_ua()
738 struct sk_buff *skb = arg; in l2_mdl_error_dm() local
741 if (get_PollFlagFree(l2, skb)) in l2_mdl_error_dm()
753 struct sk_buff *skb = arg; in l2_st8_mdl_error_dm() local
756 if (get_PollFlagFree(l2, skb)) in l2_st8_mdl_error_dm()
785 struct sk_buff *skb = arg; in l2_queue_ui_assign() local
787 skb_queue_tail(&l2->ui_queue, skb); in l2_queue_ui_assign()
796 struct sk_buff *skb = arg; in l2_queue_ui() local
798 skb_queue_tail(&l2->ui_queue, skb); in l2_queue_ui()
804 struct sk_buff *skb; in tx_ui() local
812 while ((skb = skb_dequeue(&l2->ui_queue))) { in tx_ui()
813 memcpy(skb_push(skb, i), header, i); in tx_ui()
814 enqueue_ui(l2, skb); in tx_ui()
822 struct sk_buff *skb = arg; in l2_send_ui() local
824 skb_queue_tail(&l2->ui_queue, skb); in l2_send_ui()
832 struct sk_buff *skb = arg; in l2_got_ui() local
834 skb_pull(skb, l2headersize(l2, 1)); in l2_got_ui()
841 l2up(l2, DL_UNITDATA_IND, skb); in l2_got_ui()
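Lines 785-841 cover the UI (unacknowledged) data path: outgoing skbs wait in ui_queue until tx_ui() prepends the LAPD address plus the UI control octet and pushes them down, while l2_got_ui() strips the same header on receive and delivers DL_UNITDATA_IND. A sketch assuming the driver's CMD/UI constants and sethdraddr(); the LAPD-NET broadcast-TEI special case and debug output are omitted.

static void
tx_ui(struct layer2 *l2)
{
	struct sk_buff *skb;
	u_char header[MAX_L2HEADER_LEN];
	int i;

	i = sethdraddr(l2, header, CMD);	/* LAPD address field */
	header[i++] = UI;			/* unnumbered information control */
	while ((skb = skb_dequeue(&l2->ui_queue))) {
		memcpy(skb_push(skb, i), header, i);
		enqueue_ui(l2, skb);		/* PH_DATA_REQ downwards */
	}
}

static void
l2_got_ui(struct FsmInst *fi, int event, void *arg)
{
	struct layer2 *l2 = fi->userdata;
	struct sk_buff *skb = arg;

	skb_pull(skb, l2headersize(l2, 1));	/* address + 1-octet UI control */
	l2up(l2, DL_UNITDATA_IND, skb);		/* deliver payload to layer 3 */
}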
847 struct sk_buff *skb = arg; in l2_establish() local
852 dev_kfree_skb(skb); in l2_establish()
858 struct sk_buff *skb = arg; in l2_discard_i_setl3() local
864 dev_kfree_skb(skb); in l2_discard_i_setl3()
870 struct sk_buff *skb = arg; in l2_l3_reestablish() local
876 dev_kfree_skb(skb); in l2_l3_reestablish()
883 struct sk_buff *skb = arg; in l2_release() local
885 skb_trim(skb, 0); in l2_release()
886 l2up(l2, DL_RELEASE_CNF, skb); in l2_release()
892 struct sk_buff *skb = arg; in l2_pend_rel() local
896 dev_kfree_skb(skb); in l2_pend_rel()
903 struct sk_buff *skb = arg; in l2_disconnect() local
912 if (skb) in l2_disconnect()
913 dev_kfree_skb(skb); in l2_disconnect()
920 struct sk_buff *skb = arg; in l2_start_multi() local
927 send_uframe(l2, NULL, UA | get_PollFlag(l2, skb), RSP); in l2_start_multi()
930 skb_trim(skb, 0); in l2_start_multi()
931 l2up(l2, DL_ESTABLISH_IND, skb); in l2_start_multi()
940 struct sk_buff *skb = arg; in l2_send_UA() local
942 send_uframe(l2, skb, UA | get_PollFlag(l2, skb), RSP); in l2_send_UA()
949 struct sk_buff *skb = arg; in l2_send_DM() local
951 send_uframe(l2, skb, DM | get_PollFlag(l2, skb), RSP); in l2_send_DM()
958 struct sk_buff *skb = arg; in l2_restart_multi() local
961 send_uframe(l2, skb, UA | get_PollFlag(l2, skb), RSP); in l2_restart_multi()
993 struct sk_buff *skb = arg; in l2_stop_multi() local
999 send_uframe(l2, skb, UA | get_PollFlag(l2, skb), RSP); in l2_stop_multi()
1011 struct sk_buff *skb = arg; in l2_connected() local
1014 if (!get_PollFlag(l2, skb)) { in l2_connected()
1018 dev_kfree_skb(skb); in l2_connected()
1048 struct sk_buff *skb = arg; in l2_released() local
1050 if (!get_PollFlag(l2, skb)) { in l2_released()
1054 dev_kfree_skb(skb); in l2_released()
1066 struct sk_buff *skb = arg; in l2_reestablish() local
1068 if (!get_PollFlagFree(l2, skb)) { in l2_reestablish()
1078 struct sk_buff *skb = arg; in l2_st5_dm_release() local
1080 if (get_PollFlagFree(l2, skb)) { in l2_st5_dm_release()
1098 struct sk_buff *skb = arg; in l2_st6_dm_release() local
1100 if (get_PollFlagFree(l2, skb)) { in l2_st6_dm_release()
1112 struct sk_buff *skb; in enquiry_cr() local
1122 skb = mI_alloc_skb(i, GFP_ATOMIC); in enquiry_cr()
1123 if (!skb) { in enquiry_cr()
1128 memcpy(skb_put(skb, i), tmp, i); in enquiry_cr()
1129 enqueue_super(l2, skb); in enquiry_cr()
1196 struct sk_buff *skb = arg; in l2_st7_got_super() local
1200 rsp = *skb->data & 0x2; in l2_st7_got_super()
1204 skb_pull(skb, l2addrsize(l2)); in l2_st7_got_super()
1205 if (IsRNR(skb->data, l2)) { in l2_st7_got_super()
1210 if (IsREJ(skb->data, l2)) in l2_st7_got_super()
1214 PollFlag = (skb->data[1] & 0x1) == 0x1; in l2_st7_got_super()
1215 nr = skb->data[1] >> 1; in l2_st7_got_super()
1217 PollFlag = (skb->data[0] & 0x10); in l2_st7_got_super()
1218 nr = (skb->data[0] >> 5) & 0x7; in l2_st7_got_super()
1220 dev_kfree_skb(skb); in l2_st7_got_super()
1257 struct sk_buff *skb = arg; in l2_feed_i_if_reest() local
1260 skb_queue_tail(&l2->i_queue, skb); in l2_feed_i_if_reest()
1262 dev_kfree_skb(skb); in l2_feed_i_if_reest()
1269 struct sk_buff *skb = arg; in l2_feed_i_pull() local
1271 skb_queue_tail(&l2->i_queue, skb); in l2_feed_i_pull()
1279 struct sk_buff *skb = arg; in l2_feed_iqueue() local
1281 skb_queue_tail(&l2->i_queue, skb); in l2_feed_iqueue()
1288 struct sk_buff *skb = arg; in l2_got_iframe() local
1294 PollFlag = ((skb->data[i + 1] & 0x1) == 0x1); in l2_got_iframe()
1295 ns = skb->data[i] >> 1; in l2_got_iframe()
1296 nr = (skb->data[i + 1] >> 1) & 0x7f; in l2_got_iframe()
1298 PollFlag = (skb->data[i] & 0x10); in l2_got_iframe()
1299 ns = (skb->data[i] >> 1) & 0x7; in l2_got_iframe()
1300 nr = (skb->data[i] >> 5) & 0x7; in l2_got_iframe()
1303 dev_kfree_skb(skb); in l2_got_iframe()
1318 skb_pull(skb, l2headersize(l2, 0)); in l2_got_iframe()
1319 l2up(l2, DL_DATA_IND, skb); in l2_got_iframe()
1322 dev_kfree_skb(skb); in l2_got_iframe()
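The l2_got_iframe() fragments at lines 1294-1300 decode N(S), N(R) and the poll bit differently for modulo-128 (two control octets) and modulo-8 (one control octet) operation. The decoding is gathered below into decode_iframe_ctrl(), a hypothetical helper name chosen for illustration; in the driver this is done inline before the header is stripped (skb_pull, line 1318) and the payload is passed up as DL_DATA_IND (line 1319).

static void
decode_iframe_ctrl(struct layer2 *l2, struct sk_buff *skb,
		   unsigned int *ns, unsigned int *nr, int *pf)
{
	int i = l2addrsize(l2);			/* control field follows the address */

	if (test_bit(FLG_MOD128, &l2->flag)) {	/* extended (modulo-128) mode */
		*pf = (skb->data[i + 1] & 0x1) == 0x1;	/* P bit, second octet */
		*ns = skb->data[i] >> 1;		/* N(S), bits 1-7 */
		*nr = (skb->data[i + 1] >> 1) & 0x7f;	/* N(R), bits 1-7 */
	} else {				/* basic (modulo-8) mode */
		*pf = skb->data[i] & 0x10;		/* P bit */
		*ns = (skb->data[i] >> 1) & 0x7;	/* N(S) */
		*nr = (skb->data[i] >> 5) & 0x7;	/* N(R) */
	}
}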
1479 struct sk_buff *skb, *nskb; in l2_pull_iqueue() local
1486 skb = skb_dequeue(&l2->i_queue); in l2_pull_iqueue()
1487 if (!skb) in l2_pull_iqueue()
1495 nskb = skb_realloc_headroom(skb, i); in l2_pull_iqueue()
1499 skb_queue_head(&l2->i_queue, skb); in l2_pull_iqueue()
1515 l2->windowar[p1] = skb; in l2_pull_iqueue()
1529 struct sk_buff *skb = arg; in l2_st8_got_super() local
1533 rsp = *skb->data & 0x2; in l2_st8_got_super()
1537 skb_pull(skb, l2addrsize(l2)); in l2_st8_got_super()
1539 if (IsRNR(skb->data, l2)) { in l2_st8_got_super()
1546 PollFlag = (skb->data[1] & 0x1) == 0x1; in l2_st8_got_super()
1547 nr = skb->data[1] >> 1; in l2_st8_got_super()
1549 PollFlag = (skb->data[0] & 0x10); in l2_st8_got_super()
1550 nr = (skb->data[0] >> 5) & 0x7; in l2_st8_got_super()
1552 dev_kfree_skb(skb); in l2_st8_got_super()
1583 struct sk_buff *skb = arg; in l2_got_FRMR() local
1585 skb_pull(skb, l2addrsize(l2) + 1); in l2_got_FRMR()
1587 if (!(skb->data[0] & 1) || ((skb->data[0] & 3) == 1) || /* I or S */ in l2_got_FRMR()
1588 (IsUA(skb->data) && (fi->state == ST_L2_7))) { in l2_got_FRMR()
1593 dev_kfree_skb(skb); in l2_got_FRMR()
1666 struct sk_buff *skb = arg; in l2_st14_persistent_da() local
1671 l2up(l2, DL_RELEASE_IND, skb); in l2_st14_persistent_da()
1673 dev_kfree_skb(skb); in l2_st14_persistent_da()
1680 struct sk_buff *skb = arg; in l2_st5_persistent_da() local
1690 dev_kfree_skb(skb); in l2_st5_persistent_da()
1697 struct sk_buff *skb = arg; in l2_st6_persistent_da() local
1701 l2up(l2, DL_RELEASE_CNF, skb); in l2_st6_persistent_da()
1711 struct sk_buff *skb = arg; in l2_persistent_da() local
1718 l2up(l2, DL_RELEASE_IND, skb); in l2_persistent_da()
1728 struct sk_buff *skb = arg; in l2_set_own_busy() local
1734 if (skb) in l2_set_own_busy()
1735 dev_kfree_skb(skb); in l2_set_own_busy()
1742 struct sk_buff *skb = arg; in l2_clear_own_busy() local
1748 if (skb) in l2_clear_own_busy()
1749 dev_kfree_skb(skb); in l2_clear_own_busy()
1868 ph_data_indication(struct layer2 *l2, struct mISDNhead *hh, struct sk_buff *skb) in ph_data_indication() argument
1870 u_char *datap = skb->data; in ph_data_indication()
1877 if (skb->len <= l) { in ph_data_indication()
1898 dev_kfree_skb(skb); in ph_data_indication()
1906 dev_kfree_skb(skb); in ph_data_indication()
1912 c = iframe_error(l2, skb); in ph_data_indication()
1914 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_I, skb); in ph_data_indication()
1916 c = super_error(l2, skb); in ph_data_indication()
1918 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_SUPER, skb); in ph_data_indication()
1920 c = UI_error(l2, skb); in ph_data_indication()
1922 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_UI, skb); in ph_data_indication()
1924 c = unnum_error(l2, skb, CMD); in ph_data_indication()
1926 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_SABME, skb); in ph_data_indication()
1928 c = unnum_error(l2, skb, RSP); in ph_data_indication()
1930 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_UA, skb); in ph_data_indication()
1932 c = unnum_error(l2, skb, CMD); in ph_data_indication()
1934 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_DISC, skb); in ph_data_indication()
1936 c = unnum_error(l2, skb, RSP); in ph_data_indication()
1938 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_DM, skb); in ph_data_indication()
1940 c = FRMR_error(l2, skb); in ph_data_indication()
1942 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_FRMR, skb); in ph_data_indication()
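ph_data_indication() (lines 1868-1942) is the receive dispatcher: it classifies the frame from the low control-field bits, runs the matching validator, and only then raises the corresponding FSM event. A condensed sketch assuming the driver's IsUI() helper; the LAPD SAPI/TEI filtering that frees foreign frames (lines 1898 and 1906) and the SABME/UA/DISC/DM/FRMR branches, which follow the same validate-then-dispatch shape, are not reproduced.

static int
ph_data_indication(struct layer2 *l2, struct mISDNhead *hh, struct sk_buff *skb)
{
	u_char *datap = skb->data;
	u_int l = l2addrsize(l2);
	int c = 0, ret = -EINVAL;

	if (skb->len <= l)
		return ret;		/* too short to carry a control field */
	datap += l;			/* skip the address field */
	if (!(*datap & 1)) {		/* ....0: I frame */
		c = iframe_error(l2, skb);
		if (!c)
			ret = mISDN_FsmEvent(&l2->l2m, EV_L2_I, skb);
	} else if ((*datap & 3) == 1) {	/* ...01: S frame */
		c = super_error(l2, skb);
		if (!c)
			ret = mISDN_FsmEvent(&l2->l2m, EV_L2_SUPER, skb);
	} else if (IsUI(datap)) {	/* ...11: U frame, here UI */
		c = UI_error(l2, skb);
		if (!c)
			ret = mISDN_FsmEvent(&l2->l2m, EV_L2_UI, skb);
	}
	if (c)				/* validator rejected the frame */
		mISDN_FsmEvent(&l2->l2m, EV_L2_FRAME_ERROR, (void *)(long)c);
	/* a frame the state machine did not consume is freed by the caller,
	 * l2_send() (see line 2039) */
	return ret;
}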
1954 l2_send(struct mISDNchannel *ch, struct sk_buff *skb) in l2_send() argument
1957 struct mISDNhead *hh = mISDN_HEAD_P(skb); in l2_send()
1974 ret = ph_data_indication(l2, hh, skb); in l2_send()
1977 ret = ph_data_confirm(l2, hh, skb); in l2_send()
1984 EV_L2_DL_ESTABLISH_REQ, skb); in l2_send()
1989 ret = mISDN_FsmEvent(&l2->l2m, EV_L1_DEACTIVATE, skb); in l2_send()
1994 ret = l2->up->send(l2->up, skb); in l2_send()
1997 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_DL_DATA, skb); in l2_send()
2000 ret = mISDN_FsmEvent(&l2->l2m, EV_L2_DL_UNITDATA, skb); in l2_send()
2009 EV_L2_DL_ESTABLISH_REQ, skb); in l2_send()
2017 skb); in l2_send()
2025 skb); in l2_send()
2039 dev_kfree_skb(skb); in l2_send()
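l2_send() (lines 1954-2039) is the mISDNchannel send callback where both directions meet: primitives coming up from layer 1 and requests coming down from layer 3 are translated into FSM events or direct calls, and anything not consumed is freed at the end (line 2039). A condensed sketch showing only the data-path cases visible in the fragments; activation/deactivation and establish/release handling (lines 1984-2025) is left out.

static int
l2_send(struct mISDNchannel *ch, struct sk_buff *skb)
{
	struct layer2 *l2 = container_of(ch, struct layer2, ch);
	struct mISDNhead *hh = mISDN_HEAD_P(skb);
	int ret = -EINVAL;

	switch (hh->prim) {
	case PH_DATA_IND:		/* frame received from layer 1 */
		ret = ph_data_indication(l2, hh, skb);
		break;
	case PH_DATA_CNF:		/* layer 1 confirmed a transmitted frame */
		ret = ph_data_confirm(l2, hh, skb);
		break;
	case DL_DATA_REQ:		/* layer 3 wants an I frame sent */
		ret = mISDN_FsmEvent(&l2->l2m, EV_L2_DL_DATA, skb);
		break;
	case DL_UNITDATA_REQ:		/* layer 3 wants a UI frame sent */
		ret = mISDN_FsmEvent(&l2->l2m, EV_L2_DL_UNITDATA, skb);
		break;
	default:
		break;
	}
	if (ret) {			/* nobody took the skb: drop it */
		dev_kfree_skb(skb);
		ret = 0;
	}
	return ret;
}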