
Searched refs:nskb (Results 1 – 84 of 84) sorted by relevance

/linux-4.4.14/net/ipv4/netfilter/
nf_reject_ipv4.c
45 struct iphdr *nf_reject_iphdr_put(struct sk_buff *nskb, in nf_reject_iphdr_put() argument
51 skb_reset_network_header(nskb); in nf_reject_iphdr_put()
52 niph = (struct iphdr *)skb_put(nskb, sizeof(struct iphdr)); in nf_reject_iphdr_put()
64 nskb->protocol = htons(ETH_P_IP); in nf_reject_iphdr_put()
70 void nf_reject_ip_tcphdr_put(struct sk_buff *nskb, const struct sk_buff *oldskb, in nf_reject_ip_tcphdr_put() argument
73 struct iphdr *niph = ip_hdr(nskb); in nf_reject_ip_tcphdr_put()
76 skb_reset_transport_header(nskb); in nf_reject_ip_tcphdr_put()
77 tcph = (struct tcphdr *)skb_put(nskb, sizeof(struct tcphdr)); in nf_reject_ip_tcphdr_put()
95 nskb->ip_summed = CHECKSUM_PARTIAL; in nf_reject_ip_tcphdr_put()
96 nskb->csum_start = (unsigned char *)tcph - nskb->head; in nf_reject_ip_tcphdr_put()
[all …]
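
The matches above all follow the same construction pattern: a fresh nskb is allocated, headroom is reserved for the link-layer header, and the network and transport headers are written into it with skb_put(). A minimal sketch of that pattern, assuming a hypothetical helper (not the kernel's nf_reject code) and zeroed fields standing in for the real address/port handling:

#include <linux/skbuff.h>
#include <linux/netdevice.h>
#include <linux/if_ether.h>
#include <linux/ip.h>
#include <linux/tcp.h>
#include <linux/string.h>

/* Hypothetical helper for illustration only. */
static struct sk_buff *build_empty_tcp_reply(gfp_t gfp)
{
    struct sk_buff *nskb;
    struct iphdr *niph;
    struct tcphdr *ntcph;

    nskb = alloc_skb(LL_MAX_HEADER + sizeof(*niph) + sizeof(*ntcph), gfp);
    if (!nskb)
        return NULL;

    skb_reserve(nskb, LL_MAX_HEADER);        /* room for the link-layer header */

    skb_reset_network_header(nskb);
    niph = (struct iphdr *)skb_put(nskb, sizeof(*niph));
    memset(niph, 0, sizeof(*niph));          /* caller fills in real IP fields */

    skb_reset_transport_header(nskb);
    ntcph = (struct tcphdr *)skb_put(nskb, sizeof(*ntcph));
    memset(ntcph, 0, sizeof(*ntcph));        /* caller fills in real TCP fields */

    nskb->protocol = htons(ETH_P_IP);
    return nskb;
}
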
ipt_SYNPROXY.c
43 const struct sk_buff *skb, struct sk_buff *nskb, in synproxy_send_tcp() argument
51 nskb->ip_summed = CHECKSUM_PARTIAL; in synproxy_send_tcp()
52 nskb->csum_start = (unsigned char *)nth - nskb->head; in synproxy_send_tcp()
53 nskb->csum_offset = offsetof(struct tcphdr, check); in synproxy_send_tcp()
55 skb_dst_set_noref(nskb, skb_dst(skb)); in synproxy_send_tcp()
56 nskb->protocol = htons(ETH_P_IP); in synproxy_send_tcp()
57 if (ip_route_me_harder(net, nskb, RTN_UNSPEC)) in synproxy_send_tcp()
61 nskb->nfct = nfct; in synproxy_send_tcp()
62 nskb->nfctinfo = ctinfo; in synproxy_send_tcp()
66 ip_local_out(net, nskb->sk, nskb); in synproxy_send_tcp()
[all …]
/linux-4.4.14/net/llc/
llc_c_ac.c
202 struct sk_buff *nskb = llc_alloc_frame(sk, llc->dev, LLC_PDU_TYPE_U, 0); in llc_conn_ac_send_disc_cmd_p_set_x() local
204 if (nskb) { in llc_conn_ac_send_disc_cmd_p_set_x()
207 llc_pdu_header_init(nskb, LLC_PDU_TYPE_U, sap->laddr.lsap, in llc_conn_ac_send_disc_cmd_p_set_x()
209 llc_pdu_init_as_disc_cmd(nskb, 1); in llc_conn_ac_send_disc_cmd_p_set_x()
210 rc = llc_mac_hdr_init(nskb, llc->dev->dev_addr, llc->daddr.mac); in llc_conn_ac_send_disc_cmd_p_set_x()
213 llc_conn_send_pdu(sk, nskb); in llc_conn_ac_send_disc_cmd_p_set_x()
219 kfree_skb(nskb); in llc_conn_ac_send_disc_cmd_p_set_x()
227 struct sk_buff *nskb = llc_alloc_frame(sk, llc->dev, LLC_PDU_TYPE_U, 0); in llc_conn_ac_send_dm_rsp_f_set_p() local
229 if (nskb) { in llc_conn_ac_send_dm_rsp_f_set_p()
234 llc_pdu_header_init(nskb, LLC_PDU_TYPE_U, sap->laddr.lsap, in llc_conn_ac_send_dm_rsp_f_set_p()
[all …]
llc_station.c
52 struct sk_buff *nskb = llc_alloc_frame(NULL, skb->dev, LLC_PDU_TYPE_U, in llc_station_ac_send_xid_r() local
55 if (!nskb) in llc_station_ac_send_xid_r()
60 llc_pdu_header_init(nskb, LLC_PDU_TYPE_U, 0, dsap, LLC_PDU_RSP); in llc_station_ac_send_xid_r()
61 llc_pdu_init_as_xid_rsp(nskb, LLC_XID_NULL_CLASS_2, 127); in llc_station_ac_send_xid_r()
62 rc = llc_mac_hdr_init(nskb, skb->dev->dev_addr, mac_da); in llc_station_ac_send_xid_r()
65 dev_queue_xmit(nskb); in llc_station_ac_send_xid_r()
69 kfree_skb(nskb); in llc_station_ac_send_xid_r()
78 struct sk_buff *nskb; in llc_station_ac_send_test_r() local
82 nskb = llc_alloc_frame(NULL, skb->dev, LLC_PDU_TYPE_U, data_size); in llc_station_ac_send_test_r()
84 if (!nskb) in llc_station_ac_send_test_r()
[all …]
llc_s_ac.c
101 struct sk_buff *nskb; in llc_sap_action_send_xid_r() local
106 nskb = llc_alloc_frame(NULL, skb->dev, LLC_PDU_TYPE_U, in llc_sap_action_send_xid_r()
108 if (!nskb) in llc_sap_action_send_xid_r()
110 llc_pdu_header_init(nskb, LLC_PDU_TYPE_U, sap->laddr.lsap, dsap, in llc_sap_action_send_xid_r()
112 llc_pdu_init_as_xid_rsp(nskb, LLC_XID_NULL_CLASS_2, 0); in llc_sap_action_send_xid_r()
113 rc = llc_mac_hdr_init(nskb, mac_sa, mac_da); in llc_sap_action_send_xid_r()
115 rc = dev_queue_xmit(nskb); in llc_sap_action_send_xid_r()
146 struct sk_buff *nskb; in llc_sap_action_send_test_r() local
156 nskb = llc_alloc_frame(NULL, skb->dev, LLC_PDU_TYPE_U, data_size); in llc_sap_action_send_test_r()
157 if (!nskb) in llc_sap_action_send_test_r()
[all …]
/linux-4.4.14/net/bridge/netfilter/
nft_reject_bridge.c
28 struct sk_buff *nskb) in nft_reject_br_push_etherhdr() argument
32 eth = (struct ethhdr *)skb_push(nskb, ETH_HLEN); in nft_reject_br_push_etherhdr()
33 skb_reset_mac_header(nskb); in nft_reject_br_push_etherhdr()
37 skb_pull(nskb, ETH_HLEN); in nft_reject_br_push_etherhdr()
47 struct sk_buff *nskb; in nft_reject_br_send_v4_tcp_reset() local
59 nskb = alloc_skb(sizeof(struct iphdr) + sizeof(struct tcphdr) + in nft_reject_br_send_v4_tcp_reset()
61 if (!nskb) in nft_reject_br_send_v4_tcp_reset()
64 skb_reserve(nskb, LL_MAX_HEADER); in nft_reject_br_send_v4_tcp_reset()
65 niph = nf_reject_iphdr_put(nskb, oldskb, IPPROTO_TCP, in nft_reject_br_send_v4_tcp_reset()
67 nf_reject_ip_tcphdr_put(nskb, oldskb, oth); in nft_reject_br_send_v4_tcp_reset()
[all …]
/linux-4.4.14/net/ipv6/netfilter/
nf_reject_ipv6.c
67 struct ipv6hdr *nf_reject_ip6hdr_put(struct sk_buff *nskb, in nf_reject_ip6hdr_put() argument
76 skb_put(nskb, sizeof(struct ipv6hdr)); in nf_reject_ip6hdr_put()
77 skb_reset_network_header(nskb); in nf_reject_ip6hdr_put()
78 ip6h = ipv6_hdr(nskb); in nf_reject_ip6hdr_put()
85 nskb->protocol = htons(ETH_P_IPV6); in nf_reject_ip6hdr_put()
91 void nf_reject_ip6_tcphdr_put(struct sk_buff *nskb, in nf_reject_ip6_tcphdr_put() argument
98 skb_reset_transport_header(nskb); in nf_reject_ip6_tcphdr_put()
99 tcph = (struct tcphdr *)skb_put(nskb, sizeof(struct tcphdr)); in nf_reject_ip6_tcphdr_put()
125 tcph->check = csum_ipv6_magic(&ipv6_hdr(nskb)->saddr, in nf_reject_ip6_tcphdr_put()
126 &ipv6_hdr(nskb)->daddr, in nf_reject_ip6_tcphdr_put()
[all …]
ip6t_SYNPROXY.c
41 const struct sk_buff *skb, struct sk_buff *nskb, in synproxy_send_tcp() argument
51 nskb->ip_summed = CHECKSUM_PARTIAL; in synproxy_send_tcp()
52 nskb->csum_start = (unsigned char *)nth - nskb->head; in synproxy_send_tcp()
53 nskb->csum_offset = offsetof(struct tcphdr, check); in synproxy_send_tcp()
71 skb_dst_set(nskb, dst); in synproxy_send_tcp()
74 nskb->nfct = nfct; in synproxy_send_tcp()
75 nskb->nfctinfo = ctinfo; in synproxy_send_tcp()
79 ip6_local_out(net, nskb->sk, nskb); in synproxy_send_tcp()
83 kfree_skb(nskb); in synproxy_send_tcp()
91 struct sk_buff *nskb; in synproxy_send_client_synack() local
[all …]
/linux-4.4.14/net/dsa/
tag_trailer.c
19 struct sk_buff *nskb; in trailer_xmit() local
33 nskb = alloc_skb(NET_IP_ALIGN + skb->len + padlen + 4, GFP_ATOMIC); in trailer_xmit()
34 if (nskb == NULL) { in trailer_xmit()
38 skb_reserve(nskb, NET_IP_ALIGN); in trailer_xmit()
40 skb_reset_mac_header(nskb); in trailer_xmit()
41 skb_set_network_header(nskb, skb_network_header(skb) - skb->head); in trailer_xmit()
42 skb_set_transport_header(nskb, skb_transport_header(skb) - skb->head); in trailer_xmit()
43 skb_copy_and_csum_dev(skb, skb_put(nskb, skb->len)); in trailer_xmit()
47 u8 *pad = skb_put(nskb, padlen); in trailer_xmit()
51 trailer = skb_put(nskb, 4); in trailer_xmit()
[all …]
slave.c
595 struct sk_buff *nskb; in dsa_slave_xmit() local
601 nskb = p->xmit(skb, dev); in dsa_slave_xmit()
602 if (!nskb) in dsa_slave_xmit()
609 return dsa_netpoll_send_skb(p, nskb); in dsa_slave_xmit()
614 nskb->dev = p->parent->dst->master_netdev; in dsa_slave_xmit()
615 dev_queue_xmit(nskb); in dsa_slave_xmit()
/linux-4.4.14/net/bluetooth/cmtp/
core.c
109 struct sk_buff *skb = session->reassembly[id], *nskb; in cmtp_add_msgpart() local
116 nskb = alloc_skb(size, GFP_ATOMIC); in cmtp_add_msgpart()
117 if (!nskb) { in cmtp_add_msgpart()
123 skb_copy_from_linear_data(skb, skb_put(nskb, skb->len), skb->len); in cmtp_add_msgpart()
125 memcpy(skb_put(nskb, count), buf, count); in cmtp_add_msgpart()
127 session->reassembly[id] = nskb; in cmtp_add_msgpart()
212 struct sk_buff *skb, *nskb; in cmtp_process_transmit() local
218 nskb = alloc_skb(session->mtu, GFP_ATOMIC); in cmtp_process_transmit()
219 if (!nskb) { in cmtp_process_transmit()
227 tail = session->mtu - nskb->len; in cmtp_process_transmit()
[all …]
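
The cmtp matches illustrate a grow-by-copy reassembly idiom: a larger nskb is allocated, the data gathered so far is copied over, the new fragment is appended, and the old buffer is released. A simplified sketch of that idiom (append_fragment() is a hypothetical helper, not the driver's function):

#include <linux/skbuff.h>
#include <linux/string.h>
#include <linux/types.h>

static struct sk_buff *append_fragment(struct sk_buff *old,
                                       const u8 *buf, unsigned int count)
{
    unsigned int size = (old ? old->len : 0) + count;
    struct sk_buff *nskb = alloc_skb(size, GFP_ATOMIC);

    if (!nskb)
        return old;                             /* keep what was collected so far */

    if (old)
        skb_copy_from_linear_data(old, skb_put(nskb, old->len), old->len);
    memcpy(skb_put(nskb, count), buf, count);   /* append the new fragment */

    kfree_skb(old);
    return nskb;
}
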
/linux-4.4.14/net/sctp/
output.c
384 struct sk_buff *nskb; in sctp_packet_transmit() local
404 nskb = alloc_skb(packet->size + MAX_HEADER, GFP_ATOMIC); in sctp_packet_transmit()
405 if (!nskb) in sctp_packet_transmit()
409 skb_reserve(nskb, packet->overhead + MAX_HEADER); in sctp_packet_transmit()
414 sctp_packet_set_owner_w(nskb, sk); in sctp_packet_transmit()
425 skb_dst_set(nskb, dst); in sctp_packet_transmit()
428 sh = (struct sctphdr *)skb_push(nskb, sizeof(struct sctphdr)); in sctp_packet_transmit()
429 skb_reset_transport_header(nskb); in sctp_packet_transmit()
493 auth = skb_tail_pointer(nskb); in sctp_packet_transmit()
495 memcpy(skb_put(nskb, chunk->skb->len), in sctp_packet_transmit()
[all …]
/linux-4.4.14/drivers/bluetooth/
hci_bcsp.c
180 struct sk_buff *nskb; in bcsp_prepare_pkt() local
230 nskb = alloc_skb((len + 6) * 2 + 2, GFP_ATOMIC); in bcsp_prepare_pkt()
231 if (!nskb) in bcsp_prepare_pkt()
234 bt_cb(nskb)->pkt_type = pkt_type; in bcsp_prepare_pkt()
236 bcsp_slip_msgdelim(nskb); in bcsp_prepare_pkt()
257 bcsp_slip_one_byte(nskb, hdr[i]); in bcsp_prepare_pkt()
265 bcsp_slip_one_byte(nskb, data[i]); in bcsp_prepare_pkt()
274 bcsp_slip_one_byte(nskb, (u8) ((bcsp_txmsg_crc >> 8) & 0x00ff)); in bcsp_prepare_pkt()
275 bcsp_slip_one_byte(nskb, (u8) (bcsp_txmsg_crc & 0x00ff)); in bcsp_prepare_pkt()
278 bcsp_slip_msgdelim(nskb); in bcsp_prepare_pkt()
[all …]
hci_h5.c
104 struct sk_buff *nskb; in h5_link_control() local
106 nskb = alloc_skb(3, GFP_ATOMIC); in h5_link_control()
107 if (!nskb) in h5_link_control()
110 bt_cb(nskb)->pkt_type = HCI_3WIRE_LINK_PKT; in h5_link_control()
112 memcpy(skb_put(nskb, len), data, len); in h5_link_control()
114 skb_queue_tail(&h5->unrel, nskb); in h5_link_control()
626 struct sk_buff *nskb; in h5_prepare_pkt() local
641 nskb = alloc_skb((len + 6) * 2 + 2, GFP_ATOMIC); in h5_prepare_pkt()
642 if (!nskb) in h5_prepare_pkt()
645 bt_cb(nskb)->pkt_type = pkt_type; in h5_prepare_pkt()
[all …]
bfusb.c
468 struct sk_buff *nskb; in bfusb_send_frame() local
492 nskb = bt_skb_alloc(count + 32, GFP_ATOMIC); in bfusb_send_frame()
493 if (!nskb) { in bfusb_send_frame()
498 nskb->dev = (void *) data; in bfusb_send_frame()
507 memcpy(skb_put(nskb, 3), buf, 3); in bfusb_send_frame()
508 skb_copy_from_linear_data_offset(skb, sent, skb_put(nskb, size), size); in bfusb_send_frame()
515 if ((nskb->len % data->bulk_pkt_size) == 0) { in bfusb_send_frame()
518 memcpy(skb_put(nskb, 2), buf, 2); in bfusb_send_frame()
523 skb_queue_tail(&data->transmit_q, nskb); in bfusb_send_frame()
/linux-4.4.14/include/linux/can/
skb.h
64 struct sk_buff *nskb = skb_clone(skb, GFP_ATOMIC); in can_create_echo_skb() local
66 if (likely(nskb)) { in can_create_echo_skb()
67 can_skb_set_owner(nskb, skb->sk); in can_create_echo_skb()
69 return nskb; in can_create_echo_skb()
/linux-4.4.14/net/bluetooth/bnep/
core.c
301 struct sk_buff *nskb; in bnep_rx_frame() local
366 nskb = alloc_skb(2 + ETH_HLEN + skb->len, GFP_KERNEL); in bnep_rx_frame()
367 if (!nskb) { in bnep_rx_frame()
372 skb_reserve(nskb, 2); in bnep_rx_frame()
377 memcpy(__skb_put(nskb, ETH_HLEN), &s->eh, ETH_HLEN); in bnep_rx_frame()
381 memcpy(__skb_put(nskb, ETH_ALEN), s->eh.h_dest, ETH_ALEN); in bnep_rx_frame()
382 memcpy(__skb_put(nskb, ETH_ALEN), skb_mac_header(skb), ETH_ALEN); in bnep_rx_frame()
383 put_unaligned(s->eh.h_proto, (__be16 *) __skb_put(nskb, 2)); in bnep_rx_frame()
387 memcpy(__skb_put(nskb, ETH_ALEN), skb_mac_header(skb), in bnep_rx_frame()
389 memcpy(__skb_put(nskb, ETH_ALEN + 2), s->eh.h_source, in bnep_rx_frame()
[all …]
/linux-4.4.14/drivers/net/wireless/ath/ath9k/
hif_usb.c
308 struct sk_buff *nskb = NULL; in __hif_usb_tx() local
328 nskb = __skb_dequeue(&hif_dev->tx.tx_skb_queue); in __hif_usb_tx()
331 BUG_ON(!nskb); in __hif_usb_tx()
338 *hdr++ = cpu_to_le16(nskb->len); in __hif_usb_tx()
341 memcpy(buf, nskb->data, nskb->len); in __hif_usb_tx()
342 tx_buf->len = nskb->len + 4; in __hif_usb_tx()
350 __skb_queue_tail(&tx_buf->skb_queue, nskb); in __hif_usb_tx()
527 struct sk_buff *nskb, *skb_pool[MAX_PKT_NUM_IN_TRANSFER]; in ath9k_hif_usb_rx_stream() local
593 nskb = __dev_alloc_skb(pkt_len + 32, GFP_ATOMIC); in ath9k_hif_usb_rx_stream()
594 if (!nskb) { in ath9k_hif_usb_rx_stream()
[all …]
/linux-4.4.14/drivers/isdn/mISDN/
dsp_core.c
285 struct sk_buff *nskb; in dsp_control_req() local
591 nskb = _alloc_mISDN_skb(PH_CONTROL_IND, MISDN_ID_ANY, in dsp_control_req()
593 if (nskb) { in dsp_control_req()
595 if (dsp->up->send(dsp->up, nskb)) in dsp_control_req()
596 dev_kfree_skb(nskb); in dsp_control_req()
598 dev_kfree_skb(nskb); in dsp_control_req()
743 struct sk_buff *nskb; in dsp_function() local
749 nskb = _alloc_mISDN_skb(PH_CONTROL_IND, in dsp_function()
752 if (nskb) { in dsp_function()
755 dsp->up, nskb)) in dsp_function()
[all …]
dsp_cmx.c
1315 struct sk_buff *nskb, *txskb; local
1354 nskb = mI_alloc_skb(len + preload, GFP_ATOMIC);
1355 if (!nskb) {
1361 hh = mISDN_HEAD_P(nskb);
1370 d = skb_put(nskb, preload + len); /* result */
1584 skb_queue_tail(&dsp->sendq, nskb);
1598 memcpy(skb_put(txskb, len), nskb->data + preload,
1609 dsp_change_volume(nskb, dsp->tx_volume);
1612 dsp_pipeline_process_tx(&dsp->pipeline, nskb->data,
1613 nskb->len);
[all …]
dsp_tones.c
440 struct sk_buff *nskb; in dsp_tone_hw_message() local
443 nskb = _alloc_mISDN_skb(PH_CONTROL_REQ, in dsp_tone_hw_message()
446 if (nskb) { in dsp_tone_hw_message()
448 if (dsp->ch.recv(dsp->ch.peer, nskb)) in dsp_tone_hw_message()
449 dev_kfree_skb(nskb); in dsp_tone_hw_message()
451 dev_kfree_skb(nskb); in dsp_tone_hw_message()
l1oip_core.c
365 struct sk_buff *nskb; in l1oip_socket_recv() local
396 nskb = mI_alloc_skb((remotecodec == 3) ? (len << 1) : len, GFP_ATOMIC); in l1oip_socket_recv()
397 if (!nskb) { in l1oip_socket_recv()
401 p = skb_put(nskb, (remotecodec == 3) ? (len << 1) : len); in l1oip_socket_recv()
414 dch->rx_skb = nskb; in l1oip_socket_recv()
444 hc->chan[channel].disorder_skb = nskb; in l1oip_socket_recv()
445 nskb = skb; in l1oip_socket_recv()
451 if (nskb) in l1oip_socket_recv()
453 queue_ch_frame(&bch->ch, PH_DATA_IND, rx_counter, nskb); in l1oip_socket_recv()
layer2.c
247 struct sk_buff *nskb = skb; in ph_data_confirm() local
252 nskb = skb_dequeue(&l2->down_queue); in ph_data_confirm()
253 if (nskb) { in ph_data_confirm()
254 l2->down_id = mISDN_HEAD_ID(nskb); in ph_data_confirm()
255 if (l2down_skb(l2, nskb)) { in ph_data_confirm()
256 dev_kfree_skb(nskb); in ph_data_confirm()
272 nskb = skb_dequeue(&l2->down_queue); in ph_data_confirm()
273 if (nskb) { in ph_data_confirm()
274 l2->down_id = mISDN_HEAD_ID(nskb); in ph_data_confirm()
275 if (l2down_skb(l2, nskb)) { in ph_data_confirm()
[all …]
/linux-4.4.14/drivers/isdn/i4l/
isdn_v110.c
463 struct sk_buff *nskb; in isdn_v110_encode() local
483 if (!(nskb = dev_alloc_skb(size + v->skbres + sizeof(int)))) { in isdn_v110_encode()
487 skb_reserve(nskb, v->skbres + sizeof(int)); in isdn_v110_encode()
489 memcpy(skb_put(nskb, v->framelen), v->OnlineFrame, v->framelen); in isdn_v110_encode()
490 *((int *)skb_push(nskb, sizeof(int))) = 0; in isdn_v110_encode()
491 return nskb; in isdn_v110_encode()
495 rbuf = skb_put(nskb, size); in isdn_v110_encode()
512 skb_trim(nskb, olen); in isdn_v110_encode()
513 *((int *)skb_push(nskb, sizeof(int))) = rlen; in isdn_v110_encode()
514 return nskb; in isdn_v110_encode()
isdn_common.c
2005 struct sk_buff *nskb = NULL; in isdn_writebuf_skb_stub() local
2011 nskb = isdn_v110_encode(dev->v110[idx], skb); in isdn_writebuf_skb_stub()
2013 if (!nskb) in isdn_writebuf_skb_stub()
2015 v110_ret = *((int *)nskb->data); in isdn_writebuf_skb_stub()
2016 skb_pull(nskb, sizeof(int)); in isdn_writebuf_skb_stub()
2017 if (!nskb->len) { in isdn_writebuf_skb_stub()
2018 dev_kfree_skb(nskb); in isdn_writebuf_skb_stub()
2023 ret = dev->drv[drvidx]->interface->writebuf_skb(drvidx, chan, ack, nskb); in isdn_writebuf_skb_stub()
2066 dev_kfree_skb(nskb); in isdn_writebuf_skb_stub()
isdn_ppp.c
1220 struct sk_buff *nskb = skb_realloc_headroom(skb, len); in isdn_ppp_skb_push() local
1222 if (!nskb) { in isdn_ppp_skb_push()
1229 *skb_p = nskb; in isdn_ppp_skb_push()
1230 return skb_push(nskb, len); in isdn_ppp_skb_push()
/linux-4.4.14/net/x25/
x25_dev.c
102 struct sk_buff *nskb; in x25_lapb_receive_frame() local
108 nskb = skb_copy(skb, GFP_ATOMIC); in x25_lapb_receive_frame()
109 if (!nskb) in x25_lapb_receive_frame()
112 skb = nskb; in x25_lapb_receive_frame()
/linux-4.4.14/include/linux/
netlink.h
103 struct sk_buff *nskb; in netlink_skb_clone() local
105 nskb = skb_clone(skb, gfp_mask); in netlink_skb_clone()
106 if (!nskb) in netlink_skb_clone()
111 nskb->destructor = skb->destructor; in netlink_skb_clone()
113 return nskb; in netlink_skb_clone()
skbuff.h
1314 struct sk_buff *nskb = skb_clone(skb, pri); in skb_share_check() local
1316 if (likely(nskb)) in skb_share_check()
1320 skb = nskb; in skb_share_check()
1350 struct sk_buff *nskb = skb_copy(skb, pri); in skb_unshare() local
1353 if (likely(nskb)) in skb_unshare()
1357 skb = nskb; in skb_unshare()
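
skb_share_check() and skb_unshare() return a private nskb (a clone or a full copy) whenever the original is shared, freeing the original reference; callers simply replace their skb pointer with the result. A sketch of the usual call pattern in a receive handler (the handler itself is hypothetical, shown only to illustrate the idiom):

#include <linux/skbuff.h>
#include <linux/netdevice.h>

static int example_rcv(struct sk_buff *skb, struct net_device *dev,
                       struct packet_type *pt, struct net_device *orig_dev)
{
    /* If shared, skb is replaced by a private clone; on allocation
     * failure the original has already been freed and NULL is returned. */
    skb = skb_share_check(skb, GFP_ATOMIC);
    if (!skb)
        return NET_RX_DROP;

    /* ... protocol processing that may modify skb goes here ... */

    consume_skb(skb);
    return NET_RX_SUCCESS;
}
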
/linux-4.4.14/arch/ia64/hp/sim/
simeth.c
403 struct sk_buff *nskb; in make_new_skb() local
409 nskb = dev_alloc_skb(SIMETH_FRAME_SIZE + 2); in make_new_skb()
410 if ( nskb == NULL ) { in make_new_skb()
415 skb_reserve(nskb, 2); /* Align IP on 16 byte boundaries */ in make_new_skb()
417 skb_put(nskb,SIMETH_FRAME_SIZE); in make_new_skb()
419 return nskb; in make_new_skb()
/linux-4.4.14/include/net/netfilter/ipv4/
nf_reject.h
13 struct iphdr *nf_reject_iphdr_put(struct sk_buff *nskb,
16 void nf_reject_ip_tcphdr_put(struct sk_buff *nskb, const struct sk_buff *oldskb,
/linux-4.4.14/include/net/netfilter/ipv6/
nf_reject.h
14 struct ipv6hdr *nf_reject_ip6hdr_put(struct sk_buff *nskb,
17 void nf_reject_ip6_tcphdr_put(struct sk_buff *nskb,
/linux-4.4.14/drivers/net/ipvlan/
ipvlan_core.c
195 struct sk_buff *skb, *nskb; in ipvlan_process_multicast() local
231 nskb = skb_clone(skb, GFP_ATOMIC); in ipvlan_process_multicast()
232 if (!nskb) in ipvlan_process_multicast()
235 nskb->pkt_type = pkt_type; in ipvlan_process_multicast()
236 nskb->dev = ipvlan->dev; in ipvlan_process_multicast()
238 ret = dev_forward_skb(ipvlan->dev, nskb); in ipvlan_process_multicast()
240 ret = netif_rx(nskb); in ipvlan_process_multicast()
611 struct sk_buff *nskb = skb_clone(skb, GFP_ATOMIC); in ipvlan_handle_mode_l2() local
619 if (nskb) in ipvlan_handle_mode_l2()
620 ipvlan_multicast_enqueue(port, nskb); in ipvlan_handle_mode_l2()
/linux-4.4.14/net/core/
skbuff.c
3034 struct sk_buff *nskb; in skb_segment() local
3071 nskb = skb_clone(list_skb, GFP_ATOMIC); in skb_segment()
3074 if (unlikely(!nskb)) in skb_segment()
3077 if (unlikely(pskb_trim(nskb, len))) { in skb_segment()
3078 kfree_skb(nskb); in skb_segment()
3082 hsize = skb_end_offset(nskb); in skb_segment()
3083 if (skb_cow_head(nskb, doffset + headroom)) { in skb_segment()
3084 kfree_skb(nskb); in skb_segment()
3088 nskb->truesize += skb_end_offset(nskb) - hsize; in skb_segment()
3089 skb_release_head_state(nskb); in skb_segment()
[all …]
datagram.c
136 struct sk_buff *nskb; in skb_set_peeked() local
145 nskb = skb_clone(skb, GFP_ATOMIC); in skb_set_peeked()
146 if (!nskb) in skb_set_peeked()
149 skb->prev->next = nskb; in skb_set_peeked()
150 skb->next->prev = nskb; in skb_set_peeked()
151 nskb->prev = skb->prev; in skb_set_peeked()
152 nskb->next = skb->next; in skb_set_peeked()
155 skb = nskb; in skb_set_peeked()
dev.c
4257 struct sk_buff *nskb = *pp; in dev_gro_receive() local
4259 *pp = nskb->next; in dev_gro_receive()
4260 nskb->next = NULL; in dev_gro_receive()
4261 napi_gro_complete(nskb); in dev_gro_receive()
4272 struct sk_buff *nskb = napi->gro_list; in dev_gro_receive() local
4275 while (nskb->next) { in dev_gro_receive()
4276 pp = &nskb->next; in dev_gro_receive()
4277 nskb = *pp; in dev_gro_receive()
4280 nskb->next = NULL; in dev_gro_receive()
4281 napi_gro_complete(nskb); in dev_gro_receive()
rtnetlink.c
2417 struct sk_buff *nskb; in rtnl_getlink() local
2442 nskb = nlmsg_new(if_nlmsg_size(dev, ext_filter_mask), GFP_KERNEL); in rtnl_getlink()
2443 if (nskb == NULL) in rtnl_getlink()
2446 err = rtnl_fill_ifinfo(nskb, dev, RTM_NEWLINK, NETLINK_CB(skb).portid, in rtnl_getlink()
2451 kfree_skb(nskb); in rtnl_getlink()
2453 err = rtnl_unicast(nskb, net, NETLINK_CB(skb).portid); in rtnl_getlink()
/linux-4.4.14/drivers/net/irda/
stir4200.c
319 struct sk_buff *skb, *nskb; in fir_eof() local
341 nskb = dev_alloc_skb(len + 1); in fir_eof()
342 if (unlikely(!nskb)) { in fir_eof()
346 skb_reserve(nskb, 1); in fir_eof()
347 skb = nskb; in fir_eof()
348 skb_copy_to_linear_data(nskb, rx_buff->data, len); in fir_eof()
350 nskb = dev_alloc_skb(rx_buff->truesize); in fir_eof()
351 if (unlikely(!nskb)) { in fir_eof()
355 skb_reserve(nskb, 1); in fir_eof()
357 rx_buff->skb = nskb; in fir_eof()
[all …]
/linux-4.4.14/include/net/
dst.h
298 static inline void __skb_dst_copy(struct sk_buff *nskb, unsigned long refdst) in __skb_dst_copy() argument
300 nskb->_skb_refdst = refdst; in __skb_dst_copy()
301 if (!(nskb->_skb_refdst & SKB_DST_NOREF)) in __skb_dst_copy()
302 dst_clone(skb_dst(nskb)); in __skb_dst_copy()
305 static inline void skb_dst_copy(struct sk_buff *nskb, const struct sk_buff *oskb) in skb_dst_copy() argument
307 __skb_dst_copy(nskb, oskb->_skb_refdst); in skb_dst_copy()
/linux-4.4.14/net/ipv4/
tcp_output.c
1872 struct sk_buff *skb, *nskb, *next; in tcp_mtu_probe() local
1932 nskb = sk_stream_alloc_skb(sk, probe_size, GFP_ATOMIC, false); in tcp_mtu_probe()
1933 if (!nskb) in tcp_mtu_probe()
1935 sk->sk_wmem_queued += nskb->truesize; in tcp_mtu_probe()
1936 sk_mem_charge(sk, nskb->truesize); in tcp_mtu_probe()
1940 TCP_SKB_CB(nskb)->seq = TCP_SKB_CB(skb)->seq; in tcp_mtu_probe()
1941 TCP_SKB_CB(nskb)->end_seq = TCP_SKB_CB(skb)->seq + probe_size; in tcp_mtu_probe()
1942 TCP_SKB_CB(nskb)->tcp_flags = TCPHDR_ACK; in tcp_mtu_probe()
1943 TCP_SKB_CB(nskb)->sacked = 0; in tcp_mtu_probe()
1944 nskb->csum = 0; in tcp_mtu_probe()
[all …]
ip_output.c
253 struct sk_buff *nskb = segs->next; in ip_finish_output_gso() local
261 segs = nskb; in ip_finish_output_gso()
1550 struct sk_buff *nskb; in ip_send_unicast_reply() local
1599 nskb = skb_peek(&sk->sk_write_queue); in ip_send_unicast_reply()
1600 if (nskb) { in ip_send_unicast_reply()
1602 *((__sum16 *)skb_transport_header(nskb) + in ip_send_unicast_reply()
1603 arg->csumoffset) = csum_fold(csum_add(nskb->csum, in ip_send_unicast_reply()
1605 nskb->ip_summed = CHECKSUM_NONE; in ip_send_unicast_reply()
tcp_input.c
4730 struct sk_buff *nskb; in tcp_collapse() local
4732 nskb = alloc_skb(copy, GFP_ATOMIC); in tcp_collapse()
4733 if (!nskb) in tcp_collapse()
4736 memcpy(nskb->cb, skb->cb, sizeof(skb->cb)); in tcp_collapse()
4737 TCP_SKB_CB(nskb)->seq = TCP_SKB_CB(nskb)->end_seq = start; in tcp_collapse()
4738 __skb_queue_before(list, skb, nskb); in tcp_collapse()
4739 skb_set_owner_r(nskb, sk); in tcp_collapse()
4749 if (skb_copy_bits(skb, offset, skb_put(nskb, size), size)) in tcp_collapse()
4751 TCP_SKB_CB(nskb)->end_seq += size; in tcp_collapse()
/linux-4.4.14/drivers/net/ethernet/sun/
sunvnet.c
1047 struct sk_buff *nskb; in vnet_skb_shape() local
1073 nskb = alloc_and_align_skb(skb->dev, len); in vnet_skb_shape()
1074 if (nskb == NULL) { in vnet_skb_shape()
1078 skb_reserve(nskb, VNET_PACKET_SKIP); in vnet_skb_shape()
1080 nskb->protocol = skb->protocol; in vnet_skb_shape()
1082 skb_set_mac_header(nskb, offset); in vnet_skb_shape()
1084 skb_set_network_header(nskb, offset); in vnet_skb_shape()
1086 skb_set_transport_header(nskb, offset); in vnet_skb_shape()
1089 nskb->csum_offset = skb->csum_offset; in vnet_skb_shape()
1090 nskb->ip_summed = skb->ip_summed; in vnet_skb_shape()
[all …]
/linux-4.4.14/drivers/net/
macvlan.c
238 struct sk_buff *nskb; in macvlan_broadcast() local
256 nskb = skb_clone(skb, GFP_ATOMIC); in macvlan_broadcast()
257 if (likely(nskb)) in macvlan_broadcast()
259 nskb, vlan, eth, in macvlan_broadcast()
261 netif_rx_ni(nskb); in macvlan_broadcast()
315 struct sk_buff *nskb; in macvlan_broadcast_enqueue() local
318 nskb = skb_clone(skb, GFP_ATOMIC); in macvlan_broadcast_enqueue()
319 if (!nskb) in macvlan_broadcast_enqueue()
324 __skb_queue_tail(&port->bc_queue, nskb); in macvlan_broadcast_enqueue()
336 kfree_skb(nskb); in macvlan_broadcast_enqueue()
[all …]
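
macvlan_broadcast() and several of the other matches use the same fan-out idiom: clone the original skb once per receiver and hand each clone on, skipping a receiver only when the atomic clone fails. A simplified sketch under that assumption (the port list and its types are hypothetical, not the driver's structures):

#include <linux/skbuff.h>
#include <linux/netdevice.h>
#include <linux/list.h>

struct example_port {
    struct net_device *dev;
    struct list_head list;
};

static void broadcast_to_ports(struct sk_buff *skb, struct list_head *ports)
{
    struct example_port *p;

    list_for_each_entry(p, ports, list) {
        struct sk_buff *nskb = skb_clone(skb, GFP_ATOMIC);

        if (!nskb)
            continue;               /* drop for this receiver only */

        nskb->dev = p->dev;
        netif_rx_ni(nskb);          /* deliver the clone */
    }
}
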
virtio_net.c
387 struct sk_buff *nskb = alloc_skb(0, GFP_ATOMIC); in receive_mergeable() local
389 if (unlikely(!nskb)) in receive_mergeable()
392 skb_shinfo(curr_skb)->frag_list = nskb; in receive_mergeable()
394 curr_skb->next = nskb; in receive_mergeable()
395 curr_skb = nskb; in receive_mergeable()
396 head_skb->truesize += nskb->truesize; in receive_mergeable()
xen-netfront.c
878 struct sk_buff *nskb; in xennet_fill_frags() local
880 while ((nskb = __skb_dequeue(list))) { in xennet_fill_frags()
883 skb_frag_t *nfrag = &skb_shinfo(nskb)->frags[0]; in xennet_fill_frags()
896 skb_shinfo(nskb)->nr_frags = 0; in xennet_fill_frags()
897 kfree_skb(nskb); in xennet_fill_frags()
macvtap.c
378 struct sk_buff *nskb = segs->next; in macvtap_handle_frame() local
382 segs = nskb; in macvtap_handle_frame()
/linux-4.4.14/net/can/
gw.c
354 struct sk_buff *nskb; in can_can_gw_rcv() local
396 nskb = skb_copy(skb, GFP_ATOMIC); in can_can_gw_rcv()
398 nskb = skb_clone(skb, GFP_ATOMIC); in can_can_gw_rcv()
400 if (!nskb) { in can_can_gw_rcv()
406 cgw_hops(nskb) = cgw_hops(skb) + 1; in can_can_gw_rcv()
409 if (gwj->limit_hops && cgw_hops(nskb) == 1) in can_can_gw_rcv()
410 cgw_hops(nskb) = max_hops - gwj->limit_hops + 1; in can_can_gw_rcv()
412 nskb->dev = gwj->dst.dev; in can_can_gw_rcv()
415 cf = (struct can_frame *)nskb->data; in can_can_gw_rcv()
432 nskb->tstamp.tv64 = 0; in can_can_gw_rcv()
[all …]
/linux-4.4.14/net/nfc/
rawsock.c
373 struct sk_buff *skb_copy = NULL, *nskb; in nfc_send_to_raw_sock() local
393 nskb = skb_clone(skb_copy, GFP_ATOMIC); in nfc_send_to_raw_sock()
394 if (!nskb) in nfc_send_to_raw_sock()
397 if (sock_queue_rcv_skb(sk, nskb)) in nfc_send_to_raw_sock()
398 kfree_skb(nskb); in nfc_send_to_raw_sock()
llcp_core.c
673 struct sk_buff *skb_copy = NULL, *nskb; in nfc_llcp_send_to_raw_sock() local
697 nskb = skb_clone(skb_copy, GFP_ATOMIC); in nfc_llcp_send_to_raw_sock()
698 if (!nskb) in nfc_llcp_send_to_raw_sock()
701 if (sock_queue_rcv_skb(sk, nskb)) in nfc_llcp_send_to_raw_sock()
702 kfree_skb(nskb); in nfc_llcp_send_to_raw_sock()
/linux-4.4.14/net/xfrm/
xfrm_output.c
180 struct sk_buff *nskb = segs->next; in xfrm_output_gso() local
187 kfree_skb_list(nskb); in xfrm_output_gso()
191 segs = nskb; in xfrm_output_gso()
/linux-4.4.14/net/iucv/
af_iucv.c
320 struct sk_buff *nskb; in afiucv_hs_send() local
366 nskb = skb_clone(skb, GFP_ATOMIC); in afiucv_hs_send()
367 if (!nskb) in afiucv_hs_send()
369 skb_queue_tail(&iucv->send_skb_q, nskb); in afiucv_hs_send()
372 skb_unlink(nskb, &iucv->send_skb_q); in afiucv_hs_send()
373 kfree_skb(nskb); in afiucv_hs_send()
1211 struct sk_buff *nskb; in iucv_fragment_skb() local
1220 nskb = alloc_skb(size, GFP_ATOMIC | GFP_DMA); in iucv_fragment_skb()
1221 if (!nskb) in iucv_fragment_skb()
1225 IUCV_SKB_CB(nskb)->class = IUCV_SKB_CB(skb)->class; in iucv_fragment_skb()
[all …]
/linux-4.4.14/net/netfilter/
nfnetlink_queue.c
558 struct sk_buff *nskb; in __nfqnl_enqueue_packet() local
563 nskb = nfqnl_build_packet_message(net, queue, entry, &packet_id_ptr); in __nfqnl_enqueue_packet()
564 if (nskb == NULL) { in __nfqnl_enqueue_packet()
585 err = nfnetlink_unicast(nskb, net, queue->peer_portid, MSG_DONTWAIT); in __nfqnl_enqueue_packet()
597 kfree_skb(nskb); in __nfqnl_enqueue_packet()
715 struct sk_buff *nskb = segs->next; in nfqnl_enqueue_packet() local
723 segs = nskb; in nfqnl_enqueue_packet()
740 struct sk_buff *nskb; in nfqnl_mangle() local
749 nskb = skb_copy_expand(e->skb, skb_headroom(e->skb), in nfqnl_mangle()
751 if (!nskb) { in nfqnl_mangle()
[all …]
nf_conntrack_core.c
1353 static void nf_conntrack_attach(struct sk_buff *nskb, const struct sk_buff *skb) in nf_conntrack_attach() argument
1366 nskb->nfct = &ct->ct_general; in nf_conntrack_attach()
1367 nskb->nfctinfo = ctinfo; in nf_conntrack_attach()
1368 nf_conntrack_get(nskb->nfct); in nf_conntrack_attach()
/linux-4.4.14/drivers/net/xen-netback/
netback.c
866 struct sk_buff *nskb) in xenvif_get_requests() argument
890 shinfo = skb_shinfo(nskb); in xenvif_get_requests()
902 skb_shinfo(skb)->frag_list = nskb; in xenvif_get_requests()
1290 struct sk_buff *skb, *nskb; in xenvif_tx_build_gops() local
1412 nskb = NULL; in xenvif_tx_build_gops()
1417 nskb = xenvif_alloc_skb(0); in xenvif_tx_build_gops()
1418 if (unlikely(nskb == NULL)) { in xenvif_tx_build_gops()
1435 kfree_skb(nskb); in xenvif_tx_build_gops()
1473 frag_overflow, nskb); in xenvif_tx_build_gops()
1497 struct sk_buff *nskb = skb_shinfo(skb)->frag_list; in xenvif_handle_frag_list() local
[all …]
/linux-4.4.14/drivers/isdn/capi/
capi.c
385 struct sk_buff *nskb; in gen_data_b3_resp_for() local
386 nskb = alloc_skb(CAPI_DATA_B3_RESP_LEN, GFP_KERNEL); in gen_data_b3_resp_for()
387 if (nskb) { in gen_data_b3_resp_for()
389 unsigned char *s = skb_put(nskb, CAPI_DATA_B3_RESP_LEN); in gen_data_b3_resp_for()
398 return nskb; in gen_data_b3_resp_for()
405 struct sk_buff *nskb; in handle_recv_skb() local
439 nskb = gen_data_b3_resp_for(mp, skb); in handle_recv_skb()
440 if (!nskb) { in handle_recv_skb()
447 errcode = capi20_put_message(mp->ap, nskb); in handle_recv_skb()
457 kfree_skb(nskb); in handle_recv_skb()
capidrv.c
2112 struct sk_buff *nskb = skb_realloc_headroom(skb, msglen); in if_sendbuf() local
2113 if (!nskb) { in if_sendbuf()
2121 memcpy(skb_push(nskb, msglen), sendcmsg.buf, msglen); in if_sendbuf()
2122 errcode = capi20_put_message(&global.ap, nskb); in if_sendbuf()
2132 dev_kfree_skb(nskb); in if_sendbuf()
/linux-4.4.14/net/bluetooth/
hci_sock.c
163 struct sk_buff *nskb; in hci_send_to_sock() local
202 nskb = skb_clone(skb_copy, GFP_ATOMIC); in hci_send_to_sock()
203 if (!nskb) in hci_send_to_sock()
206 if (sock_queue_rcv_skb(sk, nskb)) in hci_send_to_sock()
207 kfree_skb(nskb); in hci_send_to_sock()
226 struct sk_buff *nskb; in hci_send_to_channel() local
242 nskb = skb_clone(skb, GFP_ATOMIC); in hci_send_to_channel()
243 if (!nskb) in hci_send_to_channel()
246 if (sock_queue_rcv_skb(sk, nskb)) in hci_send_to_channel()
247 kfree_skb(nskb); in hci_send_to_channel()
l2cap_core.c
2843 struct sk_buff *nskb; in l2cap_raw_recv() local
2858 nskb = skb_clone(skb, GFP_KERNEL); in l2cap_raw_recv()
2859 if (!nskb) in l2cap_raw_recv()
2861 if (chan->ops->recv(chan, nskb)) in l2cap_raw_recv()
2862 kfree_skb(nskb); in l2cap_raw_recv()
/linux-4.4.14/drivers/s390/net/
ctcm_main.c
471 struct sk_buff *nskb; in ctcm_transmit_skb() local
518 nskb = alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA); in ctcm_transmit_skb()
519 if (!nskb) { in ctcm_transmit_skb()
525 memcpy(skb_put(nskb, skb->len), skb->data, skb->len); in ctcm_transmit_skb()
526 atomic_inc(&nskb->users); in ctcm_transmit_skb()
529 skb = nskb; in ctcm_transmit_skb()
670 struct sk_buff *nskb; in ctcmpc_transmit_skb() local
727 nskb = __dev_alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA); in ctcmpc_transmit_skb()
728 if (!nskb) { in ctcmpc_transmit_skb()
731 memcpy(skb_put(nskb, skb->len), skb->data, skb->len); in ctcmpc_transmit_skb()
[all …]
netiucv.c
1188 struct sk_buff *nskb = skb; in netiucv_transmit_skb() local
1197 nskb = alloc_skb(skb->len + NETIUCV_HDRLEN + in netiucv_transmit_skb()
1199 if (!nskb) { in netiucv_transmit_skb()
1204 skb_reserve(nskb, NETIUCV_HDRLEN); in netiucv_transmit_skb()
1205 memcpy(skb_put(nskb, skb->len), in netiucv_transmit_skb()
1213 header.next = nskb->len + NETIUCV_HDRLEN; in netiucv_transmit_skb()
1214 memcpy(skb_push(nskb, NETIUCV_HDRLEN), &header, NETIUCV_HDRLEN); in netiucv_transmit_skb()
1216 memcpy(skb_put(nskb, NETIUCV_HDRLEN), &header, NETIUCV_HDRLEN); in netiucv_transmit_skb()
1224 nskb->data, nskb->len); in netiucv_transmit_skb()
1238 dev_kfree_skb(nskb); in netiucv_transmit_skb()
[all …]
/linux-4.4.14/net/sched/
sch_generic.c
66 struct sk_buff *nskb = q->dequeue(q); in try_bulk_dequeue_skb() local
68 if (!nskb) in try_bulk_dequeue_skb()
71 bytelimit -= nskb->len; /* covers GSO len */ in try_bulk_dequeue_skb()
72 skb->next = nskb; in try_bulk_dequeue_skb()
73 skb = nskb; in try_bulk_dequeue_skb()
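
try_bulk_dequeue_skb() links successively dequeued packets through skb->next until a byte budget is exhausted, so the whole chain can be handed to the driver in one batch. A sketch of that chaining loop with the qdisc abstracted behind a dequeue callback (both the callback type and the helper are hypothetical):

#include <linux/skbuff.h>

typedef struct sk_buff *(*dequeue_fn)(void *queue);

static struct sk_buff *bulk_dequeue(dequeue_fn dequeue, void *queue,
                                    struct sk_buff *skb, int bytelimit)
{
    struct sk_buff *head = skb;

    while (bytelimit > 0) {
        struct sk_buff *nskb = dequeue(queue);

        if (!nskb)
            break;

        bytelimit -= nskb->len;     /* covers GSO length */
        skb->next = nskb;           /* append to the chain */
        skb = nskb;
    }
    skb->next = NULL;               /* terminate the batch */
    return head;
}
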
sch_tbf.c
161 struct sk_buff *segs, *nskb; in tbf_segment() local
173 nskb = segs->next; in tbf_segment()
184 segs = nskb; in tbf_segment()
sch_choke.c
249 struct sk_buff *nskb, in choke_match_random() argument
259 return choke_get_classid(nskb) == choke_get_classid(oskb); in choke_match_random()
261 return choke_match_flow(oskb, nskb); in choke_match_random()
sch_netem.c
377 static void tfifo_enqueue(struct sk_buff *nskb, struct Qdisc *sch) in tfifo_enqueue() argument
380 psched_time_t tnext = netem_skb_cb(nskb)->time_to_send; in tfifo_enqueue()
393 rb_link_node(&nskb->rbnode, parent, p); in tfifo_enqueue()
394 rb_insert_color(&nskb->rbnode, &q->t_root); in tfifo_enqueue()
/linux-4.4.14/net/openvswitch/
datapath.c
338 struct sk_buff *segs, *nskb; in queue_gso_packets() local
372 nskb = skb->next; in queue_gso_packets()
377 } while ((skb = nskb)); in queue_gso_packets()
422 struct sk_buff *nskb = NULL; in queue_userspace_packet() local
438 nskb = skb_clone(skb, GFP_ATOMIC); in queue_userspace_packet()
439 if (!nskb) in queue_userspace_packet()
442 nskb = __vlan_hwaccel_push_inside(nskb); in queue_userspace_packet()
443 if (!nskb) in queue_userspace_packet()
446 skb = nskb; in queue_userspace_packet()
539 kfree_skb(nskb); in queue_userspace_packet()
/linux-4.4.14/drivers/scsi/fcoe/
fcoe_transport.c
337 struct sk_buff *nskb; in fcoe_start_io() local
340 nskb = skb_clone(skb, GFP_ATOMIC); in fcoe_start_io()
341 if (!nskb) in fcoe_start_io()
343 rc = dev_queue_xmit(nskb); in fcoe_start_io()
/linux-4.4.14/drivers/isdn/hisax/
isdnl1.c
206 struct sk_buff *skb, *nskb; in DChannel_proc_rcv() local
238 if ((nskb = skb_clone(skb, GFP_ATOMIC))) in DChannel_proc_rcv()
239 stptr->l1.l1l2(stptr, PH_DATA | INDICATION, nskb); in DChannel_proc_rcv()
246 if ((nskb = skb_clone(skb, GFP_ATOMIC))) in DChannel_proc_rcv()
247 stptr->l1.l1tei(stptr, PH_DATA | INDICATION, nskb); in DChannel_proc_rcv()
callc.c
1746 struct sk_buff *nskb; in HiSax_writebuf_skb() local
1775 nskb = skb_clone(skb, GFP_ATOMIC); in HiSax_writebuf_skb()
1776 if (nskb) { in HiSax_writebuf_skb()
1777 nskb->truesize = nskb->len; in HiSax_writebuf_skb()
1779 nskb->pkt_type = PACKET_NOACK; in HiSax_writebuf_skb()
1781 st->l3.l3l2(st, DL_DATA | REQUEST, nskb); in HiSax_writebuf_skb()
1784 st->l2.l2l1(st, PH_DATA | REQUEST, nskb); in HiSax_writebuf_skb()
isdnl2.c
1250 struct sk_buff *skb, *nskb; in l2_pull_iqueue() local
1265 nskb = skb_realloc_headroom(skb, hdr_space_needed); in l2_pull_iqueue()
1266 if (!nskb) { in l2_pull_iqueue()
1294 memcpy(skb_push(nskb, i), header, i); in l2_pull_iqueue()
1295 st->l2.l2l1(st, PH_PULL | INDICATION, nskb); in l2_pull_iqueue()
/linux-4.4.14/net/netlink/
af_netlink.c
222 struct sk_buff *nskb; in __netlink_deliver_tap_skb() local
229 nskb = netlink_to_full_skb(skb, GFP_ATOMIC); in __netlink_deliver_tap_skb()
231 nskb = skb_clone(skb, GFP_ATOMIC); in __netlink_deliver_tap_skb()
232 if (nskb) { in __netlink_deliver_tap_skb()
233 nskb->dev = dev; in __netlink_deliver_tap_skb()
234 nskb->protocol = htons((u16) sk->sk_protocol); in __netlink_deliver_tap_skb()
235 nskb->pkt_type = netlink_is_kernel(sk) ? in __netlink_deliver_tap_skb()
237 skb_reset_network_header(nskb); in __netlink_deliver_tap_skb()
238 ret = dev_queue_xmit(nskb); in __netlink_deliver_tap_skb()
1809 struct sk_buff *nskb = skb_clone(skb, allocation); in netlink_trim() local
[all …]
/linux-4.4.14/drivers/isdn/isdnloop/
isdnloop.c
404 struct sk_buff *nskb; in isdnloop_sendbuf() local
417 nskb = dev_alloc_skb(skb->len); in isdnloop_sendbuf()
418 if (nskb) { in isdnloop_sendbuf()
420 skb_put(nskb, len), len); in isdnloop_sendbuf()
421 skb_queue_tail(&card->bqueue[channel], nskb); in isdnloop_sendbuf()
/linux-4.4.14/drivers/net/wireless/
mac80211_hwsim.c
1177 struct sk_buff *nskb; in mac80211_hwsim_tx_frame_no_nl() local
1212 nskb = dev_alloc_skb(128); in mac80211_hwsim_tx_frame_no_nl()
1213 if (!nskb) { in mac80211_hwsim_tx_frame_no_nl()
1219 skb_add_rx_frag(nskb, 0, page, 0, skb->len, skb->len); in mac80211_hwsim_tx_frame_no_nl()
1221 nskb = skb_copy(skb, GFP_ATOMIC); in mac80211_hwsim_tx_frame_no_nl()
1222 if (!nskb) in mac80211_hwsim_tx_frame_no_nl()
1231 memcpy(IEEE80211_SKB_RXCB(nskb), &rx_status, sizeof(rx_status)); in mac80211_hwsim_tx_frame_no_nl()
1233 mac80211_hwsim_add_vendor_rtap(nskb); in mac80211_hwsim_tx_frame_no_nl()
1236 data2->rx_bytes += nskb->len; in mac80211_hwsim_tx_frame_no_nl()
1237 ieee80211_rx_irqsafe(data2->hw, nskb); in mac80211_hwsim_tx_frame_no_nl()
/linux-4.4.14/drivers/net/ethernet/broadcom/
bcm63xx_enet.c
386 struct sk_buff *nskb; in bcm_enet_receive_queue() local
388 nskb = napi_alloc_skb(&priv->napi, len); in bcm_enet_receive_queue()
389 if (!nskb) { in bcm_enet_receive_queue()
397 memcpy(nskb->data, skb->data, len); in bcm_enet_receive_queue()
400 skb = nskb; in bcm_enet_receive_queue()
602 struct sk_buff *nskb; in bcm_enet_start_xmit() local
604 nskb = skb_copy_expand(skb, 0, needed, GFP_ATOMIC); in bcm_enet_start_xmit()
605 if (!nskb) { in bcm_enet_start_xmit()
610 skb = nskb; in bcm_enet_start_xmit()
bcmsysport.c
953 struct sk_buff *nskb; in bcm_sysport_insert_tsb() local
962 nskb = skb_realloc_headroom(skb, sizeof(*tsb)); in bcm_sysport_insert_tsb()
964 if (!nskb) { in bcm_sysport_insert_tsb()
969 skb = nskb; in bcm_sysport_insert_tsb()
tg3.c
7852 struct sk_buff *segs, *nskb; in tg3_tso_bug() local
7877 nskb = segs; in tg3_tso_bug()
7879 nskb->next = NULL; in tg3_tso_bug()
7880 tg3_start_xmit(nskb, tp->dev); in tg3_tso_bug()
/linux-4.4.14/drivers/isdn/icn/
icn.c
709 struct sk_buff *nskb; in icn_sendbuf() local
722 nskb = skb_clone(skb, GFP_ATOMIC); in icn_sendbuf()
723 if (nskb) { in icn_sendbuf()
727 *(skb_push(nskb, 1)) = ack ? 1 : 0; in icn_sendbuf()
728 skb_queue_tail(&card->spqueue[channel], nskb); in icn_sendbuf()
/linux-4.4.14/net/mac80211/
rx.c
2950 struct sk_buff *nskb; in ieee80211_rx_h_action_return() local
2979 nskb = skb_copy_expand(rx->skb, local->hw.extra_tx_headroom, 0, in ieee80211_rx_h_action_return()
2981 if (nskb) { in ieee80211_rx_h_action_return()
2982 struct ieee80211_mgmt *nmgmt = (void *)nskb->data; in ieee80211_rx_h_action_return()
2988 memset(nskb->cb, 0, sizeof(nskb->cb)); in ieee80211_rx_h_action_return()
2991 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(nskb); in ieee80211_rx_h_action_return()
3001 __ieee80211_tx_skb_tid_band(rx->sdata, nskb, 7, in ieee80211_rx_h_action_return()
/linux-4.4.14/drivers/net/wireless/ath/ath6kl/
txrx.c
494 struct sk_buff *nskb; in ath6kl_data_tx() local
496 nskb = skb_copy_expand(skb, HTC_HDR_LENGTH, 0, GFP_ATOMIC); in ath6kl_data_tx()
497 if (nskb == NULL) in ath6kl_data_tx()
500 skb = nskb; in ath6kl_data_tx()
/linux-4.4.14/net/appletalk/
ddp.c
1380 struct sk_buff *nskb = skb_realloc_headroom(skb, 32); in atalk_route_packet() local
1382 skb = nskb; in atalk_route_packet()
/linux-4.4.14/drivers/net/ethernet/marvell/
skge.c
3090 struct sk_buff *nskb; in skge_rx_get() local
3092 nskb = netdev_alloc_skb_ip_align(dev, skge->rx_buf_size); in skge_rx_get()
3093 if (!nskb) in skge_rx_get()
3101 if (skge_rx_setup(skge, e, nskb, skge->rx_buf_size) < 0) { in skge_rx_get()
3102 dev_kfree_skb(nskb); in skge_rx_get()
/linux-4.4.14/drivers/net/usb/
r8152.c
1388 struct sk_buff *segs, *nskb; in r8152_csum_workaround() local
1398 nskb = segs; in r8152_csum_workaround()
1400 nskb->next = NULL; in r8152_csum_workaround()
1401 __skb_queue_tail(&seg_list, nskb); in r8152_csum_workaround()
/linux-4.4.14/net/packet/
af_packet.c
2028 struct sk_buff *nskb = skb_clone(skb, GFP_ATOMIC); in packet_rcv() local
2029 if (nskb == NULL) in packet_rcv()
2037 skb = nskb; in packet_rcv()
/linux-4.4.14/drivers/net/ethernet/realtek/
r8169.c
6943 struct sk_buff *segs, *nskb; in r8169_csum_workaround() local
6951 nskb = segs; in r8169_csum_workaround()
6953 nskb->next = NULL; in r8169_csum_workaround()
6954 rtl8169_start_xmit(nskb, tp->dev); in r8169_csum_workaround()
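
tg3_tso_bug(), r8152_csum_workaround() and r8169_csum_workaround() all fall back to software GSO: the oversized skb is segmented in the stack and each resulting segment (nskb walks the list) is pushed through the normal transmit path, after which the original is freed. A sketch of that fallback, assuming a hypothetical xmit_one() callback standing in for the driver's start_xmit and with error handling reduced to dropping:

#include <linux/skbuff.h>
#include <linux/netdevice.h>
#include <linux/err.h>

static void xmit_segments(struct sk_buff *skb, struct net_device *dev,
                          netdev_tx_t (*xmit_one)(struct sk_buff *,
                                                  struct net_device *))
{
    struct sk_buff *segs;

    segs = skb_gso_segment(skb, dev->features & ~(NETIF_F_TSO | NETIF_F_TSO6));
    if (IS_ERR_OR_NULL(segs)) {
        dev_kfree_skb_any(skb);     /* could not segment: drop */
        return;
    }

    do {
        struct sk_buff *nskb = segs->next;

        segs->next = NULL;
        xmit_one(segs, dev);        /* transmit this segment */
        segs = nskb;
    } while (segs);

    dev_kfree_skb_any(skb);         /* original GSO skb is no longer needed */
}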