skb_out           415 drivers/infiniband/sw/rxe/rxe_net.c 	int skb_out = atomic_dec_return(&qp->skb_out);
skb_out           418 drivers/infiniband/sw/rxe/rxe_net.c 		     skb_out < RXE_INFLIGHT_SKBS_PER_QP_LOW))
skb_out           432 drivers/infiniband/sw/rxe/rxe_net.c 	atomic_inc(&pkt->qp->skb_out);
skb_out           440 drivers/infiniband/sw/rxe/rxe_net.c 		atomic_dec(&pkt->qp->skb_out);
skb_out           216 drivers/infiniband/sw/rxe/rxe_qp.c 	atomic_set(&qp->skb_out, 0);
skb_out           673 drivers/infiniband/sw/rxe/rxe_req.c 	if (unlikely(atomic_read(&qp->skb_out) >
skb_out           273 drivers/infiniband/sw/rxe/rxe_verbs.h 	atomic_t		skb_out;
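The rxe hits above trace one pattern: a per-QP count of in-flight skbs, zeroed at QP init (rxe_qp.c:216), incremented when a packet is handed to the stack (rxe_net.c:432) and rolled back if the send fails (:440), read against a high watermark before posting more requests (rxe_req.c:673), and decremented in the skb destructor, where crossing a low watermark is the cue to restart a stalled sender (:415-418). A minimal sketch of the same throttle; my_qp, MY_INFLIGHT_* and my_xmit() are made-up stand-ins for the rxe names:

#include <linux/atomic.h>
#include <linux/skbuff.h>

#define MY_INFLIGHT_HIGH	64	/* stop posting new sends above this */
#define MY_INFLIGHT_LOW		32	/* restart the sender below this */

struct my_qp {
	atomic_t skb_out;		/* skbs sent but not yet freed */
};

/* Producer check, cf. rxe_req.c:673: back off while too much is queued. */
static bool my_qp_may_send(struct my_qp *qp)
{
	return atomic_read(&qp->skb_out) <= MY_INFLIGHT_HIGH;
}

/* Destructor, cf. rxe_net.c:415-418: the low-watermark crossing is the
 * point to re-arm a sender that my_qp_may_send() had stalled. */
static void my_skb_destructor(struct sk_buff *skb)
{
	struct my_qp *qp = *(struct my_qp **)skb->cb;
	int skb_out = atomic_dec_return(&qp->skb_out);

	if (unlikely(skb_out < MY_INFLIGHT_LOW)) {
		/* kick the stalled send queue here */
	}
}

/* Placeholder transmit hook: always succeeds and consumes the skb, which
 * triggers the destructor above. A real hook would return nonzero only
 * WITHOUT having freed the skb; the rollback below pairs with that
 * contract, mirroring rxe_net.c:440. */
static int my_xmit(struct sk_buff *skb)
{
	kfree_skb(skb);
	return 0;
}

/* Send side, cf. rxe_net.c:432/440: count first, roll back on failure. */
static int my_send(struct my_qp *qp, struct sk_buff *skb)
{
	int err;

	*(struct my_qp **)skb->cb = qp;
	skb->destructor = my_skb_destructor;
	atomic_inc(&qp->skb_out);

	err = my_xmit(skb);
	if (err) {
		atomic_dec(&qp->skb_out);
		return err;
	}
	return 0;
}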
skb_out           217 drivers/net/ethernet/huawei/hinic/hinic_rx.c 			goto skb_out;
skb_out           226 drivers/net/ethernet/huawei/hinic/hinic_rx.c 			goto skb_out;
skb_out           234 drivers/net/ethernet/huawei/hinic/hinic_rx.c skb_out:
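In hinic_rx.c, by contrast, skb_out is not a variable but a cleanup label: failure checks in the rx path bail to one common exit instead of duplicating the unwind. A hedged sketch of that shape only; the loop body is a placeholder, not the hinic logic:

#include <linux/skbuff.h>

static int my_refill(int budget)
{
	int i;

	for (i = 0; i < budget; i++) {
		struct sk_buff *skb = alloc_skb(128, GFP_ATOMIC);

		if (!skb)
			goto skb_out;	/* common exit, as at :217/:226 */

		/* ... map the buffer and post it to the rx ring ... */
		kfree_skb(skb);		/* placeholder for the posting step */
	}
skb_out:
	return i;	/* how many buffers were actually refilled */
}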
skb_out           221 drivers/net/usb/cdc_mbim.c 	struct sk_buff *skb_out;
skb_out           292 drivers/net/usb/cdc_mbim.c 	skb_out = cdc_ncm_fill_tx_frame(dev, skb, sign);
skb_out           294 drivers/net/usb/cdc_mbim.c 	return skb_out;
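cdc_mbim's tx_fixup only chooses an NDP signature and delegates the NTB packing to cdc_ncm_fill_tx_frame() (the large block of cdc_ncm.c hits below). A reduced sketch using the usbnet tx_fixup prototype; the real driver also folds the MBIM session ID into the signature and handles DSS sessions, which is omitted here:

#include <linux/usb/cdc.h>
#include <linux/usb/cdc_ncm.h>
#include <linux/usb/usbnet.h>

static struct sk_buff *my_mbim_tx_fixup(struct usbnet *dev,
					struct sk_buff *skb, gfp_t flags)
{
	__le32 sign = cpu_to_le32(USB_CDC_MBIM_NDP16_IPS_SIGN);

	return cdc_ncm_fill_tx_frame(dev, skb, sign);
}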
skb_out          1119 drivers/net/usb/cdc_ncm.c 	struct sk_buff *skb_out;
skb_out          1142 drivers/net/usb/cdc_ncm.c 	skb_out = ctx->tx_curr_skb;
skb_out          1145 drivers/net/usb/cdc_ncm.c 	if (!skb_out) {
skb_out          1148 drivers/net/usb/cdc_ncm.c 			skb_out = alloc_skb(ctx->tx_curr_size, GFP_ATOMIC);
skb_out          1154 drivers/net/usb/cdc_ncm.c 			if (skb_out == NULL) {
skb_out          1160 drivers/net/usb/cdc_ncm.c 		if (skb_out == NULL) {
skb_out          1170 drivers/net/usb/cdc_ncm.c 			skb_out = alloc_skb(ctx->tx_curr_size, GFP_ATOMIC);
skb_out          1173 drivers/net/usb/cdc_ncm.c 			if (skb_out == NULL) {
skb_out          1183 drivers/net/usb/cdc_ncm.c 		nth16 = skb_put_zero(skb_out, sizeof(struct usb_cdc_ncm_nth16));
skb_out          1208 drivers/net/usb/cdc_ncm.c 		ndp16 = cdc_ncm_ndp(ctx, skb_out, sign, skb->len + ctx->tx_modulus + ctx->tx_remainder);
skb_out          1211 drivers/net/usb/cdc_ncm.c 		cdc_ncm_align_tail(skb_out,  ctx->tx_modulus, ctx->tx_remainder, ctx->tx_curr_size);
skb_out          1214 drivers/net/usb/cdc_ncm.c 		if (!ndp16 || skb_out->len + skb->len + delayed_ndp_size > ctx->tx_curr_size) {
skb_out          1241 drivers/net/usb/cdc_ncm.c 		ndp16->dpe16[index].wDatagramIndex = cpu_to_le16(skb_out->len);
skb_out          1243 drivers/net/usb/cdc_ncm.c 		skb_put_data(skb_out, skb->data, skb->len);
skb_out          1268 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_skb = skb_out;
skb_out          1274 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_skb = skb_out;
skb_out          1289 drivers/net/usb/cdc_ncm.c 		nth16 = (struct usb_cdc_ncm_nth16 *)skb_out->data;
skb_out          1290 drivers/net/usb/cdc_ncm.c 		cdc_ncm_align_tail(skb_out, ctx->tx_ndp_modulus, 0, ctx->tx_curr_size - ctx->max_ndp_size);
skb_out          1291 drivers/net/usb/cdc_ncm.c 		nth16->wNdpIndex = cpu_to_le16(skb_out->len);
skb_out          1292 drivers/net/usb/cdc_ncm.c 		skb_put_data(skb_out, ctx->delayed_ndp16, ctx->max_ndp_size);
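The offsets written at :1241, :1291, and later :1318 all land in the NTB16 wire headers. Their UAPI definitions (include/uapi/linux/usb/cdc.h) are quoted below, with annotations tying each field back to the hits:

#include <linux/types.h>

struct usb_cdc_ncm_nth16 {		/* placed at offset 0, :1183 */
	__le32	dwSignature;
	__le16	wHeaderLength;
	__le16	wSequence;
	__le16	wBlockLength;		/* patched last, :1318 */
	__le16	wNdpIndex;		/* points at the NDP16, :1291 */
} __attribute__ ((packed));

struct usb_cdc_ncm_dpe16 {
	__le16	wDatagramIndex;		/* datagram offset in the NTB, :1241 */
	__le16	wDatagramLength;
} __attribute__ ((packed));

struct usb_cdc_ncm_ndp16 {
	__le32	dwSignature;		/* the sign argument, e.g. NOCRC, :1401 */
	__le16	wLength;
	__le16	wNextNdpIndex;
	struct	usb_cdc_ncm_dpe16 dpe16[];
} __attribute__ ((packed));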
skb_out          1308 drivers/net/usb/cdc_ncm.c 	    skb_out->len > ctx->min_tx_pkt) {
skb_out          1309 drivers/net/usb/cdc_ncm.c 		padding_count = ctx->tx_curr_size - skb_out->len;
skb_out          1310 drivers/net/usb/cdc_ncm.c 		skb_put_zero(skb_out, padding_count);
skb_out          1311 drivers/net/usb/cdc_ncm.c 	} else if (skb_out->len < ctx->tx_curr_size &&
skb_out          1312 drivers/net/usb/cdc_ncm.c 		   (skb_out->len % dev->maxpacket) == 0) {
skb_out          1313 drivers/net/usb/cdc_ncm.c 		skb_put_u8(skb_out, 0);	/* force short packet */
skb_out          1317 drivers/net/usb/cdc_ncm.c 	nth16 = (struct usb_cdc_ncm_nth16 *)skb_out->data;
skb_out          1318 drivers/net/usb/cdc_ncm.c 	nth16->wBlockLength = cpu_to_le16(skb_out->len);
skb_out          1324 drivers/net/usb/cdc_ncm.c 	ctx->tx_overhead += skb_out->len - ctx->tx_curr_frame_payload;
skb_out          1331 drivers/net/usb/cdc_ncm.c 	usbnet_set_skb_tx_stats(skb_out, n,
skb_out          1332 drivers/net/usb/cdc_ncm.c 				(long)ctx->tx_curr_frame_payload - skb_out->len);
skb_out          1334 drivers/net/usb/cdc_ncm.c 	return skb_out;
skb_out          1387 drivers/net/usb/cdc_ncm.c 	struct sk_buff *skb_out;
skb_out          1401 drivers/net/usb/cdc_ncm.c 	skb_out = cdc_ncm_fill_tx_frame(dev, skb, cpu_to_le32(USB_CDC_NCM_NDP16_NOCRC_SIGN));
skb_out          1403 drivers/net/usb/cdc_ncm.c 	return skb_out;
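The tail handling at :1308-1313 is the subtle part of the cdc_ncm cluster: a USB transfer whose length is an exact multiple of the endpoint's wMaxPacketSize would need a zero-length packet to terminate, so the frame is either padded out to the full NTB size or given one extra byte. A sketch of just that decision, assuming the skb was allocated tx_curr_size bytes (:1148/:1170) and ignoring the FLAG_SEND_ZLP test that gates the first branch in the real driver:

#include <linux/skbuff.h>
#include <linux/usb/cdc.h>

static void my_ncm_finish_ntb(struct sk_buff *skb_out,
			      unsigned int tx_curr_size,
			      unsigned int min_tx_pkt, unsigned int maxpacket)
{
	struct usb_cdc_ncm_nth16 *nth16;

	if (skb_out->len > min_tx_pkt) {
		/* pad out to the full negotiated NTB size, :1309-1310 */
		skb_put_zero(skb_out, tx_curr_size - skb_out->len);
	} else if (skb_out->len < tx_curr_size &&
		   (skb_out->len % maxpacket) == 0) {
		/* sending short: avoid an exact-multiple transfer, :1313 */
		skb_put_u8(skb_out, 0);
	}

	/* only now is the total block length known, :1317-1318 */
	nth16 = (struct usb_cdc_ncm_nth16 *)skb_out->data;
	nth16->wBlockLength = cpu_to_le16(skb_out->len);
}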
skb_out            95 drivers/staging/gdm724x/gdm_lte.c 	struct sk_buff *skb_out;
skb_out           154 drivers/staging/gdm724x/gdm_lte.c 	skb_out = dev_alloc_skb(skb_in->len);
skb_out           155 drivers/staging/gdm724x/gdm_lte.c 	if (!skb_out)
skb_out           157 drivers/staging/gdm724x/gdm_lte.c 	skb_reserve(skb_out, NET_IP_ALIGN);
skb_out           159 drivers/staging/gdm724x/gdm_lte.c 	skb_put_data(skb_out, mac_header_data, mac_header_len);
skb_out           160 drivers/staging/gdm724x/gdm_lte.c 	skb_put_data(skb_out, arp_out, sizeof(struct arphdr));
skb_out           161 drivers/staging/gdm724x/gdm_lte.c 	skb_put_data(skb_out, arp_data_out, sizeof(struct arpdata));
skb_out           163 drivers/staging/gdm724x/gdm_lte.c 	skb_out->protocol = ((struct ethhdr *)mac_header_data)->h_proto;
skb_out           164 drivers/staging/gdm724x/gdm_lte.c 	skb_out->dev = skb_in->dev;
skb_out           165 drivers/staging/gdm724x/gdm_lte.c 	skb_reset_mac_header(skb_out);
skb_out           166 drivers/staging/gdm724x/gdm_lte.c 	skb_pull(skb_out, ETH_HLEN);
skb_out           168 drivers/staging/gdm724x/gdm_lte.c 	gdm_lte_rx(skb_out, nic, nic_type);
skb_out           215 drivers/staging/gdm724x/gdm_lte.c 	struct sk_buff *skb_out;
skb_out           315 drivers/staging/gdm724x/gdm_lte.c 	skb_out = dev_alloc_skb(skb_in->len);
skb_out           316 drivers/staging/gdm724x/gdm_lte.c 	if (!skb_out)
skb_out           318 drivers/staging/gdm724x/gdm_lte.c 	skb_reserve(skb_out, NET_IP_ALIGN);
skb_out           320 drivers/staging/gdm724x/gdm_lte.c 	skb_put_data(skb_out, mac_header_data, mac_header_len);
skb_out           321 drivers/staging/gdm724x/gdm_lte.c 	skb_put_data(skb_out, &ipv6_out, sizeof(struct ipv6hdr));
skb_out           322 drivers/staging/gdm724x/gdm_lte.c 	skb_put_data(skb_out, &icmp6_out, sizeof(struct icmp6hdr));
skb_out           323 drivers/staging/gdm724x/gdm_lte.c 	skb_put_data(skb_out, &na, sizeof(struct neighbour_advertisement));
skb_out           325 drivers/staging/gdm724x/gdm_lte.c 	skb_out->protocol = ((struct ethhdr *)mac_header_data)->h_proto;
skb_out           326 drivers/staging/gdm724x/gdm_lte.c 	skb_out->dev = skb_in->dev;
skb_out           327 drivers/staging/gdm724x/gdm_lte.c 	skb_reset_mac_header(skb_out);
skb_out           328 drivers/staging/gdm724x/gdm_lte.c 	skb_pull(skb_out, ETH_HLEN);
skb_out           330 drivers/staging/gdm724x/gdm_lte.c 	gdm_lte_rx(skb_out, nic, nic_type);
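Both gdm_lte helpers (the ARP responder at :154-168 and the neighbour-advertisement responder at :315-330) share one recipe for injecting a locally built reply into the rx path. A condensed sketch; netif_rx() stands in for gdm_lte_rx(), and payload stands for the crafted ARP or NA body:

#include <linux/etherdevice.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

static int my_inject_reply(struct sk_buff *skb_in, const void *mac_header_data,
			   const void *payload, unsigned int payload_len)
{
	struct sk_buff *skb_out;

	skb_out = dev_alloc_skb(NET_IP_ALIGN + ETH_HLEN + payload_len);
	if (!skb_out)
		return -ENOMEM;
	skb_reserve(skb_out, NET_IP_ALIGN);	/* keep the IP header aligned */

	skb_put_data(skb_out, mac_header_data, ETH_HLEN);
	skb_put_data(skb_out, payload, payload_len);

	skb_out->protocol = ((const struct ethhdr *)mac_header_data)->h_proto;
	skb_out->dev = skb_in->dev;
	skb_reset_mac_header(skb_out);
	skb_pull(skb_out, ETH_HLEN);	/* data now points at the network header */

	netif_rx(skb_out);		/* gdm_lte_rx() in the real driver */
	return 0;
}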
skb_out           253 net/batman-adv/fragmentation.c 	struct sk_buff *skb_out;
skb_out           262 net/batman-adv/fragmentation.c 	skb_out = entry->skb;
skb_out           265 net/batman-adv/fragmentation.c 	packet = (struct batadv_frag_packet *)skb_out->data;
skb_out           269 net/batman-adv/fragmentation.c 	if (pskb_expand_head(skb_out, 0, size - skb_out->len, GFP_ATOMIC) < 0) {
skb_out           270 net/batman-adv/fragmentation.c 		kfree_skb(skb_out);
skb_out           271 net/batman-adv/fragmentation.c 		skb_out = NULL;
skb_out           279 net/batman-adv/fragmentation.c 	skb_pull(skb_out, hdr_size);
skb_out           280 net/batman-adv/fragmentation.c 	skb_out->ip_summed = CHECKSUM_NONE;
skb_out           281 net/batman-adv/fragmentation.c 	memmove(skb_out->data - ETH_HLEN, skb_mac_header(skb_out), ETH_HLEN);
skb_out           282 net/batman-adv/fragmentation.c 	skb_set_mac_header(skb_out, -ETH_HLEN);
skb_out           283 net/batman-adv/fragmentation.c 	skb_reset_network_header(skb_out);
skb_out           284 net/batman-adv/fragmentation.c 	skb_reset_transport_header(skb_out);
skb_out           289 net/batman-adv/fragmentation.c 		skb_put_data(skb_out, entry->skb->data + hdr_size, size);
skb_out           295 net/batman-adv/fragmentation.c 	return skb_out;
skb_out           315 net/batman-adv/fragmentation.c 	struct sk_buff *skb_out = NULL;
skb_out           327 net/batman-adv/fragmentation.c 	skb_out = batadv_frag_merge_packets(&head);
skb_out           328 net/batman-adv/fragmentation.c 	if (!skb_out)
skb_out           334 net/batman-adv/fragmentation.c 	*skb = skb_out;
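The batadv merge reuses the first fragment's skb as the output: grow its data area to the full packet size (:269), then append each later fragment's payload (:289). The header fix-up in between (:279-284) is the part worth spelling out; this sketch is essentially those lines, commented:

#include <linux/if_ether.h>
#include <linux/skbuff.h>
#include <linux/string.h>

static void my_fixup_merged_skb(struct sk_buff *skb_out, unsigned int hdr_size)
{
	/* drop the per-fragment header; checksum state is now meaningless */
	skb_pull(skb_out, hdr_size);
	skb_out->ip_summed = CHECKSUM_NONE;

	/* slide the saved Ethernet header forward so it again sits
	 * immediately in front of the (merged) payload */
	memmove(skb_out->data - ETH_HLEN, skb_mac_header(skb_out), ETH_HLEN);
	skb_set_mac_header(skb_out, -ETH_HLEN);
	skb_reset_network_header(skb_out);
	skb_reset_transport_header(skb_out);
}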
skb_out           238 net/hsr/hsr_netlink.c 	struct sk_buff *skb_out;
skb_out           269 net/hsr/hsr_netlink.c 	skb_out = genlmsg_new(NLMSG_GOODSIZE, GFP_ATOMIC);
skb_out           270 net/hsr/hsr_netlink.c 	if (!skb_out) {
skb_out           275 net/hsr/hsr_netlink.c 	msg_head = genlmsg_put(skb_out, NETLINK_CB(skb_in).portid,
skb_out           283 net/hsr/hsr_netlink.c 	res = nla_put_u32(skb_out, HSR_A_IFINDEX, hsr_dev->ifindex);
skb_out           300 net/hsr/hsr_netlink.c 	res = nla_put(skb_out, HSR_A_NODE_ADDR, ETH_ALEN,
skb_out           306 net/hsr/hsr_netlink.c 		res = nla_put(skb_out, HSR_A_NODE_ADDR_B, ETH_ALEN,
skb_out           311 net/hsr/hsr_netlink.c 		res = nla_put_u32(skb_out, HSR_A_ADDR_B_IFINDEX,
skb_out           317 net/hsr/hsr_netlink.c 	res = nla_put_u32(skb_out, HSR_A_IF1_AGE, hsr_node_if1_age);
skb_out           320 net/hsr/hsr_netlink.c 	res = nla_put_u16(skb_out, HSR_A_IF1_SEQ, hsr_node_if1_seq);
skb_out           325 net/hsr/hsr_netlink.c 		res = nla_put_u32(skb_out, HSR_A_IF1_IFINDEX,
skb_out           330 net/hsr/hsr_netlink.c 	res = nla_put_u32(skb_out, HSR_A_IF2_AGE, hsr_node_if2_age);
skb_out           333 net/hsr/hsr_netlink.c 	res = nla_put_u16(skb_out, HSR_A_IF2_SEQ, hsr_node_if2_seq);
skb_out           338 net/hsr/hsr_netlink.c 		res = nla_put_u32(skb_out, HSR_A_IF2_IFINDEX,
skb_out           345 net/hsr/hsr_netlink.c 	genlmsg_end(skb_out, msg_head);
skb_out           346 net/hsr/hsr_netlink.c 	genlmsg_unicast(genl_info_net(info), skb_out, info->snd_portid);
skb_out           357 net/hsr/hsr_netlink.c 	kfree_skb(skb_out);
skb_out           371 net/hsr/hsr_netlink.c 	struct sk_buff *skb_out;
skb_out           396 net/hsr/hsr_netlink.c 	skb_out = genlmsg_new(GENLMSG_DEFAULT_SIZE, GFP_ATOMIC);
skb_out           397 net/hsr/hsr_netlink.c 	if (!skb_out) {
skb_out           402 net/hsr/hsr_netlink.c 	msg_head = genlmsg_put(skb_out, NETLINK_CB(skb_in).portid,
skb_out           411 net/hsr/hsr_netlink.c 		res = nla_put_u32(skb_out, HSR_A_IFINDEX, hsr_dev->ifindex);
skb_out           421 net/hsr/hsr_netlink.c 		res = nla_put(skb_out, HSR_A_NODE_ADDR, ETH_ALEN, addr);
skb_out           424 net/hsr/hsr_netlink.c 				genlmsg_end(skb_out, msg_head);
skb_out           425 net/hsr/hsr_netlink.c 				genlmsg_unicast(genl_info_net(info), skb_out,
skb_out           436 net/hsr/hsr_netlink.c 	genlmsg_end(skb_out, msg_head);
skb_out           437 net/hsr/hsr_netlink.c 	genlmsg_unicast(genl_info_net(info), skb_out, info->snd_portid);
skb_out           448 net/hsr/hsr_netlink.c 	nlmsg_free(skb_out);
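The hsr_netlink hits are the canonical generic-netlink unicast reply: allocate (:269/:396), open the message (:275/:402), a run of nla_put*() attribute writes, close (:345/:436), send (:346/:437), and free the half-built message on any failure (:357/:448). The skeleton, with placeholder family/command/attribute names in place of the HSR_A_* constants:

#include <net/genetlink.h>

enum { MY_CMD_REPLY = 1 };			/* placeholder command */
enum { MY_ATTR_IFINDEX = 1 };			/* placeholder attribute */
static struct genl_family my_genl_family;	/* registered elsewhere */

static int my_genl_reply(struct sk_buff *skb_in, struct genl_info *info,
			 u32 ifindex)
{
	struct sk_buff *skb_out;
	void *msg_head;

	skb_out = genlmsg_new(NLMSG_GOODSIZE, GFP_ATOMIC);
	if (!skb_out)
		return -ENOMEM;

	msg_head = genlmsg_put(skb_out, NETLINK_CB(skb_in).portid,
			       info->snd_seq, &my_genl_family, 0, MY_CMD_REPLY);
	if (!msg_head)
		goto fail;

	if (nla_put_u32(skb_out, MY_ATTR_IFINDEX, ifindex))
		goto fail;

	genlmsg_end(skb_out, msg_head);
	return genlmsg_unicast(genl_info_net(info), skb_out, info->snd_portid);

fail:
	nlmsg_free(skb_out);	/* as hsr does at :448 (kfree_skb at :357) */
	return -EMSGSIZE;
}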
skb_out          2749 net/key/af_key.c 	struct sk_buff *skb_out;
skb_out          2752 net/key/af_key.c 	skb_out = alloc_skb(sizeof(struct sadb_msg) + 16, GFP_ATOMIC);
skb_out          2753 net/key/af_key.c 	if (!skb_out)
skb_out          2755 net/key/af_key.c 	hdr = skb_put(skb_out, sizeof(struct sadb_msg));
skb_out          2764 net/key/af_key.c 	pfkey_broadcast(skb_out, GFP_ATOMIC, BROADCAST_ALL, NULL, c->net);
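Finally, the af_key hits build the smallest possible PF_KEY message: a lone struct sadb_msg in a fresh skb, handed to the file-local pfkey_broadcast(). A sketch of the construction step; the 16 bytes of slack mirror the original allocation, and sadb_msg_len is counted in 64-bit words per RFC 2367:

#include <linux/pfkeyv2.h>
#include <linux/skbuff.h>

static struct sk_buff *my_build_pfkey_msg(u8 msg_type, u8 satype)
{
	struct sk_buff *skb_out;
	struct sadb_msg *hdr;

	skb_out = alloc_skb(sizeof(struct sadb_msg) + 16, GFP_ATOMIC);
	if (!skb_out)
		return NULL;

	hdr = skb_put_zero(skb_out, sizeof(struct sadb_msg));
	hdr->sadb_msg_version = PF_KEY_V2;
	hdr->sadb_msg_type = msg_type;	/* e.g. SADB_FLUSH */
	hdr->sadb_msg_satype = satype;
	hdr->sadb_msg_len = sizeof(struct sadb_msg) / sizeof(u64);
	return skb_out;
}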