Lines Matching refs:skb
31 static struct sk_buff *dccp_skb_entail(struct sock *sk, struct sk_buff *skb) in dccp_skb_entail() argument
33 skb_set_owner_w(skb, sk); in dccp_skb_entail()
35 sk->sk_send_head = skb; in dccp_skb_entail()
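
The two matches in dccp_skb_entail() are the "entail" step used by packets that must remain retransmittable (REQUEST, CLOSE/CLOSEREQ): the socket takes write-ownership of the buffer and keeps it as sk_send_head, and the caller transmits a clone. A minimal sketch of that pattern, assuming the usual <net/sock.h> helpers (not the verbatim kernel function):

	/* Entail pattern suggested by the matches above: charge the skb to the
	 * socket, remember the original for retransmission, send only a clone.
	 */
	static struct sk_buff *entail_and_clone(struct sock *sk, struct sk_buff *skb)
	{
		skb_set_owner_w(skb, sk);	/* charge skb to sk_wmem_alloc */
		sk->sk_send_head = skb;		/* keep original for retransmit */
		return skb_clone(sk->sk_send_head, gfp_any());
	}
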
45 static int dccp_transmit_skb(struct sock *sk, struct sk_buff *skb) in dccp_transmit_skb() argument
47 if (likely(skb != NULL)) { in dccp_transmit_skb()
51 struct dccp_skb_cb *dcb = DCCP_SKB_CB(skb); in dccp_transmit_skb()
91 WARN_ON(skb->sk); in dccp_transmit_skb()
92 skb_set_owner_w(skb, sk); in dccp_transmit_skb()
96 if (dccp_insert_options(sk, skb)) { in dccp_transmit_skb()
97 kfree_skb(skb); in dccp_transmit_skb()
103 dh = dccp_zeroed_hdr(skb, dccp_header_size); in dccp_transmit_skb()
116 dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), ackno); in dccp_transmit_skb()
120 dccp_hdr_request(skb)->dccph_req_service = in dccp_transmit_skb()
129 dccp_hdr_reset(skb)->dccph_reset_code = in dccp_transmit_skb()
134 icsk->icsk_af_ops->send_check(sk, skb); in dccp_transmit_skb()
141 err = icsk->icsk_af_ops->queue_xmit(sk, skb, &inet->cork.fl); in dccp_transmit_skb()
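
Taken together, the matches in dccp_transmit_skb() trace the common output path: adopt skbs that do not yet have an owner, insert options, push a zeroed DCCP header in front of them, fill the type-specific fields recorded in the skb's control block, checksum via the address-family ops, and hand the skb to queue_xmit. A condensed, hedged sketch of that flow, assuming the helpers declared in <linux/dccp.h> and net/dccp/dccp.h; sequence-number bookkeeping, ports, CCVal/CsCov and several packet types are omitted:

	/* Condensed sketch, not the verbatim kernel code. */
	static int transmit_sketch(struct sock *sk, struct sk_buff *skb)
	{
		struct inet_connection_sock *icsk = inet_csk(sk);
		struct inet_sock *inet = inet_sk(sk);
		struct dccp_sock *dp = dccp_sk(sk);
		struct dccp_skb_cb *dcb = DCCP_SKB_CB(skb);	/* DCCP state kept in skb->cb[] */
		const u32 hdr_len = sizeof(struct dccp_hdr) + sizeof(struct dccp_hdr_ext) +
				    dccp_packet_hdr_len(dcb->dccpd_type);
		struct dccp_hdr *dh;

		if (skb->sk == NULL)
			skb_set_owner_w(skb, sk);	/* lines 91-92: adopt unowned skbs */

		if (dccp_insert_options(sk, skb)) {	/* lines 96-97: options go in first */
			kfree_skb(skb);
			return -EPROTO;
		}

		dh = dccp_zeroed_hdr(skb, hdr_len);	/* line 103: fixed header in front */
		dh->dccph_type = dcb->dccpd_type;
		dh->dccph_doff = (hdr_len + dcb->dccpd_opt_len) / 4;

		if (dcb->dccpd_type != DCCP_PKT_DATA &&
		    dcb->dccpd_type != DCCP_PKT_REQUEST)	/* line 116 */
			dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dp->dccps_gsr);

		if (dcb->dccpd_type == DCCP_PKT_REQUEST)	/* line 120 */
			dccp_hdr_request(skb)->dccph_req_service = dp->dccps_service;
		else if (dcb->dccpd_type == DCCP_PKT_RESET)	/* line 129 */
			dccp_hdr_reset(skb)->dccph_reset_code = dcb->dccpd_reset_code;

		dccp_hdr_set_seq(dh, dp->dccps_gss);

		icsk->icsk_af_ops->send_check(sk, skb);		/* line 134: v4/v6 checksum hook */
		return icsk->icsk_af_ops->queue_xmit(sk, skb, &inet->cork.fl);	/* line 141 */
	}
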
248 struct sk_buff *skb = dccp_qpolicy_pop(sk); in dccp_xmit_packet() local
250 if (unlikely(skb == NULL)) in dccp_xmit_packet()
252 len = skb->len; in dccp_xmit_packet()
273 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATAACK; in dccp_xmit_packet()
275 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATAACK; in dccp_xmit_packet()
277 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATA; in dccp_xmit_packet()
280 err = dccp_transmit_skb(sk, skb); in dccp_xmit_packet()
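
dccp_xmit_packet() pops the next buffer from the qpolicy queue and decides whether it leaves as plain DATA or as DATAACK (during the PARTOPEN handshake phase, or when an ack is already owed). A simplified sketch of that decision; the MPS check and CCID feedback are left out:

	static void xmit_packet_sketch(struct sock *sk)
	{
		struct sk_buff *skb = dccp_qpolicy_pop(sk);	/* dequeue from the TX policy queue */
		int err, len;

		if (unlikely(skb == NULL))
			return;
		len = skb->len;		/* capture before the skb is consumed (line 252) */

		if (sk->sk_state == DCCP_PARTOPEN || dccp_ack_pending(sk))
			DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATAACK;
		else
			DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATA;

		err = dccp_transmit_skb(sk, skb);
		if (err)
			pr_debug("transmit_skb() returned err=%d (len %d)\n", err, len);
	}
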
310 struct sk_buff *skb; in dccp_flush_write_queue() local
313 while (*time_budget > 0 && (skb = skb_peek(&sk->sk_write_queue))) { in dccp_flush_write_queue()
314 rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb); in dccp_flush_write_queue()
340 kfree_skb(skb); in dccp_flush_write_queue()
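
dccp_flush_write_queue() is called at close time to drain whatever still sits in sk_write_queue: each peeked skb is offered to the CCID, sent when allowed, and freed when it can no longer be sent (the kfree_skb() at line 340) or once the time budget runs out. A simplified sketch; the real loop converts a positive CCID verdict into a bounded sleep and subtracts the time actually waited from *time_budget:

	static void flush_write_queue_sketch(struct sock *sk, long *time_budget)
	{
		struct dccp_sock *dp = dccp_sk(sk);
		struct sk_buff *skb;

		while (*time_budget > 0 && (skb = skb_peek(&sk->sk_write_queue))) {
			int rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb);

			if (rc == 0) {
				dccp_xmit_packet(sk);	/* CCID allows it: send right away */
			} else if (rc < 0) {
				skb_unlink(skb, &sk->sk_write_queue);
				kfree_skb(skb);		/* cannot be sent any more: drop */
			} else {
				/* rc > 0: CCID asks to wait; the real code sleeps here
				 * and charges the wait against *time_budget. */
				break;
			}
		}
	}
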
349 struct sk_buff *skb; in dccp_write_xmit() local
351 while ((skb = dccp_qpolicy_top(sk))) { in dccp_write_xmit()
352 int rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb); in dccp_write_xmit()
365 dccp_qpolicy_drop(sk, skb); in dccp_write_xmit()
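
dccp_write_xmit() is the steady-state transmit loop: peek the head of the policy queue with dccp_qpolicy_top(), ask the CCID, then either transmit (which dequeues the skb), drop it via dccp_qpolicy_drop(), or stop and wait. A sketch assuming the usual CCID return convention (0 = send now, negative = give up on this packet, positive = delay); the real loop also arms a timer for the delay case:

	static void write_xmit_sketch(struct sock *sk)
	{
		struct dccp_sock *dp = dccp_sk(sk);
		struct sk_buff *skb;

		while ((skb = dccp_qpolicy_top(sk))) {	/* line 351: peek, don't dequeue yet */
			int rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb);

			if (rc == 0)			/* CCID allows sending right away */
				dccp_xmit_packet(sk);	/* pops the skb it just peeked */
			else if (rc < 0)		/* unrecoverable for this buffer */
				dccp_qpolicy_drop(sk, skb);	/* line 365 */
			else				/* rc > 0: wait; stop for now */
				break;
		}
	}
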
401 struct sk_buff *skb = sock_wmalloc(sk, sk->sk_prot->max_header, 1, in dccp_make_response() local
403 if (skb == NULL) in dccp_make_response()
407 skb_reserve(skb, sk->sk_prot->max_header); in dccp_make_response()
409 skb_dst_set(skb, dst_clone(dst)); in dccp_make_response()
414 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_RESPONSE; in dccp_make_response()
415 DCCP_SKB_CB(skb)->dccpd_seq = dreq->dreq_gss; in dccp_make_response()
421 if (dccp_insert_options_rsk(dreq, skb)) in dccp_make_response()
425 dh = dccp_zeroed_hdr(skb, dccp_header_size); in dccp_make_response()
430 DCCP_SKB_CB(skb)->dccpd_opt_len) / 4; in dccp_make_response()
434 dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dreq->dreq_gsr); in dccp_make_response()
435 dccp_hdr_response(skb)->dccph_resp_service = dreq->dreq_service; in dccp_make_response()
437 dccp_csum_outgoing(skb); in dccp_make_response()
442 return skb; in dccp_make_response()
444 kfree_skb(skb); in dccp_make_response()
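
dccp_make_response() builds the RESPONSE that answers a REQUEST on behalf of a request sock: the skb is charged to the listening socket with sock_wmalloc(), gets max_header of headroom and the cloned route, is labelled DCCP_PKT_RESPONSE with the request sock's GSS, and receives the request-sock options before the header, ack bits (GSR) and service code are filled in and checksummed. A hedged sketch of that construction; ports, dccph_x and some error handling are trimmed:

	static struct sk_buff *make_response_sketch(struct sock *sk, struct dst_entry *dst,
						    struct dccp_request_sock *dreq)
	{
		const u32 hdr_len = sizeof(struct dccp_hdr) + sizeof(struct dccp_hdr_ext) +
				    sizeof(struct dccp_hdr_response);
		struct sk_buff *skb = sock_wmalloc(sk, sk->sk_prot->max_header, 1, GFP_ATOMIC);
		struct dccp_hdr *dh;

		if (skb == NULL)
			return NULL;

		skb_reserve(skb, sk->sk_prot->max_header);	/* room for DCCP + IP + link headers */
		skb_dst_set(skb, dst_clone(dst));		/* line 409: pin the chosen route */

		DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_RESPONSE;
		DCCP_SKB_CB(skb)->dccpd_seq  = dreq->dreq_gss;

		if (dccp_insert_options_rsk(dreq, skb)) {	/* line 421 */
			kfree_skb(skb);
			return NULL;
		}

		dh = dccp_zeroed_hdr(skb, hdr_len);
		dh->dccph_type = DCCP_PKT_RESPONSE;
		dh->dccph_doff = (hdr_len + DCCP_SKB_CB(skb)->dccpd_opt_len) / 4;

		dccp_hdr_set_seq(dh, dreq->dreq_gss);
		dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dreq->dreq_gsr);	/* line 434 */
		dccp_hdr_response(skb)->dccph_resp_service = dreq->dreq_service;

		dccp_csum_outgoing(skb);			/* line 437 */
		return skb;
	}
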
459 struct sk_buff *skb; in dccp_ctl_make_reset() local
461 skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC); in dccp_ctl_make_reset()
462 if (skb == NULL) in dccp_ctl_make_reset()
465 skb_reserve(skb, sk->sk_prot->max_header); in dccp_ctl_make_reset()
468 dh = dccp_zeroed_hdr(skb, dccp_hdr_reset_len); in dccp_ctl_make_reset()
475 dhr = dccp_hdr_reset(skb); in dccp_ctl_make_reset()
494 dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dcb->dccpd_seq); in dccp_ctl_make_reset()
496 dccp_csum_outgoing(skb); in dccp_ctl_make_reset()
497 return skb; in dccp_ctl_make_reset()
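
dccp_ctl_make_reset() builds a RESET on the per-namespace control socket when there is no DCCP socket to answer from, so everything it needs comes from the offending packet itself: its control block supplies the reset code and the sequence number to acknowledge, and its header supplies the ports to swap. A hedged sketch (reset data bytes and dccph_x omitted):

	static struct sk_buff *ctl_make_reset_sketch(struct sock *ctl_sk, struct sk_buff *rxskb)
	{
		const u32 hdr_len = sizeof(struct dccp_hdr) + sizeof(struct dccp_hdr_ext) +
				    sizeof(struct dccp_hdr_reset);
		struct dccp_skb_cb *dcb = DCCP_SKB_CB(rxskb);
		struct sk_buff *skb = alloc_skb(ctl_sk->sk_prot->max_header, GFP_ATOMIC);
		struct dccp_hdr_reset *dhr;
		struct dccp_hdr *dh;

		if (skb == NULL)
			return NULL;

		skb_reserve(skb, ctl_sk->sk_prot->max_header);

		dh = dccp_zeroed_hdr(skb, hdr_len);		/* line 468 */
		dh->dccph_type  = DCCP_PKT_RESET;
		dh->dccph_sport = dccp_hdr(rxskb)->dccph_dport;	/* swap the ports */
		dh->dccph_dport = dccp_hdr(rxskb)->dccph_sport;
		dh->dccph_doff  = hdr_len / 4;

		dhr = dccp_hdr_reset(skb);			/* line 475 */
		dhr->dccph_reset_code = dcb->dccpd_reset_code;	/* why we are resetting */

		dccp_hdr_set_seq(dh, dcb->dccpd_ack_seq);	/* our seq: set up by the rx path */
		dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dcb->dccpd_seq);	/* line 494 */

		dccp_csum_outgoing(skb);			/* line 496 */
		return skb;
	}
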
505 struct sk_buff *skb; in dccp_send_reset() local
515 skb = sock_wmalloc(sk, sk->sk_prot->max_header, 1, GFP_ATOMIC); in dccp_send_reset()
516 if (skb == NULL) in dccp_send_reset()
520 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_reset()
521 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_RESET; in dccp_send_reset()
522 DCCP_SKB_CB(skb)->dccpd_reset_code = code; in dccp_send_reset()
524 return dccp_transmit_skb(sk, skb); in dccp_send_reset()
532 struct sk_buff *skb; in dccp_connect() local
549 skb = alloc_skb(sk->sk_prot->max_header, sk->sk_allocation); in dccp_connect()
550 if (unlikely(skb == NULL)) in dccp_connect()
554 skb_reserve(skb, sk->sk_prot->max_header); in dccp_connect()
556 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_REQUEST; in dccp_connect()
558 dccp_transmit_skb(sk, dccp_skb_entail(sk, skb)); in dccp_connect()
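
dccp_connect() sends the initial REQUEST: the skb is allocated with the socket's own allocation mode (so connect() may sleep), reserved to max_header and typed as DCCP_PKT_REQUEST; line 558 then transmits a clone while dccp_skb_entail() keeps the original as sk_send_head for the retransmit timer. A short sketch of that shape:

	static int connect_sketch(struct sock *sk)
	{
		struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, sk->sk_allocation);

		if (unlikely(skb == NULL))
			return -ENOBUFS;

		skb_reserve(skb, sk->sk_prot->max_header);
		DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_REQUEST;

		/* entail keeps the original as sk->sk_send_head, returns a clone;
		 * the real function also initialises sequence state and arms the
		 * retransmit timer afterwards. */
		dccp_transmit_skb(sk, dccp_skb_entail(sk, skb));
		return 0;
	}
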
574 struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, in dccp_send_ack() local
577 if (skb == NULL) { in dccp_send_ack()
587 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_ack()
588 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_ACK; in dccp_send_ack()
589 dccp_transmit_skb(sk, skb); in dccp_send_ack()
636 struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC); in dccp_send_sync() local
638 if (skb == NULL) { in dccp_send_sync()
645 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_sync()
646 DCCP_SKB_CB(skb)->dccpd_type = pkt_type; in dccp_send_sync()
647 DCCP_SKB_CB(skb)->dccpd_ack_seq = ackno; in dccp_send_sync()
655 dccp_transmit_skb(sk, skb); in dccp_send_sync()
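
dccp_send_reset(), dccp_send_ack() and dccp_send_sync() all follow the same short recipe visible in their matches above: allocate a header-only skb (sock_wmalloc() when it should be charged to the socket, alloc_skb() otherwise), reserve max_header, stamp the packet type (and, for SYNC/SYNCACK, the sequence number being acknowledged) into the control block, and let dccp_transmit_skb() build the rest. A common sketch of that shape:

	/* pkt_type is e.g. DCCP_PKT_ACK, DCCP_PKT_SYNC or DCCP_PKT_SYNCACK. */
	static void send_ctl_packet_sketch(struct sock *sk, const u64 ackno,
					   const enum dccp_pkt_type pkt_type)
	{
		struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC);

		if (skb == NULL)
			return;		/* the real senders back off or arm a timer here */

		skb_reserve(skb, sk->sk_prot->max_header);	/* header-only packet */
		DCCP_SKB_CB(skb)->dccpd_type    = pkt_type;
		DCCP_SKB_CB(skb)->dccpd_ack_seq = ackno;	/* used by SYNC/SYNCACK */

		dccp_transmit_skb(sk, skb);
	}
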
668 struct sk_buff *skb; in dccp_send_close() local
671 skb = alloc_skb(sk->sk_prot->max_header, prio); in dccp_send_close()
672 if (skb == NULL) in dccp_send_close()
676 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_close()
678 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_CLOSEREQ; in dccp_send_close()
680 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_CLOSE; in dccp_send_close()
683 skb = dccp_skb_entail(sk, skb); in dccp_send_close()
697 dccp_transmit_skb(sk, skb); in dccp_send_close()
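
dccp_send_close() picks CLOSEREQ when the server asks the client to do the active close, and CLOSE otherwise; when the packet must survive for retransmission it is entailed first (line 683) so only a clone goes out, otherwise it is transmitted directly (line 697). A simplified sketch; the server-timewait special case and the retransmit timer set-up are omitted:

	static void send_close_sketch(struct sock *sk, const int active)
	{
		struct dccp_sock *dp = dccp_sk(sk);
		const gfp_t prio = active ? GFP_KERNEL : GFP_ATOMIC;
		struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, prio);

		if (skb == NULL)
			return;

		skb_reserve(skb, sk->sk_prot->max_header);
		DCCP_SKB_CB(skb)->dccpd_type =
			dp->dccps_role == DCCP_ROLE_SERVER ? DCCP_PKT_CLOSEREQ : DCCP_PKT_CLOSE;

		if (active)
			skb = dccp_skb_entail(sk, skb);	/* keep original for retransmit */

		dccp_transmit_skb(sk, skb);
	}
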