Lines matching refs: skb (cross-reference hits for the sk_buff usage in the DCCP output/transmit functions)
31 static struct sk_buff *dccp_skb_entail(struct sock *sk, struct sk_buff *skb) in dccp_skb_entail() argument
33 skb_set_owner_w(skb, sk); in dccp_skb_entail()
35 sk->sk_send_head = skb; in dccp_skb_entail()
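The three hits above make up the body of dccp_skb_entail(): the skb is charged to the socket's write budget and parked as sk->sk_send_head so it can be retransmitted. A minimal sketch of that pattern; the cloning tail is an assumption based on how the result is later handed to dccp_transmit_skb(), it is not itself visible in the listing.

	static struct sk_buff *dccp_skb_entail(struct sock *sk, struct sk_buff *skb)
	{
		skb_set_owner_w(skb, sk);	/* charge the skb to sk->sk_wmem_alloc */
		sk->sk_send_head = skb;		/* keep the original for retransmission */
		/* assumed: hand back a clone so the entailed copy stays parked */
		return skb_clone(sk->sk_send_head, gfp_any());
	}

dccp_connect() and dccp_send_close() further down are the two callers in this listing that rely on this entail-then-transmit arrangement.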
45 static int dccp_transmit_skb(struct sock *sk, struct sk_buff *skb) in dccp_transmit_skb() argument
47 if (likely(skb != NULL)) { in dccp_transmit_skb()
51 struct dccp_skb_cb *dcb = DCCP_SKB_CB(skb); in dccp_transmit_skb()
91 WARN_ON(skb->sk); in dccp_transmit_skb()
92 skb_set_owner_w(skb, sk); in dccp_transmit_skb()
96 if (dccp_insert_options(sk, skb)) { in dccp_transmit_skb()
97 kfree_skb(skb); in dccp_transmit_skb()
103 dh = dccp_zeroed_hdr(skb, dccp_header_size); in dccp_transmit_skb()
116 dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), ackno); in dccp_transmit_skb()
120 dccp_hdr_request(skb)->dccph_req_service = in dccp_transmit_skb()
129 dccp_hdr_reset(skb)->dccph_reset_code = in dccp_transmit_skb()
134 icsk->icsk_af_ops->send_check(sk, skb); in dccp_transmit_skb()
141 err = icsk->icsk_af_ops->queue_xmit(sk, skb, &inet->cork.fl); in dccp_transmit_skb()
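Read in order, the dccp_transmit_skb() hits trace an outgoing skb through the common transmit path: take ownership of control packets that have no owning socket yet, insert options, carve out and zero the DCCP header, set the acknowledgement bits, add the per-type fields (Request service code, Reset code), checksum via the address-family operations, and hand the skb to queue_xmit. A condensed sketch of that flow; locals, error codes and header fields not visible in the hits are assumptions, the per-packet-type switch of the real function is reduced to simple conditionals, and the no-ack case for pure Data packets is ignored.

	static int dccp_transmit_skb(struct sock *sk, struct sk_buff *skb)
	{
		if (likely(skb != NULL)) {
			struct inet_sock *inet = inet_sk(sk);
			const struct inet_connection_sock *icsk = inet_csk(sk);
			struct dccp_sock *dp = dccp_sk(sk);
			struct dccp_skb_cb *dcb = DCCP_SKB_CB(skb);
			const u32 dccp_header_size = sizeof(struct dccp_hdr) +
						     sizeof(struct dccp_hdr_ext) +
						     dccp_packet_hdr_len(dcb->dccpd_type);
			u64 ackno = dp->dccps_gsr;	/* simplification: always ack GSR */
			struct dccp_hdr *dh;
			int err;

			/* Control packets are allocated off the write queue and are
			 * not yet owned by any socket; the real code decides this by
			 * packet type, here simplified to an ownership check. */
			if (skb->sk == NULL)
				skb_set_owner_w(skb, sk);

			if (dccp_insert_options(sk, skb)) {
				kfree_skb(skb);
				return -EPROTO;		/* assumed error code */
			}

			/* Reserve and zero the DCCP header, then fill it in. */
			dh = dccp_zeroed_hdr(skb, dccp_header_size);
			dh->dccph_type = dcb->dccpd_type;
			dh->dccph_doff = (dccp_header_size + dcb->dccpd_opt_len) / 4;
			/* ... ports, CsCov, 48-bit sequence number (elided) ... */
			dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), ackno);

			if (dcb->dccpd_type == DCCP_PKT_REQUEST)
				dccp_hdr_request(skb)->dccph_req_service =
								dp->dccps_service;
			else if (dcb->dccpd_type == DCCP_PKT_RESET)
				dccp_hdr_reset(skb)->dccph_reset_code =
								dcb->dccpd_reset_code;

			/* Address-family specific checksum, then push to IP. */
			icsk->icsk_af_ops->send_check(sk, skb);
			err = icsk->icsk_af_ops->queue_xmit(sk, skb, &inet->cork.fl);
			return net_xmit_eval(err);
		}
		return -ENOBUFS;			/* assumed */
	}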
248 struct sk_buff *skb = dccp_qpolicy_pop(sk); in dccp_xmit_packet() local
250 if (unlikely(skb == NULL)) in dccp_xmit_packet()
252 len = skb->len; in dccp_xmit_packet()
273 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATAACK; in dccp_xmit_packet()
275 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATAACK; in dccp_xmit_packet()
277 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATA; in dccp_xmit_packet()
280 err = dccp_transmit_skb(sk, skb); in dccp_xmit_packet()
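dccp_xmit_packet() takes the next queued data skb from the queuing policy, decides whether it should carry an acknowledgement, and hands it to dccp_transmit_skb(). A simplified sketch; the PARTOPEN handshake-completion details and timer handling around these hits are omitted, and the trailing CCID notification (name and signature) is an assumption about surrounding code not shown in the listing.

	static void dccp_xmit_packet(struct sock *sk)
	{
		int err, len;
		struct dccp_sock *dp = dccp_sk(sk);
		struct sk_buff *skb = dccp_qpolicy_pop(sk);	/* dequeue per qpolicy */

		if (unlikely(skb == NULL))
			return;
		len = skb->len;		/* remember the length before the skb is consumed */

		if (sk->sk_state == DCCP_PARTOPEN)
			/* piggy-back the Ack that completes the handshake */
			DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATAACK;
		else if (dccp_ack_pending(sk))
			DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATAACK;
		else
			DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_DATA;

		err = dccp_transmit_skb(sk, skb);
		if (err)
			dccp_pr_debug("transmit_skb() returned err=%d\n", err);

		/* assumed: tell the CCID the packet went out, even on error */
		ccid_hc_tx_packet_sent(dp->dccps_hc_tx_ccid, sk, len);
	}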
310 struct sk_buff *skb; in dccp_flush_write_queue() local
313 while (*time_budget > 0 && (skb = skb_peek(&sk->sk_write_queue))) { in dccp_flush_write_queue()
314 rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb); in dccp_flush_write_queue()
340 kfree_skb(skb); in dccp_flush_write_queue()
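dccp_flush_write_queue() is used on close to drain sk_write_queue within a caller-supplied time budget: each packet is peeked (not dequeued), offered to the CCID, and then either sent, delayed while the budget allows, or dropped with kfree_skb() when the CCID reports an error. A sketch reconstructed around the hits; the dispatch labels and the dccp_wait_for_ccid() call come from the CCID tx API and are not themselves part of the listing.

	void dccp_flush_write_queue(struct sock *sk, long *time_budget)
	{
		struct dccp_sock *dp = dccp_sk(sk);
		struct sk_buff *skb;
		long delay, rc;

		while (*time_budget > 0 && (skb = skb_peek(&sk->sk_write_queue))) {
			rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb);

			switch (ccid_packet_dequeue_eval(rc)) {
			case CCID_PACKET_WILL_DEQUEUE_LATER:
				return;			/* CCID keeps the packet for later */
			case CCID_PACKET_DELAY:
				delay = msecs_to_jiffies(rc);
				if (delay > *time_budget)
					return;		/* budget exhausted */
				rc = dccp_wait_for_ccid(sk, delay);
				if (rc < 0)
					return;
				*time_budget -= (delay - rc);
				break;			/* loop re-peeks the same skb */
			case CCID_PACKET_SEND_AT_ONCE:
				dccp_xmit_packet(sk);	/* dequeues and transmits */
				break;
			case CCID_PACKET_ERR:
				skb_dequeue(&sk->sk_write_queue);
				kfree_skb(skb);		/* CCID gave up on this packet */
				break;
			}
		}
	}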
349 struct sk_buff *skb; in dccp_write_xmit() local
351 while ((skb = dccp_qpolicy_top(sk))) { in dccp_write_xmit()
352 int rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb); in dccp_write_xmit()
365 dccp_qpolicy_drop(sk, skb); in dccp_write_xmit()
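dccp_write_xmit() is the steady-state transmit loop: it peeks the next skb via the queuing policy, asks the CCID whether it may be sent now, and either transmits it, waits for a later attempt, or drops it with dccp_qpolicy_drop(). A sketch along those lines; the timer arm shown for the delay case is an assumption not visible in the hits.

	void dccp_write_xmit(struct sock *sk)
	{
		struct dccp_sock *dp = dccp_sk(sk);
		struct sk_buff *skb;

		while ((skb = dccp_qpolicy_top(sk))) {		/* peek, don't dequeue */
			int rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb);

			switch (ccid_packet_dequeue_eval(rc)) {
			case CCID_PACKET_WILL_DEQUEUE_LATER:
				return;
			case CCID_PACKET_DELAY:
				/* assumed: retry via the per-socket xmit timer */
				sk_reset_timer(sk, &dp->dccps_xmit_timer,
					       jiffies + msecs_to_jiffies(rc));
				return;
			case CCID_PACKET_SEND_AT_ONCE:
				dccp_xmit_packet(sk);
				break;
			case CCID_PACKET_ERR:
				dccp_qpolicy_drop(sk, skb);	/* unlink and free */
				break;
			}
		}
	}

Unlike dccp_flush_write_queue() above, this loop never blocks: delays are deferred to a timer instead of being waited out.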
401 struct sk_buff *skb; in dccp_make_response() local
407 skb = sock_wmalloc((struct sock *)sk, MAX_DCCP_HEADER, 1, in dccp_make_response()
409 if (!skb) in dccp_make_response()
412 skb_reserve(skb, MAX_DCCP_HEADER); in dccp_make_response()
414 skb_dst_set(skb, dst_clone(dst)); in dccp_make_response()
419 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_RESPONSE; in dccp_make_response()
420 DCCP_SKB_CB(skb)->dccpd_seq = dreq->dreq_gss; in dccp_make_response()
426 if (dccp_insert_options_rsk(dreq, skb)) in dccp_make_response()
430 dh = dccp_zeroed_hdr(skb, dccp_header_size); in dccp_make_response()
435 DCCP_SKB_CB(skb)->dccpd_opt_len) / 4; in dccp_make_response()
439 dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dreq->dreq_gsr); in dccp_make_response()
440 dccp_hdr_response(skb)->dccph_resp_service = dreq->dreq_service; in dccp_make_response()
442 dccp_csum_outgoing(skb); in dccp_make_response()
447 return skb; in dccp_make_response()
449 kfree_skb(skb); in dccp_make_response()
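dccp_make_response() builds the Response that a listening socket sends in answer to a Request: the skb is allocated against the (const) listener with sock_wmalloc(), header room is reserved, the route is attached, type and sequence number are taken from the request sock, options are inserted, the header is filled, and the peer's greatest received sequence number (dreq_gsr) is acknowledged before checksumming. A sketch of that construction; the port and extended-sequence-number assignments are elided, and the header-size constant is an assumption based on the DCCP header structures.

	struct sk_buff *dccp_make_response(const struct sock *sk, struct dst_entry *dst,
					   struct request_sock *req)
	{
		struct dccp_request_sock *dreq = dccp_rsk(req);
		const u32 dccp_header_size = sizeof(struct dccp_hdr) +
					     sizeof(struct dccp_hdr_ext) +
					     sizeof(struct dccp_hdr_response);
		struct dccp_hdr *dh;
		struct sk_buff *skb;

		skb = sock_wmalloc((struct sock *)sk, MAX_DCCP_HEADER, 1, GFP_ATOMIC);
		if (!skb)
			return NULL;

		skb_reserve(skb, MAX_DCCP_HEADER);	/* room for all lower-layer headers */
		skb_dst_set(skb, dst_clone(dst));	/* route the Response will use */

		DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_RESPONSE;
		DCCP_SKB_CB(skb)->dccpd_seq  = dreq->dreq_gss;

		if (dccp_insert_options_rsk(dreq, skb))
			goto response_failed;

		dh = dccp_zeroed_hdr(skb, dccp_header_size);
		dh->dccph_type = DCCP_PKT_RESPONSE;
		dh->dccph_doff = (dccp_header_size +
				  DCCP_SKB_CB(skb)->dccpd_opt_len) / 4;
		/* ... ports and extended sequence number (elided) ... */

		dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dreq->dreq_gsr);
		dccp_hdr_response(skb)->dccph_resp_service = dreq->dreq_service;

		dccp_csum_outgoing(skb);
		return skb;

	response_failed:
		kfree_skb(skb);
		return NULL;
	}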
464 struct sk_buff *skb; in dccp_ctl_make_reset() local
466 skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC); in dccp_ctl_make_reset()
467 if (skb == NULL) in dccp_ctl_make_reset()
470 skb_reserve(skb, sk->sk_prot->max_header); in dccp_ctl_make_reset()
473 dh = dccp_zeroed_hdr(skb, dccp_hdr_reset_len); in dccp_ctl_make_reset()
480 dhr = dccp_hdr_reset(skb); in dccp_ctl_make_reset()
499 dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dcb->dccpd_seq); in dccp_ctl_make_reset()
501 dccp_csum_outgoing(skb); in dccp_ctl_make_reset()
502 return skb; in dccp_ctl_make_reset()
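dccp_ctl_make_reset() builds a Reset in reply to a received packet, typically on the control socket where no full connection state exists: it constructs the entire header itself, swapping the ports of the offending packet and acknowledging its sequence number, and returns the skb for the caller to transmit. A simplified sketch; the reset data bytes and the RFC 4340, 8.3.1 sequence-number rule handled by the real function are omitted.

	struct sk_buff *dccp_ctl_make_reset(struct sock *sk, struct sk_buff *rcv_skb)
	{
		struct dccp_hdr *rxdh = dccp_hdr(rcv_skb), *dh;
		struct dccp_skb_cb *dcb = DCCP_SKB_CB(rcv_skb);
		const u32 dccp_hdr_reset_len = sizeof(struct dccp_hdr) +
					       sizeof(struct dccp_hdr_ext) +
					       sizeof(struct dccp_hdr_reset);
		struct dccp_hdr_reset *dhr;
		struct sk_buff *skb;

		skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC);
		if (skb == NULL)
			return NULL;

		skb_reserve(skb, sk->sk_prot->max_header);

		/* Build the Reset header, swapping source and destination ports. */
		dh = dccp_zeroed_hdr(skb, dccp_hdr_reset_len);
		dh->dccph_type	= DCCP_PKT_RESET;
		dh->dccph_sport	= rxdh->dccph_dport;
		dh->dccph_dport	= rxdh->dccph_sport;
		dh->dccph_doff	= dccp_hdr_reset_len / 4;

		dhr = dccp_hdr_reset(skb);
		dhr->dccph_reset_code = dcb->dccpd_reset_code;

		/* Acknowledge the sequence number of the offending packet. */
		dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dcb->dccpd_seq);

		dccp_csum_outgoing(skb);
		return skb;
	}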
510 struct sk_buff *skb; in dccp_send_reset() local
520 skb = sock_wmalloc(sk, sk->sk_prot->max_header, 1, GFP_ATOMIC); in dccp_send_reset()
521 if (skb == NULL) in dccp_send_reset()
525 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_reset()
526 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_RESET; in dccp_send_reset()
527 DCCP_SKB_CB(skb)->dccpd_reset_code = code; in dccp_send_reset()
529 return dccp_transmit_skb(sk, skb); in dccp_send_reset()
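dccp_send_reset(), by contrast, runs on a full socket and only has to allocate the skb, reserve header room and set the control bits; header construction is left to dccp_transmit_skb(). A short sketch, with the error code on allocation failure assumed.

	int dccp_send_reset(struct sock *sk, enum dccp_reset_codes code)
	{
		struct sk_buff *skb;

		skb = sock_wmalloc(sk, sk->sk_prot->max_header, 1, GFP_ATOMIC);
		if (skb == NULL)
			return -ENOBUFS;	/* assumed error code */

		/* Reserve header room and prepare control bits; the header itself
		 * is filled in by dccp_transmit_skb(). */
		skb_reserve(skb, sk->sk_prot->max_header);
		DCCP_SKB_CB(skb)->dccpd_type	   = DCCP_PKT_RESET;
		DCCP_SKB_CB(skb)->dccpd_reset_code = code;

		return dccp_transmit_skb(sk, skb);
	}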
537 struct sk_buff *skb; in dccp_connect() local
554 skb = alloc_skb(sk->sk_prot->max_header, sk->sk_allocation); in dccp_connect()
555 if (unlikely(skb == NULL)) in dccp_connect()
559 skb_reserve(skb, sk->sk_prot->max_header); in dccp_connect()
561 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_REQUEST; in dccp_connect()
563 dccp_transmit_skb(sk, dccp_skb_entail(sk, skb)); in dccp_connect()
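dccp_connect() sends the initial Request: the skb is allocated, header room reserved, the type set, and the packet is entailed on sk_send_head via dccp_skb_entail() so that the copy handed to dccp_transmit_skb() can later be retransmitted from the stored original. A sketch of just that part; the MSS and feature-negotiation setup and the retransmission timer of the real function are omitted, and the error code is assumed.

	int dccp_connect(struct sock *sk)
	{
		struct sk_buff *skb;

		skb = alloc_skb(sk->sk_prot->max_header, sk->sk_allocation);
		if (unlikely(skb == NULL))
			return -ENOBUFS;	/* assumed error code */

		/* Reserve space for headers. */
		skb_reserve(skb, sk->sk_prot->max_header);
		DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_REQUEST;

		/* Entail the Request for retransmission and send the returned clone. */
		dccp_transmit_skb(sk, dccp_skb_entail(sk, skb));
		return 0;
	}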
579 struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, in dccp_send_ack() local
582 if (skb == NULL) { in dccp_send_ack()
592 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_ack()
593 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_ACK; in dccp_send_ack()
594 dccp_transmit_skb(sk, skb); in dccp_send_ack()
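dccp_send_ack() sends a bare Ack; if allocation fails it falls back to the delayed-ACK machinery so the Ack is retried later. A reduced sketch; the closed-state check and the timer parameters used on the failure path are omitted.

	void dccp_send_ack(struct sock *sk)
	{
		struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC);

		if (skb == NULL) {
			/* Out of memory: let the delayed-ACK timer retry later
			 * (timer setup elided). */
			inet_csk_schedule_ack(sk);
			return;
		}

		/* Reserve space for headers and mark the packet as a pure Ack. */
		skb_reserve(skb, sk->sk_prot->max_header);
		DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_ACK;
		dccp_transmit_skb(sk, skb);
	}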
641 struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC); in dccp_send_sync() local
643 if (skb == NULL) { in dccp_send_sync()
650 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_sync()
651 DCCP_SKB_CB(skb)->dccpd_type = pkt_type; in dccp_send_sync()
652 DCCP_SKB_CB(skb)->dccpd_ack_seq = ackno; in dccp_send_sync()
660 dccp_transmit_skb(sk, skb); in dccp_send_sync()
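dccp_send_sync() builds a Sync or SyncAck carrying an explicit acknowledgement number; it is not placed on the write queue, so dccp_transmit_skb() takes ownership of it (the skb_set_owner_w() branch seen earlier). A sketch of the hits, with the failure message assumed from the DCCP debug helpers.

	void dccp_send_sync(struct sock *sk, const u64 ackno,
			    const enum dccp_pkt_type pkt_type)
	{
		struct sk_buff *skb = alloc_skb(sk->sk_prot->max_header, GFP_ATOMIC);

		if (skb == NULL) {
			DCCP_CRIT("could not send %s", dccp_packet_name(pkt_type));
			return;
		}

		/* Reserve space for headers and prepare control bits. */
		skb_reserve(skb, sk->sk_prot->max_header);
		DCCP_SKB_CB(skb)->dccpd_type    = pkt_type;	/* Sync or SyncAck */
		DCCP_SKB_CB(skb)->dccpd_ack_seq = ackno;	/* sequence to acknowledge */

		dccp_transmit_skb(sk, skb);
	}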
673 struct sk_buff *skb; in dccp_send_close() local
676 skb = alloc_skb(sk->sk_prot->max_header, prio); in dccp_send_close()
677 if (skb == NULL) in dccp_send_close()
681 skb_reserve(skb, sk->sk_prot->max_header); in dccp_send_close()
683 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_CLOSEREQ; in dccp_send_close()
685 DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_CLOSE; in dccp_send_close()
688 skb = dccp_skb_entail(sk, skb); in dccp_send_close()
702 dccp_transmit_skb(sk, skb); in dccp_send_close()
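dccp_send_close() sends a CloseReq (server) or Close, and when the close is active it entails the skb first so the packet can be retransmitted until the closing state is left. A sketch; the server-timewait condition and the retransmission timer are simplified away.

	void dccp_send_close(struct sock *sk, const int active)
	{
		struct dccp_sock *dp = dccp_sk(sk);
		struct sk_buff *skb;
		const gfp_t prio = active ? GFP_KERNEL : GFP_ATOMIC;

		skb = alloc_skb(sk->sk_prot->max_header, prio);
		if (skb == NULL)
			return;

		/* Reserve space for headers and prepare control bits. */
		skb_reserve(skb, sk->sk_prot->max_header);
		if (dp->dccps_role == DCCP_ROLE_SERVER)	/* simplification */
			DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_CLOSEREQ;
		else
			DCCP_SKB_CB(skb)->dccpd_type = DCCP_PKT_CLOSE;

		if (active)
			/* Keep a copy on sk_send_head for retransmission and
			 * transmit the clone returned by dccp_skb_entail(). */
			skb = dccp_skb_entail(sk, skb);

		dccp_transmit_skb(sk, skb);
	}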