shiftlen 3523 include/linux/skbuff.h int skb_shift(struct sk_buff *tgt, struct sk_buff *skb, int shiftlen);
shiftlen 290 include/linux/sunrpc/gss_krb5.h xdr_extend_head(struct xdr_buf *buf, unsigned int base, unsigned int shiftlen);
shiftlen 488 include/linux/tcp.h int shiftlen);
shiftlen 3299 net/core/skbuff.c int skb_shift(struct sk_buff *tgt, struct sk_buff *skb, int shiftlen)
shiftlen 3304 net/core/skbuff.c BUG_ON(shiftlen > skb->len);
shiftlen 3311 net/core/skbuff.c todo = shiftlen;
shiftlen 3336 net/core/skbuff.c skb_frag_size_add(fragto, shiftlen);
shiftlen 3337 net/core/skbuff.c skb_frag_size_sub(fragfrom, shiftlen);
shiftlen 3338 net/core/skbuff.c skb_frag_off_add(fragfrom, shiftlen);
shiftlen 3347 net/core/skbuff.c if ((shiftlen == skb->len) &&
shiftlen 3409 net/core/skbuff.c skb->len -= shiftlen;
shiftlen 3410 net/core/skbuff.c skb->data_len -= shiftlen;
shiftlen 3411 net/core/skbuff.c skb->truesize -= shiftlen;
shiftlen 3412 net/core/skbuff.c tgt->len += shiftlen;
shiftlen 3413 net/core/skbuff.c tgt->data_len += shiftlen;
shiftlen 3414 net/core/skbuff.c tgt->truesize += shiftlen;
shiftlen 3416 net/core/skbuff.c return shiftlen;
shiftlen 1379 net/ipv4/tcp_input.c int pcount, int shiftlen)
shiftlen 1386 net/ipv4/tcp_input.c if (unlikely(to->len + shiftlen >= 65535 * TCP_MIN_GSO_SIZE))
shiftlen 1390 net/ipv4/tcp_input.c return skb_shift(to, from, shiftlen);
shiftlen 656 net/sunrpc/auth_gss/gss_krb5_crypto.c xdr_extend_head(struct xdr_buf *buf, unsigned int base, unsigned int shiftlen)
shiftlen 660 net/sunrpc/auth_gss/gss_krb5_crypto.c if (shiftlen == 0)
shiftlen 664 net/sunrpc/auth_gss/gss_krb5_crypto.c BUG_ON(shiftlen > RPC_MAX_AUTH_SIZE);
shiftlen 668 net/sunrpc/auth_gss/gss_krb5_crypto.c memmove(p + shiftlen, p, buf->head[0].iov_len - base);
shiftlen 670 net/sunrpc/auth_gss/gss_krb5_crypto.c buf->head[0].iov_len += shiftlen;
shiftlen 671 net/sunrpc/auth_gss/gss_krb5_crypto.c buf->len += shiftlen;
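
The net/core/skbuff.c hits above are the body of skb_shift(), which moves up to shiftlen bytes of paged frag data from skb into tgt and, on success, transfers the len/data_len/truesize accounting between the two (lines 3409-3416) before returning the number of bytes shifted. The net/ipv4/tcp_input.c hits show its TCP caller, tcp_skb_shift(), which guards the merge so the target skb cannot grow past what GSO segmentation can express. Below is a minimal caller sketch in kernel C, assuming a module context; example_skb_shift() is a hypothetical name, and the size guard is copied from the tcp_input.c line 1386 hit:

	#include <linux/skbuff.h>
	#include <net/tcp.h>

	static int example_skb_shift(struct sk_buff *to, struct sk_buff *from,
				     int shiftlen)
	{
		/* skb_shift() BUG()s if shiftlen exceeds from->len
		 * (skbuff.c line 3304), so refuse oversized requests
		 * up front instead. */
		if (WARN_ON(shiftlen > from->len))
			return 0;

		/* Same guard tcp_skb_shift() applies (tcp_input.c line
		 * 1386): do not grow 'to' past what GSO can express. */
		if (unlikely(to->len + shiftlen >= 65535 * TCP_MIN_GSO_SIZE))
			return 0;

		/* On success, len/data_len/truesize accounting moves
		 * from 'from' to 'to' (skbuff.c lines 3409-3416) and
		 * the number of bytes shifted is returned. */
		return skb_shift(to, from, shiftlen);
	}

Note the skbuff.c line 3347 hit: when shiftlen equals skb->len, the whole skb has been drained into tgt, which is what lets the TCP SACK path free the source skb after a full shift.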
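
The net/sunrpc/auth_gss/gss_krb5_crypto.c hits are nearly the whole of xdr_extend_head(): return early when shiftlen is 0, refuse shifts larger than RPC_MAX_AUTH_SIZE (line 664), memmove the head data forward by shiftlen to open a gap at offset base (line 668), then grow head[0].iov_len and buf->len to match (lines 670-671). The following is a self-contained userspace model of that gap-opening technique, assuming simplified stand-ins for struct xdr_buf; the *_like types and extend_head() are hypothetical, not kernel code:

	#include <assert.h>
	#include <stdio.h>
	#include <string.h>

	struct iovec_like { char *iov_base; size_t iov_len; };
	struct xdr_buf_like { struct iovec_like head[1]; size_t len; };

	static int extend_head(struct xdr_buf_like *buf, size_t base,
			       size_t shiftlen)
	{
		char *p;

		if (shiftlen == 0)
			return 0;

		/* The kernel version additionally asserts that shiftlen
		 * <= RPC_MAX_AUTH_SIZE, the headroom the buffer was
		 * allocated with. */
		p = buf->head[0].iov_base + base;

		/* Slide everything from 'base' onward forward by
		 * shiftlen bytes, opening a gap in the head. */
		memmove(p + shiftlen, p, buf->head[0].iov_len - base);

		/* Account for the new bytes in both the head iovec and
		 * the xdr_buf total. */
		buf->head[0].iov_len += shiftlen;
		buf->len += shiftlen;
		return 0;
	}

	int main(void)
	{
		char data[32] = "rpchdrPAYLOAD";
		struct xdr_buf_like buf = {
			.head = { { data, strlen(data) } },
			.len = strlen(data),
		};

		/* Open a 4-byte gap after the 6-byte "rpchdr" prefix,
		 * then fill it, as the GSS code does with auth data. */
		extend_head(&buf, 6, 4);
		memcpy(data + 6, "GAP!", 4);

		assert(buf.len == 17);
		printf("%.*s\n", (int)buf.len, data); /* rpchdrGAP!PAYLOAD */
		return 0;
	}

In the kernel the gap is used to make room for GSS-API authentication material ahead of the RPC payload without copying the whole message, which is why the shift is bounded by RPC_MAX_AUTH_SIZE.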