sk_wmem_queued   1501 drivers/block/drbd/drbd_main.c 	if (sk->sk_wmem_queued > sk->sk_sndbuf * 4 / 5)
sk_wmem_queued    635 drivers/block/drbd/drbd_worker.c 			int queued = sk->sk_wmem_queued;
sk_wmem_queued    146 drivers/crypto/chelsio/chtls/chtls_cm.c 		sk->sk_wmem_queued -= skb->truesize;
sk_wmem_queued    709 drivers/crypto/chelsio/chtls/chtls_io.c 	sk->sk_wmem_queued -= total_size;
sk_wmem_queued    827 drivers/crypto/chelsio/chtls/chtls_io.c 	sk->sk_wmem_queued += skb->truesize;
sk_wmem_queued    901 drivers/crypto/chelsio/chtls/chtls_io.c 	sk->sk_wmem_queued   += copy;
sk_wmem_queued    915 drivers/crypto/chelsio/chtls/chtls_io.c 	return (cdev->max_host_sndbuf - sk->sk_wmem_queued);
sk_wmem_queued   1187 drivers/crypto/chelsio/chtls/chtls_io.c 		sk->sk_wmem_queued -= skb->truesize;
sk_wmem_queued   1266 drivers/crypto/chelsio/chtls/chtls_io.c 		sk->sk_wmem_queued += copy;
sk_wmem_queued    408 include/net/sock.h 	int			sk_wmem_queued;
sk_wmem_queued    886 include/net/sock.h 	return READ_ONCE(sk->sk_wmem_queued) >> 1;
sk_wmem_queued    891 include/net/sock.h 	return READ_ONCE(sk->sk_sndbuf) - READ_ONCE(sk->sk_wmem_queued);
sk_wmem_queued    896 include/net/sock.h 	WRITE_ONCE(sk->sk_wmem_queued, sk->sk_wmem_queued + val);
sk_wmem_queued   1220 include/net/sock.h 	if (READ_ONCE(sk->sk_wmem_queued) >= READ_ONCE(sk->sk_sndbuf))
sk_wmem_queued   2238 include/net/sock.h 	val = min(sk->sk_sndbuf, sk->sk_wmem_queued >> 1);
sk_wmem_queued    282 include/net/tcp.h 	if (sk->sk_wmem_queued > SOCK_MIN_SNDBUF &&
sk_wmem_queued    118 include/trace/events/sock.h 		__entry->wmem_queued = READ_ONCE(sk->sk_wmem_queued);
sk_wmem_queued   1827 net/core/sock.c 		newsk->sk_wmem_queued	= 0;
sk_wmem_queued   2533 net/core/sock.c 			if (sk->sk_wmem_queued < wmem0)
sk_wmem_queued   2547 net/core/sock.c 		    sk_mem_pages(sk->sk_wmem_queued +
sk_wmem_queued   2561 net/core/sock.c 		if (sk->sk_wmem_queued + size >= sk->sk_sndbuf)
sk_wmem_queued   3218 net/core/sock.c 	mem[SK_MEMINFO_WMEM_QUEUED] = READ_ONCE(sk->sk_wmem_queued);
sk_wmem_queued    207 net/core/stream.c 	WARN_ON(sk->sk_wmem_queued);
sk_wmem_queued    155 net/ipv4/af_inet.c 	WARN_ON(sk->sk_wmem_queued);
sk_wmem_queued    210 net/ipv4/inet_diag.c 			.idiag_wmem = READ_ONCE(sk->sk_wmem_queued),
sk_wmem_queued   1327 net/ipv4/tcp_output.c 	if (unlikely((sk->sk_wmem_queued >> 1) > limit &&
sk_wmem_queued   2913 net/ipv4/tcp_output.c 	    min_t(u32, sk->sk_wmem_queued + (sk->sk_wmem_queued >> 2),
sk_wmem_queued    420 net/iucv/af_iucv.c 	WARN_ON(sk->sk_wmem_queued);
sk_wmem_queued    704 net/kcm/kcmsock.c 		sk->sk_wmem_queued -= sent;
sk_wmem_queued    845 net/kcm/kcmsock.c 	sk->sk_wmem_queued += size;
sk_wmem_queued    449 net/sched/em_meta.c 	dst->value = READ_ONCE(sk->sk_wmem_queued);
sk_wmem_queued    176 net/sctp/diag.c 		mem[SK_MEMINFO_WMEM_QUEUED] = sk->sk_wmem_queued;
sk_wmem_queued    275 net/sctp/proc.c 		sk->sk_wmem_queued,
sk_wmem_queued    141 net/sctp/socket.c 	sk->sk_wmem_queued += chunk->skb->truesize + sizeof(struct sctp_chunk);
sk_wmem_queued   8988 net/sctp/socket.c 	sk->sk_wmem_queued -= skb->truesize + sizeof(struct sctp_chunk);
sk_wmem_queued   9145 net/sctp/socket.c 	return sk->sk_sndbuf > sk->sk_wmem_queued;
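
The include/net/sock.h and include/net/tcp.h entries above show the common idiom behind this field: code that queues an skb charges its truesize into sk_wmem_queued, code that frees one subtracts it, and callers compare the counter against sk_sndbuf to decide whether the socket still has send-buffer space, using READ_ONCE()/WRITE_ONCE() where the field is touched locklessly. The sketch below is a minimal user-space model of that accounting, not kernel code; the struct and function names are invented for illustration, and only the arithmetic mirrors the snippets listed above.

/*
 * Minimal user-space model of the sk_wmem_queued accounting idiom seen in
 * the include/net/sock.h entries above.  Names (wmem_model, wmem_charge, ...)
 * are made up for illustration; only the arithmetic follows the listed
 * snippets (charge skb truesize on queue, compare against sk_sndbuf).
 */
#include <stdbool.h>
#include <stdio.h>

struct wmem_model {
	int sk_sndbuf;       /* send buffer limit, cf. sk->sk_sndbuf          */
	int sk_wmem_queued;  /* bytes charged for queued skbs (their truesize) */
};

/* cf. the WRITE_ONCE(sk->sk_wmem_queued, ... + val) line: adjust by +/- truesize */
static void wmem_charge(struct wmem_model *sk, int val)
{
	sk->sk_wmem_queued += val;
}

/* cf. the sk_sndbuf - sk_wmem_queued line: remaining room in the send buffer */
static int wmem_wspace(const struct wmem_model *sk)
{
	return sk->sk_sndbuf - sk->sk_wmem_queued;
}

/* cf. the "wmem_queued >= sndbuf" check: the socket is writable only below the limit */
static bool wmem_memory_free(const struct wmem_model *sk)
{
	return sk->sk_wmem_queued < sk->sk_sndbuf;
}

int main(void)
{
	struct wmem_model sk = { .sk_sndbuf = 16384, .sk_wmem_queued = 0 };

	wmem_charge(&sk, 2048);   /* queue an skb whose truesize is 2048 */
	printf("wspace=%d free=%d\n", wmem_wspace(&sk), wmem_memory_free(&sk));

	wmem_charge(&sk, -2048);  /* skb sent and freed, uncharge it */
	printf("wspace=%d free=%d\n", wmem_wspace(&sk), wmem_memory_free(&sk));
	return 0;
}

In the kernel tree itself this logic appears to live in helpers such as sk_wmem_queued_add(), sk_stream_wspace() and __sk_stream_memory_free() (the include/net/sock.h lines above look like their bodies), while individual protocols add their own accounting on top, for example the net/sctp/socket.c lines charging an extra sizeof(struct sctp_chunk) per queued chunk.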