sw_ctx_rx 2267 net/tls/tls_sw.c struct tls_sw_context_rx *sw_ctx_rx = NULL;
sw_ctx_rx 2296 net/tls/tls_sw.c sw_ctx_rx = kzalloc(sizeof(*sw_ctx_rx), GFP_KERNEL);
sw_ctx_rx 2297 net/tls/tls_sw.c if (!sw_ctx_rx) {
sw_ctx_rx 2301 net/tls/tls_sw.c ctx->priv_ctx_rx = sw_ctx_rx;
sw_ctx_rx 2303 net/tls/tls_sw.c sw_ctx_rx =
sw_ctx_rx 2318 net/tls/tls_sw.c crypto_init_wait(&sw_ctx_rx->async_wait);
sw_ctx_rx 2319 net/tls/tls_sw.c spin_lock_init(&sw_ctx_rx->decrypt_compl_lock);
sw_ctx_rx 2322 net/tls/tls_sw.c skb_queue_head_init(&sw_ctx_rx->rx_list);
sw_ctx_rx 2323 net/tls/tls_sw.c aead = &sw_ctx_rx->aead_recv;
sw_ctx_rx 2442 net/tls/tls_sw.c if (sw_ctx_rx) {
sw_ctx_rx 2443 net/tls/tls_sw.c tfm = crypto_aead_tfm(sw_ctx_rx->aead_recv);
sw_ctx_rx 2446 net/tls/tls_sw.c sw_ctx_rx->async_capable = false;
sw_ctx_rx 2448 net/tls/tls_sw.c sw_ctx_rx->async_capable =
sw_ctx_rx 2456 net/tls/tls_sw.c strp_init(&sw_ctx_rx->strp, sk, &cb);
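Read top to bottom, these hits trace the lifecycle of the receive-side software TLS context: allocation, completion/lock/queue setup, AEAD selection, the async-capability decision, and stream-parser attachment. The sketch below strings them together for orientation; it assumes the hits come from tls_set_sw_offload() in net/tls/tls_sw.c, and the function name, crypto_info parameter, error unwinding, and elided AEAD key setup are reconstructions from context, not verbatim kernel code.

#include <linux/slab.h>
#include <linux/skbuff.h>
#include <crypto/aead.h>
#include <net/strparser.h>
#include <net/tls.h>

/* Sketch of the rx half of (assumed) tls_set_sw_offload(); the tx
 * branch, error unwinding, and AEAD key setup are elided.
 */
static int tls_sw_offload_rx_sketch(struct sock *sk, struct tls_context *ctx,
				    struct tls_crypto_info *crypto_info)
{
	struct tls_sw_context_rx *sw_ctx_rx = NULL;	/* hit at 2267 */
	struct strp_callbacks cb = {};
	struct crypto_tfm *tfm;

	sw_ctx_rx = kzalloc(sizeof(*sw_ctx_rx), GFP_KERNEL);	/* 2296 */
	if (!sw_ctx_rx)						/* 2297 */
		return -ENOMEM;
	ctx->priv_ctx_rx = sw_ctx_rx;				/* 2301 */

	/* Completion plumbing for async decrypt, plus a queue for
	 * records parsed/decrypted but not yet consumed by recvmsg(). */
	crypto_init_wait(&sw_ctx_rx->async_wait);		/* 2318 */
	spin_lock_init(&sw_ctx_rx->decrypt_compl_lock);		/* 2319 */
	skb_queue_head_init(&sw_ctx_rx->rx_list);		/* 2322 */

	/* 2323: aead = &sw_ctx_rx->aead_recv; allocation and key
	 * setup of the receive AEAD are elided in this sketch. */

	/* 2442-2448: async decryption is offered only when the AEAD
	 * implementation is itself asynchronous, and (per the false
	 * branch at 2446) it is forced off for TLS 1.3 in mainline. */
	tfm = crypto_aead_tfm(sw_ctx_rx->aead_recv);		/* 2443 */
	if (crypto_info->version == TLS_1_3_VERSION)
		sw_ctx_rx->async_capable = false;		/* 2446 */
	else
		sw_ctx_rx->async_capable =			/* 2448 */
			!!(tfm->__crt_alg->cra_flags & CRYPTO_ALG_ASYNC);

	/* 2456: attach the stream parser that delimits TLS records on
	 * rx; the real callbacks in cb are elided here. */
	strp_init(&sw_ctx_rx->strp, sk, &cb);

	return 0;
}

The hit at 2442 (if (sw_ctx_rx)) suggests the enclosing function handles both tx and rx setup and guards the rx-only steps behind that check, which is why the async-capability and strparser hits appear well after the allocation cluster.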