a_ctx  220 drivers/crypto/chelsio/chcr_algo.c struct chcr_dev *dev = a_ctx(tfm)->dev;
a_ctx 2257 drivers/crypto/chelsio/chcr_algo.c struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
a_ctx 2265 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 2280 drivers/crypto/chelsio/chcr_algo.c error = chcr_aead_dma_map(&ULD_CTX(a_ctx(tfm))->lldi.pdev->dev, req,
a_ctx 2309 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 2327 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 2343 drivers/crypto/chelsio/chcr_algo.c struct adapter *adap = padap(a_ctx(tfm)->dev);
a_ctx 2395 drivers/crypto/chelsio/chcr_algo.c FILL_SEC_CPL_OP_IVINSR(a_ctx(tfm)->tx_chan_id, 2, 1);
a_ctx 2449 drivers/crypto/chelsio/chcr_algo.c create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
a_ctx 2568 drivers/crypto/chelsio/chcr_algo.c struct chcr_context *ctx = a_ctx(tfm);
a_ctx 2789 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 2816 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 2819 drivers/crypto/chelsio/chcr_algo.c unsigned int c_id = a_ctx(tfm)->tx_chan_id;
a_ctx 2888 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 2902 drivers/crypto/chelsio/chcr_algo.c struct adapter *adap = padap(a_ctx(tfm)->dev);
a_ctx 2968 drivers/crypto/chelsio/chcr_algo.c create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, 0,
a_ctx 2985 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 2998 drivers/crypto/chelsio/chcr_algo.c struct adapter *adap = padap(a_ctx(tfm)->dev);
a_ctx 3040 drivers/crypto/chelsio/chcr_algo.c a_ctx(tfm)->tx_chan_id, 2, 1);
a_ctx 3083 drivers/crypto/chelsio/chcr_algo.c create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
a_ctx 3097 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 3108 drivers/crypto/chelsio/chcr_algo.c return chcr_device_init(a_ctx(tfm));
a_ctx 3113 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 3121 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 3130 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 3168 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 3205 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 3229 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
a_ctx 3270 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
a_ctx 3301 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
a_ctx 3319 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
a_ctx 3344 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
a_ctx 3406 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
a_ctx 3541 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
a_ctx 3614 drivers/crypto/chelsio/chcr_algo.c cdev = a_ctx(tfm)->dev;
a_ctx 3627 drivers/crypto/chelsio/chcr_algo.c u_ctx = ULD_CTX(a_ctx(tfm));
a_ctx 3629 drivers/crypto/chelsio/chcr_algo.c a_ctx(tfm)->tx_qidx)) {
a_ctx 3638 drivers/crypto/chelsio/chcr_algo.c skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[a_ctx(tfm)->rx_qidx], size);
a_ctx 3646 drivers/crypto/chelsio/chcr_algo.c set_wr_txq(skb, CPL_PRIORITY_DATA, a_ctx(tfm)->tx_qidx);
a_ctx 3676 drivers/crypto/chelsio/chcr_algo.c struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
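
The call sites above treat a_ctx() as the accessor that resolves an AEAD transform (tfm, aead, authenc) to the driver-private struct chcr_context, which is then narrowed via AEAD_CTX()/ULD_CTX() or read directly (dev, tx_chan_id, tx_qidx, rx_qidx). The sketch below is not copied from the driver headers; it assumes a_ctx() simply wraps the crypto API's per-transform context (the real definition lives in drivers/crypto/chelsio/chcr_crypto.h).

    /* Minimal sketch, assuming a_ctx() only wraps crypto_aead_ctx(). */
    #include <crypto/internal/aead.h>   /* crypto_aead_ctx() */

    struct chcr_context;                /* defined in chcr_crypto.h */

    static inline struct chcr_context *a_ctx(struct crypto_aead *tfm)
    {
            /* Per-transform context allocated for this tfm by the crypto API. */
            return crypto_aead_ctx(tfm);
    }

Under that assumption, an expression such as AEAD_CTX(a_ctx(tfm)) first maps the transform to its chcr_context and then selects the AEAD-specific portion of that context, while a_ctx(tfm)->tx_qidx and a_ctx(tfm)->rx_qidx pick the transmit/receive queue indices used when the work request is built (lines 3629, 3638, 3646 above).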