rsa 242 crypto/rsa.c  static struct akcipher_alg rsa = {
rsa 262 crypto/rsa.c  err = crypto_register_akcipher(&rsa);
rsa 268 crypto/rsa.c  crypto_unregister_akcipher(&rsa);
rsa 278 crypto/rsa.c  crypto_unregister_akcipher(&rsa);
rsa 52 drivers/crypto/ccp/ccp-crypto-rsa.c  req->dst_len = rctx->cmd.u.rsa.key_size >> 3;
rsa 61 drivers/crypto/ccp/ccp-crypto-rsa.c  return ctx->u.rsa.n_len;
rsa 75 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */
rsa 77 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg;
rsa 78 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len;
rsa 80 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg;
rsa 81 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.exp_len = ctx->u.rsa.d_len;
rsa 83 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.mod = &ctx->u.rsa.n_sg;
rsa 84 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.mod_len = ctx->u.rsa.n_len;
rsa 85 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.src = req->src;
rsa 86 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.src_len = req->src_len;
rsa 87 drivers/crypto/ccp/ccp-crypto-rsa.c  rctx->cmd.u.rsa.dst = req->dst;
rsa 115 drivers/crypto/ccp/ccp-crypto-rsa.c  kzfree(ctx->u.rsa.e_buf);
rsa 116 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.e_buf = NULL;
rsa 117 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.e_len = 0;
rsa 118 drivers/crypto/ccp/ccp-crypto-rsa.c  kzfree(ctx->u.rsa.n_buf);
rsa 119 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.n_buf = NULL;
rsa 120 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.n_len = 0;
rsa 121 drivers/crypto/ccp/ccp-crypto-rsa.c  kzfree(ctx->u.rsa.d_buf);
rsa 122 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.d_buf = NULL;
rsa 123 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.d_len = 0;
rsa 144 drivers/crypto/ccp/ccp-crypto-rsa.c  ret = ccp_copy_and_save_keypart(&ctx->u.rsa.n_buf, &ctx->u.rsa.n_len,
rsa 148 drivers/crypto/ccp/ccp-crypto-rsa.c  sg_init_one(&ctx->u.rsa.n_sg, ctx->u.rsa.n_buf, ctx->u.rsa.n_len);
rsa 150 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.key_len = ctx->u.rsa.n_len << 3; /* convert to bits */
rsa 151 drivers/crypto/ccp/ccp-crypto-rsa.c  if (ccp_check_key_length(ctx->u.rsa.key_len)) {
rsa 156 drivers/crypto/ccp/ccp-crypto-rsa.c  ret = ccp_copy_and_save_keypart(&ctx->u.rsa.e_buf, &ctx->u.rsa.e_len,
rsa 160 drivers/crypto/ccp/ccp-crypto-rsa.c  sg_init_one(&ctx->u.rsa.e_sg, ctx->u.rsa.e_buf, ctx->u.rsa.e_len);
rsa 163 drivers/crypto/ccp/ccp-crypto-rsa.c  ret = ccp_copy_and_save_keypart(&ctx->u.rsa.d_buf,
rsa 164 drivers/crypto/ccp/ccp-crypto-rsa.c  &ctx->u.rsa.d_len,
rsa 168 drivers/crypto/ccp/ccp-crypto-rsa.c  sg_init_one(&ctx->u.rsa.d_sg,
rsa 169 drivers/crypto/ccp/ccp-crypto-rsa.c  ctx->u.rsa.d_buf, ctx->u.rsa.d_len);
rsa 263 drivers/crypto/ccp/ccp-crypto.h  struct ccp_rsa_ctx rsa;
rsa 238 drivers/crypto/ccp/ccp-dev-v3.c  | (op->u.rsa.mod_size << REQ1_RSA_MOD_SIZE_SHIFT)
rsa 241 drivers/crypto/ccp/ccp-dev-v3.c  cr[1] = op->u.rsa.input_len - 1;
rsa 573 drivers/crypto/ccp/ccp-dev-v3.c  .rsa = ccp_perform_rsa,
rsa 119 drivers/crypto/ccp/ccp-dev-v5.c  } rsa;
rsa 149 drivers/crypto/ccp/ccp-dev-v5.c  #define CCP_RSA_SIZE(p) ((p)->rsa.size)
rsa 468 drivers/crypto/ccp/ccp-dev-v5.c  CCP_RSA_SIZE(&function) = (op->u.rsa.mod_size + 7) >> 3;
rsa 471 drivers/crypto/ccp/ccp-dev-v5.c  CCP5_CMD_LEN(&desc) = op->u.rsa.input_len;
rsa 1092 drivers/crypto/ccp/ccp-dev-v5.c  .rsa = ccp5_perform_rsa,
rsa 550 drivers/crypto/ccp/ccp-dev.h  struct ccp_rsa_op rsa;
rsa 656 drivers/crypto/ccp/ccp-dev.h  int (*rsa)(struct ccp_op *);
rsa 1817 drivers/crypto/ccp/ccp-ops.c  struct ccp_rsa_engine *rsa = &cmd->u.rsa;
rsa 1824 drivers/crypto/ccp/ccp-ops.c  if (rsa->key_size > cmd_q->ccp->vdata->rsamax)
rsa 1827 drivers/crypto/ccp/ccp-ops.c  if (!rsa->exp || !rsa->mod || !rsa->src || !rsa->dst)
rsa 1842 drivers/crypto/ccp/ccp-ops.c  o_len = 32 * ((rsa->key_size + 255) / 256);
rsa 1870 drivers/crypto/ccp/ccp-ops.c  ret = ccp_reverse_set_dm_area(&exp, 0, rsa->exp, 0, rsa->exp_len);
rsa 1899 drivers/crypto/ccp/ccp-ops.c  ret = ccp_reverse_set_dm_area(&src, 0, rsa->mod, 0, rsa->mod_len);
rsa 1902 drivers/crypto/ccp/ccp-ops.c  ret = ccp_reverse_set_dm_area(&src, o_len, rsa->src, 0, rsa->src_len);
rsa 1919 drivers/crypto/ccp/ccp-ops.c  op.u.rsa.mod_size = rsa->key_size;
rsa 1920 drivers/crypto/ccp/ccp-ops.c  op.u.rsa.input_len = i_len;
rsa 1922 drivers/crypto/ccp/ccp-ops.c  ret = cmd_q->ccp->vdata->perform->rsa(&op);
rsa 1928 drivers/crypto/ccp/ccp-ops.c  ccp_reverse_get_dm_area(&dst, 0, rsa->dst, 0, rsa->mod_len);
rsa 161 drivers/crypto/qat/qat_common/qat_asym_algs.c  struct qat_rsa_input_params rsa;
rsa 165 drivers/crypto/qat/qat_common/qat_asym_algs.c  struct qat_rsa_output_params rsa;
rsa 174 drivers/crypto/qat/qat_common/qat_asym_algs.c  struct qat_rsa_ctx *rsa;
rsa 178 drivers/crypto/qat/qat_common/qat_asym_algs.c  struct akcipher_request *rsa;
rsa 556 drivers/crypto/qat/qat_common/qat_asym_algs.c  struct akcipher_request *areq = req->areq.rsa;
rsa 557 drivers/crypto/qat/qat_common/qat_asym_algs.c  struct device *dev = &GET_DEV(req->ctx.rsa->inst->accel_dev);
rsa 564 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_free_coherent(dev, req->ctx.rsa->key_sz, req->src_align,
rsa 565 drivers/crypto/qat/qat_common/qat_asym_algs.c  req->in.rsa.enc.m);
rsa 567 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_unmap_single(dev, req->in.rsa.enc.m, req->ctx.rsa->key_sz,
rsa 570 drivers/crypto/qat/qat_common/qat_asym_algs.c  areq->dst_len = req->ctx.rsa->key_sz;
rsa 575 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_free_coherent(dev, req->ctx.rsa->key_sz, req->dst_align,
rsa 576 drivers/crypto/qat/qat_common/qat_asym_algs.c  req->out.rsa.enc.c);
rsa 578 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_unmap_single(dev, req->out.rsa.enc.c, req->ctx.rsa->key_sz,
rsa 712 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->ctx.rsa = ctx;
rsa 713 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->areq.rsa = req;
rsa 719 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.enc.e = ctx->dma_e;
rsa 720 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.enc.n = ctx->dma_n;
rsa 732 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.enc.m = dma_map_single(dev, sg_virt(req->src),
rsa 734 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.enc.m)))
rsa 741 drivers/crypto/qat/qat_common/qat_asym_algs.c  &qat_req->in.rsa.enc.m,
rsa 751 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->out.rsa.enc.c = dma_map_single(dev, sg_virt(req->dst),
rsa 755 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (unlikely(dma_mapping_error(dev, qat_req->out.rsa.enc.c)))
rsa 760 drivers/crypto/qat/qat_common/qat_asym_algs.c  &qat_req->out.rsa.enc.c,
rsa 766 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.in_tab[3] = 0;
rsa 767 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->out.rsa.out_tab[1] = 0;
rsa 768 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->phy_in = dma_map_single(dev, &qat_req->in.rsa.enc.m,
rsa 774 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->phy_out = dma_map_single(dev, &qat_req->out.rsa.enc.c,
rsa 804 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->out.rsa.enc.c);
rsa 806 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (!dma_mapping_error(dev, qat_req->out.rsa.enc.c))
rsa 807 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_unmap_single(dev, qat_req->out.rsa.enc.c,
rsa 812 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.enc.m);
rsa 814 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (!dma_mapping_error(dev, qat_req->in.rsa.enc.m))
rsa 815 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_unmap_single(dev, qat_req->in.rsa.enc.m,
rsa 848 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->ctx.rsa = ctx;
rsa 849 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->areq.rsa = req;
rsa 856 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec_crt.p = ctx->dma_p;
rsa 857 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec_crt.q = ctx->dma_q;
rsa 858 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec_crt.dp = ctx->dma_dp;
rsa 859 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec_crt.dq = ctx->dma_dq;
rsa 860 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec_crt.qinv = ctx->dma_qinv;
rsa 862 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec.d = ctx->dma_d;
rsa 863 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec.n = ctx->dma_n;
rsa 876 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec.c = dma_map_single(dev, sg_virt(req->src),
rsa 878 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.dec.c)))
rsa 885 drivers/crypto/qat/qat_common/qat_asym_algs.c  &qat_req->in.rsa.dec.c,
rsa 895 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->out.rsa.dec.m = dma_map_single(dev, sg_virt(req->dst),
rsa 899 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (unlikely(dma_mapping_error(dev, qat_req->out.rsa.dec.m)))
rsa 904 drivers/crypto/qat/qat_common/qat_asym_algs.c  &qat_req->out.rsa.dec.m,
rsa 912 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.in_tab[6] = 0;
rsa 914 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.in_tab[3] = 0;
rsa 915 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->out.rsa.out_tab[1] = 0;
rsa 916 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->phy_in = dma_map_single(dev, &qat_req->in.rsa.dec.c,
rsa 922 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->phy_out = dma_map_single(dev, &qat_req->out.rsa.dec.m,
rsa 956 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->out.rsa.dec.m);
rsa 958 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (!dma_mapping_error(dev, qat_req->out.rsa.dec.m))
rsa 959 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_unmap_single(dev, qat_req->out.rsa.dec.m,
rsa 964 drivers/crypto/qat/qat_common/qat_asym_algs.c  qat_req->in.rsa.dec.c);
rsa 966 drivers/crypto/qat/qat_common/qat_asym_algs.c  if (!dma_mapping_error(dev, qat_req->in.rsa.dec.c))
rsa 967 drivers/crypto/qat/qat_common/qat_asym_algs.c  dma_unmap_single(dev, qat_req->in.rsa.dec.c,
rsa 1300 drivers/crypto/qat/qat_common/qat_asym_algs.c  static struct akcipher_alg rsa = {
rsa 1341 drivers/crypto/qat/qat_common/qat_asym_algs.c  rsa.base.cra_flags = 0;
rsa 1342 drivers/crypto/qat/qat_common/qat_asym_algs.c  ret = crypto_register_akcipher(&rsa);
rsa 1356 drivers/crypto/qat/qat_common/qat_asym_algs.c  crypto_unregister_akcipher(&rsa);
rsa 61 drivers/gpu/drm/i915/gt/uc/intel_guc_fw.c  u32 rsa[UOS_RSA_SCRATCH_COUNT];
rsa 65 drivers/gpu/drm/i915/gt/uc/intel_guc_fw.c  copied = intel_uc_fw_copy_rsa(guc_fw, rsa, sizeof(rsa));
rsa 66 drivers/gpu/drm/i915/gt/uc/intel_guc_fw.c  GEM_BUG_ON(copied < sizeof(rsa));
rsa 69 drivers/gpu/drm/i915/gt/uc/intel_guc_fw.c  intel_uncore_write(uncore, UOS_RSA_SCRATCH(i), rsa[i]);
rsa 396 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  struct rx_sa *rsa;
rsa 400 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  hash_for_each_possible_rcu(ipsec->rx_sa_list, rsa, hlist,
rsa 402 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (rsa->mode & IXGBE_RXTXMOD_VF)
rsa 404 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (spi == rsa->xs->id.spi &&
rsa 405 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||
rsa 406 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,
rsa 407 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  sizeof(rsa->xs->id.daddr.a6)))) &&
rsa 408 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  proto == rsa->xs->id.proto) {
rsa 409 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  ret = rsa->xs;
rsa 584 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  struct rx_sa rsa;
rsa 599 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  memset(&rsa, 0, sizeof(rsa));
rsa 600 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.used = true;
rsa 601 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.xs = xs;
rsa 603 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (rsa.xs->id.proto & IPPROTO_ESP)
rsa 604 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.decrypt = xs->ealg || xs->aead;
rsa 607 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  ret = ixgbe_ipsec_parse_proto_keys(xs, rsa.key, &rsa.salt);
rsa 615 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);
rsa 617 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  memcpy(&rsa.ipaddr[3], &xs->id.daddr.a4, 4);
rsa 638 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.ipaddr, sizeof(rsa.ipaddr))) {
rsa 653 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.iptbl_ind = match;
rsa 658 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.iptbl_ind = first;
rsa 661 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.ipaddr, sizeof(rsa.ipaddr));
rsa 665 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  ixgbe_ipsec_set_rx_ip(hw, rsa.iptbl_ind, rsa.ipaddr);
rsa 670 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  memset(&rsa, 0, sizeof(rsa));
rsa 674 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.mode = IXGBE_RXMOD_VALID;
rsa 675 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (rsa.xs->id.proto & IPPROTO_ESP)
rsa 676 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.mode |= IXGBE_RXMOD_PROTO_ESP;
rsa 677 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (rsa.decrypt)
rsa 678 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.mode |= IXGBE_RXMOD_DECRYPT;
rsa 679 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (rsa.xs->props.family == AF_INET6)
rsa 680 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.mode |= IXGBE_RXMOD_IPV6;
rsa 683 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  memcpy(&ipsec->rx_tbl[sa_idx], &rsa, sizeof(rsa));
rsa 685 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  ixgbe_ipsec_set_rx_sa(hw, sa_idx, rsa.xs->id.spi, rsa.key,
rsa 686 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa.salt, rsa.mode, rsa.iptbl_ind);
rsa 693 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  (__force u32)rsa.xs->id.spi);
rsa 756 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  struct rx_sa *rsa;
rsa 760 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa = &ipsec->rx_tbl[sa_idx];
rsa 762 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (!rsa->used) {
rsa 769 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  hash_del_rcu(&rsa->hlist);
rsa 774 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  ipi = rsa->iptbl_ind;
rsa 786 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  memset(rsa, 0, sizeof(struct rx_sa));
rsa 1002 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  struct rx_sa *rsa;
rsa 1011 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa = &ipsec->rx_tbl[sa_idx];
rsa 1013 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (!rsa->used)
rsa 1016 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  if (!(rsa->mode & IXGBE_RXTXMOD_VF) ||
rsa 1017 drivers/net/ethernet/intel/ixgbe/ixgbe_ipsec.c  rsa->vf != vf) {
rsa 183 drivers/net/ethernet/intel/ixgbevf/ipsec.c  struct rx_sa *rsa;
rsa 186 drivers/net/ethernet/intel/ixgbevf/ipsec.c  hash_for_each_possible_rcu(ipsec->rx_sa_list, rsa, hlist,
rsa 188 drivers/net/ethernet/intel/ixgbevf/ipsec.c  if (spi == rsa->xs->id.spi &&
rsa 189 drivers/net/ethernet/intel/ixgbevf/ipsec.c  ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||
rsa 190 drivers/net/ethernet/intel/ixgbevf/ipsec.c  (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,
rsa 191 drivers/net/ethernet/intel/ixgbevf/ipsec.c  sizeof(rsa->xs->id.daddr.a6)))) &&
rsa 192 drivers/net/ethernet/intel/ixgbevf/ipsec.c  proto == rsa->xs->id.proto) {
rsa 193 drivers/net/ethernet/intel/ixgbevf/ipsec.c  ret = rsa->xs;
rsa 276 drivers/net/ethernet/intel/ixgbevf/ipsec.c  struct rx_sa rsa;
rsa 291 drivers/net/ethernet/intel/ixgbevf/ipsec.c  memset(&rsa, 0, sizeof(rsa));
rsa 292 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.used = true;
rsa 293 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.xs = xs;
rsa 295 drivers/net/ethernet/intel/ixgbevf/ipsec.c  if (rsa.xs->id.proto & IPPROTO_ESP)
rsa 296 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.decrypt = xs->ealg || xs->aead;
rsa 299 drivers/net/ethernet/intel/ixgbevf/ipsec.c  ret = ixgbevf_ipsec_parse_proto_keys(xs, rsa.key, &rsa.salt);
rsa 307 drivers/net/ethernet/intel/ixgbevf/ipsec.c  memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);
rsa 309 drivers/net/ethernet/intel/ixgbevf/ipsec.c  memcpy(&rsa.ipaddr[3], &xs->id.daddr.a4, 4);
rsa 311 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.mode = IXGBE_RXMOD_VALID;
rsa 312 drivers/net/ethernet/intel/ixgbevf/ipsec.c  if (rsa.xs->id.proto & IPPROTO_ESP)
rsa 313 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.mode |= IXGBE_RXMOD_PROTO_ESP;
rsa 314 drivers/net/ethernet/intel/ixgbevf/ipsec.c  if (rsa.decrypt)
rsa 315 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.mode |= IXGBE_RXMOD_DECRYPT;
rsa 316 drivers/net/ethernet/intel/ixgbevf/ipsec.c  if (rsa.xs->props.family == AF_INET6)
rsa 317 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.mode |= IXGBE_RXMOD_IPV6;
rsa 322 drivers/net/ethernet/intel/ixgbevf/ipsec.c  rsa.pfsa = ret;
rsa 325 drivers/net/ethernet/intel/ixgbevf/ipsec.c  memcpy(&ipsec->rx_tbl[sa_idx], &rsa, sizeof(rsa));
rsa 333 drivers/net/ethernet/intel/ixgbevf/ipsec.c  (__force u32)rsa.xs->id.spi);
rsa 655 include/linux/ccp.h  struct ccp_rsa_engine rsa;
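
The crypto/rsa.c and qat_asym_algs.c hits above (lines 242/262/268/278 and 1300/1341/1342/1356) all show the same registration pattern: a file-scope struct akcipher_alg named rsa is handed to crypto_register_akcipher() at module init and torn down with crypto_unregister_akcipher() on exit and on the error path. A minimal sketch of that pattern follows, assuming only the in-tree akcipher API; the callback names (my_rsa_encrypt() and friends) and the driver name are placeholders, not functions from any of the files listed above.

/* Illustrative akcipher registration skeleton -- not code from the listed files. */
#include <linux/module.h>
#include <crypto/internal/akcipher.h>

static int my_rsa_encrypt(struct akcipher_request *req) { return -EOPNOTSUPP; }
static int my_rsa_decrypt(struct akcipher_request *req) { return -EOPNOTSUPP; }
static int my_rsa_set_pub_key(struct crypto_akcipher *tfm,
			      const void *key, unsigned int keylen)
{
	return -EOPNOTSUPP;
}
static unsigned int my_rsa_max_size(struct crypto_akcipher *tfm) { return 0; }

static struct akcipher_alg rsa = {
	.encrypt	= my_rsa_encrypt,
	.decrypt	= my_rsa_decrypt,
	.set_pub_key	= my_rsa_set_pub_key,
	.max_size	= my_rsa_max_size,
	.base = {
		.cra_name	 = "rsa",
		.cra_driver_name = "rsa-example",	/* hypothetical driver name */
		.cra_priority	 = 100,
		.cra_module	 = THIS_MODULE,
	},
};

static int __init rsa_example_init(void)
{
	/* Makes the algorithm visible to crypto_alloc_akcipher("rsa", ...). */
	return crypto_register_akcipher(&rsa);
}

static void __exit rsa_example_exit(void)
{
	crypto_unregister_akcipher(&rsa);
}

module_init(rsa_example_init);
module_exit(rsa_example_exit);
MODULE_LICENSE("GPL");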
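
The ixgbe and ixgbevf ipsec entries (ixgbe_ipsec.c lines 396-409 and ipsec.c lines 183-193) share a receive-SA lookup idiom: walk an RCU-protected hash bucket keyed on the SPI and re-check SPI, destination address and protocol on each candidate. A stripped-down sketch of that idiom is below; struct my_rx_sa, my_sa_table and every field except hlist are illustrative stand-ins, not the drivers' actual definitions.

/* Illustrative RCU hash-table SA lookup -- simplified from the pattern above. */
#include <linux/hashtable.h>
#include <linux/rculist.h>

struct my_rx_sa {
	struct hlist_node hlist;	/* linkage in the lookup hash table */
	__be32 spi;			/* IPsec SPI used as the hash key */
	u8 proto;
	void *xs;			/* would be struct xfrm_state * in the driver */
};

static DEFINE_HASHTABLE(my_sa_table, 6);	/* 64 buckets for the sketch */

static void *my_rx_sa_lookup(__be32 spi, u8 proto)
{
	struct my_rx_sa *rsa;
	void *ret = NULL;

	rcu_read_lock();
	hash_for_each_possible_rcu(my_sa_table, rsa, hlist,
				   (__force u32)spi) {
		/* Buckets can collide, so every key field is re-checked. */
		if (spi == rsa->spi && proto == rsa->proto) {
			ret = rsa->xs;
			break;
		}
	}
	rcu_read_unlock();

	return ret;
}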