Lines Matching refs:ctx

198 static void append_key_aead(u32 *desc, struct caam_ctx *ctx,  in append_key_aead()  argument
202 unsigned int enckeylen = ctx->enckeylen; in append_key_aead()
213 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in append_key_aead()
214 ctx->split_key_len, CLASS_2 | in append_key_aead()
216 append_key_as_imm(desc, (void *)ctx->key + in append_key_aead()
217 ctx->split_key_pad_len, enckeylen, in append_key_aead()
220 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 | in append_key_aead()
222 append_key(desc, ctx->key_dma + ctx->split_key_pad_len, in append_key_aead()
228 nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len + in append_key_aead()
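
The append_key_aead() hits above imply a single contiguous ctx->key buffer: the MDHA split key first, padded to a 16-byte boundary, the cipher key immediately after the pad, and (for RFC3686/CTR mode) a 4-byte nonce after the cipher key. A minimal stand-alone sketch of those offsets; the 40-byte split key and 16-byte cipher key are illustrative assumptions, not values taken from the listing:

#include <stdio.h>

/* same rounding as ALIGN(len, 16) in the hits above */
#define ALIGN16(x) (((x) + 15u) & ~15u)

int main(void)
{
        unsigned int split_key_len = 40;        /* illustrative value */
        unsigned int split_key_pad_len = ALIGN16(split_key_len);
        unsigned int enckeylen = 16;            /* illustrative value */

        printf("split key : offset 0, %u bytes (padded to %u)\n",
               split_key_len, split_key_pad_len);
        printf("cipher key: offset %u, %u bytes\n",
               split_key_pad_len, enckeylen);
        printf("ctr nonce : offset %u (rfc3686 only)\n",
               split_key_pad_len + enckeylen);
        return 0;
}
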
240 static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx, in init_sh_desc_key_aead() argument
252 append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686); in init_sh_desc_key_aead()
259 struct caam_ctx *ctx = crypto_aead_ctx(aead); in aead_null_set_sh_desc() local
260 struct device *jrdev = ctx->jrdev; in aead_null_set_sh_desc()
270 ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX) in aead_null_set_sh_desc()
274 desc = ctx->sh_desc_enc; in aead_null_set_sh_desc()
282 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in aead_null_set_sh_desc()
283 ctx->split_key_len, CLASS_2 | in aead_null_set_sh_desc()
286 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 | in aead_null_set_sh_desc()
311 append_operation(desc, ctx->class2_alg_type | in aead_null_set_sh_desc()
324 append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB | in aead_null_set_sh_desc()
327 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in aead_null_set_sh_desc()
330 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in aead_null_set_sh_desc()
347 ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX) in aead_null_set_sh_desc()
350 desc = ctx->sh_desc_dec; in aead_null_set_sh_desc()
359 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in aead_null_set_sh_desc()
360 ctx->split_key_len, CLASS_2 | in aead_null_set_sh_desc()
363 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 | in aead_null_set_sh_desc()
368 append_operation(desc, ctx->class2_alg_type | in aead_null_set_sh_desc()
409 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 | in aead_null_set_sh_desc()
412 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in aead_null_set_sh_desc()
415 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in aead_null_set_sh_desc()
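
Every *_set_sh_desc() group in this listing ends the way the aead_null_set_sh_desc() hits above do: the freshly built shared descriptor is DMA-mapped toward the device and the mapping is checked before the handle is kept in ctx. A hedged, kernel-style sketch of that pattern (desc_bytes() is assumed to return the built descriptor's length in bytes, as in the driver's descriptor-construction helpers; this is not the driver's exact code):

static int map_sh_desc(struct device *jrdev, u32 *desc, dma_addr_t *dma)
{
        /* hand the shared descriptor to the job ring's device */
        *dma = dma_map_single(jrdev, desc, desc_bytes(desc), DMA_TO_DEVICE);
        if (dma_mapping_error(jrdev, *dma)) {
                dev_err(jrdev, "unable to map shared descriptor\n");
                return -ENOMEM;
        }
        return 0;
}
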
434 struct caam_ctx *ctx = crypto_aead_ctx(aead); in aead_set_sh_desc() local
435 struct device *jrdev = ctx->jrdev; in aead_set_sh_desc()
440 const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == in aead_set_sh_desc()
445 if (!ctx->enckeylen) in aead_set_sh_desc()
472 ctx->split_key_pad_len + ctx->enckeylen + in aead_set_sh_desc()
478 desc = ctx->sh_desc_enc; in aead_set_sh_desc()
481 init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686); in aead_set_sh_desc()
484 append_operation(desc, ctx->class2_alg_type | in aead_set_sh_desc()
507 append_operation(desc, ctx->class1_alg_type | in aead_set_sh_desc()
516 append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB | in aead_set_sh_desc()
519 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in aead_set_sh_desc()
522 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in aead_set_sh_desc()
539 ctx->split_key_pad_len + ctx->enckeylen + in aead_set_sh_desc()
545 desc = ctx->sh_desc_dec; in aead_set_sh_desc()
548 init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686); in aead_set_sh_desc()
551 append_operation(desc, ctx->class2_alg_type | in aead_set_sh_desc()
575 append_operation(desc, ctx->class1_alg_type | in aead_set_sh_desc()
578 append_dec_op1(desc, ctx->class1_alg_type); in aead_set_sh_desc()
586 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 | in aead_set_sh_desc()
589 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in aead_set_sh_desc()
592 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in aead_set_sh_desc()
611 ctx->split_key_pad_len + ctx->enckeylen + in aead_set_sh_desc()
617 desc = ctx->sh_desc_givenc; in aead_set_sh_desc()
620 init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686); in aead_set_sh_desc()
645 append_operation(desc, ctx->class2_alg_type | in aead_set_sh_desc()
649 append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize); in aead_set_sh_desc()
679 append_operation(desc, ctx->class1_alg_type | in aead_set_sh_desc()
694 append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB | in aead_set_sh_desc()
697 ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc, in aead_set_sh_desc()
700 if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) { in aead_set_sh_desc()
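
The aead_set_sh_desc() hits above repeat one decision per descriptor (encrypt, decrypt, givencrypt): if the fixed descriptor commands plus the padded split key, the cipher key and, for RFC3686, the extra CTR material still fit in the shared-descriptor buffer (CAAM_DESC_BYTES_MAX), the keys are embedded immediately via append_key_as_imm(); otherwise only their DMA address is referenced via append_key(). A hedged sketch of just the size check, with the 256-byte limit and the fixed lengths treated as assumptions rather than values from the listing:

#include <stdbool.h>
#include <stdio.h>

#define DESC_BYTES_MAX          256u    /* assumed: 64 x 32-bit descriptor words */
#define RFC3686_EXTRA_LEN       32u     /* assumed extra CTR/RFC3686 commands */

static bool keys_fit_inline(unsigned int fixed_len,
                            unsigned int split_key_pad_len,
                            unsigned int enckeylen, bool is_rfc3686)
{
        return fixed_len + split_key_pad_len + enckeylen +
               (is_rfc3686 ? RFC3686_EXTRA_LEN : 0) <= DESC_BYTES_MAX;
}

int main(void)
{
        /* illustrative numbers only */
        printf("small keys: %s\n",
               keys_fit_inline(80, 48, 16, false) ? "inline" : "by reference");
        printf("large keys: %s\n",
               keys_fit_inline(80, 128, 32, true) ? "inline" : "by reference");
        return 0;
}
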
717 struct caam_ctx *ctx = crypto_aead_ctx(authenc); in aead_setauthsize() local
719 ctx->authsize = authsize; in aead_setauthsize()
727 struct caam_ctx *ctx = crypto_aead_ctx(aead); in gcm_set_sh_desc() local
728 struct device *jrdev = ctx->jrdev; in gcm_set_sh_desc()
734 if (!ctx->enckeylen || !ctx->authsize) in gcm_set_sh_desc()
743 ctx->enckeylen <= CAAM_DESC_BYTES_MAX) in gcm_set_sh_desc()
746 desc = ctx->sh_desc_enc; in gcm_set_sh_desc()
754 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in gcm_set_sh_desc()
755 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in gcm_set_sh_desc()
757 append_key(desc, ctx->key_dma, ctx->enckeylen, in gcm_set_sh_desc()
762 append_operation(desc, ctx->class1_alg_type | in gcm_set_sh_desc()
815 append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB | in gcm_set_sh_desc()
818 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in gcm_set_sh_desc()
821 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in gcm_set_sh_desc()
837 ctx->enckeylen <= CAAM_DESC_BYTES_MAX) in gcm_set_sh_desc()
840 desc = ctx->sh_desc_dec; in gcm_set_sh_desc()
849 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in gcm_set_sh_desc()
850 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in gcm_set_sh_desc()
852 append_key(desc, ctx->key_dma, ctx->enckeylen, in gcm_set_sh_desc()
857 append_operation(desc, ctx->class1_alg_type | in gcm_set_sh_desc()
896 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 | in gcm_set_sh_desc()
899 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in gcm_set_sh_desc()
902 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in gcm_set_sh_desc()
917 struct caam_ctx *ctx = crypto_aead_ctx(authenc); in gcm_setauthsize() local
919 ctx->authsize = authsize; in gcm_setauthsize()
927 struct caam_ctx *ctx = crypto_aead_ctx(aead); in rfc4106_set_sh_desc() local
928 struct device *jrdev = ctx->jrdev; in rfc4106_set_sh_desc()
933 if (!ctx->enckeylen || !ctx->authsize) in rfc4106_set_sh_desc()
942 ctx->enckeylen <= CAAM_DESC_BYTES_MAX) in rfc4106_set_sh_desc()
945 desc = ctx->sh_desc_enc; in rfc4106_set_sh_desc()
953 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in rfc4106_set_sh_desc()
954 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in rfc4106_set_sh_desc()
956 append_key(desc, ctx->key_dma, ctx->enckeylen, in rfc4106_set_sh_desc()
961 append_operation(desc, ctx->class1_alg_type | in rfc4106_set_sh_desc()
994 append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB | in rfc4106_set_sh_desc()
997 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in rfc4106_set_sh_desc()
1000 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in rfc4106_set_sh_desc()
1016 ctx->enckeylen <= CAAM_DESC_BYTES_MAX) in rfc4106_set_sh_desc()
1019 desc = ctx->sh_desc_dec; in rfc4106_set_sh_desc()
1027 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in rfc4106_set_sh_desc()
1028 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in rfc4106_set_sh_desc()
1030 append_key(desc, ctx->key_dma, ctx->enckeylen, in rfc4106_set_sh_desc()
1035 append_operation(desc, ctx->class1_alg_type | in rfc4106_set_sh_desc()
1068 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 | in rfc4106_set_sh_desc()
1071 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in rfc4106_set_sh_desc()
1074 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in rfc4106_set_sh_desc()
1090 struct caam_ctx *ctx = crypto_aead_ctx(authenc); in rfc4106_setauthsize() local
1092 ctx->authsize = authsize; in rfc4106_setauthsize()
1100 struct caam_ctx *ctx = crypto_aead_ctx(aead); in rfc4543_set_sh_desc() local
1101 struct device *jrdev = ctx->jrdev; in rfc4543_set_sh_desc()
1107 if (!ctx->enckeylen || !ctx->authsize) in rfc4543_set_sh_desc()
1116 ctx->enckeylen <= CAAM_DESC_BYTES_MAX) in rfc4543_set_sh_desc()
1119 desc = ctx->sh_desc_enc; in rfc4543_set_sh_desc()
1127 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in rfc4543_set_sh_desc()
1128 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in rfc4543_set_sh_desc()
1130 append_key(desc, ctx->key_dma, ctx->enckeylen, in rfc4543_set_sh_desc()
1135 append_operation(desc, ctx->class1_alg_type | in rfc4543_set_sh_desc()
1167 append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB | in rfc4543_set_sh_desc()
1170 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in rfc4543_set_sh_desc()
1173 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in rfc4543_set_sh_desc()
1189 ctx->enckeylen <= CAAM_DESC_BYTES_MAX) in rfc4543_set_sh_desc()
1192 desc = ctx->sh_desc_dec; in rfc4543_set_sh_desc()
1200 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in rfc4543_set_sh_desc()
1201 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in rfc4543_set_sh_desc()
1203 append_key(desc, ctx->key_dma, ctx->enckeylen, in rfc4543_set_sh_desc()
1208 append_operation(desc, ctx->class1_alg_type | in rfc4543_set_sh_desc()
1245 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 | in rfc4543_set_sh_desc()
1248 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in rfc4543_set_sh_desc()
1251 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in rfc4543_set_sh_desc()
1267 struct caam_ctx *ctx = crypto_aead_ctx(authenc); in rfc4543_setauthsize() local
1269 ctx->authsize = authsize; in rfc4543_setauthsize()
1275 static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in, in gen_split_aead_key() argument
1278 return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len, in gen_split_aead_key()
1279 ctx->split_key_pad_len, key_in, authkeylen, in gen_split_aead_key()
1280 ctx->alg_op); in gen_split_aead_key()
1288 struct caam_ctx *ctx = crypto_aead_ctx(aead); in aead_setkey() local
1289 struct device *jrdev = ctx->jrdev; in aead_setkey()
1297 ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >> in aead_setkey()
1299 ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16); in aead_setkey()
1301 if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE) in aead_setkey()
1309 ctx->split_key_len, ctx->split_key_pad_len); in aead_setkey()
1314 ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen); in aead_setkey()
1320 memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen); in aead_setkey()
1322 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len + in aead_setkey()
1324 if (dma_mapping_error(jrdev, ctx->key_dma)) { in aead_setkey()
1330 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, in aead_setkey()
1331 ctx->split_key_pad_len + keys.enckeylen, 1); in aead_setkey()
1334 ctx->enckeylen = keys.enckeylen; in aead_setkey()
1338 dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len + in aead_setkey()
1351 struct caam_ctx *ctx = crypto_aead_ctx(aead); in gcm_setkey() local
1352 struct device *jrdev = ctx->jrdev; in gcm_setkey()
1360 memcpy(ctx->key, key, keylen); in gcm_setkey()
1361 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, in gcm_setkey()
1363 if (dma_mapping_error(jrdev, ctx->key_dma)) { in gcm_setkey()
1367 ctx->enckeylen = keylen; in gcm_setkey()
1371 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen, in gcm_setkey()
1381 struct caam_ctx *ctx = crypto_aead_ctx(aead); in rfc4106_setkey() local
1382 struct device *jrdev = ctx->jrdev; in rfc4106_setkey()
1393 memcpy(ctx->key, key, keylen); in rfc4106_setkey()
1399 ctx->enckeylen = keylen - 4; in rfc4106_setkey()
1401 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen, in rfc4106_setkey()
1403 if (dma_mapping_error(jrdev, ctx->key_dma)) { in rfc4106_setkey()
1410 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen, in rfc4106_setkey()
1420 struct caam_ctx *ctx = crypto_aead_ctx(aead); in rfc4543_setkey() local
1421 struct device *jrdev = ctx->jrdev; in rfc4543_setkey()
1432 memcpy(ctx->key, key, keylen); in rfc4543_setkey()
1438 ctx->enckeylen = keylen - 4; in rfc4543_setkey()
1440 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen, in rfc4543_setkey()
1442 if (dma_mapping_error(jrdev, ctx->key_dma)) { in rfc4543_setkey()
1449 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen, in rfc4543_setkey()
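
In both rfc4106_setkey() and rfc4543_setkey() above, the whole input key is copied into ctx->key but ctx->enckeylen is set to keylen - 4: per the GCM ESP conventions (RFC 4106/4543), the last four key bytes are the salt and are not part of the AES key proper. A small stand-alone sketch of that split; the key bytes are made up for illustration:

#include <stdio.h>

int main(void)
{
        /* illustrative 20-byte rfc4106 key: 16-byte AES key + 4-byte salt */
        unsigned char key[20] = {
                0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
                0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
                0xde, 0xad, 0xbe, 0xef,
        };
        unsigned int keylen = sizeof(key);
        unsigned int enckeylen = keylen - 4;    /* as in the setkey hits above */

        printf("AES key bytes: %u\n", enckeylen);
        printf("salt: %02x%02x%02x%02x\n",
               key[enckeylen], key[enckeylen + 1],
               key[enckeylen + 2], key[enckeylen + 3]);
        return 0;
}
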
1459 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); in ablkcipher_setkey() local
1463 struct device *jrdev = ctx->jrdev; in ablkcipher_setkey()
1470 const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == in ablkcipher_setkey()
1497 memcpy(ctx->key, key, keylen); in ablkcipher_setkey()
1498 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, in ablkcipher_setkey()
1500 if (dma_mapping_error(jrdev, ctx->key_dma)) { in ablkcipher_setkey()
1504 ctx->enckeylen = keylen; in ablkcipher_setkey()
1507 desc = ctx->sh_desc_enc; in ablkcipher_setkey()
1514 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in ablkcipher_setkey()
1515 ctx->enckeylen, CLASS_1 | in ablkcipher_setkey()
1545 append_operation(desc, ctx->class1_alg_type | in ablkcipher_setkey()
1551 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in ablkcipher_setkey()
1554 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in ablkcipher_setkey()
1565 desc = ctx->sh_desc_dec; in ablkcipher_setkey()
1573 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in ablkcipher_setkey()
1574 ctx->enckeylen, CLASS_1 | in ablkcipher_setkey()
1605 append_operation(desc, ctx->class1_alg_type | in ablkcipher_setkey()
1608 append_dec_op1(desc, ctx->class1_alg_type); in ablkcipher_setkey()
1613 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in ablkcipher_setkey()
1616 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in ablkcipher_setkey()
1628 desc = ctx->sh_desc_givenc; in ablkcipher_setkey()
1636 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in ablkcipher_setkey()
1637 ctx->enckeylen, CLASS_1 | in ablkcipher_setkey()
1685 append_operation(desc, ctx->class1_alg_type | in ablkcipher_setkey()
1691 ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc, in ablkcipher_setkey()
1694 if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) { in ablkcipher_setkey()
1711 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); in xts_ablkcipher_setkey() local
1712 struct device *jrdev = ctx->jrdev; in xts_ablkcipher_setkey()
1723 memcpy(ctx->key, key, keylen); in xts_ablkcipher_setkey()
1724 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE); in xts_ablkcipher_setkey()
1725 if (dma_mapping_error(jrdev, ctx->key_dma)) { in xts_ablkcipher_setkey()
1729 ctx->enckeylen = keylen; in xts_ablkcipher_setkey()
1732 desc = ctx->sh_desc_enc; in xts_ablkcipher_setkey()
1739 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in xts_ablkcipher_setkey()
1740 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in xts_ablkcipher_setkey()
1759 append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL | in xts_ablkcipher_setkey()
1765 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc), in xts_ablkcipher_setkey()
1767 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in xts_ablkcipher_setkey()
1778 desc = ctx->sh_desc_dec; in xts_ablkcipher_setkey()
1786 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen, in xts_ablkcipher_setkey()
1787 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG); in xts_ablkcipher_setkey()
1806 append_dec_op1(desc, ctx->class1_alg_type); in xts_ablkcipher_setkey()
1811 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc), in xts_ablkcipher_setkey()
1813 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in xts_ablkcipher_setkey()
1814 dma_unmap_single(jrdev, ctx->sh_desc_enc_dma, in xts_ablkcipher_setkey()
1815 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE); in xts_ablkcipher_setkey()
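
xts_ablkcipher_setkey() above stores the whole user key and builds the encrypt/decrypt descriptors from it. An XTS key is, by definition (IEEE P1619), two equally sized AES keys concatenated: the data-unit key followed by the tweak key. A stand-alone sketch of that split; the length check mirrors the general XTS convention and is an assumption about the driver, not a line taken from the listing:

#include <stdio.h>

static int xts_split_key(const unsigned char *key, unsigned int keylen,
                         const unsigned char **key1, const unsigned char **key2)
{
        /* XTS key = data-unit key || tweak key, each half an AES key */
        if (keylen != 32 && keylen != 64)
                return -1;
        *key1 = key;
        *key2 = key + keylen / 2;
        return 0;
}

int main(void)
{
        unsigned char key[32] = { 0 };  /* illustrative xts(aes-128) key */
        const unsigned char *k1, *k2;

        if (!xts_split_key(key, sizeof(key), &k1, &k2))
                printf("each half is %zu bytes\n", sizeof(key) / 2);
        return 0;
}
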
2035 struct caam_ctx *ctx = crypto_aead_ctx(aead); in init_aead_job() local
2036 int authsize = ctx->authsize; in init_aead_job()
2044 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; in init_aead_job()
2045 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; in init_aead_job()
2094 struct caam_ctx *ctx = crypto_aead_ctx(aead); in init_gcm_job() local
2112 append_data(desc, ctx->key + ctx->enckeylen, 4); in init_gcm_job()
2126 struct caam_ctx *ctx = crypto_aead_ctx(aead); in init_authenc_job() local
2127 const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == in init_authenc_job()
2270 struct caam_ctx *ctx = crypto_aead_ctx(aead); in aead_edesc_alloc() local
2271 struct device *jrdev = ctx->jrdev; in aead_edesc_alloc()
2279 unsigned int authsize = ctx->authsize; in aead_edesc_alloc()
2377 struct caam_ctx *ctx = crypto_aead_ctx(aead); in gcm_encrypt() local
2378 struct device *jrdev = ctx->jrdev; in gcm_encrypt()
2420 struct caam_ctx *ctx = crypto_aead_ctx(aead); in aead_encrypt() local
2421 struct device *jrdev = ctx->jrdev; in aead_encrypt()
2456 struct caam_ctx *ctx = crypto_aead_ctx(aead); in gcm_decrypt() local
2457 struct device *jrdev = ctx->jrdev; in gcm_decrypt()
2499 struct caam_ctx *ctx = crypto_aead_ctx(aead); in aead_decrypt() local
2500 struct device *jrdev = ctx->jrdev; in aead_decrypt()
2559 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); in ablkcipher_edesc_alloc() local
2560 struct device *jrdev = ctx->jrdev; in ablkcipher_edesc_alloc()
2654 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); in ablkcipher_encrypt() local
2655 struct device *jrdev = ctx->jrdev; in ablkcipher_encrypt()
2667 init_ablkcipher_job(ctx->sh_desc_enc, in ablkcipher_encrypt()
2668 ctx->sh_desc_enc_dma, edesc, req, iv_contig); in ablkcipher_encrypt()
2691 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); in ablkcipher_decrypt() local
2692 struct device *jrdev = ctx->jrdev; in ablkcipher_decrypt()
2704 init_ablkcipher_job(ctx->sh_desc_dec, in ablkcipher_decrypt()
2705 ctx->sh_desc_dec_dma, edesc, req, iv_contig); in ablkcipher_decrypt()
2735 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); in ablkcipher_giv_edesc_alloc() local
2736 struct device *jrdev = ctx->jrdev; in ablkcipher_giv_edesc_alloc()
2832 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); in ablkcipher_givencrypt() local
2833 struct device *jrdev = ctx->jrdev; in ablkcipher_givencrypt()
2845 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma, in ablkcipher_givencrypt()
4328 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam) in caam_init_common() argument
4330 ctx->jrdev = caam_jr_alloc(); in caam_init_common()
4331 if (IS_ERR(ctx->jrdev)) { in caam_init_common()
4333 return PTR_ERR(ctx->jrdev); in caam_init_common()
4337 ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; in caam_init_common()
4338 ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; in caam_init_common()
4339 ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op; in caam_init_common()
4349 struct caam_ctx *ctx = crypto_tfm_ctx(tfm); in caam_cra_init() local
4351 return caam_init_common(ctx, &caam_alg->caam); in caam_cra_init()
4359 struct caam_ctx *ctx = crypto_aead_ctx(tfm); in caam_aead_init() local
4361 return caam_init_common(ctx, &caam_alg->caam); in caam_aead_init()
4364 static void caam_exit_common(struct caam_ctx *ctx) in caam_exit_common() argument
4366 if (ctx->sh_desc_enc_dma && in caam_exit_common()
4367 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma)) in caam_exit_common()
4368 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma, in caam_exit_common()
4369 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE); in caam_exit_common()
4370 if (ctx->sh_desc_dec_dma && in caam_exit_common()
4371 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma)) in caam_exit_common()
4372 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma, in caam_exit_common()
4373 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE); in caam_exit_common()
4374 if (ctx->sh_desc_givenc_dma && in caam_exit_common()
4375 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma)) in caam_exit_common()
4376 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma, in caam_exit_common()
4377 desc_bytes(ctx->sh_desc_givenc), in caam_exit_common()
4379 if (ctx->key_dma && in caam_exit_common()
4380 !dma_mapping_error(ctx->jrdev, ctx->key_dma)) in caam_exit_common()
4381 dma_unmap_single(ctx->jrdev, ctx->key_dma, in caam_exit_common()
4382 ctx->enckeylen + ctx->split_key_pad_len, in caam_exit_common()
4385 caam_jr_free(ctx->jrdev); in caam_exit_common()
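
The caam_exit_common() hits are the teardown counterpart of the mapping calls earlier in the listing: each shared-descriptor handle and the key handle are unmapped only if they were set and did not fail dma_mapping_error(), and the job ring is released last. A hedged sketch of that guard for a single handle (not the driver's exact code):

static void unmap_if_mapped(struct device *jrdev, dma_addr_t dma, size_t len)
{
        /* skip handles that were never mapped or whose mapping failed */
        if (dma && !dma_mapping_error(jrdev, dma))
                dma_unmap_single(jrdev, dma, len, DMA_TO_DEVICE);
}
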