Lines matching references to ctx (struct caam_hash_ctx) in the Linux CAAM ahash driver, drivers/crypto/caam/caamhash.c:
227 static inline void append_key_ahash(u32 *desc, struct caam_hash_ctx *ctx) in append_key_ahash() argument
229 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in append_key_ahash()
230 ctx->split_key_len, CLASS_2 | in append_key_ahash()
235 static inline void init_sh_desc_key_ahash(u32 *desc, struct caam_hash_ctx *ctx) in init_sh_desc_key_ahash() argument
241 if (ctx->split_key_len) { in init_sh_desc_key_ahash()
246 append_key_ahash(desc, ctx); in init_sh_desc_key_ahash()
279 struct caam_hash_ctx *ctx) in ahash_ctx_data_to_out() argument
281 init_sh_desc_key_ahash(desc, ctx); in ahash_ctx_data_to_out()
285 LDST_CLASS_2_CCB | ctx->ctx_len); in ahash_ctx_data_to_out()
298 int digestsize, struct caam_hash_ctx *ctx) in ahash_data_to_out() argument
300 init_sh_desc_key_ahash(desc, ctx); in ahash_data_to_out()
313 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_set_sh_desc() local
315 struct device *jrdev = ctx->jrdev; in ahash_set_sh_desc()
319 if (ctx->split_key_len) in ahash_set_sh_desc()
323 desc = ctx->sh_desc_update; in ahash_set_sh_desc()
329 LDST_CLASS_2_CCB | ctx->ctx_len); in ahash_set_sh_desc()
332 append_operation(desc, ctx->alg_type | OP_ALG_AS_UPDATE | in ahash_set_sh_desc()
336 ahash_append_load_str(desc, ctx->ctx_len); in ahash_set_sh_desc()
338 ctx->sh_desc_update_dma = dma_map_single(jrdev, desc, desc_bytes(desc), in ahash_set_sh_desc()
340 if (dma_mapping_error(jrdev, ctx->sh_desc_update_dma)) { in ahash_set_sh_desc()
351 desc = ctx->sh_desc_update_first; in ahash_set_sh_desc()
353 ahash_data_to_out(desc, have_key | ctx->alg_type, OP_ALG_AS_INIT, in ahash_set_sh_desc()
354 ctx->ctx_len, ctx); in ahash_set_sh_desc()
356 ctx->sh_desc_update_first_dma = dma_map_single(jrdev, desc, in ahash_set_sh_desc()
359 if (dma_mapping_error(jrdev, ctx->sh_desc_update_first_dma)) { in ahash_set_sh_desc()
370 desc = ctx->sh_desc_fin; in ahash_set_sh_desc()
372 ahash_ctx_data_to_out(desc, have_key | ctx->alg_type, in ahash_set_sh_desc()
373 OP_ALG_AS_FINALIZE, digestsize, ctx); in ahash_set_sh_desc()
375 ctx->sh_desc_fin_dma = dma_map_single(jrdev, desc, desc_bytes(desc), in ahash_set_sh_desc()
377 if (dma_mapping_error(jrdev, ctx->sh_desc_fin_dma)) { in ahash_set_sh_desc()
388 desc = ctx->sh_desc_finup; in ahash_set_sh_desc()
390 ahash_ctx_data_to_out(desc, have_key | ctx->alg_type, in ahash_set_sh_desc()
391 OP_ALG_AS_FINALIZE, digestsize, ctx); in ahash_set_sh_desc()
393 ctx->sh_desc_finup_dma = dma_map_single(jrdev, desc, desc_bytes(desc), in ahash_set_sh_desc()
395 if (dma_mapping_error(jrdev, ctx->sh_desc_finup_dma)) { in ahash_set_sh_desc()
406 desc = ctx->sh_desc_digest; in ahash_set_sh_desc()
408 ahash_data_to_out(desc, have_key | ctx->alg_type, OP_ALG_AS_INITFINAL, in ahash_set_sh_desc()
409 digestsize, ctx); in ahash_set_sh_desc()
411 ctx->sh_desc_digest_dma = dma_map_single(jrdev, desc, in ahash_set_sh_desc()
414 if (dma_mapping_error(jrdev, ctx->sh_desc_digest_dma)) { in ahash_set_sh_desc()
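
The ahash_set_sh_desc() lines above repeat one idiom per shared descriptor (update, update_first, fin, finup, digest): build the descriptor, map it with dma_map_single(), and fail the call if dma_mapping_error() reports a bad handle. A minimal sketch of that idiom follows; map_sh_desc() is a hypothetical helper name and -ENOMEM an assumed error code, while the real driver open-codes this per descriptor and uses desc_bytes(desc) for the length.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

/*
 * Sketch of the map-and-check idiom from ahash_set_sh_desc().
 * map_sh_desc() is a hypothetical helper; the driver open-codes this
 * once per shared descriptor and uses desc_bytes(desc) for the length.
 */
static int map_sh_desc(struct device *jrdev, u32 *desc, size_t len,
		       dma_addr_t *dma_out)
{
	*dma_out = dma_map_single(jrdev, desc, len, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, *dma_out)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

	return 0;
}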
428 static int gen_split_hash_key(struct caam_hash_ctx *ctx, const u8 *key_in, in gen_split_hash_key() argument
431 return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len, in gen_split_hash_key()
432 ctx->split_key_pad_len, key_in, keylen, in gen_split_hash_key()
433 ctx->alg_op); in gen_split_hash_key()
437 static int hash_digest_key(struct caam_hash_ctx *ctx, const u8 *key_in, in hash_digest_key() argument
440 struct device *jrdev = ctx->jrdev; in hash_digest_key()
471 append_operation(desc, ctx->alg_type | OP_ALG_ENCRYPT | in hash_digest_key()
517 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_setkey() local
518 struct device *jrdev = ctx->jrdev; in ahash_setkey()
533 ret = hash_digest_key(ctx, key, &keylen, hashed_key, in ahash_setkey()
541 ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >> in ahash_setkey()
543 ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16); in ahash_setkey()
547 ctx->split_key_len, ctx->split_key_pad_len); in ahash_setkey()
552 ret = gen_split_hash_key(ctx, key, keylen); in ahash_setkey()
556 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len, in ahash_setkey()
558 if (dma_mapping_error(jrdev, ctx->key_dma)) { in ahash_setkey()
565 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, in ahash_setkey()
566 ctx->split_key_pad_len, 1); in ahash_setkey()
571 dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len, in ahash_setkey()
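
ahash_setkey() above pads the derived split-key length to a 16-byte boundary with ALIGN(), maps ctx->key for the device, and unmaps it again on the failure path. A hedged sketch of that tail follows; key_buf and do_later_setup() are stand-ins, and the DMA_TO_DEVICE direction is assumed to match the unmap shown in the error path.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/kernel.h>	/* ALIGN() */

/*
 * Sketch of the tail of ahash_setkey(): pad the split-key length, map the
 * key buffer, and unmap it if a later step fails.  key_buf and the
 * do_later_setup() hook are hypothetical stand-ins for the driver's own
 * buffer and follow-up descriptor setup (ahash_set_sh_desc()).
 */
static int setkey_tail_sketch(struct device *jrdev, u8 *key_buf,
			      unsigned int split_key_len,
			      int (*do_later_setup)(struct device *))
{
	unsigned int pad_len = ALIGN(split_key_len, 16);
	dma_addr_t key_dma;
	int ret;

	key_dma = dma_map_single(jrdev, key_buf, pad_len, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, key_dma))
		return -ENOMEM;

	ret = do_later_setup(jrdev);
	if (ret)
		dma_unmap_single(jrdev, key_dma, pad_len, DMA_TO_DEVICE);

	return ret;
}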
624 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_unmap_ctx() local
628 dma_unmap_single(dev, state->ctx_dma, ctx->ctx_len, flag); in ahash_unmap_ctx()
640 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_done() local
657 ctx->ctx_len, 1); in ahash_done()
673 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_done_bi() local
686 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL); in ahash_done_bi()
692 ctx->ctx_len, 1); in ahash_done_bi()
710 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_done_ctx_src() local
727 ctx->ctx_len, 1); in ahash_done_ctx_src()
743 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_done_ctx_dst() local
756 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
762 ctx->ctx_len, 1); in ahash_done_ctx_dst()
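
The ahash_done*() callbacks above all follow the job-ring completion shape: they receive the CAAM status word, unmap what the request path mapped (ahash_unmap_ctx() with that path's DMA direction), and complete the ahash request. A sketch of that shape follows, assuming the callback signature caam_jr_enqueue() expects; status decoding and unmapping are reduced to comments.

#include <linux/device.h>
#include <linux/errno.h>
#include <crypto/hash.h>	/* struct ahash_request */

/*
 * Sketch of the completion-callback shape shared by ahash_done(),
 * ahash_done_bi(), ahash_done_ctx_src() and ahash_done_ctx_dst().
 */
static void ahash_done_sketch(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct ahash_request *req = context;
	int ecode = 0;

	if (err)
		ecode = -EIO;	/* driver decodes err, e.g. caam_jr_strstatus() */

	/* ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, <direction>); */

	req->base.complete(&req->base, ecode);
}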
776 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_update_ctx() local
778 struct device *jrdev = ctx->jrdev; in ahash_update_ctx()
787 u32 *sh_desc = ctx->sh_desc_update, *desc; in ahash_update_ctx()
788 dma_addr_t ptr = ctx->sh_desc_update_dma; in ahash_update_ctx()
824 ret = ctx_map_to_sec4_sg(desc, jrdev, state, ctx->ctx_len, in ahash_update_ctx()
862 append_seq_in_ptr(desc, edesc->sec4_sg_dma, ctx->ctx_len + in ahash_update_ctx()
865 append_seq_out_ptr(desc, state->ctx_dma, ctx->ctx_len, 0); in ahash_update_ctx()
877 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, in ahash_update_ctx()
901 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_final_ctx() local
903 struct device *jrdev = ctx->jrdev; in ahash_final_ctx()
910 u32 *sh_desc = ctx->sh_desc_fin, *desc; in ahash_final_ctx()
911 dma_addr_t ptr = ctx->sh_desc_fin_dma; in ahash_final_ctx()
938 ret = ctx_map_to_sec4_sg(desc, jrdev, state, ctx->ctx_len, in ahash_final_ctx()
955 append_seq_in_ptr(desc, edesc->sec4_sg_dma, ctx->ctx_len + buflen, in ahash_final_ctx()
984 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_finup_ctx() local
986 struct device *jrdev = ctx->jrdev; in ahash_finup_ctx()
993 u32 *sh_desc = ctx->sh_desc_finup, *desc; in ahash_finup_ctx()
994 dma_addr_t ptr = ctx->sh_desc_finup_dma; in ahash_finup_ctx()
1026 ret = ctx_map_to_sec4_sg(desc, jrdev, state, ctx->ctx_len, in ahash_finup_ctx()
1045 append_seq_in_ptr(desc, edesc->sec4_sg_dma, ctx->ctx_len + in ahash_finup_ctx()
1074 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_digest() local
1075 struct device *jrdev = ctx->jrdev; in ahash_digest()
1078 u32 *sh_desc = ctx->sh_desc_digest, *desc; in ahash_digest()
1079 dma_addr_t ptr = ctx->sh_desc_digest_dma; in ahash_digest()
1154 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_final_no_ctx() local
1156 struct device *jrdev = ctx->jrdev; in ahash_final_no_ctx()
1161 u32 *sh_desc = ctx->sh_desc_digest, *desc; in ahash_final_no_ctx()
1162 dma_addr_t ptr = ctx->sh_desc_digest_dma; in ahash_final_no_ctx()
1217 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_update_no_ctx() local
1219 struct device *jrdev = ctx->jrdev; in ahash_update_no_ctx()
1230 u32 *desc, *sh_desc = ctx->sh_desc_update_first; in ahash_update_no_ctx()
1231 dma_addr_t ptr = ctx->sh_desc_update_first_dma; in ahash_update_no_ctx()
1290 ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len); in ahash_update_no_ctx()
1307 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, in ahash_update_no_ctx()
1332 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_finup_no_ctx() local
1334 struct device *jrdev = ctx->jrdev; in ahash_finup_no_ctx()
1341 u32 *sh_desc = ctx->sh_desc_digest, *desc; in ahash_finup_no_ctx()
1342 dma_addr_t ptr = ctx->sh_desc_digest_dma; in ahash_finup_no_ctx()
1417 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_update_first() local
1419 struct device *jrdev = ctx->jrdev; in ahash_update_first()
1426 u32 *sh_desc = ctx->sh_desc_update_first, *desc; in ahash_update_first()
1427 dma_addr_t ptr = ctx->sh_desc_update_first_dma; in ahash_update_first()
1495 ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len); in ahash_update_first()
1513 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, in ahash_update_first()
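
Every update/final/finup/digest path above builds a job descriptor on top of one of the pre-built shared descriptors: the job header points at the shared descriptor's DMA address, SEQ IN describes the input (running context plus new data, usually via a sec4 scatter/gather table), and SEQ OUT describes where the updated context or digest lands. A hedged sketch of that skeleton follows, assuming the driver-local desc_constr.h helpers; the LDST_SGF flag and lengths mirror the append_seq_in_ptr()/append_seq_out_ptr() lines above.

#include "desc_constr.h"	/* driver-local CAAM descriptor helpers */

/*
 * Sketch of the job-descriptor skeleton used by the ahash update, final,
 * finup and digest paths: reuse a shared descriptor, point SEQ IN at the
 * scatter/gather table holding ctx + data, and SEQ OUT at the running
 * context written back by the CCB.
 */
static void build_ahash_job_desc(u32 *desc, u32 *sh_desc, dma_addr_t sh_ptr,
				 dma_addr_t src_dma, u32 src_len,
				 dma_addr_t ctx_dma, u32 ctx_len)
{
	int sh_len = desc_len(sh_desc);

	init_job_desc_shared(desc, sh_ptr, sh_len,
			     HDR_SHARE_DEFER | HDR_REVERSE);
	append_seq_in_ptr(desc, src_dma, src_len, LDST_SGF);
	append_seq_out_ptr(desc, ctx_dma, ctx_len, 0);
}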
1578 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_export() local
1581 memcpy(out, ctx, sizeof(struct caam_hash_ctx)); in ahash_export()
1590 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); in ahash_import() local
1593 memcpy(ctx, in, sizeof(struct caam_hash_ctx)); in ahash_import()
1758 struct caam_hash_ctx *ctx = crypto_tfm_ctx(tfm); in caam_hash_cra_init() local
1772 ctx->jrdev = caam_jr_alloc(); in caam_hash_cra_init()
1773 if (IS_ERR(ctx->jrdev)) { in caam_hash_cra_init()
1775 return PTR_ERR(ctx->jrdev); in caam_hash_cra_init()
1778 ctx->alg_type = OP_TYPE_CLASS2_ALG | caam_hash->alg_type; in caam_hash_cra_init()
1779 ctx->alg_op = OP_TYPE_CLASS2_ALG | caam_hash->alg_op; in caam_hash_cra_init()
1781 ctx->ctx_len = runninglen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >> in caam_hash_cra_init()
1794 struct caam_hash_ctx *ctx = crypto_tfm_ctx(tfm); in caam_hash_cra_exit() local
1796 if (ctx->sh_desc_update_dma && in caam_hash_cra_exit()
1797 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_update_dma)) in caam_hash_cra_exit()
1798 dma_unmap_single(ctx->jrdev, ctx->sh_desc_update_dma, in caam_hash_cra_exit()
1799 desc_bytes(ctx->sh_desc_update), in caam_hash_cra_exit()
1801 if (ctx->sh_desc_update_first_dma && in caam_hash_cra_exit()
1802 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_update_first_dma)) in caam_hash_cra_exit()
1803 dma_unmap_single(ctx->jrdev, ctx->sh_desc_update_first_dma, in caam_hash_cra_exit()
1804 desc_bytes(ctx->sh_desc_update_first), in caam_hash_cra_exit()
1806 if (ctx->sh_desc_fin_dma && in caam_hash_cra_exit()
1807 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_fin_dma)) in caam_hash_cra_exit()
1808 dma_unmap_single(ctx->jrdev, ctx->sh_desc_fin_dma, in caam_hash_cra_exit()
1809 desc_bytes(ctx->sh_desc_fin), DMA_TO_DEVICE); in caam_hash_cra_exit()
1810 if (ctx->sh_desc_digest_dma && in caam_hash_cra_exit()
1811 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_digest_dma)) in caam_hash_cra_exit()
1812 dma_unmap_single(ctx->jrdev, ctx->sh_desc_digest_dma, in caam_hash_cra_exit()
1813 desc_bytes(ctx->sh_desc_digest), in caam_hash_cra_exit()
1815 if (ctx->sh_desc_finup_dma && in caam_hash_cra_exit()
1816 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_finup_dma)) in caam_hash_cra_exit()
1817 dma_unmap_single(ctx->jrdev, ctx->sh_desc_finup_dma, in caam_hash_cra_exit()
1818 desc_bytes(ctx->sh_desc_finup), DMA_TO_DEVICE); in caam_hash_cra_exit()
1820 caam_jr_free(ctx->jrdev); in caam_hash_cra_exit()
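
caam_hash_cra_exit() above repeats one guarded unmap per shared descriptor before releasing the job ring with caam_jr_free(). A minimal sketch of that guard follows; desc_len_bytes stands in for desc_bytes(desc).

#include <linux/device.h>
#include <linux/dma-mapping.h>

/*
 * Tear-down guard repeated in caam_hash_cra_exit(): only unmap a shared
 * descriptor whose DMA handle was actually set up during init.
 */
static void unmap_sh_desc(struct device *jrdev, dma_addr_t dma,
			  size_t desc_len_bytes)
{
	if (dma && !dma_mapping_error(jrdev, dma))
		dma_unmap_single(jrdev, dma, desc_len_bytes, DMA_TO_DEVICE);
}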