Lines matching refs: jrdev (the CAAM job ring device handle used throughout the driver)
180 struct device *jrdev; member
260 struct device *jrdev = ctx->jrdev; in aead_null_set_sh_desc() local
327 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in aead_null_set_sh_desc()
330 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in aead_null_set_sh_desc()
331 dev_err(jrdev, "unable to map shared descriptor\n"); in aead_null_set_sh_desc()
412 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in aead_null_set_sh_desc()
415 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in aead_null_set_sh_desc()
416 dev_err(jrdev, "unable to map shared descriptor\n"); in aead_null_set_sh_desc()
435 struct device *jrdev = ctx->jrdev; in aead_set_sh_desc() local
519 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in aead_set_sh_desc()
522 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in aead_set_sh_desc()
523 dev_err(jrdev, "unable to map shared descriptor\n"); in aead_set_sh_desc()
589 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in aead_set_sh_desc()
592 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in aead_set_sh_desc()
593 dev_err(jrdev, "unable to map shared descriptor\n"); in aead_set_sh_desc()
697 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in aead_set_sh_desc()
700 if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) { in aead_set_sh_desc()
701 dev_err(jrdev, "unable to map shared descriptor\n"); in aead_set_sh_desc()
728 struct device *jrdev = ctx->jrdev; in gcm_set_sh_desc() local
818 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in gcm_set_sh_desc()
821 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in gcm_set_sh_desc()
822 dev_err(jrdev, "unable to map shared descriptor\n"); in gcm_set_sh_desc()
899 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in gcm_set_sh_desc()
902 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in gcm_set_sh_desc()
903 dev_err(jrdev, "unable to map shared descriptor\n"); in gcm_set_sh_desc()
928 struct device *jrdev = ctx->jrdev; in rfc4106_set_sh_desc() local
997 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in rfc4106_set_sh_desc()
1000 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in rfc4106_set_sh_desc()
1001 dev_err(jrdev, "unable to map shared descriptor\n"); in rfc4106_set_sh_desc()
1071 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in rfc4106_set_sh_desc()
1074 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in rfc4106_set_sh_desc()
1075 dev_err(jrdev, "unable to map shared descriptor\n"); in rfc4106_set_sh_desc()
1101 struct device *jrdev = ctx->jrdev; in rfc4543_set_sh_desc() local
1170 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in rfc4543_set_sh_desc()
1173 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in rfc4543_set_sh_desc()
1174 dev_err(jrdev, "unable to map shared descriptor\n"); in rfc4543_set_sh_desc()
1248 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in rfc4543_set_sh_desc()
1251 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in rfc4543_set_sh_desc()
1252 dev_err(jrdev, "unable to map shared descriptor\n"); in rfc4543_set_sh_desc()
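
Every *_set_sh_desc() hit above ends in the same idiom: map the freshly built shared descriptor with dma_map_single(), test the handle with dma_mapping_error(), and log with dev_err() on failure. A minimal sketch of that pattern follows; struct caam_ctx_sketch and map_sh_desc_sketch() are simplified stand-ins for the driver's real caam_ctx and set_sh_desc functions, not the actual layout.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

/* Hypothetical, reduced stand-in for the driver's per-tfm caam_ctx. */
struct caam_ctx_sketch {
        struct device *jrdev;           /* job ring device from caam_jr_alloc() */
        u32 sh_desc_enc[64];            /* shared descriptor buffer */
        dma_addr_t sh_desc_enc_dma;     /* its bus address once mapped */
};

/* Map a built shared descriptor so the CAAM engine can fetch it. */
static int map_sh_desc_sketch(struct caam_ctx_sketch *ctx, size_t desc_len)
{
        struct device *jrdev = ctx->jrdev;

        ctx->sh_desc_enc_dma = dma_map_single(jrdev, ctx->sh_desc_enc,
                                              desc_len, DMA_TO_DEVICE);
        if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
                dev_err(jrdev, "unable to map shared descriptor\n");
                return -ENOMEM;
        }
        return 0;
}

The real functions pass desc_bytes(desc) as the length; desc_len here is an illustrative parameter.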
1278 return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len, in gen_split_aead_key()
1289 struct device *jrdev = ctx->jrdev; in aead_setkey() local
1322 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len + in aead_setkey()
1324 if (dma_mapping_error(jrdev, ctx->key_dma)) { in aead_setkey()
1325 dev_err(jrdev, "unable to map key i/o memory\n"); in aead_setkey()
1338 dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len + in aead_setkey()
1352 struct device *jrdev = ctx->jrdev; in gcm_setkey() local
1361 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, in gcm_setkey()
1363 if (dma_mapping_error(jrdev, ctx->key_dma)) { in gcm_setkey()
1364 dev_err(jrdev, "unable to map key i/o memory\n"); in gcm_setkey()
1371 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen, in gcm_setkey()
1382 struct device *jrdev = ctx->jrdev; in rfc4106_setkey() local
1401 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen, in rfc4106_setkey()
1403 if (dma_mapping_error(jrdev, ctx->key_dma)) { in rfc4106_setkey()
1404 dev_err(jrdev, "unable to map key i/o memory\n"); in rfc4106_setkey()
1410 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen, in rfc4106_setkey()
1421 struct device *jrdev = ctx->jrdev; in rfc4543_setkey() local
1440 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen, in rfc4543_setkey()
1442 if (dma_mapping_error(jrdev, ctx->key_dma)) { in rfc4543_setkey()
1443 dev_err(jrdev, "unable to map key i/o memory\n"); in rfc4543_setkey()
1449 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen, in rfc4543_setkey()
1463 struct device *jrdev = ctx->jrdev; in ablkcipher_setkey() local
1498 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, in ablkcipher_setkey()
1500 if (dma_mapping_error(jrdev, ctx->key_dma)) { in ablkcipher_setkey()
1501 dev_err(jrdev, "unable to map key i/o memory\n"); in ablkcipher_setkey()
1551 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, in ablkcipher_setkey()
1554 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in ablkcipher_setkey()
1555 dev_err(jrdev, "unable to map shared descriptor\n"); in ablkcipher_setkey()
1613 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, in ablkcipher_setkey()
1616 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in ablkcipher_setkey()
1617 dev_err(jrdev, "unable to map shared descriptor\n"); in ablkcipher_setkey()
1691 ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc, in ablkcipher_setkey()
1694 if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) { in ablkcipher_setkey()
1695 dev_err(jrdev, "unable to map shared descriptor\n"); in ablkcipher_setkey()
1712 struct device *jrdev = ctx->jrdev; in xts_ablkcipher_setkey() local
1719 dev_err(jrdev, "key size mismatch\n"); in xts_ablkcipher_setkey()
1724 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE); in xts_ablkcipher_setkey()
1725 if (dma_mapping_error(jrdev, ctx->key_dma)) { in xts_ablkcipher_setkey()
1726 dev_err(jrdev, "unable to map key i/o memory\n"); in xts_ablkcipher_setkey()
1765 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc), in xts_ablkcipher_setkey()
1767 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) { in xts_ablkcipher_setkey()
1768 dev_err(jrdev, "unable to map shared descriptor\n"); in xts_ablkcipher_setkey()
1811 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc), in xts_ablkcipher_setkey()
1813 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) { in xts_ablkcipher_setkey()
1814 dma_unmap_single(jrdev, ctx->sh_desc_enc_dma, in xts_ablkcipher_setkey()
1816 dev_err(jrdev, "unable to map shared descriptor\n"); in xts_ablkcipher_setkey()
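
The *_setkey() hits share a second idiom: copy the key into the context, DMA-map it, and unmap it again if building the shared descriptors fails afterwards (the xts variant also rejects a bad key size up front). A hedged sketch, in which struct caam_key_sketch, setkey_sketch() and build_sh_descs_sketch() are hypothetical placeholders:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>

struct caam_key_sketch {
        struct device *jrdev;
        u8 key[64];                     /* raw key copy; size is illustrative */
        unsigned int enckeylen;
        dma_addr_t key_dma;
};

/* Hypothetical stand-in for the shared-descriptor construction step. */
static int build_sh_descs_sketch(struct caam_key_sketch *ctx);

static int setkey_sketch(struct caam_key_sketch *ctx, const u8 *key,
                         unsigned int keylen)
{
        struct device *jrdev = ctx->jrdev;
        int ret;

        if (keylen > sizeof(ctx->key)) {
                dev_err(jrdev, "key size mismatch\n");
                return -EINVAL;
        }

        memcpy(ctx->key, key, keylen);
        ctx->enckeylen = keylen;

        ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
                                      DMA_TO_DEVICE);
        if (dma_mapping_error(jrdev, ctx->key_dma)) {
                dev_err(jrdev, "unable to map key i/o memory\n");
                return -ENOMEM;
        }

        ret = build_sh_descs_sketch(ctx);
        if (ret)        /* descriptor setup failed: release the key mapping */
                dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
                                 DMA_TO_DEVICE);
        return ret;
}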
1912 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, in aead_encrypt_done() argument
1919 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in aead_encrypt_done()
1925 caam_jr_strstatus(jrdev, err); in aead_encrypt_done()
1927 aead_unmap(jrdev, edesc, req); in aead_encrypt_done()
1934 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err, in aead_decrypt_done() argument
1941 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in aead_decrypt_done()
1947 caam_jr_strstatus(jrdev, err); in aead_decrypt_done()
1949 aead_unmap(jrdev, edesc, req); in aead_decrypt_done()
1962 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, in ablkcipher_encrypt_done() argument
1971 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in ablkcipher_encrypt_done()
1978 caam_jr_strstatus(jrdev, err); in ablkcipher_encrypt_done()
1989 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_encrypt_done()
1995 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, in ablkcipher_decrypt_done() argument
2004 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in ablkcipher_decrypt_done()
2010 caam_jr_strstatus(jrdev, err); in ablkcipher_decrypt_done()
2021 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_decrypt_done()
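
The four *_done() callbacks above are the job ring's completion path: decode a non-zero CAAM status word with caam_jr_strstatus(), unmap the request's DMA resources, and complete the crypto request. A sketch of that shape, where unmap_sketch() is a hypothetical stand-in for aead_unmap()/ablkcipher_unmap() and the status-to-errno translation is deliberately simplified relative to the driver:

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <crypto/internal/aead.h>

/* Decodes a CAAM status word into a human-readable dev_err() message. */
void caam_jr_strstatus(struct device *jrdev, u32 status);

/* Hypothetical stand-in for the driver's request unmap helpers. */
static void unmap_sketch(struct device *jrdev, struct aead_request *req);

static void done_sketch(struct device *jrdev, u32 *desc, u32 err,
                        void *context)
{
        struct aead_request *req = context;

        if (err) {
                dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
                caam_jr_strstatus(jrdev, err);
        }

        unmap_sketch(jrdev, req);
        /* simplified: the driver derives the errno from the status word */
        aead_request_complete(req, err ? -EIO : 0);
}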
2271 struct device *jrdev = ctx->jrdev; in aead_edesc_alloc() local
2307 dev_err(jrdev, "could not allocate extended descriptor\n"); in aead_edesc_alloc()
2312 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1, in aead_edesc_alloc()
2315 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
2320 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1, in aead_edesc_alloc()
2323 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
2328 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1, in aead_edesc_alloc()
2331 dev_err(jrdev, "unable to map destination\n"); in aead_edesc_alloc()
2332 dma_unmap_sg(jrdev, req->src, src_nents ? : 1, in aead_edesc_alloc()
2359 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
2361 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
2362 dev_err(jrdev, "unable to map S/G table\n"); in aead_edesc_alloc()
2363 aead_unmap(jrdev, edesc, req); in aead_edesc_alloc()
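
aead_edesc_alloc() shows a third idiom: an in-place request (req->src == req->dst) is mapped once as DMA_BIDIRECTIONAL, while distinct buffers get separate TO_DEVICE/FROM_DEVICE mappings, with the source unwound if the destination fails. A sketch under the assumption that the caller has already counted the S/G entries; map_src_dst_sketch() is a hypothetical helper:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int map_src_dst_sketch(struct device *jrdev,
                              struct scatterlist *src, int src_nents,
                              struct scatterlist *dst, int dst_nents)
{
        if (src == dst) {
                /* in-place: one list, read and written by the engine */
                if (!dma_map_sg(jrdev, src, src_nents ? : 1,
                                DMA_BIDIRECTIONAL)) {
                        dev_err(jrdev, "unable to map source\n");
                        return -ENOMEM;
                }
                return 0;
        }

        if (!dma_map_sg(jrdev, src, src_nents ? : 1, DMA_TO_DEVICE)) {
                dev_err(jrdev, "unable to map source\n");
                return -ENOMEM;
        }
        if (!dma_map_sg(jrdev, dst, dst_nents ? : 1, DMA_FROM_DEVICE)) {
                dev_err(jrdev, "unable to map destination\n");
                /* unwind the source mapping before bailing out */
                dma_unmap_sg(jrdev, src, src_nents ? : 1, DMA_TO_DEVICE);
                return -ENOMEM;
        }
        return 0;
}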
2378 struct device *jrdev = ctx->jrdev; in gcm_encrypt() local
2397 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); in gcm_encrypt()
2401 aead_unmap(jrdev, edesc, req); in gcm_encrypt()
2421 struct device *jrdev = ctx->jrdev; in aead_encrypt() local
2441 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); in aead_encrypt()
2445 aead_unmap(jrdev, edesc, req); in aead_encrypt()
2457 struct device *jrdev = ctx->jrdev; in gcm_decrypt() local
2476 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); in gcm_decrypt()
2480 aead_unmap(jrdev, edesc, req); in gcm_decrypt()
2500 struct device *jrdev = ctx->jrdev; in aead_decrypt() local
2526 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); in aead_decrypt()
2530 aead_unmap(jrdev, edesc, req); in aead_decrypt()
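
The gcm/aead encrypt and decrypt entry points all funnel into caam_jr_enqueue(): a zero return means the job was queued and the completion callback will finish the request, so the caller reports -EINPROGRESS; a non-zero return means nothing was queued and the submitter unmaps and frees on the spot. A sketch of that split, reusing the hypothetical done_sketch()/unmap_sketch() names, with struct edesc_sketch standing in for the driver's extended descriptor:

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <crypto/aead.h>

/* CAAM job ring submit interface (drivers/crypto/caam/jr.c). */
int caam_jr_enqueue(struct device *dev, u32 *desc,
                    void (*cbk)(struct device *dev, u32 *desc, u32 status,
                                void *areq),
                    void *areq);

struct edesc_sketch;                            /* hypothetical */
static void done_sketch(struct device *jrdev, u32 *desc, u32 status,
                        void *areq);            /* hypothetical callback */
static void unmap_sketch(struct device *jrdev, struct edesc_sketch *edesc,
                         struct aead_request *req);     /* hypothetical */

static int submit_sketch(struct device *jrdev, u32 *desc,
                         struct edesc_sketch *edesc, struct aead_request *req)
{
        int ret = caam_jr_enqueue(jrdev, desc, done_sketch, req);

        if (!ret)
                return -EINPROGRESS;    /* queued: the callback owns cleanup */

        /* nothing queued: unwind the mappings and free the edesc here */
        unmap_sketch(jrdev, edesc, req);
        kfree(edesc);
        return ret;
}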
2560 struct device *jrdev = ctx->jrdev; in ablkcipher_edesc_alloc() local
2578 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1, in ablkcipher_edesc_alloc()
2581 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1, in ablkcipher_edesc_alloc()
2583 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1, in ablkcipher_edesc_alloc()
2587 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE); in ablkcipher_edesc_alloc()
2588 if (dma_mapping_error(jrdev, iv_dma)) { in ablkcipher_edesc_alloc()
2589 dev_err(jrdev, "unable to map IV\n"); in ablkcipher_edesc_alloc()
2608 dev_err(jrdev, "could not allocate extended descriptor\n"); in ablkcipher_edesc_alloc()
2631 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ablkcipher_edesc_alloc()
2633 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ablkcipher_edesc_alloc()
2634 dev_err(jrdev, "unable to map S/G table\n"); in ablkcipher_edesc_alloc()
2655 struct device *jrdev = ctx->jrdev; in ablkcipher_encrypt() local
2675 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); in ablkcipher_encrypt()
2680 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_encrypt()
2692 struct device *jrdev = ctx->jrdev; in ablkcipher_decrypt() local
2713 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req); in ablkcipher_decrypt()
2717 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_decrypt()
2736 struct device *jrdev = ctx->jrdev; in ablkcipher_giv_edesc_alloc() local
2754 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1, in ablkcipher_giv_edesc_alloc()
2757 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1, in ablkcipher_giv_edesc_alloc()
2759 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1, in ablkcipher_giv_edesc_alloc()
2767 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE); in ablkcipher_giv_edesc_alloc()
2768 if (dma_mapping_error(jrdev, iv_dma)) { in ablkcipher_giv_edesc_alloc()
2769 dev_err(jrdev, "unable to map IV\n"); in ablkcipher_giv_edesc_alloc()
2784 dev_err(jrdev, "could not allocate extended descriptor\n"); in ablkcipher_giv_edesc_alloc()
2808 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ablkcipher_giv_edesc_alloc()
2810 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ablkcipher_giv_edesc_alloc()
2811 dev_err(jrdev, "unable to map S/G table\n"); in ablkcipher_giv_edesc_alloc()
2833 struct device *jrdev = ctx->jrdev; in ablkcipher_givencrypt() local
2854 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); in ablkcipher_givencrypt()
2859 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_givencrypt()
4330 ctx->jrdev = caam_jr_alloc(); in caam_init_common()
4331 if (IS_ERR(ctx->jrdev)) { in caam_init_common()
4333 return PTR_ERR(ctx->jrdev); in caam_init_common()
4367 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma)) in caam_exit_common()
4368 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma, in caam_exit_common()
4371 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma)) in caam_exit_common()
4372 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma, in caam_exit_common()
4375 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma)) in caam_exit_common()
4376 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma, in caam_exit_common()
4380 !dma_mapping_error(ctx->jrdev, ctx->key_dma)) in caam_exit_common()
4381 dma_unmap_single(ctx->jrdev, ctx->key_dma, in caam_exit_common()
4385 caam_jr_free(ctx->jrdev); in caam_exit_common()
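
Finally, caam_init_common() and caam_exit_common() bracket everything above: a job ring is claimed per transform with caam_jr_alloc(), and on teardown each mapping that was actually established is released before caam_jr_free(). A condensed sketch, reusing the same simplified caam_ctx_sketch layout from the first sketch (only one of the four descriptor/key mappings is shown):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/types.h>

/* CAAM job ring lifecycle (drivers/crypto/caam/jr.c). */
struct device *caam_jr_alloc(void);
void caam_jr_free(struct device *rdev);

struct caam_ctx_sketch {                /* same simplified layout as above */
        struct device *jrdev;
        u32 sh_desc_enc[64];
        dma_addr_t sh_desc_enc_dma;
};

static int init_sketch(struct caam_ctx_sketch *ctx)
{
        ctx->jrdev = caam_jr_alloc();   /* claim a job ring for this tfm */
        if (IS_ERR(ctx->jrdev))
                return PTR_ERR(ctx->jrdev);
        return 0;
}

static void exit_sketch(struct caam_ctx_sketch *ctx)
{
        /* unmap only what was successfully mapped, as the hits above do */
        if (ctx->sh_desc_enc_dma &&
            !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
                dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
                                 sizeof(ctx->sh_desc_enc), DMA_TO_DEVICE);

        caam_jr_free(ctx->jrdev);
}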