jrdev 111 drivers/crypto/caam/caamalg.c struct device *jrdev;
jrdev 120 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 121 drivers/crypto/caam/caamalg.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
jrdev 142 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 161 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 173 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 174 drivers/crypto/caam/caamalg.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
jrdev 244 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 266 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 290 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 311 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 335 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 352 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 376 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 401 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 419 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 444 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 469 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 487 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 510 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 520 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 526 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 566 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 567 drivers/crypto/caam/caamalg.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
jrdev 574 drivers/crypto/caam/caamalg.c dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
jrdev 595 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->key_dma,
jrdev 601 drivers/crypto/caam/caamalg.c ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
jrdev 610 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
jrdev 648 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 661 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
jrdev 671 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 690 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
jrdev 699 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 718 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
jrdev 730 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 746 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 753 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 845 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 850 drivers/crypto/caam/caamalg.c dev_err(jrdev, "key size mismatch\n");
jrdev 861 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
jrdev 867 drivers/crypto/caam/caamalg.c dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
jrdev 963 drivers/crypto/caam/caamalg.c static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
jrdev 970 drivers/crypto/caam/caamalg.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 975 drivers/crypto/caam/caamalg.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 977 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 984 drivers/crypto/caam/caamalg.c static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
jrdev 991 drivers/crypto/caam/caamalg.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 996 drivers/crypto/caam/caamalg.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 998 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1005 drivers/crypto/caam/caamalg.c static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
jrdev 1014 drivers/crypto/caam/caamalg.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 1019 drivers/crypto/caam/caamalg.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 1021 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
jrdev 1045 drivers/crypto/caam/caamalg.c static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
jrdev 1054 drivers/crypto/caam/caamalg.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 1058 drivers/crypto/caam/caamalg.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 1060 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
jrdev 1222 drivers/crypto/caam/caamalg.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
jrdev 1271 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1281 drivers/crypto/caam/caamalg.c dev_dbg(jrdev, "asked=%d, cryptlen%d\n",
jrdev 1327 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1342 drivers/crypto/caam/caamalg.c dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
jrdev 1349 drivers/crypto/caam/caamalg.c dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
jrdev 1359 drivers/crypto/caam/caamalg.c dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
jrdev 1366 drivers/crypto/caam/caamalg.c mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1369 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map source\n");
jrdev 1375 drivers/crypto/caam/caamalg.c mapped_src_nents = dma_map_sg(jrdev, req->src,
jrdev 1378 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map source\n");
jrdev 1387 drivers/crypto/caam/caamalg.c mapped_dst_nents = dma_map_sg(jrdev, req->dst,
jrdev 1391 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map destination\n");
jrdev 1392 drivers/crypto/caam/caamalg.c dma_unmap_sg(jrdev, req->src, src_nents,
jrdev 1417 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
jrdev 1444 drivers/crypto/caam/caamalg.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
jrdev 1446 drivers/crypto/caam/caamalg.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
jrdev 1447 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map S/G table\n");
jrdev 1448 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1463 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1481 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
jrdev 1485 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1497 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1514 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
jrdev 1518 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1530 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1547 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
jrdev 1551 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1568 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1587 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
jrdev 1591 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1603 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1621 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
jrdev 1625 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1642 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1665 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
jrdev 1669 drivers/crypto/caam/caamalg.c aead_unmap(jrdev, edesc, req);
jrdev 1684 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1696 drivers/crypto/caam/caamalg.c dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
jrdev 1704 drivers/crypto/caam/caamalg.c dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
jrdev 1711 drivers/crypto/caam/caamalg.c mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1714 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map source\n");
jrdev 1718 drivers/crypto/caam/caamalg.c mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1721 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map source\n");
jrdev 1724 drivers/crypto/caam/caamalg.c mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
jrdev 1727 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map destination\n");
jrdev 1728 drivers/crypto/caam/caamalg.c dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
jrdev 1769 drivers/crypto/caam/caamalg.c dev_err(jrdev, "could not allocate extended descriptor\n");
jrdev 1770 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
jrdev 1788 drivers/crypto/caam/caamalg.c iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
jrdev 1789 drivers/crypto/caam/caamalg.c if (dma_mapping_error(jrdev, iv_dma)) {
jrdev 1790 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map IV\n");
jrdev 1791 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents,
jrdev 1816 drivers/crypto/caam/caamalg.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
jrdev 1819 drivers/crypto/caam/caamalg.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
jrdev 1820 drivers/crypto/caam/caamalg.c dev_err(jrdev, "unable to map S/G table\n");
jrdev 1821 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents,
jrdev 1842 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1862 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
jrdev 1867 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
jrdev 1879 drivers/crypto/caam/caamalg.c struct device *jrdev = ctx->jrdev;
jrdev 1899 drivers/crypto/caam/caamalg.c ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
jrdev 1903 drivers/crypto/caam/caamalg.c skcipher_unmap(jrdev, edesc, req);
jrdev 3414 drivers/crypto/caam/caamalg.c ctx->jrdev = caam_jr_alloc();
jrdev 3415 drivers/crypto/caam/caamalg.c if (IS_ERR(ctx->jrdev)) {
jrdev 3417 drivers/crypto/caam/caamalg.c return PTR_ERR(ctx->jrdev);
jrdev 3420 drivers/crypto/caam/caamalg.c priv = dev_get_drvdata(ctx->jrdev->parent);
jrdev 3426 drivers/crypto/caam/caamalg.c dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
jrdev 3430 drivers/crypto/caam/caamalg.c if (dma_mapping_error(ctx->jrdev, dma_addr)) {
jrdev 3431 drivers/crypto/caam/caamalg.c dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
jrdev 3432 drivers/crypto/caam/caamalg.c caam_jr_free(ctx->jrdev);
jrdev 3470 drivers/crypto/caam/caamalg.c dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
jrdev 3473 drivers/crypto/caam/caamalg.c caam_jr_free(ctx->jrdev);
jrdev 58 drivers/crypto/caam/caamalg_qi.c struct device *jrdev;
jrdev 85 drivers/crypto/caam/caamalg_qi.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
jrdev 191 drivers/crypto/caam/caamalg_qi.c struct device *jrdev = ctx->jrdev;
jrdev 192 drivers/crypto/caam/caamalg_qi.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
jrdev 199 drivers/crypto/caam/caamalg_qi.c dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
jrdev 220 drivers/crypto/caam/caamalg_qi.c dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
jrdev 226 drivers/crypto/caam/caamalg_qi.c ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
jrdev 234 drivers/crypto/caam/caamalg_qi.c dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
jrdev 254 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver enc context update failed\n");
jrdev 263 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver dec context update failed\n");
jrdev 355 drivers/crypto/caam/caamalg_qi.c struct device *jrdev = ctx->jrdev;
jrdev 368 drivers/crypto/caam/caamalg_qi.c dma_sync_single_for_device(jrdev->parent, ctx->key_dma, keylen,
jrdev 381 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver enc context update failed\n");
jrdev 390 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver dec context update failed\n");
jrdev 461 drivers/crypto/caam/caamalg_qi.c struct device *jrdev = ctx->jrdev;
jrdev 479 drivers/crypto/caam/caamalg_qi.c dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
jrdev 491 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver enc context update failed\n");
jrdev 500 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver dec context update failed\n");
jrdev 569 drivers/crypto/caam/caamalg_qi.c struct device *jrdev = ctx->jrdev;
jrdev 587 drivers/crypto/caam/caamalg_qi.c dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
jrdev 599 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver enc context update failed\n");
jrdev 608 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver dec context update failed\n");
jrdev 623 drivers/crypto/caam/caamalg_qi.c struct device *jrdev = ctx->jrdev;
jrdev 646 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver enc context update failed\n");
jrdev 655 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver dec context update failed\n");
jrdev 746 drivers/crypto/caam/caamalg_qi.c struct device *jrdev = ctx->jrdev;
jrdev 750 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "key size mismatch\n");
jrdev 767 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver enc context update failed\n");
jrdev 776 drivers/crypto/caam/caamalg_qi.c dev_err(jrdev, "driver dec context update failed\n");
jrdev 2426 drivers/crypto/caam/caamalg_qi.c ctx->jrdev = caam_jr_alloc();
jrdev 2427 drivers/crypto/caam/caamalg_qi.c if (IS_ERR(ctx->jrdev)) {
jrdev 2429 drivers/crypto/caam/caamalg_qi.c return PTR_ERR(ctx->jrdev);
jrdev 2432 drivers/crypto/caam/caamalg_qi.c dev = ctx->jrdev->parent;
jrdev 2443 drivers/crypto/caam/caamalg_qi.c caam_jr_free(ctx->jrdev);
jrdev 2485 drivers/crypto/caam/caamalg_qi.c dma_unmap_single(ctx->jrdev->parent, ctx->key_dma, sizeof(ctx->key),
jrdev 2488 drivers/crypto/caam/caamalg_qi.c caam_jr_free(ctx->jrdev);
jrdev 100 drivers/crypto/caam/caamhash.c struct device *jrdev;
jrdev 163 drivers/crypto/caam/caamhash.c static inline int map_seq_out_ptr_ctx(u32 *desc, struct device *jrdev,
jrdev 168 drivers/crypto/caam/caamhash.c state->ctx_dma = dma_map_single(jrdev, state->caam_ctx,
jrdev 170 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, state->ctx_dma)) {
jrdev 171 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map ctx\n");
jrdev 182 drivers/crypto/caam/caamhash.c static inline int buf_map_to_sec4_sg(struct device *jrdev,
jrdev 191 drivers/crypto/caam/caamhash.c state->buf_dma = dma_map_single(jrdev, current_buf(state), buflen,
jrdev 193 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, state->buf_dma)) {
jrdev 194 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map buf\n");
jrdev 205 drivers/crypto/caam/caamhash.c static inline int ctx_map_to_sec4_sg(struct device *jrdev,
jrdev 210 drivers/crypto/caam/caamhash.c state->ctx_dma = dma_map_single(jrdev, state->caam_ctx, ctx_len, flag);
jrdev 211 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, state->ctx_dma)) {
jrdev 212 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map ctx\n");
jrdev 226 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 227 drivers/crypto/caam/caamhash.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
jrdev 236 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma,
jrdev 247 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma,
jrdev 257 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma,
jrdev 268 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma,
jrdev 282 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 289 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma,
jrdev 299 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma,
jrdev 312 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma,
jrdev 322 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma,
jrdev 334 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 341 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma,
jrdev 351 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma,
jrdev 361 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma,
jrdev 371 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma,
jrdev 384 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 392 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to allocate key input memory\n");
jrdev 398 drivers/crypto/caam/caamhash.c key_dma = dma_map_single(jrdev, key, *keylen, DMA_BIDIRECTIONAL);
jrdev 399 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, key_dma)) {
jrdev 400 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map key memory\n");
jrdev 424 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, split_key_done, &result);
jrdev 434 drivers/crypto/caam/caamhash.c dma_unmap_single(jrdev, key_dma, *keylen, DMA_BIDIRECTIONAL);
jrdev 447 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 450 drivers/crypto/caam/caamhash.c struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
jrdev 454 drivers/crypto/caam/caamhash.c dev_dbg(jrdev, "keylen %d\n", keylen);
jrdev 488 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(ctx->jrdev,
jrdev 493 drivers/crypto/caam/caamhash.c ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, key,
jrdev 511 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 519 drivers/crypto/caam/caamhash.c dma_sync_single_for_device(jrdev, ctx->adata.key_dma, keylen,
jrdev 600 drivers/crypto/caam/caamhash.c static void ahash_done(struct device *jrdev, u32 *desc, u32 err,
jrdev 611 drivers/crypto/caam/caamhash.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 615 drivers/crypto/caam/caamhash.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 617 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
jrdev 628 drivers/crypto/caam/caamhash.c static void ahash_done_bi(struct device *jrdev, u32 *desc, u32 err,
jrdev 639 drivers/crypto/caam/caamhash.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 643 drivers/crypto/caam/caamhash.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 645 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL);
jrdev 660 drivers/crypto/caam/caamhash.c static void ahash_done_ctx_src(struct device *jrdev, u32 *desc, u32 err,
jrdev 671 drivers/crypto/caam/caamhash.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 675 drivers/crypto/caam/caamhash.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 677 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL);
jrdev 688 drivers/crypto/caam/caamhash.c static void ahash_done_ctx_dst(struct device *jrdev, u32 *desc, u32 err,
jrdev 699 drivers/crypto/caam/caamhash.c dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
jrdev 703 drivers/crypto/caam/caamhash.c ecode = caam_jr_strstatus(jrdev, err);
jrdev 705 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_FROM_DEVICE);
jrdev 734 drivers/crypto/caam/caamhash.c dev_err(ctx->jrdev, "could not allocate extended descriptor\n");
jrdev 760 drivers/crypto/caam/caamhash.c src_dma = dma_map_single(ctx->jrdev, sg, sgsize, DMA_TO_DEVICE);
jrdev 761 drivers/crypto/caam/caamhash.c if (dma_mapping_error(ctx->jrdev, src_dma)) {
jrdev 762 drivers/crypto/caam/caamhash.c dev_err(ctx->jrdev, "unable to map S/G table\n");
jrdev 786 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 821 drivers/crypto/caam/caamhash.c dev_err(jrdev, "Invalid number of src SG.\n");
jrdev 826 drivers/crypto/caam/caamhash.c mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 829 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to DMA map source\n");
jrdev 847 drivers/crypto/caam/caamhash.c dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
jrdev 854 drivers/crypto/caam/caamhash.c ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len,
jrdev 859 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state);
jrdev 877 drivers/crypto/caam/caamhash.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
jrdev 880 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
jrdev 881 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map S/G table\n");
jrdev 895 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done_bi, req);
jrdev 915 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL);
jrdev 925 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 948 drivers/crypto/caam/caamhash.c ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len,
jrdev 953 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state);
jrdev 959 drivers/crypto/caam/caamhash.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
jrdev 961 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
jrdev 962 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map S/G table\n");
jrdev 975 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done_ctx_src, req);
jrdev 981 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL);
jrdev 991 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 1004 drivers/crypto/caam/caamhash.c dev_err(jrdev, "Invalid number of src SG.\n");
jrdev 1009 drivers/crypto/caam/caamhash.c mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1012 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to DMA map source\n");
jrdev 1026 drivers/crypto/caam/caamhash.c dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
jrdev 1034 drivers/crypto/caam/caamhash.c ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len,
jrdev 1039 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state);
jrdev 1055 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done_ctx_src, req);
jrdev 1061 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL);
jrdev 1071 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 1084 drivers/crypto/caam/caamhash.c dev_err(jrdev, "Invalid number of src SG.\n");
jrdev 1089 drivers/crypto/caam/caamhash.c mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1092 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map source for DMA\n");
jrdev 1104 drivers/crypto/caam/caamhash.c dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
jrdev 1113 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
jrdev 1120 drivers/crypto/caam/caamhash.c ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize);
jrdev 1122 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
jrdev 1131 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done, req);
jrdev 1135 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
jrdev 1148 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 1167 drivers/crypto/caam/caamhash.c state->buf_dma = dma_map_single(jrdev, buf, buflen,
jrdev 1169 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, state->buf_dma)) {
jrdev 1170 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map src\n");
jrdev 1177 drivers/crypto/caam/caamhash.c ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize);
jrdev 1185 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done, req);
jrdev 1189 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
jrdev 1195 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
jrdev 1207 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 1241 drivers/crypto/caam/caamhash.c dev_err(jrdev, "Invalid number of src SG.\n");
jrdev 1246 drivers/crypto/caam/caamhash.c mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1249 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to DMA map source\n");
jrdev 1268 drivers/crypto/caam/caamhash.c dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
jrdev 1275 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state);
jrdev 1289 drivers/crypto/caam/caamhash.c edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
jrdev 1292 drivers/crypto/caam/caamhash.c if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
jrdev 1293 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map S/G table\n");
jrdev 1300 drivers/crypto/caam/caamhash.c ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len);
jrdev 1308 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done_ctx_dst, req);
jrdev 1331 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE);
jrdev 1342 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 1354 drivers/crypto/caam/caamhash.c dev_err(jrdev, "Invalid number of src SG.\n");
jrdev 1359 drivers/crypto/caam/caamhash.c mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1362 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to DMA map source\n");
jrdev 1378 drivers/crypto/caam/caamhash.c dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
jrdev 1387 drivers/crypto/caam/caamhash.c ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state);
jrdev 1394 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map S/G table\n");
jrdev 1398 drivers/crypto/caam/caamhash.c ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize);
jrdev 1406 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done, req);
jrdev 1410 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_FROM_DEVICE);
jrdev 1416 drivers/crypto/caam/caamhash.c ahash_unmap(jrdev, edesc, req, digestsize);
jrdev 1428 drivers/crypto/caam/caamhash.c struct device *jrdev = ctx->jrdev;
jrdev 1458 drivers/crypto/caam/caamhash.c dev_err(jrdev, "Invalid number of src SG.\n");
jrdev 1463 drivers/crypto/caam/caamhash.c mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
jrdev 1466 drivers/crypto/caam/caamhash.c dev_err(jrdev, "unable to map source for DMA\n");
jrdev 1483 drivers/crypto/caam/caamhash.c dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
jrdev 1500 drivers/crypto/caam/caamhash.c ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len);
jrdev 1508 drivers/crypto/caam/caamhash.c ret = caam_jr_enqueue(jrdev, desc, ahash_done_ctx_dst, req);
jrdev 1531 drivers/crypto/caam/caamhash.c ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE);
jrdev 1831 drivers/crypto/caam/caamhash.c ctx->jrdev = caam_jr_alloc();
jrdev 1832 drivers/crypto/caam/caamhash.c if (IS_ERR(ctx->jrdev)) {
jrdev 1834 drivers/crypto/caam/caamhash.c return PTR_ERR(ctx->jrdev);
jrdev 1837 drivers/crypto/caam/caamhash.c priv = dev_get_drvdata(ctx->jrdev->parent);
jrdev 1864 drivers/crypto/caam/caamhash.c ctx->adata.key_dma = dma_map_single_attrs(ctx->jrdev, ctx->key,
jrdev 1868 drivers/crypto/caam/caamhash.c if (dma_mapping_error(ctx->jrdev, ctx->adata.key_dma)) {
jrdev 1869 drivers/crypto/caam/caamhash.c dev_err(ctx->jrdev, "unable to map key\n");
jrdev 1870 drivers/crypto/caam/caamhash.c caam_jr_free(ctx->jrdev);
jrdev 1875 drivers/crypto/caam/caamhash.c dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_update,
jrdev 1878 drivers/crypto/caam/caamhash.c if (dma_mapping_error(ctx->jrdev, dma_addr)) {
jrdev 1879 drivers/crypto/caam/caamhash.c dev_err(ctx->jrdev, "unable to map shared descriptors\n");
jrdev 1882 drivers/crypto/caam/caamhash.c dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma,
jrdev 1887 drivers/crypto/caam/caamhash.c caam_jr_free(ctx->jrdev);
jrdev 1914 drivers/crypto/caam/caamhash.c dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_update_dma,
jrdev 1918 drivers/crypto/caam/caamhash.c dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma,
jrdev 1921 drivers/crypto/caam/caamhash.c caam_jr_free(ctx->jrdev);
jrdev 629 drivers/crypto/caam/caampkc.c struct device *jrdev = ctx->dev;
jrdev 638 drivers/crypto/caam/caampkc.c dev_err(jrdev, "Output buffer length less than parameter n\n");
jrdev 655 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_pub_done, req);
jrdev 659 drivers/crypto/caam/caampkc.c rsa_pub_unmap(jrdev, edesc, req);
jrdev 662 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
jrdev 671 drivers/crypto/caam/caampkc.c struct device *jrdev = ctx->dev;
jrdev 688 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_priv_f1_done, req);
jrdev 692 drivers/crypto/caam/caampkc.c rsa_priv_f1_unmap(jrdev, edesc, req);
jrdev 695 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
jrdev 704 drivers/crypto/caam/caampkc.c struct device *jrdev = ctx->dev;
jrdev 721 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_priv_f2_done, req);
jrdev 725 drivers/crypto/caam/caampkc.c rsa_priv_f2_unmap(jrdev, edesc, req);
jrdev 728 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
jrdev 737 drivers/crypto/caam/caampkc.c struct device *jrdev = ctx->dev;
jrdev 754 drivers/crypto/caam/caampkc.c ret = caam_jr_enqueue(jrdev, edesc->hw_desc, rsa_priv_f3_done, req);
jrdev 758 drivers/crypto/caam/caampkc.c rsa_priv_f3_unmap(jrdev, edesc, req);
jrdev 761 drivers/crypto/caam/caampkc.c rsa_io_unmap(jrdev, edesc, req);
jrdev 73 drivers/crypto/caam/caamrng.c struct device *jrdev;
jrdev 89 drivers/crypto/caam/caamrng.c static inline void rng_unmap_buf(struct device *jrdev, struct buf_data *bd)
jrdev 92 drivers/crypto/caam/caamrng.c dma_unmap_single(jrdev, bd->addr, RN_BUF_SIZE,
jrdev 98 drivers/crypto/caam/caamrng.c struct device *jrdev = ctx->jrdev;
jrdev 101 drivers/crypto/caam/caamrng.c dma_unmap_single(jrdev, ctx->sh_desc_dma,
jrdev 103 drivers/crypto/caam/caamrng.c rng_unmap_buf(jrdev, &ctx->bufs[0]);
jrdev 104 drivers/crypto/caam/caamrng.c rng_unmap_buf(jrdev, &ctx->bufs[1]);
jrdev 107 drivers/crypto/caam/caamrng.c static void rng_done(struct device *jrdev, u32 *desc, u32 err, void *context)
jrdev 114 drivers/crypto/caam/caamrng.c caam_jr_strstatus(jrdev, err);
jrdev 120 drivers/crypto/caam/caamrng.c dma_sync_single_for_cpu(jrdev, bd->addr, RN_BUF_SIZE, DMA_FROM_DEVICE);
jrdev 129 drivers/crypto/caam/caamrng.c struct device *jrdev = ctx->jrdev;
jrdev 133 drivers/crypto/caam/caamrng.c dev_dbg(jrdev, "submitting job %d\n", !(to_current ^ ctx->current_buf));
jrdev 135 drivers/crypto/caam/caamrng.c err = caam_jr_enqueue(jrdev, desc, rng_done, ctx);
jrdev 169 drivers/crypto/caam/caamrng.c dev_dbg(ctx->jrdev, "%s: start reading at buffer %d, idx %d\n",
jrdev 190 drivers/crypto/caam/caamrng.c dev_dbg(ctx->jrdev, "switched to buffer %d\n", ctx->current_buf);
jrdev 199 drivers/crypto/caam/caamrng.c struct device *jrdev = ctx->jrdev;
jrdev 210 drivers/crypto/caam/caamrng.c ctx->sh_desc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
jrdev 212 drivers/crypto/caam/caamrng.c if (dma_mapping_error(jrdev, ctx->sh_desc_dma)) {
jrdev 213 drivers/crypto/caam/caamrng.c dev_err(jrdev, "unable to map shared descriptor\n");
jrdev 225 drivers/crypto/caam/caamrng.c struct device *jrdev = ctx->jrdev;
jrdev 233 drivers/crypto/caam/caamrng.c bd->addr = dma_map_single(jrdev, bd->buf, RN_BUF_SIZE, DMA_FROM_DEVICE);
jrdev 234 drivers/crypto/caam/caamrng.c if (dma_mapping_error(jrdev, bd->addr)) {
jrdev 235 drivers/crypto/caam/caamrng.c dev_err(jrdev, "unable to map dst\n");
jrdev 277 drivers/crypto/caam/caamrng.c static int caam_init_rng(struct caam_rng_ctx *ctx, struct device *jrdev)
jrdev 281 drivers/crypto/caam/caamrng.c ctx->jrdev = jrdev;
jrdev 308 drivers/crypto/caam/caamrng.c caam_jr_free(rng_ctx->jrdev);
jrdev 217 drivers/crypto/caam/error.c static int report_ccb_status(struct device *jrdev, const u32 status,
jrdev 257 drivers/crypto/caam/error.c dev_err_ratelimited(jrdev, "%08x: %s: %s %d: %s%s: %s%s\n", status,
jrdev 264 drivers/crypto/caam/error.c static int report_jump_status(struct device *jrdev, const u32 status,
jrdev 267 drivers/crypto/caam/error.c dev_err(jrdev, "%08x: %s: %s() not implemented\n",
jrdev 273 drivers/crypto/caam/error.c static int report_deco_status(struct device *jrdev, const u32 status,
jrdev 298 drivers/crypto/caam/error.c dev_err(jrdev, "%08x: %s: %s %d: %s%s\n",
jrdev 327 drivers/crypto/caam/error.c static int report_jr_status(struct device *jrdev, const u32 status,
jrdev 330 drivers/crypto/caam/error.c dev_err(jrdev, "%08x: %s: %s() not implemented\n",
jrdev 336 drivers/crypto/caam/error.c static int report_cond_code_status(struct device *jrdev, const u32 status,
jrdev 339 drivers/crypto/caam/error.c dev_err(jrdev, "%08x: %s: %s() not implemented\n",
jrdev 345 drivers/crypto/caam/error.c int caam_strstatus(struct device *jrdev, u32 status, bool qi_v2)
jrdev 348 drivers/crypto/caam/error.c int (*report_ssed)(struct device *jrdev, const u32 status,
jrdev 377 drivers/crypto/caam/error.c return status_src[ssrc].report_ssed(jrdev, status, error);
jrdev 380 drivers/crypto/caam/error.c dev_err(jrdev, "%d: %s\n", ssrc, error);
jrdev 382 drivers/crypto/caam/error.c dev_err(jrdev, "%d: unknown error source\n", ssrc);
jrdev 17 drivers/crypto/caam/error.h #define caam_jr_strstatus(jrdev, status) caam_strstatus(jrdev, status, false)
jrdev 123 drivers/crypto/caam/jr.c struct device *jrdev;
jrdev 126 drivers/crypto/caam/jr.c jrdev = &pdev->dev;
jrdev 127 drivers/crypto/caam/jr.c jrpriv = dev_get_drvdata(jrdev);
jrdev 133 drivers/crypto/caam/jr.c dev_err(jrdev, "Device is busy\n");
jrdev 146 drivers/crypto/caam/jr.c ret = caam_jr_shutdown(jrdev);
jrdev 148 drivers/crypto/caam/jr.c dev_err(jrdev, "Failed to shut down job ring\n");
jrdev 499 drivers/crypto/caam/jr.c struct device *jrdev;
jrdev 507 drivers/crypto/caam/jr.c jrdev = &pdev->dev;
jrdev 508 drivers/crypto/caam/jr.c jrpriv = devm_kmalloc(jrdev, sizeof(*jrpriv), GFP_KERNEL);
jrdev 512 drivers/crypto/caam/jr.c dev_set_drvdata(jrdev, jrpriv);
jrdev 522 drivers/crypto/caam/jr.c dev_err(jrdev, "platform_get_resource() failed\n");
jrdev 526 drivers/crypto/caam/jr.c ctrl = devm_ioremap(jrdev, r->start, resource_size(r));
jrdev 528 drivers/crypto/caam/jr.c dev_err(jrdev, "devm_ioremap() failed\n");
jrdev 534 drivers/crypto/caam/jr.c error = dma_set_mask_and_coherent(jrdev, caam_get_dma_mask(jrdev));
jrdev 536 drivers/crypto/caam/jr.c dev_err(jrdev, "dma_set_mask_and_coherent failed (%d)\n",
jrdev 544 drivers/crypto/caam/jr.c dev_err(jrdev, "irq_of_parse_and_map failed\n");
jrdev 548 drivers/crypto/caam/jr.c error = devm_add_action_or_reset(jrdev, caam_jr_irq_dispose_mapping,
jrdev 554 drivers/crypto/caam/jr.c error = caam_jr_init(jrdev); /* now turn on hardware */
jrdev 558 drivers/crypto/caam/jr.c jrpriv->dev = jrdev;
jrdev 565 drivers/crypto/caam/jr.c register_algs(jrdev->parent);
jrdev 44 drivers/crypto/caam/key_gen.c int gen_split_key(struct device *jrdev, u8 *key_out,
jrdev 59 drivers/crypto/caam/key_gen.c dev_dbg(jrdev, "split keylen %d split keylen padded %d\n",
jrdev 69 drivers/crypto/caam/key_gen.c dev_err(jrdev, "unable to allocate key input memory\n");
jrdev 75 drivers/crypto/caam/key_gen.c dma_addr = dma_map_single(jrdev, key_out, local_max, DMA_BIDIRECTIONAL);
jrdev 76 drivers/crypto/caam/key_gen.c if (dma_mapping_error(jrdev, dma_addr)) {
jrdev 77 drivers/crypto/caam/key_gen.c dev_err(jrdev, "unable to map key memory\n");
jrdev 110 drivers/crypto/caam/key_gen.c ret = caam_jr_enqueue(jrdev, desc, split_key_done, &result);
jrdev 121 drivers/crypto/caam/key_gen.c dma_unmap_single(jrdev, dma_addr, local_max, DMA_BIDIRECTIONAL);
jrdev 46 drivers/crypto/caam/key_gen.h int gen_split_key(struct device *jrdev, u8 *key_out,
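For orientation, nearly all of the references above follow one lifecycle: obtain a job-ring device with caam_jr_alloc(), DMA-map keys, shared descriptors and data buffers against that device, submit a job descriptor with caam_jr_enqueue() together with a completion callback, decode the hardware status word with caam_jr_strstatus(), and release the ring with caam_jr_free(). The sketch below is a minimal, synchronous illustration of that pattern, not code from any of the files listed; my_result, my_done() and my_run_job() are hypothetical names, and the job descriptor is assumed to be built elsewhere with the CAAM descriptor constructors.

	/* Minimal sketch of the jrdev usage pattern (hypothetical helper). */
	#include <linux/completion.h>
	#include <linux/device.h>
	#include <linux/err.h>
	#include <linux/types.h>

	#include "jr.h"		/* caam_jr_alloc(), caam_jr_enqueue(), caam_jr_free() */
	#include "error.h"	/* caam_jr_strstatus() */

	struct my_result {
		struct completion done;
		int err;
	};

	/* Completion callback; runs when the job ring reports the job done. */
	static void my_done(struct device *jrdev, u32 *desc, u32 status, void *areq)
	{
		struct my_result *res = areq;

		if (status)
			res->err = caam_jr_strstatus(jrdev, status);
		complete(&res->done);
	}

	/* @desc: job descriptor already built with the CAAM descriptor API. */
	static int my_run_job(u32 *desc)
	{
		struct my_result res = { .err = 0 };
		struct device *jrdev;
		int ret;

		jrdev = caam_jr_alloc();	/* reserve one job ring for this user */
		if (IS_ERR(jrdev))
			return PTR_ERR(jrdev);

		/*
		 * Any key material, shared descriptors or data buffers referenced
		 * by @desc must be DMA-mapped against @jrdev beforehand
		 * (dma_map_single(), dma_map_sg(), ...), which is what most of the
		 * call sites in the listing above are doing.
		 */
		init_completion(&res.done);
		ret = caam_jr_enqueue(jrdev, desc, my_done, &res);
		/*
		 * Submission is asynchronous and completes in my_done(); the exact
		 * success code differs across kernel versions (0 in older trees,
		 * -EINPROGRESS in newer ones), so accept either here.
		 */
		if (ret == 0 || ret == -EINPROGRESS) {
			wait_for_completion(&res.done);
			ret = res.err;
		}

		caam_jr_free(jrdev);		/* give the ring back */
		return ret;
	}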