Lines matching refs: rctx (drivers/crypto/n2_core.c, the Niagara2 SPARC64 crypto driver; the ablkcipher hits date this listing to a kernel before that API's removal in v5.5). Entries marked "local" are declaration sites.

305 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_hash_async_init() local
309 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in n2_hash_async_init()
310 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in n2_hash_async_init()
312 return crypto_ahash_init(&rctx->fallback_req); in n2_hash_async_init()
317 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_hash_async_update() local
321 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in n2_hash_async_update()
322 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in n2_hash_async_update()
323 rctx->fallback_req.nbytes = req->nbytes; in n2_hash_async_update()
324 rctx->fallback_req.src = req->src; in n2_hash_async_update()
326 return crypto_ahash_update(&rctx->fallback_req); in n2_hash_async_update()
331 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_hash_async_final() local
335 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in n2_hash_async_final()
336 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in n2_hash_async_final()
337 rctx->fallback_req.result = req->result; in n2_hash_async_final()
339 return crypto_ahash_final(&rctx->fallback_req); in n2_hash_async_final()
344 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_hash_async_finup() local
348 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in n2_hash_async_finup()
349 rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP; in n2_hash_async_finup()
350 rctx->fallback_req.nbytes = req->nbytes; in n2_hash_async_finup()
351 rctx->fallback_req.src = req->src; in n2_hash_async_finup()
352 rctx->fallback_req.result = req->result; in n2_hash_async_finup()
354 return crypto_ahash_finup(&rctx->fallback_req); in n2_hash_async_finup()
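
The four functions above (lines 305-354) are one repeated pattern: the Niagara2 hardware only performs one-shot digests, so every multi-step ahash entry point retargets an embedded fallback request at a software tfm and forwards the call. A condensed sketch of that pattern, assembled from the hits above (the ctx type name is not visible in the hits and is assumed):

    /* Delegation pattern shared by n2_hash_async_init/update/final/finup:
     * retarget the embedded fallback request, propagate only
     * CRYPTO_TFM_REQ_MAY_SLEEP from the caller's flags, copy whatever
     * parameters the specific operation needs, then forward.
     */
    static int n2_hash_async_update(struct ahash_request *req)
    {
        struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm); /* type name assumed */

        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
        rctx->fallback_req.base.flags =
            req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
        rctx->fallback_req.nbytes = req->nbytes; /* update/finup only */
        rctx->fallback_req.src = req->src;       /* update/finup only */

        return crypto_ahash_update(&rctx->fallback_req);
    }

Per the hits, init() copies no extra parameters (305-312), update() copies nbytes and src (323-324), final() copies result (337), and finup() copies all three (350-352).
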
523 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_do_async_digest() local
526 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in n2_do_async_digest()
527 rctx->fallback_req.base.flags = in n2_do_async_digest() (RHS continues on line 528, which has no rctx match; per the identical statements at 310/322/336/349 it is req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;)
529 rctx->fallback_req.nbytes = req->nbytes; in n2_do_async_digest()
530 rctx->fallback_req.src = req->src; in n2_do_async_digest()
531 rctx->fallback_req.result = req->result; in n2_do_async_digest()
533 return crypto_ahash_digest(&rctx->fallback_req); in n2_do_async_digest()
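
Lines 523-533 are the same fallback pattern inside the shared helper n2_do_async_digest(). The guard around this branch contains no rctx reference and is therefore absent from the match set; a plausible reconstruction follows, with the condition explicitly an assumption (presumably a single hardware op cannot cover an arbitrarily large request):

    /* Fragment from inside n2_do_async_digest(); tfm is in scope there.
     * The guard condition is assumed; only the branch body is attested
     * by the hits above. */
    if (unlikely(req->nbytes > (1 << 16))) { /* limit assumed */
        struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
        struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);

        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
        rctx->fallback_req.base.flags =
            req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
        rctx->fallback_req.nbytes = req->nbytes;
        rctx->fallback_req.src = req->src;
        rctx->fallback_req.result = req->result;

        return crypto_ahash_digest(&rctx->fallback_req);
    }
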
598 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_hash_async_digest() local
606 memcpy(&rctx->u, n2alg->hash_init, n2alg->hw_op_hashsz); in n2_hash_async_digest()
610 &rctx->u, 0UL, 0); in n2_hash_async_digest()
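
Lines 598-610 are the hardware one-shot path: rctx->u is scratch space for the running hash state, seeded with the algorithm's standard initial vector before the request is queued. The call spans several source lines, so only its tail (610) matches; a reassembled sketch, with names not visible in the hits marked as assumptions:

    static int n2_hash_async_digest(struct ahash_request *req)
    {
        struct n2_ahash_alg *n2alg = n2_ahash_alg(req->base.tfm); /* lookup helper assumed */
        struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);

        /* Seed the scratch state with the algorithm's initial vector. */
        memcpy(&rctx->u, n2alg->hash_init, n2alg->hw_op_hashsz);

        /* Plain hash: no HMAC key, hence the trailing 0UL, 0. */
        return n2_do_async_digest(req, n2alg->auth_type,
                                  n2alg->hw_op_hashsz,
                                  n2alg->digest_size, /* field name assumed */
                                  &rctx->u, 0UL, 0);
    }
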
616 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_hmac_async_digest() local
624 struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); in n2_hmac_async_digest() local
627 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in n2_hmac_async_digest()
628 rctx->fallback_req.base.flags = in n2_hmac_async_digest() (RHS continues on line 629 with the same CRYPTO_TFM_REQ_MAY_SLEEP mask)
630 rctx->fallback_req.nbytes = req->nbytes; in n2_hmac_async_digest()
631 rctx->fallback_req.src = req->src; in n2_hmac_async_digest()
632 rctx->fallback_req.result = req->result; in n2_hmac_async_digest()
634 return crypto_ahash_digest(&rctx->fallback_req); in n2_hmac_async_digest()
636 memcpy(&rctx->u, n2alg->derived.hash_init, in n2_hmac_async_digest()
641 &rctx->u, in n2_hmac_async_digest()
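
n2_hmac_async_digest() declares rctx twice (616 and 624): the inner declaration shadows the outer one inside a fallback branch, and the normal path (636-641) then seeds rctx->u from the HMAC-derived initial state and hands the physical address of the prepared key to the hardware. A sketch of that shape; the fallback condition, key fields, and helper names are assumptions:

    static int n2_hmac_async_digest(struct ahash_request *req)
    {
        struct n2_hmac_alg *n2alg = n2_hmac_alg(req->base.tfm); /* helper assumed */
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
        struct n2_hash_req_ctx *rctx = ahash_request_ctx(req); /* line 616 */

        if (unlikely(req->nbytes == 0 ||
                     ctx->hash_key_len > N2_HASH_KEY_MAX)) { /* condition assumed */
            /* Shadowing declarations (line 624): the branch re-derives
             * rctx and a plain n2_hash_ctx view of the same context,
             * then runs the usual software fallback (627-634). */
            struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
            struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);

            ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
            rctx->fallback_req.base.flags =
                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
            rctx->fallback_req.nbytes = req->nbytes;
            rctx->fallback_req.src = req->src;
            rctx->fallback_req.result = req->result;
            return crypto_ahash_digest(&rctx->fallback_req);
        }

        memcpy(&rctx->u, n2alg->derived.hash_init,
               n2alg->derived.hw_op_hashsz);

        return n2_do_async_digest(req, n2alg->derived.hmac_type,
                                  n2alg->derived.hw_op_hashsz,
                                  n2alg->derived.digest_size, /* assumed */
                                  &rctx->u,
                                  __pa(&ctx->hash_key[0]), /* key fields assumed */
                                  ctx->hash_key_len);
    }
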
875 struct n2_request_context *rctx = ablkcipher_request_ctx(req); in n2_compute_chunks() local
876 struct ablkcipher_walk *walk = &rctx->walk; in n2_compute_chunks()
888 INIT_LIST_HEAD(&rctx->chunk_list); in n2_compute_chunks()
890 chunk = &rctx->chunk; in n2_compute_chunks()
921 &rctx->chunk_list); in n2_compute_chunks()
948 list_add_tail(&chunk->entry, &rctx->chunk_list); in n2_compute_chunks()
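
From line 875 on the listing moves to the cipher side, where rctx is a struct n2_request_context. The hits consistently touch four fields (walk, chunk_list, chunk, temp_iv); the layout below is inferred from them, with the chunk type name and IV buffer size assumed:

    /* Inferred request-context layout for the cipher paths.  The first
     * chunk lives inline (rctx->chunk); n2_compute_chunks() (875-948)
     * initializes chunk_list, fills this inline chunk first, and only
     * heap-allocates overflow chunks (appended at 921 and 948), which
     * is why every teardown loop tests c != &rctx->chunk before kfree().
     */
    struct n2_request_context {
        struct ablkcipher_walk  walk;        /* pre-5.5 walk API */
        struct list_head        chunk_list;
        struct n2_crypto_chunk  chunk;       /* type name assumed */
        u8                      temp_iv[16]; /* size assumed */
    };
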
956 struct n2_request_context *rctx = ablkcipher_request_ctx(req); in n2_chunk_complete() local
960 memcpy(rctx->walk.iv, final_iv, rctx->walk.blocksize); in n2_chunk_complete()
962 ablkcipher_walk_complete(&rctx->walk); in n2_chunk_complete()
963 list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) { in n2_chunk_complete()
965 if (unlikely(c != &rctx->chunk)) in n2_chunk_complete()
973 struct n2_request_context *rctx = ablkcipher_request_ctx(req); in n2_do_ecb() local
990 list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) { in n2_do_ecb()
995 if (unlikely(c != &rctx->chunk)) in n2_do_ecb()
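
n2_chunk_complete() and n2_do_ecb() share one teardown idiom: iterate with the _safe list variant because entries are unlinked and freed mid-walk, and free only the heap-allocated chunks. A condensed fragment; the per-chunk submit helper and the variables around it (c, tmp, qp, err, encrypt) are assumptions standing in for the surrounding function bodies:

    /* Shared teardown: consume each chunk, unlink it, and free it only
     * if it is not the inline first chunk embedded in rctx. */
    list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
        err = __n2_crypt_chunk(tfm, c, qp, encrypt); /* helper name assumed */
        list_del(&c->entry);
        if (unlikely(c != &rctx->chunk))
            kfree(c);
        if (err)
            break;
    }

n2_chunk_complete() additionally copies the final IV back into the walk (960) before calling ablkcipher_walk_complete() (962).
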
1025 struct n2_request_context *rctx = ablkcipher_request_ctx(req); in n2_do_chaining() local
1046 iv_paddr = __pa(rctx->walk.iv); in n2_do_chaining()
1047 list_for_each_entry_safe(c, tmp, &rctx->chunk_list, in n2_do_chaining()
1053 iv_paddr = c->dest_final - rctx->walk.blocksize; in n2_do_chaining()
1055 if (unlikely(c != &rctx->chunk)) in n2_do_chaining()
1060 list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list, in n2_do_chaining()
1062 if (c == &rctx->chunk) { in n2_do_chaining()
1063 iv_paddr = __pa(rctx->walk.iv); in n2_do_chaining()
1067 rctx->walk.blocksize); in n2_do_chaining()
1074 rctx->walk.blocksize); in n2_do_chaining()
1075 final_iv_addr = rctx->temp_iv; in n2_do_chaining()
1076 memcpy(rctx->temp_iv, __va(pa), in n2_do_chaining()
1077 rctx->walk.blocksize); in n2_do_chaining()
1084 if (unlikely(c != &rctx->chunk)) in n2_do_chaining()
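
n2_do_chaining() (1025-1084) threads the IV across chunks. Encrypting, the IV starts at __pa(rctx->walk.iv) (1046) and is then taken from the tail of each chunk's output (1053); decrypting, the list is walked in reverse (1060) and the caller-visible final IV is saved into rctx->temp_iv (1074-1077) before an in-place decrypt could overwrite the source blocks. A sketch of the encrypt half, with the same assumed submit helper:

    /* Encrypt half of the chaining loop: each chunk's last ciphertext
     * block (dest_final minus one blocksize) seeds the next chunk's IV. */
    iv_paddr = __pa(rctx->walk.iv);
    list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
        c->iv_paddr = iv_paddr; /* field name assumed */
        err = __n2_crypt_chunk(tfm, c, qp, true); /* helper name assumed */
        if (err)
            break;
        iv_paddr = c->dest_final - rctx->walk.blocksize;
        list_del(&c->entry);
        if (unlikely(c != &rctx->chunk))
            kfree(c);
    }

The decrypt half mirrors this in reverse, except that the IV for the inline first chunk falls back to __pa(rctx->walk.iv) (1062-1063), since there is no earlier chunk to take it from.
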