Lines matching refs:rctx (all references to the mcryptd hash request context, rctx)
93 static void req_ctx_init(struct mcryptd_hash_request_ctx *rctx, in req_ctx_init() argument
96 rctx->flag = HASH_UPDATE; in req_ctx_init()
363 static int sha1_mb_set_results(struct mcryptd_hash_request_ctx *rctx) in sha1_mb_set_results() argument
366 struct sha1_hash_ctx *sctx = shash_desc_ctx(&rctx->desc); in sha1_mb_set_results()
367 __be32 *dst = (__be32 *) rctx->out; in sha1_mb_set_results()
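
The first two groups of hits are the setup and output helpers: req_ctx_init() simply stamps the request context with HASH_UPDATE, and sha1_mb_set_results() copies the SHA-1 state words into the caller's buffer through rctx->out as big-endian __be32 values. Below is a minimal userspace sketch of that byte-order fixup, assuming the usual five 32-bit SHA-1 state words and using htonl() in place of cpu_to_be32(); set_results() and SHA1_DIGEST_WORDS are illustrative names, not taken from the file.

    #include <arpa/inet.h>  /* htonl() stands in for cpu_to_be32() */
    #include <stdint.h>
    #include <stdio.h>

    #define SHA1_DIGEST_WORDS 5

    /* Copy the CPU-endian working state into the user-visible big-endian
     * digest, the way sha1_mb_set_results() writes through rctx->out. */
    static void set_results(const uint32_t state[SHA1_DIGEST_WORDS],
                            uint32_t out_be[SHA1_DIGEST_WORDS])
    {
        for (int i = 0; i < SHA1_DIGEST_WORDS; i++)
            out_be[i] = htonl(state[i]);
    }

    int main(void)
    {
        /* SHA-1 initial state, used here only as sample input */
        uint32_t state[SHA1_DIGEST_WORDS] = {
            0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0
        };
        uint32_t digest[SHA1_DIGEST_WORDS];

        set_results(state, digest);
        for (int i = 0; i < SHA1_DIGEST_WORDS; i++)
            printf("%08x", ntohl(digest[i]));
        printf("\n");
        return 0;
    }
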
380 struct mcryptd_hash_request_ctx *rctx = *ret_rctx; in sha_finish_walk() local
384 while (!(rctx->flag & HASH_DONE)) { in sha_finish_walk()
385 nbytes = crypto_ahash_walk_done(&rctx->walk, 0); in sha_finish_walk()
391 if (crypto_ahash_walk_last(&rctx->walk)) { in sha_finish_walk()
392 rctx->flag |= HASH_DONE; in sha_finish_walk()
393 if (rctx->flag & HASH_FINAL) in sha_finish_walk()
397 sha_ctx = (struct sha1_hash_ctx *) shash_desc_ctx(&rctx->desc); in sha_finish_walk()
399 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag); in sha_finish_walk()
406 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); in sha_finish_walk()
408 rctx = NULL; in sha_finish_walk()
414 if (rctx->flag & HASH_FINAL) in sha_finish_walk()
415 sha1_mb_set_results(rctx); in sha_finish_walk()
418 *ret_rctx = rctx; in sha_finish_walk()
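
sha_finish_walk() is the draining loop: while HASH_DONE is unset it keeps advancing the ahash walk, marks HASH_DONE on the last chunk (and requests a finalizing submit when HASH_FINAL is also set), resubmits each chunk to the context manager, and, once the walk ends with HASH_FINAL set, writes the digest and hands the possibly-replaced rctx back through ret_rctx. The following is a compact userspace model of that control flow only; the walk and submit helpers are toy stand-ins and the flag bit values are illustrative.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define HASH_UPDATE 0x1     /* illustrative bit values */
    #define HASH_DONE   0x2
    #define HASH_FINAL  0x4

    struct req_ctx {
        unsigned int flag;
        size_t remaining;       /* stands in for the ahash walk state */
    };

    /* Toy stand-ins for crypto_ahash_walk_done()/_last() and
     * sha1_ctx_mgr_submit(); they only model the control flow. */
    static size_t walk_next(struct req_ctx *r)
    {
        size_t n = r->remaining > 64 ? 64 : r->remaining;
        r->remaining -= n;
        return n;
    }
    static bool walk_last(const struct req_ctx *r) { return r->remaining == 0; }
    static void submit(size_t n, bool last) { printf("submit %zu%s\n", n, last ? " (last)" : ""); }
    static void set_results(void) { puts("digest written via rctx->out"); }

    /* Same shape as sha_finish_walk(): drain the walk, then finalize. */
    static void finish_walk(struct req_ctx *r)
    {
        while (!(r->flag & HASH_DONE)) {
            bool last = false;
            size_t nbytes = walk_next(r);

            if (walk_last(r)) {
                r->flag |= HASH_DONE;
                if (r->flag & HASH_FINAL)
                    last = true;    /* tell the manager this is the end */
            }
            submit(nbytes, last);
        }
        if (r->flag & HASH_FINAL)
            set_results();
    }

    int main(void)
    {
        struct req_ctx r = { .flag = HASH_UPDATE | HASH_FINAL, .remaining = 150 };
        finish_walk(&r);
        return 0;
    }
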
422 static int sha_complete_job(struct mcryptd_hash_request_ctx *rctx, in sha_complete_job() argument
426 struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx); in sha_complete_job()
433 list_del(&rctx->waiter); in sha_complete_job()
437 rctx->complete(&req->base, err); in sha_complete_job()
440 rctx->complete(&req->base, err); in sha_complete_job()
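
sha_complete_job() is the hand-back step: it unlinks the finished request from the work list (list_del(&rctx->waiter)) and invokes the request's completion callback; the two rctx->complete() calls at lines 437 and 440 are the same completion invoked on different call paths. A tiny sketch of that step, with a hypothetical callback type standing in for the crypto API's completion function:

    #include <stdio.h>

    /* Hypothetical completion callback type; the driver calls
     * rctx->complete(&req->base, err) from the crypto API. */
    typedef void (*complete_fn)(void *req, int err);

    struct job {
        struct job *prev, *next;    /* stands in for the list_head waiter */
        void *req;
        complete_fn complete;
    };

    static void complete_job(struct job *j, int err)
    {
        /* list_del(&rctx->waiter): unlink from the per-CPU work list */
        if (j->prev)
            j->prev->next = j->next;
        if (j->next)
            j->next->prev = j->prev;
        j->prev = j->next = NULL;

        j->complete(j->req, err);   /* hand the result back to the caller */
    }

    static void done(void *req, int err) { printf("req %p done, err=%d\n", req, err); }

    int main(void)
    {
        int dummy_req;
        struct job j = { .req = &dummy_req, .complete = done };
        complete_job(&j, 0);
        return 0;
    }
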
469 static void sha1_mb_add_list(struct mcryptd_hash_request_ctx *rctx, in sha1_mb_add_list() argument
476 rctx->tag.arrival = jiffies; /* tag the arrival time */ in sha1_mb_add_list()
477 rctx->tag.seq_num = cstate->next_seq_num++; in sha1_mb_add_list()
478 next_flush = rctx->tag.arrival + delay; in sha1_mb_add_list()
479 rctx->tag.expire = next_flush; in sha1_mb_add_list()
482 list_add_tail(&rctx->waiter, &cstate->work_list); in sha1_mb_add_list()
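
sha1_mb_add_list() time-tags the request: arrival is the current jiffies, seq_num is a running per-CPU counter, and expire is arrival plus a flush delay; the request is then appended to the per-CPU work_list that the flusher walks. A userspace model of that tagging, with time(2) standing in for jiffies, a counter standing in for list_add_tail(), and an invented FLUSH_DELAY value:

    #include <stdio.h>
    #include <time.h>

    #define FLUSH_DELAY 2   /* illustrative delay, in seconds rather than jiffies */

    struct tag {
        time_t arrival;
        time_t expire;
        unsigned long seq_num;
    };

    struct mb_state {
        unsigned long next_seq_num;
        /* the driver keeps a list_head work_list; a counter suffices here */
        unsigned int queued;
    };

    static time_t add_to_list(struct tag *t, struct mb_state *s)
    {
        t->arrival = time(NULL);                /* tag the arrival time */
        t->seq_num = s->next_seq_num++;         /* order of submission */
        t->expire  = t->arrival + FLUSH_DELAY;  /* deadline for the flusher */
        s->queued++;                            /* list_add_tail() in the driver */
        return t->expire;                       /* next_flush candidate */
    }

    int main(void)
    {
        struct mb_state s = { 0 };
        struct tag t;

        printf("expires at %ld (seq %lu)\n", (long)add_to_list(&t, &s), t.seq_num);
        return 0;
    }
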
491 struct mcryptd_hash_request_ctx *rctx = in sha1_mb_update() local
496 struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx); in sha1_mb_update()
502 if (rctx->tag.cpu != smp_processor_id()) { in sha1_mb_update()
508 req_ctx_init(rctx, desc); in sha1_mb_update()
510 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha1_mb_update()
517 if (crypto_ahash_walk_last(&rctx->walk)) in sha1_mb_update()
518 rctx->flag |= HASH_DONE; in sha1_mb_update()
522 sha1_mb_add_list(rctx, cstate); in sha1_mb_update()
524 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, HASH_UPDATE); in sha1_mb_update()
533 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); in sha1_mb_update()
537 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); in sha1_mb_update()
538 ret = sha_finish_walk(&rctx, cstate, false); in sha1_mb_update()
540 if (!rctx) in sha1_mb_update()
543 sha_complete_job(rctx, cstate, ret); in sha1_mb_update()
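
sha1_mb_update() strings the pieces together: verify the request is on the CPU it was queued for, initialize the request context, start the ahash walk (marking HASH_DONE if the first chunk is also the last), queue the request with sha1_mb_add_list(), submit the data to the multibuffer context manager, and, if the manager returns a completed context (which may belong to a different request, hence the casts back to an mcryptd ctx), run sha_finish_walk() and sha_complete_job() on it. Below is a condensed userspace sketch of that call sequence; every helper is a stand-in, and locking and error paths are omitted.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    struct rctx { unsigned int flag; size_t nbytes; };

    /* Stand-ins for the driver's helpers; they only echo the call order. */
    static void req_ctx_init(struct rctx *r) { r->flag = 0x1; /* HASH_UPDATE */ }
    static size_t walk_first(struct rctx *r) { return r->nbytes; }
    static bool walk_last(const struct rctx *r) { return r->nbytes <= 64; }
    static void add_list(struct rctx *r) { (void)r; puts("queued on work_list"); }
    static struct rctx *mgr_submit(struct rctx *r, size_t n)
    {
        printf("submitted %zu bytes\n", n);
        return r;   /* may return a different, completed request, or NULL */
    }
    static void finish_walk(struct rctx *r) { (void)r; puts("walk drained"); }
    static void complete_job(struct rctx *r) { (void)r; puts("completion callback run"); }

    static int mb_update(struct rctx *r)
    {
        struct rctx *done;
        size_t nbytes;

        req_ctx_init(r);              /* mark the op as HASH_UPDATE */
        nbytes = walk_first(r);       /* map the first chunk of input */
        if (walk_last(r))
            r->flag |= 0x2;           /* HASH_DONE */
        add_list(r);                  /* tag + enqueue for the flusher */

        done = mgr_submit(r, nbytes); /* lanes may finish some other job */
        if (!done)
            return 0;                 /* nothing has completed yet */

        finish_walk(done);            /* drain whichever job completed */
        complete_job(done);           /* unlink it and notify its caller */
        return 0;
    }

    int main(void)
    {
        struct rctx r = { .nbytes = 64 };
        return mb_update(&r);
    }
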
550 struct mcryptd_hash_request_ctx *rctx = in sha1_mb_finup() local
555 struct ahash_request *req = cast_mcryptd_ctx_to_req(rctx); in sha1_mb_finup()
560 if (rctx->tag.cpu != smp_processor_id()) { in sha1_mb_finup()
566 req_ctx_init(rctx, desc); in sha1_mb_finup()
568 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha1_mb_finup()
575 if (crypto_ahash_walk_last(&rctx->walk)) { in sha1_mb_finup()
576 rctx->flag |= HASH_DONE; in sha1_mb_finup()
579 rctx->out = out; in sha1_mb_finup()
582 rctx->flag |= HASH_FINAL; in sha1_mb_finup()
584 sha1_mb_add_list(rctx, cstate); in sha1_mb_finup()
587 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag); in sha1_mb_finup()
599 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); in sha1_mb_finup()
600 ret = sha_finish_walk(&rctx, cstate, false); in sha1_mb_finup()
601 if (!rctx) in sha1_mb_finup()
604 sha_complete_job(rctx, cstate, ret); in sha1_mb_finup()
610 struct mcryptd_hash_request_ctx *rctx = in sha1_mb_final() local
620 if (rctx->tag.cpu != smp_processor_id()) { in sha1_mb_final()
626 req_ctx_init(rctx, desc); in sha1_mb_final()
628 rctx->out = out; in sha1_mb_final()
629 rctx->flag |= HASH_DONE | HASH_FINAL; in sha1_mb_final()
633 sha1_mb_add_list(rctx, cstate); in sha1_mb_final()
644 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); in sha1_mb_final()
648 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); in sha1_mb_final()
649 ret = sha_finish_walk(&rctx, cstate, false); in sha1_mb_final()
650 if (!rctx) in sha1_mb_final()
653 sha_complete_job(rctx, cstate, ret); in sha1_mb_final()
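
sha1_mb_finup() and sha1_mb_final() repeat the update pattern; the differences visible above are that both record the output buffer in rctx->out and raise HASH_FINAL, and final additionally sets HASH_DONE up front since there is no data left to walk, so sha_finish_walk() can finalize and sha1_mb_set_results() can emit the digest. A minimal sketch of just that flag and output setup; field names mirror the listing, bit values are illustrative.

    #include <stdio.h>

    #define HASH_UPDATE 0x1     /* illustrative bit values */
    #define HASH_DONE   0x2
    #define HASH_FINAL  0x4

    struct rctx {
        unsigned int flag;
        unsigned char *out;     /* where sha1_mb_set_results() will write */
    };

    /* finup: data may still need walking, so only HASH_FINAL is set here;
     * HASH_DONE is raised once the walk reaches its last chunk. */
    static void prep_finup(struct rctx *r, unsigned char *out)
    {
        r->flag = HASH_UPDATE;
        r->out = out;
        r->flag |= HASH_FINAL;
    }

    /* final: no more input, so the request is done and final at once. */
    static void prep_final(struct rctx *r, unsigned char *out)
    {
        r->flag = HASH_UPDATE;
        r->out = out;
        r->flag |= HASH_DONE | HASH_FINAL;
    }

    int main(void)
    {
        unsigned char digest[20];
        struct rctx r;

        prep_final(&r, digest);
        printf("final flags: %#x\n", r.flag);
        prep_finup(&r, digest);
        printf("finup flags: %#x\n", r.flag);
        return 0;
    }
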
820 struct mcryptd_hash_request_ctx *rctx; in sha1_mb_flusher() local
829 rctx = list_entry(cstate->work_list.next, in sha1_mb_flusher()
831 if (time_before(cur_time, rctx->tag.expire)) in sha1_mb_flusher()
840 rctx = cast_hash_to_mcryptd_ctx(sha_ctx); in sha1_mb_flusher()
841 sha_finish_walk(&rctx, cstate, true); in sha1_mb_flusher()
842 sha_complete_job(rctx, cstate, 0); in sha1_mb_flusher()
846 rctx = list_entry(cstate->work_list.next, in sha1_mb_flusher()
849 next_flush = rctx->tag.expire; in sha1_mb_flusher()
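
sha1_mb_flusher() is the anti-starvation path: it scans the per-CPU work_list oldest-first, stops at the first entry whose expire tag has not passed (time_before()), force-flushes the context manager for the expired ones and runs sha_finish_walk() plus sha_complete_job() on each job it gets back, then returns the expire time of the new list head as the next flush deadline. A userspace model of that scan, again with time(2) in place of jiffies and a plain array in place of the list; the helper names are invented.

    #include <stdio.h>
    #include <time.h>

    struct entry { time_t expire; int id; };

    /* Scan pending entries oldest-first, "flush" the expired ones and
     * report when the flusher should run next (0 if the list is empty). */
    static time_t flusher(struct entry *list, int *count)
    {
        time_t now = time(NULL);
        int i;

        /* entries are in arrival order, so stop at the first one
         * whose deadline has not passed yet (time_before()) */
        for (i = 0; i < *count; i++) {
            if (now < list[i].expire)
                break;
            /* expired: the driver forces a ctx_mgr flush here, then
             * runs sha_finish_walk() + sha_complete_job() on the job */
            printf("flushing job %d\n", list[i].id);
        }

        /* drop the flushed prefix, keep the rest in arrival order */
        *count -= i;
        for (int k = 0; k < *count; k++)
            list[k] = list[k + i];

        return *count ? list[0].expire : 0;     /* next flush deadline */
    }

    int main(void)
    {
        time_t now = time(NULL);
        struct entry list[] = {
            { .expire = now - 1, .id = 1 },     /* already expired */
            { .expire = now + 5, .id = 2 },     /* still has time  */
        };
        int count = 2;

        time_t next = flusher(list, &count);
        printf("%d job(s) left, next flush at %ld\n", count, (long)next);
        return 0;
    }
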