Lines Matching refs:ctx

88 static inline struct ahash_request *cast_mcryptd_ctx_to_req(struct mcryptd_hash_request_ctx *ctx)  in cast_mcryptd_ctx_to_req()  argument
90 return container_of((void *) ctx, struct ahash_request, __ctx); in cast_mcryptd_ctx_to_req()
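The cast at line 90 is the usual container_of() pattern: the request context is embedded in the enclosing ahash_request, so a pointer to the member can be walked back to the outer structure. A minimal user-space sketch of the same idea, using hypothetical demo_* types in place of the kernel structures:

#include <stddef.h>
#include <stdio.h>

/* Portable container_of(): recover the outer struct from a member pointer. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Hypothetical stand-ins for ahash_request and its trailing __ctx area. */
struct demo_request_ctx { int state; };
struct demo_request {
	int flags;
	struct demo_request_ctx __ctx;
};

static struct demo_request *cast_ctx_to_req(struct demo_request_ctx *ctx)
{
	return container_of(ctx, struct demo_request, __ctx);
}

int main(void)
{
	struct demo_request req = { .flags = 1 };

	/* Prints 1: the member pointer maps back to the enclosing request. */
	printf("recovered: %d\n", cast_ctx_to_req(&req.__ctx) == &req);
	return 0;
}
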
134 static struct sha1_hash_ctx *sha1_ctx_mgr_resubmit(struct sha1_ctx_mgr *mgr, struct sha1_hash_ctx *ctx) in sha1_ctx_mgr_resubmit() argument
136 while (ctx) { in sha1_ctx_mgr_resubmit()
137 if (ctx->status & HASH_CTX_STS_COMPLETE) { in sha1_ctx_mgr_resubmit()
139 ctx->status = HASH_CTX_STS_COMPLETE; in sha1_ctx_mgr_resubmit()
140 return ctx; in sha1_ctx_mgr_resubmit()
147 if (ctx->partial_block_buffer_length == 0 && in sha1_ctx_mgr_resubmit()
148 ctx->incoming_buffer_length) { in sha1_ctx_mgr_resubmit()
150 const void *buffer = ctx->incoming_buffer; in sha1_ctx_mgr_resubmit()
151 uint32_t len = ctx->incoming_buffer_length; in sha1_ctx_mgr_resubmit()
162 memcpy(ctx->partial_block_buffer, in sha1_ctx_mgr_resubmit()
165 ctx->partial_block_buffer_length = copy_len; in sha1_ctx_mgr_resubmit()
168 ctx->incoming_buffer_length = 0; in sha1_ctx_mgr_resubmit()
178 ctx->job.buffer = (uint8_t *) buffer; in sha1_ctx_mgr_resubmit()
179 ctx->job.len = len; in sha1_ctx_mgr_resubmit()
180 ctx = (struct sha1_hash_ctx *) sha1_job_mgr_submit(&mgr->mgr, in sha1_ctx_mgr_resubmit()
181 &ctx->job); in sha1_ctx_mgr_resubmit()
191 if (ctx->status & HASH_CTX_STS_LAST) { in sha1_ctx_mgr_resubmit()
193 uint8_t *buf = ctx->partial_block_buffer; in sha1_ctx_mgr_resubmit()
194 uint32_t n_extra_blocks = sha1_pad(buf, ctx->total_length); in sha1_ctx_mgr_resubmit()
196 ctx->status = (HASH_CTX_STS_PROCESSING | in sha1_ctx_mgr_resubmit()
198 ctx->job.buffer = buf; in sha1_ctx_mgr_resubmit()
199 ctx->job.len = (uint32_t) n_extra_blocks; in sha1_ctx_mgr_resubmit()
200 ctx = (struct sha1_hash_ctx *) sha1_job_mgr_submit(&mgr->mgr, &ctx->job); in sha1_ctx_mgr_resubmit()
204 ctx->status = HASH_CTX_STS_IDLE; in sha1_ctx_mgr_resubmit()
205 return ctx; in sha1_ctx_mgr_resubmit()
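Lines 136-205 above are the resubmit loop that drives each context's state machine: a context flagged COMPLETE is handed back, buffered incoming data is resubmitted as whole blocks, a context flagged LAST gets its padding blocks, and anything else is parked as IDLE. A hedged sketch of that flag dispatch, with illustrative HASH_CTX_STS_* values rather than the kernel's definitions:

#include <stdint.h>
#include <stdio.h>

/* Illustrative flag values; the real definitions live in the sha1-mb headers. */
#define HASH_CTX_STS_IDLE        0x00
#define HASH_CTX_STS_PROCESSING  0x01
#define HASH_CTX_STS_LAST        0x02
#define HASH_CTX_STS_COMPLETE    0x04

struct demo_hash_ctx {
	uint32_t status;
	uint32_t incoming_buffer_length;
	uint32_t partial_block_buffer_length;
};

/*
 * Decide what the resubmit loop would do next with a context the job
 * manager just handed back.
 */
static const char *demo_next_step(struct demo_hash_ctx *ctx)
{
	if (ctx->status & HASH_CTX_STS_COMPLETE) {
		ctx->status = HASH_CTX_STS_COMPLETE;	/* clear other bits */
		return "return to caller";
	}
	if (ctx->partial_block_buffer_length == 0 && ctx->incoming_buffer_length)
		return "submit whole blocks of incoming data";
	if (ctx->status & HASH_CTX_STS_LAST)
		return "pad and submit the final block(s)";
	ctx->status = HASH_CTX_STS_IDLE;
	return "park as idle";
}

int main(void)
{
	struct demo_hash_ctx ctx = { .status = HASH_CTX_STS_PROCESSING |
						HASH_CTX_STS_LAST };

	printf("%s\n", demo_next_step(&ctx));
	return 0;
}

Keeping status as a bitmask lets PROCESSING and LAST be set together for the final update, as the OR at line 196 suggests.
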
220 struct sha1_hash_ctx *ctx; in sha1_ctx_mgr_get_comp_ctx() local
222 ctx = (struct sha1_hash_ctx *) sha1_job_mgr_get_comp_job(&mgr->mgr); in sha1_ctx_mgr_get_comp_ctx()
223 return sha1_ctx_mgr_resubmit(mgr, ctx); in sha1_ctx_mgr_get_comp_ctx()
232 struct sha1_hash_ctx *ctx, in sha1_ctx_mgr_submit() argument
239 ctx->error = HASH_CTX_ERROR_INVALID_FLAGS; in sha1_ctx_mgr_submit()
240 return ctx; in sha1_ctx_mgr_submit()
243 if (ctx->status & HASH_CTX_STS_PROCESSING) { in sha1_ctx_mgr_submit()
245 ctx->error = HASH_CTX_ERROR_ALREADY_PROCESSING; in sha1_ctx_mgr_submit()
246 return ctx; in sha1_ctx_mgr_submit()
249 if ((ctx->status & HASH_CTX_STS_COMPLETE) && !(flags & HASH_FIRST)) { in sha1_ctx_mgr_submit()
251 ctx->error = HASH_CTX_ERROR_ALREADY_COMPLETED; in sha1_ctx_mgr_submit()
252 return ctx; in sha1_ctx_mgr_submit()
258 sha1_init_digest(ctx->job.result_digest); in sha1_ctx_mgr_submit()
261 ctx->total_length = 0; in sha1_ctx_mgr_submit()
264 ctx->partial_block_buffer_length = 0; in sha1_ctx_mgr_submit()
268 ctx->error = HASH_CTX_ERROR_NONE; in sha1_ctx_mgr_submit()
271 ctx->incoming_buffer = buffer; in sha1_ctx_mgr_submit()
272 ctx->incoming_buffer_length = len; in sha1_ctx_mgr_submit()
275 ctx->status = (flags & HASH_LAST) ? in sha1_ctx_mgr_submit()
280 ctx->total_length += len; in sha1_ctx_mgr_submit()
288 if ((ctx->partial_block_buffer_length) | (len < SHA1_BLOCK_SIZE)) { in sha1_ctx_mgr_submit()
290 uint32_t copy_len = SHA1_BLOCK_SIZE - ctx->partial_block_buffer_length; in sha1_ctx_mgr_submit()
296 memcpy(&ctx->partial_block_buffer[ctx->partial_block_buffer_length], in sha1_ctx_mgr_submit()
299 ctx->partial_block_buffer_length += copy_len; in sha1_ctx_mgr_submit()
300 ctx->incoming_buffer = (const void *)((const char *)buffer + copy_len); in sha1_ctx_mgr_submit()
301 ctx->incoming_buffer_length = len - copy_len; in sha1_ctx_mgr_submit()
305 assert(ctx->partial_block_buffer_length <= SHA1_BLOCK_SIZE); in sha1_ctx_mgr_submit()
308 if (ctx->partial_block_buffer_length >= SHA1_BLOCK_SIZE) { in sha1_ctx_mgr_submit()
309 ctx->partial_block_buffer_length = 0; in sha1_ctx_mgr_submit()
311 ctx->job.buffer = ctx->partial_block_buffer; in sha1_ctx_mgr_submit()
312 ctx->job.len = 1; in sha1_ctx_mgr_submit()
313 ctx = (struct sha1_hash_ctx *) sha1_job_mgr_submit(&mgr->mgr, &ctx->job); in sha1_ctx_mgr_submit()
317 return sha1_ctx_mgr_resubmit(mgr, ctx); in sha1_ctx_mgr_submit()
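Lines 288-313 are the sub-block buffering path of sha1_ctx_mgr_submit(): input that does not fill a whole SHA1_BLOCK_SIZE block is copied into the partial block buffer, and only once that buffer holds a full block is a one-block job handed to the job manager. A self-contained sketch of that arithmetic (demo_* names are hypothetical; the clamp of copy_len to len is implied by the surrounding code but not visible in the matched lines):

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define SHA1_BLOCK_SIZE 64	/* SHA-1 block size in bytes */

/* Stripped-down context: only the fields the buffering logic touches. */
struct demo_ctx {
	uint8_t  partial_block_buffer[SHA1_BLOCK_SIZE];
	uint32_t partial_block_buffer_length;
	const void *incoming_buffer;
	uint32_t incoming_buffer_length;
};

/*
 * Top up the partial block with new input and report whether a full
 * block is now ready to submit, mirroring lines 288-313 above.
 */
static int demo_buffer_input(struct demo_ctx *ctx, const void *buf, uint32_t len)
{
	uint32_t copy_len = SHA1_BLOCK_SIZE - ctx->partial_block_buffer_length;

	if (copy_len > len)
		copy_len = len;

	memcpy(&ctx->partial_block_buffer[ctx->partial_block_buffer_length],
	       buf, copy_len);

	ctx->partial_block_buffer_length += copy_len;
	ctx->incoming_buffer = (const char *)buf + copy_len;
	ctx->incoming_buffer_length = len - copy_len;

	if (ctx->partial_block_buffer_length >= SHA1_BLOCK_SIZE) {
		ctx->partial_block_buffer_length = 0;
		return 1;	/* one full block ready for the job manager */
	}
	return 0;
}

int main(void)
{
	struct demo_ctx ctx = { .partial_block_buffer_length = 40 };
	uint8_t data[100] = { 0 };

	/* 24 bytes complete the buffered block; 76 bytes remain incoming. */
	printf("full block ready: %d, leftover: %u\n",
	       demo_buffer_input(&ctx, data, sizeof(data)),
	       ctx.incoming_buffer_length);
	return 0;
}
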
322 struct sha1_hash_ctx *ctx; in sha1_ctx_mgr_flush() local
325 ctx = (struct sha1_hash_ctx *) sha1_job_mgr_flush(&mgr->mgr); in sha1_ctx_mgr_flush()
328 if (!ctx) in sha1_ctx_mgr_flush()
334 ctx = sha1_ctx_mgr_resubmit(mgr, ctx); in sha1_ctx_mgr_flush()
341 if (ctx) in sha1_ctx_mgr_flush()
342 return ctx; in sha1_ctx_mgr_flush()
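Lines 322-342 are the flush path: pull a job out of the lanes, give up when nothing is in flight, otherwise run the returned context through resubmit and loop until one completes. A toy sketch of that loop shape with a mocked job manager (none of these demo_* helpers are kernel APIs):

#include <stddef.h>
#include <stdio.h>

/* Mock manager: pretend three jobs are in flight and pop one per flush. */
struct demo_mgr { int in_flight; };

static int *demo_job_mgr_flush(struct demo_mgr *mgr)
{
	static int job;

	return mgr->in_flight ? (mgr->in_flight--, &job) : NULL;
}

/* Pretend only the last flushed job comes back complete. */
static int *demo_resubmit(struct demo_mgr *mgr, int *job)
{
	return mgr->in_flight == 0 ? job : NULL;
}

/*
 * Same loop shape as sha1_ctx_mgr_flush(): keep flushing until a context
 * completes or the lanes are empty.
 */
static int *demo_flush(struct demo_mgr *mgr)
{
	while (1) {
		int *job = demo_job_mgr_flush(mgr);

		if (!job)
			return NULL;	/* nothing left in flight */
		job = demo_resubmit(mgr, job);
		if (job)
			return job;	/* a context finished */
	}
}

int main(void)
{
	struct demo_mgr mgr = { .in_flight = 3 };

	printf("flushed a completed job: %d\n", demo_flush(&mgr) != NULL);
	return 0;
}
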
705 struct sha1_mb_ctx *ctx = crypto_ahash_ctx(tfm); in sha1_mb_async_init() local
707 struct mcryptd_ahash *mcryptd_tfm = ctx->mcryptd_tfm; in sha1_mb_async_init()
719 struct sha1_mb_ctx *ctx = crypto_ahash_ctx(tfm); in sha1_mb_async_update() local
720 struct mcryptd_ahash *mcryptd_tfm = ctx->mcryptd_tfm; in sha1_mb_async_update()
732 struct sha1_mb_ctx *ctx = crypto_ahash_ctx(tfm); in sha1_mb_async_finup() local
733 struct mcryptd_ahash *mcryptd_tfm = ctx->mcryptd_tfm; in sha1_mb_async_finup()
745 struct sha1_mb_ctx *ctx = crypto_ahash_ctx(tfm); in sha1_mb_async_final() local
746 struct mcryptd_ahash *mcryptd_tfm = ctx->mcryptd_tfm; in sha1_mb_async_final()
756 struct sha1_mb_ctx *ctx = crypto_ahash_ctx(tfm); in sha1_mb_async_digest() local
758 struct mcryptd_ahash *mcryptd_tfm = ctx->mcryptd_tfm; in sha1_mb_async_digest()
768 struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm); in sha1_mb_async_init_tfm() local
778 ctx->mcryptd_tfm = mcryptd_tfm; in sha1_mb_async_init_tfm()
788 struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm); in sha1_mb_async_exit_tfm() local
790 mcryptd_free_ahash(ctx->mcryptd_tfm); in sha1_mb_async_exit_tfm()
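Lines 705-790 show the outer ahash glue: each tfm's private sha1_mb_ctx holds a pointer to an inner mcryptd ahash, the async entry points forward to it, init_tfm allocates it, and exit_tfm releases it with mcryptd_free_ahash(). A hedged sketch of that wrap-and-forward lifetime, using plain malloc/free and hypothetical demo_* types instead of the kernel crypto API:

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-ins for the mcryptd handle and the outer tfm context. */
struct demo_mcryptd_ahash { const char *name; };
struct demo_mb_ctx { struct demo_mcryptd_ahash *mcryptd_tfm; };

/* init_tfm: allocate the inner handle and stash it in the outer context. */
static int demo_init_tfm(struct demo_mb_ctx *ctx)
{
	ctx->mcryptd_tfm = malloc(sizeof(*ctx->mcryptd_tfm));
	if (!ctx->mcryptd_tfm)
		return -1;
	ctx->mcryptd_tfm->name = "demo-inner-hash";	/* placeholder name */
	return 0;
}

/* Every async entry point simply forwards to the inner handle. */
static void demo_update(struct demo_mb_ctx *ctx)
{
	printf("forwarding update to %s\n", ctx->mcryptd_tfm->name);
}

/* exit_tfm: release the inner handle, mirroring mcryptd_free_ahash(). */
static void demo_exit_tfm(struct demo_mb_ctx *ctx)
{
	free(ctx->mcryptd_tfm);
	ctx->mcryptd_tfm = NULL;
}

int main(void)
{
	struct demo_mb_ctx ctx;

	if (demo_init_tfm(&ctx))
		return 1;
	demo_update(&ctx);
	demo_exit_tfm(&ctx);
	return 0;
}
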