Lines Matching refs:ctx in drivers/crypto/img-hash.c

165 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_start() local
168 if (ctx->flags & DRIVER_FLAGS_MD5) in img_hash_start()
170 else if (ctx->flags & DRIVER_FLAGS_SHA1) in img_hash_start()
172 else if (ctx->flags & DRIVER_FLAGS_SHA224) in img_hash_start()
174 else if (ctx->flags & DRIVER_FLAGS_SHA256) in img_hash_start()
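The ladder above in img_hash_start() picks the hashing mode the hardware is programmed with from the request flags. Below is a minimal standalone sketch of that flag-to-mode dispatch; the flag bit values and the mode enum are placeholders, not the driver's actual definitions, and the real code writes a hardware control register instead of returning an enum.

```c
#include <stdint.h>
#include <stdio.h>

/* Placeholder flag bits; the driver's DRIVER_FLAGS_* values are not shown in the listing. */
#define DRIVER_FLAGS_MD5    (1u << 0)
#define DRIVER_FLAGS_SHA1   (1u << 1)
#define DRIVER_FLAGS_SHA224 (1u << 2)
#define DRIVER_FLAGS_SHA256 (1u << 3)

/* Hypothetical mode values standing in for the hardware control setting. */
enum hash_mode { MODE_MD5, MODE_SHA1, MODE_SHA224, MODE_SHA256 };

/* Same if/else ladder shape as img_hash_start(): one algorithm flag wins. */
static enum hash_mode select_mode(uint32_t flags)
{
	if (flags & DRIVER_FLAGS_MD5)
		return MODE_MD5;
	else if (flags & DRIVER_FLAGS_SHA1)
		return MODE_SHA1;
	else if (flags & DRIVER_FLAGS_SHA224)
		return MODE_SHA224;
	return MODE_SHA256;
}

int main(void)
{
	printf("mode=%d\n", select_mode(DRIVER_FLAGS_SHA1)); /* prints mode=1 */
	return 0;
}
```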
211 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_dma_callback() local
213 if (ctx->bufcnt) { in img_hash_dma_callback()
214 img_hash_xmit_cpu(hdev, ctx->buffer, ctx->bufcnt, 0); in img_hash_dma_callback()
215 ctx->bufcnt = 0; in img_hash_dma_callback()
217 if (ctx->sg) in img_hash_dma_callback()
224 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_xmit_dma() local
226 ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_MEM_TO_DEV); in img_hash_xmit_dma()
227 if (ctx->dma_ct == 0) { in img_hash_xmit_dma()
235 ctx->dma_ct, in img_hash_xmit_dma()
254 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_write_via_cpu() local
256 ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg), in img_hash_write_via_cpu()
257 ctx->buffer, hdev->req->nbytes); in img_hash_write_via_cpu()
259 ctx->total = hdev->req->nbytes; in img_hash_write_via_cpu()
260 ctx->bufcnt = 0; in img_hash_write_via_cpu()
266 return img_hash_xmit_cpu(hdev, ctx->buffer, ctx->total, 1); in img_hash_write_via_cpu()
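img_hash_write_via_cpu() linearises the whole request into ctx->buffer with sg_copy_to_buffer() and then pushes it to the device by CPU writes. A simplified standalone sketch of that gather-then-write pattern follows; the struct chunk type and writel_fifo() are hypothetical stand-ins for a scatterlist entry and the device register write, and the sub-word tail is ignored here.

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-in for one scatterlist entry. */
struct chunk { const void *buf; size_t len; };

/* Hypothetical FIFO write; the driver writes the device's input register here. */
static void writel_fifo(uint32_t word)
{
	printf("fifo <- %08x\n", (unsigned)word);
}

/* Gather the scattered chunks into one bounce buffer, then feed it out word by
 * word (the sg_copy_to_buffer() + img_hash_xmit_cpu() pattern). */
static void write_via_cpu(const struct chunk *sg, size_t nents, uint8_t *bounce)
{
	size_t total = 0;

	for (size_t i = 0; i < nents; i++) {
		memcpy(bounce + total, sg[i].buf, sg[i].len);
		total += sg[i].len;
	}
	for (size_t off = 0; off + 4 <= total; off += 4) {
		uint32_t word;

		memcpy(&word, bounce + off, sizeof(word));
		writel_fifo(word);
	}
}

int main(void)
{
	uint8_t bounce[64];
	const struct chunk sg[] = { { "abcd", 4 }, { "efgh", 4 } };

	write_via_cpu(sg, 2, bounce);
	return 0;
}
```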
271 struct img_hash_request_ctx *ctx = ahash_request_ctx(req); in img_hash_finish() local
276 memcpy(req->result, ctx->digest, ctx->digsize); in img_hash_finish()
283 struct img_hash_request_ctx *ctx = ahash_request_ctx(req); in img_hash_copy_hash() local
284 u32 *hash = (u32 *)ctx->digest; in img_hash_copy_hash()
287 for (i = (ctx->digsize / sizeof(u32)) - 1; i >= 0; i--) in img_hash_copy_hash()
288 hash[i] = img_hash_read_result_queue(ctx->hdev); in img_hash_copy_hash()
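img_hash_copy_hash() drains the hardware result queue one 32-bit word at a time, filling ctx->digest from the last word down to the first. A standalone sketch of that reverse-fill loop, with the queue read stubbed out so the fill order is visible:

```c
#include <stdint.h>
#include <stdio.h>

#define DIGEST_SIZE 32 /* e.g. SHA-256: 32 bytes = 8 words */

/* Stub for img_hash_read_result_queue(): each call pops the next result word. */
static uint32_t read_result_queue(void)
{
	static uint32_t next;
	return next++;
}

int main(void)
{
	uint32_t hash[DIGEST_SIZE / sizeof(uint32_t)];

	/* Same loop shape as the listing: the highest-index word is read first. */
	for (int i = DIGEST_SIZE / sizeof(uint32_t) - 1; i >= 0; i--)
		hash[i] = read_result_queue();

	for (unsigned i = 0; i < DIGEST_SIZE / sizeof(uint32_t); i++)
		printf("%u ", hash[i]); /* prints: 7 6 5 4 3 2 1 0 */
	printf("\n");
	return 0;
}
```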
293 struct img_hash_request_ctx *ctx = ahash_request_ctx(req); in img_hash_finish_req() local
294 struct img_hash_dev *hdev = ctx->hdev; in img_hash_finish_req()
302 ctx->flags |= DRIVER_FLAGS_ERROR; in img_hash_finish_req()
314 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_write_via_dma() local
318 dev_dbg(hdev->dev, "xmit dma size: %d\n", ctx->total); in img_hash_write_via_dma()
320 if (!ctx->total) in img_hash_write_via_dma()
359 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_dma_task() local
364 if (!ctx->sg) in img_hash_dma_task()
367 addr = sg_virt(ctx->sg); in img_hash_dma_task()
368 nbytes = ctx->sg->length - ctx->offset; in img_hash_dma_task()
384 sg_init_one(&tsg, addr + ctx->offset, wsend * 4); in img_hash_dma_task()
387 ctx->flags |= DRIVER_FLAGS_CPU; in img_hash_dma_task()
389 img_hash_xmit_cpu(hdev, addr + ctx->offset, in img_hash_dma_task()
391 ctx->sent += wsend * 4; in img_hash_dma_task()
394 ctx->sent += wsend * 4; in img_hash_dma_task()
399 ctx->bufcnt = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents, in img_hash_dma_task()
400 ctx->buffer, bleft, ctx->sent); in img_hash_dma_task()
402 ctx->sg = sg_next(ctx->sg); in img_hash_dma_task()
403 while (ctx->sg && (ctx->bufcnt < 4)) { in img_hash_dma_task()
404 len = ctx->sg->length; in img_hash_dma_task()
405 if (likely(len > (4 - ctx->bufcnt))) in img_hash_dma_task()
406 len = 4 - ctx->bufcnt; in img_hash_dma_task()
407 tbc = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents, in img_hash_dma_task()
408 ctx->buffer + ctx->bufcnt, len, in img_hash_dma_task()
409 ctx->sent + ctx->bufcnt); in img_hash_dma_task()
410 ctx->bufcnt += tbc; in img_hash_dma_task()
411 if (tbc >= ctx->sg->length) { in img_hash_dma_task()
412 ctx->sg = sg_next(ctx->sg); in img_hash_dma_task()
417 ctx->sent += ctx->bufcnt; in img_hash_dma_task()
418 ctx->offset = tbc; in img_hash_dma_task()
423 ctx->offset = 0; in img_hash_dma_task()
424 ctx->sg = sg_next(ctx->sg); in img_hash_dma_task()
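img_hash_dma_task() is the core of the DMA path: each scatterlist chunk is sent in whole 32-bit words (by DMA, or by CPU for small or awkwardly aligned chunks) and the sub-word remainder is parked in ctx->buffer so the final CPU write can flush it. The standalone model below shows only that word/tail split; it deliberately ignores the scatterlist walking, the CPU-versus-DMA decision, and the offset bookkeeping, and all names in it are illustrative rather than the driver's.

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative per-request transfer state. */
struct xfer_state {
	size_t  sent;      /* bytes handed to the device so far */
	uint8_t buffer[4]; /* sub-word tail awaiting the final flush */
	size_t  bufcnt;
};

static void send_words(const uint8_t *addr, size_t nwords)
{
	(void)addr;
	printf("send %zu words\n", nwords); /* stands in for the DMA/CPU submit */
}

static void dma_task_step(struct xfer_state *st, const uint8_t *addr, size_t nbytes)
{
	size_t wsend = nbytes / 4;          /* whole 32-bit words in this chunk */
	size_t bleft = nbytes - wsend * 4;  /* sub-word tail */

	if (wsend) {
		send_words(addr, wsend);
		st->sent += wsend * 4;
	}
	if (bleft) {
		memcpy(st->buffer, addr + wsend * 4, bleft);
		st->bufcnt = bleft;
	}
}

int main(void)
{
	struct xfer_state st = { 0 };
	const uint8_t data[] = "hello world";

	dma_task_step(&st, data, sizeof(data) - 1);
	printf("sent=%zu tail=%zu\n", st.sent, st.bufcnt); /* sent=8 tail=3 */
	return 0;
}
```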
430 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_write_via_dma_stop() local
432 if (ctx->flags & DRIVER_FLAGS_SG) in img_hash_write_via_dma_stop()
433 dma_unmap_sg(hdev->dev, ctx->sg, ctx->dma_ct, DMA_TO_DEVICE); in img_hash_write_via_dma_stop()
441 struct img_hash_request_ctx *ctx = ahash_request_ctx(req); in img_hash_process_data() local
444 ctx->bufcnt = 0; in img_hash_process_data()
485 struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm); in img_hash_init() local
487 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); in img_hash_init()
498 struct img_hash_request_ctx *ctx; in img_hash_handle_queue() local
528 ctx = ahash_request_ctx(req); in img_hash_handle_queue()
531 ctx->op, req->nbytes); in img_hash_handle_queue()
549 struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm); in img_hash_update() local
551 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); in img_hash_update()
564 struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm); in img_hash_final() local
566 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); in img_hash_final()
578 struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm); in img_hash_finup() local
580 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback); in img_hash_finup()
594 struct img_hash_request_ctx *ctx = ahash_request_ctx(req); in img_hash_digest() local
612 ctx->hdev = hdev; in img_hash_digest()
613 ctx->flags = 0; in img_hash_digest()
614 ctx->digsize = crypto_ahash_digestsize(tfm); in img_hash_digest()
616 switch (ctx->digsize) { in img_hash_digest()
618 ctx->flags |= DRIVER_FLAGS_SHA1; in img_hash_digest()
621 ctx->flags |= DRIVER_FLAGS_SHA256; in img_hash_digest()
624 ctx->flags |= DRIVER_FLAGS_SHA224; in img_hash_digest()
627 ctx->flags |= DRIVER_FLAGS_MD5; in img_hash_digest()
633 ctx->bufcnt = 0; in img_hash_digest()
634 ctx->offset = 0; in img_hash_digest()
635 ctx->sent = 0; in img_hash_digest()
636 ctx->total = req->nbytes; in img_hash_digest()
637 ctx->sg = req->src; in img_hash_digest()
638 ctx->sgfirst = req->src; in img_hash_digest()
639 ctx->nents = sg_nents(ctx->sg); in img_hash_digest()
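img_hash_digest() derives the algorithm flag from the digest size and resets the per-request transfer counters before queueing the request. A standalone sketch of that mapping, assuming the standard digest sizes (MD5 16, SHA-1 20, SHA-224 28, SHA-256 32 bytes); the flag bits are the same placeholders used in the earlier sketch, not the driver's values.

```c
#include <stdint.h>
#include <stdio.h>

/* Placeholder flag bits mirroring the DRIVER_FLAGS_* names in the listing. */
#define DRIVER_FLAGS_MD5    (1u << 0)
#define DRIVER_FLAGS_SHA1   (1u << 1)
#define DRIVER_FLAGS_SHA224 (1u << 2)
#define DRIVER_FLAGS_SHA256 (1u << 3)

/* Map a digest size in bytes to an algorithm flag, as the switch in the listing does. */
static int flags_for_digest_size(unsigned int digsize, uint32_t *flags)
{
	switch (digsize) {
	case 20: *flags |= DRIVER_FLAGS_SHA1;   break; /* SHA-1 */
	case 32: *flags |= DRIVER_FLAGS_SHA256; break; /* SHA-256 */
	case 28: *flags |= DRIVER_FLAGS_SHA224; break; /* SHA-224 */
	case 16: *flags |= DRIVER_FLAGS_MD5;    break; /* MD5 */
	default: return -1;                            /* unsupported size */
	}
	return 0;
}

int main(void)
{
	uint32_t flags = 0;

	if (flags_for_digest_size(32, &flags) == 0)
		printf("flags=0x%x\n", flags); /* SHA-256 selected */
	return 0;
}
```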
648 struct img_hash_ctx *ctx = crypto_tfm_ctx(tfm); in img_hash_cra_init() local
652 ctx->fallback = crypto_alloc_ahash(alg_name, 0, in img_hash_cra_init()
654 if (IS_ERR(ctx->fallback)) { in img_hash_cra_init()
656 err = PTR_ERR(ctx->fallback); in img_hash_cra_init()
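img_hash_cra_init() allocates a software fallback ahash and checks the result with the kernel's error-pointer convention: a failed allocation comes back as an ERR_PTR()-encoded errno, which IS_ERR() detects and PTR_ERR() decodes. The tiny standalone illustration below uses simplified stand-ins for those helpers and for crypto_alloc_ahash(); it is not the kernel's implementation.

```c
#include <stdint.h>
#include <stdio.h>

#define MAX_ERRNO 4095
#define ERR_ENOMEM 12

/* Simplified versions of the kernel's error-pointer helpers. */
static void *ERR_PTR(long error)      { return (void *)error; }
static long  PTR_ERR(const void *ptr) { return (long)ptr; }
static int   IS_ERR(const void *ptr)  { return (unsigned long)ptr >= (unsigned long)-MAX_ERRNO; }

/* Stand-in for crypto_alloc_ahash(): fail by returning an encoded errno. */
static void *alloc_fallback(const char *alg_name)
{
	(void)alg_name;
	return ERR_PTR(-ERR_ENOMEM);
}

int main(void)
{
	void *fallback = alloc_fallback("sha256");

	if (IS_ERR(fallback)) {          /* same check pattern as img_hash_cra_init() */
		long err = PTR_ERR(fallback);

		fprintf(stderr, "fallback alloc failed: %ld\n", err);
		return 1;
	}
	return 0;
}
```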