Lines matching refs:req — each entry shows the source line number, the matching line, and the enclosing function (suffixed with the symbol's role: argument, local).

2155 struct aead_request *req) in aead_unmap() argument
2157 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_unmap()
2160 dma_unmap_sg_chained(dev, req->assoc, edesc->assoc_nents, in aead_unmap()
2163 caam_unmap(dev, req->src, req->dst, in aead_unmap()
2171 struct ablkcipher_request *req) in ablkcipher_unmap() argument
2173 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_unmap()
2176 caam_unmap(dev, req->src, req->dst, in ablkcipher_unmap()
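
The two unmap helpers above share one teardown path: undo the scatterlist mappings made at allocation time, then hand off to caam_unmap() to release the IV and the sec4 scatter/gather table. A minimal sketch of what caam_unmap() plausibly looks like, assuming the signature implied by these call sites and the driver-local dma_unmap_sg_chained() helper (the exact edesc layout is not shown in this listing):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Sketch only: caam_unmap() as the call sites above suggest it looks;
     * dma_unmap_sg_chained() is the driver-local chained-sg helper. */
    static void caam_unmap(struct device *dev, struct scatterlist *src,
                           struct scatterlist *dst, int src_nents,
                           bool src_chained, int dst_nents, bool dst_chained,
                           dma_addr_t iv_dma, int ivsize,
                           dma_addr_t sec4_sg_dma, int sec4_sg_bytes)
    {
            if (dst != src) {
                    /* out-of-place: src mapped to-device, dst from-device */
                    dma_unmap_sg_chained(dev, src, src_nents ? : 1,
                                         DMA_TO_DEVICE, src_chained);
                    dma_unmap_sg_chained(dev, dst, dst_nents ? : 1,
                                         DMA_FROM_DEVICE, dst_chained);
            } else {
                    /* in-place: one bidirectional mapping covers both */
                    dma_unmap_sg_chained(dev, src, src_nents ? : 1,
                                         DMA_BIDIRECTIONAL, src_chained);
            }

            if (iv_dma)
                    dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
            if (sec4_sg_bytes)
                    dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
                                     DMA_TO_DEVICE);
    }
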
2185 struct aead_request *req = context; in aead_encrypt_done() local
2188 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_encrypt_done()
2201 aead_unmap(jrdev, edesc, req); in aead_encrypt_done()
2205 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->assoc), in aead_encrypt_done()
2206 req->assoclen, 1); in aead_encrypt_done()
2208 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src) - ivsize, in aead_encrypt_done()
2211 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in aead_encrypt_done()
2212 edesc->src_nents ? 100 : req->cryptlen + in aead_encrypt_done()
2218 aead_request_complete(req, err); in aead_encrypt_done()
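
aead_encrypt_done() above, and the three completion callbacks that follow it, all have the same job-ring shape: the request travels through caam_jr_enqueue() as the opaque context pointer, the extended descriptor is recovered from the hardware descriptor address, the SEC4 status word is decoded on error, DMA state is unmapped, and the request is completed. A sketch of that shape, assuming the offsetof()-based edesc recovery and the caam_jr_strstatus() error helper of this driver generation (neither is shown in the listing):

    #include <linux/slab.h>
    #include <linux/stddef.h>

    /* Completion-callback skeleton; a sketch under the assumptions noted
     * above, not copied verbatim from the file. */
    static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
                                  void *context)
    {
            struct aead_request *req = context;     /* set at enqueue time */
            struct aead_edesc *edesc;

            /* hw_desc[] lives inside the extended descriptor: step back */
            edesc = (struct aead_edesc *)((char *)desc -
                    offsetof(struct aead_edesc, hw_desc));

            if (err)
                    caam_jr_strstatus(jrdev, err);  /* decode SEC4 status */

            aead_unmap(jrdev, edesc, req);          /* drop all DMA mappings */
            kfree(edesc);
            aead_request_complete(req, err);        /* notify the crypto API */
    }
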
2224 struct aead_request *req = context; in aead_decrypt_done() local
2227 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_decrypt_done()
2239 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, in aead_decrypt_done()
2242 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->dst), in aead_decrypt_done()
2243 req->cryptlen - ctx->authsize, 1); in aead_decrypt_done()
2249 aead_unmap(jrdev, edesc, req); in aead_decrypt_done()
2260 ((char *)sg_virt(req->assoc) - sizeof(struct iphdr)), in aead_decrypt_done()
2261 sizeof(struct iphdr) + req->assoclen + in aead_decrypt_done()
2262 ((req->cryptlen > 1500) ? 1500 : req->cryptlen) + in aead_decrypt_done()
2265 struct scatterlist *sg = sg_last(req->src, edesc->src_nents); in aead_decrypt_done()
2274 aead_request_complete(req, err); in aead_decrypt_done()
2280 struct ablkcipher_request *req = context; in ablkcipher_encrypt_done() local
2283 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_encrypt_done()
2297 DUMP_PREFIX_ADDRESS, 16, 4, req->info, in ablkcipher_encrypt_done()
2300 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in ablkcipher_encrypt_done()
2301 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); in ablkcipher_encrypt_done()
2304 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_encrypt_done()
2307 ablkcipher_request_complete(req, err); in ablkcipher_encrypt_done()
2313 struct ablkcipher_request *req = context; in ablkcipher_decrypt_done() local
2316 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_decrypt_done()
2329 DUMP_PREFIX_ADDRESS, 16, 4, req->info, in ablkcipher_decrypt_done()
2332 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in ablkcipher_decrypt_done()
2333 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); in ablkcipher_decrypt_done()
2336 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_decrypt_done()
2339 ablkcipher_request_complete(req, err); in ablkcipher_decrypt_done()
2347 struct aead_request *req, in init_aead_job() argument
2350 struct crypto_aead *aead = crypto_aead_reqtfm(req); in init_aead_job()
2362 req->assoclen, req->cryptlen, authsize); in init_aead_job()
2364 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->assoc), in init_aead_job()
2365 req->assoclen, 1); in init_aead_job()
2367 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, in init_aead_job()
2370 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in init_aead_job()
2371 edesc->src_nents ? 100 : req->cryptlen, 1); in init_aead_job()
2389 src_dma = sg_dma_address(req->assoc); in init_aead_job()
2398 append_seq_in_ptr(desc, src_dma, req->assoclen + ivsize + req->cryptlen, in init_aead_job()
2401 if (likely(req->src == req->dst)) { in init_aead_job()
2403 dst_dma = sg_dma_address(req->src); in init_aead_job()
2411 dst_dma = sg_dma_address(req->dst); in init_aead_job()
2420 append_seq_out_ptr(desc, dst_dma, req->cryptlen + authsize, in init_aead_job()
2423 append_seq_out_ptr(desc, dst_dma, req->cryptlen - authsize, in init_aead_job()
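
init_aead_job() stitches the per-request pointers onto the pre-built shared descriptor: the input sequence spans associated data, IV and payload back-to-back (line 2398), and the output sequence grows by the authentication tag on encrypt but shrinks by it on decrypt, where the tag is verified rather than copied out (lines 2420/2423). A sketch of that tail, as a fragment; the append helpers come from the driver's desc_constr.h, and src_dma, dst_dma and the *_options words are assumed to come from the contiguity analysis done at allocation time:

    /* Descriptor-tail fragment; variable names follow the listing. */
    init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

    /* input: assoc | IV | payload as one logical sequence */
    append_seq_in_ptr(desc, src_dma,
                      req->assoclen + ivsize + req->cryptlen, in_options);

    if (encrypt)        /* ciphertext plus appended tag */
            append_seq_out_ptr(desc, dst_dma, req->cryptlen + authsize,
                               out_options);
    else                /* plaintext only; tag is checked, not emitted */
            append_seq_out_ptr(desc, dst_dma, req->cryptlen - authsize,
                               out_options);
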
2432 struct aead_request *req, in init_aead_giv_job() argument
2435 struct crypto_aead *aead = crypto_aead_reqtfm(req); in init_aead_giv_job()
2447 req->assoclen, req->cryptlen, authsize); in init_aead_giv_job()
2449 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->assoc), in init_aead_giv_job()
2450 req->assoclen, 1); in init_aead_giv_job()
2452 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); in init_aead_giv_job()
2454 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in init_aead_giv_job()
2455 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_aead_giv_job()
2473 src_dma = sg_dma_address(req->assoc); in init_aead_giv_job()
2480 append_seq_in_ptr(desc, src_dma, req->assoclen + ivsize + req->cryptlen, in init_aead_giv_job()
2486 if (likely(req->src == req->dst)) { in init_aead_giv_job()
2499 append_seq_out_ptr(desc, dst_dma, ivsize + req->cryptlen + authsize, in init_aead_giv_job()
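
init_aead_giv_job() differs from init_aead_job() mainly on the output side: the hardware generates the IV and writes it in front of the ciphertext, so the output sequence covers ivsize + cryptlen + authsize (line 2499) while the input span is unchanged (line 2480). In fragment form, under the same assumptions as above:

    /* givencrypt output: generated IV | ciphertext | tag */
    append_seq_in_ptr(desc, src_dma,
                      req->assoclen + ivsize + req->cryptlen, in_options);
    append_seq_out_ptr(desc, dst_dma,
                       ivsize + req->cryptlen + authsize, out_options);
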
2508 struct ablkcipher_request *req, in init_ablkcipher_job() argument
2511 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in init_ablkcipher_job()
2520 DUMP_PREFIX_ADDRESS, 16, 4, req->info, in init_ablkcipher_job()
2523 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in init_ablkcipher_job()
2524 edesc->src_nents ? 100 : req->nbytes, 1); in init_ablkcipher_job()
2538 append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options); in init_ablkcipher_job()
2540 if (likely(req->src == req->dst)) { in init_ablkcipher_job()
2542 dst_dma = sg_dma_address(req->src); in init_ablkcipher_job()
2550 dst_dma = sg_dma_address(req->dst); in init_ablkcipher_job()
2557 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options); in init_ablkcipher_job()
2565 struct ablkcipher_request *req, in init_ablkcipher_giv_job() argument
2568 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in init_ablkcipher_giv_job()
2577 DUMP_PREFIX_ADDRESS, 16, 4, req->info, in init_ablkcipher_giv_job()
2580 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in init_ablkcipher_giv_job()
2581 edesc->src_nents ? 100 : req->nbytes, 1); in init_ablkcipher_giv_job()
2588 src_dma = sg_dma_address(req->src); in init_ablkcipher_giv_job()
2595 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options); in init_ablkcipher_giv_job()
2605 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options); in init_ablkcipher_giv_job()
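
The two ablkcipher job builders follow the same scheme without associated data or a tag: the plain path reads IV + payload and writes a payload-sized result (lines 2538/2557), while the giv path reads only the payload and writes IV + payload, the IV again being generated by the device (lines 2595/2605). Sketched with the same assumed helpers:

    /* plain encrypt/decrypt: IV precedes the payload on input */
    append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
    append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);

    /* givencrypt: payload in, generated IV prepended to the output */
    append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
    append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
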
2611 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, in aead_edesc_alloc() argument
2615 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_edesc_alloc()
2618 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | in aead_edesc_alloc()
2631 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained); in aead_edesc_alloc()
2633 if (unlikely(req->dst != req->src)) { in aead_edesc_alloc()
2634 src_nents = sg_count(req->src, req->cryptlen, &src_chained); in aead_edesc_alloc()
2635 dst_nents = sg_count(req->dst, in aead_edesc_alloc()
2636 req->cryptlen + in aead_edesc_alloc()
2640 src_nents = sg_count(req->src, in aead_edesc_alloc()
2641 req->cryptlen + in aead_edesc_alloc()
2646 sgc = dma_map_sg_chained(jrdev, req->assoc, assoc_nents ? : 1, in aead_edesc_alloc()
2648 if (likely(req->src == req->dst)) { in aead_edesc_alloc()
2649 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in aead_edesc_alloc()
2652 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in aead_edesc_alloc()
2654 sgc = dma_map_sg_chained(jrdev, req->dst, dst_nents ? : 1, in aead_edesc_alloc()
2658 iv_dma = dma_map_single(jrdev, req->iv, ivsize, DMA_TO_DEVICE); in aead_edesc_alloc()
2676 iv_dma + ivsize == sg_dma_address(req->assoc) && in aead_edesc_alloc()
2677 !src_nents && sg_dma_address(req->assoc) + in aead_edesc_alloc()
2678 req->assoclen == sg_dma_address(req->src)); in aead_edesc_alloc()
2680 all_contig = (!assoc_nents && sg_dma_address(req->assoc) + in aead_edesc_alloc()
2681 req->assoclen == iv_dma && !src_nents && in aead_edesc_alloc()
2682 iv_dma + ivsize == sg_dma_address(req->src)); in aead_edesc_alloc()
2716 sg_to_sec4_sg(req->assoc, in aead_edesc_alloc()
2728 sg_to_sec4_sg(req->assoc, in aead_edesc_alloc()
2735 sg_to_sec4_sg_last(req->src, in aead_edesc_alloc()
2742 sg_to_sec4_sg_last(req->dst, dst_nents, in aead_edesc_alloc()
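
aead_edesc_alloc() counts the scatterlist entries for assoc/src/dst, DMA-maps them together with the IV, and then decides whether the whole input already forms one contiguous DMA run; only when it does not is a sec4 scatter/gather table built (lines 2716-2742). Lines 2676-2682 show that the expected layout differs by algorithm: GCM wants the IV first, everything else wants the associated data first. That predicate, reconstructed as plain C with illustrative parameter names:

    #include <linux/types.h>

    /* Contiguity test reconstructed from lines 2676-2682. */
    static bool aead_input_contig(bool is_gcm, int assoc_nents, int src_nents,
                                  dma_addr_t assoc_dma, unsigned int assoclen,
                                  dma_addr_t iv_dma, int ivsize,
                                  dma_addr_t src_dma)
    {
            if (is_gcm)     /* IV | assoc | payload back-to-back */
                    return !assoc_nents && iv_dma + ivsize == assoc_dma &&
                           !src_nents && assoc_dma + assoclen == src_dma;

            /* all other modes: assoc | IV | payload back-to-back */
            return !assoc_nents && assoc_dma + assoclen == iv_dma &&
                   !src_nents && iv_dma + ivsize == src_dma;
    }
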
2755 static int aead_encrypt(struct aead_request *req) in aead_encrypt() argument
2758 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_encrypt()
2766 edesc = aead_edesc_alloc(req, DESC_JOB_IO_LEN * in aead_encrypt()
2772 init_aead_job(ctx->sh_desc_enc, ctx->sh_desc_enc_dma, edesc, req, in aead_encrypt()
2781 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); in aead_encrypt()
2785 aead_unmap(jrdev, edesc, req); in aead_encrypt()
2792 static int aead_decrypt(struct aead_request *req) in aead_decrypt() argument
2795 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_decrypt()
2803 edesc = aead_edesc_alloc(req, DESC_JOB_IO_LEN * in aead_decrypt()
2810 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in aead_decrypt()
2811 req->cryptlen, 1); in aead_decrypt()
2816 ctx->sh_desc_dec_dma, edesc, req, all_contig, false); in aead_decrypt()
2824 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); in aead_decrypt()
2828 aead_unmap(jrdev, edesc, req); in aead_decrypt()
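
aead_encrypt() and aead_decrypt() above are thin entry points around a common sequence: allocate and map the extended descriptor, fill in the job against the pre-built shared descriptor, enqueue with the request as completion context, and either return -EINPROGRESS or roll the mappings back if the enqueue fails. A skeleton of that sequence; names follow the listing, but the aead_edesc_alloc() signature and the hw_desc field are assumptions:

    /* Entry-point skeleton; a sketch, not the file's exact code. */
    static int aead_encrypt(struct aead_request *req)
    {
            struct crypto_aead *aead = crypto_aead_reqtfm(req);
            struct caam_ctx *ctx = crypto_aead_ctx(aead);
            struct device *jrdev = ctx->jrdev;
            struct aead_edesc *edesc;
            bool all_contig;
            int ret;

            edesc = aead_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ,
                                     &all_contig, true); /* true = encrypt */
            if (IS_ERR(edesc))
                    return PTR_ERR(edesc);

            init_aead_job(ctx->sh_desc_enc, ctx->sh_desc_enc_dma, edesc, req,
                          all_contig, true);

            ret = caam_jr_enqueue(jrdev, edesc->hw_desc, aead_encrypt_done,
                                  req);
            if (!ret)
                    return -EINPROGRESS;    /* result arrives via callback */

            aead_unmap(jrdev, edesc, req);  /* enqueue failed: unwind */
            kfree(edesc);
            return ret;
    }
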
2842 struct aead_request *req = &greq->areq; in aead_giv_edesc_alloc() local
2843 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_giv_edesc_alloc()
2846 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | in aead_giv_edesc_alloc()
2858 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained); in aead_giv_edesc_alloc()
2859 src_nents = sg_count(req->src, req->cryptlen, &src_chained); in aead_giv_edesc_alloc()
2861 if (unlikely(req->dst != req->src)) in aead_giv_edesc_alloc()
2862 dst_nents = sg_count(req->dst, req->cryptlen + ctx->authsize, in aead_giv_edesc_alloc()
2865 sgc = dma_map_sg_chained(jrdev, req->assoc, assoc_nents ? : 1, in aead_giv_edesc_alloc()
2867 if (likely(req->src == req->dst)) { in aead_giv_edesc_alloc()
2868 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in aead_giv_edesc_alloc()
2871 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in aead_giv_edesc_alloc()
2873 sgc = dma_map_sg_chained(jrdev, req->dst, dst_nents ? : 1, in aead_giv_edesc_alloc()
2896 sg_dma_address(req->assoc) || src_nents || in aead_giv_edesc_alloc()
2897 sg_dma_address(req->assoc) + req->assoclen != in aead_giv_edesc_alloc()
2898 sg_dma_address(req->src)) in aead_giv_edesc_alloc()
2902 sg_dma_address(req->assoc) + req->assoclen != iv_dma || in aead_giv_edesc_alloc()
2903 src_nents || iv_dma + ivsize != sg_dma_address(req->src)) in aead_giv_edesc_alloc()
2907 if (dst_nents || iv_dma + ivsize != sg_dma_address(req->dst)) in aead_giv_edesc_alloc()
2914 if (req->src == req->dst && in aead_giv_edesc_alloc()
2915 (src_nents || iv_dma + ivsize != sg_dma_address(req->src))) in aead_giv_edesc_alloc()
2923 if (is_gcm && req->src == req->dst && !(contig & GIV_DST_CONTIG)) in aead_giv_edesc_alloc()
2926 if (unlikely(req->src != req->dst)) { in aead_giv_edesc_alloc()
2956 sg_to_sec4_sg(req->assoc, assoc_nents, in aead_giv_edesc_alloc()
2966 sg_to_sec4_sg(req->assoc, assoc_nents, in aead_giv_edesc_alloc()
2971 sg_to_sec4_sg_last(req->src, src_nents, in aead_giv_edesc_alloc()
2977 if (is_gcm && req->src == req->dst && !(contig & GIV_DST_CONTIG)) { in aead_giv_edesc_alloc()
2981 sg_to_sec4_sg_last(req->src, src_nents, in aead_giv_edesc_alloc()
2985 if (unlikely(req->src != req->dst && !(contig & GIV_DST_CONTIG))) { in aead_giv_edesc_alloc()
2989 sg_to_sec4_sg_last(req->dst, dst_nents, in aead_giv_edesc_alloc()
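
aead_giv_edesc_alloc() has to track source and destination contiguity separately, because the generated IV must land immediately in front of the destination payload. Lines 2896-2915 clear bits out of a contig mask accordingly; GIV_DST_CONTIG is visible at lines 2923-2985, and a matching GIV_SRC_CONTIG flag is assumed here as its counterpart. A sketch of that bookkeeping:

    /* Flag bookkeeping reconstructed from lines 2896-2915;
     * GIV_SRC_CONTIG is an assumed name. */
    int contig = GIV_SRC_CONTIG | GIV_DST_CONTIG;

    if (is_gcm) {           /* GCM: IV | assoc | src back-to-back */
            if (iv_dma + ivsize != sg_dma_address(req->assoc) || src_nents ||
                sg_dma_address(req->assoc) + req->assoclen !=
                sg_dma_address(req->src))
                    contig &= ~GIV_SRC_CONTIG;
    } else {                /* others: assoc | IV | src back-to-back */
            if (assoc_nents ||
                sg_dma_address(req->assoc) + req->assoclen != iv_dma ||
                src_nents || iv_dma + ivsize != sg_dma_address(req->src))
                    contig &= ~GIV_SRC_CONTIG;
    }

    /* generated IV must directly precede the destination payload */
    if (dst_nents || iv_dma + ivsize != sg_dma_address(req->dst))
            contig &= ~GIV_DST_CONTIG;
    if (req->src == req->dst &&                 /* in-place, line 2914 */
        (src_nents || iv_dma + ivsize != sg_dma_address(req->src)))
            contig &= ~GIV_DST_CONTIG;
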
3004 struct aead_request *req = &areq->areq; in aead_givencrypt() local
3006 struct crypto_aead *aead = crypto_aead_reqtfm(req); in aead_givencrypt()
3022 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src), in aead_givencrypt()
3023 req->cryptlen, 1); in aead_givencrypt()
3028 ctx->sh_desc_givenc_dma, edesc, req, contig); in aead_givencrypt()
3036 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); in aead_givencrypt()
3040 aead_unmap(jrdev, edesc, req); in aead_givencrypt()
3056 *req, int desc_bytes, in ablkcipher_edesc_alloc()
3059 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_edesc_alloc()
3062 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | in ablkcipher_edesc_alloc()
3074 src_nents = sg_count(req->src, req->nbytes, &src_chained); in ablkcipher_edesc_alloc()
3076 if (req->dst != req->src) in ablkcipher_edesc_alloc()
3077 dst_nents = sg_count(req->dst, req->nbytes, &dst_chained); in ablkcipher_edesc_alloc()
3079 if (likely(req->src == req->dst)) { in ablkcipher_edesc_alloc()
3080 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in ablkcipher_edesc_alloc()
3083 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in ablkcipher_edesc_alloc()
3085 sgc = dma_map_sg_chained(jrdev, req->dst, dst_nents ? : 1, in ablkcipher_edesc_alloc()
3089 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE); in ablkcipher_edesc_alloc()
3099 if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src)) in ablkcipher_edesc_alloc()
3125 sg_to_sec4_sg_last(req->src, src_nents, in ablkcipher_edesc_alloc()
3131 sg_to_sec4_sg_last(req->dst, dst_nents, in ablkcipher_edesc_alloc()
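
ablkcipher_edesc_alloc() applies the same idea with a single contiguity question: can the IV mapping sit directly in front of the source payload (line 3099)? When it cannot, the sec4 table opens with a one-entry IV slot followed by the source scatterlist, closed off by sg_to_sec4_sg_last() (line 3125). A sketch, assuming the driver's dma_to_sec4_sg_one() table helper:

    /* Non-contiguous input, sketched from lines 3099-3125 */
    if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src)) {
            iv_contig = true;   /* device streams IV | payload directly */
    } else {
            dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
            sg_to_sec4_sg_last(req->src, src_nents,
                               edesc->sec4_sg + 1, 0); /* marks final entry */
    }
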
3154 static int ablkcipher_encrypt(struct ablkcipher_request *req) in ablkcipher_encrypt() argument
3157 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_encrypt()
3165 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * in ablkcipher_encrypt()
3172 ctx->sh_desc_enc_dma, edesc, req, iv_contig); in ablkcipher_encrypt()
3179 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); in ablkcipher_encrypt()
3184 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_encrypt()
3191 static int ablkcipher_decrypt(struct ablkcipher_request *req) in ablkcipher_decrypt() argument
3194 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_decrypt()
3202 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * in ablkcipher_decrypt()
3209 ctx->sh_desc_dec_dma, edesc, req, iv_contig); in ablkcipher_decrypt()
3217 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req); in ablkcipher_decrypt()
3221 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_decrypt()
3237 struct ablkcipher_request *req = &greq->creq; in ablkcipher_giv_edesc_alloc() local
3238 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_giv_edesc_alloc()
3241 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | in ablkcipher_giv_edesc_alloc()
3253 src_nents = sg_count(req->src, req->nbytes, &src_chained); in ablkcipher_giv_edesc_alloc()
3255 if (unlikely(req->dst != req->src)) in ablkcipher_giv_edesc_alloc()
3256 dst_nents = sg_count(req->dst, req->nbytes, &dst_chained); in ablkcipher_giv_edesc_alloc()
3258 if (likely(req->src == req->dst)) { in ablkcipher_giv_edesc_alloc()
3259 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in ablkcipher_giv_edesc_alloc()
3262 sgc = dma_map_sg_chained(jrdev, req->src, src_nents ? : 1, in ablkcipher_giv_edesc_alloc()
3264 sgc = dma_map_sg_chained(jrdev, req->dst, dst_nents ? : 1, in ablkcipher_giv_edesc_alloc()
3278 if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst)) in ablkcipher_giv_edesc_alloc()
3303 sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0); in ablkcipher_giv_edesc_alloc()
3311 sg_to_sec4_sg_last(req->dst, dst_nents, in ablkcipher_giv_edesc_alloc()
3336 struct ablkcipher_request *req = &creq->creq; in ablkcipher_givencrypt() local
3338 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); in ablkcipher_givencrypt()
3353 edesc, req, iv_contig); in ablkcipher_givencrypt()
3361 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); in ablkcipher_givencrypt()
3366 ablkcipher_unmap(jrdev, edesc, req); in ablkcipher_givencrypt()
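
ablkcipher_givencrypt() closes out the file's pattern: because post-processing is identical, it reuses ablkcipher_encrypt_done() as its callback (line 3361), and its allocator judges IV contiguity against the destination rather than the source, since the generated IV must directly precede the output payload (line 3278). The enqueue tail common to every entry point in this listing, sketched (the kfree() of the edesc on failure is an assumption):

    ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
    if (!ret) {
            ret = -EINPROGRESS;     /* completion arrives asynchronously */
    } else {
            ablkcipher_unmap(jrdev, edesc, req);    /* synchronous failure */
            kfree(edesc);
    }
    return ret;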