Lines matching refs: dmreq (drivers/md/dm-crypt.c)

83 			 struct dm_crypt_request *dmreq);
85 			 struct dm_crypt_request *dmreq);
187 static u8 *iv_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq);
245 struct dm_crypt_request *dmreq) in crypt_iv_plain_gen() argument
248 *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff); in crypt_iv_plain_gen()
254 struct dm_crypt_request *dmreq) in crypt_iv_plain64_gen() argument
257 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_plain64_gen()
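
The plain and plain64 generators above only encode dmreq->iv_sector into a zeroed IV, little-endian: the low 32 bits for plain, the full 64 bits for plain64. A minimal userspace sketch of that derivation (buffer size and sector value are made-up example inputs, not taken from the driver):

	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	/* Store the low 'n' bytes of 'val' little-endian into a zeroed IV. */
	static void sector_to_iv_le(uint8_t *iv, size_t iv_size, uint64_t val, size_t n)
	{
		memset(iv, 0, iv_size);
		for (size_t i = 0; i < n; i++)
			iv[i] = (uint8_t)(val >> (8 * i));
	}

	int main(void)
	{
		uint8_t iv[16];
		uint64_t sector = 0x123456789abcdef0ULL;	/* example iv_sector */

		sector_to_iv_le(iv, sizeof(iv), sector & 0xffffffff, 4);	/* "plain": low 32 bits */
		sector_to_iv_le(iv, sizeof(iv), sector, 8);			/* "plain64": full 64 bits */

		for (size_t i = 0; i < sizeof(iv); i++)
			printf("%02x", iv[i]);
		printf("\n");
		return 0;
	}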
408 struct dm_crypt_request *dmreq) in crypt_iv_essiv_gen() argument
413 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_essiv_gen()
448 struct dm_crypt_request *dmreq) in crypt_iv_benbi_gen() argument
454 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
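
The benbi generator turns the sector number into a big-endian 64-bit block count, ((iv_sector << shift) + 1); in dm-crypt the resulting value ends up in the tail of an otherwise-zeroed IV. A sketch under those assumptions (it assumes iv_size >= 8; the shift is passed in by the caller):

	#include <stdint.h>
	#include <stddef.h>
	#include <string.h>

	/*
	 * Big-endian narrow-block count: ((sector << shift) + 1) stored in the
	 * last 8 bytes of a zeroed IV. 'shift' converts the 512-byte sector
	 * index to a cipher-block index (e.g. 5 for a 16-byte block:
	 * 32 blocks per sector).
	 */
	static void benbi_iv(uint8_t *iv, size_t iv_size, uint64_t sector, unsigned shift)
	{
		uint64_t count = (sector << shift) + 1;	/* block count starts at 1 */

		memset(iv, 0, iv_size);
		for (size_t i = 0; i < 8; i++)		/* store big-endian at the tail */
			iv[iv_size - 1 - i] = (uint8_t)(count >> (8 * i));
	}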
461 struct dm_crypt_request *dmreq) in crypt_iv_null_gen() argument
531 struct dm_crypt_request *dmreq, in crypt_iv_lmk_one() argument
559 buf[0] = cpu_to_le32(dmreq->iv_sector & 0xFFFFFFFF); in crypt_iv_lmk_one()
560 buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000); in crypt_iv_lmk_one()
580 struct dm_crypt_request *dmreq) in crypt_iv_lmk_gen() argument
585 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_lmk_gen()
586 src = kmap_atomic(sg_page(&dmreq->sg_in)); in crypt_iv_lmk_gen()
587 r = crypt_iv_lmk_one(cc, iv, dmreq, src + dmreq->sg_in.offset); in crypt_iv_lmk_gen()
596 struct dm_crypt_request *dmreq) in crypt_iv_lmk_post() argument
601 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) in crypt_iv_lmk_post()
604 dst = kmap_atomic(sg_page(&dmreq->sg_out)); in crypt_iv_lmk_post()
605 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_lmk_post()
609 crypto_xor(dst + dmreq->sg_out.offset, iv, cc->iv_size); in crypt_iv_lmk_post()
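
In crypt_iv_lmk_post() (read path), the IV is recomputed over the decrypted data in sg_out and then XORed over the first iv_size bytes of that data via crypto_xor (line 609). A minimal sketch of just that post step, with the MD5-based derivation dm-crypt actually uses abstracted behind a hypothetical derive_iv() callback:

	#include <stdint.h>
	#include <stddef.h>

	/* Hypothetical stand-in for crypt_iv_lmk_one(). */
	typedef void (*derive_iv_fn)(uint8_t *iv, size_t iv_size,
				     const uint8_t *data, size_t len, uint64_t sector);

	static void lmk_post_read(uint8_t *plaintext, size_t len, uint64_t sector,
				  uint8_t *iv, size_t iv_size, derive_iv_fn derive_iv)
	{
		/* Recompute the IV from the decrypted sector data... */
		derive_iv(iv, iv_size, plaintext, len, sector);

		/* ...and XOR it into the start of the output, as crypto_xor() does. */
		for (size_t i = 0; i < iv_size && i < len; i++)
			plaintext[i] ^= iv[i];
	}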
679 struct dm_crypt_request *dmreq, in crypt_iv_tcw_whitening() argument
683 u64 sector = cpu_to_le64((u64)dmreq->iv_sector); in crypt_iv_tcw_whitening()
719 struct dm_crypt_request *dmreq) in crypt_iv_tcw_gen() argument
722 u64 sector = cpu_to_le64((u64)dmreq->iv_sector); in crypt_iv_tcw_gen()
727 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_tcw_gen()
728 src = kmap_atomic(sg_page(&dmreq->sg_in)); in crypt_iv_tcw_gen()
729 r = crypt_iv_tcw_whitening(cc, dmreq, src + dmreq->sg_in.offset); in crypt_iv_tcw_gen()
743 struct dm_crypt_request *dmreq) in crypt_iv_tcw_post() argument
748 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_tcw_post()
752 dst = kmap_atomic(sg_page(&dmreq->sg_out)); in crypt_iv_tcw_post()
753 r = crypt_iv_tcw_whitening(cc, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_tcw_post()
825 struct dm_crypt_request *dmreq) in req_of_dmreq() argument
827 return (struct ablkcipher_request *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
831 struct dm_crypt_request *dmreq) in iv_of_dmreq() argument
833 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
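
req_of_dmreq() and iv_of_dmreq() only work because the cipher request, the dm_crypt_request and the IV live in one allocation: the dm_crypt_request sits cc->dmreq_start bytes past the request, and the IV is the first suitably aligned address after it. A userspace sketch of that layout and the pointer math; struct contents, alignment and sizes are placeholder assumptions:

	#include <stdint.h>
	#include <stdlib.h>

	#define ALIGN_UP(x, a)	(((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

	struct fake_cipher_req { char opaque[64]; };		/* stand-in for the crypto request */
	struct dm_crypt_request_sketch { uint64_t iv_sector; };	/* stand-in for dm_crypt_request */

	int main(void)
	{
		size_t align = 16;				/* assumed cipher alignmask + 1 */
		size_t dmreq_start = ALIGN_UP(sizeof(struct fake_cipher_req), align);
		size_t iv_size = 16;
		size_t total = dmreq_start + sizeof(struct dm_crypt_request_sketch) + align + iv_size;

		char *req = malloc(total);
		if (!req)
			return 1;

		/* dmreq_of_req(): the dm_crypt_request lives dmreq_start bytes past the request. */
		struct dm_crypt_request_sketch *dmreq =
			(struct dm_crypt_request_sketch *)(req + dmreq_start);

		/* iv_of_dmreq(): the IV is the first aligned address after the dm_crypt_request. */
		uint8_t *iv = (uint8_t *)ALIGN_UP((uintptr_t)(dmreq + 1), align);

		/* req_of_dmreq(): walk back from dmreq to the enclosing request. */
		char *req_again = (char *)dmreq - dmreq_start;

		(void)iv;
		free(req);
		return req_again == req ? 0 : 1;
	}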
843 struct dm_crypt_request *dmreq; in crypt_convert_block() local
847 dmreq = dmreq_of_req(cc, req); in crypt_convert_block()
848 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block()
850 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block()
851 dmreq->ctx = ctx; in crypt_convert_block()
852 sg_init_table(&dmreq->sg_in, 1); in crypt_convert_block()
853 sg_set_page(&dmreq->sg_in, bv_in.bv_page, 1 << SECTOR_SHIFT, in crypt_convert_block()
856 sg_init_table(&dmreq->sg_out, 1); in crypt_convert_block()
857 sg_set_page(&dmreq->sg_out, bv_out.bv_page, 1 << SECTOR_SHIFT, in crypt_convert_block()
864 r = cc->iv_gen_ops->generator(cc, iv, dmreq); in crypt_convert_block()
869 ablkcipher_request_set_crypt(req, &dmreq->sg_in, &dmreq->sg_out, in crypt_convert_block()
878 r = cc->iv_gen_ops->post(cc, iv, dmreq); in crypt_convert_block()
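
crypt_convert_block() handles one 512-byte sector per request: it records the sector in dmreq->iv_sector, points sg_in/sg_out at that sector's data, asks the IV generator for an IV, and submits the cipher request (the loop over sectors lives in its caller). A heavily simplified userspace sketch of that per-sector flow, with the kernel scatterlist and ablkcipher plumbing replaced by hypothetical callbacks:

	#include <stdint.h>
	#include <stddef.h>

	#define SECTOR_SHIFT	9
	#define SECTOR_SIZE	(1 << SECTOR_SHIFT)

	/* Hypothetical callbacks mirroring iv_gen_ops->generator and the cipher call. */
	typedef int (*iv_gen_fn)(uint8_t *iv, size_t iv_size, uint64_t sector);
	typedef int (*cipher_fn)(uint8_t *out, const uint8_t *in, size_t len, const uint8_t *iv);

	static int convert_extent(uint8_t *out, const uint8_t *in, size_t len,
				  uint64_t first_sector, uint8_t *iv, size_t iv_size,
				  iv_gen_fn gen_iv, cipher_fn do_cipher)
	{
		uint64_t sector = first_sector;			/* like ctx->cc_sector */

		for (size_t off = 0; off + SECTOR_SIZE <= len; off += SECTOR_SIZE) {
			int r = gen_iv(iv, iv_size, sector);	/* dmreq->iv_sector = sector */
			if (r)
				return r;
			r = do_cipher(out + off, in + off, SECTOR_SIZE, iv);
			if (r)
				return r;
			sector++;				/* next 512-byte sector */
		}
		return 0;
	}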
1357 struct dm_crypt_request *dmreq = async_req->data; in kcryptd_async_done() local
1358 struct convert_context *ctx = dmreq->ctx; in kcryptd_async_done()
1373 error = cc->iv_gen_ops->post(cc, iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
1378 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()