Lines matching references to dmreq (identifier search over drivers/md/dm-crypt.c)

83 			 struct dm_crypt_request *dmreq);
85 struct dm_crypt_request *dmreq);
186 static u8 *iv_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq);
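
The prototype fragments at lines 83 and 85 are the two IV callbacks in struct crypt_iv_operations, and line 186 forward-declares the iv_of_dmreq() helper used later by the async completion path. A sketch of the operations table those fragments belong to; the members other than generator/post are assumptions drawn from the surrounding dm-crypt.c context:

/*
 * Per-IV-mode callbacks: generator() builds the IV before the cipher
 * request is issued, post() runs after it completes (used by lmk/tcw).
 */
struct crypt_iv_operations {
	int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
		   const char *opts);
	void (*dtr)(struct crypt_config *cc);
	int (*init)(struct crypt_config *cc);
	int (*wipe)(struct crypt_config *cc);
	int (*generator)(struct crypt_config *cc, u8 *iv,
			 struct dm_crypt_request *dmreq);
	int (*post)(struct crypt_config *cc, u8 *iv,
		    struct dm_crypt_request *dmreq);
};
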
244 struct dm_crypt_request *dmreq) in crypt_iv_plain_gen() argument
247 *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff); in crypt_iv_plain_gen()
253 struct dm_crypt_request *dmreq) in crypt_iv_plain64_gen() argument
256 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_plain64_gen()
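
Lines 244-256 are the two simplest generators: "plain" stores only the low 32 bits of the sector number, "plain64" stores the full 64-bit value, both little-endian. A minimal sketch of the pair, assuming the usual memset() prologue that zeroes the rest of the IV:

static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv,
			      struct dm_crypt_request *dmreq)
{
	/* zero the IV, then place the low 32 bits of the sector number */
	memset(iv, 0, cc->iv_size);
	*(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff);

	return 0;
}

static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv,
				struct dm_crypt_request *dmreq)
{
	/* same idea, but keep all 64 bits of the sector number */
	memset(iv, 0, cc->iv_size);
	*(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);

	return 0;
}
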
407 struct dm_crypt_request *dmreq) in crypt_iv_essiv_gen() argument
412 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_essiv_gen()
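
Line 412 is the sector-to-IV step of ESSIV; the generator then encrypts that value with a cipher keyed by a hash of the volume key, so the per-sector IV is not predictable by an attacker. A sketch, assuming the essiv_tfm handle set up by the ESSIV constructor elsewhere in the file:

static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv,
			      struct dm_crypt_request *dmreq)
{
	struct crypto_cipher *essiv_tfm = cc->iv_gen_private.essiv.tfm;

	memset(iv, 0, cc->iv_size);
	*(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);
	/* IV = E_salt(sector): encrypt the sector number with the hashed key */
	crypto_cipher_encrypt_one(essiv_tfm, iv, iv);

	return 0;
}
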
447 struct dm_crypt_request *dmreq) in crypt_iv_benbi_gen() argument
453 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
460 struct dm_crypt_request *dmreq) in crypt_iv_null_gen() argument
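
Line 453 computes the big-endian narrow-block count used by the "benbi" mode (block numbering starts at 1), stored right-aligned at the end of the IV; the "null" generator at line 460 simply zeroes the IV. A sketch of benbi, with the memset()/put_unaligned() details assumed from context:

static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv,
			      struct dm_crypt_request *dmreq)
{
	__be64 val;

	/* clear everything except the trailing 64-bit counter */
	memset(iv, 0, cc->iv_size - sizeof(u64));

	val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1);
	put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64)));

	return 0;
}
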
530 struct dm_crypt_request *dmreq, in crypt_iv_lmk_one() argument
558 buf[0] = cpu_to_le32(dmreq->iv_sector & 0xFFFFFFFF); in crypt_iv_lmk_one()
559 buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000); in crypt_iv_lmk_one()
579 struct dm_crypt_request *dmreq) in crypt_iv_lmk_gen() argument
584 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_lmk_gen()
585 src = kmap_atomic(sg_page(&dmreq->sg_in)); in crypt_iv_lmk_gen()
586 r = crypt_iv_lmk_one(cc, iv, dmreq, src + dmreq->sg_in.offset); in crypt_iv_lmk_gen()
595 struct dm_crypt_request *dmreq) in crypt_iv_lmk_post() argument
600 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) in crypt_iv_lmk_post()
603 dst = kmap_atomic(sg_page(&dmreq->sg_out)); in crypt_iv_lmk_post()
604 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_lmk_post()
608 crypto_xor(dst + dmreq->sg_out.offset, iv, cc->iv_size); in crypt_iv_lmk_post()
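
Lines 530-608 belong to the loop-AES compatible "lmk" mode: crypt_iv_lmk_one() hashes the sector payload together with the 64-bit sector number (lines 558-559 pack the sector into the hash buffer), crypt_iv_lmk_gen() runs it over the plaintext before a WRITE, and crypt_iv_lmk_post() recomputes it after a READ and XORs the result back into the decrypted data. A condensed sketch of the gen/post pair, with the kunmap_atomic() and error-handling details assumed from context:

static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv,
			    struct dm_crypt_request *dmreq)
{
	u8 *src;
	int r = 0;

	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
		/* derive the IV by hashing the plaintext sector */
		src = kmap_atomic(sg_page(&dmreq->sg_in));
		r = crypt_iv_lmk_one(cc, iv, dmreq, src + dmreq->sg_in.offset);
		kunmap_atomic(src);
	} else
		memset(iv, 0, cc->iv_size);	/* READ: real IV derived in post() */

	return r;
}

static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv,
			     struct dm_crypt_request *dmreq)
{
	u8 *dst;
	int r;

	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE)
		return 0;

	dst = kmap_atomic(sg_page(&dmreq->sg_out));
	r = crypt_iv_lmk_one(cc, iv, dmreq, dst + dmreq->sg_out.offset);

	/* XOR the recomputed IV back into the first block of the decrypted sector */
	if (!r)
		crypto_xor(dst + dmreq->sg_out.offset, iv, cc->iv_size);

	kunmap_atomic(dst);
	return r;
}
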
678 struct dm_crypt_request *dmreq, in crypt_iv_tcw_whitening() argument
682 u64 sector = cpu_to_le64((u64)dmreq->iv_sector); in crypt_iv_tcw_whitening()
718 struct dm_crypt_request *dmreq) in crypt_iv_tcw_gen() argument
721 u64 sector = cpu_to_le64((u64)dmreq->iv_sector); in crypt_iv_tcw_gen()
726 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_tcw_gen()
727 src = kmap_atomic(sg_page(&dmreq->sg_in)); in crypt_iv_tcw_gen()
728 r = crypt_iv_tcw_whitening(cc, dmreq, src + dmreq->sg_in.offset); in crypt_iv_tcw_gen()
742 struct dm_crypt_request *dmreq) in crypt_iv_tcw_post() argument
747 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_tcw_post()
751 dst = kmap_atomic(sg_page(&dmreq->sg_out)); in crypt_iv_tcw_post()
752 r = crypt_iv_tcw_whitening(cc, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_tcw_post()
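
Lines 678-752 implement the TrueCrypt-compatible "tcw" mode: crypt_iv_tcw_whitening() applies the sector-keyed whitening, crypt_iv_tcw_gen() strips it from the ciphertext before a READ and derives the IV from a seed XORed with the sector number, and crypt_iv_tcw_post() applies the whitening to the encrypted output on a WRITE (hence the early return at line 747 for reads). A sketch of the generator, with the iv_seed field and IV-size handling assumed from context:

static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
			    struct dm_crypt_request *dmreq)
{
	struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
	u64 sector = cpu_to_le64((u64)dmreq->iv_sector);
	u8 *src;
	int r = 0;

	/* on READ, remove the whitening from the on-disk ciphertext first */
	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
		src = kmap_atomic(sg_page(&dmreq->sg_in));
		r = crypt_iv_tcw_whitening(cc, dmreq, src + dmreq->sg_in.offset);
		kunmap_atomic(src);
	}

	/* IV = seed XOR sector number, repeated across the IV length */
	memcpy(iv, tcw->iv_seed, cc->iv_size);
	crypto_xor(iv, (u8 *)&sector, 8);
	if (cc->iv_size > 8)
		crypto_xor(&iv[8], (u8 *)&sector, cc->iv_size - 8);

	return r;
}
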
824 struct dm_crypt_request *dmreq) in req_of_dmreq() argument
826 return (struct ablkcipher_request *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
830 struct dm_crypt_request *dmreq) in iv_of_dmreq() argument
832 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
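
Lines 824-832 show the pointer arithmetic that ties the per-request objects together: the crypto API request, the struct dm_crypt_request placed cc->dmreq_start bytes behind it, and the IV buffer aligned immediately after the dm_crypt_request. A sketch of the helpers; the dmreq_of_req() inverse is included for symmetry and is an assumption from the surrounding file:

static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc,
					     struct ablkcipher_request *req)
{
	/* dm_crypt_request lives cc->dmreq_start bytes past the request */
	return (struct dm_crypt_request *)((char *)req + cc->dmreq_start);
}

static struct ablkcipher_request *req_of_dmreq(struct crypt_config *cc,
					       struct dm_crypt_request *dmreq)
{
	/* walk back to the enclosing ablkcipher_request */
	return (struct ablkcipher_request *)((char *)dmreq - cc->dmreq_start);
}

static u8 *iv_of_dmreq(struct crypt_config *cc,
		       struct dm_crypt_request *dmreq)
{
	/* the IV buffer sits just after dmreq, aligned for the cipher */
	return (u8 *)ALIGN((unsigned long)(dmreq + 1),
		crypto_ablkcipher_alignmask(any_tfm(cc)) + 1);
}
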
842 struct dm_crypt_request *dmreq; in crypt_convert_block() local
846 dmreq = dmreq_of_req(cc, req); in crypt_convert_block()
847 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block()
849 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block()
850 dmreq->ctx = ctx; in crypt_convert_block()
851 sg_init_table(&dmreq->sg_in, 1); in crypt_convert_block()
852 sg_set_page(&dmreq->sg_in, bv_in.bv_page, 1 << SECTOR_SHIFT, in crypt_convert_block()
855 sg_init_table(&dmreq->sg_out, 1); in crypt_convert_block()
856 sg_set_page(&dmreq->sg_out, bv_out.bv_page, 1 << SECTOR_SHIFT, in crypt_convert_block()
863 r = cc->iv_gen_ops->generator(cc, iv, dmreq); in crypt_convert_block()
868 ablkcipher_request_set_crypt(req, &dmreq->sg_in, &dmreq->sg_out, in crypt_convert_block()
877 r = cc->iv_gen_ops->post(cc, iv, dmreq); in crypt_convert_block()
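
Lines 842-877 are the per-sector conversion path: the dm_crypt_request is recovered from the crypto request, the IV location is computed, the sector number and context are recorded, single-entry scatterlists are pointed at one 512-byte chunk of the input and output bios, the IV generator runs, and after the cipher call the optional post() hook runs. A sketch around the matched lines; the bio iterator advance and some error handling are omitted, and the bio_vec setup is assumed from the surrounding dm-crypt.c:

static int crypt_convert_block(struct crypt_config *cc,
			       struct convert_context *ctx,
			       struct ablkcipher_request *req)
{
	struct bio_vec bv_in = bio_iter_iovec(ctx->bio_in, ctx->iter_in);
	struct bio_vec bv_out = bio_iter_iovec(ctx->bio_out, ctx->iter_out);
	struct dm_crypt_request *dmreq;
	u8 *iv;
	int r;

	dmreq = dmreq_of_req(cc, req);
	iv = iv_of_dmreq(cc, dmreq);

	dmreq->iv_sector = ctx->cc_sector;
	dmreq->ctx = ctx;

	/* one 512-byte sector in, one sector out */
	sg_init_table(&dmreq->sg_in, 1);
	sg_set_page(&dmreq->sg_in, bv_in.bv_page, 1 << SECTOR_SHIFT,
		    bv_in.bv_offset);
	sg_init_table(&dmreq->sg_out, 1);
	sg_set_page(&dmreq->sg_out, bv_out.bv_page, 1 << SECTOR_SHIFT,
		    bv_out.bv_offset);

	if (cc->iv_gen_ops) {
		r = cc->iv_gen_ops->generator(cc, iv, dmreq);
		if (r < 0)
			return r;
	}

	ablkcipher_request_set_crypt(req, &dmreq->sg_in, &dmreq->sg_out,
				     1 << SECTOR_SHIFT, iv);

	if (bio_data_dir(ctx->bio_in) == WRITE)
		r = crypto_ablkcipher_encrypt(req);
	else
		r = crypto_ablkcipher_decrypt(req);

	if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post)
		r = cc->iv_gen_ops->post(cc, iv, dmreq);

	return r;
}
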
1345 struct dm_crypt_request *dmreq = async_req->data; in kcryptd_async_done() local
1346 struct convert_context *ctx = dmreq->ctx; in kcryptd_async_done()
1356 error = cc->iv_gen_ops->post(cc, iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
1361 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
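
Lines 1345-1361 are the asynchronous completion side of the same layout: the dm_crypt_request comes back via async_req->data, the post() hook receives the IV buffer recomputed through iv_of_dmreq(), and the crypto request is released via req_of_dmreq(). A sketch of the handler; the -EINPROGRESS restart, error propagation, and the final read/write completion calls are assumptions drawn from the dm-crypt.c context of this kernel era:

static void kcryptd_async_done(struct crypto_async_request *async_req,
			       int error)
{
	struct dm_crypt_request *dmreq = async_req->data;
	struct convert_context *ctx = dmreq->ctx;
	struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
	struct crypt_config *cc = io->cc;

	/* backlog case: the request will be resubmitted, nothing to free yet */
	if (error == -EINPROGRESS) {
		complete(&ctx->restart);
		return;
	}

	if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post)
		error = cc->iv_gen_ops->post(cc, iv_of_dmreq(cc, dmreq), dmreq);

	if (error < 0)
		io->error = -EIO;

	crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio);

	if (!atomic_dec_and_test(&ctx->cc_pending))
		return;

	/* last pending sector: finish the read or submit the encrypted write */
	if (bio_data_dir(io->base_bio) == READ)
		kcryptd_crypt_read_done(io);
	else
		kcryptd_crypt_write_io_submit(io, 1);
}
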