dmreq              96 drivers/md/dm-crypt.c 			 struct dm_crypt_request *dmreq);
dmreq              98 drivers/md/dm-crypt.c 		    struct dm_crypt_request *dmreq);
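The two prototypes above (96/98) are the generator and post hooks of dm-crypt's per-IV-mode operations table, which every mode below implements. A minimal userspace model of that pair, with simplified stand-in types (the kernel's struct crypt_iv_operations also carries ctr/dtr/init/wipe callbacks, and struct dm_crypt_request has more fields than shown):

#include <stdint.h>

struct crypt_config;                     /* opaque in this sketch */

struct dm_crypt_request {                /* simplified stand-in */
        uint64_t iv_sector;              /* sector the IV is derived from */
        void *ctx;                       /* back-pointer to the convert_context */
};

struct crypt_iv_operations_model {
        /* fill 'iv' before the cipher request is queued */
        int (*generator)(struct crypt_config *cc, uint8_t *iv,
                         struct dm_crypt_request *dmreq);
        /* optional fixup after the cipher completes (lmk/tcw use it) */
        int (*post)(struct crypt_config *cc, uint8_t *iv,
                    struct dm_crypt_request *dmreq);
};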
dmreq             291 drivers/md/dm-crypt.c 			      struct dm_crypt_request *dmreq)
dmreq             294 drivers/md/dm-crypt.c 	*(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff);
dmreq             300 drivers/md/dm-crypt.c 				struct dm_crypt_request *dmreq)
dmreq             303 drivers/md/dm-crypt.c 	*(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);
dmreq             309 drivers/md/dm-crypt.c 				  struct dm_crypt_request *dmreq)
dmreq             313 drivers/md/dm-crypt.c 	*(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector);
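The hits at 291-313 are the three "plain" generators, which differ only in the width and byte order of the stored sector number. A self-contained sketch of the three layouts (put_le32/put_le64/put_be64 are local stand-ins for the kernel's cpu_to_le32/cpu_to_le64/cpu_to_be64 stores):

#include <stdio.h>
#include <stdint.h>
#include <string.h>

static void put_le32(uint8_t *p, uint32_t v)
{
        p[0] = v; p[1] = v >> 8; p[2] = v >> 16; p[3] = v >> 24;
}

static void put_le64(uint8_t *p, uint64_t v)
{
        put_le32(p, v);
        put_le32(p + 4, v >> 32);
}

static void put_be64(uint8_t *p, uint64_t v)
{
        for (int i = 7; i >= 0; i--) { p[i] = v; v >>= 8; }
}

int main(void)
{
        uint8_t iv[16];
        const unsigned iv_size = sizeof(iv);
        uint64_t iv_sector = 0x0123456789abcdefULL;

        /* plain (294): low 32 bits, little-endian, rest zeroed */
        memset(iv, 0, iv_size);
        put_le32(iv, iv_sector & 0xffffffff);

        /* plain64 (303): all 64 bits, little-endian, rest zeroed */
        memset(iv, 0, iv_size);
        put_le64(iv, iv_sector);

        /* plain64be (313): all 64 bits, big-endian, at the END of the IV */
        memset(iv, 0, iv_size);
        put_be64(&iv[iv_size - sizeof(uint64_t)], iv_sector);

        for (unsigned i = 0; i < iv_size; i++)
                printf("%02x", iv[i]);
        printf("\n");
        return 0;
}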
dmreq             319 drivers/md/dm-crypt.c 			      struct dm_crypt_request *dmreq)
dmreq             326 drivers/md/dm-crypt.c 	*(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);
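The essiv generator (319-326) stores the same little-endian sector as plain64; in kernels of this vintage the actual IV = E_k(sector) encryption, with k a hash of the volume key, is handled by the crypto layer's essiv template rather than in this hook. A conceptual sketch, where block_encrypt() is a hypothetical placeholder and NOT a real cipher:

#include <stdint.h>
#include <string.h>

static void block_encrypt(uint8_t *out, const uint8_t *in,
                          const uint8_t *key, unsigned blksz)
{
        for (unsigned i = 0; i < blksz; i++)   /* placeholder mix, not AES */
                out[i] = in[i] ^ key[i];
}

static void essiv_model(uint8_t *iv, unsigned iv_size,
                        uint64_t sector, const uint8_t *hashed_key)
{
        memset(iv, 0, iv_size);
        for (int i = 0; i < 8; i++) {          /* cpu_to_le64 stand-in */
                iv[i] = (uint8_t)sector;
                sector >>= 8;
        }
        block_encrypt(iv, iv, hashed_key, iv_size);
}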
dmreq             366 drivers/md/dm-crypt.c 			      struct dm_crypt_request *dmreq)
dmreq             372 drivers/md/dm-crypt.c 	val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1);
dmreq             379 drivers/md/dm-crypt.c 			     struct dm_crypt_request *dmreq)
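benbi (366-372) stores a big-endian, 1-based narrow-block number at the tail of the IV; the shift converts the 512-byte sector count into cipher-block units (in the kernel it is derived from the cipher block size at constructor time). A sketch of the computation:

#include <stdint.h>
#include <string.h>

static void put_be64(uint8_t *p, uint64_t v)
{
        for (int i = 7; i >= 0; i--) { p[i] = v; v >>= 8; }
}

static void benbi_iv(uint8_t *iv, unsigned iv_size,
                     uint64_t iv_sector, unsigned shift)
{
        uint64_t val = (iv_sector << shift) + 1;   /* block numbers start at 1 */

        memset(iv, 0, iv_size);
        put_be64(iv + iv_size - sizeof(uint64_t), val);
}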
dmreq             454 drivers/md/dm-crypt.c 			    struct dm_crypt_request *dmreq,
dmreq             481 drivers/md/dm-crypt.c 	buf[0] = cpu_to_le32(dmreq->iv_sector & 0xFFFFFFFF);
dmreq             482 drivers/md/dm-crypt.c 	buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000);
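Lines 481-482 pack the sector number into the 8 bytes that lmk hashes together with the sector payload: the low 32 bits, then the next 24 bits with the top bit forced on (for loop-AES compatibility). The packing, isolated:

#include <stdint.h>

static void lmk_sector_words(uint32_t buf[2], uint64_t iv_sector)
{
        buf[0] = (uint32_t)(iv_sector & 0xffffffff);
        buf[1] = (uint32_t)(((iv_sector >> 32) & 0x00ffffff) | 0x80000000);
        /* the kernel stores these as __le32; a big-endian host would
         * byte-swap both words here */
}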
dmreq             502 drivers/md/dm-crypt.c 			    struct dm_crypt_request *dmreq)
dmreq             508 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
dmreq             509 drivers/md/dm-crypt.c 		sg = crypt_get_sg_data(cc, dmreq->sg_in);
dmreq             511 drivers/md/dm-crypt.c 		r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset);
dmreq             520 drivers/md/dm-crypt.c 			     struct dm_crypt_request *dmreq)
dmreq             526 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE)
dmreq             529 drivers/md/dm-crypt.c 	sg = crypt_get_sg_data(cc, dmreq->sg_out);
dmreq             531 drivers/md/dm-crypt.c 	r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset);
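The lmk gen/post pair (502-531) splits on I/O direction: on WRITE the IV is hashed from the plaintext about to be encrypted; on READ it is zeroed and recomputed in ->post from the decrypted sector, then XORed back into the first block. A direction-only model, where hash_with_sector() is a trivial placeholder for crypt_iv_lmk_one()'s MD5 construction:

#include <stdint.h>
#include <string.h>

enum dir { DIR_READ, DIR_WRITE };

static void hash_with_sector(uint8_t *iv, unsigned iv_size,
                             const uint8_t *data, unsigned len,
                             uint64_t sector)
{
        memset(iv, 0, iv_size);
        for (unsigned i = 0; i < len; i++)     /* placeholder mix, not MD5 */
                iv[i % iv_size] ^= data[i];
        iv[0] ^= (uint8_t)sector;
}

/* ->generator */
static void lmk_gen_model(enum dir d, uint8_t *iv, unsigned iv_size,
                          const uint8_t *plain, unsigned len, uint64_t sector)
{
        if (d == DIR_WRITE)
                hash_with_sector(iv, iv_size, plain, len, sector);
        else
                memset(iv, 0, iv_size);        /* real IV derived in ->post */
}

/* ->post, READ only: re-derive the IV and tweak the first block */
static void lmk_post_model(enum dir d, uint8_t *iv, unsigned iv_size,
                           uint8_t *plain, unsigned len, uint64_t sector)
{
        if (d == DIR_WRITE)
                return;
        hash_with_sector(iv, iv_size, plain, len, sector);
        for (unsigned i = 0; i < iv_size && i < len; i++)
                plain[i] ^= iv[i];
}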
dmreq             610 drivers/md/dm-crypt.c 				  struct dm_crypt_request *dmreq,
dmreq             614 drivers/md/dm-crypt.c 	__le64 sector = cpu_to_le64(dmreq->iv_sector);
dmreq             648 drivers/md/dm-crypt.c 			    struct dm_crypt_request *dmreq)
dmreq             652 drivers/md/dm-crypt.c 	__le64 sector = cpu_to_le64(dmreq->iv_sector);
dmreq             657 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
dmreq             658 drivers/md/dm-crypt.c 		sg = crypt_get_sg_data(cc, dmreq->sg_in);
dmreq             660 drivers/md/dm-crypt.c 		r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
dmreq             674 drivers/md/dm-crypt.c 			     struct dm_crypt_request *dmreq)
dmreq             680 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
dmreq             684 drivers/md/dm-crypt.c 	sg = crypt_get_sg_data(cc, dmreq->sg_out);
dmreq             686 drivers/md/dm-crypt.c 	r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset);
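The tcw hooks (648-686) invert the lmk direction test: ->generator strips the sector-keyed whitening from ciphertext before a READ is decrypted, and ->post applies it after a WRITE is encrypted; since the whitening is an XOR, both are the same operation. whiten() below is a heavy simplification of crypt_iv_tcw_whitening() (610-614), which mixes a per-volume seed with the sector number and CRC32-folds it to 8 bytes before XORing across the whole sector:

#include <stdint.h>

enum dir { DIR_READ, DIR_WRITE };

static void whiten(uint8_t *data, unsigned len,
                   const uint8_t seed[16], uint64_t sector)
{
        uint8_t w[8];

        for (int i = 0; i < 8; i++)            /* placeholder for the
                                                  seed/sector/CRC32 fold */
                w[i] = seed[i] ^ seed[8 + i] ^ (uint8_t)(sector >> (8 * i));
        for (unsigned i = 0; i < len; i++)
                data[i] ^= w[i % 8];
}

static void tcw_gen_model(enum dir d, uint8_t *ciphertext, unsigned len,
                          const uint8_t seed[16], uint64_t sector)
{
        if (d != DIR_WRITE)                    /* READ: un-whiten first */
                whiten(ciphertext, len, seed, sector);
}

static void tcw_post_model(enum dir d, uint8_t *ciphertext, unsigned len,
                           const uint8_t seed[16], uint64_t sector)
{
        if (d == DIR_WRITE)                    /* WRITE: whiten the result */
                whiten(ciphertext, len, seed, sector);
}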
dmreq             693 drivers/md/dm-crypt.c 				struct dm_crypt_request *dmreq)
dmreq             718 drivers/md/dm-crypt.c 			    struct dm_crypt_request *dmreq)
dmreq             731 drivers/md/dm-crypt.c 	*(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size);
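eboiv (718-731) seeds its IV buffer with the sector's byte offset; the kernel then encrypts that block with the volume key to produce the real IV (the encryption step is not visible in these hits). The seeding, sketched:

#include <stdint.h>
#include <string.h>

static void eboiv_seed(uint8_t *buf, unsigned iv_size,
                       uint64_t iv_sector, unsigned sector_size)
{
        uint64_t off = iv_sector * sector_size;   /* byte offset of the sector */

        memset(buf, 0, iv_size);
        for (int i = 0; i < 8; i++) {             /* cpu_to_le64 stand-in */
                buf[i] = (uint8_t)off;
                off >>= 8;
        }
}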
dmreq             913 drivers/md/dm-crypt.c static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq)
dmreq             915 drivers/md/dm-crypt.c 	return (void *)((char *)dmreq - cc->dmreq_start);
dmreq             919 drivers/md/dm-crypt.c 		       struct dm_crypt_request *dmreq)
dmreq             922 drivers/md/dm-crypt.c 		return (u8 *)ALIGN((unsigned long)(dmreq + 1),
dmreq             925 drivers/md/dm-crypt.c 		return (u8 *)ALIGN((unsigned long)(dmreq + 1),
dmreq             930 drivers/md/dm-crypt.c 		       struct dm_crypt_request *dmreq)
dmreq             932 drivers/md/dm-crypt.c 	return iv_of_dmreq(cc, dmreq) + cc->iv_size;
dmreq             936 drivers/md/dm-crypt.c 		       struct dm_crypt_request *dmreq)
dmreq             938 drivers/md/dm-crypt.c 	u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size;
dmreq             943 drivers/md/dm-crypt.c 		       struct dm_crypt_request *dmreq)
dmreq             945 drivers/md/dm-crypt.c 	u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size +
dmreq             951 drivers/md/dm-crypt.c 				struct dm_crypt_request *dmreq)
dmreq             953 drivers/md/dm-crypt.c 	struct convert_context *ctx = dmreq->ctx;
dmreq             956 drivers/md/dm-crypt.c 	return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) *
dmreq             961 drivers/md/dm-crypt.c 			       struct dm_crypt_request *dmreq)
dmreq             963 drivers/md/dm-crypt.c 	return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size;
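All the helpers from 913 to 963 are offset arithmetic inside one mempool allocation that holds the crypto request, the dm_crypt_request, and per-request scratch space. A simplified model of that layout, ignoring the tfm-alignmask ALIGN() at 922/925 and the integrity-metadata indirection of tag_from_dmreq():

#include <stdint.h>

/*
 * | crypto request ... | dm_crypt_request | iv | org_iv | sector | tag idx |
 * ^ start              ^ start + dmreq_start
 *
 * Offsets below are relative to the first byte past the dm_crypt_request.
 */
struct layout {
        unsigned dmreq_start;                  /* offset of dmreq in the blob */
        unsigned iv_size;
};

static void *req_of_dmreq_model(const struct layout *l, void *dmreq)
{
        return (char *)dmreq - l->dmreq_start; /* back to the crypto request */
}

static uint8_t *iv_of(const struct layout *l, uint8_t *past_dmreq)
{
        (void)l;
        return past_dmreq;                     /* working IV for the cipher */
}

static uint8_t *org_iv_of(const struct layout *l, uint8_t *past_dmreq)
{
        return past_dmreq + l->iv_size;        /* copy preserved for ->post */
}

static uint8_t *org_sector_of(const struct layout *l, uint8_t *past_dmreq)
{
        return past_dmreq + 2 * l->iv_size;    /* __le64 sector copy */
}

static uint8_t *org_tag_of(const struct layout *l, uint8_t *past_dmreq)
{
        return past_dmreq + 2 * l->iv_size + sizeof(uint64_t);
}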
dmreq             973 drivers/md/dm-crypt.c 	struct dm_crypt_request *dmreq;
dmreq             984 drivers/md/dm-crypt.c 	dmreq = dmreq_of_req(cc, req);
dmreq             985 drivers/md/dm-crypt.c 	dmreq->iv_sector = ctx->cc_sector;
dmreq             987 drivers/md/dm-crypt.c 		dmreq->iv_sector >>= cc->sector_shift;
dmreq             988 drivers/md/dm-crypt.c 	dmreq->ctx = ctx;
dmreq             990 drivers/md/dm-crypt.c 	*org_tag_of_dmreq(cc, dmreq) = tag_offset;
dmreq             992 drivers/md/dm-crypt.c 	sector = org_sector_of_dmreq(cc, dmreq);
dmreq             995 drivers/md/dm-crypt.c 	iv = iv_of_dmreq(cc, dmreq);
dmreq             996 drivers/md/dm-crypt.c 	org_iv = org_iv_of_dmreq(cc, dmreq);
dmreq             997 drivers/md/dm-crypt.c 	tag = tag_from_dmreq(cc, dmreq);
dmreq             998 drivers/md/dm-crypt.c 	tag_iv = iv_tag_from_dmreq(cc, dmreq);
dmreq            1005 drivers/md/dm-crypt.c 	sg_init_table(dmreq->sg_in, 4);
dmreq            1006 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_in[0], sector, sizeof(uint64_t));
dmreq            1007 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size);
dmreq            1008 drivers/md/dm-crypt.c 	sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset);
dmreq            1009 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size);
dmreq            1011 drivers/md/dm-crypt.c 	sg_init_table(dmreq->sg_out, 4);
dmreq            1012 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_out[0], sector, sizeof(uint64_t));
dmreq            1013 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size);
dmreq            1014 drivers/md/dm-crypt.c 	sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset);
dmreq            1015 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size);
dmreq            1022 drivers/md/dm-crypt.c 			r = cc->iv_gen_ops->generator(cc, org_iv, dmreq);
dmreq            1035 drivers/md/dm-crypt.c 		aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
dmreq            1042 drivers/md/dm-crypt.c 		aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
dmreq            1054 drivers/md/dm-crypt.c 		r = cc->iv_gen_ops->post(cc, org_iv, dmreq);
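The AEAD path (1005-1042) builds two identical 4-entry scatterlists: sector number and original IV first, then the sector payload, then the tag. The first two entries are passed as associated data, so a ciphertext replayed at a different sector fails authentication. A plain-struct stand-in for that geometry:

#include <stdint.h>

struct seg {
        void *buf;
        unsigned len;
};

struct aead_io_layout {
        struct seg sector;   /* 8-byte LE sector number: authenticated only  */
        struct seg org_iv;   /* iv_size bytes: authenticated only            */
        struct seg data;     /* sector_size bytes: encrypted or decrypted    */
        struct seg tag;      /* integrity_tag_size bytes: authentication tag */
};

/* In the kernel, aead_request_set_ad(req, sizeof(uint64_t) + iv_size)
 * marks the first two entries as AAD; the same geometry is used for
 * sg_in and sg_out. */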
dmreq            1070 drivers/md/dm-crypt.c 	struct dm_crypt_request *dmreq;
dmreq            1079 drivers/md/dm-crypt.c 	dmreq = dmreq_of_req(cc, req);
dmreq            1080 drivers/md/dm-crypt.c 	dmreq->iv_sector = ctx->cc_sector;
dmreq            1082 drivers/md/dm-crypt.c 		dmreq->iv_sector >>= cc->sector_shift;
dmreq            1083 drivers/md/dm-crypt.c 	dmreq->ctx = ctx;
dmreq            1085 drivers/md/dm-crypt.c 	*org_tag_of_dmreq(cc, dmreq) = tag_offset;
dmreq            1087 drivers/md/dm-crypt.c 	iv = iv_of_dmreq(cc, dmreq);
dmreq            1088 drivers/md/dm-crypt.c 	org_iv = org_iv_of_dmreq(cc, dmreq);
dmreq            1089 drivers/md/dm-crypt.c 	tag_iv = iv_tag_from_dmreq(cc, dmreq);
dmreq            1091 drivers/md/dm-crypt.c 	sector = org_sector_of_dmreq(cc, dmreq);
dmreq            1095 drivers/md/dm-crypt.c 	sg_in  = &dmreq->sg_in[0];
dmreq            1096 drivers/md/dm-crypt.c 	sg_out = &dmreq->sg_out[0];
dmreq            1109 drivers/md/dm-crypt.c 			r = cc->iv_gen_ops->generator(cc, org_iv, dmreq);
dmreq            1128 drivers/md/dm-crypt.c 		r = cc->iv_gen_ops->post(cc, org_iv, dmreq);
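Both conversion paths (984-988 and 1079-1083) derive iv_sector from ctx->cc_sector and, when the target uses encryption sectors larger than 512 bytes with the iv_large_sectors option, shift it down so IVs are numbered per encryption sector rather than per 512-byte unit. The adjustment, isolated (field names mirror the kernel, types simplified):

#include <stdint.h>
#include <stdbool.h>

static uint64_t iv_sector_for(uint64_t cc_sector, unsigned sector_shift,
                              bool iv_large_sectors)
{
        /* e.g. 4096-byte encryption sectors give sector_shift = 3 */
        return iv_large_sectors ? cc_sector >> sector_shift : cc_sector;
}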
dmreq            1675 drivers/md/dm-crypt.c 	struct dm_crypt_request *dmreq = async_req->data;
dmreq            1676 drivers/md/dm-crypt.c 	struct convert_context *ctx = dmreq->ctx;
dmreq            1691 drivers/md/dm-crypt.c 		error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq);
dmreq            1696 drivers/md/dm-crypt.c 			    (unsigned long long)le64_to_cpu(*org_sector_of_dmreq(cc, dmreq)));
dmreq            1701 drivers/md/dm-crypt.c 	crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio);
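The async completion (1675-1701) gets only the dm_crypt_request back from the crypto API: it runs the ->post fixup on the preserved original IV, reports an authentication failure against the sector copy kept in the request, and frees the whole allocation through the req_of_dmreq() back-pointer. A shape-only model, with hypothetical stand-ins (iv_post_hook ~ cc->iv_gen_ops->post, free_req ~ crypt_free_req):

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

struct dmreq_model {
        uint64_t org_sector;          /* __le64 sector copy in the request */
        uint8_t org_iv[16];           /* original IV preserved for ->post  */
        unsigned dmreq_start;         /* offset back to the crypto request */
};

static int complete_model(struct dmreq_model *dmreq, int error,
                          int (*iv_post_hook)(uint8_t *iv),
                          void (*free_req)(void *req))
{
        if (error == 0 && iv_post_hook)
                error = iv_post_hook(dmreq->org_iv);
        if (error == -EBADMSG)        /* AEAD authentication failed */
                fprintf(stderr, "integrity error, sector %llu\n",
                        (unsigned long long)dmreq->org_sector);

        free_req((char *)dmreq - dmreq->dmreq_start);
        return error;
}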