Lines matching refs:io — cross-reference hits for the identifier io in drivers/md/dm-crypt.c (the device-mapper crypt target). Each hit shows the kernel source line number, the source line, and the enclosing function; a trailing "local" or "argument" marks where io is declared.
186 static void kcryptd_queue_crypt(struct dm_crypt_io *io);
908 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req() local
910 if ((struct ablkcipher_request *)(io + 1) != req) in crypt_free_req()
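The comparison at line 910 only makes sense together with line 1925 further down: crypt_map() points io->ctx.req at the memory immediately following the struct dm_crypt_io inside the per-bio data area, so crypt_free_req() can tell that inline request apart from one that came from a mempool. A minimal userspace sketch of the layout trick; the struct names here are simplified stand-ins, not the kernel types:

    #include <stdio.h>
    #include <stdlib.h>

    struct request { char scratch[32]; };     /* stand-in for ablkcipher_request */
    struct dm_io   { struct request *req; };  /* stand-in for dm_crypt_io */

    int main(void)
    {
        /* one allocation holding the io header and the request right after
         * it, the layout dm_per_bio_data() hands out in the kernel */
        struct dm_io *io = malloc(sizeof(*io) + sizeof(struct request));
        if (!io)
            return 1;

        io->req = (struct request *)(io + 1);   /* the line-1925 assignment */

        /* the line-910 test: only a request that is NOT the inline one
         * needs to go back to a separate pool */
        if ((struct request *)(io + 1) != io->req)
            printf("req is pool-allocated, free it to the pool\n");
        else
            printf("req is inline with io, nothing separate to free\n");

        free(io);
        return 0;
    }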
987 static struct bio *crypt_alloc_buffer(struct dm_crypt_io *io, unsigned size) in crypt_alloc_buffer() argument
989 struct crypt_config *cc = io->cc; in crypt_alloc_buffer()
1005 clone_init(io, clone); in crypt_alloc_buffer()
1049 static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc, in crypt_io_init() argument
1052 io->cc = cc; in crypt_io_init()
1053 io->base_bio = bio; in crypt_io_init()
1054 io->sector = sector; in crypt_io_init()
1055 io->error = 0; in crypt_io_init()
1056 io->ctx.req = NULL; in crypt_io_init()
1057 atomic_set(&io->io_pending, 0); in crypt_io_init()
1060 static void crypt_inc_pending(struct dm_crypt_io *io) in crypt_inc_pending() argument
1062 atomic_inc(&io->io_pending); in crypt_inc_pending()
1069 static void crypt_dec_pending(struct dm_crypt_io *io) in crypt_dec_pending() argument
1071 struct crypt_config *cc = io->cc; in crypt_dec_pending()
1072 struct bio *base_bio = io->base_bio; in crypt_dec_pending()
1073 int error = io->error; in crypt_dec_pending()
1075 if (!atomic_dec_and_test(&io->io_pending)) in crypt_dec_pending()
1078 if (io->ctx.req) in crypt_dec_pending()
1079 crypt_free_req(cc, io->ctx.req, base_bio); in crypt_dec_pending()
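crypt_inc_pending()/crypt_dec_pending() implement the usual split-I/O completion pattern: each in-flight sub-operation holds a reference on the io, and whichever path drops the last one finishes the base bio. A minimal userspace model of the same pattern using C11 atomics; the function and field names mirror the kernel code but are chosen here for illustration:

    #include <stdatomic.h>
    #include <stdio.h>

    struct io { atomic_int io_pending; int error; };

    static void io_complete(struct io *io)  /* models the tail of crypt_dec_pending() */
    {
        printf("io done, error=%d\n", io->error);
    }

    static void inc_pending(struct io *io)
    {
        atomic_fetch_add(&io->io_pending, 1);
    }

    static void dec_pending(struct io *io)
    {
        /* an old value of 1 means this was the last reference, matching
         * the kernel's atomic_dec_and_test() check */
        if (atomic_fetch_sub(&io->io_pending, 1) == 1)
            io_complete(io);
    }

    int main(void)
    {
        struct io io = { .error = 0 };
        atomic_init(&io.io_pending, 0);  /* crypt_io_init() starts at 0 */

        inc_pending(&io);   /* submitter's reference */
        inc_pending(&io);   /* clone bio's reference */
        dec_pending(&io);   /* clone completes */
        dec_pending(&io);   /* submitter drops its ref: io_complete() runs */
        return 0;
    }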
1104 struct dm_crypt_io *io = clone->bi_private; in crypt_endio() local
1105 struct crypt_config *cc = io->cc; in crypt_endio()
1119 kcryptd_queue_crypt(io); in crypt_endio()
1124 io->error = error; in crypt_endio()
1126 crypt_dec_pending(io); in crypt_endio()
1129 static void clone_init(struct dm_crypt_io *io, struct bio *clone) in clone_init() argument
1131 struct crypt_config *cc = io->cc; in clone_init()
1133 clone->bi_private = io; in clone_init()
1136 clone->bi_rw = io->base_bio->bi_rw; in clone_init()
1139 static int kcryptd_io_read(struct dm_crypt_io *io, gfp_t gfp) in kcryptd_io_read() argument
1141 struct crypt_config *cc = io->cc; in kcryptd_io_read()
1150 clone = bio_clone_fast(io->base_bio, gfp, cc->bs); in kcryptd_io_read()
1154 crypt_inc_pending(io); in kcryptd_io_read()
1156 clone_init(io, clone); in kcryptd_io_read()
1157 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_io_read()
1165 struct dm_crypt_io *io = container_of(work, struct dm_crypt_io, work); in kcryptd_io_read_work() local
1167 crypt_inc_pending(io); in kcryptd_io_read_work()
1168 if (kcryptd_io_read(io, GFP_NOIO)) in kcryptd_io_read_work()
1169 io->error = -ENOMEM; in kcryptd_io_read_work()
1170 crypt_dec_pending(io); in kcryptd_io_read_work()
1173 static void kcryptd_queue_read(struct dm_crypt_io *io) in kcryptd_queue_read() argument
1175 struct crypt_config *cc = io->cc; in kcryptd_queue_read()
1177 INIT_WORK(&io->work, kcryptd_io_read_work); in kcryptd_queue_read()
1178 queue_work(cc->io_queue, &io->work); in kcryptd_queue_read()
1181 static void kcryptd_io_write(struct dm_crypt_io *io) in kcryptd_io_write() argument
1183 struct bio *clone = io->ctx.bio_out; in kcryptd_io_write()
1193 struct dm_crypt_io *io; in dmcrypt_write() local
1236 io = crypt_io_from_node(rb_first(&write_tree)); in dmcrypt_write()
1237 rb_erase(&io->rb_node, &write_tree); in dmcrypt_write()
1238 kcryptd_io_write(io); in dmcrypt_write()
1245 static void kcryptd_crypt_write_io_submit(struct dm_crypt_io *io, int async) in kcryptd_crypt_write_io_submit() argument
1247 struct bio *clone = io->ctx.bio_out; in kcryptd_crypt_write_io_submit()
1248 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_io_submit()
1253 if (unlikely(io->error < 0)) { in kcryptd_crypt_write_io_submit()
1256 crypt_dec_pending(io); in kcryptd_crypt_write_io_submit()
1261 BUG_ON(io->ctx.iter_out.bi_size); in kcryptd_crypt_write_io_submit()
1263 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_crypt_write_io_submit()
1273 sector = io->sector; in kcryptd_crypt_write_io_submit()
1281 rb_link_node(&io->rb_node, parent, rbp); in kcryptd_crypt_write_io_submit()
1282 rb_insert_color(&io->rb_node, &cc->write_tree); in kcryptd_crypt_write_io_submit()
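Lines 1273-1282 insert deferred writes into cc->write_tree, an rbtree keyed by sector, and dmcrypt_write() (lines 1236-1238) dispatches them lowest-sector-first via rb_first()/rb_erase(). The net effect is simply "issue queued writes in sector order"; a sketch of that idea with a sorted singly-linked list standing in for the rbtree:

    #include <stdio.h>

    struct wio { unsigned long sector; struct wio *next; };

    /* keep the pending list ordered by sector, like the rbtree insert
     * into write_tree above */
    static void insert_sorted(struct wio **head, struct wio *io)
    {
        struct wio **p = head;

        while (*p && (*p)->sector < io->sector)
            p = &(*p)->next;
        io->next = *p;
        *p = io;
    }

    int main(void)
    {
        struct wio a = { .sector = 80 }, b = { .sector = 8 }, c = { .sector = 72 };
        struct wio *head = NULL;

        insert_sorted(&head, &a);
        insert_sorted(&head, &b);
        insert_sorted(&head, &c);

        /* dispatch lowest-sector-first, like rb_first() + rb_erase()
         * in dmcrypt_write() */
        for (struct wio *io = head; io; io = io->next)
            printf("submit write at sector %lu\n", io->sector);
        return 0;
    }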
1288 static void kcryptd_crypt_write_convert(struct dm_crypt_io *io) in kcryptd_crypt_write_convert() argument
1290 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_convert()
1293 sector_t sector = io->sector; in kcryptd_crypt_write_convert()
1299 crypt_inc_pending(io); in kcryptd_crypt_write_convert()
1300 crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector); in kcryptd_crypt_write_convert()
1302 clone = crypt_alloc_buffer(io, io->base_bio->bi_iter.bi_size); in kcryptd_crypt_write_convert()
1304 io->error = -EIO; in kcryptd_crypt_write_convert()
1308 io->ctx.bio_out = clone; in kcryptd_crypt_write_convert()
1309 io->ctx.iter_out = clone->bi_iter; in kcryptd_crypt_write_convert()
1313 crypt_inc_pending(io); in kcryptd_crypt_write_convert()
1314 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_write_convert()
1316 io->error = -EIO; in kcryptd_crypt_write_convert()
1317 crypt_finished = atomic_dec_and_test(&io->ctx.cc_pending); in kcryptd_crypt_write_convert()
1321 kcryptd_crypt_write_io_submit(io, 0); in kcryptd_crypt_write_convert()
1322 io->sector = sector; in kcryptd_crypt_write_convert()
1326 crypt_dec_pending(io); in kcryptd_crypt_write_convert()
1329 static void kcryptd_crypt_read_done(struct dm_crypt_io *io) in kcryptd_crypt_read_done() argument
1331 crypt_dec_pending(io); in kcryptd_crypt_read_done()
1334 static void kcryptd_crypt_read_convert(struct dm_crypt_io *io) in kcryptd_crypt_read_convert() argument
1336 struct crypt_config *cc = io->cc; in kcryptd_crypt_read_convert()
1339 crypt_inc_pending(io); in kcryptd_crypt_read_convert()
1341 crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio, in kcryptd_crypt_read_convert()
1342 io->sector); in kcryptd_crypt_read_convert()
1344 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_read_convert()
1346 io->error = -EIO; in kcryptd_crypt_read_convert()
1348 if (atomic_dec_and_test(&io->ctx.cc_pending)) in kcryptd_crypt_read_convert()
1349 kcryptd_crypt_read_done(io); in kcryptd_crypt_read_convert()
1351 crypt_dec_pending(io); in kcryptd_crypt_read_convert()
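Note the asymmetry between the two convert paths: the read side passes io->base_bio as both input and output (lines 1341-1342), so decryption happens in place, while the write side allocates a clone and encrypts into it (lines 1302-1309). A toy model of that wiring, with XOR standing in for the block cipher and a plain buffer pair standing in for the bios:

    #include <stdio.h>
    #include <string.h>

    /* toy convert context; in/out loosely mirror ctx.bio_in/ctx.bio_out */
    struct ctx { const unsigned char *in; unsigned char *out; size_t len; };

    /* XOR stands in for the cipher: applying it twice round-trips */
    static void convert(struct ctx *c)
    {
        for (size_t i = 0; i < c->len; i++)
            c->out[i] = c->in[i] ^ 0x5a;
    }

    int main(void)
    {
        unsigned char base[3]  = { 'a', 'b', 'c' };  /* models base_bio data */
        unsigned char clone[3] = { 0 };              /* models the write clone */

        struct ctx w = { base, clone, 3 };   /* write path: base_bio -> clone */
        convert(&w);

        struct ctx r = { clone, clone, 3 };  /* read path: in place, in == out */
        convert(&r);

        printf("round trip %s\n", memcmp(base, clone, 3) == 0 ? "ok" : "bad");
        return 0;
    }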
1359 struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx); in kcryptd_async_done() local
1360 struct crypt_config *cc = io->cc; in kcryptd_async_done()
1376 io->error = -EIO; in kcryptd_async_done()
1378 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
1383 if (bio_data_dir(io->base_bio) == READ) in kcryptd_async_done()
1384 kcryptd_crypt_read_done(io); in kcryptd_async_done()
1386 kcryptd_crypt_write_io_submit(io, 1); in kcryptd_async_done()
1391 struct dm_crypt_io *io = container_of(work, struct dm_crypt_io, work); in kcryptd_crypt() local
1393 if (bio_data_dir(io->base_bio) == READ) in kcryptd_crypt()
1394 kcryptd_crypt_read_convert(io); in kcryptd_crypt()
1396 kcryptd_crypt_write_convert(io); in kcryptd_crypt()
1399 static void kcryptd_queue_crypt(struct dm_crypt_io *io) in kcryptd_queue_crypt() argument
1401 struct crypt_config *cc = io->cc; in kcryptd_queue_crypt()
1403 INIT_WORK(&io->work, kcryptd_crypt); in kcryptd_queue_crypt()
1404 queue_work(cc->crypt_queue, &io->work); in kcryptd_queue_crypt()
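kcryptd_queue_read() and kcryptd_queue_crypt() follow the same kernel workqueue idiom: a work item embedded in the io is (re)initialized with a handler and queued, and the handler recovers the io with container_of() (lines 1165, 1391). A minimal pthread-based model of handing an io to a worker; the embedded-function-pointer scheme is an illustration of the shape, not the kernel API:

    /* build with: cc -pthread sketch.c */
    #include <pthread.h>
    #include <stddef.h>
    #include <stdio.h>

    struct work { void (*fn)(struct work *); };    /* models work_struct */
    struct io   { int sector; struct work work; }; /* work embedded in the io */

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    static void kcryptd_crypt(struct work *w)      /* models the handler */
    {
        struct io *io = container_of(w, struct io, work);
        printf("worker handling io at sector %d\n", io->sector);
    }

    static void *worker(void *arg)                 /* models the kworker thread */
    {
        struct work *w = arg;
        w->fn(w);
        return NULL;
    }

    int main(void)
    {
        struct io io = { .sector = 42 };
        pthread_t t;

        io.work.fn = kcryptd_crypt;                  /* INIT_WORK() analogue */
        pthread_create(&t, NULL, worker, &io.work);  /* queue_work() analogue */
        pthread_join(&t, NULL);
        return 0;
    }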
1907 struct dm_crypt_io *io; in crypt_map() local
1923 io = dm_per_bio_data(bio, cc->per_bio_data_size); in crypt_map()
1924 crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector)); in crypt_map()
1925 io->ctx.req = (struct ablkcipher_request *)(io + 1); in crypt_map()
1927 if (bio_data_dir(io->base_bio) == READ) { in crypt_map()
1928 if (kcryptd_io_read(io, GFP_NOWAIT)) in crypt_map()
1929 kcryptd_queue_read(io); in crypt_map()
1931 kcryptd_queue_crypt(io); in crypt_map()
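crypt_map() shows the dispatch policy: reads first try kcryptd_io_read() with GFP_NOWAIT so the common case stays on the submitting path, falling back to the workqueue only when the non-blocking clone fails (the worker then retries with the blocking GFP_NOIO, lines 1165-1170); writes always go to the crypt queue because they must be encrypted before submission. A sketch of that try-fast-then-defer shape; io_read_nowait(), queue_read(), and queue_crypt() are hypothetical stand-ins:

    #include <stdbool.h>
    #include <stdio.h>

    enum dir { DIR_READ, DIR_WRITE };

    /* stand-in for kcryptd_io_read(io, GFP_NOWAIT): returns true on
     * success; here it pretends the non-blocking clone failed */
    static bool io_read_nowait(void) { return false; }

    static void queue_read(void)
    {
        printf("read deferred to io_queue; worker retries with GFP_NOIO\n");
    }

    static void queue_crypt(void)
    {
        printf("write queued on crypt_queue for encryption first\n");
    }

    static void map(enum dir d)   /* models the tail of crypt_map() */
    {
        if (d == DIR_READ) {
            if (!io_read_nowait())
                queue_read();
        } else {
            queue_crypt();
        }
    }

    int main(void)
    {
        map(DIR_READ);
        map(DIR_WRITE);
        return 0;
    }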