Lines matching refs: io

185 static void kcryptd_queue_crypt(struct dm_crypt_io *io);
902 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req() local
904 if ((struct ablkcipher_request *)(io + 1) != req) in crypt_free_req()
973 static struct bio *crypt_alloc_buffer(struct dm_crypt_io *io, unsigned size) in crypt_alloc_buffer() argument
975 struct crypt_config *cc = io->cc; in crypt_alloc_buffer()
991 clone_init(io, clone); in crypt_alloc_buffer()
1035 static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc, in crypt_io_init() argument
1038 io->cc = cc; in crypt_io_init()
1039 io->base_bio = bio; in crypt_io_init()
1040 io->sector = sector; in crypt_io_init()
1041 io->error = 0; in crypt_io_init()
1042 io->ctx.req = NULL; in crypt_io_init()
1043 atomic_set(&io->io_pending, 0); in crypt_io_init()
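
The crypt_io_init() hits (1035-1043) enumerate the fields a fresh per-bio io starts out with. The sketch below collects those fields into reduced versions of struct dm_crypt_io, struct convert_context and struct crypt_config -- only what the listed lines touch, not the full definitions from drivers/md/dm-crypt.c -- and the later sketches in this listing reuse them.

	#include <linux/atomic.h>
	#include <linux/bio.h>
	#include <linux/crypto.h>
	#include <linux/device-mapper.h>
	#include <linux/rbtree.h>
	#include <linux/workqueue.h>

	/* Reduced structures: only the members the hits above reference. */
	struct crypt_config {
		struct dm_dev *dev;
		sector_t start;			/* offset into the underlying device */
		struct bio_set *bs;
		unsigned int per_bio_data_size;
		struct workqueue_struct *io_queue;	/* read submission */
		struct workqueue_struct *crypt_queue;	/* en/decryption */
		struct rb_root write_tree;		/* writes sorted by sector */
		/* key material, IV generator, mempools etc. omitted */
	};

	struct convert_context {
		struct bio *bio_in;
		struct bio *bio_out;
		struct bvec_iter iter_in;
		struct bvec_iter iter_out;
		sector_t cc_sector;
		atomic_t cc_pending;		/* outstanding cipher requests */
		struct ablkcipher_request *req;
	};

	struct dm_crypt_io {
		struct crypt_config *cc;
		struct bio *base_bio;		/* the bio dm handed to crypt_map() */
		struct work_struct work;
		struct convert_context ctx;
		atomic_t io_pending;		/* references on this io */
		int error;
		sector_t sector;
		struct rb_node rb_node;		/* node in cc->write_tree */
	};

	static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc,
				  struct bio *bio, sector_t sector)
	{
		io->cc = cc;
		io->base_bio = bio;
		io->sector = sector;
		io->error = 0;
		io->ctx.req = NULL;
		atomic_set(&io->io_pending, 0);	/* no outstanding references yet */
	}
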
1046 static void crypt_inc_pending(struct dm_crypt_io *io) in crypt_inc_pending() argument
1048 atomic_inc(&io->io_pending); in crypt_inc_pending()
1055 static void crypt_dec_pending(struct dm_crypt_io *io) in crypt_dec_pending() argument
1057 struct crypt_config *cc = io->cc; in crypt_dec_pending()
1058 struct bio *base_bio = io->base_bio; in crypt_dec_pending()
1059 int error = io->error; in crypt_dec_pending()
1061 if (!atomic_dec_and_test(&io->io_pending)) in crypt_dec_pending()
1064 if (io->ctx.req) in crypt_dec_pending()
1065 crypt_free_req(cc, io->ctx.req, base_bio); in crypt_dec_pending()
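
crypt_inc_pending()/crypt_dec_pending() (1046-1065) are a plain atomic reference count on the io: every in-flight clone, queued work item or pending cipher request holds one reference, and only the final drop releases the cipher request and completes base_bio. A sketch, assuming the two-argument bio_endio() of kernels from this era (it became single-argument in v4.3):

	static void crypt_inc_pending(struct dm_crypt_io *io)
	{
		atomic_inc(&io->io_pending);
	}

	/* Drop one reference; the final put ends the bio that entered crypt_map(). */
	static void crypt_dec_pending(struct dm_crypt_io *io)
	{
		struct crypt_config *cc = io->cc;
		struct bio *base_bio = io->base_bio;
		int error = io->error;

		if (!atomic_dec_and_test(&io->io_pending))
			return;

		if (io->ctx.req)
			crypt_free_req(cc, io->ctx.req, base_bio);

		bio_endio(base_bio, error);
	}
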
1089 struct dm_crypt_io *io = clone->bi_private; in crypt_endio() local
1090 struct crypt_config *cc = io->cc; in crypt_endio()
1105 kcryptd_queue_crypt(io); in crypt_endio()
1110 io->error = error; in crypt_endio()
1112 crypt_dec_pending(io); in crypt_endio()
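
crypt_endio() (1089-1112) runs when a cloned bio completes on the underlying device. A read clone that succeeded is requeued onto the crypt workqueue for decryption; anything else records the error and drops the io reference taken at submission. A hedged reconstruction (crypt_free_buffer_pages() is the driver's helper for releasing write-clone pages; it does not show up in this io-reference listing):

	static void crypt_endio(struct bio *clone, int error)
	{
		struct dm_crypt_io *io = clone->bi_private;
		struct crypt_config *cc = io->cc;
		unsigned int rw = bio_data_dir(clone);

		/* Pages of a write clone were allocated by dm-crypt; free them. */
		if (rw == WRITE)
			crypt_free_buffer_pages(cc, clone);

		bio_put(clone);

		/* A read clone that made it back intact still needs decrypting. */
		if (rw == READ && !error) {
			kcryptd_queue_crypt(io);
			return;
		}

		if (unlikely(error))
			io->error = error;

		crypt_dec_pending(io);
	}
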
1115 static void clone_init(struct dm_crypt_io *io, struct bio *clone) in clone_init() argument
1117 struct crypt_config *cc = io->cc; in clone_init()
1119 clone->bi_private = io; in clone_init()
1122 clone->bi_rw = io->base_bio->bi_rw; in clone_init()
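
clone_init() (1115-1122) is the common setup for every clone: tie it back to the owning io, install crypt_endio() as the completion handler, and aim it at the underlying device with the direction copied from base_bio. Sketch (bi_bdev and bi_rw match the bio layout of this kernel generation):

	static void clone_init(struct dm_crypt_io *io, struct bio *clone)
	{
		struct crypt_config *cc = io->cc;

		clone->bi_private = io;
		clone->bi_end_io  = crypt_endio;
		clone->bi_bdev    = cc->dev->bdev;
		clone->bi_rw      = io->base_bio->bi_rw;
	}
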
1125 static int kcryptd_io_read(struct dm_crypt_io *io, gfp_t gfp) in kcryptd_io_read() argument
1127 struct crypt_config *cc = io->cc; in kcryptd_io_read()
1136 clone = bio_clone_fast(io->base_bio, gfp, cc->bs); in kcryptd_io_read()
1140 crypt_inc_pending(io); in kcryptd_io_read()
1142 clone_init(io, clone); in kcryptd_io_read()
1143 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_io_read()
1151 struct dm_crypt_io *io = container_of(work, struct dm_crypt_io, work); in kcryptd_io_read_work() local
1153 crypt_inc_pending(io); in kcryptd_io_read_work()
1154 if (kcryptd_io_read(io, GFP_NOIO)) in kcryptd_io_read_work()
1155 io->error = -ENOMEM; in kcryptd_io_read_work()
1156 crypt_dec_pending(io); in kcryptd_io_read_work()
1159 static void kcryptd_queue_read(struct dm_crypt_io *io) in kcryptd_queue_read() argument
1161 struct crypt_config *cc = io->cc; in kcryptd_queue_read()
1163 INIT_WORK(&io->work, kcryptd_io_read_work); in kcryptd_queue_read()
1164 queue_work(cc->io_queue, &io->work); in kcryptd_queue_read()
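
kcryptd_io_read() (1125-1143) clones base_bio (reads are decrypted in place, so the clone shares its pages), while kcryptd_io_read_work()/kcryptd_queue_read() (1151-1164) provide the sleeping GFP_NOIO retry when the non-blocking attempt from crypt_map() fails. A condensed sketch of that fallback:

	static int kcryptd_io_read(struct dm_crypt_io *io, gfp_t gfp)
	{
		struct crypt_config *cc = io->cc;
		struct bio *clone;

		/* Only the bio is allocated here; the pages stay with base_bio. */
		clone = bio_clone_fast(io->base_bio, gfp, cc->bs);
		if (!clone)
			return 1;	/* caller decides whether to retry */

		crypt_inc_pending(io);

		clone_init(io, clone);
		clone->bi_iter.bi_sector = cc->start + io->sector;

		generic_make_request(clone);
		return 0;
	}

	static void kcryptd_io_read_work(struct work_struct *work)
	{
		struct dm_crypt_io *io = container_of(work, struct dm_crypt_io, work);

		crypt_inc_pending(io);
		if (kcryptd_io_read(io, GFP_NOIO))
			io->error = -ENOMEM;
		crypt_dec_pending(io);
	}

	static void kcryptd_queue_read(struct dm_crypt_io *io)
	{
		struct crypt_config *cc = io->cc;

		INIT_WORK(&io->work, kcryptd_io_read_work);
		queue_work(cc->io_queue, &io->work);
	}
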
1167 static void kcryptd_io_write(struct dm_crypt_io *io) in kcryptd_io_write() argument
1169 struct bio *clone = io->ctx.bio_out; in kcryptd_io_write()
1179 struct dm_crypt_io *io; in dmcrypt_write() local
1224 io = crypt_io_from_node(rb_first(&write_tree)); in dmcrypt_write()
1225 rb_erase(&io->rb_node, &write_tree); in dmcrypt_write()
1226 kcryptd_io_write(io); in dmcrypt_write()
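
kcryptd_io_write() (1167-1169) just issues the already-encrypted clone stored in io->ctx.bio_out. The dmcrypt_write hits (1179-1226) come from the offload thread that pulls queued ios out of a per-target red-black tree and submits them in ascending sector order; crypt_io_from_node() there is a thin rb_entry() wrapper. The sketch below shows only one drain pass over such a tree (dmcrypt_write_drain() is a hypothetical helper standing in for the real thread's wait/locking loop):

	static void kcryptd_io_write(struct dm_crypt_io *io)
	{
		struct bio *clone = io->ctx.bio_out;

		generic_make_request(clone);
	}

	/* One pass over a tree of ios already sorted by io->sector. */
	static void dmcrypt_write_drain(struct rb_root *write_tree)
	{
		struct rb_node *node;

		while ((node = rb_first(write_tree)) != NULL) {
			struct dm_crypt_io *io =
				rb_entry(node, struct dm_crypt_io, rb_node);

			rb_erase(&io->rb_node, write_tree);
			kcryptd_io_write(io);
		}
	}
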
1233 static void kcryptd_crypt_write_io_submit(struct dm_crypt_io *io, int async) in kcryptd_crypt_write_io_submit() argument
1235 struct bio *clone = io->ctx.bio_out; in kcryptd_crypt_write_io_submit()
1236 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_io_submit()
1241 if (unlikely(io->error < 0)) { in kcryptd_crypt_write_io_submit()
1244 crypt_dec_pending(io); in kcryptd_crypt_write_io_submit()
1249 BUG_ON(io->ctx.iter_out.bi_size); in kcryptd_crypt_write_io_submit()
1251 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_crypt_write_io_submit()
1261 sector = io->sector; in kcryptd_crypt_write_io_submit()
1269 rb_link_node(&io->rb_node, parent, rbp); in kcryptd_crypt_write_io_submit()
1270 rb_insert_color(&io->rb_node, &cc->write_tree); in kcryptd_crypt_write_io_submit()
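
kcryptd_crypt_write_io_submit() (1233-1270) is the post-encryption step on the write path: a negative io->error frees the clone and drops the reference, otherwise the clone is aimed at cc->start + io->sector and either submitted directly or, when write sorting is enabled, inserted into cc->write_tree keyed by sector for dmcrypt_write to issue later. A sketch of just the tree insertion implied by lines 1261-1270 (crypt_sort_write() is a hypothetical helper name; in the real function this is inline code, and the direct-submit branch plus the wakeup of the write thread are omitted):

	/* Insert io into the sector-sorted tree consumed by the write thread. */
	static void crypt_sort_write(struct crypt_config *cc, struct dm_crypt_io *io)
	{
		struct rb_node **rbp = &cc->write_tree.rb_node;
		struct rb_node *parent = NULL;
		sector_t sector = io->sector;

		while (*rbp) {
			parent = *rbp;
			if (sector < rb_entry(parent, struct dm_crypt_io, rb_node)->sector)
				rbp = &parent->rb_left;
			else
				rbp = &parent->rb_right;
		}
		rb_link_node(&io->rb_node, parent, rbp);
		rb_insert_color(&io->rb_node, &cc->write_tree);
	}
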
1276 static void kcryptd_crypt_write_convert(struct dm_crypt_io *io) in kcryptd_crypt_write_convert() argument
1278 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_convert()
1281 sector_t sector = io->sector; in kcryptd_crypt_write_convert()
1287 crypt_inc_pending(io); in kcryptd_crypt_write_convert()
1288 crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector); in kcryptd_crypt_write_convert()
1290 clone = crypt_alloc_buffer(io, io->base_bio->bi_iter.bi_size); in kcryptd_crypt_write_convert()
1292 io->error = -EIO; in kcryptd_crypt_write_convert()
1296 io->ctx.bio_out = clone; in kcryptd_crypt_write_convert()
1297 io->ctx.iter_out = clone->bi_iter; in kcryptd_crypt_write_convert()
1301 crypt_inc_pending(io); in kcryptd_crypt_write_convert()
1302 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_write_convert()
1304 io->error = -EIO; in kcryptd_crypt_write_convert()
1305 crypt_finished = atomic_dec_and_test(&io->ctx.cc_pending); in kcryptd_crypt_write_convert()
1309 kcryptd_crypt_write_io_submit(io, 0); in kcryptd_crypt_write_convert()
1310 io->sector = sector; in kcryptd_crypt_write_convert()
1314 crypt_dec_pending(io); in kcryptd_crypt_write_convert()
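
kcryptd_crypt_write_convert() (1276-1314) drives encryption for a write: it allocates a clone with fresh pages via crypt_alloc_buffer(), encrypts base_bio into it with crypt_convert(), and if the conversion finished synchronously hands the clone to kcryptd_crypt_write_io_submit(); asynchronous completions take the kcryptd_async_done() route instead. A trimmed sketch of that flow:

	static void kcryptd_crypt_write_convert(struct dm_crypt_io *io)
	{
		struct crypt_config *cc = io->cc;
		struct bio *clone;
		sector_t sector = io->sector;
		int crypt_finished, r;

		/* Hold a reference on io for the duration of this function. */
		crypt_inc_pending(io);
		crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector);

		/* Writes encrypt into a clone that owns its own pages. */
		clone = crypt_alloc_buffer(io, io->base_bio->bi_iter.bi_size);
		if (unlikely(!clone)) {
			io->error = -EIO;
			goto dec;
		}

		io->ctx.bio_out = clone;
		io->ctx.iter_out = clone->bi_iter;
		sector += bio_sectors(clone);

		crypt_inc_pending(io);
		r = crypt_convert(cc, &io->ctx);
		if (r)
			io->error = -EIO;
		crypt_finished = atomic_dec_and_test(&io->ctx.cc_pending);

		/* Synchronous ciphers finish here; submit the encrypted clone. */
		if (crypt_finished) {
			kcryptd_crypt_write_io_submit(io, 0);
			io->sector = sector;
		}

	dec:
		crypt_dec_pending(io);
	}
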
1317 static void kcryptd_crypt_read_done(struct dm_crypt_io *io) in kcryptd_crypt_read_done() argument
1319 crypt_dec_pending(io); in kcryptd_crypt_read_done()
1322 static void kcryptd_crypt_read_convert(struct dm_crypt_io *io) in kcryptd_crypt_read_convert() argument
1324 struct crypt_config *cc = io->cc; in kcryptd_crypt_read_convert()
1327 crypt_inc_pending(io); in kcryptd_crypt_read_convert()
1329 crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio, in kcryptd_crypt_read_convert()
1330 io->sector); in kcryptd_crypt_read_convert()
1332 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_read_convert()
1334 io->error = -EIO; in kcryptd_crypt_read_convert()
1336 if (atomic_dec_and_test(&io->ctx.cc_pending)) in kcryptd_crypt_read_convert()
1337 kcryptd_crypt_read_done(io); in kcryptd_crypt_read_convert()
1339 crypt_dec_pending(io); in kcryptd_crypt_read_convert()
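
The read side (1317-1339) is simpler: kcryptd_crypt_read_convert() decrypts base_bio in place (it is passed as both bio_in and bio_out to crypt_convert_init()), and kcryptd_crypt_read_done() is just the final reference drop once all cipher requests have completed. Sketch:

	static void kcryptd_crypt_read_done(struct dm_crypt_io *io)
	{
		crypt_dec_pending(io);
	}

	static void kcryptd_crypt_read_convert(struct dm_crypt_io *io)
	{
		struct crypt_config *cc = io->cc;
		int r;

		crypt_inc_pending(io);

		/* Reads decrypt in place: base_bio is both source and destination. */
		crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio,
				   io->sector);

		r = crypt_convert(cc, &io->ctx);
		if (r < 0)
			io->error = -EIO;

		/* Last cipher request already finished?  Then complete now. */
		if (atomic_dec_and_test(&io->ctx.cc_pending))
			kcryptd_crypt_read_done(io);

		crypt_dec_pending(io);
	}
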
1347 struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx); in kcryptd_async_done() local
1348 struct crypt_config *cc = io->cc; in kcryptd_async_done()
1359 io->error = -EIO; in kcryptd_async_done()
1361 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
1366 if (bio_data_dir(io->base_bio) == READ) in kcryptd_async_done()
1367 kcryptd_crypt_read_done(io); in kcryptd_async_done()
1369 kcryptd_crypt_write_io_submit(io, 1); in kcryptd_async_done()
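
kcryptd_async_done() (1347-1369) is the crypto API completion callback for asynchronous ciphers: it recovers the io via container_of() on the embedded convert_context, frees the finished request, and once the last pending request drops either completes the read or submits the encrypted write clone (the async == 1 argument routes the write through the offload path). A hedged sketch, omitting the -EINPROGRESS re-queue and IV post-processing of the real callback; dm_crypt_request and req_of_dmreq() are the driver's own request bookkeeping and are not reproduced here:

	static void kcryptd_async_done(struct crypto_async_request *async_req,
				       int error)
	{
		struct dm_crypt_request *dmreq = async_req->data;
		struct convert_context *ctx = dmreq->ctx;
		struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
		struct crypt_config *cc = io->cc;

		if (error < 0)
			io->error = -EIO;

		/* The cipher request for this chunk is done with; return it. */
		crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio);

		/* Not the last outstanding request for this io?  Nothing more to do. */
		if (!atomic_dec_and_test(&ctx->cc_pending))
			return;

		if (bio_data_dir(io->base_bio) == READ)
			kcryptd_crypt_read_done(io);
		else
			kcryptd_crypt_write_io_submit(io, 1);	/* async completion */
	}
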
1374 struct dm_crypt_io *io = container_of(work, struct dm_crypt_io, work); in kcryptd_crypt() local
1376 if (bio_data_dir(io->base_bio) == READ) in kcryptd_crypt()
1377 kcryptd_crypt_read_convert(io); in kcryptd_crypt()
1379 kcryptd_crypt_write_convert(io); in kcryptd_crypt()
1382 static void kcryptd_queue_crypt(struct dm_crypt_io *io) in kcryptd_queue_crypt() argument
1384 struct crypt_config *cc = io->cc; in kcryptd_queue_crypt()
1386 INIT_WORK(&io->work, kcryptd_crypt); in kcryptd_queue_crypt()
1387 queue_work(cc->crypt_queue, &io->work); in kcryptd_queue_crypt()
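
kcryptd_crypt()/kcryptd_queue_crypt() (1374-1387) are the dispatch glue: every conversion runs from cc->crypt_queue, and the work function simply branches on the direction of base_bio. Sketch:

	static void kcryptd_crypt(struct work_struct *work)
	{
		struct dm_crypt_io *io = container_of(work, struct dm_crypt_io, work);

		if (bio_data_dir(io->base_bio) == READ)
			kcryptd_crypt_read_convert(io);
		else
			kcryptd_crypt_write_convert(io);
	}

	static void kcryptd_queue_crypt(struct dm_crypt_io *io)
	{
		struct crypt_config *cc = io->cc;

		INIT_WORK(&io->work, kcryptd_crypt);
		queue_work(cc->crypt_queue, &io->work);
	}
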
1885 struct dm_crypt_io *io; in crypt_map() local
1901 io = dm_per_bio_data(bio, cc->per_bio_data_size); in crypt_map()
1902 crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector)); in crypt_map()
1903 io->ctx.req = (struct ablkcipher_request *)(io + 1); in crypt_map()
1905 if (bio_data_dir(io->base_bio) == READ) { in crypt_map()
1906 if (kcryptd_io_read(io, GFP_NOWAIT)) in crypt_map()
1907 kcryptd_queue_read(io); in crypt_map()
1909 kcryptd_queue_crypt(io); in crypt_map()
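
Finally, the crypt_map() hits (1885-1909) show the entry point: the io lives in dm's per-bio data, with the first ablkcipher request carved out of the same allocation right behind it (which is what the crypt_free_req() check at line 904 relies on); reads are submitted immediately and fall back to the io workqueue if the non-blocking clone fails, while writes go straight to the crypt workqueue. A trimmed sketch of that tail, skipping the flush/discard remapping at the top of the real function:

	static int crypt_map(struct dm_target *ti, struct bio *bio)
	{
		struct crypt_config *cc = ti->private;
		struct dm_crypt_io *io;

		io = dm_per_bio_data(bio, cc->per_bio_data_size);
		crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector));

		/* First cipher request sits directly behind the io in per-bio data. */
		io->ctx.req = (struct ablkcipher_request *)(io + 1);

		if (bio_data_dir(io->base_bio) == READ) {
			if (kcryptd_io_read(io, GFP_NOWAIT))
				kcryptd_queue_read(io);
		} else
			kcryptd_queue_crypt(io);

		return DM_MAPIO_SUBMITTED;
	}
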