Lines matching refs: io

33 struct io { definition of struct io
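The field list below is reconstructed purely from the dereferences visible in this listing (error_bits, count, client, callback, context, vma_invalidate_*); a hedged sketch, not the verbatim definition from drivers/md/dm-io.c:

    /* Reconstruction from usage in the matches below; the real
     * definition also forces alignment so the low pointer bits can
     * carry a region index (see the check at line 93). */
    struct io {
        unsigned long error_bits;          /* one bit per failed region */
        atomic_t count;                    /* outstanding sub-bios + 1 */
        struct dm_io_client *client;       /* owns pool and bios (bioset) */
        io_notify_fn callback;             /* fired from complete_io() */
        void *context;                     /* opaque callback argument */
        void *vma_invalidate_address;      /* vmap range flushed on completion */
        unsigned long vma_invalidate_size;
    } __aligned(DM_IO_MAX_REGIONS);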
90 static void store_io_and_region_in_bio(struct bio *bio, struct io *io, in store_io_and_region_in_bio() argument
93 if (unlikely(!IS_ALIGNED((unsigned long)io, DM_IO_MAX_REGIONS))) { in store_io_and_region_in_bio()
94 DMCRIT("Unaligned struct io pointer %p", io); in store_io_and_region_in_bio()
98 bio->bi_private = (void *)((unsigned long)io | region); in store_io_and_region_in_bio()
101 static void retrieve_io_and_region_from_bio(struct bio *bio, struct io **io, in retrieve_io_and_region_from_bio() argument
106 *io = (void *)(val & -(unsigned long)DM_IO_MAX_REGIONS); in retrieve_io_and_region_from_bio()
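Lines 93-106 implement pointer tagging: because DM_IO_MAX_REGIONS is a power of two and struct io is allocated with at least that alignment, the low bits of the io pointer are guaranteed zero and can smuggle the region index through bio->bi_private. A self-contained userspace sketch of the same pack/unpack arithmetic (DM_IO_MAX_REGIONS is set to 8 purely for this demo; the kernel defines its own value in include/linux/dm-io.h):

    #include <assert.h>
    #include <stdalign.h>
    #include <stdio.h>

    #define DM_IO_MAX_REGIONS 8     /* demo value; must be a power of two */

    struct io {
        alignas(DM_IO_MAX_REGIONS) unsigned long error_bits;
    };

    /* store_io_and_region_in_bio(): low bits carry the region index */
    static void *pack(struct io *io, unsigned int region)
    {
        assert(((unsigned long)io & (DM_IO_MAX_REGIONS - 1)) == 0);
        return (void *)((unsigned long)io | region);
    }

    /* retrieve_io_and_region_from_bio(): mask the two parts apart again */
    static void unpack(void *bi_private, struct io **io, unsigned int *region)
    {
        unsigned long val = (unsigned long)bi_private;

        *io = (struct io *)(val & -(unsigned long)DM_IO_MAX_REGIONS);
        *region = val & (DM_IO_MAX_REGIONS - 1);
    }

    int main(void)
    {
        static struct io obj;
        struct io *io;
        unsigned int region;

        unpack(pack(&obj, 5), &io, &region);
        printf("io=%p region=%u\n", (void *)io, region);
        return 0;
    }

The negation on line 106 works because for a power of two N, -(unsigned long)N equals ~(N - 1): a mask that clears exactly the bits the region tag occupies.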
114 static void complete_io(struct io *io) in complete_io() argument
116 unsigned long error_bits = io->error_bits; in complete_io()
117 io_notify_fn fn = io->callback; in complete_io()
118 void *context = io->context; in complete_io()
120 if (io->vma_invalidate_size) in complete_io()
121 invalidate_kernel_vmap_range(io->vma_invalidate_address, in complete_io()
122 io->vma_invalidate_size); in complete_io()
124 mempool_free(io, io->client->pool); in complete_io()
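complete_io() copies everything it needs (error_bits, callback, context) into locals before returning the object to the mempool, so the user callback runs after the free and never touches possibly-recycled memory. A userspace sketch of that ordering, with free() standing in for mempool_free() and the vmap invalidation elided:

    #include <stdio.h>
    #include <stdlib.h>

    typedef void (*io_notify_fn)(unsigned long error, void *context);

    struct io {
        unsigned long error_bits;
        io_notify_fn callback;
        void *context;
    };

    static void complete_io(struct io *io)
    {
        /* Snapshot first: after the free below, io may be handed out
         * again to a concurrent allocation. */
        unsigned long error_bits = io->error_bits;
        io_notify_fn fn = io->callback;
        void *context = io->context;

        free(io);                    /* mempool_free() stand-in */
        fn(error_bits, context);     /* callback sees only the snapshot */
    }

    static void report(unsigned long error, void *context)
    {
        (void)context;
        printf("error_bits=%#lx\n", error);
    }

    int main(void)
    {
        struct io *io = malloc(sizeof(*io));

        if (!io)
            return 1;
        io->error_bits = 0;
        io->callback = report;
        io->context = NULL;
        complete_io(io);
        return 0;
    }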
128 static void dec_count(struct io *io, unsigned int region, int error) in dec_count() argument
131 set_bit(region, &io->error_bits); in dec_count()
133 if (atomic_dec_and_test(&io->count)) in dec_count()
134 complete_io(io); in dec_count()
139 struct io *io; in endio() local
148 retrieve_io_and_region_from_bio(bio, &io, &region); in endio()
152 dec_count(io, region, error); in endio()
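dec_count() folds a failure into a per-region bitmask and completes only when the last reference drops; endio() is the per-bio tail that recovers the tagged io/region pair and calls it. The kernel's set_bit/atomic_dec_and_test pair maps onto C11 atomics roughly like this (a sketch, not the kernel helpers):

    #include <stdatomic.h>
    #include <stdio.h>

    struct io {
        _Atomic unsigned long error_bits;   /* set_bit() target */
        atomic_int count;                   /* references outstanding */
    };

    static void complete_io(struct io *io)
    {
        printf("complete, error_bits=%#lx\n",
               (unsigned long)atomic_load(&io->error_bits));
    }

    static void dec_count(struct io *io, unsigned int region, int error)
    {
        if (error)
            /* analogue of set_bit(region, &io->error_bits) */
            atomic_fetch_or(&io->error_bits, 1UL << region);

        /* atomic_dec_and_test(): previous value 1 means we hit zero */
        if (atomic_fetch_sub(&io->count, 1) == 1)
            complete_io(io);
    }

    int main(void)
    {
        struct io io = { .error_bits = 0, .count = 2 };

        dec_count(&io, 3, -5);  /* region 3 failed (e.g. -EIO) */
        dec_count(&io, 0, 0);   /* last reference completes */
        return 0;
    }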
281 struct dpages *dp, struct io *io) in do_region() argument
302 dec_count(io, region, -EOPNOTSUPP); in do_region()
320 bio = bio_alloc_bioset(GFP_NOIO, num_bvecs, io->client->bios); in do_region()
324 store_io_and_region_in_bio(bio, io, region); in do_region()
356 atomic_inc(&io->count); in do_region()
363 struct io *io, int sync) in dispatch_io() argument
380 do_region(rw, i, where + i, dp, io); in dispatch_io()
387 dec_count(io, 0, 0); in dispatch_io()
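do_region() takes one reference per bio it submits (line 356), while dispatch_io() seeds io->count with 1 before the loop and drops that seed afterwards (line 387). The seed guarantees complete_io() cannot fire while regions are still being issued, even if every bio completes instantly. A compact simulation of that protocol:

    #include <stdatomic.h>
    #include <stdio.h>

    struct io { atomic_int count; };

    static void dec_count(struct io *io)
    {
        if (atomic_fetch_sub(&io->count, 1) == 1)
            printf("complete_io() fires exactly once, at the end\n");
    }

    static void dispatch_io(struct io *io, int num_regions)
    {
        for (int i = 0; i < num_regions; i++) {
            atomic_fetch_add(&io->count, 1);   /* do_region(): per-bio ref */
            dec_count(io);                     /* worst case: endio() runs
                                                  before the next region */
        }
        dec_count(io);     /* the dec_count(io, 0, 0) at line 387 */
    }

    int main(void)
    {
        struct io io = { .count = 1 };  /* atomic_set(&io->count, 1) */

        dispatch_io(&io, 3);
        return 0;
    }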
407 struct io *io; in sync_io() local
417 io = mempool_alloc(client->pool, GFP_NOIO); in sync_io()
418 io->error_bits = 0; in sync_io()
419 atomic_set(&io->count, 1); /* see dispatch_io() */ in sync_io()
420 io->client = client; in sync_io()
421 io->callback = sync_io_complete; in sync_io()
422 io->context = &sio; in sync_io()
424 io->vma_invalidate_address = dp->vma_invalidate_address; in sync_io()
425 io->vma_invalidate_size = dp->vma_invalidate_size; in sync_io()
427 dispatch_io(rw, num_regions, where, dp, io, 1); in sync_io()
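sync_io() reuses the async machinery: it allocates the same struct io from the client's mempool, points the callback at an internal sync_io_complete helper with a stack context (&sio), dispatches with sync=1, and waits for the helper to fire. A minimal userspace analogue of "sync built on async"; struct sync_io and its done flag are stand-ins for the kernel's context, which sleeps on a wait queue rather than spinning:

    #include <errno.h>
    #include <stdatomic.h>
    #include <stdlib.h>

    typedef void (*io_notify_fn)(unsigned long error, void *context);

    struct io {
        unsigned long error_bits;
        io_notify_fn callback;
        void *context;
    };

    /* Hypothetical stand-in for the stack context &sio in sync_io(). */
    struct sync_io {
        unsigned long error_bits;
        atomic_int done;
    };

    static void sync_io_complete(unsigned long error, void *context)
    {
        struct sync_io *sio = context;

        sio->error_bits = error;
        atomic_store(&sio->done, 1);    /* kernel: wake up the sleeper */
    }

    static int sync_io(void)
    {
        struct sync_io sio = { .done = 0 };
        struct io *io = malloc(sizeof(*io));   /* mempool_alloc() stand-in */

        if (!io)
            return -ENOMEM;
        io->error_bits = 0;
        io->callback = sync_io_complete;
        io->context = &sio;

        /* dispatch_io() would issue bios here; simulate instant I/O */
        io->callback(io->error_bits, io->context);
        free(io);

        while (!atomic_load(&sio.done))
            ;                           /* kernel sleeps, demo spins */
        return sio.error_bits ? -EIO : 0;
    }

    int main(void)
    {
        return sync_io() ? 1 : 0;
    }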
441 struct io *io; in async_io() local
449 io = mempool_alloc(client->pool, GFP_NOIO); in async_io()
450 io->error_bits = 0; in async_io()
451 atomic_set(&io->count, 1); /* see dispatch_io() */ in async_io()
452 io->client = client; in async_io()
453 io->callback = fn; in async_io()
454 io->context = context; in async_io()
456 io->vma_invalidate_address = dp->vma_invalidate_address; in async_io()
457 io->vma_invalidate_size = dp->vma_invalidate_size; in async_io()
459 dispatch_io(rw, num_regions, where, dp, io, 0); in async_io()
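async_io() performs the same initialization, but stores the caller's fn/context verbatim and tells dispatch_io() not to wait (sync=0). Both paths sit behind the public dm_io() entry point; the fragment below sketches how a caller of this kernel vintage might reach async_io(). The field names follow include/linux/dm-io.h as best remembered here, so treat them as assumptions to check against your tree:

    /* Hypothetical caller: client, bdev and vma are assumed set up. */
    static void my_notify(unsigned long error_bits, void *context)
    {
        /* one bit set in error_bits per failed region */
    }

    static int start_async_read(struct dm_io_client *client,
                                struct block_device *bdev, void *vma)
    {
        struct dm_io_region where = {
            .bdev   = bdev,
            .sector = 0,
            .count  = 8,
        };
        struct dm_io_request req = {
            .bi_rw  = READ,
            .mem    = { .type = DM_IO_VMA, .ptr.vma = vma },
            .notify = { .fn = my_notify, .context = NULL },
            .client = client,
        };

        /* a non-NULL notify.fn routes dm_io() to async_io() */
        return dm_io(&req, 1, &where, NULL);
    }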
529 _dm_io_cache = KMEM_CACHE(io, 0); in dm_io_init()
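dm_io_init() creates the slab cache backing every struct io (KMEM_CACHE(io, 0) expands to a cache named "io" sized and aligned for the struct); clients then layer a mempool over it so GFP_NOIO allocations can always make progress from the reserved objects. A hedged sketch of that pairing; MIN_IOS is an assumed reserve count, and error handling is abbreviated relative to the real dm_io_client_create():

    static struct kmem_cache *_dm_io_cache;

    struct dm_io_client {
        mempool_t *pool;        /* struct io objects over _dm_io_cache */
        struct bio_set *bios;   /* per-client bioset; creation elided here */
    };

    #define MIN_IOS 16          /* assumed reserve; not from the source */

    struct dm_io_client *dm_io_client_create(void)
    {
        struct dm_io_client *client;

        client = kmalloc(sizeof(*client), GFP_KERNEL);
        if (!client)
            return ERR_PTR(-ENOMEM);

        /* guarantees MIN_IOS objects even under memory pressure */
        client->pool = mempool_create_slab_pool(MIN_IOS, _dm_io_cache);
        if (!client->pool) {
            kfree(client);
            return ERR_PTR(-ENOMEM);
        }
        return client;
    }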