chunk_sects       333 drivers/md/raid0.c 	unsigned int chunk_sects = mddev->chunk_sectors;
chunk_sects       335 drivers/md/raid0.c 	if (is_power_of_2(chunk_sects)) {
chunk_sects       336 drivers/md/raid0.c 		int chunksect_bits = ffz(~chunk_sects);
chunk_sects       338 drivers/md/raid0.c 		sect_in_chunk  = sector & (chunk_sects - 1);
chunk_sects       345 drivers/md/raid0.c 		sect_in_chunk = sector_div(sector, chunk_sects);
chunk_sects       347 drivers/md/raid0.c 		sector_div(chunk, chunk_sects * zone->nb_dev);
chunk_sects       354 drivers/md/raid0.c 	*sector_offset = (chunk * chunk_sects) + sect_in_chunk;
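
The raid0.c lines above come from the sector-mapping helper: a zone-relative position is split into the offset inside the current chunk and the chunk's index on the member device, using a mask (and shift) when the chunk size is a power of two and sector_div() otherwise. Below is a minimal userspace sketch of that arithmetic, not kernel code: map_sector_sketch(), zone_rel_offset and nb_dev are illustrative names, and the kernel's in-place sector_div() is emulated with ordinary 64-bit division.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool is_pow2(uint32_t n)
{
	return n != 0 && (n & (n - 1)) == 0;
}

/*
 * sector          - position of the request, used for the offset in the chunk
 * zone_rel_offset - zone-relative offset (what the kernel passes via *sector_offset)
 * chunk_sects     - chunk size in 512-byte sectors
 * nb_dev          - number of member devices in this zone
 *
 * Returns chunk * chunk_sects + sect_in_chunk, mirroring the final
 * assignment in the excerpt.
 */
static uint64_t map_sector_sketch(uint64_t sector, uint64_t zone_rel_offset,
				  uint32_t chunk_sects, uint32_t nb_dev)
{
	uint32_t sect_in_chunk;
	uint64_t chunk = zone_rel_offset;

	if (is_pow2(chunk_sects)) {
		/* power-of-2 fast path: mask instead of modulo */
		sect_in_chunk = sector & (chunk_sects - 1);
		chunk /= (uint64_t)nb_dev * chunk_sects;
	} else {
		/* generic path: sector_div() returns the remainder in the kernel */
		sect_in_chunk = sector % chunk_sects;
		chunk /= (uint64_t)chunk_sects * nb_dev;
	}
	return chunk * chunk_sects + sect_in_chunk;
}

int main(void)
{
	/* 128-sector (64 KiB) chunks over 3 devices, values chosen only as an example */
	printf("%llu\n",
	       (unsigned long long)map_sector_sketch(1000, 1000, 128, 3));
	return 0;
}
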
chunk_sects       464 drivers/md/raid0.c 			unsigned int chunk_sects, struct bio *bio)
chunk_sects       466 drivers/md/raid0.c 	if (likely(is_power_of_2(chunk_sects))) {
chunk_sects       467 drivers/md/raid0.c 		return chunk_sects >=
chunk_sects       468 drivers/md/raid0.c 			((bio->bi_iter.bi_sector & (chunk_sects-1))
chunk_sects       472 drivers/md/raid0.c 		return chunk_sects >= (sector_div(sector, chunk_sects)
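
The excerpt above is the chunk-boundary test used before mapping a request: a bio can be handled in one piece only if its starting offset within the chunk plus its length still fits inside chunk_sects. A standalone sketch of the same check, assuming a hypothetical io_in_chunk_boundary() helper that takes plain integers instead of a struct bio:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Does an I/O of nr_sectors starting at start_sector stay inside one chunk?
 * Mirrors the two branches in the excerpt: mask for power-of-2 chunk sizes,
 * modulo otherwise.
 */
static bool io_in_chunk_boundary(uint64_t start_sector, uint32_t nr_sectors,
				 uint32_t chunk_sects)
{
	uint32_t offset;

	if ((chunk_sects & (chunk_sects - 1)) == 0)	/* power of two */
		offset = start_sector & (chunk_sects - 1);
	else
		offset = start_sector % chunk_sects;

	return chunk_sects >= offset + nr_sectors;
}

int main(void)
{
	/* with 128-sector chunks, 8 sectors at 120 fit; 8 sectors at 124 cross a boundary */
	printf("%d %d\n",
	       io_in_chunk_boundary(120, 8, 128),
	       io_in_chunk_boundary(124, 8, 128));
	return 0;
}
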
chunk_sects       575 drivers/md/raid0.c 	unsigned chunk_sects;
chunk_sects       589 drivers/md/raid0.c 	chunk_sects = mddev->chunk_sectors;
chunk_sects       591 drivers/md/raid0.c 	sectors = chunk_sects -
chunk_sects       592 drivers/md/raid0.c 		(likely(is_power_of_2(chunk_sects))
chunk_sects       593 drivers/md/raid0.c 		 ? (sector & (chunk_sects-1))
chunk_sects       594 drivers/md/raid0.c 		 : sector_div(sector, chunk_sects));
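
The raid0 request path shown above computes how much of the bio fits before the next chunk boundary (chunk_sects minus the offset within the chunk) and uses that as the split size. A small sketch of that computation, with sectors_to_chunk_end() as an illustrative name:

#include <stdint.h>
#include <stdio.h>

/*
 * How many sectors remain before the next chunk boundary, i.e. the largest
 * piece that can be issued without crossing into the next chunk.
 */
static uint32_t sectors_to_chunk_end(uint64_t sector, uint32_t chunk_sects)
{
	uint32_t in_chunk;

	if ((chunk_sects & (chunk_sects - 1)) == 0)	/* power of two */
		in_chunk = sector & (chunk_sects - 1);
	else
		in_chunk = sector % chunk_sects;

	return chunk_sects - in_chunk;
}

int main(void)
{
	/* 64-sector chunks: at sector 100, only 28 sectors remain in the chunk */
	printf("%u\n", sectors_to_chunk_end(100, 64));
	return 0;
}
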
chunk_sects      1525 drivers/md/raid10.c 	int chunk_sects = chunk_mask + 1;
chunk_sects      1540 drivers/md/raid10.c 		     sectors > chunk_sects
chunk_sects      1544 drivers/md/raid10.c 		sectors = chunk_sects -
chunk_sects      1546 drivers/md/raid10.c 			 (chunk_sects - 1));
chunk_sects      5298 drivers/md/raid5.c 	unsigned chunk_sects = mddev->chunk_sectors;
chunk_sects      5299 drivers/md/raid5.c 	unsigned sectors = chunk_sects - (sector & (chunk_sects-1));
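
The raid10.c and raid5.c lines use only the mask form of the remainder: raid10 keeps a precomputed chunk_mask and recovers chunk_sects as chunk_mask + 1, while raid5 reads mddev->chunk_sectors directly. Both evidently rely on the chunk size being a power of two there, since only then does sector & (chunk_sects - 1) equal sector % chunk_sects. A tiny self-check of that equivalence, with example values only:

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint32_t chunk_sects = 1024;		/* example: 512 KiB chunks */
	uint32_t chunk_mask  = chunk_sects - 1;	/* what raid10 stores */

	for (uint64_t sector = 0; sector < 4096; sector++) {
		/* mask form used in the raid10.c and raid5.c excerpts above */
		uint32_t in_chunk = sector & chunk_mask;

		assert(in_chunk == sector % chunk_sects);
		assert(chunk_sects - in_chunk >= 1);	/* split size is never zero */
	}
	return 0;
}
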