Lines Matching refs:new_chunk_sectors
6493 if (!mddev->new_chunk_sectors || in setup_conf()
6494 (mddev->new_chunk_sectors << 9) % PAGE_SIZE || in setup_conf()
6495 !is_power_of_2(mddev->new_chunk_sectors)) { in setup_conf()
6497 mdname(mddev), mddev->new_chunk_sectors << 9); in setup_conf()
6563 conf->chunk_sectors = mddev->new_chunk_sectors; in setup_conf()
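The hits at 6493-6495 and 6563 are setup_conf()'s geometry validation and the later copy of the accepted value into the array configuration. A minimal sketch of that shape, assuming the usual drivers/md/raid5.c context (struct mddev, struct r5conf, mdname(), is_power_of_2()); the warning text is paraphrased and the allocation code in between is elided:

        /* new_chunk_sectors counts 512-byte sectors, so << 9 converts to
         * bytes.  The chunk must be non-zero, a whole number of pages,
         * and a power of two.
         */
        if (!mddev->new_chunk_sectors ||
            (mddev->new_chunk_sectors << 9) % PAGE_SIZE ||
            !is_power_of_2(mddev->new_chunk_sectors)) {
                pr_warn("md/raid:%s: invalid chunk size %d\n",
                        mdname(mddev), mddev->new_chunk_sectors << 9);
                return ERR_PTR(-EINVAL);
        }

        /* Later in setup_conf(), once conf is allocated (hit 6563), the
         * pending chunk size becomes the working chunk size.
         */
        conf->chunk_sectors = mddev->new_chunk_sectors;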
6764 chunk_sectors = max(mddev->chunk_sectors, mddev->new_chunk_sectors); in run()
6786 abs(min_offset_diff) >= mddev->new_chunk_sectors) in run()
6812 BUG_ON(mddev->chunk_sectors != mddev->new_chunk_sectors); in run()
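The run() hits split into two cases. While a reshape is pending (6764, 6786), geometry checks use the larger of the old and new chunk size, and an in-place reshape is only tolerated when the per-device data_offset shift covers both chunk sizes; with no reshape pending (6812), the old and new values must already agree. A sketch of that shape, assuming the reshape_position handling in run() (min_offset_diff is computed from the member devices earlier in the function; details vary by kernel version):

        if (mddev->reshape_position != MaxSector) {
                int max_degraded = (mddev->level == 6 ? 2 : 1);
                /* Stripe-boundary math uses the larger chunk size (6764). */
                int chunk_sectors = max(mddev->chunk_sectors,
                                        mddev->new_chunk_sectors);
                sector_t here_new = mddev->reshape_position;

                if (sector_div(here_new, chunk_sectors *
                               (mddev->raid_disks - max_degraded)))
                        return -EINVAL;

                if (mddev->delta_disks == 0 &&
                    mddev->new_layout == mddev->layout &&
                    mddev->new_chunk_sectors == mddev->chunk_sectors) {
                        /* An in-place reshape is only safe if the offset
                         * shift is at least one full chunk in both the old
                         * and new geometry (6786), or the array was started
                         * read-only.
                         */
                        if (abs(min_offset_diff) >= mddev->chunk_sectors &&
                            abs(min_offset_diff) >= mddev->new_chunk_sectors)
                                ; /* not really in-place, so OK */
                        else if (!mddev->ro)
                                return -EINVAL;
                }
        } else {
                /* No reshape recorded: pending geometry must match (6812). */
                BUG_ON(mddev->chunk_sectors != mddev->new_chunk_sectors);
        }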
7323 ((mddev->new_chunk_sectors << 9) / STRIPE_SIZE) * 4 in check_stripe_cache()
7327 ((max(mddev->chunk_sectors, mddev->new_chunk_sectors) << 9) in check_stripe_cache()
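Both check_stripe_cache() hits enforce the same rule: a reshape only makes sensible progress if the stripe cache can hold about four chunks' worth of stripe_heads for both the old and the new chunk size, and the warning reports the requirement for the larger of the two. A sketch, assuming conf->min_nr_stripes and STRIPE_SIZE from the raid5 code (message text paraphrased):

static int check_stripe_cache(struct mddev *mddev)
{
        struct r5conf *conf = mddev->private;

        /* Require roughly 4 full chunks of stripes for both geometries. */
        if (((mddev->chunk_sectors << 9) / STRIPE_SIZE) * 4
            > conf->min_nr_stripes ||
            ((mddev->new_chunk_sectors << 9) / STRIPE_SIZE) * 4
            > conf->min_nr_stripes) {
                pr_warn("md/raid:%s: reshape: not enough stripes, need %lu\n",
                        mdname(mddev),
                        ((max(mddev->chunk_sectors,
                              mddev->new_chunk_sectors) << 9)
                         / STRIPE_SIZE) * 4);
                return 0;
        }
        return 1;
}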
7342 mddev->new_chunk_sectors == mddev->chunk_sectors) in check_reshape()
7362 if (mddev->new_chunk_sectors > mddev->chunk_sectors || in check_reshape()
7367 max(mddev->new_chunk_sectors, in check_reshape()
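In check_reshape() the value appears twice: first to detect that nothing is actually changing (7342), then to decide whether the per-stripe scratch buffers need to grow because the chunk size is increasing or disks are being added (7362, 7367), sized for the larger of the two chunk sizes. A sketch of those two uses, assuming resize_chunks() and the surrounding checks in raid5.c:

        if (mddev->delta_disks == 0 &&
            mddev->new_layout == mddev->layout &&
            mddev->new_chunk_sectors == mddev->chunk_sectors)
                return 0; /* nothing to do */

        if (!check_stripe_cache(mddev))
                return -ENOSPC;

        if (mddev->new_chunk_sectors > mddev->chunk_sectors ||
            mddev->delta_disks > 0)
                /* Scratch space must cover the larger geometry. */
                if (resize_chunks(conf,
                                  conf->previous_raid_disks
                                  + max(0, mddev->delta_disks),
                                  max(mddev->new_chunk_sectors,
                                      mddev->chunk_sectors)) < 0)
                        return -ENOMEM;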
7420 conf->chunk_sectors = mddev->new_chunk_sectors; in raid5_start_reshape()
7494 mddev->new_chunk_sectors = in raid5_start_reshape()
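raid5_start_reshape() switches the array to the new chunk size under the device lock, remembering the previous value in conf->prev_chunk_sectors (7420); if the reshape thread cannot be started, the error path restores the old geometry in both mddev and conf (7494). A sketch of the two sites, assuming the prev_* bookkeeping and locking used in that function:

        spin_lock_irq(&conf->device_lock);
        conf->previous_raid_disks = conf->raid_disks;
        conf->raid_disks += mddev->delta_disks;
        conf->prev_chunk_sectors = conf->chunk_sectors;
        conf->chunk_sectors = mddev->new_chunk_sectors;         /* 7420 */
        conf->prev_algo = conf->algorithm;
        conf->algorithm = mddev->new_layout;
        /* generation bump and reshape_progress setup elided */
        spin_unlock_irq(&conf->device_lock);

        /* ... */

        if (!mddev->sync_thread) {
                /* Reshape thread failed to start: put the previous
                 * geometry back into both mddev and conf (7494).
                 */
                spin_lock_irq(&conf->device_lock);
                mddev->raid_disks = conf->raid_disks = conf->previous_raid_disks;
                mddev->new_chunk_sectors =
                        conf->chunk_sectors = conf->prev_chunk_sectors;
                mddev->new_layout = conf->algorithm = conf->prev_algo;
                spin_unlock_irq(&conf->device_lock);
                return -EAGAIN;
        }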
7636 mddev->new_chunk_sectors = mddev->chunk_sectors; in raid45_takeover_raid0()
7667 mddev->new_chunk_sectors = chunksect; in raid5_takeover_raid1()
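Both takeover hits prime new_chunk_sectors before handing the array to setup_conf(): raid45_takeover_raid0() simply keeps the raid0 chunk size (7636), while raid5_takeover_raid1() has no chunk size to inherit and derives one, starting at 64 KiB and halving until it divides the array size, as long as it stays at least a page (7667). A sketch of that derivation, assuming the takeover path in raid5.c:

        int chunksect = 64 * 2;         /* 64 KiB, in 512-byte sectors */

        /* The array size must be an exact multiple of the chunk size. */
        while (chunksect && (mddev->array_sectors & (chunksect - 1)))
                chunksect >>= 1;

        if (chunksect < (PAGE_SIZE >> 9))
                /* array size does not allow a suitable chunk size */
                return ERR_PTR(-EINVAL);

        mddev->new_level = 5;
        mddev->new_layout = ALGORITHM_LEFT_SYMMETRIC;
        mddev->new_chunk_sectors = chunksect;   /* 7667 */

        return setup_conf(mddev);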
7713 int new_chunk = mddev->new_chunk_sectors; in raid5_check_reshape()
7747 int new_chunk = mddev->new_chunk_sectors; in raid6_check_reshape()
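raid5_check_reshape() and raid6_check_reshape() both start by snapshotting the requested value into new_chunk (7713, 7747) and vetting it before falling through to the common check_reshape(): it must be a power of two, at least one page, and a divisor of the array size. A sketch of the raid6 variant (the raid5 one additionally applies the change immediately on a 2-disk array, where no restriping is needed), assuming algorithm_valid_raid6() and the surrounding code:

static int raid6_check_reshape(struct mddev *mddev)
{
        int new_chunk = mddev->new_chunk_sectors;       /* 7747 */

        if (mddev->new_layout >= 0 &&
            !algorithm_valid_raid6(mddev->new_layout))
                return -EINVAL;
        if (new_chunk > 0) {
                if (!is_power_of_2(new_chunk))
                        return -EINVAL;
                if (new_chunk < (PAGE_SIZE >> 9))
                        return -EINVAL;
                if (mddev->array_sectors & (new_chunk - 1))
                        return -EINVAL; /* not a factor of the array size */
        }

        /* Looks valid: fall through to the common reshape checks. */
        return check_reshape(mddev);
}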