flex_size  875   fs/ext4/balloc.c          int flex_size = ext4_flex_bg_size(EXT4_SB(inode->i_sb));
flex_size  880   fs/ext4/balloc.c          if (flex_size >= EXT4_FLEX_SIZE_DIR_ALLOC_SCHEME) {
flex_size  889   fs/ext4/balloc.c          block_group &= ~(flex_size-1);
flex_size  260   fs/ext4/block_validity.c  int flex_size = ext4_flex_bg_size(sbi);
flex_size  278   fs/ext4/block_validity.c  ((i < 5) || ((i % flex_size) == 0)))
flex_size  367   fs/ext4/ialloc.c          int flex_size, struct orlov_stats *stats)
flex_size  371   fs/ext4/ialloc.c          if (flex_size > 1) {
flex_size  429   fs/ext4/ialloc.c          int flex_size = ext4_flex_bg_size(sbi);
flex_size  433   fs/ext4/ialloc.c          if (flex_size > 1) {
flex_size  434   fs/ext4/ialloc.c          ngroups = (real_ngroups + flex_size - 1) >>
flex_size  463   fs/ext4/ialloc.c          get_orlov_stats(sb, g, flex_size, &stats);
flex_size  479   fs/ext4/ialloc.c          if (flex_size == 1) {
flex_size  491   fs/ext4/ialloc.c          grp *= flex_size;
flex_size  492   fs/ext4/ialloc.c          for (i = 0; i < flex_size; i++) {
flex_size  505   fs/ext4/ialloc.c          min_inodes = avefreei - inodes_per_group*flex_size / 4;
flex_size  508   fs/ext4/ialloc.c          min_clusters = avefreec - EXT4_CLUSTERS_PER_GROUP(sb)*flex_size / 4;
flex_size  516   fs/ext4/ialloc.c          if (flex_size > 1)
flex_size  522   fs/ext4/ialloc.c          get_orlov_stats(sb, grp, flex_size, &stats);
flex_size  567   fs/ext4/ialloc.c          int flex_size = ext4_flex_bg_size(EXT4_SB(sb));
flex_size  576   fs/ext4/ialloc.c          if (flex_size > 1) {
flex_size  580   fs/ext4/ialloc.c          parent_group &= ~(flex_size-1);
flex_size  581   fs/ext4/ialloc.c          last = parent_group + flex_size;
flex_size  601   fs/ext4/ialloc.c          *group = parent_group + flex_size;
flex_size  2040  fs/ext4/mballoc.c         int flex_size = ext4_flex_bg_size(EXT4_SB(ac->ac_sb));
flex_size  2071  fs/ext4/mballoc.c         (flex_size >= EXT4_FLEX_SIZE_DIR_ALLOC_SCHEME) &&
flex_size  2072  fs/ext4/mballoc.c         ((group % flex_size) == 0))
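
Several of the rows above use the same idiom: flex_size is a power of two (1 << s_log_groups_per_flex), so masking a block group number with ~(flex_size - 1) rounds it down to the first group of its flex group, as at fs/ext4/balloc.c:889 and fs/ext4/ialloc.c:580. The following is a minimal standalone sketch of that pattern, not kernel code; the values 16 and 37 are made-up examples.

    /*
     * Standalone illustration of the flex-group rounding idiom.
     * flex_size is assumed to be a power of two, matching what
     * ext4_flex_bg_size() returns.
     */
    #include <stdio.h>

    int main(void)
    {
            unsigned int flex_size = 16;    /* assumed flex group size (2^4 groups) */
            unsigned int block_group = 37;  /* arbitrary example group number */

            /* Round down to the first group of the containing flex group. */
            unsigned int flex_start = block_group & ~(flex_size - 1);   /* 32 */
            /* One past the last group of that flex group. */
            unsigned int flex_end = flex_start + flex_size;             /* 48 */

            printf("group %u lies in flex group [%u, %u)\n",
                   block_group, flex_start, flex_end);
            return 0;
    }

The mask only works because flex_size is a power of two; for a non-power-of-two size the equivalent rounding would need block_group - (block_group % flex_size), which is the modulo form visible at fs/ext4/mballoc.c:2072 and fs/ext4/block_validity.c:278.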