alloc_blocks     1200 drivers/usb/storage/sddr09.c 	int numblocks, alloc_len, alloc_blocks;
alloc_blocks     1222 drivers/usb/storage/sddr09.c 	alloc_blocks = min(numblocks, SDDR09_READ_MAP_BUFSZ >> CONTROL_SHIFT);
alloc_blocks     1223 drivers/usb/storage/sddr09.c 	alloc_len = (alloc_blocks << CONTROL_SHIFT);
alloc_blocks     1260 drivers/usb/storage/sddr09.c 				min(alloc_blocks, numblocks - i),
alloc_blocks       50 fs/hfs/btree.c 		if (HFS_I(tree->inode)->alloc_blocks >
alloc_blocks      235 fs/hfs/btree.c 				(loff_t)HFS_I(inode)->alloc_blocks *
alloc_blocks      352 fs/hfs/extent.c 		if (ablock >= HFS_I(inode)->alloc_blocks) {
alloc_blocks      398 fs/hfs/extent.c 	if (HFS_I(inode)->alloc_blocks == HFS_I(inode)->first_blocks)
alloc_blocks      401 fs/hfs/extent.c 		res = hfs_ext_read_extent(inode, HFS_I(inode)->alloc_blocks);
alloc_blocks      415 fs/hfs/extent.c 	if (HFS_I(inode)->alloc_blocks == HFS_I(inode)->first_blocks) {
alloc_blocks      425 fs/hfs/extent.c 					     HFS_I(inode)->alloc_blocks,
alloc_blocks      436 fs/hfs/extent.c 				     HFS_I(inode)->alloc_blocks -
alloc_blocks      449 fs/hfs/extent.c 		HFS_I(inode)->alloc_blocks += len;
alloc_blocks      469 fs/hfs/extent.c 	HFS_I(inode)->cached_start = HFS_I(inode)->alloc_blocks;
alloc_blocks      507 fs/hfs/extent.c 	alloc_cnt = HFS_I(inode)->alloc_blocks;
alloc_blocks      545 fs/hfs/extent.c 	HFS_I(inode)->alloc_blocks = blk_cnt;
alloc_blocks       77 fs/hfs/hfs_fs.h 	u16 alloc_blocks, clump_blocks;
alloc_blocks      224 fs/hfs/inode.c 		HFS_I(inode)->alloc_blocks = 0;
alloc_blocks      281 fs/hfs/inode.c 	HFS_I(inode)->alloc_blocks = be32_to_cpu(phys_size) /
alloc_blocks      414 fs/hfs/inode.c 		*phys_size = cpu_to_be32(HFS_I(inode)->alloc_blocks *
alloc_blocks      361 fs/hfsplus/btree.c 			(loff_t)hip->alloc_blocks <<
alloc_blocks      364 fs/hfsplus/btree.c 			hip->alloc_blocks << HFSPLUS_SB(tree->sb)->fs_shift;
alloc_blocks      244 fs/hfsplus/extents.c 		if (ablock >= hip->alloc_blocks) {
alloc_blocks      458 fs/hfsplus/extents.c 	if (hip->alloc_blocks == hip->first_blocks)
alloc_blocks      461 fs/hfsplus/extents.c 		res = hfsplus_ext_read_extent(inode, hip->alloc_blocks);
alloc_blocks      485 fs/hfsplus/extents.c 	if (hip->alloc_blocks <= hip->first_blocks) {
alloc_blocks      495 fs/hfsplus/extents.c 						 hip->alloc_blocks,
alloc_blocks      506 fs/hfsplus/extents.c 					 hip->alloc_blocks - hip->cached_start,
alloc_blocks      517 fs/hfsplus/extents.c 		hip->alloc_blocks += len;
alloc_blocks      536 fs/hfsplus/extents.c 	hip->cached_start = hip->alloc_blocks;
alloc_blocks      578 fs/hfsplus/extents.c 	alloc_cnt = hip->alloc_blocks;
alloc_blocks      619 fs/hfsplus/extents.c 	hip->alloc_blocks = blk_cnt;
alloc_blocks      216 fs/hfsplus/hfsplus_fs.h 	u32 alloc_blocks;
alloc_blocks      394 fs/hfsplus/inode.c 	hip->alloc_blocks = 0;
alloc_blocks      465 fs/hfsplus/inode.c 	hip->alloc_blocks = be32_to_cpu(fork->total_blocks);
alloc_blocks      485 fs/hfsplus/inode.c 	fork->total_blocks = cpu_to_be32(HFSPLUS_I(inode)->alloc_blocks);
alloc_blocks      193 fs/hfsplus/xattr.c 	while (hip->alloc_blocks < hip->clump_blocks) {
alloc_blocks      200 fs/hfsplus/xattr.c 			(loff_t)hip->alloc_blocks << sbi->alloc_blksz_shift;
alloc_blocks      201 fs/hfsplus/xattr.c 		hip->fs_blocks = hip->alloc_blocks << sbi->fs_shift;
alloc_blocks      318 fs/xfs/xfs_iomap.c 	xfs_fsblock_t alloc_blocks)
alloc_blocks      330 fs/xfs/xfs_iomap.c 	if (dq->q_res_bcount + alloc_blocks < dq->q_prealloc_lo_wmark)
alloc_blocks      408 fs/xfs/xfs_iomap.c 	xfs_fsblock_t		alloc_blocks = 0;
alloc_blocks      444 fs/xfs/xfs_iomap.c 		alloc_blocks = prev.br_blockcount << 1;
alloc_blocks      446 fs/xfs/xfs_iomap.c 		alloc_blocks = XFS_B_TO_FSB(mp, offset);
alloc_blocks      447 fs/xfs/xfs_iomap.c 	if (!alloc_blocks)
alloc_blocks      449 fs/xfs/xfs_iomap.c 	qblocks = alloc_blocks;
alloc_blocks      458 fs/xfs/xfs_iomap.c 	alloc_blocks = XFS_FILEOFF_MIN(roundup_pow_of_two(MAXEXTLEN),
alloc_blocks      459 fs/xfs/xfs_iomap.c 				       alloc_blocks);
alloc_blocks      478 fs/xfs/xfs_iomap.c 	if (xfs_quota_need_throttle(ip, XFS_DQ_USER, alloc_blocks))
alloc_blocks      481 fs/xfs/xfs_iomap.c 	if (xfs_quota_need_throttle(ip, XFS_DQ_GROUP, alloc_blocks))
alloc_blocks      484 fs/xfs/xfs_iomap.c 	if (xfs_quota_need_throttle(ip, XFS_DQ_PROJ, alloc_blocks))
alloc_blocks      495 fs/xfs/xfs_iomap.c 	alloc_blocks = min(alloc_blocks, qblocks);
alloc_blocks      499 fs/xfs/xfs_iomap.c 		alloc_blocks >>= shift;
alloc_blocks      504 fs/xfs/xfs_iomap.c 	if (alloc_blocks)
alloc_blocks      505 fs/xfs/xfs_iomap.c 		alloc_blocks = rounddown_pow_of_two(alloc_blocks);
alloc_blocks      506 fs/xfs/xfs_iomap.c 	if (alloc_blocks > MAXEXTLEN)
alloc_blocks      507 fs/xfs/xfs_iomap.c 		alloc_blocks = MAXEXTLEN;
alloc_blocks      515 fs/xfs/xfs_iomap.c 	while (alloc_blocks && alloc_blocks >= freesp)
alloc_blocks      516 fs/xfs/xfs_iomap.c 		alloc_blocks >>= 4;
alloc_blocks      518 fs/xfs/xfs_iomap.c 	if (alloc_blocks < mp->m_writeio_blocks)
alloc_blocks      519 fs/xfs/xfs_iomap.c 		alloc_blocks = mp->m_writeio_blocks;
alloc_blocks      520 fs/xfs/xfs_iomap.c 	trace_xfs_iomap_prealloc_size(ip, alloc_blocks, shift,
alloc_blocks      522 fs/xfs/xfs_iomap.c 	return alloc_blocks;
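
The fs/xfs/xfs_iomap.c matches above trace the sizing logic of xfs_iomap_prealloc_size: alloc_blocks starts at twice the previous delalloc extent, is capped at MAXEXTLEN, scaled down under quota and free-space pressure, rounded down to a power of two, shrunk while it would not fit in free space, and floored at mp->m_writeio_blocks. A minimal standalone C sketch of that pattern follows; the *_demo names and DEMO_* constants are illustrative stand-ins, not the kernel's API, and the throttle is collapsed into a single shift parameter.

/*
 * Simplified userspace illustration of the speculative-preallocation
 * sizing pattern visible in the fs/xfs/xfs_iomap.c matches above.
 * Names and constants are stand-ins, not the kernel's.
 */
#include <stdint.h>
#include <stdio.h>

#define DEMO_MAXEXTLEN      ((1u << 21) - 1)  /* stand-in for MAXEXTLEN */
#define DEMO_WRITEIO_BLOCKS 64u               /* stand-in for mp->m_writeio_blocks */

/* Round down to the nearest power of two (assumes x != 0). */
static uint64_t rounddown_pow_of_two_demo(uint64_t x)
{
	uint64_t p = 1;

	while (p <= x / 2)
		p <<= 1;
	return p;
}

static uint64_t prealloc_size_demo(uint64_t prev_extent_blocks,
				   unsigned int throttle_shift,
				   uint64_t freesp_blocks)
{
	uint64_t alloc_blocks;

	/* Start from twice the size of the preceding extent ... */
	alloc_blocks = prev_extent_blocks << 1;
	if (!alloc_blocks)
		return DEMO_WRITEIO_BLOCKS;

	/* ... scale down when quota or free space is tight ... */
	if (throttle_shift)
		alloc_blocks >>= throttle_shift;

	/* ... keep the result a power of two, capped at the max extent ... */
	if (alloc_blocks)
		alloc_blocks = rounddown_pow_of_two_demo(alloc_blocks);
	if (alloc_blocks > DEMO_MAXEXTLEN)
		alloc_blocks = DEMO_MAXEXTLEN;

	/* ... shrink further while it would not fit in free space ... */
	while (alloc_blocks && alloc_blocks >= freesp_blocks)
		alloc_blocks >>= 4;

	/* ... but never preallocate less than the write-I/O granularity. */
	if (alloc_blocks < DEMO_WRITEIO_BLOCKS)
		alloc_blocks = DEMO_WRITEIO_BLOCKS;

	return alloc_blocks;
}

int main(void)
{
	/* 4096-block previous extent, throttle shift of 2, 1M free blocks. */
	printf("%llu\n",
	       (unsigned long long)prealloc_size_demo(4096, 2, 1u << 20));
	return 0;
}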