pool_offset       382 drivers/gpu/drm/ttm/ttm_page_alloc.c 	unsigned pool_offset;
pool_offset       390 drivers/gpu/drm/ttm/ttm_page_alloc.c 	pool_offset = ++start_pool % NUM_POOLS;
pool_offset       399 drivers/gpu/drm/ttm/ttm_page_alloc.c 		pool = &_manager->pools[(i + pool_offset)%NUM_POOLS];
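
The three ttm_page_alloc.c hits above belong to the TTM pool shrinker: a static start_pool counter is bumped on every scan, pool_offset selects the starting pool, and the loop visits all NUM_POOLS pools via (i + pool_offset) % NUM_POOLS, so consecutive shrink calls begin at different pools and spread reclaim pressure. A minimal, runnable userspace sketch of that round-robin walk follows; shrink_pool and shrink_scan are illustrative names, not the kernel functions.

/*
 * Sketch of the round-robin pool walk used by the shrinker above.
 * Only the (i + pool_offset) % NUM_POOLS indexing mirrors the kernel code;
 * everything else is a stand-in.
 */
#include <stdio.h>

#define NUM_POOLS 4

static unsigned start_pool;	/* persists across calls, like the kernel's static */

static void shrink_pool(unsigned idx)
{
	printf("shrinking pool %u\n", idx);	/* stand-in for freeing pages */
}

static void shrink_scan(void)
{
	unsigned pool_offset = ++start_pool % NUM_POOLS;
	unsigned i;

	/* Start at a different pool each call so no pool is always hit first. */
	for (i = 0; i < NUM_POOLS; ++i)
		shrink_pool((i + pool_offset) % NUM_POOLS);
}

int main(void)
{
	shrink_scan();	/* starts at pool 1 */
	shrink_scan();	/* starts at pool 2 */
	return 0;
}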
pool_offset      1097 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	unsigned pool_offset;
pool_offset      1109 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	pool_offset = ++start_pool % _manager->npools;
pool_offset      1118 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		if (++idx < pool_offset)
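
ttm_page_alloc_dma.c applies the same idea to pools kept on a list whose count (_manager->npools) is not a compile-time constant: pool_offset is computed the same way, and the scan skips list entries with ++idx < pool_offset until the starting pool is reached. Below is a small sketch of that skip loop, assuming an illustrative struct pool list in place of the kernel's pool manager.

/*
 * Sketch of the list-based variant: skip entries before this scan's
 * round-robin starting point. Structure and names are illustrative.
 */
#include <stdio.h>

struct pool {
	unsigned id;
	struct pool *next;
};

static unsigned start_pool;

static void shrink_scan_list(struct pool *head, unsigned npools)
{
	unsigned pool_offset = ++start_pool % npools;
	unsigned idx = 0;
	struct pool *p;

	for (p = head; p; p = p->next) {
		/* Round-robin: skip pools before this scan's starting point. */
		if (++idx < pool_offset)
			continue;
		printf("shrinking pool %u\n", p->id);
	}
}

int main(void)
{
	struct pool c = { 2, NULL }, b = { 1, &c }, a = { 0, &b };

	shrink_scan_list(&a, 3);	/* shrinks pools 0, 1, 2 */
	shrink_scan_list(&a, 3);	/* skips pool 0, shrinks 1 and 2 */
	return 0;
}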
pool_offset      1628 drivers/md/raid5-cache.c 	sector_t pool_offset;	/* offset of first page in the pool */
pool_offset      1659 drivers/md/raid5-cache.c 	ctx->pool_offset = 0;
pool_offset      1689 drivers/md/raid5-cache.c 	ctx->pool_offset = offset;
pool_offset      1716 drivers/md/raid5-cache.c 	if (offset < ctx->pool_offset ||
pool_offset      1717 drivers/md/raid5-cache.c 	    offset >= ctx->pool_offset + ctx->valid_pages * BLOCK_SECTORS) {
pool_offset      1723 drivers/md/raid5-cache.c 	BUG_ON(offset < ctx->pool_offset ||
pool_offset      1724 drivers/md/raid5-cache.c 	       offset >= ctx->pool_offset + ctx->valid_pages * BLOCK_SECTORS);
pool_offset      1727 drivers/md/raid5-cache.c 	       page_address(ctx->ra_pool[(offset - ctx->pool_offset) >>
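
In raid5-cache.c, pool_offset records the sector of the first page in the log-recovery read-ahead pool (ra_pool). A lookup for a sector offset is a hit only if it falls inside [pool_offset, pool_offset + valid_pages * BLOCK_SECTORS); otherwise the pool is refilled at a new offset, and the BUG_ON plus the (offset - pool_offset) >> BLOCK_SECTOR_SHIFT expression then pick the backing page. The following is a hedged userspace model of that hit test and index calculation; the helper names are illustrative, and BLOCK_SECTORS = 8 assumes 4 KiB pages made of 512-byte sectors.

/*
 * Model of the read-ahead pool lookup shown above: a request inside the
 * pooled window is served from ra_pool, anything else would trigger a
 * refetch. Sizes and helper names are assumptions, not the kernel's API.
 */
#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

#define BLOCK_SECTORS      8	/* one 4 KiB page = 8 x 512-byte sectors */
#define BLOCK_SECTOR_SHIFT 3

struct ra_ctx {
	uint64_t pool_offset;	/* sector of the first page in the pool */
	unsigned valid_pages;	/* pages currently held in the pool */
};

/* True if the page containing 'offset' is already in the read-ahead pool. */
static bool ra_pool_has(const struct ra_ctx *ctx, uint64_t offset)
{
	return offset >= ctx->pool_offset &&
	       offset < ctx->pool_offset + ctx->valid_pages * BLOCK_SECTORS;
}

/* Index of the pooled page that backs sector 'offset'. */
static unsigned ra_pool_index(const struct ra_ctx *ctx, uint64_t offset)
{
	return (unsigned)((offset - ctx->pool_offset) >> BLOCK_SECTOR_SHIFT);
}

int main(void)
{
	struct ra_ctx ctx = { .pool_offset = 1024, .valid_pages = 256 };

	printf("hit=%d index=%u\n", ra_pool_has(&ctx, 1040),
	       ra_pool_index(&ctx, 1040));		/* hit, page 2 */
	printf("hit=%d\n", ra_pool_has(&ctx, 4096));	/* miss, would refetch */
	return 0;
}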