_manager          217 drivers/gpu/drm/ttm/ttm_page_alloc.c static struct ttm_pool_manager *_manager;
_manager          243 drivers/gpu/drm/ttm/ttm_page_alloc.c 	return &_manager->pools[pool_index];
_manager          399 drivers/gpu/drm/ttm/ttm_page_alloc.c 		pool = &_manager->pools[(i + pool_offset)%NUM_POOLS];
_manager          422 drivers/gpu/drm/ttm/ttm_page_alloc.c 		pool = &_manager->pools[i];
_manager          590 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (count < _manager->options.small
_manager          593 drivers/gpu/drm/ttm/ttm_page_alloc.c 		unsigned alloc_size = _manager->options.alloc_size;
_manager          785 drivers/gpu/drm/ttm/ttm_page_alloc.c 		max_size = _manager->options.max_size;
_manager          810 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (pool->npages > _manager->options.max_size) {
_manager          811 drivers/gpu/drm/ttm/ttm_page_alloc.c 		npages = pool->npages - _manager->options.max_size;
_manager          963 drivers/gpu/drm/ttm/ttm_page_alloc.c 	WARN_ON(_manager);
_manager          967 drivers/gpu/drm/ttm/ttm_page_alloc.c 	_manager = kzalloc(sizeof(*_manager), GFP_KERNEL);
_manager          968 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (!_manager)
_manager          971 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_page_pool_init_locked(&_manager->wc_pool, GFP_HIGHUSER, "wc", 0);
_manager          973 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_page_pool_init_locked(&_manager->uc_pool, GFP_HIGHUSER, "uc", 0);
_manager          975 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_page_pool_init_locked(&_manager->wc_pool_dma32,
_manager          978 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_page_pool_init_locked(&_manager->uc_pool_dma32,
_manager          981 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_page_pool_init_locked(&_manager->wc_pool_huge,
_manager          987 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_page_pool_init_locked(&_manager->uc_pool_huge,
_manager          993 drivers/gpu/drm/ttm/ttm_page_alloc.c 	_manager->options.max_size = max_pages;
_manager          994 drivers/gpu/drm/ttm/ttm_page_alloc.c 	_manager->options.small = SMALL_ALLOCATION;
_manager          995 drivers/gpu/drm/ttm/ttm_page_alloc.c 	_manager->options.alloc_size = NUM_PAGES_TO_ALLOC;
_manager          997 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ret = kobject_init_and_add(&_manager->kobj, &ttm_pool_kobj_type,
_manager         1002 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ret = ttm_pool_mm_shrink_init(_manager);
_manager         1008 drivers/gpu/drm/ttm/ttm_page_alloc.c 	kobject_put(&_manager->kobj);
_manager         1009 drivers/gpu/drm/ttm/ttm_page_alloc.c 	_manager = NULL;
_manager         1018 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_pool_mm_shrink_fini(_manager);
_manager         1022 drivers/gpu/drm/ttm/ttm_page_alloc.c 		ttm_page_pool_free(&_manager->pools[i], FREE_ALL_PAGES, true);
_manager         1024 drivers/gpu/drm/ttm/ttm_page_alloc.c 	kobject_put(&_manager->kobj);
_manager         1025 drivers/gpu/drm/ttm/ttm_page_alloc.c 	_manager = NULL;
_manager         1174 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (!_manager) {
_manager         1181 drivers/gpu/drm/ttm/ttm_page_alloc.c 		p = &_manager->pools[i];
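The occurrences above all come from ttm_page_alloc.c, where _manager is a file-scope singleton: it is allocated once in the pool-manager init path, consulted by the allocation, free and shrinker paths, and torn down with kobject_put() before being reset to NULL. The following is a minimal userspace sketch of that lifecycle only; struct pool_manager, pool_manager_init() and pool_manager_fini() are illustrative stand-ins and not the kernel's ttm_pool_manager types or helpers.

/*
 * Minimal sketch of the file-scope singleton lifecycle seen above.
 * All names are illustrative stand-ins, not the kernel API.
 */
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>

#define NUM_POOLS 6

struct pool {
	unsigned npages;
};

struct pool_manager {
	struct {
		unsigned max_size;
		unsigned small;
		unsigned alloc_size;
	} options;
	struct pool pools[NUM_POOLS];
};

/* Mirrors "static struct ttm_pool_manager *_manager" in the listing. */
static struct pool_manager *_manager;

static int pool_manager_init(unsigned max_pages)
{
	assert(!_manager);                        /* kernel code uses WARN_ON(_manager) */

	_manager = calloc(1, sizeof(*_manager));  /* kzalloc() equivalent */
	if (!_manager)
		return -1;

	_manager->options.max_size   = max_pages;
	_manager->options.small      = 16;        /* SMALL_ALLOCATION stand-in */
	_manager->options.alloc_size = 64;        /* NUM_PAGES_TO_ALLOC stand-in */
	return 0;
}

static void pool_manager_fini(void)
{
	/* Drain every pool, then drop the singleton and reset the pointer. */
	for (int i = 0; i < NUM_POOLS; i++)
		_manager->pools[i].npages = 0;

	free(_manager);
	_manager = NULL;
}

int main(void)
{
	if (pool_manager_init(1024))
		return 1;
	printf("max_size=%u small=%u alloc_size=%u\n",
	       _manager->options.max_size,
	       _manager->options.small,
	       _manager->options.alloc_size);
	pool_manager_fini();
	return 0;
}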
_manager          172 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c static struct ttm_pool_manager *_manager;
_manager          506 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_lock(&_manager->lock);
_manager          507 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	list_for_each_entry_reverse(p, &_manager->pools, pools) {
_manager          516 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		_manager->npools--;
_manager          534 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_unlock(&_manager->lock);
_manager          617 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_lock(&_manager->lock);
_manager          619 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	list_add(&sec_pool->pools, &_manager->pools);
_manager          620 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	_manager->npools++;
_manager          623 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_unlock(&_manager->lock);
_manager          790 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	unsigned count = _manager->options.small;
_manager         1065 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		if (pool->npages_free >= (_manager->options.max_size +
_manager         1067 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 			npages = pool->npages_free - _manager->options.max_size;
_manager         1102 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (list_empty(&_manager->pools))
_manager         1105 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (!mutex_trylock(&_manager->lock))
_manager         1107 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (!_manager->npools)
_manager         1109 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	pool_offset = ++start_pool % _manager->npools;
_manager         1110 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	list_for_each_entry(p, &_manager->pools, pools) {
_manager         1130 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_unlock(&_manager->lock);
_manager         1140 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (!mutex_trylock(&_manager->lock))
_manager         1142 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	list_for_each_entry(p, &_manager->pools, pools)
_manager         1144 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_unlock(&_manager->lock);
_manager         1165 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	WARN_ON(_manager);
_manager         1169 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	_manager = kzalloc(sizeof(*_manager), GFP_KERNEL);
_manager         1170 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (!_manager)
_manager         1173 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_init(&_manager->lock);
_manager         1174 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	INIT_LIST_HEAD(&_manager->pools);
_manager         1176 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	_manager->options.max_size = max_pages;
_manager         1177 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	_manager->options.small = SMALL_ALLOCATION;
_manager         1178 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	_manager->options.alloc_size = NUM_PAGES_TO_ALLOC;
_manager         1181 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	ret = kobject_init_and_add(&_manager->kobj, &ttm_pool_kobj_type,
_manager         1186 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	ret = ttm_dma_pool_mm_shrink_init(_manager);
_manager         1192 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	kobject_put(&_manager->kobj);
_manager         1193 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	_manager = NULL;
_manager         1202 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	ttm_dma_pool_mm_shrink_fini(_manager);
_manager         1204 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	list_for_each_entry_safe_reverse(p, t, &_manager->pools, pools) {
_manager         1211 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	kobject_put(&_manager->kobj);
_manager         1212 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	_manager = NULL;
_manager         1220 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (!_manager) {
_manager         1225 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_lock(&_manager->lock);
_manager         1226 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	list_for_each_entry(p, &_manager->pools, pools) {
_manager         1237 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	mutex_unlock(&_manager->lock);
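The second group of occurrences is from ttm_page_alloc_dma.c, where the singleton additionally carries a mutex-protected list of per-device pools (the lock, pools and npools fields): pools are added under the lock (line 617 above), removed in reverse order (line 506), and the shrinker walks the list with mutex_trylock() so it can back off instead of blocking (line 1105). Below is a small sketch of that pattern; dma_pool_manager, pool_entry, manager_add_pool() and manager_shrink() are hypothetical names, not the kernel's ttm_page_alloc_dma.c API.

/*
 * Sketch of a mutex-protected pool list like the DMA pool manager's
 * lock/pools/npools fields. Names are illustrative only.
 */
#include <stdio.h>
#include <stdlib.h>
#include <pthread.h>

struct pool_entry {
	char name[32];
	unsigned npages_free;
	struct pool_entry *next;
};

struct dma_pool_manager {
	pthread_mutex_t lock;
	struct pool_entry *pools;   /* head of the pool list */
	unsigned npools;
};

static struct dma_pool_manager *_manager;

static int manager_add_pool(const char *name)
{
	struct pool_entry *p = calloc(1, sizeof(*p));

	if (!p)
		return -1;
	snprintf(p->name, sizeof(p->name), "%s", name);

	pthread_mutex_lock(&_manager->lock);
	p->next = _manager->pools;          /* insert at list head */
	_manager->pools = p;
	_manager->npools++;
	pthread_mutex_unlock(&_manager->lock);
	return 0;
}

/* Shrinker-style walk: trylock so a reclaim path never blocks here. */
static void manager_shrink(void)
{
	if (pthread_mutex_trylock(&_manager->lock))
		return;                     /* contended, try again later */
	for (struct pool_entry *p = _manager->pools; p; p = p->next)
		p->npages_free /= 2;        /* pretend to release half the pages */
	pthread_mutex_unlock(&_manager->lock);
}

int main(void)
{
	_manager = calloc(1, sizeof(*_manager));
	if (!_manager)
		return 1;
	pthread_mutex_init(&_manager->lock, NULL);

	manager_add_pool("wc");
	manager_add_pool("uc");
	manager_shrink();
	printf("npools=%u\n", _manager->npools);

	/* Teardown: free the pools, then the singleton, then reset the pointer. */
	for (struct pool_entry *p = _manager->pools; p; ) {
		struct pool_entry *next = p->next;
		free(p);
		p = next;
	}
	pthread_mutex_destroy(&_manager->lock);
	free(_manager);
	_manager = NULL;
	return 0;
}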