Lines matching refs: cache
193 struct mb_cache *cache = ce->e_cache; in __mb_cache_entry_forget() local
196 kmem_cache_free(cache->c_entry_cache, ce); in __mb_cache_entry_forget()
197 atomic_dec(&cache->c_entry_count); in __mb_cache_entry_forget()
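
These matches come from the kernel's mbcache code (the two-table mb_cache of older trees, judging by the function names). __mb_cache_entry_forget() is the teardown path: the entry goes back to the cache's slab and the live-entry counter drops. A minimal sketch of that pattern, with both structs cut down to the fields these lines touch and the function name simplified:

#include <linux/slab.h>
#include <linux/atomic.h>

struct mb_cache {
	struct kmem_cache *c_entry_cache;	/* slab the entries come from */
	atomic_t c_entry_count;			/* live entries in this cache */
};

struct mb_cache_entry {
	struct mb_cache *e_cache;		/* owning cache */
};

static void entry_forget(struct mb_cache_entry *ce)
{
	/* e_cache is saved into a local first (line 193): ce is
	 * unusable once it has been freed. */
	struct mb_cache *cache = ce->e_cache;

	kmem_cache_free(cache->c_entry_cache, ce);
	atomic_dec(&cache->c_entry_count);
}
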
293 struct mb_cache *cache; in mb_cache_shrink_count() local
297 list_for_each_entry(cache, &mb_cache_list, c_cache_list) { in mb_cache_shrink_count()
298 mb_debug("cache %s (%d)", cache->c_name, in mb_cache_shrink_count()
299 atomic_read(&cache->c_entry_count)); in mb_cache_shrink_count()
300 count += atomic_read(&cache->c_entry_count); in mb_cache_shrink_count()
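
mb_cache_shrink_count() is the counting half of a memory shrinker: it walks every registered cache and totals the live entries. A sketch assuming a global mb_cache_list list head (consistent with line 297) and eliding the locking the real walk would need:

#include <linux/list.h>
#include <linux/atomic.h>
#include <linux/printk.h>

struct mb_cache {
	const char *c_name;
	atomic_t c_entry_count;
	struct list_head c_cache_list;	/* link on the global list */
};

static LIST_HEAD(mb_cache_list);	/* every registered cache */

static unsigned long shrink_count(void)
{
	struct mb_cache *cache;
	unsigned long count = 0;

	list_for_each_entry(cache, &mb_cache_list, c_cache_list) {
		pr_debug("cache %s (%d)\n", cache->c_name,
			 atomic_read(&cache->c_entry_count));
		count += atomic_read(&cache->c_entry_count);
	}
	return count;
}
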
328 struct mb_cache *cache = NULL; in mb_cache_create() local
338 cache = kmalloc(sizeof(struct mb_cache), GFP_KERNEL); in mb_cache_create()
339 if (!cache) in mb_cache_create()
341 cache->c_name = name; in mb_cache_create()
342 atomic_set(&cache->c_entry_count, 0); in mb_cache_create()
343 cache->c_bucket_bits = bucket_bits; in mb_cache_create()
344 cache->c_block_hash = kmalloc(bucket_count * in mb_cache_create()
346 if (!cache->c_block_hash) in mb_cache_create()
349 INIT_HLIST_BL_HEAD(&cache->c_block_hash[n]); in mb_cache_create()
350 cache->c_index_hash = kmalloc(bucket_count * in mb_cache_create()
352 if (!cache->c_index_hash) in mb_cache_create()
355 INIT_HLIST_BL_HEAD(&cache->c_index_hash[n]); in mb_cache_create()
363 cache->c_entry_cache = mb_cache_kmem_cache; in mb_cache_create()
369 cache->c_max_entries = bucket_count << 4; in mb_cache_create()
372 list_add(&cache->c_cache_list, &mb_cache_list); in mb_cache_create()
374 return cache; in mb_cache_create()
377 kfree(cache->c_index_hash); in mb_cache_create()
380 kfree(cache->c_block_hash); in mb_cache_create()
381 kfree(cache); in mb_cache_create()
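
mb_cache_create() sets up the two hash tables: allocate the descriptor, size each table at bucket_count chain heads, initialise every head, point c_entry_cache at a shared slab, cap the cache at bucket_count << 4 entries, and register it globally; the failure lines 377-381 undo the allocations in reverse order. A compilable sketch of that shape; deriving bucket_count as 1 << bucket_bits is an assumption, and the real function also manages the shared slab's lifetime:

#include <linux/slab.h>
#include <linux/list.h>
#include <linux/list_bl.h>
#include <linux/atomic.h>

struct mb_cache {
	const char *c_name;
	atomic_t c_entry_count;
	int c_bucket_bits;
	int c_max_entries;
	struct hlist_bl_head *c_block_hash, *c_index_hash;
	struct kmem_cache *c_entry_cache;
	struct list_head c_cache_list;
};

static LIST_HEAD(mb_cache_list);		/* all registered caches */
static struct kmem_cache *mb_cache_kmem_cache;	/* shared entry slab (line 363) */

static struct mb_cache *cache_create(const char *name, int bucket_bits)
{
	int n, bucket_count = 1 << bucket_bits;	/* assumed derivation */
	struct mb_cache *cache;

	cache = kmalloc(sizeof(struct mb_cache), GFP_KERNEL);
	if (!cache)
		return NULL;
	cache->c_name = name;
	atomic_set(&cache->c_entry_count, 0);
	cache->c_bucket_bits = bucket_bits;

	cache->c_block_hash = kmalloc(bucket_count *
			sizeof(struct hlist_bl_head), GFP_KERNEL);
	if (!cache->c_block_hash)
		goto fail_cache;
	for (n = 0; n < bucket_count; n++)
		INIT_HLIST_BL_HEAD(&cache->c_block_hash[n]);

	cache->c_index_hash = kmalloc(bucket_count *
			sizeof(struct hlist_bl_head), GFP_KERNEL);
	if (!cache->c_index_hash)
		goto fail_block;
	for (n = 0; n < bucket_count; n++)
		INIT_HLIST_BL_HEAD(&cache->c_index_hash[n]);

	cache->c_entry_cache = mb_cache_kmem_cache;
	cache->c_max_entries = bucket_count << 4;	/* soft cap (line 369) */

	list_add(&cache->c_cache_list, &mb_cache_list);
	return cache;

fail_block:
	kfree(cache->c_block_hash);
fail_cache:
	kfree(cache);
	return NULL;
}
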
452 mb_cache_destroy(struct mb_cache *cache) in mb_cache_destroy() argument
459 if (ce->e_cache == cache) in mb_cache_destroy()
462 list_del(&cache->c_cache_list); in mb_cache_destroy()
478 if (atomic_read(&cache->c_entry_count) > 0) { in mb_cache_destroy()
480 cache->c_name, in mb_cache_destroy()
481 atomic_read(&cache->c_entry_count)); in mb_cache_destroy()
488 kfree(cache->c_index_hash); in mb_cache_destroy()
489 kfree(cache->c_block_hash); in mb_cache_destroy()
490 kfree(cache); in mb_cache_destroy()
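
mb_cache_destroy() is the mirror image: free any entries still owned by this cache (the e_cache == cache test on line 459), unregister it, warn if the entry count never reached zero, then release both tables and the descriptor. A sketch of the tail end, with the entry sweep and locking elided and the warning text paraphrased:

#include <linux/slab.h>
#include <linux/list.h>
#include <linux/list_bl.h>
#include <linux/atomic.h>
#include <linux/printk.h>

struct mb_cache {
	const char *c_name;
	atomic_t c_entry_count;
	struct hlist_bl_head *c_block_hash, *c_index_hash;
	struct list_head c_cache_list;
};

static void cache_destroy(struct mb_cache *cache)
{
	/* ... the real code first sweeps the global LRU and frees
	 * every entry whose e_cache == cache ... */

	list_del(&cache->c_cache_list);		/* unregister from global list */

	/* Entries still counted here were leaked by the caller. */
	if (atomic_read(&cache->c_entry_count) > 0)
		pr_err("mbcache: cache %s left with %d entries\n",
		       cache->c_name, atomic_read(&cache->c_entry_count));

	kfree(cache->c_index_hash);
	kfree(cache->c_block_hash);
	kfree(cache);
}
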
502 mb_cache_entry_alloc(struct mb_cache *cache, gfp_t gfp_flags) in mb_cache_entry_alloc() argument
506 if (atomic_read(&cache->c_entry_count) >= cache->c_max_entries) { in mb_cache_entry_alloc()
514 if (ce->e_cache == cache) { in mb_cache_entry_alloc()
546 ce = kmem_cache_alloc(cache->c_entry_cache, gfp_flags); in mb_cache_entry_alloc()
549 atomic_inc(&cache->c_entry_count); in mb_cache_entry_alloc()
553 ce->e_cache = cache; in mb_cache_entry_alloc()
557 ce->e_block_hash_p = &cache->c_block_hash[0]; in mb_cache_entry_alloc()
558 ce->e_index_hash_p = &cache->c_index_hash[0]; in mb_cache_entry_alloc()
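
mb_cache_entry_alloc() enforces the c_max_entries cap from creation time: a full cache first tries to recycle one of its own least-recently-used entries (the e_cache == cache test on line 514) rather than grow; otherwise a fresh entry comes off the slab and the counter goes up. Lines 557-558 park the new entry's hash pointers on bucket 0 until it is really inserted. A sketch with the recycling path reduced to a comment:

#include <linux/slab.h>
#include <linux/atomic.h>
#include <linux/list_bl.h>

struct mb_cache {
	atomic_t c_entry_count;
	int c_max_entries;
	struct kmem_cache *c_entry_cache;
	struct hlist_bl_head *c_block_hash, *c_index_hash;
};

struct mb_cache_entry {
	struct mb_cache *e_cache;
	struct hlist_bl_head *e_block_hash_p;	/* bucket currently linked on */
	struct hlist_bl_head *e_index_hash_p;	/* bucket currently linked on */
};

static struct mb_cache_entry *entry_alloc(struct mb_cache *cache,
					  gfp_t gfp_flags)
{
	struct mb_cache_entry *ce;

	if (atomic_read(&cache->c_entry_count) >= cache->c_max_entries) {
		/* Full: the real code tries to recycle an LRU entry
		 * with ce->e_cache == cache before allocating anew. */
	}

	ce = kmem_cache_alloc(cache->c_entry_cache, gfp_flags);
	if (!ce)
		return NULL;
	atomic_inc(&cache->c_entry_count);

	ce->e_cache = cache;
	/* Park both hash pointers on bucket 0 until insertion. */
	ce->e_block_hash_p = &cache->c_block_hash[0];
	ce->e_index_hash_p = &cache->c_index_hash[0];
	return ce;
}
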
582 struct mb_cache *cache = ce->e_cache; in mb_cache_entry_insert() local
591 cache->c_bucket_bits); in mb_cache_entry_insert()
592 block_hash_p = &cache->c_block_hash[bucket]; in mb_cache_entry_insert()
609 bucket = hash_long(key, cache->c_bucket_bits); in mb_cache_entry_insert()
610 index_hash_p = &cache->c_index_hash[bucket]; in mb_cache_entry_insert()
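
mb_cache_entry_insert() files the entry into both tables with the same bucket step: hash_long() folds a key down to c_bucket_bits bits and the result indexes the array of chain heads; the block side hashes a key built from bdev and block (its construction spans lines not shown above), the index side hashes the caller's key directly. The shared step, sketched (bucket_for is a name invented here):

#include <linux/hash.h>
#include <linux/list_bl.h>

/* Fold a key to bucket_bits bits and return the matching chain head. */
static struct hlist_bl_head *bucket_for(struct hlist_bl_head *table,
					unsigned long key, int bucket_bits)
{
	return &table[hash_long(key, bucket_bits)];
}

Insertion then takes the chains at bucket_for(cache->c_block_hash, block_key, cache->c_bucket_bits) and bucket_for(cache->c_index_hash, key, cache->c_bucket_bits) and links the entry's two chain nodes into them.
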
661 mb_cache_entry_get(struct mb_cache *cache, struct block_device *bdev, in mb_cache_entry_get() argument
670 cache->c_bucket_bits); in mb_cache_entry_get()
671 block_hash_p = &cache->c_block_hash[bucket]; in mb_cache_entry_get()
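
mb_cache_entry_get() is the block-side lookup: derive one bucket from the (bdev, block) pair and walk only that chain. A sketch with locking and reference counting elided; the key mix inside hash_long() and the member names e_block_list, e_bdev and e_block are assumptions, since those lines are not shown above:

#include <linux/hash.h>
#include <linux/list_bl.h>
#include <linux/types.h>

struct block_device;			/* used only as an opaque pointer */

struct mb_cache {
	int c_bucket_bits;
	struct hlist_bl_head *c_block_hash;
};

struct mb_cache_entry {
	struct hlist_bl_node e_block_list;	/* chain node (assumed name) */
	struct block_device *e_bdev;		/* stored key (assumed name) */
	sector_t e_block;			/* stored key (assumed name) */
};

static struct mb_cache_entry *entry_get(struct mb_cache *cache,
					struct block_device *bdev,
					sector_t block)
{
	/* Assumed key mix: fold bdev and block into one unsigned long. */
	unsigned int bucket = hash_long((unsigned long)bdev +
					(block & 0xffffffff),
					cache->c_bucket_bits);
	struct hlist_bl_head *block_hash_p = &cache->c_block_hash[bucket];
	struct hlist_bl_node *l;
	struct mb_cache_entry *ce;

	hlist_bl_for_each_entry(ce, l, block_hash_p, e_block_list) {
		if (ce->e_bdev == bdev && ce->e_block == block)
			return ce;	/* real code also takes a reference */
	}
	return NULL;
}
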
786 mb_cache_entry_find_first(struct mb_cache *cache, struct block_device *bdev, in mb_cache_entry_find_first() argument
789 unsigned int bucket = hash_long(key, cache->c_bucket_bits); in mb_cache_entry_find_first()
794 index_hash_p = &cache->c_index_hash[bucket]; in mb_cache_entry_find_first()
827 struct mb_cache *cache = prev->e_cache; in mb_cache_entry_find_next() local
828 unsigned int bucket = hash_long(key, cache->c_bucket_bits); in mb_cache_entry_find_next()
833 index_hash_p = &cache->c_index_hash[bucket]; in mb_cache_entry_find_next()
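
The find_first/find_next pair enumerates every entry sharing one index key. Both derive the same bucket via hash_long(key, c_bucket_bits); find_first scans from the chain head, while find_next resumes just past the previous hit, re-deriving the cache from prev->e_cache exactly as line 827 shows. A sketch with assumed member names (e_index_node, e_key, e_bdev) and no locking:

#include <linux/hash.h>
#include <linux/list_bl.h>

struct block_device;

struct mb_cache {
	int c_bucket_bits;
	struct hlist_bl_head *c_index_hash;
};

struct mb_cache_entry {
	struct mb_cache *e_cache;
	struct hlist_bl_node e_index_node;	/* assumed member name */
	struct block_device *e_bdev;		/* assumed member name */
	unsigned int e_key;			/* assumed member name */
};

/* Shared scanner: return the first entry at or after node l that
 * matches (bdev, key). */
static struct mb_cache_entry *
find_in_chain(struct hlist_bl_node *l, struct block_device *bdev,
	      unsigned int key)
{
	for (; l; l = l->next) {
		struct mb_cache_entry *ce =
			hlist_bl_entry(l, struct mb_cache_entry,
				       e_index_node);
		if (ce->e_key == key && ce->e_bdev == bdev)
			return ce;
	}
	return NULL;
}

/* find_first: start from the head of the key's bucket (line 789). */
static struct mb_cache_entry *
find_first(struct mb_cache *cache, struct block_device *bdev,
	   unsigned int key)
{
	unsigned int bucket = hash_long(key, cache->c_bucket_bits);

	return find_in_chain(hlist_bl_first(&cache->c_index_hash[bucket]),
			     bdev, key);
}

/* find_next: resume just past the previous hit (prev). */
static struct mb_cache_entry *
find_next(struct mb_cache_entry *prev, struct block_device *bdev,
	  unsigned int key)
{
	return find_in_chain(prev->e_index_node.next, bdev, key);
}
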