Lines Matching refs:cache
72 struct mlx5_mr_cache *cache = &dev->cache; in order2idx() local
74 if (order < cache->ent[0].order) in order2idx()
77 return order - cache->ent[0].order; in order2idx()
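These cross-reference hits all land in the mlx5 memory-region (MR) cache code. The order2idx() fragments above show how a requested MR order is turned into an index into cache->ent[]. Below is a minimal, self-contained sketch of that mapping; the struct is a stripped-down stand-in for struct mlx5_mr_cache with only the fields visible in the fragments, and the bucket count is a placeholder, not the driver's value.

#define MODEL_CACHE_ENTRIES 16	/* bucket count chosen for this model only */

struct model_cache_ent {
	int order;		/* log2 of the page count served by this bucket */
};

struct model_mr_cache {
	struct model_cache_ent ent[MODEL_CACHE_ENTRIES];
};

/* Map a requested order onto a bucket index: requests smaller than the
 * smallest bucket fall into bucket 0, everything else is offset from
 * ent[0].order, mirroring the "order - cache->ent[0].order" fragment. */
static int order2idx(struct model_mr_cache *cache, int order)
{
	if (order < cache->ent[0].order)
		return 0;
	return order - cache->ent[0].order;
}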
84 struct mlx5_mr_cache *cache = &dev->cache; in reg_mr_callback() local
86 struct mlx5_cache_ent *ent = &cache->ent[c]; in reg_mr_callback()
118 cache->last_add = jiffies; in reg_mr_callback()
136 struct mlx5_mr_cache *cache = &dev->cache; in add_keys() local
137 struct mlx5_cache_ent *ent = &cache->ent[c]; in add_keys()
190 struct mlx5_mr_cache *cache = &dev->cache; in remove_keys() local
191 struct mlx5_cache_ent *ent = &cache->ent[c]; in remove_keys()
343 static int someone_adding(struct mlx5_mr_cache *cache) in someone_adding() argument
348 if (cache->ent[i].cur < cache->ent[i].limit) in someone_adding()
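someone_adding() scans every bucket and reports whether any of them is still below its fill target; the work function uses this to avoid shrinking while another bucket is still being topped up. A sketch of that check follows, again on a stand-in entry struct; cur and limit are the field names visible in the fragment, the rest is assumed.

struct model_fill_ent {
	int cur;	/* MRs currently held in this bucket */
	int limit;	/* desired steady-state fill for this bucket */
};

/* Return 1 if any bucket still holds fewer MRs than its limit,
 * i.e. someone is (or should be) busy adding keys. */
static int someone_adding(const struct model_fill_ent *ent, int nent)
{
	int i;

	for (i = 0; i < nent; i++)
		if (ent[i].cur < ent[i].limit)
			return 1;
	return 0;
}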
358 struct mlx5_mr_cache *cache = &dev->cache; in __cache_work_func() local
362 if (cache->stopped) in __cache_work_func()
365 ent = &dev->cache.ent[i]; in __cache_work_func()
372 queue_delayed_work(cache->wq, &ent->dwork, in __cache_work_func()
377 queue_delayed_work(cache->wq, &ent->dwork, in __cache_work_func()
380 queue_work(cache->wq, &ent->work); in __cache_work_func()
396 if (!need_resched() && !someone_adding(cache) && in __cache_work_func()
397 time_after(jiffies, cache->last_add + 300 * HZ)) { in __cache_work_func()
400 queue_work(cache->wq, &ent->work); in __cache_work_func()
402 queue_delayed_work(cache->wq, &ent->dwork, 300 * HZ); in __cache_work_func()
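The __cache_work_func() fragments outline the maintenance policy: do nothing once cache->stopped is set; if a bucket is short of its target, keep queueing work to add keys, with a delayed retry as back-off; if it holds too many, only trim when the CPU is not needed elsewhere, no bucket is still being filled (someone_adding()), and the last successful addition (cache->last_add, stamped in reg_mr_callback()) is more than 300 seconds old, otherwise re-check after another 300 * HZ. The exact fill/trim thresholds are not visible in these fragments, so the model below simply compares cur against a single limit and returns an action code instead of calling workqueue APIs; all names are illustrative, not the driver's.

enum cache_action {
	CACHE_DO_NOTHING,	/* stopped, or bucket exactly at its target */
	CACHE_ADD_KEYS,		/* under target: queue more mkey creation */
	CACHE_TRIM_KEYS,	/* over target and idle long enough: free some */
	CACHE_RECHECK_LATER,	/* over target but not idle: try again later */
};

struct model_ent_state {
	int cur;			/* MRs currently in the bucket */
	int limit;			/* steady-state target */
	int stopped;			/* cache being torn down */
	int someone_adding;		/* any bucket still below its limit? */
	int need_resched;		/* CPU wanted elsewhere */
	unsigned long now;		/* jiffies stand-in */
	unsigned long last_add;		/* last successful mkey addition */
	unsigned long idle_window;	/* 300 * HZ in the fragments */
};

static enum cache_action cache_work_decision(const struct model_ent_state *s)
{
	if (s->stopped)
		return CACHE_DO_NOTHING;
	if (s->cur < s->limit)
		return CACHE_ADD_KEYS;
	if (s->cur > s->limit) {
		/* the real code uses time_after() on jiffies; plain
		 * subtraction is good enough for this model */
		if (!s->need_resched && !s->someone_adding &&
		    s->now - s->last_add > s->idle_window)
			return CACHE_TRIM_KEYS;
		return CACHE_RECHECK_LATER;
	}
	return CACHE_DO_NOTHING;
}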
425 struct mlx5_mr_cache *cache = &dev->cache; in alloc_cached_mr() local
438 ent = &cache->ent[i]; in alloc_cached_mr()
450 queue_work(cache->wq, &ent->work); in alloc_cached_mr()
455 queue_work(cache->wq, &ent->work); in alloc_cached_mr()
459 cache->ent[c].miss++; in alloc_cached_mr()
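alloc_cached_mr() starts at the bucket computed for the requested order and, judging from the loop over ent = &cache->ent[i], walks upward to larger buckets when the preferred one is empty, kicking the worker with queue_work() along the way and bumping cache->ent[c].miss when nothing could be handed out. A simplified, lock-free model of that search is below; the real function takes the per-entry spinlock and unlinks an actual struct mlx5_ib_mr from ent->head, which this sketch reduces to a counter.

struct model_alloc_ent {
	int cur;	/* MRs available in this bucket */
	int miss;	/* requests this bucket could not serve */
};

/* Try to take an MR from bucket c or any larger bucket; return the
 * bucket index used, or -1 after recording a miss on bucket c. */
static int model_alloc_cached_mr(struct model_alloc_ent *ent, int nent, int c)
{
	int i;

	for (i = c; i < nent; i++) {
		if (ent[i].cur > 0) {
			ent[i].cur--;	/* hand one MR out of this bucket */
			return i;
		}
		/* empty bucket: the real code queues ent->work here so the
		 * background worker refills it */
	}
	ent[c].miss++;		/* nothing available in any bucket */
	return -1;
}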
466 struct mlx5_mr_cache *cache = &dev->cache; in free_cached_mr() local
476 ent = &cache->ent[c]; in free_cached_mr()
485 queue_work(cache->wq, &ent->work); in free_cached_mr()
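free_cached_mr() is the inverse path: the MR goes back onto the bucket it was originally carved from (cache->ent[c]) and the worker is queued when the bucket now needs attention. The exact trim threshold is not visible in these fragments, so the model below simply flags any bucket that has grown past its limit; the real code also relinks the MR onto ent->head under the spinlock.

struct model_free_ent {
	int cur;	/* MRs available in this bucket */
	int limit;	/* steady-state target */
};

/* Return the MR to bucket c and report whether the background worker
 * should be kicked, mirroring queue_work(cache->wq, &ent->work). */
static int model_free_cached_mr(struct model_free_ent *ent, int c)
{
	ent[c].cur++;			/* MR is available again */
	return ent[c].cur > ent[c].limit;
}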
490 struct mlx5_mr_cache *cache = &dev->cache; in clean_keys() local
491 struct mlx5_cache_ent *ent = &cache->ent[c]; in clean_keys()
517 struct mlx5_mr_cache *cache = &dev->cache; in mlx5_mr_cache_debugfs_init() local
524 cache->root = debugfs_create_dir("mr_cache", dev->mdev->priv.dbg_root); in mlx5_mr_cache_debugfs_init()
525 if (!cache->root) in mlx5_mr_cache_debugfs_init()
529 ent = &cache->ent[i]; in mlx5_mr_cache_debugfs_init()
531 ent->dir = debugfs_create_dir(ent->name, cache->root); in mlx5_mr_cache_debugfs_init()
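mlx5_mr_cache_debugfs_init() creates a top-level "mr_cache" directory under the device's debugfs root and one sub-directory per bucket, named after ent->name. A hedged kernel-style sketch of that layout follows; it uses the standard debugfs_create_dir() call, but the surrounding types and the error handling shape are assumptions, not the driver's exact body (which goes on to create per-entry stat files such as the cur and miss counters).

#include <linux/debugfs.h>
#include <linux/errno.h>

/* Illustrative only: dbg_root and the per-entry name/dir fields stand
 * in for the real mlx5 structures. */
struct model_dbg_ent {
	char		name[32];
	struct dentry	*dir;
};

static int model_mr_cache_debugfs_init(struct dentry *dbg_root,
					struct model_dbg_ent *ent, int nent)
{
	struct dentry *root;
	int i;

	root = debugfs_create_dir("mr_cache", dbg_root);
	if (!root)
		return -ENOMEM;

	for (i = 0; i < nent; i++) {
		ent[i].dir = debugfs_create_dir(ent[i].name, root);
		if (!ent[i].dir)
			return -ENOMEM;
	}
	return 0;
}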
564 debugfs_remove_recursive(dev->cache.root); in mlx5_mr_cache_debugfs_cleanup()
576 struct mlx5_mr_cache *cache = &dev->cache; in mlx5_mr_cache_init() local
582 cache->wq = create_singlethread_workqueue("mkey_cache"); in mlx5_mr_cache_init()
583 if (!cache->wq) { in mlx5_mr_cache_init()
590 INIT_LIST_HEAD(&cache->ent[i].head); in mlx5_mr_cache_init()
591 spin_lock_init(&cache->ent[i].lock); in mlx5_mr_cache_init()
593 ent = &cache->ent[i]; in mlx5_mr_cache_init()
607 queue_work(cache->wq, &ent->work); in mlx5_mr_cache_init()
621 dev->cache.stopped = 1; in mlx5_mr_cache_cleanup()
622 flush_workqueue(dev->cache.wq); in mlx5_mr_cache_cleanup()
629 destroy_workqueue(dev->cache.wq); in mlx5_mr_cache_cleanup()
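Taken together, the mlx5_mr_cache_init() / mlx5_mr_cache_cleanup() fragments show the cache lifecycle: a single-threaded "mkey_cache" workqueue is created, each bucket gets its list head and spinlock initialised and its worker queued once to pre-fill, and teardown sets cache->stopped, flushes the workqueue so no worker is still running, cleans the cached keys, and destroys the workqueue. The outline below is a hedged kernel-style sketch of that sequence, reduced to the fields visible in the fragments; it is not the driver's full init, which also sizes each bucket, names it, and wires up debugfs.

#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <linux/list.h>
#include <linux/errno.h>

#define MODEL_CACHE_BUCKETS 16		/* stand-in for the real entry count */

struct model_init_ent {
	struct list_head	head;	/* cached MRs for this bucket */
	spinlock_t		lock;
	struct work_struct	work;	/* immediate top-up/trim work */
	struct delayed_work	dwork;	/* delayed re-check */
};

struct model_init_cache {
	struct model_init_ent	ent[MODEL_CACHE_BUCKETS];
	struct workqueue_struct	*wq;
	int			stopped;
};

static int model_mr_cache_init(struct model_init_cache *cache,
				work_func_t work_fn, work_func_t dwork_fn)
{
	int i;

	cache->wq = create_singlethread_workqueue("mkey_cache");
	if (!cache->wq)
		return -ENOMEM;

	for (i = 0; i < MODEL_CACHE_BUCKETS; i++) {
		INIT_LIST_HEAD(&cache->ent[i].head);
		spin_lock_init(&cache->ent[i].lock);
		INIT_WORK(&cache->ent[i].work, work_fn);
		INIT_DELAYED_WORK(&cache->ent[i].dwork, dwork_fn);
		queue_work(cache->wq, &cache->ent[i].work);	/* pre-fill */
	}
	return 0;
}

static void model_mr_cache_cleanup(struct model_init_cache *cache)
{
	cache->stopped = 1;		/* stop workers from re-arming */
	flush_workqueue(cache->wq);	/* wait for in-flight work */
	/* the real cleanup walks each bucket and frees every cached mkey here */
	destroy_workqueue(cache->wq);
}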