nlru               46 mm/list_lru.c  list_lru_from_memcg_idx(struct list_lru_node *nlru, int idx)
nlru               53 mm/list_lru.c  	memcg_lrus = rcu_dereference_check(nlru->memcg_lrus,
nlru               54 mm/list_lru.c  					   lockdep_is_held(&nlru->lock));
nlru               57 mm/list_lru.c  	return &nlru->lru;
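
The four fragments above come from the CONFIG_MEMCG_KMEM version of list_lru_from_memcg_idx(). The per-cgroup array hanging off nlru->memcg_lrus may be read either under nlru->lock or under RCU, which is why rcu_dereference_check() is paired with lockdep_is_held(&nlru->lock). A condensed sketch of the surrounding function, reconstructed from the quoted lines plus the usual shape of this file in kernels of that vintage (struct list_lru_memcg and its lru[] array are not shown above and may differ by release):

static inline struct list_lru_one *
list_lru_from_memcg_idx(struct list_lru_node *nlru, int idx)
{
	struct list_lru_memcg *memcg_lrus;

	/*
	 * Either nlru->lock or RCU protects the per-cgroup array from
	 * being relocated by memcg_update_list_lru_node().
	 */
	memcg_lrus = rcu_dereference_check(nlru->memcg_lrus,
					   lockdep_is_held(&nlru->lock));
	if (memcg_lrus && idx >= 0)
		return memcg_lrus->lru[idx];	/* per-memcg list */
	return &nlru->lru;			/* node-global fallback */
}
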
nlru               71 mm/list_lru.c  list_lru_from_kmem(struct list_lru_node *nlru, void *ptr,
nlru               74 mm/list_lru.c  	struct list_lru_one *l = &nlru->lru;
nlru               77 mm/list_lru.c  	if (!nlru->memcg_lrus)
nlru               84 mm/list_lru.c  	l = list_lru_from_memcg_idx(nlru, memcg_cache_id(memcg));
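
list_lru_from_kmem() maps a kmem-allocated object to the list it belongs on: if the lru is not memcg aware (nlru->memcg_lrus is NULL) or the object is not charged to a cgroup, the node-global &nlru->lru is used; otherwise the per-memcg list selected via memcg_cache_id() is returned. Roughly as below; mem_cgroup_from_kmem() and the out: label are filled in from memory of kernels of this era (later releases use mem_cgroup_from_obj()) and are not part of the quoted lines:

static inline struct list_lru_one *
list_lru_from_kmem(struct list_lru_node *nlru, void *ptr,
		   struct mem_cgroup **memcg_ptr)
{
	struct list_lru_one *l = &nlru->lru;
	struct mem_cgroup *memcg = NULL;

	if (!nlru->memcg_lrus)			/* lru is not memcg aware */
		goto out;

	memcg = mem_cgroup_from_kmem(ptr);	/* owning cgroup, if any */
	if (!memcg)
		goto out;

	l = list_lru_from_memcg_idx(nlru, memcg_cache_id(memcg));
out:
	if (memcg_ptr)
		*memcg_ptr = memcg;
	return l;
}
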
nlru              110 mm/list_lru.c  list_lru_from_memcg_idx(struct list_lru_node *nlru, int idx)
nlru              112 mm/list_lru.c  	return &nlru->lru;
nlru              116 mm/list_lru.c  list_lru_from_kmem(struct list_lru_node *nlru, void *ptr,
nlru              121 mm/list_lru.c  	return &nlru->lru;
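
The second pair of definitions (file lines 110-121) are the !CONFIG_MEMCG_KMEM stubs: without kmem accounting there is a single list per node, so both helpers simply hand back &nlru->lru. Roughly:

static inline struct list_lru_one *
list_lru_from_memcg_idx(struct list_lru_node *nlru, int idx)
{
	return &nlru->lru;
}

static inline struct list_lru_one *
list_lru_from_kmem(struct list_lru_node *nlru, void *ptr,
		   struct mem_cgroup **memcg_ptr)
{
	if (memcg_ptr)
		*memcg_ptr = NULL;
	return &nlru->lru;
}
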
nlru              128 mm/list_lru.c  	struct list_lru_node *nlru = &lru->node[nid];
nlru              132 mm/list_lru.c  	spin_lock(&nlru->lock);
nlru              134 mm/list_lru.c  		l = list_lru_from_kmem(nlru, item, &memcg);
nlru              140 mm/list_lru.c  		nlru->nr_items++;
nlru              141 mm/list_lru.c  		spin_unlock(&nlru->lock);
nlru              144 mm/list_lru.c  	spin_unlock(&nlru->lock);
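
In list_lru_add() every modification happens under nlru->lock: the item is queued on the list chosen by list_lru_from_kmem(), and the per-node counter nlru->nr_items is bumped only when the item was actually inserted. A condensed sketch; the nid computation is reconstructed from memory and the shrinker-bit update present in newer kernels is omitted:

bool list_lru_add(struct list_lru *lru, struct list_head *item)
{
	int nid = page_to_nid(virt_to_page(item));
	struct list_lru_node *nlru = &lru->node[nid];
	struct mem_cgroup *memcg;
	struct list_lru_one *l;

	spin_lock(&nlru->lock);
	if (list_empty(item)) {
		l = list_lru_from_kmem(nlru, item, &memcg);
		list_add_tail(item, &l->list);
		l->nr_items++;
		nlru->nr_items++;
		spin_unlock(&nlru->lock);
		return true;
	}
	spin_unlock(&nlru->lock);	/* item was already on a list */
	return false;
}
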
nlru              152 mm/list_lru.c  	struct list_lru_node *nlru = &lru->node[nid];
nlru              155 mm/list_lru.c  	spin_lock(&nlru->lock);
nlru              157 mm/list_lru.c  		l = list_lru_from_kmem(nlru, item, NULL);
nlru              160 mm/list_lru.c  		nlru->nr_items--;
nlru              161 mm/list_lru.c  		spin_unlock(&nlru->lock);
nlru              164 mm/list_lru.c  	spin_unlock(&nlru->lock);
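
list_lru_del() is the mirror image: under the same lock the item is unlinked and nlru->nr_items is decremented; the owning memcg is not needed here, hence the NULL third argument to list_lru_from_kmem(). Roughly:

bool list_lru_del(struct list_lru *lru, struct list_head *item)
{
	int nid = page_to_nid(virt_to_page(item));
	struct list_lru_node *nlru = &lru->node[nid];
	struct list_lru_one *l;

	spin_lock(&nlru->lock);
	if (!list_empty(item)) {
		l = list_lru_from_kmem(nlru, item, NULL);
		list_del_init(item);
		l->nr_items--;
		nlru->nr_items--;
		spin_unlock(&nlru->lock);
		return true;
	}
	spin_unlock(&nlru->lock);
	return false;
}
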
nlru              187 mm/list_lru.c  	struct list_lru_node *nlru = &lru->node[nid];
nlru              192 mm/list_lru.c  	l = list_lru_from_memcg_idx(nlru, memcg_cache_id(memcg));
nlru              202 mm/list_lru.c  	struct list_lru_node *nlru;
nlru              204 mm/list_lru.c  	nlru = &lru->node[nid];
nlru              205 mm/list_lru.c  	return nlru->nr_items;
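
The two counting helpers differ in granularity: list_lru_count_one() looks up a single per-memcg list (under RCU, since it does not take nlru->lock) and reads its nr_items, while list_lru_count_node() just returns the node-wide nlru->nr_items that add/del maintain. A sketch of both; the RCU bracketing in count_one is reconstructed from memory and differs slightly between releases:

unsigned long list_lru_count_one(struct list_lru *lru,
				 int nid, struct mem_cgroup *memcg)
{
	struct list_lru_node *nlru = &lru->node[nid];
	struct list_lru_one *l;
	unsigned long count;

	rcu_read_lock();
	l = list_lru_from_memcg_idx(nlru, memcg_cache_id(memcg));
	count = l->nr_items;
	rcu_read_unlock();

	return count;
}

unsigned long list_lru_count_node(struct list_lru *lru, int nid)
{
	struct list_lru_node *nlru;

	nlru = &lru->node[nid];
	return nlru->nr_items;
}
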
nlru              210 mm/list_lru.c  __list_lru_walk_one(struct list_lru_node *nlru, int memcg_idx,
nlru              219 mm/list_lru.c  	l = list_lru_from_memcg_idx(nlru, memcg_idx);
nlru              232 mm/list_lru.c  		ret = isolate(item, l, &nlru->lock, cb_arg);
nlru              235 mm/list_lru.c  			assert_spin_locked(&nlru->lock);
nlru              239 mm/list_lru.c  			nlru->nr_items--;
nlru              258 mm/list_lru.c  			assert_spin_locked(&nlru->lock);
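
__list_lru_walk_one() is the core walker and is entered with nlru->lock already held. The isolate callback is handed &nlru->lock so it may drop and retake the lock; the assert_spin_locked() calls quoted above document that it must hold the lock again when it returns, and every removal decrements nlru->nr_items under that lock. A heavily condensed sketch with the LRU_ROTATE/LRU_SKIP cases elided; the restart label and loop bookkeeping are reconstructed from memory of kernels of this era:

static unsigned long
__list_lru_walk_one(struct list_lru_node *nlru, int memcg_idx,
		    list_lru_walk_cb isolate, void *cb_arg,
		    unsigned long *nr_to_walk)
{
	struct list_lru_one *l;
	struct list_head *item, *n;
	unsigned long isolated = 0;

	l = list_lru_from_memcg_idx(nlru, memcg_idx);
restart:
	list_for_each_safe(item, n, &l->list) {
		enum lru_status ret;

		if (!*nr_to_walk)
			break;
		--*nr_to_walk;

		/* The callback may drop nlru->lock, e.g. to free the object. */
		ret = isolate(item, l, &nlru->lock, cb_arg);
		switch (ret) {
		case LRU_REMOVED_RETRY:
			assert_spin_locked(&nlru->lock);
			/* fall through */
		case LRU_REMOVED:
			isolated++;
			nlru->nr_items--;
			if (ret == LRU_REMOVED_RETRY)
				goto restart;	/* traversal is stale */
			break;
		case LRU_RETRY:
			/* Lock was dropped and retaken: restart the walk. */
			assert_spin_locked(&nlru->lock);
			goto restart;
		default:
			break;
		}
	}
	return isolated;
}
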
nlru              272 mm/list_lru.c  	struct list_lru_node *nlru = &lru->node[nid];
nlru              275 mm/list_lru.c  	spin_lock(&nlru->lock);
nlru              276 mm/list_lru.c  	ret = __list_lru_walk_one(nlru, memcg_cache_id(memcg), isolate, cb_arg,
nlru              278 mm/list_lru.c  	spin_unlock(&nlru->lock);
nlru              288 mm/list_lru.c  	struct list_lru_node *nlru = &lru->node[nid];
nlru              291 mm/list_lru.c  	spin_lock_irq(&nlru->lock);
nlru              292 mm/list_lru.c  	ret = __list_lru_walk_one(nlru, memcg_cache_id(memcg), isolate, cb_arg,
nlru              294 mm/list_lru.c  	spin_unlock_irq(&nlru->lock);
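
list_lru_walk_one() and list_lru_walk_one_irq() are thin wrappers around __list_lru_walk_one() that differ only in how nlru->lock is taken; the _irq variant keeps interrupts disabled across the walk for lrus whose lock may also be taken from IRQ-disabled context. Roughly (the argument layout is reconstructed from the quoted calls):

unsigned long
list_lru_walk_one(struct list_lru *lru, int nid, struct mem_cgroup *memcg,
		  list_lru_walk_cb isolate, void *cb_arg,
		  unsigned long *nr_to_walk)
{
	struct list_lru_node *nlru = &lru->node[nid];
	unsigned long ret;

	spin_lock(&nlru->lock);
	ret = __list_lru_walk_one(nlru, memcg_cache_id(memcg), isolate, cb_arg,
				  nr_to_walk);
	spin_unlock(&nlru->lock);
	return ret;
}

unsigned long
list_lru_walk_one_irq(struct list_lru *lru, int nid, struct mem_cgroup *memcg,
		      list_lru_walk_cb isolate, void *cb_arg,
		      unsigned long *nr_to_walk)
{
	struct list_lru_node *nlru = &lru->node[nid];
	unsigned long ret;

	spin_lock_irq(&nlru->lock);
	ret = __list_lru_walk_one(nlru, memcg_cache_id(memcg), isolate, cb_arg,
				  nr_to_walk);
	spin_unlock_irq(&nlru->lock);
	return ret;
}
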
nlru              309 mm/list_lru.c  			struct list_lru_node *nlru = &lru->node[nid];
nlru              311 mm/list_lru.c  			spin_lock(&nlru->lock);
nlru              312 mm/list_lru.c  			isolated += __list_lru_walk_one(nlru, memcg_idx,
nlru              315 mm/list_lru.c  			spin_unlock(&nlru->lock);
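
list_lru_walk_node() first walks the node-global list and then every per-memcg list on that node, taking and dropping nlru->lock once per memcg index instead of holding it across all cgroups. A condensed sketch; list_lru_memcg_aware() and for_each_memcg_cache_index() come from the surrounding file and headers rather than from the quoted lines:

unsigned long list_lru_walk_node(struct list_lru *lru, int nid,
				 list_lru_walk_cb isolate, void *cb_arg,
				 unsigned long *nr_to_walk)
{
	long isolated = 0;
	int memcg_idx;

	/* Node-global list first, via the locked wrapper above. */
	isolated += list_lru_walk_one(lru, nid, NULL, isolate, cb_arg,
				      nr_to_walk);
	if (*nr_to_walk > 0 && list_lru_memcg_aware(lru)) {
		for_each_memcg_cache_index(memcg_idx) {
			struct list_lru_node *nlru = &lru->node[nid];

			spin_lock(&nlru->lock);
			isolated += __list_lru_walk_one(nlru, memcg_idx,
							isolate, cb_arg,
							nr_to_walk);
			spin_unlock(&nlru->lock);

			if (*nr_to_walk <= 0)
				break;
		}
	}
	return isolated;
}
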
nlru              362 mm/list_lru.c  static int memcg_init_list_lru_node(struct list_lru_node *nlru)
nlru              376 mm/list_lru.c  	RCU_INIT_POINTER(nlru->memcg_lrus, memcg_lrus);
nlru              381 mm/list_lru.c  static void memcg_destroy_list_lru_node(struct list_lru_node *nlru)
nlru              388 mm/list_lru.c  	memcg_lrus = rcu_dereference_protected(nlru->memcg_lrus, true);
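
memcg_init_list_lru_node() and memcg_destroy_list_lru_node() attach and detach the per-node array with the usual RCU pointer primitives: RCU_INIT_POINTER() is enough at init time because nothing can observe the node yet, and rcu_dereference_protected(..., true) is enough at destroy time because no readers can remain. A sketch of the pair; memcg_nr_cache_ids and the __memcg_*_list_lru_node() helpers are filled in from memory of the surrounding file and may differ between releases:

static int memcg_init_list_lru_node(struct list_lru_node *nlru)
{
	struct list_lru_memcg *memcg_lrus;
	int size = memcg_nr_cache_ids;

	memcg_lrus = kvmalloc(sizeof(*memcg_lrus) +
			      size * sizeof(void *), GFP_KERNEL);
	if (!memcg_lrus)
		return -ENOMEM;

	if (__memcg_init_list_lru_node(memcg_lrus, 0, size)) {
		kvfree(memcg_lrus);
		return -ENOMEM;
	}
	/* No concurrent readers yet, so a plain initialization suffices. */
	RCU_INIT_POINTER(nlru->memcg_lrus, memcg_lrus);
	return 0;
}

static void memcg_destroy_list_lru_node(struct list_lru_node *nlru)
{
	struct list_lru_memcg *memcg_lrus;

	/* The lru is being torn down: no readers can remain. */
	memcg_lrus = rcu_dereference_protected(nlru->memcg_lrus, true);
	__memcg_destroy_list_lru_node(memcg_lrus, 0, memcg_nr_cache_ids);
	kvfree(memcg_lrus);
}
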
nlru              401 mm/list_lru.c  static int memcg_update_list_lru_node(struct list_lru_node *nlru,
nlru              408 mm/list_lru.c  	old = rcu_dereference_protected(nlru->memcg_lrus,
nlru              428 mm/list_lru.c  	spin_lock_irq(&nlru->lock);
nlru              429 mm/list_lru.c  	rcu_assign_pointer(nlru->memcg_lrus, new);
nlru              430 mm/list_lru.c  	spin_unlock_irq(&nlru->lock);
nlru              436 mm/list_lru.c  static void memcg_cancel_update_list_lru_node(struct list_lru_node *nlru,
nlru              441 mm/list_lru.c  	memcg_lrus = rcu_dereference_protected(nlru->memcg_lrus,
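
memcg_update_list_lru_node() is the one place the array is relocated when the cgroup id space grows, which is exactly what the rcu_dereference_check() in list_lru_from_memcg_idx() guards against: the new, larger array is filled in first and then published with rcu_assign_pointer() while nlru->lock is held with interrupts off, so lock holders and RCU readers each see a consistent array; memcg_cancel_update_list_lru_node() undoes a partially prepared resize. A condensed sketch in which list_lrus_mutex, the __memcg_*_list_lru_node() helpers and the memcpy of the old slots are reconstructed from memory, and the RCU-deferred freeing of the old array is reduced to a comment:

static int memcg_update_list_lru_node(struct list_lru_node *nlru,
				      int old_size, int new_size)
{
	struct list_lru_memcg *old, *new;

	old = rcu_dereference_protected(nlru->memcg_lrus,
					lockdep_is_held(&list_lrus_mutex));
	new = kvmalloc(sizeof(*new) + new_size * sizeof(void *), GFP_KERNEL);
	if (!new)
		return -ENOMEM;

	if (__memcg_init_list_lru_node(new, old_size, new_size)) {
		kvfree(new);
		return -ENOMEM;
	}

	memcpy(&new->lru, &old->lru, old_size * sizeof(void *));

	/*
	 * Publish the new array while holding nlru->lock so lock holders
	 * never see a half-switched pointer; the IRQ-safe variant is used
	 * because list_lru_{add,del} may run under IRQ-safe locks.
	 */
	spin_lock_irq(&nlru->lock);
	rcu_assign_pointer(nlru->memcg_lrus, new);
	spin_unlock_irq(&nlru->lock);

	/* 'old' is freed only after an RCU grace period. */
	return 0;
}

static void memcg_cancel_update_list_lru_node(struct list_lru_node *nlru,
					      int old_size, int new_size)
{
	struct list_lru_memcg *memcg_lrus;

	memcg_lrus = rcu_dereference_protected(nlru->memcg_lrus,
					       lockdep_is_held(&list_lrus_mutex));
	/* Tear down only the newly added slots; the pointer is unchanged. */
	__memcg_destroy_list_lru_node(memcg_lrus, old_size, new_size);
}
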
nlru              544 mm/list_lru.c  	struct list_lru_node *nlru = &lru->node[nid];
nlru              553 mm/list_lru.c  	spin_lock_irq(&nlru->lock);
nlru              555 mm/list_lru.c  	src = list_lru_from_memcg_idx(nlru, src_idx);
nlru              556 mm/list_lru.c  	dst = list_lru_from_memcg_idx(nlru, dst_idx);
nlru              565 mm/list_lru.c  	spin_unlock_irq(&nlru->lock);
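
Finally, memcg_drain_list_lru_node() runs when a cgroup's objects are reparented: under spin_lock_irq(&nlru->lock) it looks up the source and destination per-memcg lists on the same node, splices the former into the latter and transfers the item count. A condensed sketch; the signature and the dst_idx computation vary between releases and are reconstructed here, and the shrinker-bit update of newer kernels is omitted:

static void memcg_drain_list_lru_node(struct list_lru *lru, int nid,
				      int src_idx, struct mem_cgroup *dst_memcg)
{
	struct list_lru_node *nlru = &lru->node[nid];
	int dst_idx = memcg_cache_id(dst_memcg);
	struct list_lru_one *src, *dst;

	/*
	 * list_lru_{add,del} may be called under an IRQ-safe lock, so the
	 * IRQ-safe lock variant is needed here to avoid deadlock.
	 */
	spin_lock_irq(&nlru->lock);

	src = list_lru_from_memcg_idx(nlru, src_idx);
	dst = list_lru_from_memcg_idx(nlru, dst_idx);

	list_splice_init(&src->list, &dst->list);
	dst->nr_items += src->nr_items;
	src->nr_items = 0;

	spin_unlock_irq(&nlru->lock);
}
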