loc_l              31 kernel/bpf/bpf_lru_list.c static struct list_head *local_free_list(struct bpf_lru_locallist *loc_l)
loc_l              33 kernel/bpf/bpf_lru_list.c 	return &loc_l->lists[LOCAL_FREE_LIST_IDX];
loc_l              36 kernel/bpf/bpf_lru_list.c static struct list_head *local_pending_list(struct bpf_lru_locallist *loc_l)
loc_l              38 kernel/bpf/bpf_lru_list.c 	return &loc_l->lists[LOCAL_PENDING_LIST_IDX];
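
The two accessors above (lines 31-38) are the only way the rest of the file indexes a CPU's local lists. For context, a sketch of the supporting definitions as they read in bpf_lru_list.h and the top of bpf_lru_list.c around this vintage; this is reconstructed from memory, so field order and exact macro spellings may differ:

	/* Each CPU owns a FREE list, a PENDING list, a round-robin steal
	 * cursor, and a lock protecting both lists.
	 */
	struct bpf_lru_locallist {
		struct list_head lists[NR_BPF_LRU_LOCAL_LIST_T];
		u16 next_steal;
		raw_spinlock_t lock ____cacheline_aligned_in_smp;
	};

	/* The local list types share an enum with the global list types,
	 * so they are rebased past BPF_LRU_LOCAL_LIST_T_OFFSET before
	 * indexing lists[].
	 */
	#define LOCAL_LIST_IDX(t)	((t) - BPF_LRU_LOCAL_LIST_T_OFFSET)
	#define LOCAL_FREE_LIST_IDX	LOCAL_LIST_IDX(BPF_LRU_LOCAL_LIST_T_FREE)
	#define LOCAL_PENDING_LIST_IDX	LOCAL_LIST_IDX(BPF_LRU_LOCAL_LIST_T_PENDING)
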
loc_l             291 kernel/bpf/bpf_lru_list.c 			       struct bpf_lru_locallist *loc_l)
loc_l             296 kernel/bpf/bpf_lru_list.c 					 local_pending_list(loc_l), list) {
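
The hits at 291 and 296 are from __local_list_flush(), which drains a CPU's pending list back into the global LRU; it is called with the global list lock already held. A hedged reconstruction of the full function (minor details vary across kernel versions):

	static void __local_list_flush(struct bpf_lru_list *l,
				       struct bpf_lru_locallist *loc_l)
	{
		struct bpf_lru_node *node, *tmp_node;

		/* Walk oldest-first; referenced nodes return to the active
		 * list, untouched ones to the inactive list where the
		 * shrinker can reclaim them.
		 */
		list_for_each_entry_safe_reverse(node, tmp_node,
						 local_pending_list(loc_l), list) {
			if (bpf_lru_node_is_ref(node))
				__bpf_lru_node_move_in(l, node, BPF_LRU_LIST_T_ACTIVE);
			else
				__bpf_lru_node_move_in(l, node,
						       BPF_LRU_LIST_T_INACTIVE);
		}
	}
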
loc_l             319 kernel/bpf/bpf_lru_list.c 					   struct bpf_lru_locallist *loc_l)
loc_l             327 kernel/bpf/bpf_lru_list.c 	__local_list_flush(l, loc_l);
loc_l             333 kernel/bpf/bpf_lru_list.c 		__bpf_lru_node_move_to_free(l, node, local_free_list(loc_l),
loc_l             341 kernel/bpf/bpf_lru_list.c 				      local_free_list(loc_l),
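
Lines 319-341 all fall in bpf_lru_list_pop_free_to_local(): with the local lock held, it takes the global lock, flushes the local pending list, rotates the global lists, then refills the local free list with up to LOCAL_FREE_TARGET nodes, shrinking the inactive list if the global free list alone cannot supply them. Reconstructed from the upstream file; treat it as a sketch rather than an exact copy:

	static void bpf_lru_list_pop_free_to_local(struct bpf_lru *lru,
						   struct bpf_lru_locallist *loc_l)
	{
		struct bpf_lru_list *l = &lru->common_lru.lru_list;
		struct bpf_lru_node *node, *tmp_node;
		unsigned int nfree = 0;

		raw_spin_lock(&l->lock);

		/* Return this CPU's pending nodes to the global lists first. */
		__local_list_flush(l, loc_l);

		__bpf_lru_list_rotate(lru, l);

		/* Refill from the global free list... */
		list_for_each_entry_safe(node, tmp_node, &l->lists[BPF_LRU_LIST_T_FREE],
					 list) {
			__bpf_lru_node_move_to_free(l, node, local_free_list(loc_l),
						    BPF_LRU_LOCAL_LIST_T_FREE);
			if (++nfree == LOCAL_FREE_TARGET)
				break;
		}

		/* ...and shrink the inactive list to cover any shortfall. */
		if (nfree < LOCAL_FREE_TARGET)
			__bpf_lru_list_shrink(lru, l, LOCAL_FREE_TARGET - nfree,
					      local_free_list(loc_l),
					      BPF_LRU_LOCAL_LIST_T_FREE);

		raw_spin_unlock(&l->lock);
	}
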
loc_l             348 kernel/bpf/bpf_lru_list.c 				     struct bpf_lru_locallist *loc_l,
loc_l             357 kernel/bpf/bpf_lru_list.c 	list_add(&node->list, local_pending_list(loc_l));
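
Line 357 ends __local_list_add_pending(), which hands a node to the caller while parking it on the owning CPU's pending list. Reconstruction (the ref-clearing step is a helper in recent kernels and a direct assignment in older ones):

	static void __local_list_add_pending(struct bpf_lru *lru,
					     struct bpf_lru_locallist *loc_l,
					     int cpu,
					     struct bpf_lru_node *node,
					     u32 hash)
	{
		/* Stamp the element's hash and remember which CPU's pending
		 * list holds the node, so bpf_common_lru_push_free() can
		 * find the right lock later.
		 */
		*(u32 *)((void *)node + lru->hash_offset) = hash;
		node->cpu = cpu;
		node->type = BPF_LRU_LOCAL_LIST_T_PENDING;
		bpf_lru_node_clear_ref(node);
		list_add(&node->list, local_pending_list(loc_l));
	}
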
loc_l             361 kernel/bpf/bpf_lru_list.c __local_list_pop_free(struct bpf_lru_locallist *loc_l)
loc_l             365 kernel/bpf/bpf_lru_list.c 	node = list_first_entry_or_null(local_free_list(loc_l),
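
Line 365 is the lookup in __local_list_pop_free(), a plain pop from the head of this CPU's free list; the caller holds loc_l->lock:

	static struct bpf_lru_node *
	__local_list_pop_free(struct bpf_lru_locallist *loc_l)
	{
		struct bpf_lru_node *node;

		/* Take the head of this CPU's free list, if any. */
		node = list_first_entry_or_null(local_free_list(loc_l),
						struct bpf_lru_node,
						list);
		if (node)
			list_del(&node->list);

		return node;
	}
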
loc_l             375 kernel/bpf/bpf_lru_list.c __local_list_pop_pending(struct bpf_lru *lru, struct bpf_lru_locallist *loc_l)
loc_l             382 kernel/bpf/bpf_lru_list.c 	list_for_each_entry_reverse(node, local_pending_list(loc_l),
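
Line 382 is the scan loop of __local_list_pop_pending(), which walks the pending list oldest-first and reclaims a node whose element can still be deleted from the hash table, retrying once with referenced nodes included if nothing else is available. Hedged reconstruction:

	static struct bpf_lru_node *
	__local_list_pop_pending(struct bpf_lru *lru, struct bpf_lru_locallist *loc_l)
	{
		struct bpf_lru_node *node;
		bool force = false;

	ignore_ref:
		/* Get from the tail (i.e. the older element) of the
		 * pending list.
		 */
		list_for_each_entry_reverse(node, local_pending_list(loc_l),
					    list) {
			if ((!bpf_lru_node_is_ref(node) || force) &&
			    lru->del_from_htab(lru->del_arg, node)) {
				list_del(&node->list);
				return node;
			}
		}

		/* Everything was recently referenced: retry, ignoring refs. */
		if (!force) {
			force = true;
			goto ignore_ref;
		}

		return NULL;
	}
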
loc_l             434 kernel/bpf/bpf_lru_list.c 	struct bpf_lru_locallist *loc_l, *steal_loc_l;
loc_l             441 kernel/bpf/bpf_lru_list.c 	loc_l = per_cpu_ptr(clru->local_list, cpu);
loc_l             443 kernel/bpf/bpf_lru_list.c 	raw_spin_lock_irqsave(&loc_l->lock, flags);
loc_l             445 kernel/bpf/bpf_lru_list.c 	node = __local_list_pop_free(loc_l);
loc_l             447 kernel/bpf/bpf_lru_list.c 		bpf_lru_list_pop_free_to_local(lru, loc_l);
loc_l             448 kernel/bpf/bpf_lru_list.c 		node = __local_list_pop_free(loc_l);
loc_l             452 kernel/bpf/bpf_lru_list.c 		__local_list_add_pending(lru, loc_l, cpu, node, hash);
loc_l             454 kernel/bpf/bpf_lru_list.c 	raw_spin_unlock_irqrestore(&loc_l->lock, flags);
loc_l             467 kernel/bpf/bpf_lru_list.c 	first_steal = loc_l->next_steal;
loc_l             483 kernel/bpf/bpf_lru_list.c 	loc_l->next_steal = steal;
loc_l             486 kernel/bpf/bpf_lru_list.c 		raw_spin_lock_irqsave(&loc_l->lock, flags);
loc_l             487 kernel/bpf/bpf_lru_list.c 		__local_list_add_pending(lru, loc_l, cpu, node, hash);
loc_l             488 kernel/bpf/bpf_lru_list.c 		raw_spin_unlock_irqrestore(&loc_l->lock, flags);
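
Lines 434-488 all belong to bpf_common_lru_pop_free(), the allocation path for the shared-LRU flavor: try this CPU's free list, refill it from the global LRU and retry, and as a last resort steal from other CPUs' local lists round-robin starting at loc_l->next_steal. A reconstruction of the whole function; the cpumask iteration helper in particular has changed signature across kernel versions, so take this as era-appropriate rather than current:

	static struct bpf_lru_node *bpf_common_lru_pop_free(struct bpf_lru *lru,
							    u32 hash)
	{
		struct bpf_lru_locallist *loc_l, *steal_loc_l;
		struct bpf_common_lru *clru = &lru->common_lru;
		struct bpf_lru_node *node;
		int steal, first_steal;
		unsigned long flags;
		int cpu = raw_smp_processor_id();

		loc_l = per_cpu_ptr(clru->local_list, cpu);

		raw_spin_lock_irqsave(&loc_l->lock, flags);

		/* Fast path: this CPU's free list, refilled once if empty. */
		node = __local_list_pop_free(loc_l);
		if (!node) {
			bpf_lru_list_pop_free_to_local(lru, loc_l);
			node = __local_list_pop_free(loc_l);
		}

		if (node)
			__local_list_add_pending(lru, loc_l, cpu, node, hash);

		raw_spin_unlock_irqrestore(&loc_l->lock, flags);

		if (node)
			return node;

		/* Slow path: steal from other CPUs' local free/pending
		 * lists in round-robin order, starting where the last
		 * steal left off.
		 */
		first_steal = loc_l->next_steal;
		steal = first_steal;
		do {
			steal_loc_l = per_cpu_ptr(clru->local_list, steal);

			raw_spin_lock_irqsave(&steal_loc_l->lock, flags);

			node = __local_list_pop_free(steal_loc_l);
			if (!node)
				node = __local_list_pop_pending(lru, steal_loc_l);

			raw_spin_unlock_irqrestore(&steal_loc_l->lock, flags);

			steal = cpumask_next_wrap(steal, cpu_possible_mask,
						  first_steal, false);
		} while (!node && steal != first_steal);

		loc_l->next_steal = steal;

		if (node) {
			raw_spin_lock_irqsave(&loc_l->lock, flags);
			__local_list_add_pending(lru, loc_l, cpu, node, hash);
			raw_spin_unlock_irqrestore(&loc_l->lock, flags);
		}

		return node;
	}
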
loc_l             512 kernel/bpf/bpf_lru_list.c 		struct bpf_lru_locallist *loc_l;
loc_l             514 kernel/bpf/bpf_lru_list.c 		loc_l = per_cpu_ptr(lru->common_lru.local_list, node->cpu);
loc_l             516 kernel/bpf/bpf_lru_list.c 		raw_spin_lock_irqsave(&loc_l->lock, flags);
loc_l             519 kernel/bpf/bpf_lru_list.c 			raw_spin_unlock_irqrestore(&loc_l->lock, flags);
loc_l             525 kernel/bpf/bpf_lru_list.c 		list_move(&node->list, local_free_list(loc_l));
loc_l             527 kernel/bpf/bpf_lru_list.c 		raw_spin_unlock_irqrestore(&loc_l->lock, flags);
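
Lines 512-527 are the pending-node branch of bpf_common_lru_push_free(): a freed element that is still on some CPU's pending list is moved straight onto that CPU's free list, under that CPU's lock, using the node->cpu stamp left by __local_list_add_pending(). Reconstruction (ref clearing again shown via the recent helper):

	static void bpf_common_lru_push_free(struct bpf_lru *lru,
					     struct bpf_lru_node *node)
	{
		u8 node_type = READ_ONCE(node->type);
		unsigned long flags;

		if (WARN_ON_ONCE(node_type == BPF_LRU_LIST_T_FREE) ||
		    WARN_ON_ONCE(node_type == BPF_LRU_LOCAL_LIST_T_FREE))
			return;

		if (node_type == BPF_LRU_LOCAL_LIST_T_PENDING) {
			struct bpf_lru_locallist *loc_l;

			/* node->cpu says which CPU's pending list holds the
			 * node; it need not be the current CPU.
			 */
			loc_l = per_cpu_ptr(lru->common_lru.local_list, node->cpu);

			raw_spin_lock_irqsave(&loc_l->lock, flags);

			/* Re-check under the lock: another CPU may have
			 * stolen the node from the pending list meanwhile.
			 */
			if (unlikely(node->type != BPF_LRU_LOCAL_LIST_T_PENDING)) {
				raw_spin_unlock_irqrestore(&loc_l->lock, flags);
				goto check_lru_list;
			}

			node->type = BPF_LRU_LOCAL_LIST_T_FREE;
			bpf_lru_node_clear_ref(node);
			list_move(&node->list, local_free_list(loc_l));

			raw_spin_unlock_irqrestore(&loc_l->lock, flags);
			return;
		}

	check_lru_list:
		bpf_lru_list_push_free(&lru->common_lru.lru_list, node);
	}
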
loc_l             618 kernel/bpf/bpf_lru_list.c static void bpf_lru_locallist_init(struct bpf_lru_locallist *loc_l, int cpu)
loc_l             623 kernel/bpf/bpf_lru_list.c 		INIT_LIST_HEAD(&loc_l->lists[i]);
loc_l             625 kernel/bpf/bpf_lru_list.c 	loc_l->next_steal = cpu;
loc_l             627 kernel/bpf/bpf_lru_list.c 	raw_spin_lock_init(&loc_l->lock);
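
Lines 618-627 make up essentially all of bpf_lru_locallist_init(). The only subtlety is the next_steal seed: each CPU starts its steal rotation at its own id. Filled out:

	static void bpf_lru_locallist_init(struct bpf_lru_locallist *loc_l, int cpu)
	{
		int i;

		for (i = 0; i < NR_BPF_LRU_LOCAL_LIST_T; i++)
			INIT_LIST_HEAD(&loc_l->lists[i]);

		/* Seed the round-robin steal cursor with this CPU's own id
		 * so the first steal attempt starts close to home.
		 */
		loc_l->next_steal = cpu;

		raw_spin_lock_init(&loc_l->lock);
	}
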
loc_l             670 kernel/bpf/bpf_lru_list.c 			struct bpf_lru_locallist *loc_l;
loc_l             672 kernel/bpf/bpf_lru_list.c 			loc_l = per_cpu_ptr(clru->local_list, cpu);
loc_l             673 kernel/bpf/bpf_lru_list.c 			bpf_lru_locallist_init(loc_l, cpu);
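
Lines 670-673 sit in the common-LRU branch of bpf_lru_init(), which allocates the per-CPU local lists and runs the initializer above on each possible CPU. A sketch of the whole function, reconstructed from memory and hedged accordingly:

	int bpf_lru_init(struct bpf_lru *lru, bool percpu, u32 hash_offset,
			 del_from_htab_func del_from_htab, void *del_arg)
	{
		int cpu;

		if (percpu) {
			/* Per-CPU flavor: one full LRU list per CPU, no
			 * locallists and no stealing.
			 */
			lru->percpu_lru = alloc_percpu(struct bpf_lru_list);
			if (!lru->percpu_lru)
				return -ENOMEM;

			for_each_possible_cpu(cpu) {
				struct bpf_lru_list *l;

				l = per_cpu_ptr(lru->percpu_lru, cpu);
				bpf_lru_list_init(l);
			}
			lru->nr_scans = PERCPU_NR_SCANS;
		} else {
			struct bpf_common_lru *clru = &lru->common_lru;

			clru->local_list = alloc_percpu(struct bpf_lru_locallist);
			if (!clru->local_list)
				return -ENOMEM;

			/* The hits above: one locallist per possible CPU,
			 * each seeded with its own id as the steal start.
			 */
			for_each_possible_cpu(cpu) {
				struct bpf_lru_locallist *loc_l;

				loc_l = per_cpu_ptr(clru->local_list, cpu);
				bpf_lru_locallist_init(loc_l, cpu);
			}

			bpf_lru_list_init(&clru->lru_list);
			lru->nr_scans = LOCAL_NR_SCANS;
		}

		lru->percpu = percpu;
		lru->del_from_htab = del_from_htab;
		lru->del_arg = del_arg;
		lru->hash_offset = hash_offset;

		return 0;
	}
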