lists 592 arch/s390/include/asm/kvm_host.h struct list_head lists[FIRQ_LIST_COUNT];

lists 946 arch/s390/kvm/interrupt.c inti = list_first_entry_or_null(&fi->lists[FIRQ_LIST_PFAULT],
lists 953 arch/s390/kvm/interrupt.c if (list_empty(&fi->lists[FIRQ_LIST_PFAULT]))
lists 988 arch/s390/kvm/interrupt.c inti = list_first_entry_or_null(&fi->lists[FIRQ_LIST_VIRTIO],
lists 1003 arch/s390/kvm/interrupt.c if (list_empty(&fi->lists[FIRQ_LIST_VIRTIO]))
lists 1059 arch/s390/kvm/interrupt.c isc_list = &fi->lists[isc];
lists 1578 arch/s390/kvm/interrupt.c struct list_head *isc_list = &fi->lists[FIRQ_LIST_IO_ISC_0 + isc];
lists 1726 arch/s390/kvm/interrupt.c list_add_tail(&inti->list, &fi->lists[FIRQ_LIST_VIRTIO]);
lists 1745 arch/s390/kvm/interrupt.c list_add_tail(&inti->list, &fi->lists[FIRQ_LIST_PFAULT]);
lists 1799 arch/s390/kvm/interrupt.c list = &fi->lists[FIRQ_LIST_IO_ISC_0 + isc];
lists 2089 arch/s390/kvm/interrupt.c clear_irq_list(&fi->lists[i]);
lists 2140 arch/s390/kvm/interrupt.c list_for_each_entry(inti, &fi->lists[i], list) {

lists 2485 arch/s390/kvm/kvm-s390.c INIT_LIST_HEAD(&kvm->arch.float_int.lists[i]);

lists 2924 drivers/clk/clk.c struct hlist_head **lists = (struct hlist_head **)s->private;
lists 2932 drivers/clk/clk.c for (; *lists; lists++)
lists 2933 drivers/clk/clk.c hlist_for_each_entry(c, *lists, child_node)
lists 2983 drivers/clk/clk.c struct hlist_head **lists = (struct hlist_head **)s->private;
lists 2988 drivers/clk/clk.c for (; *lists; lists++) {
lists 2989 drivers/clk/clk.c hlist_for_each_entry(c, *lists, child_node) {
lists 3883 drivers/clk/clk.c struct hlist_head **lists;
lists 3888 drivers/clk/clk.c for (lists = all_lists; *lists; lists++)
lists 3889 drivers/clk/clk.c hlist_for_each_entry(root, *lists, child_node)
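The s390 KVM entries above all revolve around one pattern: a fixed array of struct list_head indexed by an interrupt-type constant, initialized in a loop (kvm-s390.c line 2485) and then used to queue, scan, and drain pending floating interrupts per type. Below is a minimal userspace sketch of that pattern; the stripped-down list implementation stands in for the kernel's <linux/list.h>, and every demo_* name is hypothetical.

#include <stdio.h>

struct list_head { struct list_head *prev, *next; };

static void INIT_LIST_HEAD(struct list_head *h) { h->prev = h->next = h; }

/* Insert n before h, i.e. at the tail of the list headed by h. */
static void list_add_tail(struct list_head *n, struct list_head *h)
{
	n->prev = h->prev;
	n->next = h;
	h->prev->next = n;
	h->prev = n;
}

static int list_empty(const struct list_head *h) { return h->next == h; }

enum demo_list { DEMO_LIST_PFAULT, DEMO_LIST_VIRTIO, DEMO_LIST_COUNT };

struct demo_irq {
	struct list_head list;	/* links the irq into one slot of lists[] */
	enum demo_list type;
};

int main(void)
{
	struct list_head lists[DEMO_LIST_COUNT];
	struct demo_irq irq = { .type = DEMO_LIST_VIRTIO };
	int i;

	/* One head per type, mirroring the INIT_LIST_HEAD() loop above. */
	for (i = 0; i < DEMO_LIST_COUNT; i++)
		INIT_LIST_HEAD(&lists[i]);

	/* Queue the irq on the list selected by its type. */
	list_add_tail(&irq.list, &lists[irq.type]);

	printf("virtio list empty: %d\n", list_empty(&lists[DEMO_LIST_VIRTIO]));
	return 0;
}

The drivers/clk/clk.c entries use a companion idiom: a NULL-terminated array of hlist_head pointers walked with for (; *lists; lists++), so a single debugfs callback can traverse several clock lists in turn.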
lists 37 drivers/gpu/drm/i915/gt/selftest_workarounds.c reference_lists_init(struct drm_i915_private *i915, struct wa_lists *lists)
lists 42 drivers/gpu/drm/i915/gt/selftest_workarounds.c memset(lists, 0, sizeof(*lists));
lists 44 drivers/gpu/drm/i915/gt/selftest_workarounds.c wa_init_start(&lists->gt_wa_list, "GT_REF", "global");
lists 45 drivers/gpu/drm/i915/gt/selftest_workarounds.c gt_init_workarounds(i915, &lists->gt_wa_list);
lists 46 drivers/gpu/drm/i915/gt/selftest_workarounds.c wa_init_finish(&lists->gt_wa_list);
lists 49 drivers/gpu/drm/i915/gt/selftest_workarounds.c struct i915_wa_list *wal = &lists->engine[id].wa_list;
lists 56 drivers/gpu/drm/i915/gt/selftest_workarounds.c &lists->engine[id].ctx_wa_list,
lists 62 drivers/gpu/drm/i915/gt/selftest_workarounds.c reference_lists_fini(struct drm_i915_private *i915, struct wa_lists *lists)
lists 68 drivers/gpu/drm/i915/gt/selftest_workarounds.c intel_wa_list_free(&lists->engine[id].wa_list);
lists 70 drivers/gpu/drm/i915/gt/selftest_workarounds.c intel_wa_list_free(&lists->gt_wa_list);
lists 1084 drivers/gpu/drm/i915/gt/selftest_workarounds.c verify_wa_lists(struct i915_gem_context *ctx, struct wa_lists *lists,
lists 1092 drivers/gpu/drm/i915/gt/selftest_workarounds.c ok &= wa_list_verify(&i915->uncore, &lists->gt_wa_list, str);
lists 1098 drivers/gpu/drm/i915/gt/selftest_workarounds.c &lists->engine[id].wa_list,
lists 1102 drivers/gpu/drm/i915/gt/selftest_workarounds.c &lists->engine[id].ctx_wa_list,
lists 1115 drivers/gpu/drm/i915/gt/selftest_workarounds.c struct wa_lists lists;
lists 1132 drivers/gpu/drm/i915/gt/selftest_workarounds.c reference_lists_init(i915, &lists);
lists 1134 drivers/gpu/drm/i915/gt/selftest_workarounds.c ok = verify_wa_lists(ctx, &lists, "before reset");
lists 1140 drivers/gpu/drm/i915/gt/selftest_workarounds.c ok = verify_wa_lists(ctx, &lists, "after reset");
lists 1145 drivers/gpu/drm/i915/gt/selftest_workarounds.c reference_lists_fini(i915, &lists);
lists 1162 drivers/gpu/drm/i915/gt/selftest_workarounds.c struct wa_lists lists;
lists 1175 drivers/gpu/drm/i915/gt/selftest_workarounds.c reference_lists_init(i915, &lists);
lists 1183 drivers/gpu/drm/i915/gt/selftest_workarounds.c ok = verify_wa_lists(ctx, &lists, "before reset");
lists 1191 drivers/gpu/drm/i915/gt/selftest_workarounds.c ok = verify_wa_lists(ctx, &lists, "after idle reset");
lists 1222 drivers/gpu/drm/i915/gt/selftest_workarounds.c ok = verify_wa_lists(ctx, &lists, "after busy reset");
lists 1230 drivers/gpu/drm/i915/gt/selftest_workarounds.c reference_lists_fini(i915, &lists);

lists 35 drivers/media/platform/vsp1/vsp1_dl.c struct vsp1_dl_header_list lists[8];
lists 571 drivers/media/platform/vsp1/vsp1_dl.c dl->header->lists[0].addr = dl->body0->dma;
lists 752 drivers/media/platform/vsp1/vsp1_dl.c struct vsp1_dl_header_list *hdr = dl->header->lists;
lists 763 drivers/media/platform/vsp1/vsp1_dl.c * sizeof(*dl->header->lists);
lists 771 drivers/media/platform/vsp1/vsp1_dl.c * sizeof(*dl->header->lists);

lists 2101 fs/nilfs2/btree.c struct list_head *lists,
lists 2125 fs/nilfs2/btree.c list_for_each(head, &lists[level]) {
lists 2139 fs/nilfs2/btree.c struct list_head lists[NILFS_BTREE_LEVEL_MAX];
lists 2148 fs/nilfs2/btree.c INIT_LIST_HEAD(&lists[level]);
lists 2159 fs/nilfs2/btree.c lists, bh);
lists 2169 fs/nilfs2/btree.c list_splice_tail(&lists[level], listp);

lists 93 fs/xfs/xfs_mru_cache.c struct list_head *lists; /* Array of lists, one per grp. */
lists 145 fs/xfs/xfs_mru_cache.c lru_list = mru->lists + mru->lru_grp;
lists 171 fs/xfs/xfs_mru_cache.c lru_list = mru->lists + ((mru->lru_grp + grp) % mru->grp_count);
lists 215 fs/xfs/xfs_mru_cache.c list_add_tail(&elem->list_node, mru->lists + grp);
lists 272 fs/xfs/xfs_mru_cache.c ASSERT(mru && mru->lists);
lists 273 fs/xfs/xfs_mru_cache.c if (!mru || !mru->lists)
lists 341 fs/xfs/xfs_mru_cache.c mru->lists = kmem_zalloc(mru->grp_count * sizeof(*mru->lists), 0);
lists 343 fs/xfs/xfs_mru_cache.c if (!mru->lists) {
lists 349 fs/xfs/xfs_mru_cache.c INIT_LIST_HEAD(mru->lists + grp);
lists 366 fs/xfs/xfs_mru_cache.c if (err && mru && mru->lists)
lists 367 fs/xfs/xfs_mru_cache.c kmem_free(mru->lists);
lists 384 fs/xfs/xfs_mru_cache.c if (!mru || !mru->lists)
lists 404 fs/xfs/xfs_mru_cache.c if (!mru || !mru->lists)
lists 409 fs/xfs/xfs_mru_cache.c kmem_free(mru->lists);
lists 426 fs/xfs/xfs_mru_cache.c ASSERT(mru && mru->lists);
lists 427 fs/xfs/xfs_mru_cache.c if (!mru || !mru->lists)
lists 459 fs/xfs/xfs_mru_cache.c ASSERT(mru && mru->lists);
lists 460 fs/xfs/xfs_mru_cache.c if (!mru || !mru->lists)
lists 515 fs/xfs/xfs_mru_cache.c ASSERT(mru && mru->lists);
lists 516 fs/xfs/xfs_mru_cache.c if (!mru || !mru->lists)

lists 51 include/linux/mm_inline.h list_add(&page->lru, &lruvec->lists[lru]);
lists 58 include/linux/mm_inline.h list_add_tail(&page->lru, &lruvec->lists[lru]);

lists 299 include/linux/mmzone.h struct list_head lists[NR_LRU_LISTS];
lists 338 include/linux/mmzone.h struct list_head lists[MIGRATE_PCPTYPES];
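The fs/xfs/xfs_mru_cache.c entries show a second shape of the same idea: one list per time group, with the target group chosen modulo the group count so the array behaves as a ring (xfs_mru_cache.c lines 145 and 171 above). The sketch below isolates just that indexing; demo_mru and its fields are invented names, and a plain counter stands in for the real per-group list_head.

#include <stdio.h>

#define DEMO_GRP_COUNT 4

struct demo_mru {
	int lru_grp;			/* index of the oldest (LRU) group */
	int lists[DEMO_GRP_COUNT];	/* stand-in for the per-group lists */
};

/* Pick the group grp steps ahead of the LRU group, wrapping around. */
static int *demo_grp_list(struct demo_mru *mru, int grp)
{
	return &mru->lists[(mru->lru_grp + grp) % DEMO_GRP_COUNT];
}

int main(void)
{
	struct demo_mru mru = { .lru_grp = 3 };

	/* Two steps ahead of group 3 wraps to index (3 + 2) % 4 == 1. */
	*demo_grp_list(&mru, 2) += 1;
	printf("lists[1] = %d\n", mru.lists[1]);
	return 0;
}

The same array-of-heads layout recurs in include/linux/mmzone.h just above, sized once per LRU type (NR_LRU_LISTS) and once per migratetype (MIGRATE_PCPTYPES).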
lists 33 kernel/bpf/bpf_lru_list.c return &loc_l->lists[LOCAL_FREE_LIST_IDX];
lists 38 kernel/bpf/bpf_lru_list.c return &loc_l->lists[LOCAL_PENDING_LIST_IDX];
lists 93 kernel/bpf/bpf_lru_list.c list_move(&node->list, &l->lists[tgt_type]);
lists 121 kernel/bpf/bpf_lru_list.c list_move(&node->list, &l->lists[tgt_type]);
lists 142 kernel/bpf/bpf_lru_list.c struct list_head *active = &l->lists[BPF_LRU_LIST_T_ACTIVE];
lists 169 kernel/bpf/bpf_lru_list.c struct list_head *inactive = &l->lists[BPF_LRU_LIST_T_INACTIVE];
lists 212 kernel/bpf/bpf_lru_list.c struct list_head *inactive = &l->lists[BPF_LRU_LIST_T_INACTIVE];
lists 272 kernel/bpf/bpf_lru_list.c if (!list_empty(&l->lists[BPF_LRU_LIST_T_INACTIVE]))
lists 273 kernel/bpf/bpf_lru_list.c force_shrink_list = &l->lists[BPF_LRU_LIST_T_INACTIVE];
lists 275 kernel/bpf/bpf_lru_list.c force_shrink_list = &l->lists[BPF_LRU_LIST_T_ACTIVE];
lists 331 kernel/bpf/bpf_lru_list.c list_for_each_entry_safe(node, tmp_node, &l->lists[BPF_LRU_LIST_T_FREE],
lists 414 kernel/bpf/bpf_lru_list.c free_list = &l->lists[BPF_LRU_LIST_T_FREE];
lists 571 kernel/bpf/bpf_lru_list.c list_add(&node->list, &l->lists[BPF_LRU_LIST_T_FREE]);
lists 597 kernel/bpf/bpf_lru_list.c list_add(&node->list, &l->lists[BPF_LRU_LIST_T_FREE]);
lists 623 kernel/bpf/bpf_lru_list.c INIT_LIST_HEAD(&loc_l->lists[i]);
lists 635 kernel/bpf/bpf_lru_list.c INIT_LIST_HEAD(&l->lists[i]);
lists 640 kernel/bpf/bpf_lru_list.c l->next_inactive_rotation = &l->lists[BPF_LRU_LIST_T_INACTIVE];

lists 31 kernel/bpf/bpf_lru_list.h struct list_head lists[NR_BPF_LRU_LIST_T];
lists 40 kernel/bpf/bpf_lru_list.h struct list_head lists[NR_BPF_LRU_LOCAL_LIST_T];

lists 96 mm/mmzone.c INIT_LIST_HEAD(&lruvec->lists[lru]);

lists 1273 mm/page_alloc.c list = &pcp->lists[migratetype];
lists 3053 mm/page_alloc.c list_add(&page->lru, &pcp->lists[migratetype]);
lists 3249 mm/page_alloc.c list = &pcp->lists[migratetype];
lists 6125 mm/page_alloc.c INIT_LIST_HEAD(&pcp->lists[migratetype]);

lists 1688 mm/vmscan.c struct list_head *src = &lruvec->lists[lru];
lists 1904 mm/vmscan.c list_move(&page->lru, &lruvec->lists[lru]);

lists 1664 sound/core/control.c static int _snd_ctl_register_ioctl(snd_kctl_ioctl_func_t fcn, struct list_head *lists)
lists 1673 sound/core/control.c list_add_tail(&pn->list, lists);
lists 1707 sound/core/control.c struct list_head *lists)
lists 1714 sound/core/control.c list_for_each_entry(p, lists, list) {

lists 3912 tools/perf/builtin-trace.c char *sep = NULL, *lists[2] = { NULL, NULL, };
lists 3947 tools/perf/builtin-trace.c if (lists[list]) {
lists 3948 tools/perf/builtin-trace.c sprintf(lists[list] + strlen(lists[list]), ",%s", s);
lists 3950 tools/perf/builtin-trace.c lists[list] = malloc(len);
lists 3951 tools/perf/builtin-trace.c if (lists[list] == NULL)
lists 3953 tools/perf/builtin-trace.c strcpy(lists[list], s);
lists 3963 tools/perf/builtin-trace.c if (lists[1] != NULL) {
lists 3968 tools/perf/builtin-trace.c trace->ev_qualifier = strlist__new(lists[1], &slist_config);
lists 3981 tools/perf/builtin-trace.c if (lists[0]) {
lists 3985 tools/perf/builtin-trace.c err = parse_events_option(&o, lists[0], 0);
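The tools/perf/builtin-trace.c entries at the end use lists[] for strings rather than list heads: tokens are routed into one of two dynamically grown, comma-joined buffers, allocated on first use and appended to afterwards (builtin-trace.c lines 3947-3953 above), with lists[0] later fed to parse_events_option() and lists[1] to strlist__new(). A hedged userspace sketch of that accumulation follows; the is_event() classifier is an assumption made for the demo, not how perf actually splits its argument.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Demo-only rule: anything containing ':' counts as a tracepoint event. */
static int is_event(const char *s)
{
	return strchr(s, ':') != NULL;
}

int main(void)
{
	char *lists[2] = { NULL, NULL };
	const char *tokens[] = { "open", "sched:sched_switch", "close" };
	size_t i;

	for (i = 0; i < sizeof(tokens) / sizeof(tokens[0]); i++) {
		const char *s = tokens[i];
		int list = is_event(s) ? 0 : 1;

		if (lists[list]) {
			/* Grow the buffer and append ",token". */
			size_t len = strlen(lists[list]) + strlen(s) + 2;
			char *tmp = realloc(lists[list], len);

			if (!tmp)
				return 1;
			lists[list] = tmp;
			sprintf(lists[list] + strlen(lists[list]), ",%s", s);
		} else {
			/* First token for this list: allocate and copy. */
			lists[list] = malloc(strlen(s) + 1);
			if (!lists[list])
				return 1;
			strcpy(lists[list], s);
		}
	}

	printf("events:   %s\n", lists[0] ? lists[0] : "(none)");
	printf("syscalls: %s\n", lists[1] ? lists[1] : "(none)");
	free(lists[0]);
	free(lists[1]);
	return 0;
}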