nodemask 316 arch/x86/mm/numa.c static void __init numa_nodemask_from_meminfo(nodemask_t *nodemask,
nodemask 324 arch/x86/mm/numa.c node_set(mi->blk[i].nid, *nodemask);
nodemask 366 drivers/tty/sysrq.c .nodemask = NULL,
nodemask 65 include/linux/cpuset.h int cpuset_nodemask_valid_mems_allowed(nodemask_t *nodemask);
nodemask 152 include/linux/cpuset.h static inline void set_mems_allowed(nodemask_t nodemask)
nodemask 159 include/linux/cpuset.h current->mems_allowed = nodemask;
nodemask 202 include/linux/cpuset.h static inline int cpuset_nodemask_valid_mems_allowed(nodemask_t *nodemask)
nodemask 269 include/linux/cpuset.h static inline void set_mems_allowed(nodemask_t nodemask)
nodemask 491 include/linux/gfp.h nodemask_t *nodemask);
nodemask 151 include/linux/mempolicy.h struct mempolicy **mpol, nodemask_t **nodemask);
nodemask 273 include/linux/mempolicy.h struct mempolicy **mpol, nodemask_t **nodemask)
nodemask 276 include/linux/mempolicy.h *nodemask = NULL;
nodemask 35 include/linux/migrate.h int preferred_nid, nodemask_t *nodemask)
nodemask 43 include/linux/migrate.h preferred_nid, nodemask);
nodemask 54 include/linux/migrate.h preferred_nid, nodemask);
nodemask 1432 include/linux/mm.h extern void show_free_areas(unsigned int flags, nodemask_t *nodemask);
nodemask 2216 include/linux/mm.h extern void show_mem(unsigned int flags, nodemask_t *nodemask);
nodemask 2225 include/linux/mm.h void warn_alloc(gfp_t gfp_mask, nodemask_t *nodemask, const char *fmt, ...);
nodemask 1082 include/linux/mmzone.h #define for_each_zone_zonelist_nodemask(zone, z, zlist, highidx, nodemask) \
nodemask 1083 include/linux/mmzone.h for (z = first_zones_zonelist(zlist, highidx, nodemask), zone = zonelist_zone(z); \
nodemask 1085 include/linux/mmzone.h z = next_zones_zonelist(++z, highidx, nodemask), \
nodemask 1088 include/linux/mmzone.h #define for_next_zone_zonelist_nodemask(zone, z, zlist, highidx, nodemask) \
nodemask 1091 include/linux/mmzone.h z = next_zones_zonelist(++z, highidx, nodemask), \
nodemask 152 include/linux/nodemask.h #define node_isset(node, nodemask) test_bit((node), (nodemask).bits)
nodemask 154 include/linux/nodemask.h #define node_test_and_set(node, nodemask) \
nodemask 155 include/linux/nodemask.h __node_test_and_set((node), &(nodemask))
nodemask 231 include/linux/nodemask.h #define nodes_full(nodemask) __nodes_full(&(nodemask), MAX_NUMNODES)
nodemask 237 include/linux/nodemask.h #define nodes_weight(nodemask) __nodes_weight(&(nodemask), MAX_NUMNODES)
nodemask 34 include/linux/oom.h nodemask_t *nodemask;
nodemask 3359 kernel/cgroup/cpuset.c int cpuset_nodemask_valid_mems_allowed(nodemask_t *nodemask)
nodemask 3361 kernel/cgroup/cpuset.c return nodes_intersects(*nodemask, current->mems_allowed);
nodemask 436 kernel/irq/manage.c const struct cpumask *nodemask = cpumask_of_node(node);
nodemask 439 kernel/irq/manage.c if (cpumask_intersects(&mask, nodemask))
nodemask 440 kernel/irq/manage.c cpumask_and(&mask, &mask, nodemask);
nodemask 2029 kernel/sched/core.c const struct cpumask *nodemask = NULL;
nodemask 2039 kernel/sched/core.c nodemask = cpumask_of_node(nid);
nodemask 2042 kernel/sched/core.c for_each_cpu(dest_cpu, nodemask) {
nodemask 373 kernel/smp.c const struct cpumask *nodemask;
nodemask 382 kernel/smp.c nodemask = cpumask_of_node(cpu_to_node(cpu));
nodemask 383 kernel/smp.c for (cpu = cpumask_first_and(nodemask, mask); cpu < nr_cpu_ids;
nodemask 384 kernel/smp.c cpu = cpumask_next_and(cpu, nodemask, mask)) {
nodemask 11 lib/show_mem.c void show_mem(unsigned int filter, nodemask_t *nodemask)
nodemask 17 lib/show_mem.c show_free_areas(filter, nodemask);
nodemask 2050 mm/compaction.c ac->nodemask) {
nodemask 2359 mm/compaction.c ac->nodemask) {
nodemask 947 mm/hugetlb.c nodemask_t *nodemask;
nodemask 964 mm/hugetlb.c nid = huge_node(vma, address, gfp_mask, &mpol, &nodemask);
nodemask 965 mm/hugetlb.c page = dequeue_huge_page_nodemask(h, gfp_mask, nid, nodemask);
nodemask 1116 mm/hugetlb.c int nid, nodemask_t *nodemask)
nodemask 1126 mm/hugetlb.c for_each_zone_zonelist_nodemask(zone, z, zonelist, gfp_zone(gfp_mask), nodemask) {
nodemask 1158 mm/hugetlb.c int nid, nodemask_t *nodemask)
nodemask 1166 mm/hugetlb.c int nid, nodemask_t *nodemask)
nodemask 1744 mm/hugetlb.c nodemask_t *nodemask;
nodemask 1746 mm/hugetlb.c nid = huge_node(vma, addr, gfp_mask, &mpol, &nodemask);
nodemask 1747 mm/hugetlb.c page = alloc_surplus_huge_page(h, gfp_mask, nid, nodemask);
nodemask 1799 mm/hugetlb.c nodemask_t *nodemask;
nodemask 1805 mm/hugetlb.c node = huge_node(vma, address, gfp_mask, &mpol, &nodemask);
nodemask 1806 mm/hugetlb.c page = alloc_huge_page_nodemask(h, node, nodemask);
nodemask 115 mm/internal.h nodemask_t *nodemask;
nodemask 1593 mm/memcontrol.c .nodemask = NULL,
nodemask 1943 mm/mempolicy.c struct mempolicy **mpol, nodemask_t **nodemask)
nodemask 1948 mm/mempolicy.c *nodemask = NULL; /* assume !MPOL_BIND */
nodemask 1956 mm/mempolicy.c *nodemask = &(*mpol)->v.nodes;
nodemask 90 mm/oom_kill.c const nodemask_t *mask = oc->nodemask;
nodemask 286 mm/oom_kill.c if (oc->nodemask &&
nodemask 287 mm/oom_kill.c !nodes_subset(node_states[N_MEMORY], *oc->nodemask)) {
nodemask 289 mm/oom_kill.c for_each_node_mask(nid, *oc->nodemask)
nodemask 296 mm/oom_kill.c high_zoneidx, oc->nodemask)
nodemask 445 mm/oom_kill.c nodemask_pr_args(oc->nodemask));
nodemask 464 mm/oom_kill.c show_mem(SHOW_MEM_FILTER_NODES, oc->nodemask);
nodemask 1084 mm/oom_kill.c oc->nodemask = NULL;
nodemask 1125 mm/oom_kill.c .nodemask = NULL,
nodemask 2565 mm/page_alloc.c ac->nodemask) {
nodemask 3594 mm/page_alloc.c ac->nodemask) {
nodemask 3726 mm/page_alloc.c static void warn_alloc_show_mem(gfp_t gfp_mask, nodemask_t *nodemask)
nodemask 3742 mm/page_alloc.c show_mem(filter, nodemask);
nodemask 3745 mm/page_alloc.c void warn_alloc(gfp_t gfp_mask, nodemask_t *nodemask, const char *fmt, ...)
nodemask 3759 mm/page_alloc.c nodemask_pr_args(nodemask));
nodemask 3765 mm/page_alloc.c warn_alloc_show_mem(gfp_mask, nodemask);
nodemask 3794 mm/page_alloc.c .nodemask = ac->nodemask,
nodemask 4045 mm/page_alloc.c ac->nodemask) {
nodemask 4123 mm/page_alloc.c ac->nodemask);
nodemask 4174 mm/page_alloc.c ac->nodemask) {
nodemask 4309 mm/page_alloc.c ac->nodemask) {
nodemask 4379 mm/page_alloc.c if (cpusets_enabled() && ac->nodemask &&
nodemask 4380 mm/page_alloc.c !cpuset_nodemask_valid_mems_allowed(ac->nodemask)) {
nodemask 4381 mm/page_alloc.c ac->nodemask = NULL;
nodemask 4442 mm/page_alloc.c ac->high_zoneidx, ac->nodemask);
nodemask 4541 mm/page_alloc.c ac->nodemask = NULL;
nodemask 4543 mm/page_alloc.c ac->high_zoneidx, ac->nodemask);
nodemask 4666 mm/page_alloc.c warn_alloc(gfp_mask, ac->nodemask,
nodemask 4673 mm/page_alloc.c int preferred_nid, nodemask_t *nodemask,
nodemask 4679 mm/page_alloc.c ac->nodemask = nodemask;
nodemask 4684 mm/page_alloc.c if (!ac->nodemask)
nodemask 4685 mm/page_alloc.c ac->nodemask = &cpuset_current_mems_allowed;
nodemask 4716 mm/page_alloc.c ac->high_zoneidx, ac->nodemask);
nodemask 4724 mm/page_alloc.c nodemask_t *nodemask)
nodemask 4742 mm/page_alloc.c if (!prepare_alloc_pages(gfp_mask, order, preferred_nid, nodemask, &ac, &alloc_mask, &alloc_flags))
nodemask 4771 mm/page_alloc.c if (unlikely(ac.nodemask != nodemask))
nodemask 4772 mm/page_alloc.c ac.nodemask = nodemask;
nodemask 5194 mm/page_alloc.c static bool show_mem_node_skip(unsigned int flags, int nid, nodemask_t *nodemask)
nodemask 5204 mm/page_alloc.c if (!nodemask)
nodemask 5205 mm/page_alloc.c nodemask = &cpuset_current_mems_allowed;
nodemask 5207 mm/page_alloc.c return !node_isset(nid, *nodemask);
nodemask 5248 mm/page_alloc.c void show_free_areas(unsigned int filter, nodemask_t *nodemask)
nodemask 5256 mm/page_alloc.c if (show_mem_node_skip(filter, zone_to_nid(zone), nodemask))
nodemask 5290 mm/page_alloc.c if (show_mem_node_skip(filter, pgdat->node_id, nodemask))
nodemask 5341 mm/page_alloc.c if (show_mem_node_skip(filter, zone_to_nid(zone), nodemask))
nodemask 5402 mm/page_alloc.c if (show_mem_node_skip(filter, zone_to_nid(zone), nodemask))
nodemask 74 mm/vmscan.c nodemask_t *nodemask;
nodemask 2974 mm/vmscan.c sc->reclaim_idx, sc->nodemask) {
nodemask 3104 mm/vmscan.c sc->nodemask) {
nodemask 3181 mm/vmscan.c nodemask_t *nodemask)
nodemask 3219 mm/vmscan.c gfp_zone(gfp_mask), nodemask) {
nodemask 3265 mm/vmscan.c gfp_t gfp_mask, nodemask_t *nodemask)
nodemask 3273 mm/vmscan.c .nodemask = nodemask,
nodemask 3293 mm/vmscan.c if (throttle_direct_reclaim(sc.gfp_mask, zonelist, nodemask))
nodemask 342 tools/perf/bench/numa.c unsigned long nodemask;
nodemask 348 tools/perf/bench/numa.c BUG_ON(g->p.nr_nodes > (int)sizeof(nodemask)*8);
nodemask 349 tools/perf/bench/numa.c nodemask = 1L << node;
nodemask 351 tools/perf/bench/numa.c ret = set_mempolicy(MPOL_BIND, &nodemask, sizeof(nodemask)*8);
nodemask 352 tools/perf/bench/numa.c dprintf("binding to node %d, mask: %016lx => %d\n", node, nodemask, ret);
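
Most of the kernel-side hits above operate on the nodemask_t bitmap through the helpers in include/linux/nodemask.h (node_set(), node_isset(), nodes_weight(), for_each_node_mask()), while the tools/perf/bench/numa.c entries show the userspace mirror of the same idea: a plain unsigned long used as a per-node bitmap and handed to set_mempolicy(2). The sketch below is an illustrative userspace reconstruction of that idiom under those entries, not the benchmark's exact code; bind_to_node() is a hypothetical helper name, and only the MPOL_BIND/set_mempolicy usage is taken from the listing.

/*
 * Illustrative sketch of the idiom in tools/perf/bench/numa.c lines
 * 342-352: build a single-bit node mask and bind future allocations of
 * this task to that NUMA node. bind_to_node() is a made-up helper.
 * Link with -lnuma; see set_mempolicy(2).
 */
#include <stdio.h>
#include <numaif.h>	/* set_mempolicy(), MPOL_BIND */

static long bind_to_node(int node)
{
	unsigned long nodemask = 1UL << node;	/* one bit per NUMA node */
	long ret;

	/* maxnode is a count of bits, hence sizeof(nodemask) * 8 */
	ret = set_mempolicy(MPOL_BIND, &nodemask, sizeof(nodemask) * 8);
	printf("binding to node %d, mask: %016lx => %ld\n",
	       node, nodemask, ret);
	return ret;
}

int main(void)
{
	return bind_to_node(0) ? 1 : 0;	/* node 0 exists on any NUMA box */
}

In-kernel code expresses the same mask as a nodemask_t rather than a raw unsigned long, so it can cover more than BITS_PER_LONG nodes; that wider bitmap is what the node_set()/node_isset() macros in the include/linux/nodemask.h entries above manipulate.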