Lines matching refs: nmask (all hits below are in mm/mempolicy.c)

477 nodemask_t *nmask; member (of struct queue_pages)
514 if (node_isset(nid, *qp->nmask) == !!(flags & MPOL_MF_INVERT)) in queue_pages_pte_range()
543 if (node_isset(nid, *qp->nmask) == !!(flags & MPOL_MF_INVERT)) in queue_pages_hugetlb()
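The two hits above implement the page-selection test: a page is skipped when its node's membership in qp->nmask equals the MPOL_MF_INVERT bit, and queued for migration otherwise. Below is a minimal userspace sketch of that logic, with a plain unsigned long standing in for nodemask_t; should_queue() is a made-up helper name, and MPOL_MF_INVERT is a kernel-internal flag redefined here only so the sketch compiles.

#include <stdio.h>

#define MPOL_MF_INVERT	(1 << 5)	/* stand-in for the flag defined privately in mm/mempolicy.c */

/* stand-in for the kernel's node_isset() on a nodemask_t */
static int node_isset(int node, unsigned long mask)
{
	return !!(mask & (1UL << node));
}

/* mirrors: if (node_isset(nid, *qp->nmask) == !!(flags & MPOL_MF_INVERT)) continue; */
static int should_queue(int nid, unsigned long nmask, unsigned long flags)
{
	if (node_isset(nid, nmask) == !!(flags & MPOL_MF_INVERT))
		return 0;	/* membership matches the invert bit: leave the page alone */
	return 1;		/* otherwise: queue the page for migration */
}

int main(void)
{
	unsigned long nmask = 1UL << 1;		/* nodemask = { node 1 } */

	/* migrate_to_node()-style call, no INVERT: pages *on* node 1 get queued */
	printf("%d %d\n", should_queue(1, nmask, 0), should_queue(0, nmask, 0));	/* 1 0 */

	/* do_mbind()-style call with INVERT set: pages *outside* node 1 get queued */
	printf("%d %d\n", should_queue(1, nmask, MPOL_MF_INVERT),
			  should_queue(0, nmask, MPOL_MF_INVERT));			/* 0 1 */
	return 0;
}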
641 .nmask = nodes, in queue_pages_range()
830 static long do_get_mempolicy(int *policy, nodemask_t *nmask, in do_get_mempolicy() argument
847 *nmask = cpuset_current_mems_allowed; in do_get_mempolicy()
903 if (nmask) { in do_get_mempolicy()
905 *nmask = pol->w.user_nodemask; in do_get_mempolicy()
908 get_policy_nodemask(pol, nmask); in do_get_mempolicy()
956 nodemask_t nmask; in migrate_to_node() local
960 nodes_clear(nmask); in migrate_to_node()
961 node_set(source, nmask); in migrate_to_node()
969 queue_pages_range(mm, mm->mmap->vm_start, mm->task_size, &nmask, in migrate_to_node()
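migrate_to_node() above builds a single-node mask with nodes_clear()/node_set() and queues that node's pages for migration; the userspace-visible counterpart is migrate_pages(2), which takes old/new node bitmaps of maxnode bits each. A sketch assuming libnuma's <numaif.h> (link with -lnuma) and that nodes 0 and 1 both exist on the machine:

#include <numaif.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	unsigned long old_nodes = 1UL << 0;	/* move pages currently on node 0 ... */
	unsigned long new_nodes = 1UL << 1;	/* ... over to node 1 */
	unsigned long maxnode = 8 * sizeof(unsigned long);

	long left = migrate_pages(getpid(), maxnode, &old_nodes, &new_nodes);
	if (left < 0)
		perror("migrate_pages");
	else
		printf("pages that could not be moved: %ld\n", left);
	return 0;
}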
1137 nodemask_t *nmask, unsigned long flags) in do_mbind() argument
1164 new = mpol_new(mode, mode_flags, nmask); in do_mbind()
1180 nmask ? nodes_addr(*nmask)[0] : NUMA_NO_NODE); in do_mbind()
1193 err = mpol_set_nodemask(new, nmask, scratch); in do_mbind()
1204 err = queue_pages_range(mm, start, end, nmask, in do_mbind()
1236 static int get_nodes(nodemask_t *nodes, const unsigned long __user *nmask, in get_nodes() argument
1245 if (maxnode == 0 || !nmask) in get_nodes()
1263 if (get_user(t, nmask + k)) in get_nodes()
1275 if (copy_from_user(nodes_addr(*nodes), nmask, nlongs*sizeof(unsigned long))) in get_nodes()
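get_nodes() above copies a caller-supplied bitmap of maxnode bits into a nodemask_t, nlongs unsigned longs at a time, with bit n standing for node n. A small sketch of building such a bitmap on the user side; set_node_bit() is a made-up helper, and the 1024-bit size is only an assumption standing in for the kernel's MAX_NUMNODES.

#include <limits.h>	/* CHAR_BIT */
#include <stdio.h>

#define BITS_PER_ULONG	(CHAR_BIT * sizeof(unsigned long))
#define ASSUMED_MAX_NODES 1024	/* assumption; the real limit is the kernel's MAX_NUMNODES */

/* made-up helper: set bit <node> in the array-of-longs bitmap that get_nodes() parses */
static void set_node_bit(unsigned long *mask, unsigned int node)
{
	mask[node / BITS_PER_ULONG] |= 1UL << (node % BITS_PER_ULONG);
}

int main(void)
{
	unsigned long mask[ASSUMED_MAX_NODES / BITS_PER_ULONG] = { 0 };

	set_node_bit(mask, 0);
	set_node_bit(mask, 2);
	printf("first word: %#lx\n", mask[0]);	/* 0x5: nodes 0 and 2 */
	return 0;
}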
1299 unsigned long, mode, const unsigned long __user *, nmask, in SYSCALL_DEFINE6() argument
1313 err = get_nodes(&nodes, nmask, maxnode); in SYSCALL_DEFINE6()
1320 SYSCALL_DEFINE3(set_mempolicy, int, mode, const unsigned long __user *, nmask, in SYSCALL_DEFINE3() argument
1333 err = get_nodes(&nodes, nmask, maxnode); in SYSCALL_DEFINE3()
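The mbind and set_mempolicy entry points above both funnel their user bitmap through get_nodes(). A sketch of calling them from userspace via libnuma's <numaif.h> (link with -lnuma); binding everything to node 0 is an assumption made for the example.

#include <numaif.h>
#include <stdio.h>
#include <sys/mman.h>

int main(void)
{
	unsigned long nodemask = 1UL << 0;		/* node 0 only (assumption) */
	unsigned long maxnode = 8 * sizeof(nodemask);
	size_t len = 16 * 4096;

	void *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (buf == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	/* mbind(2): per-range policy, serviced by the SYSCALL_DEFINE6 above via do_mbind() */
	if (mbind(buf, len, MPOL_BIND, &nodemask, maxnode, MPOL_MF_STRICT))
		perror("mbind");

	/* set_mempolicy(2): task-wide default policy, serviced by the SYSCALL_DEFINE3 above */
	if (set_mempolicy(MPOL_PREFERRED, &nodemask, maxnode))
		perror("set_mempolicy");

	return 0;
}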
1436 unsigned long __user *, nmask, unsigned long, maxnode, in SYSCALL_DEFINE5() argument
1443 if (nmask != NULL && maxnode < MAX_NUMNODES) in SYSCALL_DEFINE5()
1454 if (nmask) in SYSCALL_DEFINE5()
1455 err = copy_nodes_to_user(nmask, maxnode, &nodes); in SYSCALL_DEFINE5()
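The get_mempolicy path above only copies a nodemask back when the caller's buffer covers at least MAX_NUMNODES bits (the maxnode < MAX_NUMNODES check). A sketch of querying the calling task's policy; the 1024-bit buffer is an assumption meant to cover common CONFIG_NODES_SHIFT settings.

#include <numaif.h>
#include <stdio.h>

#define ASSUMED_MAX_NODES 1024	/* must be >= the kernel's MAX_NUMNODES for the copy-out */

int main(void)
{
	unsigned long nodemask[ASSUMED_MAX_NODES / (8 * sizeof(unsigned long))] = { 0 };
	int mode = -1;

	/* addr = NULL, flags = 0: query the calling task's policy (do_get_mempolicy()) */
	if (get_mempolicy(&mode, nodemask, ASSUMED_MAX_NODES, NULL, 0)) {
		perror("get_mempolicy");
		return 1;
	}
	printf("mode=%d, first nodemask word=%#lx\n", mode, nodemask[0]);
	return 0;
}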
1463 compat_ulong_t __user *, nmask, in COMPAT_SYSCALL_DEFINE5() argument
1475 if (nmask) in COMPAT_SYSCALL_DEFINE5()
1480 if (!err && nmask) { in COMPAT_SYSCALL_DEFINE5()
1485 err |= clear_user(nmask, ALIGN(maxnode-1, 8) / 8); in COMPAT_SYSCALL_DEFINE5()
1486 err |= compat_put_bitmap(nmask, bm, nr_bits); in COMPAT_SYSCALL_DEFINE5()
1492 COMPAT_SYSCALL_DEFINE3(set_mempolicy, int, mode, compat_ulong_t __user *, nmask, in COMPAT_SYSCALL_DEFINE3() argument
1503 if (nmask) { in COMPAT_SYSCALL_DEFINE3()
1504 err = compat_get_bitmap(bm, nmask, nr_bits); in COMPAT_SYSCALL_DEFINE3()
1516 compat_ulong_t, mode, compat_ulong_t __user *, nmask, in COMPAT_SYSCALL_DEFINE6() argument
1527 if (nmask) { in COMPAT_SYSCALL_DEFINE6()
1528 err = compat_get_bitmap(nodes_addr(bm), nmask, nr_bits); in COMPAT_SYSCALL_DEFINE6()
1966 nodemask_t *nmask; in alloc_pages_vma() local
1998 nmask = policy_nodemask(gfp, pol); in alloc_pages_vma()
1999 if (!nmask || node_isset(hpage_node, *nmask)) { in alloc_pages_vma()
2007 nmask = policy_nodemask(gfp, pol); in alloc_pages_vma()
2010 page = __alloc_pages_nodemask(gfp, order, zl, nmask); in alloc_pages_vma()
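alloc_pages_vma() above asks policy_nodemask() for the constraint handed to __alloc_pages_nodemask() at fault time. One way to observe the result from userspace is get_mempolicy(2) with MPOL_F_NODE | MPOL_F_ADDR, which reports the node holding the page at a given address. A sketch assuming <numaif.h> from libnuma and a node-0 binding:

#include <numaif.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

int main(void)
{
	unsigned long nodemask = 1UL << 0;	/* bind to node 0 (assumption) */
	size_t len = 4096;

	void *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (buf == MAP_FAILED) {
		perror("mmap");
		return 1;
	}

	if (mbind(buf, len, MPOL_BIND, &nodemask, 8 * sizeof(nodemask), 0))
		perror("mbind");
	memset(buf, 0, len);			/* fault the page in -> alloc_pages_vma() */

	int node = -1;
	if (get_mempolicy(&node, NULL, 0, buf, MPOL_F_NODE | MPOL_F_ADDR))
		perror("get_mempolicy");
	else
		printf("page at %p is on node %d\n", buf, node);
	return 0;
}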