Lines matching refs: nmask
477 nodemask_t *nmask; member
514 if (node_isset(nid, *qp->nmask) == !!(flags & MPOL_MF_INVERT)) in queue_pages_pte_range()
543 if (node_isset(nid, *qp->nmask) == !!(flags & MPOL_MF_INVERT)) in queue_pages_hugetlb()
644 .nmask = nodes, in queue_pages_range()
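The queue_pages hits above all turn on one predicate: the page's node is tested against the caller-supplied mask, and MPOL_MF_INVERT flips the sense so that pages inside the mask are the ones skipped. A minimal userspace sketch of that skip decision, with a plain 64-bit bitmap and a made-up MF_INVERT flag standing in for nodemask_t and MPOL_MF_INVERT:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MF_INVERT 0x1  /* illustration-only stand-in for MPOL_MF_INVERT */

    /* Mirror of the check in queue_pages_pte_range()/queue_pages_hugetlb():
     * skip the page when "node is in the mask" equals "invert was requested". */
    static bool skip_page(int nid, uint64_t mask, unsigned flags)
    {
        bool in_mask = (mask >> nid) & 1;
        return in_mask == !!(flags & MF_INVERT);
    }

    int main(void)
    {
        uint64_t mask = 1ULL << 1;                      /* only node 1 set */
        printf("%d\n", skip_page(0, mask, 0));          /* 1: node 0 not in mask, skip */
        printf("%d\n", skip_page(1, mask, 0));          /* 0: node 1 in mask, queue it */
        printf("%d\n", skip_page(1, mask, MF_INVERT));  /* 1: inverted, skip */
        return 0;
    }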
833 static long do_get_mempolicy(int *policy, nodemask_t *nmask, in do_get_mempolicy() argument
850 *nmask = cpuset_current_mems_allowed; in do_get_mempolicy()
906 if (nmask) { in do_get_mempolicy()
908 *nmask = pol->w.user_nodemask; in do_get_mempolicy()
911 get_policy_nodemask(pol, nmask); in do_get_mempolicy()
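do_get_mempolicy() is the kernel half of get_mempolicy(2): MPOL_F_MEMS_ALLOWED reports the cpuset-allowed nodes, otherwise the policy's nodemask is copied out through *nmask. A small userspace sketch of both queries (link with -lnuma for the numaif.h wrappers; the buffer size is an assumption that it covers MAX_NUMNODES on the running kernel, which the maxnode check above insists on):

    #include <numaif.h>
    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        unsigned long mask[16];   /* 16 * 64 = 1024 bits, >= MAX_NUMNODES on common configs */
        int mode;

        memset(mask, 0, sizeof(mask));
        /* Which nodes is this task allowed to use (cpuset mems)? */
        if (get_mempolicy(&mode, mask, 8 * sizeof(mask), NULL, MPOL_F_MEMS_ALLOWED) == 0)
            printf("mems_allowed word0: 0x%lx\n", mask[0]);

        /* What is the task's current policy and its nodemask? */
        if (get_mempolicy(&mode, mask, 8 * sizeof(mask), NULL, 0) == 0)
            printf("mode=%d nodemask word0: 0x%lx\n", mode, mask[0]);
        return 0;
    }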
959 nodemask_t nmask; in migrate_to_node() local
963 nodes_clear(nmask); in migrate_to_node()
964 node_set(source, nmask); in migrate_to_node()
972 queue_pages_range(mm, mm->mmap->vm_start, mm->task_size, &nmask, in migrate_to_node()
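migrate_to_node() clears a local nodemask, sets only the source node, and queues the whole address space (mm->mmap->vm_start up to mm->task_size); it is the per-source-node worker used when servicing migrate_pages(2). A hedged userspace counterpart that asks the kernel to drain node 0 into node 1 for the calling process (the node numbers are assumptions about the test box; link with -lnuma):

    #include <numaif.h>
    #include <stdio.h>

    int main(void)
    {
        unsigned long old_nodes[16] = { 0 }, new_nodes[16] = { 0 };
        unsigned long maxnode = 8 * sizeof(old_nodes);

        old_nodes[0] = 1UL << 0;   /* source: node 0 */
        new_nodes[0] = 1UL << 1;   /* destination: node 1 */

        /* pid 0 means "the calling process" */
        if (migrate_pages(0, maxnode, old_nodes, new_nodes) < 0)
            perror("migrate_pages");
        return 0;
    }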
1140 nodemask_t *nmask, unsigned long flags) in do_mbind() argument
1167 new = mpol_new(mode, mode_flags, nmask); in do_mbind()
1183 nmask ? nodes_addr(*nmask)[0] : NUMA_NO_NODE); in do_mbind()
1196 err = mpol_set_nodemask(new, nmask, scratch); in do_mbind()
1207 err = queue_pages_range(mm, start, end, nmask, in do_mbind()
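do_mbind() converts the user nmask into a mempolicy via mpol_new()/mpol_set_nodemask() and then walks the range with queue_pages_range() so already-present pages can be migrated when MPOL_MF_MOVE is requested. A minimal userspace sketch binding an anonymous mapping to node 0 (node choice and sizes are assumptions; link with -lnuma):

    #include <numaif.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>

    int main(void)
    {
        size_t len = 4 * 1024 * 1024;
        void *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        unsigned long nodes[16] = { 0 };

        if (buf == MAP_FAILED)
            return 1;

        nodes[0] = 1UL << 0;   /* nodemask with only node 0 set */
        /* Bind the range to node 0 and migrate any already-present pages. */
        if (mbind(buf, len, MPOL_BIND, nodes, 8 * sizeof(nodes),
                  MPOL_MF_MOVE | MPOL_MF_STRICT) != 0)
            perror("mbind");

        memset(buf, 0, len);   /* fault the pages in under the new policy */
        return 0;
    }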
1239 static int get_nodes(nodemask_t *nodes, const unsigned long __user *nmask, in get_nodes() argument
1248 if (maxnode == 0 || !nmask) in get_nodes()
1266 if (get_user(t, nmask + k)) in get_nodes()
1278 if (copy_from_user(nodes_addr(*nodes), nmask, nlongs*sizeof(unsigned long))) in get_nodes()
1302 unsigned long, mode, const unsigned long __user *, nmask, in SYSCALL_DEFINE6() argument
1316 err = get_nodes(&nodes, nmask, maxnode); in SYSCALL_DEFINE6()
1323 SYSCALL_DEFINE3(set_mempolicy, int, mode, const unsigned long __user *, nmask, in SYSCALL_DEFINE3() argument
1336 err = get_nodes(&nodes, nmask, maxnode); in SYSCALL_DEFINE3()
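get_nodes() is the shared helper that copies the user bitmap behind nmask into a nodemask_t, rejecting masks with bits set past the last node the kernel can represent; both mbind(2) and set_mempolicy(2) above funnel through it. From userspace the mask is just an array of unsigned long with one bit per node, as in this sketch that interleaves over nodes 0 and 1 (node_set_bit is a local helper for illustration, and the nodes are assumed to exist; link with -lnuma):

    #include <numaif.h>
    #include <stdio.h>

    /* Set bit 'node' in a bitmap laid out the way get_nodes() expects:
     * node N lives in word N / BITS_PER_LONG, bit N % BITS_PER_LONG. */
    static void node_set_bit(unsigned long *mask, int node)
    {
        mask[node / (8 * sizeof(unsigned long))] |=
            1UL << (node % (8 * sizeof(unsigned long)));
    }

    int main(void)
    {
        unsigned long nodes[16] = { 0 };

        node_set_bit(nodes, 0);
        node_set_bit(nodes, 1);

        if (set_mempolicy(MPOL_INTERLEAVE, nodes, 8 * sizeof(nodes)) != 0)
            perror("set_mempolicy");
        return 0;
    }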
1439 unsigned long __user *, nmask, unsigned long, maxnode, in SYSCALL_DEFINE5() argument
1446 if (nmask != NULL && maxnode < MAX_NUMNODES) in SYSCALL_DEFINE5()
1457 if (nmask) in SYSCALL_DEFINE5()
1458 err = copy_nodes_to_user(nmask, maxnode, &nodes); in SYSCALL_DEFINE5()
1466 compat_ulong_t __user *, nmask, in COMPAT_SYSCALL_DEFINE5() argument
1478 if (nmask) in COMPAT_SYSCALL_DEFINE5()
1483 if (!err && nmask) { in COMPAT_SYSCALL_DEFINE5()
1488 err |= clear_user(nmask, ALIGN(maxnode-1, 8) / 8); in COMPAT_SYSCALL_DEFINE5()
1489 err |= compat_put_bitmap(nmask, bm, nr_bits); in COMPAT_SYSCALL_DEFINE5()
1495 COMPAT_SYSCALL_DEFINE3(set_mempolicy, int, mode, compat_ulong_t __user *, nmask, in COMPAT_SYSCALL_DEFINE3() argument
1506 if (nmask) { in COMPAT_SYSCALL_DEFINE3()
1507 err = compat_get_bitmap(bm, nmask, nr_bits); in COMPAT_SYSCALL_DEFINE3()
1519 compat_ulong_t, mode, compat_ulong_t __user *, nmask, in COMPAT_SYSCALL_DEFINE6() argument
1530 if (nmask) { in COMPAT_SYSCALL_DEFINE6()
1531 err = compat_get_bitmap(nodes_addr(bm), nmask, nr_bits); in COMPAT_SYSCALL_DEFINE6()
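The compat entry points exist because a 32-bit caller hands over a bitmap built from 32-bit compat_ulong_t words; compat_get_bitmap()/compat_put_bitmap() repack those into or out of the native unsigned long layout, and the clear_user() call zeroes the user buffer up front so bits the kernel does not write back read as zero. A hypothetical userspace illustration of the same repacking for one 64-bit native word:

    #include <stdint.h>
    #include <stdio.h>

    /* Pack two 32-bit bitmap words into one 64-bit word, low word first,
     * which is how a compat (32-bit) nodemask lines up with a 64-bit one. */
    static uint64_t pack_compat_words(uint32_t lo, uint32_t hi)
    {
        return (uint64_t)lo | ((uint64_t)hi << 32);
    }

    int main(void)
    {
        uint32_t compat_mask[2] = { 0x5, 0x1 };  /* nodes 0 and 2, plus node 32 */
        uint64_t native = pack_compat_words(compat_mask[0], compat_mask[1]);

        printf("native word: 0x%llx\n", (unsigned long long)native);
        return 0;
    }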
1969 nodemask_t *nmask; in alloc_pages_vma() local
2001 nmask = policy_nodemask(gfp, pol); in alloc_pages_vma()
2002 if (!nmask || node_isset(hpage_node, *nmask)) { in alloc_pages_vma()
2010 nmask = policy_nodemask(gfp, pol); in alloc_pages_vma()
2013 page = __alloc_pages_nodemask(gfp, order, zl, nmask); in alloc_pages_vma()
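In alloc_pages_vma() the policy's mask from policy_nodemask() is passed straight to __alloc_pages_nodemask(), with a shortcut that keeps huge-page allocations on hpage_node when that node is already in the mask. Whether an allocation really landed inside the mask can be checked from userspace with get_mempolicy(2)'s MPOL_F_NODE | MPOL_F_ADDR query, as in this sketch (link with -lnuma; the page must be faulted in first):

    #include <numaif.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>

    int main(void)
    {
        size_t len = 2 * 1024 * 1024;
        char *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        int node = -1;

        if (buf == MAP_FAILED)
            return 1;

        memset(buf, 1, len);   /* fault the pages so they are actually allocated */

        /* With MPOL_F_NODE | MPOL_F_ADDR, 'node' receives the ID of the node
         * holding the page that backs the given address. */
        if (get_mempolicy(&node, NULL, 0, buf, MPOL_F_NODE | MPOL_F_ADDR) == 0)
            printf("page at %p sits on node %d\n", (void *)buf, node);
        return 0;
    }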