cpu_to_node        39 arch/alpha/include/asm/topology.h 		if (cpu_to_node(cpu) == node)
cpu_to_node       312 arch/arm/common/bL_switcher.c 				      cpu_to_node(cpu), "kswitcher_%d", cpu);
cpu_to_node        46 arch/arm64/kernel/irq.c 		p = arch_alloc_vmap_stack(IRQ_STACK_SIZE, cpu_to_node(cpu));
cpu_to_node        64 arch/arm64/kernel/sdei.c 	p = arch_alloc_vmap_stack(SDEI_STACK_SIZE, cpu_to_node(cpu));
cpu_to_node        63 arch/arm64/mm/numa.c 	int nid = cpu_to_node(cpu);
cpu_to_node        59 arch/ia64/kernel/topology.c 	unmap_cpu_from_node(num, cpu_to_node(num));
cpu_to_node       106 arch/ia64/uv/kernel/setup.c 		nid = cpu_to_node(cpu);
cpu_to_node        29 arch/mips/include/asm/sn/agent.h 	REMOTE_HUB_ADDR(COMPACT_TO_NASID_NODEID(cpu_to_node(_cpuid)),	    \
cpu_to_node       287 arch/mips/pci/pci-xtalk-bridge.c 		data->nnasid = COMPACT_TO_NASID_NODEID(cpu_to_node(cpu));
cpu_to_node        79 arch/mips/sgi-ip27/ip27-irq.c 	nasid = COMPACT_TO_NASID_NODEID(cpu_to_node(cpu));
cpu_to_node       145 arch/mips/sgi-ip27/ip27-irq.c 	info->nasid = cpu_to_node(hd->cpu);
cpu_to_node       169 arch/mips/sgi-ip27/ip27-smp.c 	REMOTE_HUB_SEND_INTR(COMPACT_TO_NASID_NODEID(cpu_to_node(destid)), irq);
cpu_to_node       793 arch/powerpc/kernel/smp.c 				cpu_to_node(cpu));
cpu_to_node       833 arch/powerpc/kernel/smp.c 					cpu_to_node(cpu));
cpu_to_node       858 arch/powerpc/kernel/smp.c 					GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       860 arch/powerpc/kernel/smp.c 					GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       862 arch/powerpc/kernel/smp.c 					GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       682 arch/powerpc/kernel/vdso.c 	node = cpu_to_node(cpu);
cpu_to_node      5452 arch/powerpc/kvm/book3s_hv.c 		int node = cpu_to_node(first_cpu);
cpu_to_node       518 arch/powerpc/mm/numa.c 		if (cpu_to_node(sibling) != node) {
cpu_to_node      1221 arch/powerpc/mm/numa.c 		return cpu_to_node(cpu);
cpu_to_node       363 arch/powerpc/perf/imc-pmu.c 	nid = cpu_to_node(cpu);
cpu_to_node       404 arch/powerpc/perf/imc-pmu.c 	l_cpumask = cpumask_of_node(cpu_to_node(cpu));
cpu_to_node       443 arch/powerpc/perf/imc-pmu.c 	node_id = cpu_to_node(event->cpu);
cpu_to_node       539 arch/powerpc/perf/imc-pmu.c 	node_id = cpu_to_node(event->cpu);
cpu_to_node       586 arch/powerpc/perf/imc-pmu.c 	nid = cpu_to_node(cpu);
cpu_to_node       851 arch/powerpc/perf/imc-pmu.c 	int nid = cpu_to_node(cpu_id);
cpu_to_node      1098 arch/powerpc/perf/imc-pmu.c 	int phys_id = cpu_to_node(cpu_id), rc = 0;
cpu_to_node      1402 arch/powerpc/perf/imc-pmu.c 		nid = cpu_to_node(cpu);
cpu_to_node        37 arch/powerpc/platforms/cell/cpufreq_spudemand.c 	busy_spus = atomic_read(&cbe_spu_info[cpu_to_node(cpu)].busy_spus);
cpu_to_node        70 arch/powerpc/platforms/cell/spu_priv1_mmio.c 			*cpumask = cpumask_of_node(cpu_to_node(cpu));
cpu_to_node       302 arch/powerpc/platforms/cell/spufs/sched.c 	node = cpu_to_node(raw_smp_processor_id());
cpu_to_node       585 arch/powerpc/platforms/cell/spufs/sched.c 	node = cpu_to_node(raw_smp_processor_id());
cpu_to_node       633 arch/powerpc/platforms/cell/spufs/sched.c 	node = cpu_to_node(raw_smp_processor_id());
cpu_to_node       176 arch/powerpc/platforms/powernv/setup.c 			paca_ptrs[i]->mce_faulty_slbs = memblock_alloc_node(mmu_slb_size, __alignof__(*paca_ptrs[i]->mce_faulty_slbs), cpu_to_node(i));
cpu_to_node       188 arch/powerpc/platforms/pseries/dtl.c 	buf = kmem_cache_alloc_node(dtl_cache, GFP_KERNEL, cpu_to_node(dtl->cpu));
cpu_to_node      1341 arch/powerpc/sysdev/xive/common.c 				  GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1545 arch/powerpc/sysdev/xive/common.c 	pages = alloc_pages_node(cpu_to_node(cpu), GFP_KERNEL, alloc_order);
cpu_to_node        71 arch/s390/include/asm/topology.h #define cpu_to_node cpu_to_node
cpu_to_node       162 arch/s390/kernel/vdso.c 	vd->node_id = cpu_to_node(vd->cpu_nr);
cpu_to_node       105 arch/sparc/kernel/cpumap.c 		id = cpu_to_node(cpu);
cpu_to_node       623 arch/sparc/kernel/setup_64.c 		node = cpu_to_node(i);
cpu_to_node      1587 arch/sparc/kernel/smp_64.c 	int node = cpu_to_node(cpu);
cpu_to_node      1615 arch/sparc/kernel/smp_64.c 	if (cpu_to_node(from) == cpu_to_node(to))
cpu_to_node       490 arch/x86/events/amd/core.c 	nb = kzalloc_node(sizeof(struct amd_nb), GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       323 arch/x86/events/amd/uncore.c 			cpu_to_node(cpu));
cpu_to_node        86 arch/x86/events/intel/bts.c 	int node = (cpu == -1) ? cpu : cpu_to_node(cpu);
cpu_to_node      3621 arch/x86/events/intel/core.c 			    GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      3639 arch/x86/events/intel/core.c 			 GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      3661 arch/x86/events/intel/core.c 		cpuc->constraint_list = kzalloc_node(sz, GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       321 arch/x86/events/intel/ds.c 	int node = cpu_to_node(cpu);
cpu_to_node       339 arch/x86/events/intel/ds.c 	int max, node = cpu_to_node(cpu);
cpu_to_node       614 arch/x86/events/intel/pt.c 	int node = cpu_to_node(cpu);
cpu_to_node      1203 arch/x86/events/intel/pt.c 	node = cpu_to_node(cpu);
cpu_to_node      1240 arch/x86/events/intel/pt.c 	int node = event->cpu == -1 ? -1 : cpu_to_node(event->cpu);
cpu_to_node       552 arch/x86/events/intel/rapl.c 		pmu = kzalloc_node(sizeof(*pmu), GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1256 arch/x86/events/intel/uncore.c 			box = uncore_alloc_box(type, cpu_to_node(cpu));
cpu_to_node       743 arch/x86/include/asm/uv/uv_hub.h 	return uv_node_to_blade_id(cpu_to_node(cpu));
cpu_to_node      2131 arch/x86/kernel/apic/io_apic.c 	int node = cpu_to_node(0);
cpu_to_node       148 arch/x86/kernel/apic/x2apic_cluster.c 	if (alloc_clustermask(cpu, cpu_to_node(cpu)) < 0)
cpu_to_node      1376 arch/x86/kernel/apic/x2apic_uv_x.c 		int nid = cpu_to_node(cpu);
cpu_to_node      1516 arch/x86/kernel/apic/x2apic_uv_x.c 		nodeid = cpu_to_node(cpu);
cpu_to_node      1523 arch/x86/kernel/apic/x2apic_uv_x.c 			uv_cpu_hub_info(cpu)->memory_nid = cpu_to_node(cpu);
cpu_to_node       491 arch/x86/kernel/cpu/intel.c 		node = cpu_to_node(cpu);
cpu_to_node       574 arch/x86/kernel/cpu/resctrl/core.c 	d = kzalloc_node(sizeof(*d), GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1182 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 						cpu_to_node(cpu),
cpu_to_node      1187 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 						cpu_to_node(cpu),
cpu_to_node      1192 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 						cpu_to_node(cpu),
cpu_to_node      1292 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 					cpu_to_node(plr->cpu),
cpu_to_node       161 arch/x86/kernel/espfix_64.c 	node = cpu_to_node(cpu);
cpu_to_node       114 arch/x86/kernel/irq_32.c 	int node = cpu_to_node(cpu);
cpu_to_node       747 arch/x86/kernel/kvm.c 				GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       415 arch/x86/kernel/smpboot.c 	return (cpu_to_node(cpu1) == cpu_to_node(cpu2));
cpu_to_node       426 arch/x86/kernel/smpboot.c 		cpu1, name, cpu2, cpu_to_node(cpu1), cpu_to_node(cpu2));
cpu_to_node      2504 arch/x86/kvm/vmx/vmx.c 	int node = cpu_to_node(cpu);
cpu_to_node        37 arch/x86/platform/intel-mid/device_libs/platform_mrfld_wdt.c 	ioapic_set_alloc_attr(&info, cpu_to_node(0), 1, 0);
cpu_to_node      1965 arch/x86/platform/uv/tlb_uv.c 		bcp->osnode = cpu_to_node(cpu);
cpu_to_node      2220 arch/x86/platform/uv/tlb_uv.c 		zalloc_cpumask_var_node(mask, GFP_KERNEL, cpu_to_node(cur_cpu));
cpu_to_node      1013 arch/x86/platform/uv/uv_nmi.c 		int nid = cpu_to_node(cpu);
cpu_to_node       153 arch/x86/platform/uv/uv_time.c 		int nid = cpu_to_node(cpu);
cpu_to_node        92 block/blk-mq-cpumap.c 			return local_memory_node(cpu_to_node(i));
cpu_to_node      2443 block/blk-mq.c 				hctx->numa_node = local_memory_node(cpu_to_node(i));
cpu_to_node        93 crypto/scompress.c 		mem = vmalloc_node(SCOMP_SCRATCH_SIZE, cpu_to_node(i));
cpu_to_node        97 crypto/scompress.c 		mem = vmalloc_node(SCOMP_SCRATCH_SIZE, cpu_to_node(i));
cpu_to_node       484 drivers/acpi/acpi_processor.c 	try_offline_node(cpu_to_node(pr->id));
cpu_to_node       436 drivers/base/arch_topology.c 	const cpumask_t *core_mask = cpumask_of_node(cpu_to_node(cpu));
cpu_to_node        54 drivers/base/cpu.c 	from_nid = cpu_to_node(cpuid);
cpu_to_node        63 drivers/base/cpu.c 	to_nid = cpu_to_node(cpuid);
cpu_to_node        79 drivers/base/cpu.c 	unregister_cpu_under_node(logical_cpu, cpu_to_node(logical_cpu));
cpu_to_node       370 drivers/base/cpu.c 	cpu->node_id = cpu_to_node(num);
cpu_to_node       391 drivers/base/cpu.c 	register_cpu_under_node(num, cpu_to_node(num));
cpu_to_node       920 drivers/base/node.c 		if (cpu_to_node(cpu) == nid)
cpu_to_node       113 drivers/base/test/test_async_driver_probe.c 		nid = cpu_to_node(cpu);
cpu_to_node       148 drivers/base/test/test_async_driver_probe.c 		nid = cpu_to_node(cpu);
cpu_to_node       177 drivers/base/test/test_async_driver_probe.c 	nid = cpu_to_node(cpu);
cpu_to_node      4018 drivers/block/mtip32xx/mtip32xx.c 		cpu_to_node(raw_smp_processor_id()), raw_smp_processor_id());
cpu_to_node      4088 drivers/block/mtip32xx/mtip32xx.c 		cpu_to_node(dd->isr_binding), dd->isr_binding);
cpu_to_node       568 drivers/cpufreq/acpi-cpufreq.c 			GFP_KERNEL, cpu_to_node(i))) {
cpu_to_node       116 drivers/crypto/cavium/zip/zip_main.c 	return cpu_to_node(raw_smp_processor_id());
cpu_to_node       148 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip = find_zip_device(cpu_to_node(smp_processor_id()));
cpu_to_node       978 drivers/edac/mce_amd.c 		decode_dram_ecc(cpu_to_node(m->extcpu), m);
cpu_to_node       563 drivers/firmware/stratix10-svc.c 						cpu_to_node(cpu),
cpu_to_node       814 drivers/firmware/stratix10-svc.c 					      cpu_to_node(cpu),
cpu_to_node        97 drivers/hv/channel.c 	page = alloc_pages_node(cpu_to_node(newchannel->target_cpu),
cpu_to_node       380 drivers/hwtracing/coresight/coresight-etb10.c 	node = (event->cpu == -1) ? NUMA_NO_NODE : cpu_to_node(event->cpu);
cpu_to_node        84 drivers/hwtracing/coresight/coresight-etm-perf.c 	int node = event->cpu == -1 ? -1 : cpu_to_node(event->cpu);
cpu_to_node       395 drivers/hwtracing/coresight/coresight-tmc-etf.c 	node = (event->cpu == -1) ? NUMA_NO_NODE : cpu_to_node(event->cpu);
cpu_to_node      1066 drivers/hwtracing/coresight/coresight-tmc-etr.c 				 0, cpu_to_node(0), NULL);
cpu_to_node      1210 drivers/hwtracing/coresight/coresight-tmc-etr.c 	node = (event->cpu == -1) ? NUMA_NO_NODE : cpu_to_node(event->cpu);
cpu_to_node      1336 drivers/hwtracing/coresight/coresight-tmc-etr.c 	node = (event->cpu == -1) ? NUMA_NO_NODE : cpu_to_node(event->cpu);
cpu_to_node       970 drivers/infiniband/hw/hfi1/file_ops.c 		numa = cpu_to_node(fd->rec_cpu_num);
cpu_to_node      1297 drivers/infiniband/hw/qib/qib_file_ops.c 		cpu_to_node(fd->rec_cpu_num) :
cpu_to_node      1551 drivers/infiniband/hw/qib/qib_file_ops.c 			if (cpu_to_node(cpu) ==
cpu_to_node       157 drivers/infiniband/sw/siw/siw_main.c 		cpumask_set_cpu(i, siw_cpu_info.tx_valid_cpus[cpu_to_node(i)]);
cpu_to_node      3924 drivers/infiniband/ulp/srp/ib_srp.c 			if (cpu_to_node(cpu) != node)
cpu_to_node       332 drivers/md/dm-stats.c 		p = dm_kvzalloc(percpu_alloc_size, cpu_to_node(cpu));
cpu_to_node       340 drivers/md/dm-stats.c 			hi = dm_kvzalloc(s->histogram_alloc_size, cpu_to_node(cpu));
cpu_to_node        61 drivers/md/raid5.c #define cpu_to_group(cpu) cpu_to_node(cpu)
cpu_to_node       244 drivers/misc/sgi-xp/xpc_uv.c 	nid = cpu_to_node(cpu);
cpu_to_node       221 drivers/net/ethernet/amazon/ena/ena_netdev.c 	node = cpu_to_node(ena_irq->cpu);
cpu_to_node       370 drivers/net/ethernet/amazon/ena/ena_netdev.c 	node = cpu_to_node(ena_irq->cpu);
cpu_to_node      1220 drivers/net/ethernet/amazon/ena/ena_netdev.c 	numa_node = cpu_to_node(cpu);
cpu_to_node      1650 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.numa_node = cpu_to_node(tx_ring->cpu);
cpu_to_node      1717 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.numa_node = cpu_to_node(rx_ring->cpu);
cpu_to_node       204 drivers/net/ethernet/amd/xgbe/xgbe-drv.c 		node = cpu_to_node(cpu);
cpu_to_node       351 drivers/net/ethernet/huawei/hinic/hinic_main.c 		node = cpu_to_node(i);
cpu_to_node       850 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c 				node = cpu_to_node(cpu);
cpu_to_node      2148 drivers/net/ethernet/mellanox/mlx4/en_netdev.c 			node = cpu_to_node(i % num_online_cpus());
cpu_to_node      2162 drivers/net/ethernet/mellanox/mlx4/en_netdev.c 		node = cpu_to_node(i % num_online_cpus());
cpu_to_node       258 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 				       GFP_KERNEL, cpu_to_node(c->cpu));
cpu_to_node       352 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 				   GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       391 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	rqp->wq.db_numa_node = cpu_to_node(c->cpu);
cpu_to_node       501 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 				      GFP_KERNEL, cpu_to_node(c->cpu));
cpu_to_node       551 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 		pp_params.nid       = cpu_to_node(c->cpu);
cpu_to_node      1014 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	param->wq.db_numa_node = cpu_to_node(c->cpu);
cpu_to_node      1020 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	err = mlx5e_alloc_xdpsq_db(sq, cpu_to_node(c->cpu));
cpu_to_node      1076 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	param->wq.db_numa_node = cpu_to_node(c->cpu);
cpu_to_node      1082 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	err = mlx5e_alloc_icosq_db(sq, cpu_to_node(c->cpu));
cpu_to_node      1168 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	param->wq.db_numa_node = cpu_to_node(c->cpu);
cpu_to_node      1174 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	err = mlx5e_alloc_txqsq_db(sq, cpu_to_node(c->cpu));
cpu_to_node      1580 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	param->wq.buf_numa_node = cpu_to_node(c->cpu);
cpu_to_node      1581 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	param->wq.db_numa_node  = cpu_to_node(c->cpu);
cpu_to_node      1980 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	c = kvzalloc_node(sizeof(*c), GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1281 drivers/net/ethernet/socionext/netsec.c 	pp_params.nid = cpu_to_node(0);
cpu_to_node       284 drivers/net/hyperv/netvsc.c 	int node = cpu_to_node(nvchan->channel->target_cpu);
cpu_to_node      1330 drivers/pci/msi.c 		return local_memory_node(cpu_to_node(cpumask_first(mask)));
cpu_to_node       840 drivers/perf/arm_spe_pmu.c 	buf = kzalloc_node(sizeof(*buf), GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       728 drivers/perf/thunderx2_pmu.c 		(tx2_pmu->node == cpu_to_node(cpu)))
cpu_to_node      2629 drivers/scsi/bnx2fc/bnx2fc_fcoe.c 					(void *)p, cpu_to_node(cpu),
cpu_to_node       419 drivers/scsi/bnx2i/bnx2i_init.c 					cpu_to_node(cpu),
cpu_to_node      9038 drivers/scsi/lpfc/lpfc_init.c 						   cpu_to_node(cpu));
cpu_to_node      14531 drivers/scsi/lpfc/lpfc_sli.c 			     GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      20594 drivers/scsi/lpfc/lpfc_sli.c 				   cpu_to_node(hdwq->io_wq->chann));
cpu_to_node      20737 drivers/scsi/lpfc/lpfc_sli.c 				   cpu_to_node(hdwq->io_wq->chann));
cpu_to_node      1896 drivers/scsi/qedi/qedi_main.c 					cpu_to_node(cpu),
cpu_to_node      7882 drivers/scsi/smartpqi/smartpqi_init.c 		cp_node = cpu_to_node(0);
cpu_to_node      1264 drivers/scsi/storvsc_drv.c 	node_mask = cpumask_of_node(cpu_to_node(q_num));
cpu_to_node      1320 drivers/scsi/storvsc_drv.c 			node_mask = cpumask_of_node(cpu_to_node(q_num));
cpu_to_node       133 drivers/watchdog/octeon-wdt-main.c 	int node = cpu_to_node(cpu);
cpu_to_node       306 drivers/watchdog/octeon-wdt-main.c 	node = cpu_to_node(cpu);
cpu_to_node       330 drivers/watchdog/octeon-wdt-main.c 	node = cpu_to_node(cpu);
cpu_to_node       353 drivers/watchdog/octeon-wdt-main.c 	node = cpu_to_node(cpu);
cpu_to_node       413 drivers/watchdog/octeon-wdt-main.c 		node = cpu_to_node(cpu);
cpu_to_node       470 drivers/watchdog/octeon-wdt-main.c 		node = cpu_to_node(cpu);
cpu_to_node       540 drivers/xen/xen-acpi-processor.c 			GFP_KERNEL, cpu_to_node(i))) {
cpu_to_node        34 include/asm-generic/topology.h #ifndef cpu_to_node
cpu_to_node      1136 include/linux/mm.h 	return cpu_to_node(cpupid_to_cpu(cpupid));
cpu_to_node       230 include/linux/sched/topology.h 	return cpu_to_node(task_cpu(p));
cpu_to_node        92 include/linux/topology.h #ifndef cpu_to_node
cpu_to_node       119 include/linux/topology.h 	return cpu_to_node(raw_smp_processor_id());
cpu_to_node       169 include/linux/topology.h 	_node_numa_mem_[cpu_to_node(cpu)] = node;
cpu_to_node       193 include/linux/topology.h 	return cpu_to_node(cpu);
cpu_to_node       227 include/linux/topology.h 	return cpumask_of_node(cpu_to_node(cpu));
cpu_to_node       511 include/trace/events/sched.h 		__entry->src_nid	= cpu_to_node(src_cpu);
cpu_to_node       513 include/trace/events/sched.h 		__entry->dst_nid	= cpu_to_node(dst_cpu);
cpu_to_node       563 include/trace/events/sched.h 		__entry->src_nid	= cpu_to_node(src_cpu);
cpu_to_node       568 include/trace/events/sched.h 		__entry->dst_nid	= cpu_to_node(dst_cpu);
cpu_to_node       343 kernel/bpf/cpumap.c 	numa = cpu_to_node(cpu);
cpu_to_node      1185 kernel/cpu.c   	err = try_online_node(cpu_to_node(cpu));
cpu_to_node      1976 kernel/cpu.c   		if (cpu_online(cpu) || !node_online(cpu_to_node(cpu)))
cpu_to_node        91 kernel/events/callchain.c 							 cpu_to_node(cpu));
cpu_to_node      9158 kernel/events/core.c 	int node = cpu_to_node(event->cpu == -1 ? 0 : event->cpu);
cpu_to_node      12137 kernel/events/core.c 		hlist = kzalloc_node(sizeof(*hlist), GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node       633 kernel/events/ring_buffer.c 	int node = (event->cpu == -1) ? -1 : cpu_to_node(event->cpu);
cpu_to_node       749 kernel/events/ring_buffer.c 	node = (cpu == -1) ? cpu : cpu_to_node(cpu);
cpu_to_node      2307 kernel/fork.c  	task = copy_process(&init_struct_pid, 0, cpu_to_node(cpu), &args);
cpu_to_node        80 kernel/irq/affinity.c 		cpumask_set_cpu(cpu, masks[cpu_to_node(cpu)]);
cpu_to_node       491 kernel/irq/irqdesc.c 			node = cpu_to_node(cpumask_first(mask));
cpu_to_node       453 kernel/kthread.c 	p = kthread_create_on_node(threadfn, data, cpu_to_node(cpu), namefmt,
cpu_to_node       696 kernel/kthread.c 		node = cpu_to_node(cpu);
cpu_to_node      1569 kernel/rcu/rcutorture.c 						  cpu_to_node(cpu),
cpu_to_node      2028 kernel/sched/core.c 	int nid = cpu_to_node(cpu);
cpu_to_node      6592 kernel/sched/core.c 			cpumask_size(), GFP_KERNEL, cpu_to_node(i));
cpu_to_node      6594 kernel/sched/core.c 			cpumask_size(), GFP_KERNEL, cpu_to_node(i));
cpu_to_node      2291 kernel/sched/deadline.c 					GFP_KERNEL, cpu_to_node(i));
cpu_to_node      1412 kernel/sched/fair.c 	int dst_nid = cpu_to_node(dst_cpu);
cpu_to_node      1840 kernel/sched/fair.c 			nid = cpu_to_node(env.best_cpu);
cpu_to_node      2705 kernel/sched/fair.c 	int src_nid = cpu_to_node(task_cpu(p));
cpu_to_node      2706 kernel/sched/fair.c 	int dst_nid = cpu_to_node(new_cpu);
cpu_to_node      7200 kernel/sched/fair.c 	src_nid = cpu_to_node(env->src_cpu);
cpu_to_node      7201 kernel/sched/fair.c 	dst_nid = cpu_to_node(env->dst_cpu);
cpu_to_node      10297 kernel/sched/fair.c 				      GFP_KERNEL, cpu_to_node(i));
cpu_to_node      10302 kernel/sched/fair.c 				  GFP_KERNEL, cpu_to_node(i));
cpu_to_node       201 kernel/sched/rt.c 				     GFP_KERNEL, cpu_to_node(i));
cpu_to_node       206 kernel/sched/rt.c 				     GFP_KERNEL, cpu_to_node(i));
cpu_to_node      2209 kernel/sched/rt.c 					GFP_KERNEL, cpu_to_node(i));
cpu_to_node       883 kernel/sched/topology.c 			GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1464 kernel/sched/topology.c 	return sched_domains_numa_masks[sched_domains_curr_level][cpu_to_node(cpu)];
cpu_to_node      1707 kernel/sched/topology.c 	int node = cpu_to_node(cpu);
cpu_to_node      1738 kernel/sched/topology.c 	int i, j = cpu_to_node(cpu);
cpu_to_node      1781 kernel/sched/topology.c 					GFP_KERNEL, cpu_to_node(j));
cpu_to_node      1788 kernel/sched/topology.c 					GFP_KERNEL, cpu_to_node(j));
cpu_to_node      1795 kernel/sched/topology.c 					GFP_KERNEL, cpu_to_node(j));
cpu_to_node      1804 kernel/sched/topology.c 					GFP_KERNEL, cpu_to_node(j));
cpu_to_node        48 kernel/smp.c   				     cpu_to_node(cpu)))
cpu_to_node        51 kernel/smp.c   				     cpu_to_node(cpu))) {
cpu_to_node       382 kernel/smp.c   	nodemask = cpumask_of_node(cpu_to_node(cpu));
cpu_to_node       179 kernel/smpboot.c 	td = kzalloc_node(sizeof(*td), GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      2505 kernel/sys.c   		err |= put_user(cpu_to_node(cpu), nodep);
cpu_to_node       297 kernel/taskstats.c 					GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1229 kernel/trace/ring_buffer.c 				    mflags, cpu_to_node(cpu));
cpu_to_node      1235 kernel/trace/ring_buffer.c 		page = alloc_pages_node(cpu_to_node(cpu), mflags, 0);
cpu_to_node      1294 kernel/trace/ring_buffer.c 				  GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1310 kernel/trace/ring_buffer.c 			    GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      1317 kernel/trace/ring_buffer.c 	page = alloc_pages_node(cpu_to_node(cpu), GFP_KERNEL, 0);
cpu_to_node      4618 kernel/trace/ring_buffer.c 	page = alloc_pages_node(cpu_to_node(cpu),
cpu_to_node      2401 kernel/trace/trace.c 		page = alloc_pages_node(cpu_to_node(cpu),
cpu_to_node       852 kernel/trace/trace_uprobe.c 		struct page *p = alloc_pages_node(cpu_to_node(cpu),
cpu_to_node      1424 kernel/workqueue.c 		pwq = unbound_pwq_by_node(wq, cpu_to_node(cpu));
cpu_to_node      1553 kernel/workqueue.c 	if (node == cpu_to_node(cpu))
cpu_to_node      4085 kernel/workqueue.c 	int node = cpu_to_node(cpu);
cpu_to_node      4516 kernel/workqueue.c 		pwq = unbound_pwq_by_node(wq, cpu_to_node(cpu));
cpu_to_node      5868 kernel/workqueue.c 		node = cpu_to_node(cpu);
cpu_to_node      5914 kernel/workqueue.c 			pool->node = cpu_to_node(cpu);
cpu_to_node      5992 kernel/workqueue.c 			pool->node = cpu_to_node(cpu);
cpu_to_node       183 lib/cpu_rmap.c 			   cpumask_of_node(cpu_to_node(cpu)));
cpu_to_node       197 lib/cpu_rmap.c 					cpumask_of_node(cpu_to_node(cpu)), 3))
cpu_to_node      1655 mm/memory_hotplug.c 		if (cpu_to_node(cpu) == pgdat->node_id)
cpu_to_node      2392 mm/mempolicy.c 	int thisnid = cpu_to_node(thiscpu);
cpu_to_node      5778 mm/page_alloc.c 			set_cpu_numa_mem(cpu, local_memory_node(cpu_to_node(cpu)));
cpu_to_node        94 mm/percpu-vm.c 			*pagep = alloc_pages_node(cpu_to_node(cpu), gfp, 0);
cpu_to_node      1936 mm/vmstat.c    	node_set_state(cpu_to_node(cpu), N_CPU);
cpu_to_node      1951 mm/vmstat.c    	node = cpu_to_node(cpu);
cpu_to_node       379 mm/zswap.c     	dst = kmalloc_node(PAGE_SIZE * 2, GFP_KERNEL, cpu_to_node(cpu));
cpu_to_node      2227 net/core/dev.c 				       cpu_to_node(attr_index));
cpu_to_node      2342 net/core/dev.c 					numa_node_id = cpu_to_node(j);
cpu_to_node      2343 net/core/dev.c 				else if (numa_node_id != cpu_to_node(j))
cpu_to_node      3594 net/core/pktgen.c 	int node = cpu_to_node(t->cpu);
cpu_to_node      3684 net/core/pktgen.c 			 cpu_to_node(cpu));
cpu_to_node      3700 net/core/pktgen.c 				   cpu_to_node(cpu),
cpu_to_node       146 net/core/sysctl_net_core.c 						   cpu_to_node(i));
cpu_to_node      3744 net/ipv4/tcp.c 					       cpu_to_node(cpu));
cpu_to_node       390 net/ipv6/seg6_hmac.c 					     cpu_to_node(cpu));
cpu_to_node       644 net/iucv/iucv.c 			     GFP_KERNEL|GFP_DMA, cpu_to_node(cpu));
cpu_to_node       650 net/iucv/iucv.c 			  GFP_KERNEL|GFP_DMA, cpu_to_node(cpu));
cpu_to_node       655 net/iucv/iucv.c 			  GFP_KERNEL|GFP_DMA, cpu_to_node(cpu));
cpu_to_node      1324 net/netfilter/x_tables.c 			cpu_to_node(cpu));
cpu_to_node       292 net/sunrpc/svc.c 			return cpu_to_node(m->pool_to[pidx]);
cpu_to_node       352 net/sunrpc/svc.c 			pidx = m->to_pool[cpu_to_node(cpu)];
cpu_to_node       234 net/xfrm/xfrm_ipcomp.c 		scratch = vmalloc_node(IPCOMP_SCRATCH_SIZE, cpu_to_node(i));
cpu_to_node      1555 security/apparmor/lsm.c 			if (cpu_to_node(i) > num_online_nodes())
cpu_to_node      1560 security/apparmor/lsm.c 						      cpu_to_node(i));
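
The dominant pattern across these call sites is NUMA-aware per-CPU setup: the node returned by cpu_to_node(cpu) is handed to a *_node() allocator (kzalloc_node(), alloc_pages_node(), kthread_create_on_node(), vmalloc_node(), ...) or to cpumask_of_node() so that per-CPU state is placed on, or matched against, the memory node local to that CPU. Below is a minimal sketch of that pattern, not taken from any one file above; the demo_* identifiers are hypothetical, and only the kernel APIs used (cpu_to_node(), kzalloc_node(), for_each_possible_cpu(), per_cpu()) are real.

/*
 * Hypothetical example of the allocation pattern seen above:
 * allocate one small structure per possible CPU, placing each
 * allocation on that CPU's local NUMA node.
 */
#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/topology.h>

struct demo_pcpu {
	unsigned long counter;
};

static DEFINE_PER_CPU(struct demo_pcpu *, demo_data);

static int demo_alloc_pcpu(void)
{
	int cpu;

	for_each_possible_cpu(cpu) {
		struct demo_pcpu *p;

		/* Place each CPU's buffer on that CPU's NUMA node. */
		p = kzalloc_node(sizeof(*p), GFP_KERNEL, cpu_to_node(cpu));
		if (!p)
			goto err;
		per_cpu(demo_data, cpu) = p;
	}
	return 0;

err:
	for_each_possible_cpu(cpu) {
		kfree(per_cpu(demo_data, cpu));	/* kfree(NULL) is a no-op */
		per_cpu(demo_data, cpu) = NULL;
	}
	return -ENOMEM;
}

Several of the sites above use the node for placement rather than allocation, e.g. cpumask_of_node(cpu_to_node(cpu)) in kernel/smp.c, drivers/scsi/storvsc_drv.c and lib/cpu_rmap.c; that is the same idea applied to CPU affinity instead of memory.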