/linux-4.4.14/mm/ |
D | percpu-vm.c |
     86  unsigned int cpu, tcpu;   in pcpu_alloc_pages() local
    104  for_each_possible_cpu(tcpu) {   in pcpu_alloc_pages()
    105  if (tcpu == cpu)   in pcpu_alloc_pages()
    108  __free_page(pages[pcpu_page_idx(tcpu, i)]);   in pcpu_alloc_pages()
    215  unsigned int cpu, tcpu;   in pcpu_map_pages() local
    231  for_each_possible_cpu(tcpu) {   in pcpu_map_pages()
    232  if (tcpu == cpu)   in pcpu_map_pages()
    234  __pcpu_unmap_pages(pcpu_chunk_addr(chunk, tcpu, page_start),   in pcpu_map_pages()
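The percpu-vm.c hits show a rollback idiom: a second for_each_possible_cpu() walk undoes the work of the first walk for every CPU handled before the one that failed. A minimal sketch of that idiom, assuming a flat pages[] array and an illustrative helper name instead of the real per-chunk page indexing:

#include <linux/cpumask.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/*
 * Allocate one page per possible CPU; on failure, walk the CPUs again
 * and free what was already allocated, stopping at the CPU that failed.
 * alloc_per_cpu_pages() and the flat pages[] layout are illustrative.
 */
static int alloc_per_cpu_pages(struct page **pages, gfp_t gfp)
{
	unsigned int cpu, tcpu;

	for_each_possible_cpu(cpu) {
		pages[cpu] = alloc_page(gfp);
		if (!pages[cpu])
			goto err;
	}
	return 0;

err:
	for_each_possible_cpu(tcpu) {
		if (tcpu == cpu)	/* nothing was allocated from here on */
			break;
		__free_page(pages[tcpu]);
	}
	return -ENOMEM;
}

The break on tcpu == cpu works because both walks visit the possible CPUs in the same order, so exactly the CPUs seen before the failing one hold a page.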
|
D | percpu.c |
   1786  unsigned int cpu, tcpu;   in pcpu_build_alloc_info() local
   1817  for_each_possible_cpu(tcpu) {   in pcpu_build_alloc_info()
   1818  if (cpu == tcpu)   in pcpu_build_alloc_info()
   1820  if (group_map[tcpu] == group && cpu_distance_fn &&   in pcpu_build_alloc_info()
   1821  (cpu_distance_fn(cpu, tcpu) > LOCAL_DISTANCE ||   in pcpu_build_alloc_info()
   1822  cpu_distance_fn(tcpu, cpu) > LOCAL_DISTANCE)) {   in pcpu_build_alloc_info()
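The percpu.c hits are from the group-assignment pass of pcpu_build_alloc_info(): two CPUs end up in different allocation groups when the reported distance between them, in either direction, exceeds LOCAL_DISTANCE. A hedged sketch of just that test (group_map bookkeeping omitted), using the pcpu_fc_cpu_distance_fn_t type from <linux/percpu.h>:

#include <linux/percpu.h>
#include <linux/topology.h>
#include <linux/types.h>

/*
 * Sketch of the grouping test: with no distance callback everything is
 * treated as local; otherwise both directions must be within
 * LOCAL_DISTANCE for cpu and tcpu to share a group.
 */
static bool cpus_are_local(unsigned int cpu, unsigned int tcpu,
			   pcpu_fc_cpu_distance_fn_t cpu_distance_fn)
{
	if (!cpu_distance_fn)
		return true;

	return cpu_distance_fn(cpu, tcpu) <= LOCAL_DISTANCE &&
	       cpu_distance_fn(tcpu, cpu) <= LOCAL_DISTANCE;
}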
|
/linux-4.4.14/drivers/media/pci/b2c2/ |
D | flexcop-dma.c |
     11  u8 *tcpu;   in flexcop_dma_allocate() local
     19  if ((tcpu = pci_alloc_consistent(pdev, size, &tdma)) != NULL) {   in flexcop_dma_allocate()
     21  dma->cpu_addr0 = tcpu;   in flexcop_dma_allocate()
     23  dma->cpu_addr1 = tcpu + size/2;   in flexcop_dma_allocate()
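The flexcop-dma.c hits show a single coherent PCI allocation being split into two halves; tcpu and tdma are only temporaries for the CPU and bus addresses before they are stored. A sketch of the same split, with struct my_dma standing in for struct flexcop_dma (pci_alloc_consistent() is the older PCI DMA helper that 4.4 still provides):

#include <linux/pci.h>

struct my_dma {			/* stand-in for struct flexcop_dma */
	u8 *cpu_addr0, *cpu_addr1;
	dma_addr_t dma_addr0, dma_addr1;
	u32 size;
};

static int my_dma_allocate(struct pci_dev *pdev, struct my_dma *dma, u32 size)
{
	u8 *tcpu;
	dma_addr_t tdma;

	tcpu = pci_alloc_consistent(pdev, size, &tdma);
	if (!tcpu)
		return -ENOMEM;

	/* first half of the buffer */
	dma->cpu_addr0 = tcpu;
	dma->dma_addr0 = tdma;
	/* second half of the same allocation */
	dma->cpu_addr1 = tcpu + size / 2;
	dma->dma_addr1 = tdma + size / 2;
	dma->size = size / 2;
	return 0;
}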
|
/linux-4.4.14/arch/x86/platform/uv/ |
D | uv_nmi.c |
    457  int tcpu;   in uv_nmi_dump_state() local
    467  for_each_online_cpu(tcpu) {   in uv_nmi_dump_state()
    468  if (cpumask_test_cpu(tcpu, uv_nmi_cpu_mask))   in uv_nmi_dump_state()
    470  else if (tcpu == cpu)   in uv_nmi_dump_state()
    471  uv_nmi_dump_state_cpu(tcpu, regs);   in uv_nmi_dump_state()
    473  uv_nmi_trigger_dump(tcpu);   in uv_nmi_dump_state()
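In the uv_nmi.c hits, the CPU that owns the NMI dump walks every online CPU: CPUs still set in uv_nmi_cpu_mask never checked in and are only counted, the local CPU dumps itself with the regs it already has, and every other CPU is asked to dump via uv_nmi_trigger_dump(). A rough sketch of that dispatch, with dump_self() and trigger_remote_dump() as hypothetical stubs standing in for the real helpers:

#include <linux/cpumask.h>
#include <linux/printk.h>
#include <linux/ptrace.h>

static void dump_self(int cpu, struct pt_regs *regs) { }	/* hypothetical stub */
static void trigger_remote_dump(int cpu) { }			/* hypothetical stub */

static void nmi_dump_all(int cpu, struct pt_regs *regs,
			 const struct cpumask *unresponsive)
{
	int tcpu, ignored = 0;

	for_each_online_cpu(tcpu) {
		if (cpumask_test_cpu(tcpu, unresponsive))
			ignored++;			/* never checked in */
		else if (tcpu == cpu)
			dump_self(tcpu, regs);		/* dump ourselves directly */
		else
			trigger_remote_dump(tcpu);	/* ask the other CPU to dump */
	}

	pr_alert("%d CPUs did not respond to the NMI\n", ignored);
}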
|
D | tlb_uv.c |
    756  int tcpu;   in disable_for_period() local
    766  for_each_present_cpu(tcpu) {   in disable_for_period()
    767  tbcp = &per_cpu(bau_control, tcpu);   in disable_for_period()
    966  int tcpu;   in check_enable() local
    974  for_each_present_cpu(tcpu) {   in check_enable()
    975  tbcp = &per_cpu(bau_control, tcpu);   in check_enable()
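The tlb_uv.c hits in disable_for_period() and check_enable() share one access pattern: a single CPU walks all present CPUs and reaches into each one's per-CPU struct bau_control through per_cpu(). A sketch of that pattern with a cut-down stand-in struct (the real definition lives elsewhere in the UV headers, and the field name here is illustrative):

#include <linux/cpumask.h>
#include <linux/percpu.h>

struct bau_control {		/* cut-down stand-in for the real struct */
	int baudisabled;	/* illustrative field */
};

static DEFINE_PER_CPU(struct bau_control, bau_control);

static void disable_all_cpus(void)
{
	struct bau_control *tbcp;
	int tcpu;

	for_each_present_cpu(tcpu) {
		tbcp = &per_cpu(bau_control, tcpu);
		tbcp->baudisabled = 1;	/* update that CPU's copy */
	}
}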
|
/linux-4.4.14/arch/s390/kvm/ |
D | diag.c |
    159  struct kvm_vcpu *tcpu;   in __diag_time_slice_end_directed() local
    170  kvm_for_each_vcpu(i, tcpu, kvm)   in __diag_time_slice_end_directed()
    171  if (tcpu->vcpu_id == tid) {   in __diag_time_slice_end_directed()
    172  kvm_vcpu_yield_to(tcpu);   in __diag_time_slice_end_directed()
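The diag.c hits implement the s390 "time slice end directed" diagnose: walk the VM's vCPUs, find the one whose vcpu_id matches the requested target id, and yield to it. A minimal sketch of that lookup as a standalone helper:

#include <linux/kvm_host.h>

static int yield_to_vcpu_id(struct kvm *kvm, int tid)
{
	struct kvm_vcpu *tcpu;
	int i;

	kvm_for_each_vcpu(i, tcpu, kvm) {
		if (tcpu->vcpu_id == tid)
			return kvm_vcpu_yield_to(tcpu);
	}
	return -ENOENT;		/* no vCPU with that id */
}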
|
/linux-4.4.14/drivers/cpufreq/ |
D | powernv-cpufreq.c |
    437  int index, tcpu;   in powernv_cpufreq_work_fn() local
    445  for_each_cpu(tcpu, policy.cpus)   in powernv_cpufreq_work_fn()
    446  cpumask_clear_cpu(tcpu, mask);   in powernv_cpufreq_work_fn()
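In the powernv-cpufreq.c worker, once a policy has been handled every CPU covered by that policy is cleared from the mask of CPUs still waiting to be processed, so the outer loop does not revisit them. A small sketch of that bookkeeping (pending is an illustrative name for the mask):

#include <linux/cpufreq.h>
#include <linux/cpumask.h>

static void clear_policy_cpus(struct cpufreq_policy *policy,
			      struct cpumask *pending)
{
	int tcpu;

	/* every CPU of this policy has now been handled */
	for_each_cpu(tcpu, policy->cpus)
		cpumask_clear_cpu(tcpu, pending);
}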
|
/linux-4.4.14/drivers/xen/events/ |
D | events_base.c |
   1307  static int rebind_irq_to_cpu(unsigned irq, unsigned tcpu)   in rebind_irq_to_cpu() argument
   1321  bind_vcpu.vcpu = tcpu;   in rebind_irq_to_cpu()
   1335  bind_evtchn_to_cpu(evtchn, tcpu);   in rebind_irq_to_cpu()
   1346  unsigned tcpu = cpumask_first_and(dest, cpu_online_mask);   in set_affinity_irq() local
   1348  return rebind_irq_to_cpu(data->irq, tcpu);   in set_affinity_irq()
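The events_base.c hits are the two halves of Xen IRQ affinity: set_affinity_irq() picks the first CPU that is both in the requested mask and online, and rebind_irq_to_cpu() then asks the hypervisor to deliver the event channel there. A sketch of the selection step only, with do_rebind() as a hypothetical stand-in for rebind_irq_to_cpu() and an extra validity check added for the sketch:

#include <linux/cpumask.h>
#include <linux/irq.h>

static int do_rebind(unsigned int irq, unsigned int tcpu)
{
	/* hypothetical stand-in for rebind_irq_to_cpu() */
	return 0;
}

static int set_affinity(struct irq_data *data, const struct cpumask *dest)
{
	unsigned int tcpu = cpumask_first_and(dest, cpu_online_mask);

	if (tcpu >= nr_cpu_ids)		/* no online CPU in the requested mask */
		return -EINVAL;

	return do_rebind(data->irq, tcpu);
}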
|
/linux-4.4.14/net/core/ |
D | dev.c |
   3284  u32 tcpu;   in get_rps_cpu() local
   3329  tcpu = rflow->cpu;   in get_rps_cpu()
   3342  if (unlikely(tcpu != next_cpu) &&   in get_rps_cpu()
   3343  (tcpu >= nr_cpu_ids || !cpu_online(tcpu) ||   in get_rps_cpu()
   3344  ((int)(per_cpu(softnet_data, tcpu).input_queue_head -   in get_rps_cpu()
   3346  tcpu = next_cpu;   in get_rps_cpu()
   3350  if (tcpu < nr_cpu_ids && cpu_online(tcpu)) {   in get_rps_cpu()
   3352  cpu = tcpu;   in get_rps_cpu()
   3360  tcpu = map->cpus[reciprocal_scale(hash, map->len)];   in get_rps_cpu()
   3361  if (cpu_online(tcpu)) {   in get_rps_cpu()
[all …]
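The dev.c hits are all inside get_rps_cpu(): tcpu starts as the CPU recorded for the flow in the flow table and is re-steered to next_cpu when that CPU has disappeared, gone offline, or already drained past the point where the flow last enqueued; the per-hash rps_map lookup further down is the fallback path. A hedged sketch of just the re-steering test, with last_qtail standing in for rflow->last_qtail:

#include <linux/netdevice.h>

static u32 pick_rps_cpu(u32 tcpu, u32 next_cpu, unsigned int last_qtail)
{
	/*
	 * Keep the recorded CPU unless it is gone/offline or its input
	 * queue head has moved past the flow's last enqueue point.
	 */
	if (tcpu != next_cpu &&
	    (tcpu >= nr_cpu_ids || !cpu_online(tcpu) ||
	     (int)(per_cpu(softnet_data, tcpu).input_queue_head -
		   last_qtail) >= 0))
		return next_cpu;

	return tcpu;
}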
|
/linux-4.4.14/Documentation/ia64/ |
D | err_inject.txt |
    964  …printf("\t\tcpu,loop,interval,err_type_info,err_struct_info[,err_data_buffer[0],err_data_buffer[1]…
|