tsc_khz           127 arch/x86/hyperv/hv_init.c 	tsc_khz = div64_u64(freq, 1000);
tsc_khz             9 arch/x86/include/asm/iommu.h #define DMAR_OPERATION_TIMEOUT ((cycles_t) tsc_khz*10*1000)
tsc_khz            19 arch/x86/include/asm/tsc.h extern unsigned int tsc_khz;
tsc_khz           147 arch/x86/include/uapi/asm/bootparam.h 	__u32	tsc_khz;
tsc_khz           665 arch/x86/kernel/apic/apic.c 						tsc_khz * (1000 / TSC_DIVISOR),
tsc_khz           682 arch/x86/kernel/apic/apic.c 	clockevents_update_freq(levt, tsc_khz * (1000 / TSC_DIVISOR));
tsc_khz           830 arch/x86/kernel/apic/apic.c 	if (!tsc_khz || !cpu_khz)
tsc_khz           915 arch/x86/kernel/apic/apic.c 	if (tsc_khz) {
tsc_khz           917 arch/x86/kernel/apic/apic.c 		tsc_perj = div_u64((u64)tsc_khz * 1000, HZ);
tsc_khz           929 arch/x86/kernel/apic/apic.c 			if (tsc_khz) {
tsc_khz           176 arch/x86/kernel/cpu/vmware.c 	uint64_t lpj, tsc_khz;
tsc_khz           181 arch/x86/kernel/cpu/vmware.c 		lpj = tsc_khz = eax | (((uint64_t)ebx) << 32);
tsc_khz           182 arch/x86/kernel/cpu/vmware.c 		do_div(tsc_khz, 1000);
tsc_khz           183 arch/x86/kernel/cpu/vmware.c 		WARN_ON(tsc_khz >> 32);
tsc_khz           185 arch/x86/kernel/cpu/vmware.c 			(unsigned long) tsc_khz / 1000,
tsc_khz           186 arch/x86/kernel/cpu/vmware.c 			(unsigned long) tsc_khz % 1000);
tsc_khz           193 arch/x86/kernel/cpu/vmware.c 		vmware_tsc_khz = tsc_khz;
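
The vmware.c entries above (lines 176-193 of that file) show one recurring pattern: the hypervisor reports the TSC frequency in Hz split across two 32-bit registers (low half in eax, high half in ebx), which is combined into a 64-bit value and divided down to kHz before being stored in vmware_tsc_khz. A minimal userspace sketch of that composition, with made-up register contents standing in for the hypercall results (the VMware hypercall itself is not reproduced here):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Hypothetical register contents; in vmware.c these come from the
	 * hypervisor and carry the TSC frequency in Hz as a low/high pair.
	 * 2,400,000,000 Hz (2.4 GHz) is used purely as an example. */
	uint32_t eax = 2400000000u;	/* low 32 bits of the Hz value */
	uint32_t ebx = 0;		/* high 32 bits of the Hz value */

	/* Same composition as vmware.c: build the 64-bit Hz value, then
	 * convert Hz -> kHz (stands in for do_div(tsc_khz, 1000)). */
	uint64_t tsc_khz = (uint64_t)eax | ((uint64_t)ebx << 32);
	tsc_khz /= 1000;

	/* Same MHz formatting as the pr_info() at lines 185-186 above. */
	printf("TSC freq read from hypervisor : %lu.%03lu MHz\n",
	       (unsigned long)(tsc_khz / 1000),
	       (unsigned long)(tsc_khz % 1000));
	return 0;
}
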
tsc_khz           189 arch/x86/kernel/jailhouse.c 	precalibrated_tsc_khz = setup_data.tsc_khz;
tsc_khz            35 arch/x86/kernel/tsc.c unsigned int __read_mostly tsc_khz;
tsc_khz            36 arch/x86/kernel/tsc.c EXPORT_SYMBOL(tsc_khz);
tsc_khz           183 arch/x86/kernel/tsc.c 	__set_cyc2ns_scale(tsc_khz, smp_processor_id(), rdtsc());
tsc_khz           312 arch/x86/kernel/tsc.c 	u64 thresh = tsc_khz ? tsc_khz >> 5 : TSC_DEFAULT_THRESHOLD;
tsc_khz           898 arch/x86/kernel/tsc.c 	tsc_khz = x86_platform.calibrate_tsc();
tsc_khz           899 arch/x86/kernel/tsc.c 	if (tsc_khz == 0)
tsc_khz           900 arch/x86/kernel/tsc.c 		tsc_khz = cpu_khz;
tsc_khz           901 arch/x86/kernel/tsc.c 	else if (abs(cpu_khz - tsc_khz) * 10 > tsc_khz)
tsc_khz           902 arch/x86/kernel/tsc.c 		cpu_khz = tsc_khz;
tsc_khz           988 arch/x86/kernel/tsc.c 		tsc_khz_ref = tsc_khz;
tsc_khz           996 arch/x86/kernel/tsc.c 		tsc_khz = cpufreq_scale(tsc_khz_ref, ref_freq, freq->new);
tsc_khz          1000 arch/x86/kernel/tsc.c 		set_cyc2ns_scale(tsc_khz, freq->policy->cpu, rdtsc());
tsc_khz          1262 arch/x86/kernel/tsc.c 	res = art_ns * tsc_khz;
tsc_khz          1263 arch/x86/kernel/tsc.c 	tmp = rem * tsc_khz;
tsc_khz          1337 arch/x86/kernel/tsc.c 	if (abs(tsc_khz - freq) > tsc_khz/100)
tsc_khz          1340 arch/x86/kernel/tsc.c 	tsc_khz = freq;
tsc_khz          1342 arch/x86/kernel/tsc.c 		(unsigned long)tsc_khz / 1000,
tsc_khz          1343 arch/x86/kernel/tsc.c 		(unsigned long)tsc_khz % 1000);
tsc_khz          1350 arch/x86/kernel/tsc.c 		set_cyc2ns_scale(tsc_khz, cpu, tsc_stop);
tsc_khz          1358 arch/x86/kernel/tsc.c 	clocksource_register_khz(&clocksource_tsc, tsc_khz);
tsc_khz          1366 arch/x86/kernel/tsc.c 	if (!boot_cpu_has(X86_FEATURE_TSC) || !tsc_khz)
tsc_khz          1385 arch/x86/kernel/tsc.c 		clocksource_register_khz(&clocksource_tsc, tsc_khz);
tsc_khz          1403 arch/x86/kernel/tsc.c 	WARN_ON(cpu_khz || tsc_khz);
tsc_khz          1407 arch/x86/kernel/tsc.c 		tsc_khz = x86_platform.calibrate_tsc();
tsc_khz          1419 arch/x86/kernel/tsc.c 	if (tsc_khz == 0)
tsc_khz          1420 arch/x86/kernel/tsc.c 		tsc_khz = cpu_khz;
tsc_khz          1421 arch/x86/kernel/tsc.c 	else if (abs(cpu_khz - tsc_khz) * 10 > tsc_khz)
tsc_khz          1422 arch/x86/kernel/tsc.c 		cpu_khz = tsc_khz;
tsc_khz          1424 arch/x86/kernel/tsc.c 	if (tsc_khz == 0)
tsc_khz          1431 arch/x86/kernel/tsc.c 	if (cpu_khz != tsc_khz) {
tsc_khz          1433 arch/x86/kernel/tsc.c 			(unsigned long)tsc_khz / KHZ,
tsc_khz          1434 arch/x86/kernel/tsc.c 			(unsigned long)tsc_khz % KHZ);
tsc_khz          1441 arch/x86/kernel/tsc.c 	u64 lpj = (u64)tsc_khz * KHZ;
tsc_khz          1483 arch/x86/kernel/tsc.c 	if (!tsc_khz) {
tsc_khz          1511 arch/x86/kernel/tsc.c 	clocksource_register_khz(&clocksource_tsc_early, tsc_khz);
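
The tsc.c entries at lines 898-902 and 1419-1424 of that file repeat one small decision: take the platform's TSC calibration, fall back to cpu_khz if it returned 0, and if the two results disagree by more than 10% trust the TSC value and overwrite cpu_khz. A standalone sketch of just that reconciliation step; the two calibration callbacks are stubbed with assumed example values rather than real measurements:

#include <stdio.h>

/* Stand-ins for x86_platform.calibrate_tsc()/calibrate_cpu(); the return
 * values are assumptions chosen so the 10% check below triggers. */
static unsigned int calibrate_tsc(void) { return 2400000; }	/* kHz */
static unsigned int calibrate_cpu(void) { return 2112000; }	/* kHz */

int main(void)
{
	unsigned int cpu_khz = calibrate_cpu();
	unsigned int tsc_khz = calibrate_tsc();
	unsigned int diff = cpu_khz > tsc_khz ? cpu_khz - tsc_khz
					      : tsc_khz - cpu_khz;

	/* Reconciliation as in arch/x86/kernel/tsc.c: no TSC result means
	 * reuse the CPU calibration; a disagreement of more than 10% means
	 * the CPU calibration is considered off and is replaced. */
	if (tsc_khz == 0)
		tsc_khz = cpu_khz;
	else if ((unsigned long long)diff * 10 > tsc_khz)
		cpu_khz = tsc_khz;

	printf("cpu_khz=%u tsc_khz=%u\n", cpu_khz, tsc_khz);
	return 0;
}
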
tsc_khz           235 arch/x86/kernel/tsc_sync.c 	end = start + (cycles_t) tsc_khz * timeout;
tsc_khz          7734 arch/x86/kvm/vmx/vmx.c 		if (tsc_khz)
tsc_khz          7735 arch/x86/kvm/vmx/vmx.c 			use_timer_freq = (u64)tsc_khz * 1000;
tsc_khz          1772 arch/x86/kvm/x86.c 		if (user_tsc_khz > tsc_khz) {
tsc_khz          1784 arch/x86/kvm/x86.c 				user_tsc_khz, tsc_khz);
tsc_khz          1820 arch/x86/kvm/x86.c 	thresh_lo = adjust_tsc_khz(tsc_khz, -tsc_tolerance_ppm);
tsc_khz          1821 arch/x86/kvm/x86.c 	thresh_hi = adjust_tsc_khz(tsc_khz, tsc_tolerance_ppm);
tsc_khz          4440 arch/x86/kvm/x86.c 			user_tsc_khz = tsc_khz;
tsc_khz          6985 arch/x86/kvm/x86.c 		khz = tsc_khz;
tsc_khz          7004 arch/x86/kvm/x86.c 		per_cpu(cpu_tsc_khz, cpu) = tsc_khz;
tsc_khz          7005 arch/x86/kvm/x86.c 	kvm_max_guest_tsc_khz = tsc_khz;
tsc_khz          7131 arch/x86/kvm/x86.c 	max_tsc_khz = tsc_khz;
tsc_khz          9411 arch/x86/kvm/x86.c 			      __scale_tsc(kvm_max_tsc_scaling_ratio, tsc_khz));
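
In the kvm/x86.c entries, lines 1820-1821 of that file derive a lower and an upper bound around the host tsc_khz from a tolerance given in parts per million, and a requested guest frequency outside that window is what pushes KVM toward TSC scaling. The body of adjust_tsc_khz() is not part of this listing; the sketch below fills it in with the usual ppm-window arithmetic as an assumption, only to make the threshold check concrete:

#include <stdint.h>
#include <stdio.h>

/* Assumed implementation: scale a kHz value by (1,000,000 + ppm) / 1,000,000.
 * adjust_tsc_khz() in arch/x86/kvm/x86.c plays this role; its exact body is
 * not shown in the listing above. */
static uint32_t adjust_tsc_khz(uint32_t khz, int32_t ppm)
{
	uint64_t v = (uint64_t)khz * (uint64_t)(1000000 + ppm);
	return (uint32_t)(v / 1000000);
}

int main(void)
{
	uint32_t tsc_khz = 2400000;		/* example host TSC frequency */
	int32_t tsc_tolerance_ppm = 250;	/* example tolerance value */
	uint32_t user_tsc_khz = 2399500;	/* requested guest frequency */

	/* Same window computation as x86.c lines 1820-1821 above. */
	uint32_t thresh_lo = adjust_tsc_khz(tsc_khz, -tsc_tolerance_ppm);
	uint32_t thresh_hi = adjust_tsc_khz(tsc_khz, tsc_tolerance_ppm);

	if (user_tsc_khz < thresh_lo || user_tsc_khz > thresh_hi)
		printf("outside [%u, %u] kHz: TSC scaling needed\n",
		       thresh_lo, thresh_hi);
	else
		printf("within [%u, %u] kHz: host TSC rate usable as-is\n",
		       thresh_lo, thresh_hi);
	return 0;
}
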
tsc_khz           101 drivers/acpi/acpi_lpit.c 				lpit_native->counter_frequency : tsc_khz * 1000;
tsc_khz          7497 drivers/gpu/drm/i915/intel_pm.c 		max_ia_freq = tsc_khz;
tsc_khz            20 drivers/misc/sgi-gru/gruhandles.c #define GRU_OPERATION_TIMEOUT	((cycles_t) tsc_khz*10*1000)
tsc_khz            21 drivers/misc/sgi-gru/gruhandles.c #define CLKS2NSEC(c)		((c) * 1000000 / tsc_khz)
tsc_khz           746 drivers/platform/x86/intel_pmc_core.c 		do_div(pcstate_count, tsc_khz);
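
Finally, the gruhandles.c macros at lines 20-21 of that file (and the DMAR timeout at line 9 of the iommu header) show both directions of the same conversion: tsc_khz * 10 * 1000 turns a 10-second budget into TSC cycles, and c * 1000000 / tsc_khz turns cycles back into nanoseconds. A small sketch of both directions; read_cycles() is a hypothetical stand-in for rdtsc(), since this is not kernel context:

#include <stdint.h>
#include <stdio.h>

typedef uint64_t cycles_t;

static unsigned int tsc_khz = 2400000;	/* example: 2.4 GHz TSC */

/* Seconds to cycles: tsc_khz is cycles per millisecond, so kHz * 1000 is
 * cycles per second (the DMAR/GRU timeouts above use s = 10). */
#define SECS_TO_CLKS(s)	((cycles_t)tsc_khz * 1000 * (s))

/* Cycles to nanoseconds, as in CLKS2NSEC() in gruhandles.c. */
#define CLKS2NSEC(c)	((c) * 1000000 / tsc_khz)

/* Hypothetical cycle counter; pretends 1200 cycles pass per call. */
static cycles_t read_cycles(void)
{
	static cycles_t fake;
	return fake += 1200;
}

int main(void)
{
	cycles_t timeout = SECS_TO_CLKS(10);	/* same 10 s budget as above */
	cycles_t start = read_cycles();
	cycles_t now = read_cycles();

	printf("10 s timeout = %llu cycles\n", (unsigned long long)timeout);
	printf("elapsed      = %llu ns\n",
	       (unsigned long long)CLKS2NSEC(now - start));
	printf("timed out: %s\n", (now - start) > timeout ? "yes" : "no");
	return 0;
}
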