Lines matching references to 'c' (struct cpuinfo_x86 *) in arch/x86/kernel/cpu/intel.c
28 static void early_init_intel(struct cpuinfo_x86 *c) in early_init_intel() argument
33 if (c->x86 > 6 || (c->x86 == 6 && c->x86_model >= 0xd)) { in early_init_intel()
36 c->cpuid_level = cpuid_eax(0); in early_init_intel()
37 get_cpu_cap(c); in early_init_intel()
41 if ((c->x86 == 0xf && c->x86_model >= 0x03) || in early_init_intel()
42 (c->x86 == 0x6 && c->x86_model >= 0x0e)) in early_init_intel()
43 set_cpu_cap(c, X86_FEATURE_CONSTANT_TSC); in early_init_intel()
45 if (c->x86 >= 6 && !cpu_has(c, X86_FEATURE_IA64)) { in early_init_intel()
51 rdmsr(MSR_IA32_UCODE_REV, lower_word, c->microcode); in early_init_intel()
62 if (c->x86 == 6 && c->x86_model == 0x1c && c->x86_mask <= 2 && in early_init_intel()
63 c->microcode < 0x20e) { in early_init_intel()
65 clear_cpu_cap(c, X86_FEATURE_PSE); in early_init_intel()
69 set_cpu_cap(c, X86_FEATURE_SYSENTER32); in early_init_intel()
72 if (c->x86 == 15 && c->x86_cache_alignment == 64) in early_init_intel()
73 c->x86_cache_alignment = 128; in early_init_intel()
77 if (c->x86 == 0xF && c->x86_model == 0x3 in early_init_intel()
78 && (c->x86_mask == 0x3 || c->x86_mask == 0x4)) in early_init_intel()
79 c->x86_phys_bits = 36; in early_init_intel()
88 if (c->x86_power & (1 << 8)) { in early_init_intel()
89 set_cpu_cap(c, X86_FEATURE_CONSTANT_TSC); in early_init_intel()
90 set_cpu_cap(c, X86_FEATURE_NONSTOP_TSC); in early_init_intel()
96 if (c->x86 == 6) { in early_init_intel()
97 switch (c->x86_model) { in early_init_intel()
101 set_cpu_cap(c, X86_FEATURE_NONSTOP_TSC_S3); in early_init_intel()
118 if (c->x86 == 6 && c->x86_model < 15) in early_init_intel()
119 clear_cpu_cap(c, X86_FEATURE_PAT); in early_init_intel()
130 if (c->x86 == 15) in early_init_intel()
140 if (c->x86 > 6 || (c->x86 == 6 && c->x86_model >= 0xd)) { in early_init_intel()
159 if (c->x86 == 5 && c->x86_model == 9) { in early_init_intel()
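Nearly every hit in the early_init_intel() group above compares c->x86, c->x86_model and c->x86_mask, i.e. the CPU family, model and stepping. A minimal standalone sketch (user-space, not kernel code) of where those fields come from, assuming GCC/Clang's <cpuid.h>; the adjustment for the extended family/model fields follows the Intel SDM:

#include <stdio.h>
#include <cpuid.h>

int main(void)
{
	unsigned int eax, ebx, ecx, edx;

	if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
		return 1;

	unsigned int stepping    = eax & 0xf;		/* ~ c->x86_mask  */
	unsigned int base_model  = (eax >> 4) & 0xf;
	unsigned int base_family = (eax >> 8) & 0xf;
	unsigned int family = base_family;
	unsigned int model  = base_model;

	/* Extended fields only apply to base family 0x6 and 0xF. */
	if (base_family == 0xf)
		family += (eax >> 20) & 0xff;
	if (base_family == 0xf || base_family == 0x6)
		model |= ((eax >> 16) & 0xf) << 4;

	printf("family 0x%x model 0x%x stepping 0x%x\n",
	       family, model, stepping);
	return 0;
}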
185 static void intel_smp_check(struct cpuinfo_x86 *c) in intel_smp_check() argument
188 if (!c->cpu_index) in intel_smp_check()
194 if (c->x86 == 5 && in intel_smp_check()
195 c->x86_mask >= 1 && c->x86_mask <= 4 && in intel_smp_check()
196 c->x86_model <= 3) { in intel_smp_check()
213 static void intel_workarounds(struct cpuinfo_x86 *c) in intel_workarounds() argument
222 clear_cpu_bug(c, X86_BUG_F00F); in intel_workarounds()
223 if (!paravirt_enabled() && c->x86 == 5 && c->x86_model < 9) { in intel_workarounds()
226 set_cpu_bug(c, X86_BUG_F00F); in intel_workarounds()
238 if ((c->x86<<8 | c->x86_model<<4 | c->x86_mask) < 0x633) in intel_workarounds()
239 clear_cpu_cap(c, X86_FEATURE_SEP); in intel_workarounds()
248 set_cpu_cap(c, X86_FEATURE_PAE); in intel_workarounds()
256 if ((c->x86 == 15) && (c->x86_model == 1) && (c->x86_mask == 1)) { in intel_workarounds()
271 if (cpu_has_apic && (c->x86<<8 | c->x86_model<<4) == 0x520 && in intel_workarounds()
272 (c->x86_mask < 0x6 || c->x86_mask == 0xb)) in intel_workarounds()
273 set_cpu_bug(c, X86_BUG_11AP); in intel_workarounds()
280 switch (c->x86) { in intel_workarounds()
294 intel_smp_check(c); in intel_workarounds()
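The SEP check at line 238 above folds family, model and stepping into one packed integer so a single comparison can say "older than family 6, model 3, stepping 3" (early Pentium Pro parts advertise SEP without a usable SYSENTER). A small standalone illustration of that packing, with hypothetical sample signatures:

#include <stdio.h>

static unsigned int x86_sig(unsigned int family, unsigned int model,
			    unsigned int stepping)
{
	/* Same packing as (c->x86<<8 | c->x86_model<<4 | c->x86_mask). */
	return family << 8 | model << 4 | stepping;
}

int main(void)
{
	/* (6,1,1) falls below the 0x633 cut-off, (6,3,3) does not. */
	printf("0x%x %s 0x633\n", x86_sig(6, 1, 1),
	       x86_sig(6, 1, 1) < 0x633 ? "<" : ">=");
	printf("0x%x %s 0x633\n", x86_sig(6, 3, 3),
	       x86_sig(6, 3, 3) < 0x633 ? "<" : ">=");
	return 0;
}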
297 static void intel_workarounds(struct cpuinfo_x86 *c) in intel_workarounds() argument
302 static void srat_detect_node(struct cpuinfo_x86 *c) in srat_detect_node() argument
322 static int intel_num_cpu_cores(struct cpuinfo_x86 *c) in intel_num_cpu_cores() argument
326 if (c->cpuid_level < 4) in intel_num_cpu_cores()
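The intel_num_cpu_cores() hits above gate on cpuid_level < 4 because the core count is derived from CPUID leaf 4 (deterministic cache parameters), whose EAX[31:26] reports the maximum number of addressable core IDs per package minus one. A user-space reconstruction of that derivation, assuming <cpuid.h> from a recent GCC/Clang (not the kernel function itself):

#include <stdio.h>
#include <cpuid.h>

int main(void)
{
	unsigned int eax, ebx, ecx, edx;

	/* Leaf 4 takes a sub-leaf index; 0 selects the first cache level. */
	if (!__get_cpuid_count(4, 0, &eax, &ebx, &ecx, &edx))
		return 1;

	/* Cache type 0 in EAX[4:0] means leaf 4 carries no information. */
	if ((eax & 0x1f) == 0) {
		printf("leaf 4 empty, assume 1 core\n");
		return 0;
	}

	printf("max cores per package: %u\n", ((eax >> 26) & 0x3f) + 1);
	return 0;
}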
337 static void detect_vmx_virtcap(struct cpuinfo_x86 *c) in detect_vmx_virtcap() argument
349 clear_cpu_cap(c, X86_FEATURE_TPR_SHADOW); in detect_vmx_virtcap()
350 clear_cpu_cap(c, X86_FEATURE_VNMI); in detect_vmx_virtcap()
351 clear_cpu_cap(c, X86_FEATURE_FLEXPRIORITY); in detect_vmx_virtcap()
352 clear_cpu_cap(c, X86_FEATURE_EPT); in detect_vmx_virtcap()
353 clear_cpu_cap(c, X86_FEATURE_VPID); in detect_vmx_virtcap()
358 set_cpu_cap(c, X86_FEATURE_TPR_SHADOW); in detect_vmx_virtcap()
360 set_cpu_cap(c, X86_FEATURE_VNMI); in detect_vmx_virtcap()
367 set_cpu_cap(c, X86_FEATURE_FLEXPRIORITY); in detect_vmx_virtcap()
369 set_cpu_cap(c, X86_FEATURE_EPT); in detect_vmx_virtcap()
371 set_cpu_cap(c, X86_FEATURE_VPID); in detect_vmx_virtcap()
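A hedged reconstruction of how the detect_vmx_virtcap() set/clear calls above are driven, not a verbatim copy of the file: the VMX control MSRs report which execution controls may be enabled, with the high 32 bits giving the "allowed-1" settings. It assumes the usual kernel helpers (rdmsr(), set_cpu_cap()) and MSR constants from <asm/msr-index.h>; the bit positions follow the Intel SDM and are illustrative rather than the file's local #define names:

static void detect_vmx_virtcap_sketch(struct cpuinfo_x86 *c)
{
	u32 lo, hi, ctl, ctl2;

	clear_cpu_cap(c, X86_FEATURE_TPR_SHADOW);
	clear_cpu_cap(c, X86_FEATURE_VNMI);
	clear_cpu_cap(c, X86_FEATURE_FLEXPRIORITY);
	clear_cpu_cap(c, X86_FEATURE_EPT);
	clear_cpu_cap(c, X86_FEATURE_VPID);

	rdmsr(MSR_IA32_VMX_PROCBASED_CTLS, lo, hi);
	ctl = hi;				/* allowed-1 settings */
	if (ctl & (1u << 21))			/* use TPR shadow */
		set_cpu_cap(c, X86_FEATURE_TPR_SHADOW);
	if (ctl & (1u << 22))			/* NMI-window exiting */
		set_cpu_cap(c, X86_FEATURE_VNMI);
	if (ctl & (1u << 31)) {			/* secondary controls present */
		rdmsr(MSR_IA32_VMX_PROCBASED_CTLS2, lo, hi);
		ctl2 = hi;
		/* FLEXPRIORITY = TPR shadow + virtualize APIC accesses */
		if ((ctl2 & (1u << 0)) && (ctl & (1u << 21)))
			set_cpu_cap(c, X86_FEATURE_FLEXPRIORITY);
		if (ctl2 & (1u << 1))		/* EPT */
			set_cpu_cap(c, X86_FEATURE_EPT);
		if (ctl2 & (1u << 5))		/* VPID */
			set_cpu_cap(c, X86_FEATURE_VPID);
	}
}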
375 static void init_intel_energy_perf(struct cpuinfo_x86 *c) in init_intel_energy_perf() argument
383 if (!cpu_has(c, X86_FEATURE_EPB)) in init_intel_energy_perf()
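A sketch of the init_intel_energy_perf() logic the two hits above belong to, reconstructed from memory rather than copied; the "performance" (0) and "normal" (6) bias values are assumptions: if firmware left IA32_ENERGY_PERF_BIAS at its most aggressive setting, the kernel warns once and nudges it to a neutral default.

static void init_intel_energy_perf_sketch(struct cpuinfo_x86 *c)
{
	u64 epb;

	if (!cpu_has(c, X86_FEATURE_EPB))
		return;

	rdmsrl(MSR_IA32_ENERGY_PERF_BIAS, epb);
	if ((epb & 0xf) != 0)			/* 0 == "performance" (assumed) */
		return;

	pr_warn_once("ENERGY_PERF_BIAS: set to 'normal', was 'performance'\n");
	epb = (epb & ~0xfULL) | 6;		/* 6 == "normal" (assumed) */
	wrmsrl(MSR_IA32_ENERGY_PERF_BIAS, epb);
}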
396 static void intel_bsp_resume(struct cpuinfo_x86 *c) in intel_bsp_resume() argument
402 init_intel_energy_perf(c); in intel_bsp_resume()
405 static void init_intel(struct cpuinfo_x86 *c) in init_intel() argument
409 early_init_intel(c); in init_intel()
411 intel_workarounds(c); in init_intel()
418 detect_extended_topology(c); in init_intel()
420 if (!cpu_has(c, X86_FEATURE_XTOPOLOGY)) { in init_intel()
425 c->x86_max_cores = intel_num_cpu_cores(c); in init_intel()
427 detect_ht(c); in init_intel()
431 l2 = init_intel_cacheinfo(c); in init_intel()
435 cpu_detect_cache_sizes(c); in init_intel()
436 l2 = c->x86_cache_size; in init_intel()
439 if (c->cpuid_level > 9) { in init_intel()
443 set_cpu_cap(c, X86_FEATURE_ARCH_PERFMON); in init_intel()
447 set_cpu_cap(c, X86_FEATURE_LFENCE_RDTSC); in init_intel()
452 set_cpu_cap(c, X86_FEATURE_BTS); in init_intel()
454 set_cpu_cap(c, X86_FEATURE_PEBS); in init_intel()
457 if (c->x86 == 6 && cpu_has_clflush && in init_intel()
458 (c->x86_model == 29 || c->x86_model == 46 || c->x86_model == 47)) in init_intel()
459 set_cpu_bug(c, X86_BUG_CLFLUSH_MONITOR); in init_intel()
462 if (c->x86 == 15) in init_intel()
463 c->x86_cache_alignment = c->x86_clflush_size * 2; in init_intel()
464 if (c->x86 == 6) in init_intel()
465 set_cpu_cap(c, X86_FEATURE_REP_GOOD); in init_intel()
472 if (c->x86 == 6) { in init_intel()
475 switch (c->x86_model) { in init_intel()
486 else if (c->x86_mask == 0 || c->x86_mask == 5) in init_intel()
497 strcpy(c->x86_model_id, p); in init_intel()
500 if (c->x86 == 15) in init_intel()
501 set_cpu_cap(c, X86_FEATURE_P4); in init_intel()
502 if (c->x86 == 6) in init_intel()
503 set_cpu_cap(c, X86_FEATURE_P3); in init_intel()
507 srat_detect_node(c); in init_intel()
509 if (cpu_has(c, X86_FEATURE_VMX)) in init_intel()
510 detect_vmx_virtcap(c); in init_intel()
512 init_intel_energy_perf(c); in init_intel()
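The cache-alignment hits near the end of the init_intel() group (lines 462-463) double the alignment on family 15. A standalone sketch of where both numbers come from: the CLFLUSH line size is CPUID.1:EBX[15:8] in 8-byte units, and the doubling is the usual accommodation for the Pentium 4 adjacent-line prefetch, which makes 128 bytes the effective false-sharing unit:

#include <stdio.h>
#include <cpuid.h>

int main(void)
{
	unsigned int eax, ebx, ecx, edx;

	if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
		return 1;

	unsigned int family = (eax >> 8) & 0xf;
	unsigned int clflush_size = ((ebx >> 8) & 0xff) * 8;
	unsigned int cache_alignment = clflush_size;

	if (family == 0xf)			/* mirrors the c->x86 == 15 check */
		cache_alignment = clflush_size * 2;

	printf("clflush %u bytes, alignment %u bytes\n",
	       clflush_size, cache_alignment);
	return 0;
}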
516 static unsigned int intel_size_cache(struct cpuinfo_x86 *c, unsigned int size) in intel_size_cache() argument
524 if ((c->x86 == 6) && (c->x86_model == 11) && (size == 0)) in intel_size_cache()
531 if ((c->x86 == 5) && (c->x86_model == 9)) in intel_size_cache()
682 static void intel_detect_tlb(struct cpuinfo_x86 *c) in intel_detect_tlb() argument
688 if (c->cpuid_level < 2) in intel_detect_tlb()
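intel_detect_tlb() bails out below cpuid_level 2 because it walks the CPUID leaf 2 descriptor bytes. A standalone sketch of that walk (user-space, assuming <cpuid.h>): the low byte of EAX gives the number of times the leaf must be queried, every other byte of EAX/EBX/ECX/EDX is a descriptor, and a register with bit 31 set carries none. Mapping each descriptor byte to a concrete TLB or cache meaning needs the lookup table from the SDM, which is omitted here.

#include <stdio.h>
#include <cpuid.h>

int main(void)
{
	unsigned int eax, ebx, ecx, edx;

	if (!__get_cpuid(2, &eax, &ebx, &ecx, &edx))
		return 1;				/* leaf 2 not supported */

	unsigned int passes = eax & 0xff;		/* AL = required iterations */

	for (unsigned int i = 0; i < passes; i++) {
		if (i)
			__cpuid(2, eax, ebx, ecx, edx);
		eax &= ~0xffu;				/* drop the iteration count */

		unsigned int regs[4] = { eax, ebx, ecx, edx };
		for (int r = 0; r < 4; r++) {
			if (regs[r] & (1u << 31))	/* bit 31 set: no descriptors */
				continue;
			for (int b = 0; b < 4; b++) {
				unsigned int desc = (regs[r] >> (8 * b)) & 0xff;
				if (desc)
					printf("descriptor 0x%02x\n", desc);
			}
		}
	}
	return 0;
}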