Lines matching references to c (struct cpuinfo_x86 *c) in the AMD CPU setup code, arch/x86/kernel/cpu/amd.c:
72 static void init_amd_k5(struct cpuinfo_x86 *c) in init_amd_k5() argument
84 if (c->x86_model == 9 || c->x86_model == 10) { in init_amd_k5()
91 static void init_amd_k6(struct cpuinfo_x86 *c) in init_amd_k6() argument
97 if (c->x86_model < 6) { in init_amd_k6()
99 if (c->x86_model == 0) { in init_amd_k6()
100 clear_cpu_cap(c, X86_FEATURE_APIC); in init_amd_k6()
101 set_cpu_cap(c, X86_FEATURE_PGE); in init_amd_k6()
106 if (c->x86_model == 6 && c->x86_mask == 1) { in init_amd_k6()
135 if (c->x86_model < 8 || in init_amd_k6()
136 (c->x86_model == 8 && c->x86_mask < 8)) { in init_amd_k6()
155 if ((c->x86_model == 8 && c->x86_mask > 7) || in init_amd_k6()
156 c->x86_model == 9 || c->x86_model == 13) { in init_amd_k6()
177 if (c->x86_model == 10) { in init_amd_k6()
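
The K5/K6 checks above compare c->x86_model and c->x86_mask (the stepping, later renamed x86_stepping), which the kernel decodes from CPUID leaf 1. A minimal user-space sketch of the same decoding, not taken from the kernel source, using the compiler-provided <cpuid.h>:

#include <cpuid.h>
#include <stdio.h>

int main(void)
{
    unsigned int eax, ebx, ecx, edx;

    if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
        return 1;

    /* Base fields of CPUID.1:EAX. */
    unsigned int family   = (eax >> 8) & 0xf;
    unsigned int model    = (eax >> 4) & 0xf;
    unsigned int stepping = eax & 0xf;

    /* On AMD parts the extended family/model fields only apply when the
     * base family is 0xf; this mirrors how c->x86 and c->x86_model end up
     * holding values like 0x10 or 0x15 seen elsewhere in this listing. */
    if (family == 0xf) {
        family += (eax >> 20) & 0xff;
        model  |= ((eax >> 16) & 0xf) << 4;
    }

    printf("family 0x%x model 0x%x stepping 0x%x\n", family, model, stepping);
    return 0;
}
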
185 static void init_amd_k7(struct cpuinfo_x86 *c) in init_amd_k7() argument
195 if (c->x86_model >= 6 && c->x86_model <= 10) { in init_amd_k7()
196 if (!cpu_has(c, X86_FEATURE_XMM)) { in init_amd_k7()
199 set_cpu_cap(c, X86_FEATURE_XMM); in init_amd_k7()
208 if ((c->x86_model == 8 && c->x86_mask >= 1) || (c->x86_model > 8)) { in init_amd_k7()
218 set_cpu_cap(c, X86_FEATURE_K7); in init_amd_k7()
221 if (!c->cpu_index) in init_amd_k7()
229 if ((c->x86_model == 6) && ((c->x86_mask == 0) || in init_amd_k7()
230 (c->x86_mask == 1))) in init_amd_k7()
234 if ((c->x86_model == 7) && (c->x86_mask == 0)) in init_amd_k7()
244 if (((c->x86_model == 6) && (c->x86_mask >= 2)) || in init_amd_k7()
245 ((c->x86_model == 7) && (c->x86_mask >= 1)) || in init_amd_k7()
246 (c->x86_model > 7)) in init_amd_k7()
247 if (cpu_has(c, X86_FEATURE_MP)) in init_amd_k7()
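
cpu_has(c, X86_FEATURE_XMM) and cpu_has(c, X86_FEATURE_MP) above test the kernel's cached capability words; the raw CPUID bits behind them are SSE in leaf 1 EDX bit 25 and the AMD MP-capable flag in leaf 0x80000001 EDX bit 19. A small user-space sketch of the same two tests, my own illustration rather than kernel code:

#include <cpuid.h>
#include <stdio.h>

int main(void)
{
    unsigned int eax, ebx, ecx, edx;

    if (__get_cpuid(1, &eax, &ebx, &ecx, &edx))
        /* CPUID.1:EDX bit 25 = SSE, which the kernel names X86_FEATURE_XMM. */
        printf("SSE: %s\n", (edx & (1u << 25)) ? "yes" : "no");

    if (__get_cpuid(0x80000001, &eax, &ebx, &ecx, &edx))
        /* CPUID Fn8000_0001 EDX bit 19 = MP capable (X86_FEATURE_MP). */
        printf("MP capable: %s\n", (edx & (1u << 19)) ? "yes" : "no");

    return 0;
}
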
292 static void amd_get_topology(struct cpuinfo_x86 *c) in amd_get_topology() argument
308 c->compute_unit_id = ebx & 0xff; in amd_get_topology()
310 } else if (cpu_has(c, X86_FEATURE_NODEID_MSR)) { in amd_get_topology()
324 set_cpu_cap(c, X86_FEATURE_AMD_DCM); in amd_get_topology()
325 cores_per_node = c->x86_max_cores / nodes; in amd_get_topology()
332 c->cpu_core_id %= cores_per_node; in amd_get_topology()
333 c->compute_unit_id %= cus_per_node; in amd_get_topology()
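
amd_get_topology() above takes the compute unit id from the low byte of EBX of CPUID leaf 0x8000001e when topology extensions are available. A hedged user-space sketch; the field widths follow AMD's Fn8000_001E documentation and the printed labels are mine:

#include <cpuid.h>
#include <stdio.h>

int main(void)
{
    unsigned int eax, ebx, ecx, edx;

    /* TOPOEXT is advertised in CPUID Fn8000_0001 ECX bit 22. */
    if (!__get_cpuid(0x80000001, &eax, &ebx, &ecx, &edx) ||
        !(ecx & (1u << 22))) {
        puts("no topology extensions");
        return 0;
    }

    if (!__get_cpuid(0x8000001e, &eax, &ebx, &ecx, &edx))
        return 1;

    /* EAX: extended APIC id; EBX[7:0]: compute unit / core id (line 308
     * above masks the same byte); ECX[7:0]: node id. */
    printf("apicid %u, unit %u, node %u\n", eax, ebx & 0xff, ecx & 0xff);
    return 0;
}
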
342 static void amd_detect_cmp(struct cpuinfo_x86 *c) in amd_detect_cmp() argument
348 bits = c->x86_coreid_bits; in amd_detect_cmp()
350 c->cpu_core_id = c->initial_apicid & ((1 << bits)-1); in amd_detect_cmp()
352 c->phys_proc_id = c->initial_apicid >> bits; in amd_detect_cmp()
354 per_cpu(cpu_llc_id, cpu) = c->phys_proc_id; in amd_detect_cmp()
355 amd_get_topology(c); in amd_detect_cmp()
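
amd_detect_cmp() above splits the initial APIC id into a core id (the low x86_coreid_bits bits) and a physical package id (the remaining high bits), then seeds the last-level-cache id from the package id. The same bit math, with made-up example inputs:

#include <stdio.h>

int main(void)
{
    unsigned int apicid = 0x13;     /* illustrative initial APIC id          */
    unsigned int coreid_bits = 2;   /* illustrative: 4 core ids per package  */

    /* Mirrors lines 348-354: cpu_core_id from the low bits,
     * phys_proc_id (and the initial cpu_llc_id) from the rest. */
    unsigned int core_id = apicid & ((1u << coreid_bits) - 1);
    unsigned int pkg_id  = apicid >> coreid_bits;

    printf("APIC id 0x%x -> core %u in package %u\n", apicid, core_id, pkg_id);
    return 0;
}
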
369 static void srat_detect_node(struct cpuinfo_x86 *c) in srat_detect_node() argument
374 unsigned apicid = c->apicid; in srat_detect_node()
386 x86_cpuinit.fixup_cpu_id(c, node); in srat_detect_node()
408 int ht_nodeid = c->initial_apicid; in srat_detect_node()
421 static void early_init_amd_mc(struct cpuinfo_x86 *c) in early_init_amd_mc() argument
427 if (c->extended_cpuid_level < 0x80000008) in early_init_amd_mc()
432 c->x86_max_cores = (ecx & 0xff) + 1; in early_init_amd_mc()
439 while ((1 << bits) < c->x86_max_cores) in early_init_amd_mc()
443 c->x86_coreid_bits = bits; in early_init_amd_mc()
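
early_init_amd_mc() above reads the core count from CPUID leaf 0x80000008 (ECX[7:0] is the core count minus one) and widens a bit count until it covers that many cores. A user-space sketch of the same computation; the ApicIdCoreIdSize field in ECX[15:12], which the kernel consults first, is included even though that line is not part of this listing:

#include <cpuid.h>
#include <stdio.h>

int main(void)
{
    unsigned int eax, ebx, ecx, edx;

    if (!__get_cpuid(0x80000008, &eax, &ebx, &ecx, &edx))
        return 1;

    unsigned int max_cores = (ecx & 0xff) + 1;  /* as on line 432 */
    unsigned int bits = (ecx >> 12) & 0xf;      /* ApicIdCoreIdSize, may be 0 */

    if (bits == 0)                              /* recompute, as on line 439 */
        while ((1u << bits) < max_cores)
            bits++;

    printf("%u cores, %u core-id bit(s)\n", max_cores, bits);
    return 0;
}
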
447 static void bsp_init_amd(struct cpuinfo_x86 *c) in bsp_init_amd() argument
451 if (c->x86 >= 0xf) { in bsp_init_amd()
469 if (cpu_has(c, X86_FEATURE_CONSTANT_TSC)) { in bsp_init_amd()
471 if (c->x86 > 0x10 || in bsp_init_amd()
472 (c->x86 == 0x10 && c->x86_model >= 0x2)) { in bsp_init_amd()
482 if (c->x86 == 0x15) { in bsp_init_amd()
498 static void early_init_amd(struct cpuinfo_x86 *c) in early_init_amd() argument
500 early_init_amd_mc(c); in early_init_amd()
506 if (c->x86_power & (1 << 8)) { in early_init_amd()
507 set_cpu_cap(c, X86_FEATURE_CONSTANT_TSC); in early_init_amd()
508 set_cpu_cap(c, X86_FEATURE_NONSTOP_TSC); in early_init_amd()
514 set_cpu_cap(c, X86_FEATURE_SYSCALL32); in early_init_amd()
517 if (c->x86 == 5) in early_init_amd()
518 if (c->x86_model == 13 || c->x86_model == 9 || in early_init_amd()
519 (c->x86_model == 8 && c->x86_mask >= 8)) in early_init_amd()
520 set_cpu_cap(c, X86_FEATURE_K6_MTRR); in early_init_amd()
524 if (cpu_has_apic && c->x86 >= 0xf) { in early_init_amd()
528 set_cpu_cap(c, X86_FEATURE_EXTD_APICID); in early_init_amd()
537 set_cpu_cap(c, X86_FEATURE_VMMCALL); in early_init_amd()
540 if (c->x86 == 0x16 && c->x86_model <= 0xf) in early_init_amd()
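
early_init_amd() above turns on CONSTANT_TSC and NONSTOP_TSC when bit 8 of c->x86_power is set; x86_power is EDX of CPUID leaf 0x80000007 (Advanced Power Management), and bit 8 is the invariant-TSC flag. A user-space check of the same bit:

#include <cpuid.h>
#include <stdio.h>

int main(void)
{
    unsigned int eax, ebx, ecx, edx;

    if (!__get_cpuid(0x80000007, &eax, &ebx, &ecx, &edx))
        return 1;

    /* Fn8000_0007 EDX bit 8: the TSC ticks at a constant rate regardless
     * of P-states or C-states (what line 506 tests via c->x86_power). */
    printf("invariant TSC: %s\n", (edx & (1u << 8)) ? "yes" : "no");
    return 0;
}
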
548 static void init_amd_k8(struct cpuinfo_x86 *c) in init_amd_k8() argument
556 set_cpu_cap(c, X86_FEATURE_REP_GOOD); in init_amd_k8()
563 if (c->x86_model < 0x14 && cpu_has(c, X86_FEATURE_LAHF_LM)) { in init_amd_k8()
564 clear_cpu_cap(c, X86_FEATURE_LAHF_LM); in init_amd_k8()
571 if (!c->x86_model_id[0]) in init_amd_k8()
572 strcpy(c->x86_model_id, "Hammer"); in init_amd_k8()
586 static void init_amd_gh(struct cpuinfo_x86 *c) in init_amd_gh() argument
590 if (c == &boot_cpu_data) in init_amd_gh()
617 if (cpu_has_amd_erratum(c, amd_erratum_383)) in init_amd_gh()
618 set_cpu_bug(c, X86_BUG_AMD_TLB_MMATCH); in init_amd_gh()
621 static void init_amd_bd(struct cpuinfo_x86 *c) in init_amd_bd() argument
626 if ((c->x86_model >= 0x10) && (c->x86_model <= 0x1f) && in init_amd_bd()
627 !cpu_has(c, X86_FEATURE_TOPOEXT)) { in init_amd_bd()
632 set_cpu_cap(c, X86_FEATURE_TOPOEXT); in init_amd_bd()
642 if ((c->x86_model >= 0x02) && (c->x86_model < 0x20)) { in init_amd_bd()
650 static void init_amd(struct cpuinfo_x86 *c) in init_amd() argument
654 early_init_amd(c); in init_amd()
660 clear_cpu_cap(c, 0*32+31); in init_amd()
662 if (c->x86 >= 0x10) in init_amd()
663 set_cpu_cap(c, X86_FEATURE_REP_GOOD); in init_amd()
666 c->apicid = hard_smp_processor_id(); in init_amd()
669 if (c->x86 < 6) in init_amd()
670 clear_cpu_cap(c, X86_FEATURE_MCE); in init_amd()
672 switch (c->x86) { in init_amd()
673 case 4: init_amd_k5(c); break; in init_amd()
674 case 5: init_amd_k6(c); break; in init_amd()
675 case 6: init_amd_k7(c); break; in init_amd()
676 case 0xf: init_amd_k8(c); break; in init_amd()
677 case 0x10: init_amd_gh(c); break; in init_amd()
678 case 0x15: init_amd_bd(c); break; in init_amd()
682 if (c->x86 >= 6) in init_amd()
683 set_cpu_bug(c, X86_BUG_FXSAVE_LEAK); in init_amd()
685 cpu_detect_cache_sizes(c); in init_amd()
688 if (c->extended_cpuid_level >= 0x80000008) { in init_amd()
689 amd_detect_cmp(c); in init_amd()
690 srat_detect_node(c); in init_amd()
694 detect_ht(c); in init_amd()
697 init_amd_cacheinfo(c); in init_amd()
699 if (c->x86 >= 0xf) in init_amd()
700 set_cpu_cap(c, X86_FEATURE_K8); in init_amd()
704 set_cpu_cap(c, X86_FEATURE_MFENCE_RDTSC); in init_amd()
711 if (c->x86 > 0x11) in init_amd()
712 set_cpu_cap(c, X86_FEATURE_ARAT); in init_amd()
714 if (cpu_has_amd_erratum(c, amd_erratum_400)) in init_amd()
715 set_cpu_bug(c, X86_BUG_AMD_APIC_C1E); in init_amd()
717 rdmsr_safe(MSR_AMD64_PATCH_LEVEL, &c->microcode, &dummy); in init_amd()
720 if (!cpu_has(c, X86_FEATURE_3DNOWPREFETCH)) in init_amd()
721 if (cpu_has(c, X86_FEATURE_3DNOW) || cpu_has(c, X86_FEATURE_LM)) in init_amd()
722 set_cpu_cap(c, X86_FEATURE_3DNOWPREFETCH); in init_amd()
725 set_cpu_bug(c, X86_BUG_SYSRET_SS_ATTRS); in init_amd()
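
Line 717 above reads the running microcode revision with rdmsr_safe(MSR_AMD64_PATCH_LEVEL, ...); that MSR lives at address 0x0000008b. A user-space analogue, assuming root and the msr driver is loaded (the /dev/cpu/0/msr path comes from that driver; the rest of this sketch is my own):

#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define MSR_AMD64_PATCH_LEVEL 0x0000008b  /* same address the kernel reads */

int main(void)
{
    uint64_t val;
    int fd = open("/dev/cpu/0/msr", O_RDONLY);

    if (fd < 0) {
        perror("open /dev/cpu/0/msr");
        return 1;
    }
    /* The msr driver uses the MSR number as the file offset. */
    if (pread(fd, &val, sizeof(val), MSR_AMD64_PATCH_LEVEL) != sizeof(val)) {
        perror("pread");
        close(fd);
        return 1;
    }
    close(fd);

    printf("microcode patch level: 0x%08x\n", (unsigned int)val);
    return 0;
}

The same value shows up in /proc/cpuinfo as the "microcode" field, which is an easier cross-check than the raw MSR.
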
729 static unsigned int amd_size_cache(struct cpuinfo_x86 *c, unsigned int size) in amd_size_cache() argument
732 if ((c->x86 == 6)) { in amd_size_cache()
734 if (c->x86_model == 3 && c->x86_mask == 0) in amd_size_cache()
737 if (c->x86_model == 4 && in amd_size_cache()
738 (c->x86_mask == 0 || c->x86_mask == 1)) in amd_size_cache()
745 static void cpu_detect_tlb_amd(struct cpuinfo_x86 *c) in cpu_detect_tlb_amd() argument
750 if (c->x86 < 0xf) in cpu_detect_tlb_amd()
753 if (c->extended_cpuid_level < 0x80000006) in cpu_detect_tlb_amd()
765 if (c->x86 == 0xf) { in cpu_detect_tlb_amd()
782 if (c->x86 == 0x15 && c->x86_model <= 0x1f) { in cpu_detect_tlb_amd()
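
cpu_detect_tlb_amd() above bails out on pre-K8 parts and when extended leaf 0x80000006 is missing; that leaf describes the L2 TLB and L2 cache geometry. A hedged sketch of reading it, with the bit layout taken from AMD's Fn8000_0006 documentation (the 12-bit entry counts sit next to 4-bit associativity codes, which are ignored here):

#include <cpuid.h>
#include <stdio.h>

int main(void)
{
    unsigned int eax, ebx, ecx, edx;

    if (!__get_cpuid(0x80000006, &eax, &ebx, &ecx, &edx))
        return 1;

    /* EBX describes the L2 TLB for 4 KB pages, EAX the 2 MB/4 MB one. */
    printf("L2 DTLB 4K entries: %u\n", (ebx >> 16) & 0xfff);
    printf("L2 ITLB 4K entries: %u\n", ebx & 0xfff);
    /* ECX[31:16] is the L2 cache size in KB. */
    printf("L2 cache: %u KB\n", (ecx >> 16) & 0xffff);
    return 0;
}
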