midr 114 arch/arm/include/asm/cputype.h struct proc_info_list *lookup_processor(u32 midr);
midr 169 arch/arm/include/asm/kvm_host.h u32 midr;
midr 675 arch/arm/kernel/setup.c struct proc_info_list *lookup_processor(u32 midr)
midr 677 arch/arm/kernel/setup.c struct proc_info_list *list = lookup_processor_type(midr);
midr 681 arch/arm/kernel/setup.c smp_processor_id(), midr);
midr 691 arch/arm/kernel/setup.c unsigned int midr = read_cpuid_id();
midr 692 arch/arm/kernel/setup.c struct proc_info_list *list = lookup_processor(midr);
midr 709 arch/arm/kernel/setup.c list->cpu_name, midr, midr & 15,
midr 96 arch/arm/kernel/smp_tlb.c unsigned int midr = read_cpuid_id();
midr 125 arch/arm/kernel/smp_tlb.c if ((midr & 0xff0ffff0) == 0x420f00f0 && midr <= 0x420f00f2) {
midr 127 arch/arm/kernel/smp_tlb.c } else if ((midr & 0xff0ffff0) == 0x410fc0f0 && midr < 0x412fc0f2) {
midr 129 arch/arm/kernel/smp_tlb.c } else if ((midr & 0xff0ffff0) == 0x410fc0f0 && midr < 0x412fc0f4) {
midr 136 arch/arm/kernel/smp_tlb.c } else if ((midr & 0xff0ffff0) == 0x410fc0f0 && midr < 0x413fc0f3) {
midr 143 arch/arm/kernel/smp_tlb.c } else if ((midr & 0xff0ffff0) == 0x410fc0f0 && midr < 0x414fc0f0) {
midr 70 arch/arm/kvm/hyp/switch.c write_sysreg(vcpu->arch.midr, VPIDR);
midr 49 arch/arm/kvm/reset.c vcpu->arch.midr = read_cpuid_id();
midr 25 arch/arm64/include/asm/cputype.h #define MIDR_REVISION(midr) ((midr) & MIDR_REVISION_MASK)
midr 28 arch/arm64/include/asm/cputype.h #define MIDR_PARTNUM(midr) \
midr 29 arch/arm64/include/asm/cputype.h (((midr) & MIDR_PARTNUM_MASK) >> MIDR_PARTNUM_SHIFT)
midr 32 arch/arm64/include/asm/cputype.h #define MIDR_ARCHITECTURE(midr) \
midr 33 arch/arm64/include/asm/cputype.h (((midr) & MIDR_ARCHITECTURE_MASK) >> MIDR_ARCHITECTURE_SHIFT)
midr 36 arch/arm64/include/asm/cputype.h #define MIDR_VARIANT(midr) \
midr 37 arch/arm64/include/asm/cputype.h (((midr) & MIDR_VARIANT_MASK) >> MIDR_VARIANT_SHIFT)
midr 40 arch/arm64/include/asm/cputype.h #define MIDR_IMPLEMENTOR(midr) \
midr 41 arch/arm64/include/asm/cputype.h (((midr) & MIDR_IMPLEMENTOR_MASK) >> MIDR_IMPLEMENTOR_SHIFT)
midr 156 arch/arm64/include/asm/cputype.h static inline bool midr_is_cpu_model_range(u32 midr, u32 model, u32 rv_min,
midr 159 arch/arm64/include/asm/cputype.h u32 _model = midr & MIDR_CPU_MODEL_MASK;
midr 160 arch/arm64/include/asm/cputype.h u32 rv = midr & (MIDR_REVISION_MASK | MIDR_VARIANT_MASK);
midr 165 arch/arm64/include/asm/cputype.h static inline bool is_midr_in_range(u32 midr, struct midr_range const *range)
midr 167 arch/arm64/include/asm/cputype.h return midr_is_cpu_model_range(midr, range->model,
midr 172 arch/arm64/include/asm/cputype.h is_midr_in_range_list(u32 midr, struct midr_range const *ranges)
midr 175 arch/arm64/include/asm/cputype.h if (is_midr_in_range(midr, ranges++))
midr 21 arch/arm64/kernel/cpu_errata.c u32 midr = read_cpuid_id(), revidr;
midr 24 arch/arm64/kernel/cpu_errata.c if (!is_midr_in_range(midr, &entry->midr_range))
midr 27 arch/arm64/kernel/cpu_errata.c midr &= MIDR_REVISION_MASK | MIDR_VARIANT_MASK;
midr 30 arch/arm64/kernel/cpu_errata.c if (midr == fix->midr_rv && (revidr & fix->revidr_mask))
midr 222 arch/arm64/kernel/cpu_errata.c u32 midr = read_cpuid_id();
midr 267 arch/arm64/kernel/cpu_errata.c if (((midr & MIDR_CPU_MODEL_MASK) == MIDR_QCOM_FALKOR) ||
midr 268 arch/arm64/kernel/cpu_errata.c ((midr & MIDR_CPU_MODEL_MASK) == MIDR_QCOM_FALKOR_V1))
midr 511 arch/arm64/kernel/cpu_errata.c u32 midr = read_cpuid_id();
midr 516 arch/arm64/kernel/cpu_errata.c return is_midr_in_range(midr, &range) && is_kernel_in_hyp_mode();
midr 666 arch/arm64/kernel/cpu_errata.c u32 midr = read_cpuid_id();
midr 671 arch/arm64/kernel/cpu_errata.c return is_midr_in_range(midr, &range) && has_dic;
midr 898 arch/arm64/kernel/cpufeature.c u32 midr = read_cpuid_id();
midr 901 arch/arm64/kernel/cpufeature.c return midr_is_cpu_model_range(midr, MIDR_THUNDERX,
midr 134 arch/arm64/kernel/cpuinfo.c u32 midr = cpuinfo->reg_midr;
midr 144 arch/arm64/kernel/cpuinfo.c MIDR_REVISION(midr), COMPAT_ELF_PLATFORM);
midr 175 arch/arm64/kernel/cpuinfo.c MIDR_IMPLEMENTOR(midr));
midr 177 arch/arm64/kernel/cpuinfo.c seq_printf(m, "CPU variant\t: 0x%x\n", MIDR_VARIANT(midr));
midr 178 arch/arm64/kernel/cpuinfo.c seq_printf(m, "CPU part\t: 0x%03x\n", MIDR_PARTNUM(midr));
midr 179 arch/arm64/kernel/cpuinfo.c seq_printf(m, "CPU revision\t: %d\n\n", MIDR_REVISION(midr));
midr 237 arch/arm64/kernel/cpuinfo.c CPUREGS_ATTR_RO(midr_el1, midr);
midr 17 arch/c6x/platforms/emif.c u32 midr;
midr 246 drivers/firmware/efi/cper-arm.c printk("%sMIDR: 0x%016llx\n", pfx, proc->midr);
midr 401 include/linux/cper.h u64 midr;
midr 179 include/ras/ras_event.h __field(u64, midr)
midr 194 include/ras/ras_event.h __entry->midr = proc->midr;
midr 206 include/ras/ras_event.h __entry->affinity, __entry->mpidr, __entry->midr,
midr 21 tools/perf/arch/arm64/util/header.c u64 midr = 0;
midr 53 tools/perf/arch/arm64/util/header.c midr = strtoul(buf, NULL, 16);
midr 54 tools/perf/arch/arm64/util/header.c midr &= (~(MIDR_VARIANT_MASK | MIDR_REVISION_MASK));
midr 55 tools/perf/arch/arm64/util/header.c scnprintf(buf, MIDR_SIZE, "0x%016lx", midr);
midr 60 tools/perf/arch/arm64/util/header.c if (!midr) {