Searched refs:msr (Results 1 – 200 of 259) sorted by relevance

/linux-4.4.14/arch/x86/kernel/cpu/
Dperfctr-watchdog.c44 static inline unsigned int nmi_perfctr_msr_to_bit(unsigned int msr) in nmi_perfctr_msr_to_bit() argument
49 if (msr >= MSR_F15H_PERF_CTR) in nmi_perfctr_msr_to_bit()
50 return (msr - MSR_F15H_PERF_CTR) >> 1; in nmi_perfctr_msr_to_bit()
51 return msr - MSR_K7_PERFCTR0; in nmi_perfctr_msr_to_bit()
54 return msr - MSR_ARCH_PERFMON_PERFCTR0; in nmi_perfctr_msr_to_bit()
58 return msr - MSR_P6_PERFCTR0; in nmi_perfctr_msr_to_bit()
60 return msr - MSR_KNC_PERFCTR0; in nmi_perfctr_msr_to_bit()
62 return msr - MSR_P4_BPU_PERFCTR0; in nmi_perfctr_msr_to_bit()
72 static inline unsigned int nmi_evntsel_msr_to_bit(unsigned int msr) in nmi_evntsel_msr_to_bit() argument
77 if (msr >= MSR_F15H_PERF_CTL) in nmi_evntsel_msr_to_bit()
[all …]
Dperf_event_msr.c67 u64 msr; member
78 static struct perf_msr msr[] = { variable
131 if (!msr[cfg].attr) in msr_event_init()
135 event->hw.event_base = msr[cfg].msr; in msr_event_init()
226 if (!msr[i].test(i) || rdmsrl_safe(msr[i].msr, &val)) in msr_init()
227 msr[i].attr = NULL; in msr_init()
232 if (msr[i].attr) in msr_init()
233 events_attrs[j++] = &msr[i].attr->attr.attr; in msr_init()
Dperf_event_intel_cstate.c110 u64 msr; member
408 event->hw.event_base = core_msr[cfg].msr; in cstate_pmu_event_init()
414 event->hw.event_base = pkg_msr[cfg].msr; in cstate_pmu_event_init()
575 static bool cstate_probe_msr(struct perf_cstate_msr *msr, in cstate_probe_msr() argument
584 if (!msr[i].test(i) || rdmsrl_safe(msr[i].msr, &val)) in cstate_probe_msr()
585 msr[i].attr = NULL; in cstate_probe_msr()
590 if (msr[i].attr) in cstate_probe_msr()
591 events_attrs[j++] = &msr[i].attr->attr.attr; in cstate_probe_msr()
605 pkg_msr[PERF_CSTATE_PKG_C6_RES].msr = MSR_PKG_C7_RESIDENCY; in cstate_init()
Dperf_event_intel_uncore_nhmex.c206 unsigned msr = uncore_msr_box_ctl(box); in nhmex_uncore_msr_disable_box() local
209 if (msr) { in nhmex_uncore_msr_disable_box()
210 rdmsrl(msr, config); in nhmex_uncore_msr_disable_box()
215 wrmsrl(msr, config); in nhmex_uncore_msr_disable_box()
221 unsigned msr = uncore_msr_box_ctl(box); in nhmex_uncore_msr_enable_box() local
224 if (msr) { in nhmex_uncore_msr_enable_box()
225 rdmsrl(msr, config); in nhmex_uncore_msr_enable_box()
230 wrmsrl(msr, config); in nhmex_uncore_msr_enable_box()
764 unsigned msr; in nhmex_mbox_hw_config() local
771 for (er = nhmex_uncore_mbox_extra_regs; er->msr; er++) { in nhmex_mbox_hw_config()
[all …]
Dperf_event_intel_uncore_snbep.c100 .msr = SNBEP_C0_MSR_PMON_BOX_FILTER, \
325 unsigned msr; in snbep_uncore_msr_disable_box() local
327 msr = uncore_msr_box_ctl(box); in snbep_uncore_msr_disable_box()
328 if (msr) { in snbep_uncore_msr_disable_box()
329 rdmsrl(msr, config); in snbep_uncore_msr_disable_box()
331 wrmsrl(msr, config); in snbep_uncore_msr_disable_box()
338 unsigned msr; in snbep_uncore_msr_enable_box() local
340 msr = uncore_msr_box_ctl(box); in snbep_uncore_msr_enable_box()
341 if (msr) { in snbep_uncore_msr_enable_box()
342 rdmsrl(msr, config); in snbep_uncore_msr_enable_box()
[all …]
Dperf_event.h19 #define wrmsrl(msr, val) \
21 unsigned int _msr = (msr); \
421 unsigned int msr; member
430 .msr = (ms), \
437 #define INTEL_EVENT_EXTRA_REG(event, msr, vm, idx) \ argument
438 EVENT_EXTRA_REG(event, msr, ARCH_PERFMON_EVENTSEL_EVENT, vm, idx)
440 #define INTEL_UEVENT_EXTRA_REG(event, msr, vm, idx) \ argument
441 EVENT_EXTRA_REG(event, msr, ARCH_PERFMON_EVENTSEL_EVENT | \
Dperf_event_amd_ibs.c46 unsigned int msr; member
284 hwc->config_base = perf_ibs->msr; in perf_ibs_init()
479 .msr = MSR_AMD64_IBSFETCHCTL,
503 .msr = MSR_AMD64_IBSOPCTL,
526 unsigned int msr; in perf_ibs_handle_irq() local
539 msr = hwc->config_base; in perf_ibs_handle_irq()
541 rdmsrl(msr, *buf); in perf_ibs_handle_irq()
562 rdmsrl(msr + offset, *buf++); in perf_ibs_handle_irq()
Dperf_event_intel_rapl.c331 int bit, msr, ret = 0; in rapl_pmu_event_init() local
347 msr = MSR_PP0_ENERGY_STATUS; in rapl_pmu_event_init()
351 msr = MSR_PKG_ENERGY_STATUS; in rapl_pmu_event_init()
355 msr = MSR_DRAM_ENERGY_STATUS; in rapl_pmu_event_init()
359 msr = MSR_PP1_ENERGY_STATUS; in rapl_pmu_event_init()
379 event->hw.event_base = msr; in rapl_pmu_event_init()
Damd.c30 static inline int rdmsrl_amd_safe(unsigned msr, unsigned long long *p) in rdmsrl_amd_safe() argument
38 gprs[1] = msr; in rdmsrl_amd_safe()
48 static inline int wrmsrl_amd_safe(unsigned msr, unsigned long long val) in wrmsrl_amd_safe() argument
56 gprs[1] = msr; in wrmsrl_amd_safe()
Dperf_event_p4.c1126 #define P4_ESCR_MSR_IDX(msr) (msr - P4_ESCR_MSR_BASE) argument
1127 #define P4_ESCR_MSR_TABLE_ENTRY(msr) [P4_ESCR_MSR_IDX(msr)] = msr argument
Dperf_event_intel.c2574 arr[0].msr = MSR_CORE_PERF_GLOBAL_CTRL; in intel_guest_get_msrs()
2582 arr[1].msr = MSR_IA32_PEBS_ENABLE; in intel_guest_get_msrs()
2599 arr[idx].msr = x86_pmu_config_addr(idx); in core_guest_get_msrs()
3094 static bool check_msr(unsigned long msr, u64 mask) in check_msr() argument
3103 if (rdmsrl_safe(msr, &val_old)) in check_msr()
3110 if (wrmsrl_safe(msr, val_tmp) || in check_msr()
3111 rdmsrl_safe(msr, &val_new)) in check_msr()
3120 wrmsrl(msr, val_old); in check_msr()
3629 for (er = x86_pmu.extra_regs; er->msr; er++) { in intel_pmu_init()
3630 er->extra_msr_access = check_msr(er->msr, 0x11UL); in intel_pmu_init()
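
The check_msr() hits above (perf_event_intel.c) show the usual probe-before-use pattern for a model-specific register: read it with the fault-safe accessor, toggle a couple of harmless bits, write, read back, and restore. A minimal sketch of that pattern, assuming kernel context and the <asm/msr.h> helpers; it is illustrative only, not a copy of the tree's code, and msr_probe_writable() is a made-up name.

#include <linux/types.h>
#include <asm/msr.h>

static bool msr_probe_writable(unsigned long msr, u64 mask)
{
	u64 old, probe, readback;

	if (rdmsrl_safe(msr, &old))
		return false;			/* MSR faults even on read */

	probe = old ^ mask;			/* toggle the probe bits */
	if (wrmsrl_safe(msr, probe) || rdmsrl_safe(msr, &readback))
		return false;			/* write or read-back faulted */

	wrmsrl(msr, old);			/* restore the original value */
	return readback == probe;		/* the bits actually stuck */
}
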
/linux-4.4.14/arch/x86/lib/
Dmsr.c5 struct msr *msrs_alloc(void) in msrs_alloc()
7 struct msr *msrs = NULL; in msrs_alloc()
9 msrs = alloc_percpu(struct msr); in msrs_alloc()
19 void msrs_free(struct msr *msrs) in msrs_free()
35 int msr_read(u32 msr, struct msr *m) in msr_read() argument
40 err = rdmsrl_safe(msr, &val); in msr_read()
53 int msr_write(u32 msr, struct msr *m) in msr_write() argument
55 return wrmsrl_safe(msr, m->q); in msr_write()
58 static inline int __flip_bit(u32 msr, u8 bit, bool set) in __flip_bit() argument
60 struct msr m, m1; in __flip_bit()
[all …]
DMakefile17 obj-$(CONFIG_SMP) += msr-smp.o cache-smp.o
25 obj-y += msr.o msr-reg.o msr-reg-export.o
Dmsr-smp.c9 struct msr *reg; in __rdmsr_on_cpu()
23 struct msr *reg; in __wrmsr_on_cpu()
98 struct msr *msrs, in __rwmsr_on_cpus()
125 void rdmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs) in rdmsr_on_cpus()
139 void wrmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs) in wrmsr_on_cpus()
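
The lib/msr.c and msr-smp.c hits above are the exported kernel API for bulk MSR access: allocate a per-CPU struct msr array, then read or write one MSR on every CPU in a mask. A hedged usage sketch, assuming driver context; MSR_IA32_APERF is just an example register and dump_aperf() is a made-up name.

#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <linux/printk.h>
#include <asm/msr.h>

static void dump_aperf(void)
{
	struct msr *msrs = msrs_alloc();	/* per-CPU struct msr array */
	int cpu;

	if (!msrs)
		return;

	/* read MSR_IA32_APERF on every online CPU in one call */
	rdmsr_on_cpus(cpu_online_mask, MSR_IA32_APERF, msrs);

	for_each_online_cpu(cpu)
		pr_info("cpu%d: APERF=%#llx\n", cpu,
			per_cpu_ptr(msrs, cpu)->q);

	msrs_free(msrs);
}
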
/linux-4.4.14/tools/power/x86/turbostat/
Dturbostat.c266 int get_msr(int cpu, off_t offset, unsigned long long *msr) in get_msr() argument
277 retval = pread(fd, msr, sizeof *msr, offset); in get_msr()
280 if (retval != sizeof *msr) in get_msr()
982 unsigned long long msr; in get_counters() local
1001 if (get_msr(cpu, MSR_SMI_COUNT, &msr)) in get_counters()
1003 t->smi_count = msr & 0xFFFFFFFF; in get_counters()
1006 if (get_msr(cpu, extra_delta_offset32, &msr)) in get_counters()
1008 t->extra_delta32 = msr & 0xFFFFFFFF; in get_counters()
1016 if (get_msr(cpu, extra_msr_offset32, &msr)) in get_counters()
1018 t->extra_msr32 = msr & 0xFFFFFFFF; in get_counters()
[all …]
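
turbostat's get_msr() above reads MSRs from userspace through the msr character device: open /dev/cpu/<cpu>/msr (provided by the msr driver) and pread() eight bytes at the MSR number used as the file offset. A standalone sketch of the same pattern, needing root and the msr driver loaded; offset 0x10 (the time-stamp counter) is only an example.

#include <stdio.h>
#include <stdint.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/types.h>

static int read_msr(int cpu, off_t msr, uint64_t *val)
{
	char path[64];
	int fd;

	snprintf(path, sizeof(path), "/dev/cpu/%d/msr", cpu);
	fd = open(path, O_RDONLY);
	if (fd < 0)
		return -1;
	if (pread(fd, val, sizeof(*val), msr) != sizeof(*val)) {
		close(fd);
		return -1;
	}
	close(fd);
	return 0;
}

int main(void)
{
	uint64_t tsc;

	if (read_msr(0, 0x10, &tsc) == 0)	/* 0x10 = IA32_TIME_STAMP_COUNTER */
		printf("cpu0 TSC: %llu\n", (unsigned long long)tsc);
	return 0;
}
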
/linux-4.4.14/arch/x86/include/asm/
Dmsr.h13 struct msr { struct
25 struct msr reg; argument
26 struct msr *msrs;
60 static inline unsigned long long native_read_msr(unsigned int msr) in native_read_msr() argument
64 asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr)); in native_read_msr()
68 static inline unsigned long long native_read_msr_safe(unsigned int msr, in native_read_msr_safe() argument
80 : "c" (msr), [fault] "i" (-EIO)); in native_read_msr_safe()
84 static inline void native_write_msr(unsigned int msr, in native_write_msr() argument
87 asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory"); in native_write_msr()
91 notrace static inline int native_write_msr_safe(unsigned int msr, in native_write_msr_safe() argument
[all …]
Dmicrocode.h7 #define native_rdmsr(msr, val1, val2) \ argument
9 u64 __val = native_read_msr((msr)); \
14 #define native_wrmsr(msr, low, high) \ argument
15 native_write_msr(msr, low, high)
17 #define native_wrmsrl(msr, val) \ argument
18 native_write_msr((msr), \
Dparavirt.h132 static inline u64 paravirt_read_msr(unsigned msr, int *err) in paravirt_read_msr() argument
134 return PVOP_CALL2(u64, pv_cpu_ops.read_msr, msr, err); in paravirt_read_msr()
137 static inline int paravirt_write_msr(unsigned msr, unsigned low, unsigned high) in paravirt_write_msr() argument
139 return PVOP_CALL3(int, pv_cpu_ops.write_msr, msr, low, high); in paravirt_write_msr()
143 #define rdmsr(msr, val1, val2) \ argument
146 u64 _l = paravirt_read_msr(msr, &_err); \
151 #define wrmsr(msr, val1, val2) \ argument
153 paravirt_write_msr(msr, val1, val2); \
156 #define rdmsrl(msr, val) \ argument
159 val = paravirt_read_msr(msr, &_err); \
[all …]
Dapic.h111 u64 msr; in apic_is_x2apic_enabled() local
113 if (rdmsrl_safe(MSR_IA32_APICBASE, &msr)) in apic_is_x2apic_enabled()
115 return msr & X2APIC_ENABLE; in apic_is_x2apic_enabled()
198 u64 msr; in native_apic_msr_read() local
203 rdmsrl(APIC_BASE_MSR + (reg >> 4), msr); in native_apic_msr_read()
204 return (u32)msr; in native_apic_msr_read()
Dkvm_host.h782 int (*get_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
783 int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
1006 int kvm_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr);
1007 int kvm_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr);
1035 int kvm_get_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
1036 int kvm_set_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
1125 static inline unsigned long read_msr(unsigned long msr) in read_msr() argument
1129 rdmsrl(msr, value); in read_msr()
1225 void kvm_define_shared_msr(unsigned index, u32 msr);
Dperf_event.h265 unsigned msr; member
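
The <asm/msr.h> and <asm/paravirt.h> hits above all encode the same instruction convention: RDMSR/WRMSR take the MSR index in ECX and pass the 64-bit value through EDX:EAX, so the C wrappers split and recombine the value around the instruction. A minimal sketch of that split, mirroring native_read_msr()/native_write_msr(); real code should use the kernel's own helpers.

static inline unsigned long long my_rdmsr(unsigned int msr)
{
	unsigned int low, high;

	asm volatile("rdmsr" : "=a" (low), "=d" (high) : "c" (msr));
	return low | ((unsigned long long)high << 32);
}

static inline void my_wrmsr(unsigned int msr, unsigned long long val)
{
	asm volatile("wrmsr" : : "c" (msr),
		     "a" ((unsigned int)val), "d" ((unsigned int)(val >> 32))
		     : "memory");
}
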
/linux-4.4.14/arch/m68k/bvme6000/
Dconfig.c164 unsigned char msr = rtc->msr & 0xc0; in bvme6000_timer_int() local
166 rtc->msr = msr | 0x20; /* Ack the interrupt */ in bvme6000_timer_int()
183 unsigned char msr = rtc->msr & 0xc0; in bvme6000_sched_init() local
185 rtc->msr = 0; /* Ensure timer registers accessible */ in bvme6000_sched_init()
196 rtc->msr = 0x40; /* Access int.cntrl, etc */ in bvme6000_sched_init()
201 rtc->msr = 0; /* Access timer 1 control */ in bvme6000_sched_init()
204 rtc->msr = msr; in bvme6000_sched_init()
225 unsigned char msr = rtc->msr & 0xc0; in bvme6000_gettimeoffset() local
229 rtc->msr = 0; /* Ensure timer registers accessible */ in bvme6000_gettimeoffset()
233 t1int = rtc->msr & 0x20; in bvme6000_gettimeoffset()
[all …]
Drtc.c41 unsigned char msr; in rtc_ioctl() local
51 msr = rtc->msr & 0xc0; in rtc_ioctl()
52 rtc->msr = 0x40; in rtc_ioctl()
65 rtc->msr = msr; in rtc_ioctl()
107 msr = rtc->msr & 0xc0; in rtc_ioctl()
108 rtc->msr = 0x40; in rtc_ioctl()
122 rtc->msr = msr; in rtc_ioctl()
/linux-4.4.14/arch/x86/kvm/
Dmtrr.c29 static bool msr_mtrr_valid(unsigned msr) in msr_mtrr_valid() argument
31 switch (msr) { in msr_mtrr_valid()
61 bool kvm_mtrr_valid(struct kvm_vcpu *vcpu, u32 msr, u64 data) in kvm_mtrr_valid() argument
66 if (!msr_mtrr_valid(msr)) in kvm_mtrr_valid()
69 if (msr == MSR_IA32_CR_PAT) { in kvm_mtrr_valid()
74 } else if (msr == MSR_MTRRdefType) { in kvm_mtrr_valid()
78 } else if (msr >= MSR_MTRRfix64K_00000 && msr <= MSR_MTRRfix4K_F8000) { in kvm_mtrr_valid()
86 WARN_ON(!(msr >= 0x200 && msr < 0x200 + 2 * KVM_NR_VAR_MTRR)); in kvm_mtrr_valid()
89 if ((msr & 1) == 0) { in kvm_mtrr_valid()
196 static bool fixed_msr_to_seg_unit(u32 msr, int *seg, int *unit) in fixed_msr_to_seg_unit() argument
[all …]
Dhyperv.c33 static bool kvm_hv_msr_partition_wide(u32 msr) in kvm_hv_msr_partition_wide() argument
37 switch (msr) { in kvm_hv_msr_partition_wide()
107 static int kvm_hv_set_msr_pw(struct kvm_vcpu *vcpu, u32 msr, u64 data, in kvm_hv_set_msr_pw() argument
113 switch (msr) { in kvm_hv_set_msr_pw()
163 msr - HV_X64_MSR_CRASH_P0, in kvm_hv_set_msr_pw()
175 msr, data); in kvm_hv_set_msr_pw()
190 static int kvm_hv_set_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data, bool host) in kvm_hv_set_msr() argument
194 switch (msr) { in kvm_hv_set_msr()
231 msr, data); in kvm_hv_set_msr()
238 static int kvm_hv_get_msr_pw(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) in kvm_hv_get_msr_pw() argument
[all …]
Dpmu.h25 bool (*is_valid_msr)(struct kvm_vcpu *vcpu, u32 msr);
26 int (*get_msr)(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
80 static inline struct kvm_pmc *get_gp_pmc(struct kvm_pmu *pmu, u32 msr, in get_gp_pmc() argument
83 if (msr >= base && msr < base + pmu->nr_arch_gp_counters) in get_gp_pmc()
84 return &pmu->gp_counters[msr - base]; in get_gp_pmc()
90 static inline struct kvm_pmc *get_fixed_pmc(struct kvm_pmu *pmu, u32 msr) in get_fixed_pmc() argument
94 if (msr >= base && msr < base + pmu->nr_arch_fixed_counters) in get_fixed_pmc()
95 return &pmu->fixed_counters[msr - base]; in get_fixed_pmc()
108 bool kvm_pmu_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr);
109 int kvm_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
Dpmu_intel.c145 static bool intel_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr) in intel_is_valid_msr() argument
150 switch (msr) { in intel_is_valid_msr()
158 ret = get_gp_pmc(pmu, msr, MSR_IA32_PERFCTR0) || in intel_is_valid_msr()
159 get_gp_pmc(pmu, msr, MSR_P6_EVNTSEL0) || in intel_is_valid_msr()
160 get_fixed_pmc(pmu, msr); in intel_is_valid_msr()
167 static int intel_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data) in intel_pmu_get_msr() argument
172 switch (msr) { in intel_pmu_get_msr()
186 if ((pmc = get_gp_pmc(pmu, msr, MSR_IA32_PERFCTR0)) || in intel_pmu_get_msr()
187 (pmc = get_fixed_pmc(pmu, msr))) { in intel_pmu_get_msr()
190 } else if ((pmc = get_gp_pmc(pmu, msr, MSR_P6_EVNTSEL0))) { in intel_pmu_get_msr()
[all …]
Dpmu_amd.c94 static bool amd_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr) in amd_is_valid_msr() argument
99 ret = get_gp_pmc(pmu, msr, MSR_K7_PERFCTR0) || in amd_is_valid_msr()
100 get_gp_pmc(pmu, msr, MSR_K7_EVNTSEL0); in amd_is_valid_msr()
105 static int amd_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data) in amd_pmu_get_msr() argument
111 pmc = get_gp_pmc(pmu, msr, MSR_K7_PERFCTR0); in amd_pmu_get_msr()
117 pmc = get_gp_pmc(pmu, msr, MSR_K7_EVNTSEL0); in amd_pmu_get_msr()
130 u32 msr = msr_info->index; in amd_pmu_set_msr() local
134 pmc = get_gp_pmc(pmu, msr, MSR_K7_PERFCTR0); in amd_pmu_set_msr()
140 pmc = get_gp_pmc(pmu, msr, MSR_K7_EVNTSEL0); in amd_pmu_set_msr()
Dhyperv.h27 int kvm_hv_set_msr_common(struct kvm_vcpu *vcpu, u32 msr, u64 data, bool host);
28 int kvm_hv_get_msr_common(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata);
Dx86.h165 void kvm_write_tsc(struct kvm_vcpu *vcpu, struct msr_data *msr);
177 bool kvm_mtrr_valid(struct kvm_vcpu *vcpu, u32 msr, u64 data);
178 int kvm_mtrr_set_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data);
179 int kvm_mtrr_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata);
Dlapic.h85 int kvm_x2apic_msr_write(struct kvm_vcpu *vcpu, u32 msr, u64 data);
86 int kvm_x2apic_msr_read(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
88 int kvm_hv_vapic_msr_write(struct kvm_vcpu *vcpu, u32 msr, u64 data);
89 int kvm_hv_vapic_msr_read(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
Dvmx.c1262 static int __find_msr_index(struct vcpu_vmx *vmx, u32 msr) in __find_msr_index() argument
1267 if (vmx_msr_index[vmx->guest_msrs[i].index] == msr) in __find_msr_index()
1298 static struct shared_msr_entry *find_msr_entry(struct vcpu_vmx *vmx, u32 msr) in find_msr_entry() argument
1302 i = __find_msr_index(vmx, msr); in find_msr_entry()
1674 static void clear_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr) in clear_atomic_switch_msr() argument
1679 switch (msr) { in clear_atomic_switch_msr()
1699 if (m->guest[i].index == msr) in clear_atomic_switch_msr()
1722 static void add_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr, in add_atomic_switch_msr() argument
1728 switch (msr) { in add_atomic_switch_msr()
1761 if (m->guest[i].index == msr) in add_atomic_switch_msr()
[all …]
Dpmu.c264 bool kvm_pmu_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr) in kvm_pmu_is_valid_msr() argument
266 return kvm_x86_ops->pmu_ops->is_valid_msr(vcpu, msr); in kvm_pmu_is_valid_msr()
269 int kvm_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data) in kvm_pmu_get_msr() argument
271 return kvm_x86_ops->pmu_ops->get_msr(vcpu, msr, data); in kvm_pmu_get_msr()
Dx86.c214 static void shared_msr_update(unsigned slot, u32 msr) in shared_msr_update() argument
226 rdmsrl_safe(msr, &value); in shared_msr_update()
231 void kvm_define_shared_msr(unsigned slot, u32 msr) in kvm_define_shared_msr() argument
234 shared_msrs_global.msrs[slot] = msr; in kvm_define_shared_msr()
1040 int kvm_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr) in kvm_set_msr() argument
1042 switch (msr->index) { in kvm_set_msr()
1048 if (is_noncanonical_address(msr->data)) in kvm_set_msr()
1065 msr->data = get_canonical(msr->data); in kvm_set_msr()
1067 return kvm_x86_ops->set_msr(vcpu, msr); in kvm_set_msr()
1076 struct msr_data msr; in do_get_msr() local
[all …]
Dsvm.c428 static u32 svm_msrpm_offset(u32 msr) in svm_msrpm_offset() argument
434 if (msr < msrpm_ranges[i] || in svm_msrpm_offset()
435 msr >= msrpm_ranges[i] + MSRS_IN_RANGE) in svm_msrpm_offset()
438 offset = (msr - msrpm_ranges[i]) / 4; /* 4 msrs per u8 */ in svm_msrpm_offset()
764 static void set_msr_interception(u32 *msrpm, unsigned msr, in set_msr_interception() argument
775 WARN_ON(!valid_msr_intercept(msr)); in set_msr_interception()
777 offset = svm_msrpm_offset(msr); in set_msr_interception()
778 bit_read = 2 * (msr & 0x0f); in set_msr_interception()
779 bit_write = 2 * (msr & 0x0f) + 1; in set_msr_interception()
2081 u32 offset, msr, value; in nested_svm_exit_handled_msr() local
[all …]
/linux-4.4.14/arch/microblaze/kernel/
Dprocess.c44 regs->msr, regs->ear, regs->esr, regs->fsr); in show_regs()
69 local_save_flags(childregs->msr); in copy_thread()
71 ti->cpu_context.msr = childregs->msr & ~MSR_IE; in copy_thread()
83 ti->cpu_context.msr = (unsigned long)childregs->msr; in copy_thread()
85 childregs->msr |= MSR_UMS; in copy_thread()
97 childregs->msr &= ~MSR_EIP; in copy_thread()
98 childregs->msr |= MSR_IE; in copy_thread()
99 childregs->msr &= ~MSR_VM; in copy_thread()
100 childregs->msr |= MSR_VMS; in copy_thread()
101 childregs->msr |= MSR_EE; /* exceptions will be enabled*/ in copy_thread()
[all …]
Dsetup.c100 unsigned int fdt, unsigned int msr, unsigned int tlb0, in machine_early_init() argument
167 if (msr) { in machine_early_init()
169 pr_cont("CPU don't have it %x\n", msr); in machine_early_init()
172 if (!msr) { in machine_early_init()
174 pr_cont("CPU have it %x\n", msr); in machine_early_init()
Dasm-offsets.c25 DEFINE(PT_MSR, offsetof(struct pt_regs, msr)); in main()
121 DEFINE(CC_MSR, offsetof(struct cpu_context, msr)); in main()
/linux-4.4.14/arch/x86/xen/
Dpmu.c124 static inline bool is_amd_pmu_msr(unsigned int msr) in is_amd_pmu_msr() argument
126 if ((msr >= MSR_F15H_PERF_CTL && in is_amd_pmu_msr()
127 msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) || in is_amd_pmu_msr()
128 (msr >= MSR_K7_EVNTSEL0 && in is_amd_pmu_msr()
129 msr < MSR_K7_PERFCTR0 + amd_num_counters)) in is_amd_pmu_msr()
181 static bool xen_intel_pmu_emulate(unsigned int msr, u64 *val, int type, in xen_intel_pmu_emulate() argument
197 switch (msr) { in xen_intel_pmu_emulate()
235 if (msr == MSR_CORE_PERF_GLOBAL_OVF_CTRL) in xen_intel_pmu_emulate()
244 static bool xen_amd_pmu_emulate(unsigned int msr, u64 *val, bool is_read) in xen_amd_pmu_emulate() argument
257 ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3))) in xen_amd_pmu_emulate()
[all …]
Dpmu.h10 bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err);
11 bool pmu_msr_write(unsigned int msr, uint32_t low, uint32_t high, int *err);
Denlighten.c1034 static u64 xen_read_msr_safe(unsigned int msr, int *err) in xen_read_msr_safe() argument
1038 if (pmu_msr_read(msr, &val, err)) in xen_read_msr_safe()
1041 val = native_read_msr_safe(msr, err); in xen_read_msr_safe()
1042 switch (msr) { in xen_read_msr_safe()
1053 static int xen_write_msr_safe(unsigned int msr, unsigned low, unsigned high) in xen_write_msr_safe() argument
1059 switch (msr) { in xen_write_msr_safe()
1088 if (!pmu_msr_write(msr, low, high, &ret)) in xen_write_msr_safe()
1089 ret = native_write_msr_safe(msr, low, high); in xen_write_msr_safe()
1774 uint32_t eax, ebx, ecx, edx, pages, msr, base; in init_hvm_pv_info() local
1784 cpuid(base + 2, &pages, &msr, &ecx, &edx); in init_hvm_pv_info()
[all …]
/linux-4.4.14/arch/arm64/kvm/
Dhyp.S78 msr sp_el1, x22
79 msr elr_el1, x23
80 msr spsr_el1, x24
86 msr sp_el0, x19
87 msr elr_el2, x20 // pc on return from el2
88 msr spsr_el2, x21 // pstate on return from el2
299 msr vmpidr_el2, x4
300 msr csselr_el1, x5
301 msr sctlr_el1, x6
302 msr actlr_el1, x7
[all …]
Dhyp-init.S62 msr ttbr0_el2, x0
88 msr tcr_el2, x4
97 msr vtcr_el2, x4
100 msr mair_el2, x4
111 msr sctlr_el2, x4
125 msr ttbr0_el2, x1
137 msr vbar_el2, x3
/linux-4.4.14/arch/powerpc/kernel/
Dsignal_64.c108 unsigned long msr = regs->msr; in setup_sigcontext() local
123 msr |= MSR_VEC; in setup_sigcontext()
142 msr &= ~MSR_VSX; in setup_sigcontext()
156 msr |= MSR_VSX; in setup_sigcontext()
162 err |= __put_user(msr, &sc->gp_regs[PT_MSR]); in setup_sigcontext()
200 unsigned long msr = regs->msr; in setup_tm_sigcontexts() local
203 BUG_ON(!MSR_TM_ACTIVE(regs->msr)); in setup_tm_sigcontexts()
210 regs->msr &= ~MSR_TS_MASK; in setup_tm_sigcontexts()
227 if (msr & MSR_VEC) in setup_tm_sigcontexts()
239 msr |= MSR_VEC; in setup_tm_sigcontexts()
[all …]
Dprocess.c87 MSR_TM_ACTIVE(tsk->thread.regs->msr) && in giveup_fpu_maybe_transactional()
89 tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr; in giveup_fpu_maybe_transactional()
105 MSR_TM_ACTIVE(tsk->thread.regs->msr) && in giveup_altivec_maybe_transactional()
107 tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr; in giveup_altivec_maybe_transactional()
136 if (tsk->thread.regs->msr & MSR_FP) { in flush_fp_to_thread()
160 if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) in enable_kernel_fp()
176 if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) in enable_kernel_altivec()
194 if (tsk->thread.regs->msr & MSR_VEC) { in flush_altivec_to_thread()
212 if (current->thread.regs && (current->thread.regs->msr & MSR_VSX)) in enable_kernel_vsx()
234 if (tsk->thread.regs->msr & MSR_VSX) { in flush_vsx_to_thread()
[all …]
Dsignal_32.c412 unsigned long msr = regs->msr; in save_user_regs() local
430 msr |= MSR_VEC; in save_user_regs()
452 msr &= ~MSR_VSX; in save_user_regs()
464 msr |= MSR_VSX; in save_user_regs()
476 msr |= MSR_SPE; in save_user_regs()
485 if (__put_user(msr, &frame->mc_gregs[PT_MSR])) in save_user_regs()
519 unsigned long msr = regs->msr; in save_tm_user_regs() local
526 regs->msr &= ~MSR_TS_MASK; in save_tm_user_regs()
542 if (__put_user((msr >> 32), &tm_frame->mc_gregs[PT_MSR])) in save_tm_user_regs()
552 if (msr & MSR_VEC) { in save_tm_user_regs()
[all …]
Dtraps.c255 printk_ratelimited(regs->msr & MSR_64BIT ? fmt64 : fmt32, in _exception()
283 if (!(regs->msr & MSR_RI)) in system_reset_exception()
332 unsigned long msr = regs->msr; in check_io_access() local
336 if (((msr & 0xffff0000) == 0 || (msr & (0x80000 | 0x40000))) in check_io_access()
359 regs->msr |= MSR_RI; in check_io_access()
389 #define get_reason(regs) ((regs)->msr)
390 #define get_mc_reason(regs) ((regs)->msr)
397 #define single_stepping(regs) ((regs)->msr & MSR_SE)
398 #define clear_single_step(regs) ((regs)->msr &= ~MSR_SE)
741 if (!(regs->msr & MSR_RI)) in machine_check_exception()
[all …]
Dptrace.c100 REG_OFFSET_NAME(msr),
173 return task->thread.regs->msr | task->thread.fpexc_mode; in get_user_msr()
176 static int set_user_msr(struct task_struct *task, unsigned long msr) in set_user_msr() argument
178 task->thread.regs->msr &= ~MSR_DEBUGCHANGE; in set_user_msr()
179 task->thread.regs->msr |= msr & MSR_DEBUGCHANGE; in set_user_msr()
281 0, offsetof(struct pt_regs, msr)); in gpr_get()
283 unsigned long msr = get_user_msr(target); in gpr_get() local
284 ret = user_regset_copyout(&pos, &count, &kbuf, &ubuf, &msr, in gpr_get()
285 offsetof(struct pt_regs, msr), in gpr_get()
286 offsetof(struct pt_regs, msr) + in gpr_get()
[all …]
Dkprobes.c131 kcb->kprobe_saved_msr = regs->msr; in set_current_kprobe()
165 regs->msr &= ~MSR_SINGLESTEP; in kprobe_handler()
166 regs->msr |= kcb->kprobe_saved_msr; in kprobe_handler()
177 kcb->kprobe_saved_msr = regs->msr; in kprobe_handler()
382 regs->msr |= kcb->kprobe_saved_msr; in post_kprobe_handler()
398 if (regs->msr & MSR_SINGLESTEP) in post_kprobe_handler()
421 regs->msr &= ~MSR_SINGLESTEP; /* Turn off 'trace' bits */ in kprobe_fault_handler()
422 regs->msr |= kcb->kprobe_saved_msr; in kprobe_fault_handler()
Dkgdb.c253 PACK64(ptr, regs->msr); in sleeping_thread_to_gdb_regs()
341 { "msr", GDB_SIZEOF_REG, offsetof(struct pt_regs, msr) },
431 linux_regs->msr |= MSR_DE; in kgdb_arch_handle_exception()
433 linux_regs->msr |= MSR_SE; in kgdb_arch_handle_exception()
Dsignal.c190 if (MSR_TM_ACTIVE(regs->msr)) { in get_tm_stackpointer()
192 if (MSR_TM_TRANSACTIONAL(regs->msr)) in get_tm_stackpointer()
Dppc32.h22 unsigned int msr; member
Dhw_breakpoint.c200 regs->msr &= ~MSR_SE; in thread_change_pc()
260 regs->msr |= MSR_SE; in hw_breakpoint_handler()
Dmce_power.c274 srr1 = regs->msr; in __machine_check_early_realmode_p7()
343 srr1 = regs->msr; in __machine_check_early_realmode_p8()
Dmce.c90 mce->srr1 = regs->msr; in save_mce_event()
101 srr1 = regs->msr; in save_mce_event()
Dsyscalls.c129 current->thread.regs->msr ^= MSR_LE; in sys_switch_endian()
Dhead_booke.h226 #define EXC_XFER_TEMPLATE(hdlr, trap, msr, copyee, tfer, ret) \ argument
229 lis r10,msr@h; \
230 ori r10,r10,msr@l; \
Dalign.c782 if (cpu_has_feature(CPU_FTR_PPC_LE) && (regs->msr & MSR_LE)) in fix_alignment()
787 if (cpu_has_feature(CPU_FTR_REAL_LE) && (regs->msr & MSR_LE)) in fix_alignment()
823 if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) { in fix_alignment()
863 if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) in fix_alignment()
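
In the powerpc hits, msr is the Machine State Register image saved in struct pt_regs rather than an x86 model-specific register; bits such as MSR_PR (problem/user state), MSR_EE (external-interrupt enable) and MSR_FP/MSR_VEC gate what the interrupted context was allowed to do. A small sketch of the two tests that recur above, assuming <asm/ptrace.h> and <asm/reg.h>; the helper names are made up.

#include <linux/types.h>
#include <asm/ptrace.h>
#include <asm/reg.h>

static inline bool came_from_user(struct pt_regs *regs)
{
	return (regs->msr & MSR_PR) != 0;	/* problem state == user mode */
}

static inline bool irqs_were_on(struct pt_regs *regs)
{
	return (regs->msr & MSR_EE) != 0;	/* external interrupts enabled */
}
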
/linux-4.4.14/arch/arm64/mm/
Dproc.S104 msr tpidr_el0, x2
105 msr tpidrro_el0, x3
106 msr contextidr_el1, x4
107 msr mair_el1, x5
108 msr cpacr_el1, x6
109 msr ttbr0_el1, x1
110 msr ttbr1_el1, x7
112 msr tcr_el1, x8
113 msr vbar_el1, x9
114 msr mdscr_el1, x10
[all …]
Dproc-macros.S74 msr pmuserenr_el0, xzr // Disable PMU access from EL0
/linux-4.4.14/tools/power/cpupower/debug/i386/
Dcentrino-decode.c29 static int rdmsr(unsigned int cpu, unsigned int msr, in rdmsr() argument
48 if (lseek(fd, msr, SEEK_CUR) == -1) in rdmsr()
64 static void decode (unsigned int msr) in decode() argument
69 multiplier = ((msr >> 8) & 0xFF); in decode()
71 mv = (((msr & 0xFF) * 16) + 700); in decode()
73 printf("0x%x means multiplier %d @ %d mV\n", msr, multiplier, mv); in decode()
Dpowernow-k8-decode.c31 uint64_t msr = 0; in get_fidvid() local
44 if (read(fd, &msr, 8) != 8) in get_fidvid()
47 *fid = ((uint32_t )(msr & 0xffffffffull)) & MSR_S_LO_CURRENT_FID; in get_fidvid()
48 *vid = ((uint32_t )(msr>>32 & 0xffffffffull)) & MSR_S_HI_CURRENT_VID; in get_fidvid()
/linux-4.4.14/arch/arm64/kernel/
Dhead.S455 msr sctlr_el2, x0
460 msr sctlr_el1, x0
467 msr hcr_el2, x0
472 msr cnthctl_el2, x0
473 msr cntvoff_el2, xzr // Clear virtual offset
497 msr vpidr_el2, x0
498 msr vmpidr_el2, x1
504 msr sctlr_el1, x0
508 msr cptr_el2, x0 // Disable copro. traps to EL2
511 msr hstr_el2, xzr // Disable CP15 traps to EL2
[all …]
Defi-entry.S101 msr sctlr_el2, x0
108 msr sctlr_el1, x0
Dentry.S124 msr sp_el0, x23
136 msr contextidr_el1, x29
138 msr contextidr_el1, xzr
144 msr elr_el1, x21 // set up the return data
145 msr spsr_el1, x22
Dhyp-stub.S61 msr vbar_el2, x0 // Set vbar_el2
Dsleep.S134 msr sctlr_el1, x0 // restore sctlr_el1
/linux-4.4.14/arch/powerpc/kvm/
Dbook3s_pr.c55 ulong msr);
67 ulong msr = kvmppc_get_msr(vcpu); in kvmppc_is_split_real() local
68 return (msr & (MSR_IR|MSR_DR)) == MSR_DR; in kvmppc_is_split_real()
73 ulong msr = kvmppc_get_msr(vcpu); in kvmppc_fixup_split_real() local
77 if ((msr & (MSR_IR|MSR_DR)) != MSR_DR) in kvmppc_fixup_split_real()
337 static void kvmppc_set_msr_pr(struct kvm_vcpu *vcpu, u64 msr) in kvmppc_set_msr_pr() argument
342 printk(KERN_INFO "KVM: Set MSR to 0x%llx\n", msr); in kvmppc_set_msr_pr()
345 msr &= to_book3s(vcpu)->msr_mask; in kvmppc_set_msr_pr()
346 kvmppc_set_msr_fast(vcpu, msr); in kvmppc_set_msr_pr()
349 if (msr & MSR_POW) { in kvmppc_set_msr_pr()
[all …]
Dbooke.c79 printk("pc: %08lx msr: %08llx\n", vcpu->arch.pc, vcpu->arch.shared->msr); in kvmppc_dump_vcpu()
116 if (vcpu->arch.shared->msr & MSR_SPE) { in kvmppc_vcpu_sync_spe()
141 if (!(current->thread.regs->msr & MSR_FP)) { in kvmppc_load_guest_fp()
145 current->thread.regs->msr |= MSR_FP; in kvmppc_load_guest_fp()
157 if (current->thread.regs->msr & MSR_FP) in kvmppc_save_guest_fp()
169 vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP; in kvmppc_vcpu_sync_fpu()
182 if (!(current->thread.regs->msr & MSR_VEC)) { in kvmppc_load_guest_altivec()
186 current->thread.regs->msr |= MSR_VEC; in kvmppc_load_guest_altivec()
200 if (current->thread.regs->msr & MSR_VEC) in kvmppc_save_guest_altivec()
212 vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_DE; in kvmppc_vcpu_sync_debug()
[all …]
Dtrace_booke.h46 __field( unsigned long, msr )
55 __entry->msr = vcpu->arch.shared->msr;
67 __entry->msr,
Dbooke_emulate.c91 kvmppc_set_gpr(vcpu, rt, vcpu->arch.shared->msr); in kvmppc_booke_emulate_op()
101 vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE) in kvmppc_booke_emulate_op()
107 vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE) in kvmppc_booke_emulate_op()
Dbook3s_32_mmu.c369 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_mmu_book3s_32_esid_to_vsid() local
371 if (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_32_esid_to_vsid()
380 switch (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_32_esid_to_vsid()
400 if (msr & MSR_PR) in kvmppc_mmu_book3s_32_esid_to_vsid()
De500.h218 return !!(vcpu->arch.shared->msr & (MSR_IS | MSR_DS)); in get_cur_as()
223 return !!(vcpu->arch.shared->msr & MSR_PR); in get_cur_pr()
266 if (get_tlb_ts(tlbe) != !!(vcpu->arch.shared->msr & MSR_IS)) in tlbe_is_host_safe()
Dbook3s.c237 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_core_queue_inst_storage() local
238 msr &= ~(SRR1_ISI_NOPT | SRR1_ISI_N_OR_G | SRR1_ISI_PROT); in kvmppc_core_queue_inst_storage()
239 msr |= flags & (SRR1_ISI_NOPT | SRR1_ISI_N_OR_G | SRR1_ISI_PROT); in kvmppc_core_queue_inst_storage()
240 kvmppc_set_msr_fast(vcpu, msr); in kvmppc_core_queue_inst_storage()
477 regs->msr = kvmppc_get_msr(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
505 kvmppc_set_msr(vcpu, regs->msr); in kvm_arch_vcpu_ioctl_set_regs()
693 void kvmppc_set_msr(struct kvm_vcpu *vcpu, u64 msr) in kvmppc_set_msr() argument
695 vcpu->kvm->arch.kvm_ops->set_msr(vcpu, msr); in kvmppc_set_msr()
Dtrace_hv.h254 __field(unsigned long, msr)
263 __entry->msr = vcpu->arch.shregs.msr;
269 __entry->pc, __entry->msr, __entry->ceded
Dtrace_pr.h225 __field( unsigned long, msr )
235 __entry->msr = kvmppc_get_msr(vcpu);
249 __entry->msr,
Dbook3s_hv_ras.c70 unsigned long srr1 = vcpu->arch.shregs.msr; in kvmppc_realmode_mc_power7()
Dbook3s_64_mmu_hv.c248 unsigned long msr = vcpu->arch.intr_msr; in kvmppc_mmu_book3s_64_hv_reset_msr() local
251 if (MSR_TM_TRANSACTIONAL(vcpu->arch.shregs.msr)) in kvmppc_mmu_book3s_64_hv_reset_msr()
252 msr |= MSR_TS_S; in kvmppc_mmu_book3s_64_hv_reset_msr()
254 msr |= vcpu->arch.shregs.msr & MSR_TS_MASK; in kvmppc_mmu_book3s_64_hv_reset_msr()
255 kvmppc_set_msr(vcpu, msr); in kvmppc_mmu_book3s_64_hv_reset_msr()
318 int virtmode = vcpu->arch.shregs.msr & (data ? MSR_DR : MSR_IR); in kvmppc_mmu_book3s_64_hv_xlate()
351 key = (vcpu->arch.shregs.msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS; in kvmppc_mmu_book3s_64_hv_xlate()
Dbook3s_paired_singles.c169 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_inject_pf() local
171 msr = kvmppc_set_field(msr, 33, 36, 0); in kvmppc_inject_pf()
172 msr = kvmppc_set_field(msr, 42, 47, 0); in kvmppc_inject_pf()
173 kvmppc_set_msr(vcpu, msr); in kvmppc_inject_pf()
Dbook3s_64_mmu.c585 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_mmu_book3s_64_esid_to_vsid() local
587 if (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_64_esid_to_vsid()
600 switch (msr & (MSR_DR|MSR_IR)) { in kvmppc_mmu_book3s_64_esid_to_vsid()
De500_mmu.c415 if (!(vcpu->arch.shared->msr & MSR_CM)) in kvmppc_e500_emul_tlbwe()
500 unsigned int as = !!(vcpu->arch.shared->msr & MSR_IS); in kvmppc_mmu_itlb_index()
507 unsigned int as = !!(vcpu->arch.shared->msr & MSR_DS); in kvmppc_mmu_dtlb_index()
514 unsigned int as = !!(vcpu->arch.shared->msr & MSR_IS); in kvmppc_mmu_itlb_miss()
521 unsigned int as = !!(vcpu->arch.shared->msr & MSR_DS); in kvmppc_mmu_dtlb_miss()
De500_mmu_host.c313 u32 pr = vcpu->arch.shared->msr & MSR_PR; in kvmppc_e500_setup_stlbe()
644 addr_space = (vcpu->arch.shared->msr & MSR_IS) >> MSR_IR_LG; in kvmppc_load_last_inst()
674 pr = vcpu->arch.shared->msr & MSR_PR; in kvmppc_load_last_inst()
Dbook3s_hv.c225 static void kvmppc_set_msr_hv(struct kvm_vcpu *vcpu, u64 msr) in kvmppc_set_msr_hv() argument
231 if ((msr & MSR_TS_MASK) == MSR_TS_MASK) in kvmppc_set_msr_hv()
232 msr &= ~MSR_TS_MASK; in kvmppc_set_msr_hv()
233 vcpu->arch.shregs.msr = msr; in kvmppc_set_msr_hv()
288 vcpu->arch.pc, vcpu->arch.shregs.msr, vcpu->arch.trap); in kvmppc_dump_regs()
597 dt->srr1 = cpu_to_be64(vcpu->arch.shregs.msr); in kvmppc_create_dtl_entry()
880 flags = vcpu->arch.shregs.msr & 0x1f0000ull; in kvmppc_handle_exit_hv()
949 vcpu->arch.shregs.msr); in kvmppc_handle_exit_hv()
2720 !(vcpu->arch.shregs.msr & MSR_PR)) { in kvmppc_vcpu_run_hv()
/linux-4.4.14/arch/powerpc/include/asm/
Drunlatch.h21 unsigned long msr = mfmsr(); \
24 if (msr & MSR_EE) \
33 unsigned long msr = mfmsr(); \
36 if (msr & MSR_EE) \
Dhw_irq.h176 unsigned long msr = mfmsr(); in arch_local_irq_enable()
177 SET_MSR_EE(msr | MSR_EE); in arch_local_irq_enable()
195 return !(regs->msr & MSR_EE); in arch_irq_disabled_regs()
Dprobes.h50 regs->msr |= MSR_SINGLESTEP; in enable_single_step()
57 regs->msr &= ~MSR_CE; in enable_single_step()
Dptrace.h105 #define user_mode(regs) ((((regs)->msr) >> MSR_PR_LG) & 0x1)
107 #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)
Dperf_event.h38 asm volatile("mfmsr %0" : "=r" ((regs)->msr)); \
Dkvm_ppc.h238 void (*set_msr)(struct kvm_vcpu *vcpu, u64 msr);
612 SHARED_WRAPPER_GET(msr, 64)
616 vcpu->arch.shared->msr = cpu_to_be64(val); in kvmppc_set_msr_fast()
618 vcpu->arch.shared->msr = cpu_to_le64(val); in kvmppc_set_msr_fast()
/linux-4.4.14/sound/pci/ctxfi/
Dctdaio.c166 entry = kzalloc((sizeof(*entry) * daio->rscl.msr), GFP_KERNEL); in dao_set_left_input()
174 for (i = 0; i < daio->rscl.msr; i++, entry++) { in dao_set_left_input()
195 entry = kzalloc((sizeof(*entry) * daio->rscr.msr), GFP_KERNEL); in dao_set_right_input()
203 for (i = 0; i < daio->rscr.msr; i++, entry++) { in dao_set_right_input()
207 dao->imappers[daio->rscl.msr + i] = entry; in dao_set_right_input()
230 for (i = 1; i < daio->rscl.msr; i++) { in dao_clear_left_input()
248 if (!dao->imappers[daio->rscl.msr]) in dao_clear_right_input()
251 entry = dao->imappers[daio->rscl.msr]; in dao_clear_right_input()
254 for (i = 1; i < daio->rscr.msr; i++) { in dao_clear_right_input()
255 entry = dao->imappers[daio->rscl.msr + i]; in dao_clear_right_input()
[all …]
Dctatc.c261 desc.msr = atc->msr; in atc_pcm_playback_prepare()
268 (atc->rsr * atc->msr)); in atc_pcm_playback_prepare()
283 mix_dsc.msr = atc->msr; in atc_pcm_playback_prepare()
387 max_cisz = src->multi * src->rsc.msr; in atc_pcm_playback_start()
448 max_cisz = src->multi * src->rsc.msr; in atc_pcm_playback_position()
456 unsigned int msr:8; member
468 pitch = atc_get_pitch((atc->rsr * atc->msr), in setup_src_node_conf()
472 if (1 == atc->msr) { /* FIXME: do we really need SRC here if pitch==1 */ in setup_src_node_conf()
475 conf[0].mix_msr = conf[0].imp_msr = conf[0].msr = 1; in setup_src_node_conf()
477 } else if (2 <= atc->msr) { in setup_src_node_conf()
[all …]
Dctsrc.c186 if (src->rsc.msr > 1) { in src_commit_write()
197 for (i = 1; i < src->rsc.msr; i++) { in src_commit_write()
232 unsigned int rsr, msr; in src_default_config_memrd() local
236 for (rsr = 0, msr = src->rsc.msr; msr > 1; msr >>= 1) in src_default_config_memrd()
258 for (msr = 1; msr < src->rsc.msr; msr++) { in src_default_config_memrd()
300 unsigned int rsr, msr; in src_default_config_arcrw() local
305 for (rsr = 0, msr = src->rsc.msr; msr > 1; msr >>= 1) in src_default_config_arcrw()
327 for (msr = 0; msr < src->rsc.msr; msr++) { in src_default_config_arcrw()
369 err = rsc_init(&p->rsc, idx + i, SRC, desc->msr, mgr->mgr.hw); in src_rsc_init()
496 for (i = 0; i < src->rsc.msr; i++) { in src_enable_s()
[all …]
Dctdaio.h69 unsigned int msr:4; member
87 int (*set_srt_msr)(struct dai *dai, unsigned int msr);
96 unsigned int msr:4; member
Dctresource.c119 for (i = 0; (i < 8) && (!(rsc->msr & (0x1 << i))); ) in rsc_next_conj()
138 rsc_init(struct rsc *rsc, u32 idx, enum RSCTYP type, u32 msr, struct hw *hw) in rsc_init() argument
145 rsc->msr = msr; in rsc_init()
204 rsc->msr = 0; in rsc_uninit()
Dcthw20k2.c1139 unsigned int msr; /* master sample rate in rsrs */ member
1143 unsigned int msr; /* master sample rate in rsrs */ member
1149 unsigned int msr; /* master sample rate in rsrs */ member
1163 if (1 == info->msr) { in hw_daio_init()
1167 } else if (2 == info->msr) { in hw_daio_init()
1185 } else if ((4 == info->msr) && (hw->model == CTSB1270)) { in hw_daio_init()
1227 if (2 == info->msr) { in hw_daio_init()
1230 } else if (4 == info->msr) { in hw_daio_init()
1641 if (1 == info->msr) in hw_dac_init()
1643 else if (2 == info->msr) in hw_dac_init()
[all …]
Dctresource.h39 u32 msr:4; /* The Master Sample Rate a resource working on */ member
54 rsc_init(struct rsc *rsc, u32 idx, enum RSCTYP type, u32 msr, struct hw *hw);
Dctamixer.c133 for (i = 0; i < amixer->rsc.msr; i++) { in amixer_commit_write()
207 AMIXER, desc->msr, mgr->mgr.hw); in amixer_rsc_init()
252 for (i = 0; i < desc->msr; i++) { in get_amixer_rsc()
290 for (i = 0; i < amixer->rsc.msr; i++) in put_amixer_rsc()
373 err = rsc_init(&sum->rsc, sum->idx[0], SUM, desc->msr, mgr->mgr.hw); in sum_rsc_init()
407 for (i = 0; i < desc->msr; i++) { in get_sum_rsc()
445 for (i = 0; i < sum->rsc.msr; i++) in put_sum_rsc()
Dctamixer.h34 unsigned int msr; member
79 unsigned int msr; member
Dctsrc.h82 unsigned char msr; member
123 unsigned int msr; member
Dctatc.h81 unsigned int msr; /* master sample rate in rsr */ member
156 unsigned int rsr, unsigned int msr, int chip_type,
Dcthw20k1.c1196 unsigned int msr; /* master sample rate in rsrs */ member
1200 unsigned int msr; /* master sample rate in rsrs */ member
1206 unsigned int msr; /* master sample rate in rsrs */ member
1231 switch (info->msr) { in hw_daio_init()
1462 switch (info->msr) { in hw_dac_init()
2063 daio_info.msr = info->msr; in hw_card_init()
2068 dac_info.msr = info->msr; in hw_card_init()
2073 adc_info.msr = info->msr; in hw_card_init()
/linux-4.4.14/arch/arm64/include/asm/
Dassembler.h45 msr daifset, #2
49 msr daifclr, #2
56 msr daifset, #8
60 msr daifclr, #8
67 msr mdscr_el1, \tmp
77 msr mdscr_el1, \tmp
87 msr daifclr, #(8 | 2)
Dfpsimdmacros.h51 msr fpcr, \state
74 msr fpsr, x\tmpnr
109 msr fpsr, x\tmpnr1
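
In the AArch64 results (arch/arm64/...), msr is the A64 move-to-system-register instruction, not a model-specific register: the assembler.h macros above mask and unmask exceptions by writing the DAIF set/clear registers. The same idiom from C, as a sketch only; kernel code uses the architecture's own irqflags helpers.

static inline void mask_irqs_sketch(void)
{
	asm volatile("msr daifset, #2" : : : "memory");	/* set PSTATE.I */
}

static inline void unmask_irqs_sketch(void)
{
	asm volatile("msr daifclr, #2" : : : "memory");	/* clear PSTATE.I */
}
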
/linux-4.4.14/arch/powerpc/lib/
Dsstep.c51 static unsigned long truncate_if_32bit(unsigned long msr, unsigned long val) in truncate_if_32bit() argument
54 if ((msr & MSR_64BIT) == 0) in truncate_if_32bit()
104 return truncate_if_32bit(regs->msr, ea); in dform_ea()
121 return truncate_if_32bit(regs->msr, ea); in dsform_ea()
140 return truncate_if_32bit(regs->msr, ea); in xform_ea()
539 if (!(regs->msr & MSR_64BIT)) in set_cr0()
560 if (!(regs->msr & MSR_64BIT)) { in add_with_carry()
661 regs->nip = truncate_if_32bit(regs->msr, regs->nip); in analyse_instr()
665 regs->nip = truncate_if_32bit(regs->msr, imm); in analyse_instr()
683 regs->link = truncate_if_32bit(regs->msr, regs->nip + 4); in analyse_instr()
[all …]
/linux-4.4.14/arch/arm/kvm/
Dinterrupts_head.S104 msr SP_\mode, r2
105 msr LR_\mode, r3
106 msr SPSR_\mode, r4
115 msr r8_fiq, r2
116 msr r9_fiq, r3
117 msr r10_fiq, r4
118 msr r11_fiq, r5
119 msr r12_fiq, r6
120 msr SP_fiq, r7
121 msr LR_fiq, r8
[all …]
/linux-4.4.14/tools/power/cpupower/utils/idle_monitor/
Dnhm_idle.c75 int msr; in nhm_get_count() local
79 msr = MSR_CORE_C3_RESIDENCY; in nhm_get_count()
82 msr = MSR_CORE_C6_RESIDENCY; in nhm_get_count()
85 msr = MSR_PKG_C3_RESIDENCY; in nhm_get_count()
88 msr = MSR_PKG_C6_RESIDENCY; in nhm_get_count()
91 msr = MSR_TSC; in nhm_get_count()
96 if (read_msr(cpu, msr, val)) in nhm_get_count()
Dsnb_idle.c64 int msr; in snb_get_count() local
68 msr = MSR_CORE_C7_RESIDENCY; in snb_get_count()
71 msr = MSR_PKG_C2_RESIDENCY; in snb_get_count()
74 msr = MSR_PKG_C7_RESIDENCY; in snb_get_count()
77 msr = MSR_TSC; in snb_get_count()
82 if (read_msr(cpu, msr, val)) in snb_get_count()
Dhsw_ext_idle.c67 int msr; in hsw_ext_get_count() local
71 msr = MSR_PKG_C8_RESIDENCY; in hsw_ext_get_count()
74 msr = MSR_PKG_C9_RESIDENCY; in hsw_ext_get_count()
77 msr = MSR_PKG_C10_RESIDENCY; in hsw_ext_get_count()
80 msr = MSR_TSC; in hsw_ext_get_count()
85 if (read_msr(cpu, msr, val)) in hsw_ext_get_count()
/linux-4.4.14/drivers/cpufreq/
Dspeedstep-centrino.c285 static unsigned extract_clock(unsigned msr, unsigned int cpu, int failsafe) in extract_clock() argument
297 msr = (msr >> 8) & 0xff; in extract_clock()
298 return msr * 100000; in extract_clock()
305 msr &= 0xffff; in extract_clock()
310 if (msr == per_cpu(centrino_model, cpu)->op_points[i].driver_data) in extract_clock()
423 unsigned int msr, oldmsr = 0, h = 0, cpu = policy->cpu; in centrino_target() local
462 msr = op_points->driver_data; in centrino_target()
466 if (msr == (oldmsr & 0xffff)) { in centrino_target()
476 msr &= 0xffff; in centrino_target()
477 oldmsr |= msr; in centrino_target()
Dacpi-cpufreq.c86 static struct msr __percpu *msrs;
91 u64 msr; in boost_state() local
96 msr = lo | ((u64)hi << 32); in boost_state()
97 return !(msr & MSR_IA32_MISC_ENABLE_TURBO_DISABLE); in boost_state()
100 msr = lo | ((u64)hi << 32); in boost_state()
101 return !(msr & MSR_K7_HWCR_CPB_DIS); in boost_state()
128 struct msr *reg = per_cpu_ptr(msrs, cpu); in boost_set_msrs()
220 static unsigned extract_msr(u32 msr, struct acpi_cpufreq_data *data) in extract_msr() argument
226 msr &= AMD_MSR_RANGE; in extract_msr()
228 msr &= INTEL_MSR_RANGE; in extract_msr()
[all …]
Damd_freq_sensitivity.c46 struct msr actual, reference; in amd_powersave_bias_target()
/linux-4.4.14/drivers/i2c/busses/
Di2c-rcar.c269 static int rcar_i2c_irq_send(struct rcar_i2c_priv *priv, u32 msr) in rcar_i2c_irq_send() argument
278 if (!(msr & MDE)) in rcar_i2c_irq_send()
285 if (msr & MAT) in rcar_i2c_irq_send()
330 static int rcar_i2c_irq_recv(struct rcar_i2c_priv *priv, u32 msr) in rcar_i2c_irq_recv() argument
339 if (!(msr & MDR)) in rcar_i2c_irq_recv()
342 if (msr & MAT) { in rcar_i2c_irq_recv()
430 u32 msr; in rcar_i2c_irq() local
438 msr = rcar_i2c_read(priv, ICMSR); in rcar_i2c_irq()
441 msr &= rcar_i2c_read(priv, ICMIER); in rcar_i2c_irq()
442 if (!msr) { in rcar_i2c_irq()
[all …]
Di2c-sh7760.c117 unsigned long msr, fsr, fier, len; in sh7760_i2c_irq() local
119 msr = IN32(id, I2CMSR); in sh7760_i2c_irq()
123 if (msr & MSR_MAL) { in sh7760_i2c_irq()
131 if (msr & MSR_MNR) { in sh7760_i2c_irq()
146 msr &= ~MSR_MAT; in sh7760_i2c_irq()
152 if (msr & MSR_MST) { in sh7760_i2c_irq()
158 if (msr & MSR_MAT) in sh7760_i2c_irq()
231 OUT32(id, I2CMSR, ~msr); in sh7760_i2c_irq()
/linux-4.4.14/arch/arm/kernel/
Dfiqasm.S28 msr cpsr_c, r2 @ select FIQ mode
33 msr cpsr_c, r1 @ return to SVC mode
41 msr cpsr_c, r2 @ select FIQ mode
46 msr cpsr_c, r1 @ return to SVC mode
Diwmmxt.S201 msr cpsr_c, r2
235 1: msr cpsr_c, ip @ restore interrupt mode
253 msr cpsr_c, r2
262 msr cpsr_c, ip @ restore interrupt mode
273 msr cpsr_c, ip @ restore interrupt mode
291 msr cpsr_c, r2
311 msr cpsr_c, ip @ restore interrupt mode
358 msr cpsr_c, ip
364 msr cpsr_c, r2 @ restore interrupts
Dentry-armv.S339 ARM( msr cpsr_c, #ABT_MODE | PSR_I_BIT | PSR_F_BIT )
341 THUMB( msr cpsr_c, r0 )
344 ARM( msr cpsr_c, #SVC_MODE | PSR_I_BIT | PSR_F_BIT )
346 THUMB( msr cpsr_c, r0 )
353 ARM( msr cpsr_c, #ABT_MODE | PSR_I_BIT | PSR_F_BIT )
355 THUMB( msr cpsr_c, r0 )
357 msr spsr_cxsf, r2 @ Restore spsr_abt
358 ARM( msr cpsr_c, #SVC_MODE | PSR_I_BIT | PSR_F_BIT )
360 THUMB( msr cpsr_c, r0 )
1048 msr spsr_cxsf, r0
Dentry-header.S157 msr psp, r2
178 msr cpsr_c, \rtemp @ switch to the SYS mode
184 msr cpsr_c, \rtemp @ switch back to the SVC mode
190 msr cpsr_c, \rtemp @ switch to the SYS mode
196 msr cpsr_c, \rtemp @ switch back to the SVC mode
327 msr spsr_cxsf, r1 @ save in spsr_svc
/linux-4.4.14/drivers/usb/serial/
Dmct_u232.c310 unsigned char *msr) in mct_u232_get_modem_stat() argument
317 *msr = 0; in mct_u232_get_modem_stat()
327 *msr = 0; in mct_u232_get_modem_stat()
329 *msr = buf[0]; in mct_u232_get_modem_stat()
331 dev_dbg(&port->dev, "get_modem_stat: 0x%x\n", *msr); in mct_u232_get_modem_stat()
337 unsigned char msr) in mct_u232_msr_to_icount() argument
340 if (msr & MCT_U232_MSR_DDSR) in mct_u232_msr_to_icount()
342 if (msr & MCT_U232_MSR_DCTS) in mct_u232_msr_to_icount()
344 if (msr & MCT_U232_MSR_DRI) in mct_u232_msr_to_icount()
346 if (msr & MCT_U232_MSR_DCD) in mct_u232_msr_to_icount()
[all …]
Dark3116.c77 __u32 msr; /* modem status register value */ member
376 priv->msr = ark3116_read_reg(serial, UART_MSR, buf); in ark3116_open()
446 status = priv->msr; in ark3116_tiocmget()
513 static void ark3116_update_msr(struct usb_serial_port *port, __u8 msr) in ark3116_update_msr() argument
519 priv->msr = msr; in ark3116_update_msr()
522 if (msr & UART_MSR_ANY_DELTA) { in ark3116_update_msr()
524 if (msr & UART_MSR_DCTS) in ark3116_update_msr()
526 if (msr & UART_MSR_DDSR) in ark3116_update_msr()
528 if (msr & UART_MSR_DDCD) in ark3116_update_msr()
530 if (msr & UART_MSR_TERI) in ark3116_update_msr()
Df81232.c505 u8 mcr, msr; in f81232_tiocmget() local
512 msr = port_priv->modem_status; in f81232_tiocmget()
517 (msr & UART_MSR_CTS ? TIOCM_CTS : 0) | in f81232_tiocmget()
518 (msr & UART_MSR_DCD ? TIOCM_CAR : 0) | in f81232_tiocmget()
519 (msr & UART_MSR_RI ? TIOCM_RI : 0) | in f81232_tiocmget()
520 (msr & UART_MSR_DSR ? TIOCM_DSR : 0); in f81232_tiocmget()
578 u8 msr; in f81232_carrier_raised() local
582 msr = priv->modem_status; in f81232_carrier_raised()
585 if (msr & UART_MSR_DCD) in f81232_carrier_raised()
Dti_usb_3410_5052.c123 static void ti_handle_new_msr(struct ti_port *tport, __u8 msr);
842 unsigned int msr; in ti_tiocmget() local
850 msr = tport->tp_msr; in ti_tiocmget()
857 | ((msr & TI_MSR_CTS) ? TIOCM_CTS : 0) in ti_tiocmget()
858 | ((msr & TI_MSR_CD) ? TIOCM_CAR : 0) in ti_tiocmget()
859 | ((msr & TI_MSR_RI) ? TIOCM_RI : 0) in ti_tiocmget()
860 | ((msr & TI_MSR_DSR) ? TIOCM_DSR : 0); in ti_tiocmget()
934 __u8 msr; in ti_interrupt_callback() local
986 msr = data[1]; in ti_interrupt_callback()
987 dev_dbg(dev, "%s - port %d, msr 0x%02X\n", __func__, port_number, msr); in ti_interrupt_callback()
[all …]
Dssu100.c441 static void ssu100_update_msr(struct usb_serial_port *port, u8 msr) in ssu100_update_msr() argument
447 priv->shadowMSR = msr; in ssu100_update_msr()
450 if (msr & UART_MSR_ANY_DELTA) { in ssu100_update_msr()
452 if (msr & UART_MSR_DCTS) in ssu100_update_msr()
454 if (msr & UART_MSR_DDSR) in ssu100_update_msr()
456 if (msr & UART_MSR_DDCD) in ssu100_update_msr()
458 if (msr & UART_MSR_TERI) in ssu100_update_msr()
Dio_ti.c1560 static void handle_new_msr(struct edgeport_port *edge_port, __u8 msr) in handle_new_msr() argument
1565 dev_dbg(&edge_port->port->dev, "%s - %02x\n", __func__, msr); in handle_new_msr()
1567 if (msr & (EDGEPORT_MSR_DELTA_CTS | EDGEPORT_MSR_DELTA_DSR | in handle_new_msr()
1572 if (msr & EDGEPORT_MSR_DELTA_CTS) in handle_new_msr()
1574 if (msr & EDGEPORT_MSR_DELTA_DSR) in handle_new_msr()
1576 if (msr & EDGEPORT_MSR_DELTA_CD) in handle_new_msr()
1578 if (msr & EDGEPORT_MSR_DELTA_RI) in handle_new_msr()
1584 edge_port->shadow_msr = msr & 0xf0; in handle_new_msr()
1589 if (msr & EDGEPORT_MSR_CTS) in handle_new_msr()
1641 __u8 msr; in edge_interrupt_callback() local
[all …]
Dspcp8x5.c252 u8 msr; in spcp8x5_carrier_raised() local
255 ret = spcp8x5_get_msr(port, &msr); in spcp8x5_carrier_raised()
256 if (ret || msr & MSR_STATUS_LINE_DCD) in spcp8x5_carrier_raised()
Dkeyspan_usa90msg.h155 u8 msr, // reports the actual MSR register member
Dmxuport.c747 unsigned int msr; in mxuport_tiocmget() local
755 msr = mxport->msr_state; in mxuport_tiocmget()
763 ((msr & UART_MSR_CTS) ? TIOCM_CTS : 0) | /* 0x020 */ in mxuport_tiocmget()
764 ((msr & UART_MSR_DCD) ? TIOCM_CAR : 0) | /* 0x040 */ in mxuport_tiocmget()
765 ((msr & UART_MSR_RI) ? TIOCM_RI : 0) | /* 0x080 */ in mxuport_tiocmget()
766 ((msr & UART_MSR_DSR) ? TIOCM_DSR : 0)); /* 0x100 */ in mxuport_tiocmget()
Dmos7720.c1769 unsigned int msr ; in mos7720_tiocmget() local
1772 msr = mos7720_port->shadowMSR; in mos7720_tiocmget()
1776 | ((msr & UART_MSR_CTS) ? TIOCM_CTS : 0) /* 0x020 */ in mos7720_tiocmget()
1777 | ((msr & UART_MSR_DCD) ? TIOCM_CAR : 0) /* 0x040 */ in mos7720_tiocmget()
1778 | ((msr & UART_MSR_RI) ? TIOCM_RI : 0) /* 0x080 */ in mos7720_tiocmget()
1779 | ((msr & UART_MSR_DSR) ? TIOCM_DSR : 0); /* 0x100 */ in mos7720_tiocmget()
/linux-4.4.14/ipc/
Dmsg.c188 struct msg_receiver *msr, *t; in expunge_all() local
190 list_for_each_entry_safe(msr, t, &msq->q_receivers, r_list) { in expunge_all()
191 msr->r_msg = NULL; /* initialize expunge ordering */ in expunge_all()
192 wake_up_process(msr->r_tsk); in expunge_all()
200 msr->r_msg = ERR_PTR(res); in expunge_all()
571 struct msg_receiver *msr, *t; in pipelined_send() local
573 list_for_each_entry_safe(msr, t, &msq->q_receivers, r_list) { in pipelined_send()
574 if (testmsg(msg, msr->r_msgtype, msr->r_mode) && in pipelined_send()
575 !security_msg_queue_msgrcv(msq, msg, msr->r_tsk, in pipelined_send()
576 msr->r_msgtype, msr->r_mode)) { in pipelined_send()
[all …]
/linux-4.4.14/drivers/tty/serial/
Dvr41xx_siu.c223 uint8_t msr; in siu_get_mctrl() local
226 msr = siu_read(port, UART_MSR); in siu_get_mctrl()
227 if (msr & UART_MSR_DCD) in siu_get_mctrl()
229 if (msr & UART_MSR_RI) in siu_get_mctrl()
231 if (msr & UART_MSR_DSR) in siu_get_mctrl()
233 if (msr & UART_MSR_CTS) in siu_get_mctrl()
373 uint8_t msr; in check_modem_status() local
375 msr = siu_read(port, UART_MSR); in check_modem_status()
376 if ((msr & UART_MSR_ANY_DELTA) == 0) in check_modem_status()
378 if (msr & UART_MSR_DDCD) in check_modem_status()
[all …]
Dmen_z135_uart.c186 u8 msr; in men_z135_handle_modem_status() local
188 msr = (uart->stat_reg >> 8) & 0xff; in men_z135_handle_modem_status()
190 if (msr & MEN_Z135_MSR_DDCD) in men_z135_handle_modem_status()
192 msr & MEN_Z135_MSR_DCD); in men_z135_handle_modem_status()
193 if (msr & MEN_Z135_MSR_DCTS) in men_z135_handle_modem_status()
195 msr & MEN_Z135_MSR_CTS); in men_z135_handle_modem_status()
527 u8 msr; in men_z135_get_mctrl() local
529 msr = ioread8(port->membase + MEN_Z135_STAT_REG + 1); in men_z135_get_mctrl()
531 if (msr & MEN_Z135_MSR_CTS) in men_z135_get_mctrl()
533 if (msr & MEN_Z135_MSR_DSR) in men_z135_get_mctrl()
[all …]
Dserial-tegra.c666 unsigned long msr; in tegra_uart_handle_modem_signal_change() local
668 msr = tegra_uart_read(tup, UART_MSR); in tegra_uart_handle_modem_signal_change()
669 if (!(msr & UART_MSR_ANY_DELTA)) in tegra_uart_handle_modem_signal_change()
672 if (msr & UART_MSR_TERI) in tegra_uart_handle_modem_signal_change()
674 if (msr & UART_MSR_DDSR) in tegra_uart_handle_modem_signal_change()
677 if (msr & UART_MSR_DDCD) in tegra_uart_handle_modem_signal_change()
678 uart_handle_dcd_change(&tup->uport, msr & UART_MSR_DCD); in tegra_uart_handle_modem_signal_change()
680 if (msr & UART_MSR_DCTS) in tegra_uart_handle_modem_signal_change()
681 uart_handle_cts_change(&tup->uport, msr & UART_MSR_CTS); in tegra_uart_handle_modem_signal_change()
782 unsigned long msr; in tegra_uart_hw_deinit() local
[all …]
Dpnx8xxx_uart.c331 unsigned int msr; in pnx8xxx_get_mctrl() local
335 msr = serial_in(sport, PNX8XXX_MCR); in pnx8xxx_get_mctrl()
337 mctrl |= msr & PNX8XXX_UART_MCR_CTS ? TIOCM_CTS : 0; in pnx8xxx_get_mctrl()
338 mctrl |= msr & PNX8XXX_UART_MCR_DCD ? TIOCM_CAR : 0; in pnx8xxx_get_mctrl()
347 unsigned int msr; in pnx8xxx_set_mctrl()
Dpch_uart.c571 unsigned int msr = ioread8(priv->membase + UART_MSR); in pch_uart_hal_get_modem() local
572 priv->dmsr = msr & PCH_UART_MSR_DELTA; in pch_uart_hal_get_modem()
573 return (u8)msr; in pch_uart_hal_get_modem()
1109 u8 msr; in pch_uart_interrupt() local
1154 msr = pch_uart_hal_get_modem(priv); in pch_uart_interrupt()
1157 if ((msr & UART_MSR_ANY_DELTA) == 0) in pch_uart_interrupt()
1550 unsigned int msr = ioread8(up->membase + UART_MSR); in wait_for_xmitr() local
1551 if (msr & UART_MSR_CTS) in wait_for_xmitr()
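
In the tty and USB serial hits, msr is the 16550-style Modem Status Register. The various tiocmget() implementations above all perform the same decode of its line-state bits into TIOCM_* flags; a condensed sketch, assuming the UART_MSR_* and TIOCM_* definitions from <linux/serial_reg.h> and <linux/termios.h>.

#include <linux/types.h>
#include <linux/serial_reg.h>
#include <linux/termios.h>

static unsigned int msr_to_tiocm(u8 msr)
{
	return (msr & UART_MSR_CTS ? TIOCM_CTS : 0) |
	       (msr & UART_MSR_DSR ? TIOCM_DSR : 0) |
	       (msr & UART_MSR_RI  ? TIOCM_RI  : 0) |
	       (msr & UART_MSR_DCD ? TIOCM_CAR : 0);
}
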
/linux-4.4.14/arch/powerpc/platforms/pasemi/
Didle.c52 if (regs->msr & SRR1_WAKEMASK) in pasemi_system_reset_exception()
55 switch (regs->msr & SRR1_WAKEMASK) { in pasemi_system_reset_exception()
71 regs->msr |= MSR_RI; in pasemi_system_reset_exception()
/linux-4.4.14/arch/powerpc/platforms/52xx/
Dmpc52xx_pm.c115 u32 msr, hid0; in mpc52xx_pm_enter() local
143 msr = mfmsr(); in mpc52xx_pm_enter()
144 mtmsr(msr & ~MSR_POW); in mpc52xx_pm_enter()
163 mtmsr(msr & ~MSR_POW); in mpc52xx_pm_enter()
165 mtmsr(msr); in mpc52xx_pm_enter()
/linux-4.4.14/tools/power/x86/x86_energy_perf_policy/
Dx86_energy_perf_policy.c189 unsigned long long msr; in get_msr() local
202 retval = pread(fd, &msr, sizeof msr, offset); in get_msr()
204 if (retval != sizeof msr) { in get_msr()
209 return msr; in get_msr()
/linux-4.4.14/arch/powerpc/sysdev/
Duic.c202 u32 msr; in uic_irq_cascade() local
213 msr = mfdcr(uic->dcrbase + UIC_MSR); in uic_irq_cascade()
214 if (!msr) /* spurious interrupt */ in uic_irq_cascade()
217 src = 32 - ffs(msr); in uic_irq_cascade()
325 u32 msr; in uic_get_irq() local
330 msr = mfdcr(primary_uic->dcrbase + UIC_MSR); in uic_get_irq()
331 src = 32 - ffs(msr); in uic_get_irq()
/linux-4.4.14/drivers/video/fbdev/i810/
Di810_gtf.c129 u8 msr = 0; in i810fb_encode_registers() local
191 msr |= 1 << 6; in i810fb_encode_registers()
193 msr |= 1 << 7; in i810fb_encode_registers()
194 par->regs.msr = msr; in i810fb_encode_registers()
Di810_dvt.c266 if (~(std_modes[mode].msr & (1 << 6))) in i810fb_fill_var_timings()
268 if (~(std_modes[mode].msr & (1 << 7))) in i810fb_fill_var_timings()
Di810.h221 u8 msr; member
241 u8 cr39, cr41, cr70, sr01, msr; member
/linux-4.4.14/drivers/misc/
Dcs5535-mfgpt.c50 uint32_t msr, mask, value, dummy; in cs5535_mfgpt_toggle_event() local
69 msr = MSR_MFGPT_NR; in cs5535_mfgpt_toggle_event()
74 msr = MSR_MFGPT_NR; in cs5535_mfgpt_toggle_event()
79 msr = MSR_MFGPT_IRQ; in cs5535_mfgpt_toggle_event()
87 rdmsr(msr, value, dummy); in cs5535_mfgpt_toggle_event()
94 wrmsr(msr, value, dummy); in cs5535_mfgpt_toggle_event()
/linux-4.4.14/arch/mips/pci/
Dops-loongson2.c185 void _rdmsr(u32 msr, u32 *hi, u32 *lo) in _rdmsr() argument
194 loongson_pcibios_write(&bus, devfn, PCI_MSR_ADDR, 4, msr); in _rdmsr()
201 void _wrmsr(u32 msr, u32 hi, u32 lo) in _wrmsr() argument
210 loongson_pcibios_write(&bus, devfn, PCI_MSR_ADDR, 4, msr); in _wrmsr()
/linux-4.4.14/drivers/mfd/
Dezx-pcap.c46 u32 msr; member
151 pcap->msr |= 1 << irq_to_pcap(pcap, d->irq); in pcap_mask_irq()
159 pcap->msr &= ~(1 << irq_to_pcap(pcap, d->irq)); in pcap_unmask_irq()
174 ezx_pcap_write(pcap, PCAP_REG_MSR, pcap->msr); in pcap_msr_work()
181 u32 msr, isr, int_sel, service; in pcap_isr_work() local
185 ezx_pcap_read(pcap, PCAP_REG_MSR, &msr); in pcap_isr_work()
194 ezx_pcap_write(pcap, PCAP_REG_MSR, isr | msr); in pcap_isr_work()
198 service = isr & ~msr; in pcap_isr_work()
204 ezx_pcap_write(pcap, PCAP_REG_MSR, pcap->msr); in pcap_isr_work()
472 pcap->msr = PCAP_MASK_ALL_INTERRUPT; in ezx_pcap_probe()
/linux-4.4.14/arch/x86/kernel/
Damd_nb.c141 u64 base, msr; in amd_get_mmconfig_range() local
152 rdmsrl(address, msr); in amd_get_mmconfig_range()
155 if (!(msr & FAM10H_MMIO_CONF_ENABLE)) in amd_get_mmconfig_range()
158 base = msr & (FAM10H_MMIO_CONF_BASE_MASK<<FAM10H_MMIO_CONF_BASE_SHIFT); in amd_get_mmconfig_range()
160 segn_busn_bits = (msr >> FAM10H_MMIO_CONF_BUSRANGE_SHIFT) & in amd_get_mmconfig_range()
/linux-4.4.14/arch/arm/include/asm/
Dassembler.h103 msr cpsr_c, #PSR_I_BIT | SVC_MODE
107 msr cpsr_c, #SVC_MODE
172 msr primask, \oldcpsr
174 msr cpsr_c, \oldcpsr
323 msr cpsr_c, \reg
327 msr cpsr_c, #\mode
349 msr spsr_cxsf, \reg
352 1: msr cpsr_c, \reg
/linux-4.4.14/drivers/powercap/
Dintel_rapl.c762 u32 msr; in rapl_read_data_raw() local
769 msr = rd->msrs[rp->id]; in rapl_read_data_raw()
770 if (!msr) in rapl_read_data_raw()
788 if (rdmsrl_safe_on_cpu(cpu, msr, &value)) { in rapl_read_data_raw()
789 pr_debug("failed to read msr 0x%x on cpu %d\n", msr, cpu); in rapl_read_data_raw()
809 u32 msr; in rapl_write_data_raw() local
816 msr = rd->msrs[rp->id]; in rapl_write_data_raw()
817 if (rdmsrl_safe_on_cpu(cpu, msr, &msr_val)) { in rapl_write_data_raw()
819 "failed to read msr 0x%x on cpu %d\n", msr, cpu); in rapl_write_data_raw()
825 if (wrmsrl_safe_on_cpu(cpu, msr, msr_val)) { in rapl_write_data_raw()
[all …]
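
The intel_rapl hits above rely on the rdmsrl_safe_on_cpu()/wrmsrl_safe_on_cpu() helpers, which perform the access on a chosen CPU and return an error instead of faulting when an MSR is not implemented. A sketch of that read-modify-write flow, with EXAMPLE_MSR as a placeholder rather than a real RAPL register:

	#include <linux/errno.h>
	#include <linux/types.h>
	#include <asm/msr.h>

	#define EXAMPLE_MSR	0x00000456	/* placeholder index, not a real RAPL MSR */

	static int example_msr_update_on_cpu(unsigned int cpu, u64 set_bits)
	{
		u64 val;

		if (rdmsrl_safe_on_cpu(cpu, EXAMPLE_MSR, &val))
			return -EIO;	/* MSR not readable on that CPU */

		val |= set_bits;

		if (wrmsrl_safe_on_cpu(cpu, EXAMPLE_MSR, val))
			return -EIO;

		return 0;
	}
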
/linux-4.4.14/Documentation/virtual/kvm/
Dcpuid.txt44 || || writing to msr 0x4b564d02
47 || || writing to msr 0x4b564d03.
51 || || to msr 0x4b564d04.
D00-INDEX15 msr.txt
/linux-4.4.14/drivers/net/hamradio/
Dbaycom_ser_fdx.c291 unsigned char iir, msr; in ser12_interrupt() local
301 msr = inb(MSR(dev->base_addr)); in ser12_interrupt()
303 if ((msr & 8) && bc->opt_dcd) in ser12_interrupt()
304 hdlcdrv_setdcd(&bc->hdrv, !((msr ^ bc->opt_dcd) & 0x80)); in ser12_interrupt()
335 msr = inb(MSR(dev->base_addr)); in ser12_interrupt()
337 if ((msr & 8) && bc->opt_dcd) in ser12_interrupt()
338 hdlcdrv_setdcd(&bc->hdrv, !((msr ^ bc->opt_dcd) & 0x80)); in ser12_interrupt()
343 ser12_rx(dev, bc, &tv, msr & 0x10); /* CTS */ in ser12_interrupt()
Dyam.c760 unsigned char msr = inb(MSR(dev->base_addr)); in yam_interrupt() local
769 yp->dcd = (msr & RX_DCD) ? 1 : 0; in yam_interrupt()
776 if (msr & TX_RDY) { in yam_interrupt()
783 if (msr & RX_FLAG) in yam_interrupt()
/linux-4.4.14/drivers/video/fbdev/geode/
Dsuspend_gx.c30 rdmsrl(MSR_GX_MSR_PADSEL, par->msr.padsel); in gx_save_regs()
31 rdmsrl(MSR_GLCP_DOTPLL, par->msr.dotpll); in gx_save_regs()
142 wrmsrl(MSR_GX_MSR_PADSEL, par->msr.padsel); in gx_restore_video_proc()
179 gx_set_dotpll((uint32_t) (par->msr.dotpll >> 32)); in gx_restore_regs()
Dlxfb_ops.c600 rdmsrl(MSR_LX_MSR_PADSEL, par->msr.padsel); in lx_save_regs()
601 rdmsrl(MSR_GLCP_DOTPLL, par->msr.dotpll); in lx_save_regs()
602 rdmsrl(MSR_LX_GLD_MSR_CONFIG, par->msr.dfglcfg); in lx_save_regs()
603 rdmsrl(MSR_LX_SPARE_MSR, par->msr.dcspare); in lx_save_regs()
673 wrmsrl(MSR_LX_SPARE_MSR, par->msr.dcspare); in lx_restore_display_ctlr()
737 wrmsrl(MSR_LX_GLD_MSR_CONFIG, par->msr.dfglcfg); in lx_restore_video_proc()
738 wrmsrl(MSR_LX_MSR_PADSEL, par->msr.padsel); in lx_restore_video_proc()
772 lx_set_dotpll((u32) (par->msr.dotpll >> 32)); in lx_restore_regs()
Dgxfb.h35 } msr; member
/linux-4.4.14/arch/arm/mach-ep93xx/
Dcrunch-bits.S214 msr cpsr_c, r2
246 1: msr cpsr_c, ip @ restore interrupt mode
260 msr cpsr_c, r2
269 msr cpsr_c, ip @ restore interrupt mode
279 msr cpsr_c, ip @ restore interrupt mode
293 msr cpsr_c, r2
/linux-4.4.14/samples/kprobes/
Dkprobe_example.c33 p->addr, regs->nip, regs->msr); in handler_pre()
60 p->addr, regs->msr); in handler_post()
/linux-4.4.14/arch/x86/pci/
Dmmconfig-shared.c190 u64 base, msr; in pci_mmcfg_amd_fam10h() local
201 msr = high; in pci_mmcfg_amd_fam10h()
202 msr <<= 32; in pci_mmcfg_amd_fam10h()
203 msr |= low; in pci_mmcfg_amd_fam10h()
206 if (!(msr & FAM10H_MMIO_CONF_ENABLE)) in pci_mmcfg_amd_fam10h()
209 base = msr & (FAM10H_MMIO_CONF_BASE_MASK<<FAM10H_MMIO_CONF_BASE_SHIFT); in pci_mmcfg_amd_fam10h()
211 busnbits = (msr >> FAM10H_MMIO_CONF_BUSRANGE_SHIFT) & in pci_mmcfg_amd_fam10h()
/linux-4.4.14/drivers/ssb/
Ddriver_extif.c43 u8 save_mcr, msr = 0; in serial_exists() local
48 msr = regs[UART_MSR] & (UART_MSR_DCD | UART_MSR_RI in serial_exists()
52 return (msr == (UART_MSR_DCD | UART_MSR_CTS)); in serial_exists()
/linux-4.4.14/arch/arm/mm/
Dproc-feroceon.S264 msr cpsr_c, r3 @ disable interrupts
267 msr cpsr_c, r2 @ restore interrupts
310 msr cpsr_c, r3 @ disable interrupts
313 msr cpsr_c, r2 @ restore interrupts
342 msr cpsr_c, r3 @ disable interrupts
345 msr cpsr_c, r2 @ restore interrupts
373 msr cpsr_c, r3 @ disable interrupts
376 msr cpsr_c, r2 @ restore interrupts
Dproc-v7m.S116 msr control, r1 @ Thread mode has unpriviledged access
/linux-4.4.14/Documentation/powerpc/
Dtransactional_memory.txt111 For 64-bit processes, uc->uc_mcontext.regs->msr is a full 64-bit MSR and its TS
116 uc->uc_link->uc_mcontext.regs->msr. The top word contains the transactional
134 u64 msr = ucp->uc_mcontext.regs->msr;
137 msr |= ((u64)transactional_ucp->uc_mcontext.regs->msr) << 32;
139 if (MSR_TM_ACTIVE(msr)) {
/linux-4.4.14/net/mac80211/
Drc80211_minstrel.c340 struct minstrel_rate *msr, *mr; in minstrel_get_rate() local
396 msr = &mi->r[ndx]; in minstrel_get_rate()
404 msr->perfect_tx_time > mr->perfect_tx_time && in minstrel_get_rate()
405 msr->stats.sample_skipped < 20) { in minstrel_get_rate()
416 if (!msr->sample_limit) in minstrel_get_rate()
420 if (msr->sample_limit > 0) in minstrel_get_rate()
421 msr->sample_limit--; in minstrel_get_rate()
/linux-4.4.14/arch/x86/kernel/cpu/mtrr/
Dgeneric.c519 void mtrr_wrmsr(unsigned msr, unsigned a, unsigned b) in mtrr_wrmsr() argument
521 if (wrmsr_safe(msr, a, b) < 0) { in mtrr_wrmsr()
524 smp_processor_id(), msr, a, b); in mtrr_wrmsr()
535 static void set_fixed_range(int msr, bool *changed, unsigned int *msrwords) in set_fixed_range() argument
539 rdmsr(msr, lo, hi); in set_fixed_range()
542 mtrr_wrmsr(msr, msrwords[0], msrwords[1]); in set_fixed_range()
/linux-4.4.14/arch/mips/include/asm/mach-loongson64/cs5536/
Dcs5536.h13 extern void _rdmsr(u32 msr, u32 *hi, u32 *lo);
14 extern void _wrmsr(u32 msr, u32 hi, u32 lo);
/linux-4.4.14/arch/x86/include/uapi/asm/
DKbuild30 header-y += msr-index.h
31 header-y += msr.h
/linux-4.4.14/drivers/bluetooth/
Ddtl1_cs.c292 unsigned char msr; in dtl1_interrupt() local
339 msr = inb(iobase + UART_MSR); in dtl1_interrupt()
341 if (info->ri_latch ^ (msr & UART_MSR_RI)) { in dtl1_interrupt()
342 info->ri_latch = msr & UART_MSR_RI; in dtl1_interrupt()
/linux-4.4.14/arch/x86/kernel/cpu/mcheck/
Dmce.c354 static int msr_to_offset(u32 msr) in msr_to_offset() argument
358 if (msr == mca_cfg.rip_msr) in msr_to_offset()
360 if (msr == MSR_IA32_MCx_STATUS(bank)) in msr_to_offset()
362 if (msr == MSR_IA32_MCx_ADDR(bank)) in msr_to_offset()
364 if (msr == MSR_IA32_MCx_MISC(bank)) in msr_to_offset()
366 if (msr == MSR_IA32_MCG_STATUS) in msr_to_offset()
372 static u64 mce_rdmsrl(u32 msr) in mce_rdmsrl() argument
377 int offset = msr_to_offset(msr); in mce_rdmsrl()
384 if (rdmsrl_safe(msr, &v)) { in mce_rdmsrl()
385 WARN_ONCE(1, "mce: Unable to read msr %d!\n", msr); in mce_rdmsrl()
[all …]
/linux-4.4.14/drivers/staging/dgnc/
Ddgnc_cls.h45 u8 msr; member
Ddgnc_neo.h37 u8 msr; /* WR MSR - Modem Status Reg */ member
Ddgnc_cls.c143 writeb(ch->ch_stopc, &ch->ch_cls_uart->msr); in cls_set_ixon_flow_control()
269 writeb(ch->ch_stopc, &ch->ch_cls_uart->msr); in cls_set_ixoff_flow_control()
418 cls_parse_modem(ch, readb(&ch->ch_cls_uart->msr)); in cls_parse_isr()
671 cls_parse_modem(ch, readb(&ch->ch_cls_uart->msr)); in cls_param()
1178 readb(&ch->ch_cls_uart->msr); in cls_uart_init()
/linux-4.4.14/arch/microblaze/include/asm/
Dsetup.h39 unsigned int fdt, unsigned int msr, unsigned int tlb0,
Dthread_info.h56 __u32 msr; member
/linux-4.4.14/drivers/acpi/
Dprocessor_throttling.c716 u64 msr = 0; in acpi_throttling_rdmsr() local
728 msr = (msr_high << 32) | msr_low; in acpi_throttling_rdmsr()
729 *value = (u64) msr; in acpi_throttling_rdmsr()
738 u64 msr; in acpi_throttling_wrmsr() local
745 msr = value; in acpi_throttling_wrmsr()
747 msr & 0xffffffff, msr >> 32); in acpi_throttling_wrmsr()
/linux-4.4.14/arch/x86/kernel/apic/
Dapic.c1436 u64 msr; in __x2apic_disable() local
1441 rdmsrl(MSR_IA32_APICBASE, msr); in __x2apic_disable()
1442 if (!(msr & X2APIC_ENABLE)) in __x2apic_disable()
1445 wrmsrl(MSR_IA32_APICBASE, msr & ~(X2APIC_ENABLE | XAPIC_ENABLE)); in __x2apic_disable()
1446 wrmsrl(MSR_IA32_APICBASE, msr & ~X2APIC_ENABLE); in __x2apic_disable()
1452 u64 msr; in __x2apic_enable() local
1454 rdmsrl(MSR_IA32_APICBASE, msr); in __x2apic_enable()
1455 if (msr & X2APIC_ENABLE) in __x2apic_enable()
1457 wrmsrl(MSR_IA32_APICBASE, msr | X2APIC_ENABLE); in __x2apic_enable()
/linux-4.4.14/arch/powerpc/include/uapi/asm/
Dkvm_para.h47 __u64 msr; member
Dptrace.h34 unsigned long msr; member
/linux-4.4.14/arch/arm/mach-s3c24xx/
Dsleep.S57 msr cpsr_c, r0
/linux-4.4.14/arch/arm/mach-s3c64xx/
Dsleep.S43 msr cpsr_c, #PSR_I_BIT | PSR_F_BIT | SVC_MODE
/linux-4.4.14/drivers/ata/
Dpata_cs5536.c45 module_param_named(msr, use_msr, int, 0644);
46 MODULE_PARM_DESC(msr, "Force using MSR to configure IDE function (Default: 0)");
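
The pata_cs5536 hits above only declare a module parameter named "msr". A hedged sketch of the module_param_named()/MODULE_PARM_DESC() pairing in isolation; the backing variable and description text are illustrative, not copied from the driver.

	#include <linux/module.h>

	static int use_msr;	/* backing variable; defaults to 0 */

	/* Exposed to userspace as the "msr" parameter of this hypothetical module. */
	module_param_named(msr, use_msr, int, 0644);
	MODULE_PARM_DESC(msr, "Force using MSR to configure the device (Default: 0)");
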
/linux-4.4.14/drivers/isdn/i4l/
Disdn_tty.c301 info->msr &= ~UART_MSR_CTS; in isdn_tty_tint()
446 info->msr &= ~UART_MSR_CTS; in isdn_tty_senddown()
727 if ((info->msr & UART_MSR_RI) && in isdn_tty_modem_hup()
730 info->msr &= ~(UART_MSR_DCD | UART_MSR_RI); in isdn_tty_modem_hup()
1074 info->msr |= (UART_MSR_DSR | UART_MSR_CTS); in isdn_tty_startup()
1092 info->msr &= ~UART_MSR_RI; in isdn_tty_shutdown()
1159 info->msr |= UART_MSR_CTS; in isdn_tty_write()
1201 info->msr |= UART_MSR_CTS; in isdn_tty_write()
1363 status = info->msr; in isdn_tty_tiocmget()
1757 return info->msr & UART_MSR_DCD; in isdn_tty_carrier_raised()
[all …]
/linux-4.4.14/arch/powerpc/platforms/cell/
Dpervasive.c89 switch (regs->msr & SRR1_WAKEMASK) { in cbe_system_reset_exception()
/linux-4.4.14/arch/arm/lib/
Decard.S18 msr spsr_cxsf, rt
/linux-4.4.14/drivers/ide/
Dcs5536.c305 module_param_named(msr, use_msr, int, 0644);
306 MODULE_PARM_DESC(msr, "Force using MSR to configure IDE function (Default: 0)");
/linux-4.4.14/drivers/tty/serial/jsm/
Djsm.h275 u8 msr; /* WR MSR - Modem Status Reg */ member
330 u8 msr; /* WR MSR - Modem Status Reg */ member
Djsm_cls.c125 writeb(ch->ch_stopc, &ch->ch_cls_uart->msr); in cls_set_ixon_flow_control()
251 writeb(ch->ch_stopc, &ch->ch_cls_uart->msr); in cls_set_ixoff_flow_control()
611 cls_parse_modem(ch, readb(&ch->ch_cls_uart->msr)); in cls_parse_isr()
822 cls_parse_modem(ch, readb(&ch->ch_cls_uart->msr)); in cls_param()
901 readb(&ch->ch_cls_uart->msr); in cls_uart_init()
/linux-4.4.14/arch/powerpc/xmon/
Dxmon.c399 return ((regs->msr & MSR_RI) == 0); in unrecoverable_excp()
454 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) in xmon_core()
578 if (regs->msr & MSR_DE) { in xmon_core()
586 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) { in xmon_core()
636 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT)) in xmon_bpt()
667 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT)) in xmon_break_match()
677 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT)) in xmon_iabr_match()
702 if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) { in xmon_fault_handler()
988 regs->msr |= MSR_DE; in do_step()
1003 if ((regs->msr & (MSR_64BIT|MSR_PR|MSR_IR)) == (MSR_64BIT|MSR_IR)) { in do_step()
[all …]
/linux-4.4.14/arch/microblaze/include/uapi/asm/
Dptrace.h50 microblaze_reg_t msr; member
/linux-4.4.14/drivers/idle/
Dintel_idle.c1032 unsigned long long msr; in sklh_idle_state_table_update() local
1044 rdmsrl(MSR_NHM_SNB_PKG_CST_CFG_CTL, msr); in sklh_idle_state_table_update()
1047 if ((msr & 0xF) != 8) in sklh_idle_state_table_update()
1056 rdmsrl(MSR_IA32_FEATURE_CONTROL, msr); in sklh_idle_state_table_update()
1059 if (msr & (1 << 18)) in sklh_idle_state_table_update()
/linux-4.4.14/arch/arm/boot/dts/
Dbcm59056.dtsi62 msr_reg: msr {
/linux-4.4.14/Documentation/devicetree/bindings/mfd/
Dbrcm,bcm59056.txt22 csr, iosr1, iosr2, msr, sdsr1, sdsr2, vsr,
/linux-4.4.14/arch/sh/include/asm/
Dsmc37c93x.h77 volatile __u16 msr; member
/linux-4.4.14/arch/powerpc/platforms/embedded6xx/
Dmpc7448_hpc2.c178 regs->msr |= MSR_RI; in mpc7448_machine_check_exception()
Dholly.c267 regs->msr |= MSR_RI; in ppc750_machine_check_exception()
/linux-4.4.14/arch/frv/include/uapi/asm/
Dregisters.h192 unsigned long msr[2]; member
/linux-4.4.14/arch/m68k/include/asm/
Dbvme6000hw.h50 pad_a[3], msr, member
/linux-4.4.14/drivers/staging/rtl8192e/rtl8192e/
Dr8192E_dev.c59 u8 msr; in _rtl92e_update_msr() local
62 msr = rtl92e_readb(dev, MSR); in _rtl92e_update_msr()
63 msr &= ~MSR_LINK_MASK; in _rtl92e_update_msr()
68 msr |= (MSR_LINK_MANAGED << MSR_LINK_SHIFT); in _rtl92e_update_msr()
70 msr |= (MSR_LINK_NONE << MSR_LINK_SHIFT); in _rtl92e_update_msr()
75 msr |= (MSR_LINK_ADHOC << MSR_LINK_SHIFT); in _rtl92e_update_msr()
77 msr |= (MSR_LINK_NONE << MSR_LINK_SHIFT); in _rtl92e_update_msr()
81 msr |= (MSR_LINK_MASTER << MSR_LINK_SHIFT); in _rtl92e_update_msr()
83 msr |= (MSR_LINK_NONE << MSR_LINK_SHIFT); in _rtl92e_update_msr()
89 rtl92e_writeb(dev, MSR, msr); in _rtl92e_update_msr()
/linux-4.4.14/arch/frv/kernel/
Dasm-offsets.c74 DEF_FREG(__FPMEDIA_MSR0, f.msr[0]); in foo()
/linux-4.4.14/drivers/regulator/
Dbcm590xx-regulator.c189 BCM590XX_REG_RANGES(msr, dcdc_iosr1_ranges),
302 BCM590XX_MATCH(msr, MSR),
/linux-4.4.14/arch/microblaze/mm/
Dfault.c119 regs->r15, regs->msr); in do_page_fault()
/linux-4.4.14/arch/blackfin/include/asm/
Dbfin_serial.h239 __BFP(msr);
261 __BFP(msr);
/linux-4.4.14/drivers/isdn/hisax/
Delsa_ser.c389 int status, iir, msr; in rs_interrupt_elsa() local
413 msr = serial_inp(cs, UART_MSR); in rs_interrupt_elsa()
414 debugl1(cs, "rs MSR %02x", msr); in rs_interrupt_elsa()
