Searched refs: INTEL_PMC_IDX_FIXED (Results 1 – 4 of 4) sorted by relevance
  10  #define INTEL_PMC_IDX_FIXED                 32    macro
 142  #define INTEL_PMC_IDX_FIXED_INSTRUCTIONS    (INTEL_PMC_IDX_FIXED + 0)
 146  #define INTEL_PMC_IDX_FIXED_CPU_CYCLES      (INTEL_PMC_IDX_FIXED + 1)
 150  #define INTEL_PMC_IDX_FIXED_REF_CYCLES      (INTEL_PMC_IDX_FIXED + 2)
 160  #define INTEL_PMC_IDX_FIXED_BTS             (INTEL_PMC_IDX_FIXED + 16)
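These definitions place the fixed-counter index space at bit 32 and up, so fixed counter n sits at bit INTEL_PMC_IDX_FIXED + n of the global control/overflow MSRs, above the general-purpose counters at bits 0..31. A minimal user-space sketch of that arithmetic; only the two constants are architectural, the helper names are illustrative and not taken from the kernel:

    /* Sketch only, not kernel code: where fixed counter n lives. */
    #include <stdio.h>
    #include <stdint.h>

    #define INTEL_PMC_IDX_FIXED            32
    #define MSR_ARCH_PERFMON_FIXED_CTR0    0x309

    /* Bit controlling fixed counter n in IA32_PERF_GLOBAL_CTRL/_STATUS. */
    static uint64_t fixed_ctr_global_bit(unsigned int n)
    {
            return 1ULL << (INTEL_PMC_IDX_FIXED + n);
    }

    /* MSR address of fixed counter n (hypothetical helper name). */
    static uint32_t fixed_ctr_msr(unsigned int n)
    {
            return MSR_ARCH_PERFMON_FIXED_CTR0 + n;
    }

    int main(void)
    {
            for (unsigned int n = 0; n < 3; n++)
                    printf("fixed ctr %u: global bit %2u (mask %#llx), MSR %#x\n",
                           n, INTEL_PMC_IDX_FIXED + n,
                           (unsigned long long)fixed_ctr_global_bit(n),
                           fixed_ctr_msr(n));
            return 0;
    }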
  84  if (idx < INTEL_PMC_IDX_FIXED)                                              in global_idx_to_pmc()
  87  return get_fixed_pmc_idx(pmu, idx - INTEL_PMC_IDX_FIXED);                   in global_idx_to_pmc()
 306  int fidx = idx - INTEL_PMC_IDX_FIXED;                                        in reprogram_idx()
 504  (((1ull << pmu->nr_arch_fixed_counters) - 1) << INTEL_PMC_IDX_FIXED);        in kvm_pmu_cpuid_update()
 528  pmu->fixed_counters[i].idx = i + INTEL_PMC_IDX_FIXED;                        in kvm_pmu_init()
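The KVM hits show the same split applied to the guest PMU: a global index below 32 names a general-purpose counter, anything at or above INTEL_PMC_IDX_FIXED is turned into a fixed-counter number by subtracting the base. A hedged sketch of that translation; the struct layout and function name below are assumptions for illustration, not the kernel's own definitions:

    /* Sketch of the index split seen in global_idx_to_pmc(); types are invented. */
    #include <stddef.h>
    #include <stdint.h>

    #define INTEL_PMC_IDX_FIXED 32

    struct pmc_sketch { uint64_t counter; int idx; };

    struct pmu_sketch {
            int nr_arch_gp_counters;
            int nr_arch_fixed_counters;
            struct pmc_sketch gp_counters[8];
            struct pmc_sketch fixed_counters[4];
    };

    struct pmc_sketch *global_idx_to_pmc_sketch(struct pmu_sketch *pmu, int idx)
    {
            if (idx < INTEL_PMC_IDX_FIXED) {
                    if (idx >= pmu->nr_arch_gp_counters)
                            return NULL;
                    return &pmu->gp_counters[idx];      /* general-purpose PMC */
            }
            idx -= INTEL_PMC_IDX_FIXED;                 /* fixed-counter number */
            if (idx >= pmu->nr_arch_fixed_counters)
                    return NULL;
            return &pmu->fixed_counters[idx];
    }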
 723  if (c->idxmsk64 & (~0ULL << INTEL_PMC_IDX_FIXED)) {                          in __perf_sched_find_counter()
 724  idx = INTEL_PMC_IDX_FIXED;                                                   in __perf_sched_find_counter()
 733  for_each_set_bit_from(idx, c->idxmsk, INTEL_PMC_IDX_FIXED) {                 in __perf_sched_find_counter()
 974  } else if (hwc->idx >= INTEL_PMC_IDX_FIXED) {                                in x86_assign_hw_event()
 976  hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - INTEL_PMC_IDX_FIXED);   in x86_assign_hw_event()
 977  hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | 1<<30;            in x86_assign_hw_event()
1982  if (x86_pmu.num_counters_fixed && idx >= INTEL_PMC_IDX_FIXED) {              in x86_pmu_event_idx()
1983  idx -= INTEL_PMC_IDX_FIXED;                                                  in x86_pmu_event_idx()
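x86_assign_hw_event() converts a fixed-counter index into both its MSR address (MSR_ARCH_PERFMON_FIXED_CTR0 + n) and its RDPMC index, where bit 30 is the architectural selector for the fixed-function counter bank. A small stand-alone sketch of that encoding, assuming fixed counter 1 (CPU cycles); variable names mirror the listing but the program itself is illustrative:

    /* Sketch only: the fixed-counter MSR base and RDPMC index encoding. */
    #include <stdio.h>
    #include <stdint.h>

    #define INTEL_PMC_IDX_FIXED            32
    #define MSR_ARCH_PERFMON_FIXED_CTR0    0x309

    int main(void)
    {
            int hwc_idx = INTEL_PMC_IDX_FIXED + 1;        /* fixed counter 1: CPU cycles */
            unsigned int fixed = hwc_idx - INTEL_PMC_IDX_FIXED;

            uint32_t event_base       = MSR_ARCH_PERFMON_FIXED_CTR0 + fixed;
            uint32_t event_base_rdpmc = fixed | 1u << 30; /* bit 30: fixed-counter bank */

            printf("MSR %#x, rdpmc index %#x\n", event_base, event_base_rdpmc);
            return 0;
    }

Because RDPMC addresses fixed counters through that bit-30 flag rather than through the global index, the kernel stores the rebased value (hwc->idx - INTEL_PMC_IDX_FIXED) instead of the raw index.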
1392  int idx = hwc->idx - INTEL_PMC_IDX_FIXED;                                    in intel_pmu_disable_fixed()
1442  int idx = hwc->idx - INTEL_PMC_IDX_FIXED;                                    in intel_pmu_enable_fixed()
3322  ((1LL << x86_pmu.num_counters_fixed)-1) << INTEL_PMC_IDX_FIXED;              in intel_pmu_init()
3335  ~(~0UL << (INTEL_PMC_IDX_FIXED + x86_pmu.num_counters_fixed));               in intel_pmu_init()
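intel_pmu_init() builds its masks from the same base: the implemented fixed counters occupy bits 32..(32 + num_counters_fixed - 1). A worked example of the two expressions at lines 3322 and 3335, assuming four general-purpose and three fixed counters (the counts and variable names are assumptions for the example, not values from the listing):

    /* Worked example of the two mask expressions from intel_pmu_init(). */
    #include <stdio.h>
    #include <stdint.h>

    #define INTEL_PMC_IDX_FIXED 32

    int main(void)
    {
            int num_counters = 4, num_counters_fixed = 3;

            /* Enable bits of the fixed counters: bits 32..34 -> 0x700000000. */
            uint64_t fixed_mask = ((1ULL << num_counters_fixed) - 1) << INTEL_PMC_IDX_FIXED;

            /* GP enable bits OR'ed with the fixed bits -> 0x70000000f. */
            uint64_t intel_ctrl = ((1ULL << num_counters) - 1) | fixed_mask;

            /* Every index bit up to the last fixed counter: bits 0..34 -> 0x7ffffffff. */
            uint64_t valid_mask = ~(~0ULL << (INTEL_PMC_IDX_FIXED + num_counters_fixed));

            printf("fixed_mask = %#llx\n", (unsigned long long)fixed_mask);
            printf("intel_ctrl = %#llx\n", (unsigned long long)intel_ctrl);
            printf("valid_mask = %#llx\n", (unsigned long long)valid_mask);
            return 0;
    }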