Lines matching refs:hwc
1612 static void intel_pmu_disable_fixed(struct hw_perf_event *hwc) in intel_pmu_disable_fixed() argument
1614 int idx = hwc->idx - INTEL_PMC_IDX_FIXED; in intel_pmu_disable_fixed()
1619 rdmsrl(hwc->config_base, ctrl_val); in intel_pmu_disable_fixed()
1621 wrmsrl(hwc->config_base, ctrl_val); in intel_pmu_disable_fixed()
1631 struct hw_perf_event *hwc = &event->hw; in intel_pmu_disable_event() local
1634 if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) { in intel_pmu_disable_event()
1640 cpuc->intel_ctrl_guest_mask &= ~(1ull << hwc->idx); in intel_pmu_disable_event()
1641 cpuc->intel_ctrl_host_mask &= ~(1ull << hwc->idx); in intel_pmu_disable_event()
1642 cpuc->intel_cp_status &= ~(1ull << hwc->idx); in intel_pmu_disable_event()
1651 if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) { in intel_pmu_disable_event()
1652 intel_pmu_disable_fixed(hwc); in intel_pmu_disable_event()
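intel_pmu_disable_event() shows up in five hits: the BTS fast path, the clearing of the guest/host/checkpoint bitmasks, and the dispatch to intel_pmu_disable_fixed(). A hedged reconstruction of the surrounding control flow; the calls not present in the listing (intel_pmu_disable_bts(), intel_pmu_drain_bts_buffer(), x86_pmu_disable_event(), intel_pmu_pebs_disable()) are assumptions from context, and LBR bookkeeping is omitted for brevity:

static void intel_pmu_disable_event(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

	/* BTS lives behind a fake counter index and is torn down separately. */
	if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) {
		intel_pmu_disable_bts();
		intel_pmu_drain_bts_buffer();
		return;
	}

	cpuc->intel_ctrl_guest_mask &= ~(1ull << hwc->idx);
	cpuc->intel_ctrl_host_mask &= ~(1ull << hwc->idx);
	cpuc->intel_cp_status &= ~(1ull << hwc->idx);

	/* Fixed counters are controlled via the shared fixed-ctr control MSR. */
	if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) {
		intel_pmu_disable_fixed(hwc);
		return;
	}

	x86_pmu_disable_event(event);

	if (unlikely(event->attr.precise_ip))
		intel_pmu_pebs_disable(event);
}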
1662 static void intel_pmu_enable_fixed(struct hw_perf_event *hwc) in intel_pmu_enable_fixed() argument
1664 int idx = hwc->idx - INTEL_PMC_IDX_FIXED; in intel_pmu_enable_fixed()
1673 if (hwc->config & ARCH_PERFMON_EVENTSEL_USR) in intel_pmu_enable_fixed()
1675 if (hwc->config & ARCH_PERFMON_EVENTSEL_OS) in intel_pmu_enable_fixed()
1681 if (x86_pmu.version > 2 && hwc->config & ARCH_PERFMON_EVENTSEL_ANY) in intel_pmu_enable_fixed()
1687 rdmsrl(hwc->config_base, ctrl_val); in intel_pmu_enable_fixed()
1690 wrmsrl(hwc->config_base, ctrl_val); in intel_pmu_enable_fixed()
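intel_pmu_enable_fixed() assembles the 4-bit control field for one fixed counter: bit 0 enables ring-0 (OS) counting, bit 1 ring-3 (USR) counting, bit 2 the AnyThread bit (architectural perfmon v3 and later, hence the version > 2 check at line 1681), and bit 3 PMI generation. A sketch with the bit constants and the final read-modify-write filled in as assumptions:

static void intel_pmu_enable_fixed(struct hw_perf_event *hwc)
{
	int idx = hwc->idx - INTEL_PMC_IDX_FIXED;
	u64 ctrl_val, bits, mask;

	/* Always enable PMI generation; add ring-3/ring-0 counting on request. */
	bits = 0x8ULL;
	if (hwc->config & ARCH_PERFMON_EVENTSEL_USR)
		bits |= 0x2;
	if (hwc->config & ARCH_PERFMON_EVENTSEL_OS)
		bits |= 0x1;

	/* The AnyThread bit only exists from arch perfmon version 3 on. */
	if (x86_pmu.version > 2 && hwc->config & ARCH_PERFMON_EVENTSEL_ANY)
		bits |= 0x4;

	bits <<= (idx * 4);
	mask = 0xfULL << (idx * 4);

	rdmsrl(hwc->config_base, ctrl_val);
	ctrl_val &= ~mask;
	ctrl_val |= bits;
	wrmsrl(hwc->config_base, ctrl_val);
}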
1695 struct hw_perf_event *hwc = &event->hw; in intel_pmu_enable_event() local
1698 if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) { in intel_pmu_enable_event()
1702 intel_pmu_enable_bts(hwc->config); in intel_pmu_enable_event()
1713 cpuc->intel_ctrl_guest_mask |= (1ull << hwc->idx); in intel_pmu_enable_event()
1715 cpuc->intel_ctrl_host_mask |= (1ull << hwc->idx); in intel_pmu_enable_event()
1718 cpuc->intel_cp_status |= (1ull << hwc->idx); in intel_pmu_enable_event()
1720 if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) { in intel_pmu_enable_event()
1721 intel_pmu_enable_fixed(hwc); in intel_pmu_enable_event()
1728 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in intel_pmu_enable_event()
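intel_pmu_enable_event() mirrors the disable path: BTS gets its own branch, the guest/host/checkpoint masks are set, fixed counters are dispatched to intel_pmu_enable_fixed(), and everything else falls through to __x86_pmu_enable_event(). In the reconstruction below the guard conditions on the mask updates (event->attr.exclude_host / exclude_guest, event_is_checkpointed()) are inferred from context rather than shown in the listing, and LBR handling is omitted:

static void intel_pmu_enable_event(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

	if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) {
		/* Only start BTS if the PMU as a whole is enabled. */
		if (!__this_cpu_read(cpu_hw_events.enabled))
			return;

		intel_pmu_enable_bts(hwc->config);
		return;
	}

	/* Track which counters must be masked while a guest runs, and vice versa. */
	if (event->attr.exclude_host)
		cpuc->intel_ctrl_guest_mask |= (1ull << hwc->idx);
	if (event->attr.exclude_guest)
		cpuc->intel_ctrl_host_mask |= (1ull << hwc->idx);

	if (unlikely(event_is_checkpointed(event)))
		cpuc->intel_cp_status |= (1ull << hwc->idx);

	if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) {
		intel_pmu_enable_fixed(hwc);
		return;
	}

	if (unlikely(event->attr.precise_ip))
		intel_pmu_pebs_enable(event);

	__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
}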
1923 struct hw_perf_event *hwc = &event->hw; in intel_bts_constraints() local
1929 hw_event = hwc->config & INTEL_ARCH_EVENT_MASK; in intel_bts_constraints()
1932 if (unlikely(hw_event == bts_event && hwc->sample_period == 1)) in intel_bts_constraints()
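intel_bts_constraints() decides when an event may be steered to the BTS fake counter: only the architectural branch-instructions event, sampled with a fixed period of exactly 1 (i.e. every branch), qualifies. A sketch; the freq check and the bts_event lookup are assumptions from context:

static struct event_constraint *
intel_bts_constraints(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	unsigned int hw_event, bts_event;

	/* BTS cannot honour a sampling frequency, only a fixed period. */
	if (event->attr.freq)
		return NULL;

	hw_event = hwc->config & INTEL_ARCH_EVENT_MASK;
	bts_event = x86_pmu.event_map(PERF_COUNT_HW_BRANCH_INSTRUCTIONS);

	if (unlikely(hw_event == bts_event && hwc->sample_period == 1))
		return &bts_constraint;

	return NULL;
}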
2372 struct hw_perf_event *hwc = &event->hw; in intel_put_excl_constraints() local
2386 if (hwc->flags & PERF_X86_EVENT_EXCL_ACCT) { in intel_put_excl_constraints()
2387 hwc->flags &= ~PERF_X86_EVENT_EXCL_ACCT; in intel_put_excl_constraints()
2396 if (hwc->idx >= 0) { in intel_put_excl_constraints()
2407 xl->state[hwc->idx] = INTEL_EXCL_UNUSED; in intel_put_excl_constraints()
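intel_put_excl_constraints() is the release side of the cross-HT exclusive counter scheme: it drops the PERF_X86_EVENT_EXCL_ACCT accounting taken when the constraint was handed out, and, if the event was actually assigned a counter (hwc->idx >= 0), marks that counter's cross-thread state unused again. The excl_cntrs structure, the n_excl counter, and the conditional locking are filled in here as assumptions about the surrounding code:

static void intel_put_excl_constraints(struct cpu_hw_events *cpuc,
				       struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct intel_excl_cntrs *excl_cntrs = cpuc->excl_cntrs;
	int tid = cpuc->excl_thread_id;
	struct intel_excl_states *xl;

	if (WARN_ON_ONCE(!excl_cntrs))
		return;

	/* Undo the exclusive-event accounting taken at get_constraint time. */
	if (hwc->flags & PERF_X86_EVENT_EXCL_ACCT) {
		hwc->flags &= ~PERF_X86_EVENT_EXCL_ACCT;
		if (!--cpuc->n_excl)
			WRITE_ONCE(excl_cntrs->has_exclusive[tid], 0);
	}

	/* If the event held a counter, release its cross-thread slot. */
	if (hwc->idx >= 0) {
		xl = &excl_cntrs->states[tid];

		/* The scheduler may already hold the lock; take it only if not. */
		if (!xl->sched_started)
			raw_spin_lock(&excl_cntrs->lock);

		xl->state[hwc->idx] = INTEL_EXCL_UNUSED;

		if (!xl->sched_started)
			raw_spin_unlock(&excl_cntrs->lock);
	}
}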
2630 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in core_pmu_enable_all() local
2636 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in core_pmu_enable_all()
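core_pmu_enable_all() re-enables every active general-purpose counter on the original core PMU, which lacks a global enable MSR, by replaying __x86_pmu_enable_event() per counter index. The loop bounds and the active/exclusive filtering are inferred from context, not shown in the listing:

static void core_pmu_enable_all(int added)
{
	struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);
	int idx;

	for (idx = 0; idx < x86_pmu.num_counters; idx++) {
		struct hw_perf_event *hwc = &cpuc->events[idx]->hw;

		/* Skip idle counters and events that demand exclusive use. */
		if (!test_bit(idx, cpuc->active_mask) ||
		    cpuc->events[idx]->attr.exclusive)
			continue;

		__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
	}
}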