Lines matching refs: hwc
1390 static void intel_pmu_disable_fixed(struct hw_perf_event *hwc) in intel_pmu_disable_fixed() argument
1392 int idx = hwc->idx - INTEL_PMC_IDX_FIXED; in intel_pmu_disable_fixed()
1397 rdmsrl(hwc->config_base, ctrl_val); in intel_pmu_disable_fixed()
1399 wrmsrl(hwc->config_base, ctrl_val); in intel_pmu_disable_fixed()
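
These four lines are every hwc reference in intel_pmu_disable_fixed(); only the mask setup between the MSR read and write is elided. A minimal sketch of the whole function, with the elided lines filled in on the assumption that this tree follows the usual layout of this code in that era:

	static void intel_pmu_disable_fixed(struct hw_perf_event *hwc)
	{
		int idx = hwc->idx - INTEL_PMC_IDX_FIXED;
		u64 ctrl_val, mask;

		/* Assumed elided line: each fixed counter owns a 4-bit field. */
		mask = 0xfULL << (idx * 4);

		rdmsrl(hwc->config_base, ctrl_val);	/* MSR_ARCH_PERFMON_FIXED_CTR_CTRL */
		ctrl_val &= ~mask;			/* assumed: clear this counter's bits */
		wrmsrl(hwc->config_base, ctrl_val);
	}

Disabling a fixed counter is thus a read-modify-write of the shared fixed-counter control MSR, not a per-counter EVENTSEL write.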
1409 struct hw_perf_event *hwc = &event->hw; in intel_pmu_disable_event() local
1412 if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) { in intel_pmu_disable_event()
1418 cpuc->intel_ctrl_guest_mask &= ~(1ull << hwc->idx); in intel_pmu_disable_event()
1419 cpuc->intel_ctrl_host_mask &= ~(1ull << hwc->idx); in intel_pmu_disable_event()
1420 cpuc->intel_cp_status &= ~(1ull << hwc->idx); in intel_pmu_disable_event()
1429 if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) { in intel_pmu_disable_event()
1430 intel_pmu_disable_fixed(hwc); in intel_pmu_disable_event()
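
intel_pmu_disable_event() uses hwc to special-case BTS, clear this counter's bit in the guest/host/checkpoint masks, and route fixed counters to intel_pmu_disable_fixed(). A hedged reconstruction of the control flow around these references; the BTS body and the final generic-counter call are filled in from typical kernel sources of this vintage, and the LBR/PEBS teardown in between is omitted:

	static void intel_pmu_disable_event(struct perf_event *event)
	{
		struct hw_perf_event *hwc = &event->hw;
		struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

		/* BTS is not a real counter and has its own off switch. */
		if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) {
			intel_pmu_disable_bts();
			intel_pmu_drain_bts_buffer();
			return;
		}

		/* Drop this counter from the guest/host enable and checkpoint masks. */
		cpuc->intel_ctrl_guest_mask &= ~(1ull << hwc->idx);
		cpuc->intel_ctrl_host_mask &= ~(1ull << hwc->idx);
		cpuc->intel_cp_status &= ~(1ull << hwc->idx);

		/* Fixed counters share one control MSR; generic ones do not. */
		if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) {
			intel_pmu_disable_fixed(hwc);
			return;
		}

		x86_pmu_disable_event(event);
	}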
1440 static void intel_pmu_enable_fixed(struct hw_perf_event *hwc) in intel_pmu_enable_fixed() argument
1442 int idx = hwc->idx - INTEL_PMC_IDX_FIXED; in intel_pmu_enable_fixed()
1451 if (hwc->config & ARCH_PERFMON_EVENTSEL_USR) in intel_pmu_enable_fixed()
1453 if (hwc->config & ARCH_PERFMON_EVENTSEL_OS) in intel_pmu_enable_fixed()
1459 if (x86_pmu.version > 2 && hwc->config & ARCH_PERFMON_EVENTSEL_ANY) in intel_pmu_enable_fixed()
1465 rdmsrl(hwc->config_base, ctrl_val); in intel_pmu_enable_fixed()
1468 wrmsrl(hwc->config_base, ctrl_val); in intel_pmu_enable_fixed()
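
intel_pmu_enable_fixed() builds a 4-bit enable field from hwc->config and read-modify-writes it into the fixed-counter control MSR. A sketch assembling the fragments, with the bit constants and shifts filled in as assumptions from the standard implementation:

	static void intel_pmu_enable_fixed(struct hw_perf_event *hwc)
	{
		int idx = hwc->idx - INTEL_PMC_IDX_FIXED;
		u64 ctrl_val, bits, mask;

		/*
		 * Assumed: enable PMI generation (0x8), then ring-3 (0x2) and
		 * ring-0 (0x1) counting as requested by the event's config.
		 */
		bits = 0x8ULL;
		if (hwc->config & ARCH_PERFMON_EVENTSEL_USR)
			bits |= 0x2;
		if (hwc->config & ARCH_PERFMON_EVENTSEL_OS)
			bits |= 0x1;

		/* The ANY-thread bit only exists from arch perfmon v3 on. */
		if (x86_pmu.version > 2 && hwc->config & ARCH_PERFMON_EVENTSEL_ANY)
			bits |= 0x4;

		/* Shift into this counter's 4-bit field and splice it in. */
		bits <<= (idx * 4);
		mask = 0xfULL << (idx * 4);

		rdmsrl(hwc->config_base, ctrl_val);
		ctrl_val &= ~mask;
		ctrl_val |= bits;
		wrmsrl(hwc->config_base, ctrl_val);
	}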
1473 struct hw_perf_event *hwc = &event->hw; in intel_pmu_enable_event() local
1476 if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) { in intel_pmu_enable_event()
1480 intel_pmu_enable_bts(hwc->config); in intel_pmu_enable_event()
1491 cpuc->intel_ctrl_guest_mask |= (1ull << hwc->idx); in intel_pmu_enable_event()
1493 cpuc->intel_ctrl_host_mask |= (1ull << hwc->idx); in intel_pmu_enable_event()
1496 cpuc->intel_cp_status |= (1ull << hwc->idx); in intel_pmu_enable_event()
1498 if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) { in intel_pmu_enable_event()
1499 intel_pmu_enable_fixed(hwc); in intel_pmu_enable_event()
1506 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in intel_pmu_enable_event()
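
intel_pmu_enable_event() mirrors the disable path: BTS first, then guest/host and checkpoint bookkeeping, then dispatch to the fixed or generic enable helper. A reconstruction under the same assumptions; the exclude_host/exclude_guest and event_is_checkpointed() conditions are taken from typical sources of this era, not confirmed by the fragments above:

	static void intel_pmu_enable_event(struct perf_event *event)
	{
		struct hw_perf_event *hwc = &event->hw;
		struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

		if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) {
			if (!__this_cpu_read(cpu_hw_events.enabled))
				return;
			intel_pmu_enable_bts(hwc->config);
			return;
		}

		/* Assumed: let KVM mask the counter on the side it must not count. */
		if (event->attr.exclude_host)
			cpuc->intel_ctrl_guest_mask |= (1ull << hwc->idx);
		if (event->attr.exclude_guest)
			cpuc->intel_ctrl_host_mask |= (1ull << hwc->idx);

		if (unlikely(event_is_checkpointed(event)))
			cpuc->intel_cp_status |= (1ull << hwc->idx);

		if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) {
			intel_pmu_enable_fixed(hwc);
			return;
		}

		__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
	}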
1687 struct hw_perf_event *hwc = &event->hw; in intel_bts_constraints() local
1693 hw_event = hwc->config & INTEL_ARCH_EVENT_MASK; in intel_bts_constraints()
1696 if (unlikely(hw_event == bts_event && hwc->sample_period == 1)) in intel_bts_constraints()
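
intel_bts_constraints() compares the event's hardware config and sample period against the branch-instructions event to decide whether BTS can take it. A sketch; the bts_event lookup, the freq early-out, and the return values are assumptions from the usual implementation:

	static struct event_constraint *
	intel_bts_constraints(struct perf_event *event)
	{
		struct hw_perf_event *hwc = &event->hw;
		unsigned int hw_event, bts_event;

		if (event->attr.freq)
			return NULL;

		hw_event = hwc->config & INTEL_ARCH_EVENT_MASK;
		bts_event = x86_pmu.event_map(PERF_COUNT_HW_BRANCH_INSTRUCTIONS);

		/* BTS handles retired branches sampled on every occurrence. */
		if (unlikely(hw_event == bts_event && hwc->sample_period == 1))
			return &bts_constraint;

		return NULL;
	}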
2128 struct hw_perf_event *hwc = &event->hw; in intel_put_excl_constraints() local
2148 if (hwc->flags & PERF_X86_EVENT_EXCL_ACCT) { in intel_put_excl_constraints()
2149 hwc->flags &= ~PERF_X86_EVENT_EXCL_ACCT; in intel_put_excl_constraints()
2166 if (hwc->idx >= 0) in intel_put_excl_constraints()
2167 xlo->state[hwc->idx] = INTEL_EXCL_UNUSED; in intel_put_excl_constraints()
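
intel_put_excl_constraints() drops the exclusive-counter accounting flag from hwc and, if the event actually had a counter assigned, marks that counter unused in the sibling thread's exclusion state (xlo). A simplified sketch: the is_fake early-out, n_excl accounting, and state lookup are assumptions from same-era sources, and the conditional locking around the state update is omitted:

	static void intel_put_excl_constraints(struct cpu_hw_events *cpuc,
					       struct perf_event *event)
	{
		struct hw_perf_event *hwc = &event->hw;
		struct intel_excl_cntrs *excl_cntrs = cpuc->excl_cntrs;
		int tid = cpuc->excl_thread_id;
		struct intel_excl_states *xlo;

		/* Nothing to undo during group validation (fake cpuc). */
		if (cpuc->is_fake)
			return;

		if (WARN_ON_ONCE(!excl_cntrs))
			return;

		if (hwc->flags & PERF_X86_EVENT_EXCL_ACCT) {
			hwc->flags &= ~PERF_X86_EVENT_EXCL_ACCT;
			if (!--cpuc->n_excl)
				WRITE_ONCE(excl_cntrs->has_exclusive[tid], 0);
		}

		/* Sibling thread's view: this counter no longer carries our event. */
		xlo = &excl_cntrs->states[1 - tid];
		if (hwc->idx >= 0)
			xlo->state[hwc->idx] = INTEL_EXCL_UNUSED;
	}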
2407 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in core_pmu_enable_all() local
2413 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in core_pmu_enable_all()
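
core_pmu_enable_all() walks every generic counter and re-enables the active ones through __x86_pmu_enable_event(). A sketch; the loop bounds and the active-mask/exclude_host test are assumed from the standard implementation:

	static void core_pmu_enable_all(int added)
	{
		struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);
		int idx;

		for (idx = 0; idx < x86_pmu.num_counters; idx++) {
			struct hw_perf_event *hwc = &cpuc->events[idx]->hw;

			/* Assumed: skip idle slots and host-excluded events. */
			if (!test_bit(idx, cpuc->active_mask) ||
			    cpuc->events[idx]->attr.exclude_host)
				continue;

			__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
		}
	}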