Lines matching refs: event (each hit shows the source line number, the matching code, and the enclosing function; "local" or "argument" marks how the identifier is used there)
1501 struct perf_event *event = in __intel_pmu_enable_all() local
1504 if (WARN_ON_ONCE(!event)) in __intel_pmu_enable_all()
1507 intel_pmu_enable_bts(event->hw.config); in __intel_pmu_enable_all()
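The three hits at 1501, 1504 and 1507 are the BTS re-enable block inside __intel_pmu_enable_all(). A minimal sketch of that block as the hits suggest it; the surrounding test_bit() guard on the fixed BTS slot and the cpuc pointer are assumed from context, not shown in the listing:

	/* Assumed guard: only touch BTS if its fixed slot is active. */
	if (test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask)) {
		struct perf_event *event =
			cpuc->events[INTEL_PMC_IDX_FIXED_BTS];

		if (WARN_ON_ONCE(!event))
			return;

		/* Re-arm branch trace store with the event's saved config. */
		intel_pmu_enable_bts(event->hw.config);
	}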
1540 struct perf_event *event; in intel_pmu_nhm_workaround() local
1566 event = cpuc->events[i]; in intel_pmu_nhm_workaround()
1567 if (event) in intel_pmu_nhm_workaround()
1568 x86_perf_event_update(event); in intel_pmu_nhm_workaround()
1580 event = cpuc->events[i]; in intel_pmu_nhm_workaround()
1582 if (event) { in intel_pmu_nhm_workaround()
1583 x86_perf_event_set_period(event); in intel_pmu_nhm_workaround()
1584 __x86_pmu_enable_event(&event->hw, in intel_pmu_nhm_workaround()
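The hits at 1566-1584 are the two per-counter loops of the Nehalem erratum workaround: the first folds the live hardware counts into the events, the second restores each event's period and re-enables it after the dummy reprogramming (which is not part of the listing). A sketch; the ARCH_PERFMON_EVENTSEL_ENABLE argument to __x86_pmu_enable_event() is assumed, since the call is truncated in the hit at 1584:

	for (i = 0; i < 4; i++) {
		event = cpuc->events[i];
		if (event)
			x86_perf_event_update(event);	/* save current count */
	}

	/* ... magic dummy programming of the four counters elided ... */

	for (i = 0; i < 4; i++) {
		event = cpuc->events[i];

		if (event) {
			x86_perf_event_set_period(event);
			__x86_pmu_enable_event(&event->hw,
					ARCH_PERFMON_EVENTSEL_ENABLE);
		}
	}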
1624 static inline bool event_is_checkpointed(struct perf_event *event) in event_is_checkpointed() argument
1626 return (event->hw.config & HSW_IN_TX_CHECKPOINTED) != 0; in event_is_checkpointed()
1629 static void intel_pmu_disable_event(struct perf_event *event) in intel_pmu_disable_event() argument
1631 struct hw_perf_event *hwc = &event->hw; in intel_pmu_disable_event()
1648 if (needs_branch_stack(event)) in intel_pmu_disable_event()
1649 intel_pmu_lbr_disable(event); in intel_pmu_disable_event()
1656 x86_pmu_disable_event(event); in intel_pmu_disable_event()
1658 if (unlikely(event->attr.precise_ip)) in intel_pmu_disable_event()
1659 intel_pmu_pebs_disable(event); in intel_pmu_disable_event()
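The hits at 1648-1659 give the ordering that matters in intel_pmu_disable_event(): LBR is torn down before the counter itself, and PEBS state last. A sketch of just that ordering; the BTS and fixed-counter special cases that also live in this function (and use the hwc local declared at 1631) are elided:

static void intel_pmu_disable_event(struct perf_event *event)
{
	/* BTS and fixed-counter paths elided; they use &event->hw directly. */

	/* LBR first: any event may have a branch stack attached to it. */
	if (needs_branch_stack(event))
		intel_pmu_lbr_disable(event);

	/* Then stop the counter itself. */
	x86_pmu_disable_event(event);

	/* precise_ip events also carry PEBS state that must be torn down. */
	if (unlikely(event->attr.precise_ip))
		intel_pmu_pebs_disable(event);
}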
1693 static void intel_pmu_enable_event(struct perf_event *event) in intel_pmu_enable_event() argument
1695 struct hw_perf_event *hwc = &event->hw; in intel_pmu_enable_event()
1709 if (needs_branch_stack(event)) in intel_pmu_enable_event()
1710 intel_pmu_lbr_enable(event); in intel_pmu_enable_event()
1712 if (event->attr.exclude_host) in intel_pmu_enable_event()
1714 if (event->attr.exclude_guest) in intel_pmu_enable_event()
1717 if (unlikely(event_is_checkpointed(event))) in intel_pmu_enable_event()
1725 if (unlikely(event->attr.precise_ip)) in intel_pmu_enable_event()
1726 intel_pmu_pebs_enable(event); in intel_pmu_enable_event()
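The enable path at 1709-1726 mirrors the disable path: LBR first, then per-event bookkeeping, PEBS, and finally the counter. The right-hand sides of the exclude_host / exclude_guest / checkpointed updates are truncated out of the listing; the guest/host/checkpoint masks below are assumed from context:

static void intel_pmu_enable_event(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

	/* (BTS and fixed-counter special cases elided.) */

	if (needs_branch_stack(event))
		intel_pmu_lbr_enable(event);

	/* Remember which counters must stay off on the host or guest side. */
	if (event->attr.exclude_host)
		cpuc->intel_ctrl_guest_mask |= (1ull << hwc->idx);
	if (event->attr.exclude_guest)
		cpuc->intel_ctrl_host_mask |= (1ull << hwc->idx);

	/* Checkpointed (TSX) counters are flagged for the overflow handler. */
	if (unlikely(event_is_checkpointed(event)))
		cpuc->intel_cp_status |= (1ull << hwc->idx);

	if (unlikely(event->attr.precise_ip))
		intel_pmu_pebs_enable(event);

	__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
}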
1735 int intel_pmu_save_and_restart(struct perf_event *event) in intel_pmu_save_and_restart() argument
1737 x86_perf_event_update(event); in intel_pmu_save_and_restart()
1744 if (unlikely(event_is_checkpointed(event))) { in intel_pmu_save_and_restart()
1746 wrmsrl(event->hw.event_base, 0); in intel_pmu_save_and_restart()
1747 local64_set(&event->hw.prev_count, 0); in intel_pmu_save_and_restart()
1749 return x86_perf_event_set_period(event); in intel_pmu_save_and_restart()
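The hits at 1737-1749 are nearly the whole of intel_pmu_save_and_restart(); stitched together they read as below (only the comments are added). Checkpointed counters are reset to 0 rather than rewound, so a TSX abort cannot keep replaying the same overflow:

int intel_pmu_save_and_restart(struct perf_event *event)
{
	x86_perf_event_update(event);

	if (unlikely(event_is_checkpointed(event))) {
		/* Counter is not armed here, so no race with the NMI path. */
		wrmsrl(event->hw.event_base, 0);
		local64_set(&event->hw.prev_count, 0);
	}

	return x86_perf_event_set_period(event);
}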
1879 struct perf_event *event = cpuc->events[bit]; in intel_pmu_handle_irq() local
1886 if (!intel_pmu_save_and_restart(event)) in intel_pmu_handle_irq()
1889 perf_sample_data_init(&data, 0, event->hw.last_period); in intel_pmu_handle_irq()
1891 if (has_branch_stack(event)) in intel_pmu_handle_irq()
1894 if (perf_event_overflow(event, &data, regs)) in intel_pmu_handle_irq()
1895 x86_pmu_stop(event, 0); in intel_pmu_handle_irq()
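The hits at 1879-1895 form the per-counter overflow loop in intel_pmu_handle_irq(). A sketch of that loop; the surrounding for_each_set_bit() over the GLOBAL_STATUS bits and the active_mask check are assumed from context:

	for_each_set_bit(bit, (unsigned long *)&status, X86_PMC_IDX_MAX) {
		struct perf_event *event = cpuc->events[bit];

		handled++;

		if (!test_bit(bit, cpuc->active_mask))
			continue;

		/* Fold the count and rearm; skip the sample if the period
		 * did not actually elapse. */
		if (!intel_pmu_save_and_restart(event))
			continue;

		perf_sample_data_init(&data, 0, event->hw.last_period);

		if (has_branch_stack(event))
			data.br_stack = &cpuc->lbr_stack;

		if (perf_event_overflow(event, &data, regs))
			x86_pmu_stop(event, 0);
	}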
1921 intel_bts_constraints(struct perf_event *event) in intel_bts_constraints() argument
1923 struct hw_perf_event *hwc = &event->hw; in intel_bts_constraints()
1926 if (event->attr.freq) in intel_bts_constraints()
1956 static void intel_fixup_er(struct perf_event *event, int idx) in intel_fixup_er() argument
1958 event->hw.extra_reg.idx = idx; in intel_fixup_er()
1961 event->hw.config &= ~INTEL_ARCH_EVENT_MASK; in intel_fixup_er()
1962 event->hw.config |= x86_pmu.extra_regs[EXTRA_REG_RSP_0].event; in intel_fixup_er()
1963 event->hw.extra_reg.reg = MSR_OFFCORE_RSP_0; in intel_fixup_er()
1965 event->hw.config &= ~INTEL_ARCH_EVENT_MASK; in intel_fixup_er()
1966 event->hw.config |= x86_pmu.extra_regs[EXTRA_REG_RSP_1].event; in intel_fixup_er()
1967 event->hw.extra_reg.reg = MSR_OFFCORE_RSP_1; in intel_fixup_er()
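The hits at 1958-1967 cover both halves of intel_fixup_er(), which retargets an OFFCORE_RESPONSE event at the other extra register when the scheduler had to swap it. The two idx comparisons are not in the listing and are assumed:

static void intel_fixup_er(struct perf_event *event, int idx)
{
	event->hw.extra_reg.idx = idx;

	if (idx == EXTRA_REG_RSP_0) {		/* assumed guard */
		event->hw.config &= ~INTEL_ARCH_EVENT_MASK;
		event->hw.config |= x86_pmu.extra_regs[EXTRA_REG_RSP_0].event;
		event->hw.extra_reg.reg = MSR_OFFCORE_RSP_0;
	} else if (idx == EXTRA_REG_RSP_1) {	/* assumed guard */
		event->hw.config &= ~INTEL_ARCH_EVENT_MASK;
		event->hw.config |= x86_pmu.extra_regs[EXTRA_REG_RSP_1].event;
		event->hw.extra_reg.reg = MSR_OFFCORE_RSP_1;
	}
}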
1980 struct perf_event *event, in __intel_shared_reg_get_constraints() argument
2018 intel_fixup_er(event, idx); in __intel_shared_reg_get_constraints()
2081 struct perf_event *event) in intel_shared_regs_constraints() argument
2086 xreg = &event->hw.extra_reg; in intel_shared_regs_constraints()
2088 c = __intel_shared_reg_get_constraints(cpuc, event, xreg); in intel_shared_regs_constraints()
2092 breg = &event->hw.branch_reg; in intel_shared_regs_constraints()
2094 d = __intel_shared_reg_get_constraints(cpuc, event, breg); in intel_shared_regs_constraints()
2105 struct perf_event *event) in x86_get_event_constraints() argument
2111 if ((event->hw.config & c->cmask) == c->code) { in x86_get_event_constraints()
2112 event->hw.flags |= c->flags; in x86_get_event_constraints()
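The hits at 2111-2112 are the match inside x86_get_event_constraints(): the first constraint whose cmask-selected bits equal its code wins, and its flags are copied onto the event. A sketch of the loop around them; for_each_event_constraint() and the unconstrained fallback are assumed from context:

	if (x86_pmu.event_constraints) {
		for_each_event_constraint(c, x86_pmu.event_constraints) {
			if ((event->hw.config & c->cmask) == c->code) {
				event->hw.flags |= c->flags;
				return c;
			}
		}
	}

	return &unconstrained;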
2123 struct perf_event *event) in __intel_get_event_constraints() argument
2127 c = intel_bts_constraints(event); in __intel_get_event_constraints()
2131 c = intel_shared_regs_constraints(cpuc, event); in __intel_get_event_constraints()
2135 c = intel_pebs_constraints(event); in __intel_get_event_constraints()
2139 return x86_get_event_constraints(cpuc, idx, event); in __intel_get_event_constraints()
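The hits at 2127-2139 show the constraint cascade in __intel_get_event_constraints(): BTS first, then shared registers, then PEBS, and only then the generic constraint table. The intermediate NULL checks are assumed (they are not part of the hits):

static struct event_constraint *
__intel_get_event_constraints(struct cpu_hw_events *cpuc, int idx,
			      struct perf_event *event)
{
	struct event_constraint *c;

	c = intel_bts_constraints(event);
	if (c)
		return c;

	c = intel_shared_regs_constraints(cpuc, event);
	if (c)
		return c;

	c = intel_pebs_constraints(event);
	if (c)
		return c;

	return x86_get_event_constraints(cpuc, idx, event);
}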
2226 intel_get_excl_constraints(struct cpu_hw_events *cpuc, struct perf_event *event, in intel_get_excl_constraints() argument
2294 if (is_excl && !(event->hw.flags & PERF_X86_EVENT_EXCL_ACCT)) { in intel_get_excl_constraints()
2295 event->hw.flags |= PERF_X86_EVENT_EXCL_ACCT; in intel_get_excl_constraints()
2343 struct perf_event *event) in intel_get_event_constraints() argument
2356 c2 = __intel_get_event_constraints(cpuc, idx, event); in intel_get_event_constraints()
2364 return intel_get_excl_constraints(cpuc, event, idx, c2); in intel_get_event_constraints()
2370 struct perf_event *event) in intel_put_excl_constraints() argument
2372 struct hw_perf_event *hwc = &event->hw; in intel_put_excl_constraints()
2416 struct perf_event *event) in intel_put_shared_regs_event_constraints() argument
2420 reg = &event->hw.extra_reg; in intel_put_shared_regs_event_constraints()
2424 reg = &event->hw.branch_reg; in intel_put_shared_regs_event_constraints()
2430 struct perf_event *event) in intel_put_event_constraints() argument
2432 intel_put_shared_regs_event_constraints(cpuc, event); in intel_put_event_constraints()
2440 intel_put_excl_constraints(cpuc, event); in intel_put_event_constraints()
2443 static void intel_pebs_aliases_core2(struct perf_event *event) in intel_pebs_aliases_core2() argument
2445 if ((event->hw.config & X86_RAW_EVENT_MASK) == 0x003c) { in intel_pebs_aliases_core2()
2464 u64 alt_config = X86_CONFIG(.event=0xc0, .inv=1, .cmask=16); in intel_pebs_aliases_core2()
2466 alt_config |= (event->hw.config & ~X86_RAW_EVENT_MASK); in intel_pebs_aliases_core2()
2467 event->hw.config = alt_config; in intel_pebs_aliases_core2()
2471 static void intel_pebs_aliases_snb(struct perf_event *event) in intel_pebs_aliases_snb() argument
2473 if ((event->hw.config & X86_RAW_EVENT_MASK) == 0x003c) { in intel_pebs_aliases_snb()
2492 u64 alt_config = X86_CONFIG(.event=0xc2, .umask=0x01, .inv=1, .cmask=16); in intel_pebs_aliases_snb()
2494 alt_config |= (event->hw.config & ~X86_RAW_EVENT_MASK); in intel_pebs_aliases_snb()
2495 event->hw.config = alt_config; in intel_pebs_aliases_snb()
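The hits at 2445-2495 are the two PEBS alias helpers. Both rewrite a raw 0x003c (CPU_CLK_UNHALTED.THREAD_P) config into an equivalent encoding that PEBS can actually sample, keeping all non-event bits intact. A sketch of the SandyBridge variant built from the lines at 2473-2495:

static void intel_pebs_aliases_snb(struct perf_event *event)
{
	if ((event->hw.config & X86_RAW_EVENT_MASK) == 0x003c) {
		/* The 0xc2/0x01 encoding with inv=1, cmask=16 effectively
		 * counts cycles and, unlike 0x003c, is PEBS-capable. */
		u64 alt_config = X86_CONFIG(.event=0xc2, .umask=0x01,
					    .inv=1, .cmask=16);

		/* Keep everything outside the raw event selector bits. */
		alt_config |= (event->hw.config & ~X86_RAW_EVENT_MASK);
		event->hw.config = alt_config;
	}
}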
2499 static unsigned long intel_pmu_free_running_flags(struct perf_event *event) in intel_pmu_free_running_flags() argument
2503 if (event->attr.use_clockid) in intel_pmu_free_running_flags()
2508 static int intel_pmu_hw_config(struct perf_event *event) in intel_pmu_hw_config() argument
2510 int ret = x86_pmu_hw_config(event); in intel_pmu_hw_config()
2515 if (event->attr.precise_ip) { in intel_pmu_hw_config()
2516 if (!event->attr.freq) { in intel_pmu_hw_config()
2517 event->hw.flags |= PERF_X86_EVENT_AUTO_RELOAD; in intel_pmu_hw_config()
2518 if (!(event->attr.sample_type & in intel_pmu_hw_config()
2519 ~intel_pmu_free_running_flags(event))) in intel_pmu_hw_config()
2520 event->hw.flags |= PERF_X86_EVENT_FREERUNNING; in intel_pmu_hw_config()
2523 x86_pmu.pebs_aliases(event); in intel_pmu_hw_config()
2526 if (needs_branch_stack(event)) { in intel_pmu_hw_config()
2527 ret = intel_pmu_setup_lbr_filter(event); in intel_pmu_hw_config()
2534 if (!intel_pmu_has_bts(event)) { in intel_pmu_hw_config()
2539 event->destroy = hw_perf_lbr_event_destroy; in intel_pmu_hw_config()
2543 if (event->attr.type != PERF_TYPE_RAW) in intel_pmu_hw_config()
2546 if (!(event->attr.config & ARCH_PERFMON_EVENTSEL_ANY)) in intel_pmu_hw_config()
2555 event->hw.config |= ARCH_PERFMON_EVENTSEL_ANY; in intel_pmu_hw_config()
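The hits at 2510-2555 are the spine of intel_pmu_hw_config(). A condensed sketch of the branches those hits belong to; the NULL guard on pebs_aliases, the LBR exclusivity accounting, and the version/privilege checks in front of the final ANY-bit enable are assumed or elided:

static int intel_pmu_hw_config(struct perf_event *event)
{
	int ret = x86_pmu_hw_config(event);

	if (ret)
		return ret;

	if (event->attr.precise_ip) {
		/* Fixed-period PEBS events can auto-reload, and if every
		 * requested sample field comes straight from the PEBS
		 * record they can run "free" without PMIs. */
		if (!event->attr.freq) {
			event->hw.flags |= PERF_X86_EVENT_AUTO_RELOAD;
			if (!(event->attr.sample_type &
			      ~intel_pmu_free_running_flags(event)))
				event->hw.flags |= PERF_X86_EVENT_FREERUNNING;
		}
		if (x86_pmu.pebs_aliases)	/* guard assumed */
			x86_pmu.pebs_aliases(event);
	}

	if (needs_branch_stack(event)) {
		ret = intel_pmu_setup_lbr_filter(event);
		if (ret)
			return ret;

		if (!intel_pmu_has_bts(event)) {
			/* LBR exclusivity accounting elided */
			event->destroy = hw_perf_lbr_event_destroy;
		}
	}

	if (event->attr.type != PERF_TYPE_RAW)
		return 0;

	if (!(event->attr.config & ARCH_PERFMON_EVENTSEL_ANY))
		return 0;

	/* (version and privilege checks elided) */
	event->hw.config |= ARCH_PERFMON_EVENTSEL_ANY;

	return 0;
}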
2597 struct perf_event *event = cpuc->events[idx]; in core_guest_get_msrs() local
2606 event->hw.config | ARCH_PERFMON_EVENTSEL_ENABLE; in core_guest_get_msrs()
2608 if (event->attr.exclude_host) in core_guest_get_msrs()
2610 else if (event->attr.exclude_guest) in core_guest_get_msrs()
2618 static void core_pmu_enable_event(struct perf_event *event) in core_pmu_enable_event() argument
2620 if (!event->attr.exclude_host) in core_pmu_enable_event()
2621 x86_pmu_enable_event(event); in core_pmu_enable_event()
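The hits at 2597-2621 tie the exclude_host / exclude_guest attributes to KVM counter switching: core_guest_get_msrs() builds per-counter host and guest MSR values, and core_pmu_enable_event() refuses to program a host-excluded event directly. A sketch of both; the loop setup and the arr[] bookkeeping around the listed lines are assumed from context:

	/* Inside core_guest_get_msrs(), per the hits at 2597-2610: */
	for (idx = 0; idx < x86_pmu.num_counters; idx++) {
		struct perf_event *event = cpuc->events[idx];

		arr[idx].msr = x86_pmu_config_addr(idx);
		arr[idx].host = arr[idx].guest = 0;

		if (!test_bit(idx, cpuc->active_mask))
			continue;

		arr[idx].host = arr[idx].guest =
			event->hw.config | ARCH_PERFMON_EVENTSEL_ENABLE;

		if (event->attr.exclude_host)
			arr[idx].host = 0;	/* count only while in guest */
		else if (event->attr.exclude_guest)
			arr[idx].guest = 0;	/* count only while in host  */
	}

	/* core_pmu_enable_event(), from the hits at 2618-2621: */
static void core_pmu_enable_event(struct perf_event *event)
{
	if (!event->attr.exclude_host)
		x86_pmu_enable_event(event);
}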
2640 static int hsw_hw_config(struct perf_event *event) in hsw_hw_config() argument
2642 int ret = intel_pmu_hw_config(event); in hsw_hw_config()
2648 event->hw.config |= event->attr.config & (HSW_IN_TX|HSW_IN_TX_CHECKPOINTED); in hsw_hw_config()
2655 if ((event->hw.config & (HSW_IN_TX|HSW_IN_TX_CHECKPOINTED)) && in hsw_hw_config()
2656 ((event->hw.config & ARCH_PERFMON_EVENTSEL_ANY) || in hsw_hw_config()
2657 event->attr.precise_ip > 0)) in hsw_hw_config()
2660 if (event_is_checkpointed(event)) { in hsw_hw_config()
2670 if (event->attr.sample_period > 0 && in hsw_hw_config()
2671 event->attr.sample_period < 0x7fffffff) in hsw_hw_config()
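The hits at 2642-2671 are Haswell's TSX-specific config checks layered on top of intel_pmu_hw_config(): the IN_TX / IN_TX_CHECKPOINTED bits are copied from the user config, rejected in combination with ANY-thread mode or PEBS, and checkpointed events may not use a normal sampling period. A sketch; the RTM/HLE feature gate and the exact error codes are assumed:

static int hsw_hw_config(struct perf_event *event)
{
	int ret = intel_pmu_hw_config(event);

	if (ret)
		return ret;
	if (!boot_cpu_has(X86_FEATURE_RTM) && !boot_cpu_has(X86_FEATURE_HLE))
		return 0;

	event->hw.config |= event->attr.config &
			    (HSW_IN_TX|HSW_IN_TX_CHECKPOINTED);

	/* The TX filters cannot be combined with ANY-thread mode or PEBS. */
	if ((event->hw.config & (HSW_IN_TX|HSW_IN_TX_CHECKPOINTED)) &&
	    ((event->hw.config & ARCH_PERFMON_EVENTSEL_ANY) ||
	     event->attr.precise_ip > 0))
		return -EOPNOTSUPP;

	/* Checkpointed counters must not sample with a short period,
	 * otherwise constant aborts would be checkpointed away forever. */
	if (event_is_checkpointed(event)) {
		if (event->attr.sample_period > 0 &&
		    event->attr.sample_period < 0x7fffffff)
			return -EOPNOTSUPP;
	}

	return 0;
}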
2682 struct perf_event *event) in hsw_get_event_constraints() argument
2686 c = intel_get_event_constraints(cpuc, idx, event); in hsw_get_event_constraints()
2689 if (event->hw.config & HSW_IN_TX_CHECKPOINTED) { in hsw_get_event_constraints()
2713 static unsigned bdw_limit_period(struct perf_event *event, unsigned left) in bdw_limit_period() argument
2715 if ((event->hw.config & INTEL_ARCH_EVENT_MASK) == in bdw_limit_period()
2716 X86_CONFIG(.event=0xc0, .umask=0x01)) { in bdw_limit_period()
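The hit at 2715-2716 identifies INST_RETIRED.ALL (event 0xc0, umask 0x01) inside bdw_limit_period(), Broadwell's workaround for that event's sampling period. The body of the if, i.e. the floor of 128 and the clearing of the low six bits, is assumed here (it is not part of the listing):

static unsigned bdw_limit_period(struct perf_event *event, unsigned left)
{
	if ((event->hw.config & INTEL_ARCH_EVENT_MASK) ==
			X86_CONFIG(.event=0xc0, .umask=0x01)) {
		/* Assumed erratum workaround: enforce a minimum period and
		 * keep it a multiple of 64. */
		if (left < 128)
			left = 128;
		left &= ~0x3fu;
	}
	return left;
}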
2724 PMU_FORMAT_ATTR(event, "config:0-7" );
2746 u64 event = (config & ARCH_PERFMON_EVENTSEL_EVENT); in intel_event_sysfs_show() local
2748 return x86_event_sysfs_show(page, config, event); in intel_event_sysfs_show()
3334 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
3337 X86_CONFIG(.event=0xb1, .umask=0x3f, .inv=1, .cmask=1); in intel_pmu_init()
3397 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
3400 X86_CONFIG(.event=0xb1, .umask=0x3f, .inv=1, .cmask=1); in intel_pmu_init()
3434 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
3437 X86_CONFIG(.event=0xb1, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
3470 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
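The remaining hits, 3334-3470, are all inside intel_pmu_init(): for each microarchitecture the generic stalled-cycles events are mapped to an inverted, cmask=1 encoding of the relevant uop event. A sketch of the pattern for one such block; the left-hand sides (the intel_perfmon_event_map entries) are assumed, since the listing only shows the X86_CONFIG values:

	/* Event 0x0e/0x01 (uops issued) with inv=1, cmask=1 counts cycles
	 * in which no uops were issued, i.e. front-end stall cycles. */
	intel_perfmon_event_map[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] =
		X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1);

	/* The back-end stall encoding uses event 0xb1 (dispatched uops);
	 * the umask differs per microarchitecture (0x3f vs. 0x01 in the
	 * hits above). */
	intel_perfmon_event_map[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] =
		X86_CONFIG(.event=0xb1, .umask=0x3f, .inv=1, .cmask=1);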