Lines matching references to 'hwc' (struct hw_perf_event *) in the x86 perf event core
66 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_update() local
69 int idx = hwc->idx; in x86_perf_event_update()
83 prev_raw_count = local64_read(&hwc->prev_count); in x86_perf_event_update()
84 rdpmcl(hwc->event_base_rdpmc, new_raw_count); in x86_perf_event_update()
86 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in x86_perf_event_update()
102 local64_sub(delta, &hwc->period_left); in x86_perf_event_update()
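
The x86_perf_event_update() lines above are the lock-free counter drain: the running value is read with RDPMC (via hwc->event_base_rdpmc), hwc->prev_count is advanced with a cmpxchg so a racing NMI cannot account the same delta twice, and the delta is added to event->count and subtracted from hwc->period_left. A minimal user-space sketch of the same idea follows; the 48-bit width, the model_ names and the C11 atomics are illustrative stand-ins, not kernel code, and the kernel additionally re-reads the counter whenever the cmpxchg loses a race.

    #include <stdatomic.h>
    #include <stdint.h>

    #define MODEL_CNTVAL_BITS 48   /* assumed counter width, like x86_pmu.cntval_bits */

    /* Illustrative stand-ins for the per-event kernel state used above. */
    struct model_event {
        _Atomic uint64_t prev_count;   /* last raw value seen in the hardware counter */
        _Atomic int64_t  count;        /* accumulated event count */
        _Atomic int64_t  period_left;  /* events left until the next sample */
    };

    /* Fold a new raw counter reading into the event.  new_raw models the value
     * returned by rdpmcl(hwc->event_base_rdpmc, ...); the kernel re-reads the
     * counter on every retry, which this simplified model skips. */
    static void model_event_update(struct model_event *ev, uint64_t new_raw)
    {
        uint64_t prev_raw;
        int64_t delta;

        do {
            prev_raw = atomic_load(&ev->prev_count);
        } while (!atomic_compare_exchange_weak(&ev->prev_count, &prev_raw, new_raw));

        /* Shift up and arithmetic-shift back down so a counter narrower than
         * 64 bits yields a correctly signed delta even if it wrapped. */
        delta = (int64_t)((new_raw << (64 - MODEL_CNTVAL_BITS)) -
                          (prev_raw << (64 - MODEL_CNTVAL_BITS)));
        delta >>= 64 - MODEL_CNTVAL_BITS;

        atomic_fetch_add(&ev->count, delta);
        atomic_fetch_sub(&ev->period_left, delta);
    }

The point of the cmpxchg is that whichever context wins the exchange (task context or the NMI handler) is the only one that accounts the prev-to-new delta; the loser retries against the freshly published prev_count.
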
292 set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event) in set_ext_hw_attr() argument
320 hwc->config |= val; in set_ext_hw_attr()
389 struct hw_perf_event *hwc = &event->hw; in x86_setup_perfctr() local
393 hwc->sample_period = x86_pmu.max_period; in x86_setup_perfctr()
394 hwc->last_period = hwc->sample_period; in x86_setup_perfctr()
395 local64_set(&hwc->period_left, hwc->sample_period); in x86_setup_perfctr()
402 return set_ext_hw_attr(hwc, event); in x86_setup_perfctr()
422 !attr->freq && hwc->sample_period == 1) { in x86_setup_perfctr()
438 hwc->config |= config; in x86_setup_perfctr()
632 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all() local
637 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in x86_pmu_enable_all()
837 struct hw_perf_event *hwc; in x86_schedule_events() local
857 hwc = &cpuc->event_list[i]->hw; in x86_schedule_events()
861 if (hwc->idx == -1) in x86_schedule_events()
865 if (!test_bit(hwc->idx, c->idxmsk)) in x86_schedule_events()
869 if (test_bit(hwc->idx, used_mask)) in x86_schedule_events()
872 __set_bit(hwc->idx, used_mask); in x86_schedule_events()
874 assign[i] = hwc->idx; in x86_schedule_events()
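
These x86_schedule_events() lines are the scheduling fast path: if every event can stay on the counter it used last time (hwc->idx is valid, still permitted by the event's constraint mask, and not already claimed by another event), the previous assignment is reused and the full assignment algorithm is skipped. A rough sketch of that reuse test, with illustrative types standing in for the kernel's event_constraint and cpu_hw_events:

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative per-event scheduling state; the kernel keeps the constraint
     * in a struct event_constraint bitmap rather than a plain u64. */
    struct model_sched_event {
        int idx;                     /* counter used last time, -1 if never scheduled */
        uint64_t constraint_mask;    /* bit n set if counter n may host this event */
    };

    /* Try to keep every event on the counter it already occupies.  Returning
     * false models the point where the kernel gives up on reusing hwc->idx
     * and falls back to the full constraint solver. */
    static bool model_fastpath_assign(struct model_sched_event **events, int n, int *assign)
    {
        uint64_t used_mask = 0;

        for (int i = 0; i < n; i++) {
            struct model_sched_event *hwc = events[i];

            if (hwc->idx == -1)                               /* never scheduled before */
                return false;

            if (!(hwc->constraint_mask & (1ULL << hwc->idx))) /* constraint changed */
                return false;

            if (used_mask & (1ULL << hwc->idx))               /* counter already taken */
                return false;

            used_mask |= 1ULL << hwc->idx;
            assign[i] = hwc->idx;
        }
        return true;
    }
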
980 struct hw_perf_event *hwc = &event->hw; in x86_assign_hw_event() local
982 hwc->idx = cpuc->assign[i]; in x86_assign_hw_event()
983 hwc->last_cpu = smp_processor_id(); in x86_assign_hw_event()
984 hwc->last_tag = ++cpuc->tags[i]; in x86_assign_hw_event()
986 if (hwc->idx == INTEL_PMC_IDX_FIXED_BTS) { in x86_assign_hw_event()
987 hwc->config_base = 0; in x86_assign_hw_event()
988 hwc->event_base = 0; in x86_assign_hw_event()
989 } else if (hwc->idx >= INTEL_PMC_IDX_FIXED) { in x86_assign_hw_event()
990 hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL; in x86_assign_hw_event()
991 hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - INTEL_PMC_IDX_FIXED); in x86_assign_hw_event()
992 hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | 1<<30; in x86_assign_hw_event()
994 hwc->config_base = x86_pmu_config_addr(hwc->idx); in x86_assign_hw_event()
995 hwc->event_base = x86_pmu_event_addr(hwc->idx); in x86_assign_hw_event()
996 hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx); in x86_assign_hw_event()
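
x86_assign_hw_event() turns the scheduled counter index into the MSRs the rest of the driver will program: BTS gets neither a config nor a count MSR, fixed counters share the FIXED_CTR_CTRL control register and sit at FIXED_CTR0 + n with bit 30 set in the RDPMC index to select the fixed-counter bank, and general-purpose counters use the usual EVENTSEL/PERFCTR pairs. A sketch of that mapping for the plain architectural-perfmon layout; the MSR numbers are the standard ones from the Intel SDM, while the struct and the BTS index value are illustrative:

    #include <stdint.h>

    /* MSR numbers for the architectural performance counters (Intel SDM). */
    #define MSR_ARCH_PERFMON_EVENTSEL0      0x186
    #define MSR_ARCH_PERFMON_PERFCTR0       0x0c1
    #define MSR_ARCH_PERFMON_FIXED_CTR_CTRL 0x38d
    #define MSR_ARCH_PERFMON_FIXED_CTR0     0x309

    #define MODEL_IDX_FIXED 32                       /* first fixed-counter index */
    #define MODEL_IDX_BTS   (MODEL_IDX_FIXED + 16)   /* BTS pseudo-index (exact value varies by kernel version) */

    struct model_hw_event {
        int idx;
        unsigned int config_base;        /* MSR holding the event select / control bits */
        unsigned int event_base;         /* MSR holding the counter value */
        unsigned int event_base_rdpmc;   /* counter index to pass to RDPMC */
    };

    /* Mirror of the idx -> MSR mapping above, for the plain architectural
     * layout (no addr_offset quirks, no hybrid PMUs). */
    static void model_assign_hw_event(struct model_hw_event *hwc)
    {
        if (hwc->idx == MODEL_IDX_BTS) {
            /* BTS is not a counter: nothing to program or read */
            hwc->config_base = 0;
            hwc->event_base = 0;
        } else if (hwc->idx >= MODEL_IDX_FIXED) {
            /* fixed counters share one control MSR and sit at FIXED_CTR0 + n;
             * RDPMC selects the fixed bank via bit 30 of the index */
            hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL;
            hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - MODEL_IDX_FIXED);
            hwc->event_base_rdpmc = (hwc->idx - MODEL_IDX_FIXED) | (1u << 30);
        } else {
            /* general-purpose counters: EVENTSEL<n> / PERFCTR<n> */
            hwc->config_base = MSR_ARCH_PERFMON_EVENTSEL0 + hwc->idx;
            hwc->event_base = MSR_ARCH_PERFMON_PERFCTR0 + hwc->idx;
            hwc->event_base_rdpmc = hwc->idx;
        }
    }
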
1000 static inline int match_prev_assignment(struct hw_perf_event *hwc, in match_prev_assignment() argument
1004 return hwc->idx == cpuc->assign[i] && in match_prev_assignment()
1005 hwc->last_cpu == smp_processor_id() && in match_prev_assignment()
1006 hwc->last_tag == cpuc->tags[i]; in match_prev_assignment()
1015 struct hw_perf_event *hwc; in x86_pmu_enable() local
1034 hwc = &event->hw; in x86_pmu_enable()
1042 if (hwc->idx == -1 || in x86_pmu_enable()
1043 match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1050 if (hwc->state & PERF_HES_STOPPED) in x86_pmu_enable()
1051 hwc->state |= PERF_HES_ARCH; in x86_pmu_enable()
1061 hwc = &event->hw; in x86_pmu_enable()
1063 if (!match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1068 if (hwc->state & PERF_HES_ARCH) in x86_pmu_enable()
1091 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_set_period() local
1092 s64 left = local64_read(&hwc->period_left); in x86_perf_event_set_period()
1093 s64 period = hwc->sample_period; in x86_perf_event_set_period()
1094 int ret = 0, idx = hwc->idx; in x86_perf_event_set_period()
1104 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1105 hwc->last_period = period; in x86_perf_event_set_period()
1111 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1112 hwc->last_period = period; in x86_perf_event_set_period()
1129 if (!(hwc->flags & PERF_X86_EVENT_AUTO_RELOAD) || in x86_perf_event_set_period()
1130 local64_read(&hwc->prev_count) != (u64)-left) { in x86_perf_event_set_period()
1135 local64_set(&hwc->prev_count, (u64)-left); in x86_perf_event_set_period()
1137 wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask); in x86_perf_event_set_period()
1146 wrmsrl(hwc->event_base, in x86_perf_event_set_period()
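
x86_perf_event_set_period() arms an up-counting PMC by writing the two's complement of the remaining period: programming (u64)(-left), truncated to the counter width by x86_pmu.cntval_mask, makes the counter overflow and raise the PMI after exactly 'left' more events. The PERF_X86_EVENT_AUTO_RELOAD check above skips the MSR write when PEBS auto-reload has already left that same value in place. A small sketch of just the value computation, assuming a 48-bit counter (the model_ names are illustrative):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_CNTVAL_BITS 48
    #define MODEL_CNTVAL_MASK ((1ULL << MODEL_CNTVAL_BITS) - 1)

    /* Value to program into an up-counting PMC so it overflows after 'left'
     * more events: the two's complement of 'left', truncated to the counter
     * width -- what the wrmsrl(hwc->event_base, ...) lines above write. */
    static uint64_t model_period_program_value(int64_t left)
    {
        return (uint64_t)(-left) & MODEL_CNTVAL_MASK;
    }

    int main(void)
    {
        /* a 100000-event period is programmed as 0xfffffffe7960 on a 48-bit counter */
        printf("0x%" PRIx64 "\n", model_period_program_value(100000));
        return 0;
    }
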
1171 struct hw_perf_event *hwc; in x86_pmu_add() local
1175 hwc = &event->hw; in x86_pmu_add()
1182 hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED; in x86_pmu_add()
1184 hwc->state |= PERF_HES_ARCH; in x86_pmu_add()
1305 struct hw_perf_event *hwc = &event->hw; in x86_pmu_stop() local
1307 if (__test_and_clear_bit(hwc->idx, cpuc->active_mask)) { in x86_pmu_stop()
1309 cpuc->events[hwc->idx] = NULL; in x86_pmu_stop()
1310 WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED); in x86_pmu_stop()
1311 hwc->state |= PERF_HES_STOPPED; in x86_pmu_stop()
1314 if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) { in x86_pmu_stop()
1320 hwc->state |= PERF_HES_UPTODATE; in x86_pmu_stop()
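
The last two groups, x86_pmu_add() and x86_pmu_stop(), are where the PERF_HES_* state bits are managed: a newly added event starts out PERF_HES_STOPPED | PERF_HES_UPTODATE (plus PERF_HES_ARCH when no immediate start was requested), and stopping an event drains the counter into event->count at most once, guarded by PERF_HES_UPTODATE. A compact model of that state machine; the flag values mirror linux/perf_event.h, while the model_ types and helpers are illustrative:

    #include <stdbool.h>

    /* hwc->state bits, as in linux/perf_event.h */
    #define PERF_HES_STOPPED  0x01   /* counter is not counting */
    #define PERF_HES_UPTODATE 0x02   /* event->count already holds the final value */
    #define PERF_HES_ARCH     0x04   /* arch-private: was stopped before rescheduling */

    struct model_event_state {
        unsigned int state;
        bool active;                 /* models the event's bit in cpuc->active_mask */
    };

    /* Model of x86_pmu_add(): a newly added event starts out stopped and up to
     * date; PERF_HES_ARCH is set when the caller did not ask for an immediate
     * start (no PERF_EF_START), so a later enable leaves it stopped. */
    static void model_pmu_add(struct model_event_state *ev, bool start_now)
    {
        ev->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
        if (!start_now)
            ev->state |= PERF_HES_ARCH;
    }

    /* Model of x86_pmu_stop(): mark the event stopped, then drain the hardware
     * counter into event->count exactly once -- PERF_HES_UPTODATE guards
     * against a second drain when stop is called again on an already-stopped
     * event. */
    static void model_pmu_stop(struct model_event_state *ev, bool update)
    {
        if (ev->active) {
            ev->active = false;
            ev->state |= PERF_HES_STOPPED;
        }

        if (update && !(ev->state & PERF_HES_UPTODATE)) {
            /* the kernel calls x86_perf_event_update() here */
            ev->state |= PERF_HES_UPTODATE;
        }
    }
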