Lines matching refs:hwc — each match shows the kernel source line number, the matched code, and the enclosing function; "local" or "argument" marks the lines where hwc is declared.
66 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_update() local
69 int idx = hwc->idx; in x86_perf_event_update()
83 prev_raw_count = local64_read(&hwc->prev_count); in x86_perf_event_update()
84 rdpmcl(hwc->event_base_rdpmc, new_raw_count); in x86_perf_event_update()
86 if (local64_cmpxchg(&hwc->prev_count, prev_raw_count, in x86_perf_event_update()
102 local64_sub(delta, &hwc->period_left); in x86_perf_event_update()
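The matches above cover only the read/compare-exchange side of x86_perf_event_update(); the delta computation between lines 86 and 102 does not mention hwc and so is not listed. As a hedged sketch of the usual idiom, counters narrower than 64 bits are sign-extended by shifting through 64 - cntval_bits so that wraps are handled correctly:

	int shift = 64 - x86_pmu.cntval_bits;
	s64 delta;

	/* sign-extend both raw counts to 64 bits, then take the difference */
	delta = (new_raw_count << shift) - (prev_raw_count << shift);
	delta >>= shift;

	local64_add(delta, &event->count);	/* accumulate into the event total */
	local64_sub(delta, &hwc->period_left);	/* matched line 102 above */
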
290 set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event) in set_ext_hw_attr() argument
318 hwc->config |= val; in set_ext_hw_attr()
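set_ext_hw_attr() turns a PERF_TYPE_HW_CACHE attribute into a model-specific encoding and ORs it into hwc->config at line 318. A sketch of the lookup that produces that value, assuming the conventional hw_cache_event_ids[type][op][result] table where 0 means "unsupported" and -1 means "invalid combination":

	val = hw_cache_event_ids[cache_type][cache_op][cache_result];
	if (val == 0)
		return -ENOENT;		/* combination not counted on this CPU */
	if (val == -1)
		return -EINVAL;		/* nonsensical combination */

	hwc->config |= val;		/* matched line 318 above */
	return 0;
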
385 struct hw_perf_event *hwc = &event->hw; in x86_setup_perfctr() local
389 hwc->sample_period = x86_pmu.max_period; in x86_setup_perfctr()
390 hwc->last_period = hwc->sample_period; in x86_setup_perfctr()
391 local64_set(&hwc->period_left, hwc->sample_period); in x86_setup_perfctr()
398 return set_ext_hw_attr(hwc, event); in x86_setup_perfctr()
418 !attr->freq && hwc->sample_period == 1) { in x86_setup_perfctr()
434 hwc->config |= config; in x86_setup_perfctr()
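Line 418 is a fragment of the check that routes user-mode branch sampling with a fixed period of 1 to BTS instead of a regular PMC. Roughly, as a sketch rather than the verbatim condition:

	if (attr->config == PERF_COUNT_HW_BRANCH_INSTRUCTIONS &&
	    !attr->freq && hwc->sample_period == 1) {
		/* BTS is not supported by this architecture. */
		if (!x86_pmu.bts_active)
			return -EOPNOTSUPP;

		/* BTS is currently only allowed for user-mode. */
		if (!attr->exclude_kernel)
			return -EOPNOTSUPP;
	}
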
614 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all() local
619 __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE); in x86_pmu_enable_all()
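x86_pmu_enable_all() walks every counter index and re-enables only the ones currently claimed: line 614 takes the hwc for slot idx and line 619 writes its config with the enable bit set. A sketch of the surrounding loop, assuming the usual active_mask guard:

	for (idx = 0; idx < x86_pmu.num_counters; idx++) {
		struct hw_perf_event *hwc = &cpuc->events[idx]->hw;

		if (!test_bit(idx, cpuc->active_mask))
			continue;

		__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
	}
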
819 struct hw_perf_event *hwc; in x86_schedule_events() local
839 hwc = &cpuc->event_list[i]->hw; in x86_schedule_events()
843 if (hwc->idx == -1) in x86_schedule_events()
847 if (!test_bit(hwc->idx, c->idxmsk)) in x86_schedule_events()
851 if (test_bit(hwc->idx, used_mask)) in x86_schedule_events()
854 __set_bit(hwc->idx, used_mask); in x86_schedule_events()
856 assign[i] = hwc->idx; in x86_schedule_events()
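Lines 839-856 are the scheduling fast path in x86_schedule_events(): if every collected event can keep the counter it already owns (it has an index, that index still satisfies its constraint, and no other event has claimed it), the existing assignment is reused and the full constraint solver is skipped. A hedged sketch of that loop; the per-event constraint pointer c is an assumed name:

	for (i = 0; i < n; i++) {
		hwc = &cpuc->event_list[i]->hw;

		/* never assigned */
		if (hwc->idx == -1)
			break;

		/* constraint still honored */
		if (!test_bit(hwc->idx, c->idxmsk))
			break;

		/* not already used */
		if (test_bit(hwc->idx, used_mask))
			break;

		__set_bit(hwc->idx, used_mask);
		if (assign)
			assign[i] = hwc->idx;
	}
	/* if the loop did not cover all n events, fall back to the full solver */
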
965 struct hw_perf_event *hwc = &event->hw; in x86_assign_hw_event() local
967 hwc->idx = cpuc->assign[i]; in x86_assign_hw_event()
968 hwc->last_cpu = smp_processor_id(); in x86_assign_hw_event()
969 hwc->last_tag = ++cpuc->tags[i]; in x86_assign_hw_event()
971 if (hwc->idx == INTEL_PMC_IDX_FIXED_BTS) { in x86_assign_hw_event()
972 hwc->config_base = 0; in x86_assign_hw_event()
973 hwc->event_base = 0; in x86_assign_hw_event()
974 } else if (hwc->idx >= INTEL_PMC_IDX_FIXED) { in x86_assign_hw_event()
975 hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL; in x86_assign_hw_event()
976 hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - INTEL_PMC_IDX_FIXED); in x86_assign_hw_event()
977 hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | 1<<30; in x86_assign_hw_event()
979 hwc->config_base = x86_pmu_config_addr(hwc->idx); in x86_assign_hw_event()
980 hwc->event_base = x86_pmu_event_addr(hwc->idx); in x86_assign_hw_event()
981 hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx); in x86_assign_hw_event()
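Lines 971-981 pick the MSR and RDPMC bases per counter type: the BTS pseudo-counter has no config/event MSRs, fixed-function counters share MSR_ARCH_PERFMON_FIXED_CTR_CTRL and get an RDPMC index with bit 30 set, and general-purpose counters use the per-index addresses. To illustrate the bit-30 encoding stored in event_base_rdpmc, a hypothetical userspace-style helper (read_pmc is not kernel code):

	static inline u64 read_pmc(u32 idx)
	{
		u32 lo, hi;

		asm volatile("rdpmc" : "=a" (lo), "=d" (hi) : "c" (idx));
		return lo | ((u64)hi << 32);
	}

	u64 gp0    = read_pmc(0);		/* general-purpose counter 0 */
	u64 fixed1 = read_pmc(1 | (1u << 30));	/* fixed counter 1, as encoded at line 977 */
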
985 static inline int match_prev_assignment(struct hw_perf_event *hwc, in match_prev_assignment() argument
989 return hwc->idx == cpuc->assign[i] && in match_prev_assignment()
990 hwc->last_cpu == smp_processor_id() && in match_prev_assignment()
991 hwc->last_tag == cpuc->tags[i]; in match_prev_assignment()
1000 struct hw_perf_event *hwc; in x86_pmu_enable() local
1019 hwc = &event->hw; in x86_pmu_enable()
1027 if (hwc->idx == -1 || in x86_pmu_enable()
1028 match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1035 if (hwc->state & PERF_HES_STOPPED) in x86_pmu_enable()
1036 hwc->state |= PERF_HES_ARCH; in x86_pmu_enable()
1046 hwc = &event->hw; in x86_pmu_enable()
1048 if (!match_prev_assignment(hwc, cpuc, i)) in x86_pmu_enable()
1053 if (hwc->state & PERF_HES_ARCH) in x86_pmu_enable()
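x86_pmu_enable() makes two passes over cpuc->event_list. The first pass (lines 1019-1036) skips events whose previous counter assignment is still valid and stops the rest, using PERF_HES_ARCH to remember which of them were already stopped so they are not accidentally restarted. The second pass (lines 1046-1053) programs the new assignments and restarts everything not marked PERF_HES_ARCH. A hedged sketch of the second pass:

	for (i = 0; i < cpuc->n_events; i++) {
		event = cpuc->event_list[i];
		hwc = &event->hw;

		if (!match_prev_assignment(hwc, cpuc, i))
			x86_assign_hw_event(event, cpuc, i);
		else if (i < n_running)		/* unchanged and already running: leave it */
			continue;

		if (hwc->state & PERF_HES_ARCH)
			continue;

		x86_pmu_start(event, PERF_EF_RELOAD);
	}
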
1076 struct hw_perf_event *hwc = &event->hw; in x86_perf_event_set_period() local
1077 s64 left = local64_read(&hwc->period_left); in x86_perf_event_set_period()
1078 s64 period = hwc->sample_period; in x86_perf_event_set_period()
1079 int ret = 0, idx = hwc->idx; in x86_perf_event_set_period()
1089 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1090 hwc->last_period = period; in x86_perf_event_set_period()
1096 local64_set(&hwc->period_left, left); in x86_perf_event_set_period()
1097 hwc->last_period = period; in x86_perf_event_set_period()
1118 local64_set(&hwc->prev_count, (u64)-left); in x86_perf_event_set_period()
1120 wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask); in x86_perf_event_set_period()
1128 wrmsrl(hwc->event_base, in x86_perf_event_set_period()
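x86_perf_event_set_period() arms the counter so that it overflows after 'left' more events: negative or overshot period_left values are folded back into the period (lines 1089-1097), and the hardware is then programmed with the two's complement of the remaining count, masked to the counter width (lines 1118-1120). A sketch of that arming step, assuming the usual clamp to x86_pmu.max_period:

	if (left > x86_pmu.max_period)
		left = x86_pmu.max_period;

	/* record where counting starts so the next update can compute a delta */
	local64_set(&hwc->prev_count, (u64)-left);

	/* the counter counts up from -left and overflows after 'left' events */
	wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask);
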
1153 struct hw_perf_event *hwc; in x86_pmu_add() local
1157 hwc = &event->hw; in x86_pmu_add()
1164 hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED; in x86_pmu_add()
1166 hwc->state |= PERF_HES_ARCH; in x86_pmu_add()
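x86_pmu_add() initializes the new event as stopped and up to date (line 1164) and, when the caller did not pass PERF_EF_START, also sets PERF_HES_ARCH (line 1166) so the later x86_pmu_enable() pass leaves it stopped. The elided condition between those two matches is simply the flags check; a sketch:

	hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
	if (!(flags & PERF_EF_START))
		hwc->state |= PERF_HES_ARCH;
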
1287 struct hw_perf_event *hwc = &event->hw; in x86_pmu_stop() local
1289 if (__test_and_clear_bit(hwc->idx, cpuc->active_mask)) { in x86_pmu_stop()
1291 cpuc->events[hwc->idx] = NULL; in x86_pmu_stop()
1292 WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED); in x86_pmu_stop()
1293 hwc->state |= PERF_HES_STOPPED; in x86_pmu_stop()
1296 if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) { in x86_pmu_stop()
1302 hwc->state |= PERF_HES_UPTODATE; in x86_pmu_stop()
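x86_pmu_stop() releases the counter and marks the event stopped (lines 1289-1293); when the caller passes PERF_EF_UPDATE, the residual hardware count is drained into event->count before the event is marked up to date (lines 1296-1302). A hedged sketch of that update branch:

	if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
		/* drain the remaining delta out of the counter being disabled */
		x86_perf_event_update(event);
		hwc->state |= PERF_HES_UPTODATE;
	}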