Lines matching refs:event in arch/xtensa/kernel/perf_event.c (xtensa perf PMU driver); the leading numbers are source line numbers in that file.
55 struct perf_event *event[XCHAL_NUM_PERF_COUNTERS]; member
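The event[] member at line 55 is the per-CPU table that maps each hardware counter slot back to its owning perf_event; used_mask (referenced further down at lines 299 and 315) tracks which slots are occupied. A minimal sketch of such a structure, assuming a kernel build context; the struct and variable names other than event/used_mask are illustrative assumptions:

#include <linux/perf_event.h>
#include <linux/percpu.h>

/*
 * Per-CPU bookkeeping: one slot per hardware counter plus a bitmap of
 * occupied slots.  XCHAL_NUM_PERF_COUNTERS comes from the Xtensa core
 * configuration headers.
 */
struct xtensa_pmu_events {
        struct perf_event *event[XCHAL_NUM_PERF_COUNTERS];
        unsigned long used_mask[BITS_TO_LONGS(XCHAL_NUM_PERF_COUNTERS)];
};
static DEFINE_PER_CPU(struct xtensa_pmu_events, xtensa_pmu_events);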
139 static void xtensa_perf_event_update(struct perf_event *event, in xtensa_perf_event_update() argument
147 new_raw_count = xtensa_pmu_read_counter(event->hw.idx); in xtensa_perf_event_update()
153 local64_add(delta, &event->count); in xtensa_perf_event_update()
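Lines 139-153 are the standard counter-update pattern: re-read the hardware counter, take the delta against the last saved raw value, and fold it into event->count. A sketch of that loop, assuming the counter is read through xtensa_pmu_read_counter() (seen at line 147) and wraps at a width described by a mask whose name, XTENSA_PMU_COUNTER_MASK, is an assumption:

static void xtensa_perf_event_update(struct perf_event *event,
                                     struct hw_perf_event *hwc, int idx)
{
        uint64_t prev_raw_count, new_raw_count;
        int64_t delta;

        /* Retry until no concurrent updater raced with us on prev_count. */
        do {
                prev_raw_count = local64_read(&hwc->prev_count);
                new_raw_count = xtensa_pmu_read_counter(event->hw.idx);
        } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
                                 new_raw_count) != prev_raw_count);

        /* Masking handles the counter wrapping around its width. */
        delta = (new_raw_count - prev_raw_count) & XTENSA_PMU_COUNTER_MASK;

        local64_add(delta, &event->count);
        local64_sub(delta, &hwc->period_left);
}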
157 static bool xtensa_perf_event_set_period(struct perf_event *event, in xtensa_perf_event_set_period() argument
163 if (!is_sampling_event(event)) { in xtensa_perf_event_set_period()
186 perf_event_update_userpage(event); in xtensa_perf_event_set_period()
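Lines 157-186 re-arm the counter: a non-sampling event simply gets the full counter range, while a sampling event is programmed to overflow after sample_period more events. A hedged sketch of that logic; xtensa_pmu_write_counter() and XTENSA_PMU_COUNTER_MAX are assumed names for the counter-write helper and the counter's maximum value:

static bool xtensa_perf_event_set_period(struct perf_event *event,
                                         struct hw_perf_event *hwc, int idx)
{
        bool rc = false;
        s64 left;

        if (!is_sampling_event(event)) {
                /* Counting mode: no period, use the full counter range. */
                left = XTENSA_PMU_COUNTER_MAX;
        } else {
                s64 period = hwc->sample_period;

                left = local64_read(&hwc->period_left);
                if (left <= -period) {
                        /* Overshot by more than a period: restart cleanly. */
                        left = period;
                        local64_set(&hwc->period_left, left);
                        hwc->last_period = period;
                        rc = true;
                } else if (left <= 0) {
                        left += period;
                        local64_set(&hwc->period_left, left);
                        hwc->last_period = period;
                        rc = true;
                }
                if (left > XTENSA_PMU_COUNTER_MAX)
                        left = XTENSA_PMU_COUNTER_MAX;
        }

        /* Arm the counter so it overflows after 'left' more events. */
        local64_set(&hwc->prev_count, -left);
        xtensa_pmu_write_counter(idx, -left);
        perf_event_update_userpage(event);

        return rc;
}

Programming -left into the counter is the usual trick: the hardware then raises its overflow interrupt exactly after 'left' more events, which is what drives the sampling path in the IRQ handler further down.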
201 static int xtensa_pmu_event_init(struct perf_event *event) in xtensa_pmu_event_init() argument
205 switch (event->attr.type) { in xtensa_pmu_event_init()
207 if (event->attr.config >= ARRAY_SIZE(xtensa_hw_ctl) || in xtensa_pmu_event_init()
208 xtensa_hw_ctl[event->attr.config] == 0) in xtensa_pmu_event_init()
210 event->hw.config = xtensa_hw_ctl[event->attr.config]; in xtensa_pmu_event_init()
214 ret = xtensa_pmu_cache_event(event->attr.config); in xtensa_pmu_event_init()
217 event->hw.config = ret; in xtensa_pmu_event_init()
222 if ((event->attr.config & XTENSA_PMU_PMCTRL_SELECT) == in xtensa_pmu_event_init()
225 event->hw.config = (event->attr.config & in xtensa_pmu_event_init()
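Lines 201-225 show event_init translating the generic perf attr into a hardware control word: PERF_TYPE_HARDWARE goes through the xtensa_hw_ctl[] table, PERF_TYPE_HW_CACHE through xtensa_pmu_cache_event(), and PERF_TYPE_RAW is checked against the PMCTRL select field. A sketch of that switch; only the hardware and cache branches are directly backed by the fragments above, and the raw-event masking plus the XTENSA_PMU_PMCTRL_INTEN bit are simplified assumptions:

static int xtensa_pmu_event_init(struct perf_event *event)
{
        int ret;

        switch (event->attr.type) {
        case PERF_TYPE_HARDWARE:
                /* Generic events go through a lookup table; a zero entry
                 * means the event has no Xtensa counterpart. */
                if (event->attr.config >= ARRAY_SIZE(xtensa_hw_ctl) ||
                    xtensa_hw_ctl[event->attr.config] == 0)
                        return -EINVAL;
                event->hw.config = xtensa_hw_ctl[event->attr.config];
                return 0;

        case PERF_TYPE_HW_CACHE:
                ret = xtensa_pmu_cache_event(event->attr.config);
                if (ret < 0)
                        return ret;
                event->hw.config = ret;
                return 0;

        case PERF_TYPE_RAW:
                /* Simplified: reject a reserved PMCTRL selector, then copy
                 * the select bits through with the interrupt enable set
                 * (the INTEN bit name and exact masking are assumptions). */
                if ((event->attr.config & XTENSA_PMU_PMCTRL_SELECT) ==
                    XTENSA_PMU_PMCTRL_SELECT)
                        return -EINVAL;
                event->hw.config = (event->attr.config &
                                    XTENSA_PMU_PMCTRL_SELECT) |
                                   XTENSA_PMU_PMCTRL_INTEN;
                return 0;

        default:
                /* Not this PMU's event type. */
                return -ENOENT;
        }
}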
243 static void xtensa_pmu_start(struct perf_event *event, int flags) in xtensa_pmu_start() argument
245 struct hw_perf_event *hwc = &event->hw; in xtensa_pmu_start()
252 WARN_ON_ONCE(!(event->hw.state & PERF_HES_UPTODATE)); in xtensa_pmu_start()
253 xtensa_perf_event_set_period(event, hwc, idx); in xtensa_pmu_start()
261 static void xtensa_pmu_stop(struct perf_event *event, int flags) in xtensa_pmu_stop() argument
263 struct hw_perf_event *hwc = &event->hw; in xtensa_pmu_stop()
274 !(event->hw.state & PERF_HES_UPTODATE)) { in xtensa_pmu_stop()
275 xtensa_perf_event_update(event, &event->hw, idx); in xtensa_pmu_stop()
276 event->hw.state |= PERF_HES_UPTODATE; in xtensa_pmu_stop()
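Lines 243-276 are the start/stop pair: start may reload the period (only valid once the count is up to date, hence the WARN_ON_ONCE at line 252) and clears the state flags, while stop marks the event PERF_HES_STOPPED and, on PERF_EF_UPDATE, folds the final count in and sets PERF_HES_UPTODATE. A sketch of both callbacks; the enable/disable helpers are placeholders standing in for the actual PMCTRL register writes:

static void xtensa_pmu_start(struct perf_event *event, int flags)
{
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        if (flags & PERF_EF_RELOAD) {
                /* Reloading is only valid once the count has been folded in. */
                WARN_ON_ONCE(!(event->hw.state & PERF_HES_UPTODATE));
                xtensa_perf_event_set_period(event, hwc, idx);
        }

        hwc->state = 0;

        /* Placeholder for the PMCTRL write that enables counter 'idx'. */
        xtensa_pmu_enable_counter(idx, hwc->config);
}

static void xtensa_pmu_stop(struct perf_event *event, int flags)
{
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        if (!(hwc->state & PERF_HES_STOPPED)) {
                /* Placeholder for disabling the counter and acking its IRQ. */
                xtensa_pmu_disable_counter(idx);
                hwc->state |= PERF_HES_STOPPED;
        }

        if ((flags & PERF_EF_UPDATE) &&
            !(event->hw.state & PERF_HES_UPTODATE)) {
                xtensa_perf_event_update(event, &event->hw, idx);
                event->hw.state |= PERF_HES_UPTODATE;
        }
}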
284 static int xtensa_pmu_add(struct perf_event *event, int flags) in xtensa_pmu_add() argument
287 struct hw_perf_event *hwc = &event->hw; in xtensa_pmu_add()
299 ev->event[idx] = event; in xtensa_pmu_add()
304 xtensa_pmu_start(event, PERF_EF_RELOAD); in xtensa_pmu_add()
306 perf_event_update_userpage(event); in xtensa_pmu_add()
310 static void xtensa_pmu_del(struct perf_event *event, int flags) in xtensa_pmu_del() argument
314 xtensa_pmu_stop(event, PERF_EF_UPDATE); in xtensa_pmu_del()
315 __clear_bit(event->hw.idx, ev->used_mask); in xtensa_pmu_del()
316 perf_event_update_userpage(event); in xtensa_pmu_del()
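Lines 284-316 manage counter allocation: add claims a free slot in used_mask, records the event in the per-CPU event[] table, and optionally starts it; del stops the event with a final update and releases the slot. A sketch consistent with those fragments (the error code and the slot-reuse detail are assumptions):

static int xtensa_pmu_add(struct perf_event *event, int flags)
{
        struct xtensa_pmu_events *ev = this_cpu_ptr(&xtensa_pmu_events);
        struct hw_perf_event *hwc = &event->hw;
        int idx = hwc->idx;

        /* Try to reuse the previous slot; otherwise grab any free one. */
        if (__test_and_set_bit(idx, ev->used_mask)) {
                idx = find_first_zero_bit(ev->used_mask,
                                          XCHAL_NUM_PERF_COUNTERS);
                if (idx == XCHAL_NUM_PERF_COUNTERS)
                        return -EAGAIN;

                __set_bit(idx, ev->used_mask);
                hwc->idx = idx;
        }
        ev->event[idx] = event;

        hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;

        if (flags & PERF_EF_START)
                xtensa_pmu_start(event, PERF_EF_RELOAD);

        perf_event_update_userpage(event);
        return 0;
}

static void xtensa_pmu_del(struct perf_event *event, int flags)
{
        struct xtensa_pmu_events *ev = this_cpu_ptr(&xtensa_pmu_events);

        xtensa_pmu_stop(event, PERF_EF_UPDATE);
        __clear_bit(event->hw.idx, ev->used_mask);
        perf_event_update_userpage(event);
}

Newly added events start in the stopped/up-to-date state, so the core can call start() with PERF_EF_RELOAD without tripping the warning shown at line 252.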
319 static void xtensa_pmu_read(struct perf_event *event) in xtensa_pmu_read() argument
321 xtensa_perf_event_update(event, &event->hw, event->hw.idx); in xtensa_pmu_read()
372 struct perf_event *event = ev->event[i]; in xtensa_pmu_irq_handler() local
373 struct hw_perf_event *hwc = &event->hw; in xtensa_pmu_irq_handler()
380 xtensa_perf_event_update(event, hwc, i); in xtensa_pmu_irq_handler()
382 if (xtensa_perf_event_set_period(event, hwc, i)) { in xtensa_pmu_irq_handler()
387 if (perf_event_overflow(event, &data, regs)) in xtensa_pmu_irq_handler()
388 xtensa_pmu_stop(event, 0); in xtensa_pmu_irq_handler()
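Lines 372-388 are the overflow interrupt: for each in-use counter that overflowed, fold the wrapped count in, re-arm the period, and hand the sample to perf_event_overflow(); if the core asks to throttle, the event is stopped. A sketch of that loop; the overflow check/ack helper is a placeholder for the PMSTAT register access:

irqreturn_t xtensa_pmu_irq_handler(int irq, void *dev_id)
{
        struct xtensa_pmu_events *ev = this_cpu_ptr(&xtensa_pmu_events);
        struct pt_regs *regs = get_irq_regs();
        irqreturn_t rc = IRQ_NONE;
        unsigned int i;

        for_each_set_bit(i, ev->used_mask, XCHAL_NUM_PERF_COUNTERS) {
                struct perf_event *event = ev->event[i];
                struct hw_perf_event *hwc = &event->hw;

                /* Placeholder: check and ack this counter's overflow bit. */
                if (!xtensa_pmu_counter_overflowed(i))
                        continue;

                /* Fold the wrapped count in, then re-arm the period. */
                xtensa_perf_event_update(event, hwc, i);
                if (xtensa_perf_event_set_period(event, hwc, i)) {
                        struct perf_sample_data data;

                        perf_sample_data_init(&data, 0, hwc->last_period);
                        /* A non-zero return asks us to throttle the event. */
                        if (perf_event_overflow(event, &data, regs))
                                xtensa_pmu_stop(event, 0);
                }
                rc = IRQ_HANDLED;
        }

        return rc;
}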