Lines Matching refs:armpmu

105 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_event_set_period() local
131 if (left > (armpmu->max_period >> 1)) in armpmu_event_set_period()
132 left = armpmu->max_period >> 1; in armpmu_event_set_period()
136 armpmu->write_counter(event, (u64)(-left) & 0xffffffff); in armpmu_event_set_period()
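For context, a hedged sketch of how the matched lines in armpmu_event_set_period() fit together (the hw_perf_event bookkeeping around them is assumed, since the listing only shows the armpmu references): the pending period is clamped to half of max_period so an overflow interrupt cannot be missed, and the counter is programmed with the negated value so it counts up towards overflow.

	struct hw_perf_event *hwc = &event->hw;
	s64 left = local64_read(&hwc->period_left);

	if (left > (armpmu->max_period >> 1))
		left = armpmu->max_period >> 1;

	local64_set(&hwc->prev_count, (u64)-left);

	/* Program the counter so it overflows after "left" events. */
	armpmu->write_counter(event, (u64)(-left) & 0xffffffff);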
145 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_event_update() local
151 new_raw_count = armpmu->read_counter(event); in armpmu_event_update()
157 delta = (new_raw_count - prev_raw_count) & armpmu->max_period; in armpmu_event_update()
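armpmu_event_update() folds the hardware count back into perf's software state; masking the delta with max_period handles counter wrap-around. A sketch, assuming the standard prev_count/cmpxchg retry loop:

	struct hw_perf_event *hwc = &event->hw;
	u64 delta, prev_raw_count, new_raw_count;

	do {
		prev_raw_count = local64_read(&hwc->prev_count);
		new_raw_count = armpmu->read_counter(event);
	} while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
				 new_raw_count) != prev_raw_count);

	delta = (new_raw_count - prev_raw_count) & armpmu->max_period;

	local64_add(delta, &event->count);
	local64_sub(delta, &hwc->period_left);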
174 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_stop() local
182 armpmu->disable(event); in armpmu_stop()
190 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_start() local
209 armpmu->enable(event); in armpmu_start()
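armpmu_stop() and armpmu_start() bracket the per-event disable/enable callbacks with the usual PERF_HES_* state tracking; the ordering below (disable before folding in the final count, reprogram the period before enabling) is assumed from that convention rather than shown in the listing.

	/* stop: disable the counter, then fold in its final count. */
	if (!(hwc->state & PERF_HES_STOPPED)) {
		armpmu->disable(event);
		armpmu_event_update(event);
		hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
	}

	/* start: reprogram the sample period, then enable the counter. */
	hwc->state = 0;
	armpmu_event_set_period(event);
	armpmu->enable(event);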
215 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_del() local
216 struct pmu_hw_events *hw_events = this_cpu_ptr(armpmu->hw_events); in armpmu_del()
223 if (armpmu->clear_event_idx) in armpmu_del()
224 armpmu->clear_event_idx(hw_events, event); in armpmu_del()
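armpmu_del() stops the event, removes it from the per-CPU used_mask, and gives back ends that implement clear_event_idx a chance to drop any extra mapping they keep. Roughly (the hwc->idx and used_mask handling is assumed):

	armpmu_stop(event, PERF_EF_UPDATE);
	hw_events->events[hwc->idx] = NULL;
	clear_bit(hwc->idx, hw_events->used_mask);
	if (armpmu->clear_event_idx)
		armpmu->clear_event_idx(hw_events, event);

	perf_event_update_userpage(event);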
232 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_add() local
233 struct pmu_hw_events *hw_events = this_cpu_ptr(armpmu->hw_events); in armpmu_add()
239 if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus)) in armpmu_add()
245 idx = armpmu->get_event_idx(hw_events, event); in armpmu_add()
256 armpmu->disable(event); in armpmu_add()
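armpmu_add() is the mirror image: it rejects CPUs the PMU does not cover, asks the back end for a free counter index, and disables that counter before installing the event so stale hardware state cannot leak into the new user. A sketch with some bookkeeping elided:

	if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))
		return -ENOENT;

	perf_pmu_disable(event->pmu);

	idx = armpmu->get_event_idx(hw_events, event);
	if (idx < 0) {
		err = idx;
		goto out;
	}

	/* Make sure any previous user of this counter is switched off. */
	event->hw.idx = idx;
	armpmu->disable(event);
	hw_events->events[idx] = event;

	if (flags & PERF_EF_START)
		armpmu_start(event, PERF_EF_RELOAD);
out:
	perf_pmu_enable(event->pmu);
	return err;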
275 struct arm_pmu *armpmu; in validate_event() local
294 armpmu = to_arm_pmu(event->pmu); in validate_event()
295 return armpmu->get_event_idx(hw_events, event) >= 0; in validate_event()
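validate_event() asks whether one event of a proposed group could be scheduled by running the back end's get_event_idx() against a scratch copy of the counter bitmap; software events and events owned by another PMU pass through. Approximately (the early checks are assumed):

	if (is_software_event(event))
		return 1;
	if (event->pmu != pmu)
		return 0;
	if (event->state < PERF_EVENT_STATE_OFF)
		return 1;

	armpmu = to_arm_pmu(event->pmu);
	return armpmu->get_event_idx(hw_events, event) >= 0;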
326 struct arm_pmu *armpmu; in armpmu_dispatch_irq() local
338 armpmu = *(void **)dev; in armpmu_dispatch_irq()
339 plat_device = armpmu->plat_device; in armpmu_dispatch_irq()
344 ret = plat->handle_irq(irq, armpmu, armpmu->handle_irq); in armpmu_dispatch_irq()
346 ret = armpmu->handle_irq(irq, armpmu); in armpmu_dispatch_irq()
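armpmu_dispatch_irq() is the shared interrupt entry point: it recovers the arm_pmu from the percpu device pointer and routes the interrupt through a platform-supplied handle_irq wrapper when one exists, otherwise straight to the PMU's own handler. Sketch (the sched_clock() bracketing for perf_sample_event_took() is assumed):

	struct arm_pmu *armpmu = *(void **)dev;
	struct platform_device *plat_device = armpmu->plat_device;
	struct arm_pmu_platdata *plat = dev_get_platdata(&plat_device->dev);
	u64 start_clock, finish_clock;
	irqreturn_t ret;

	start_clock = sched_clock();
	if (plat && plat->handle_irq)
		ret = plat->handle_irq(irq, armpmu, armpmu->handle_irq);
	else
		ret = armpmu->handle_irq(irq, armpmu);
	finish_clock = sched_clock();

	perf_sample_event_took(finish_clock - start_clock);
	return ret;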
354 armpmu_release_hardware(struct arm_pmu *armpmu) in armpmu_release_hardware() argument
356 armpmu->free_irq(armpmu); in armpmu_release_hardware()
360 armpmu_reserve_hardware(struct arm_pmu *armpmu) in armpmu_reserve_hardware() argument
362 int err = armpmu->request_irq(armpmu, armpmu_dispatch_irq); in armpmu_reserve_hardware()
364 armpmu_release_hardware(armpmu); in armpmu_reserve_hardware()
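Hardware reservation is a thin wrapper around the back end's IRQ hooks: request_irq() installs armpmu_dispatch_irq as the handler, and on failure the partially requested IRQs are released again. A sketch of the pair:

static void armpmu_release_hardware(struct arm_pmu *armpmu)
{
	armpmu->free_irq(armpmu);
}

static int armpmu_reserve_hardware(struct arm_pmu *armpmu)
{
	int err = armpmu->request_irq(armpmu, armpmu_dispatch_irq);

	if (err) {
		armpmu_release_hardware(armpmu);
		return err;
	}

	return 0;
}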
374 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in hw_perf_event_destroy() local
375 atomic_t *active_events = &armpmu->active_events; in hw_perf_event_destroy()
376 struct mutex *pmu_reserve_mutex = &armpmu->reserve_mutex; in hw_perf_event_destroy()
379 armpmu_release_hardware(armpmu); in hw_perf_event_destroy()
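hw_perf_event_destroy() releases the hardware only when the last active event goes away, using the active_events counter and reserve_mutex referenced above:

	if (atomic_dec_and_mutex_lock(active_events, pmu_reserve_mutex)) {
		armpmu_release_hardware(armpmu);
		mutex_unlock(pmu_reserve_mutex);
	}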
394 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in __hw_perf_event_init() local
398 mapping = armpmu->map_event(event); in __hw_perf_event_init()
420 if ((!armpmu->set_event_filter || in __hw_perf_event_init()
421 armpmu->set_event_filter(hwc, &event->attr)) && in __hw_perf_event_init()
440 hwc->sample_period = armpmu->max_period >> 1; in __hw_perf_event_init()
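__hw_perf_event_init() translates the generic perf event into a hardware configuration: map_event() yields the raw event code, set_event_filter() (when the back end provides it) handles privilege filtering, and non-sampling events get a default period of half max_period so the counter is serviced before it can wrap. A condensed sketch; event_requires_mode_exclusion() is assumed to be the driver's local attr check:

	mapping = armpmu->map_event(event);
	if (mapping < 0)
		return mapping;

	/* Mode exclusion is only possible if the back end can filter. */
	if ((!armpmu->set_event_filter ||
	     armpmu->set_event_filter(hwc, &event->attr)) &&
	    event_requires_mode_exclusion(&event->attr))
		return -EOPNOTSUPP;

	hwc->config_base |= (unsigned long)mapping;

	if (!is_sampling_event(event)) {
		hwc->sample_period = armpmu->max_period >> 1;
		hwc->last_period = hwc->sample_period;
		local64_set(&hwc->period_left, hwc->sample_period);
	}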
455 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_event_init() local
457 atomic_t *active_events = &armpmu->active_events; in armpmu_event_init()
467 !cpumask_test_cpu(event->cpu, &armpmu->supported_cpus)) in armpmu_event_init()
474 if (armpmu->map_event(event) == -ENOENT) in armpmu_event_init()
480 mutex_lock(&armpmu->reserve_mutex); in armpmu_event_init()
482 err = armpmu_reserve_hardware(armpmu); in armpmu_event_init()
486 mutex_unlock(&armpmu->reserve_mutex); in armpmu_event_init()
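armpmu_event_init() is the pmu::event_init hook: it rejects CPUs outside supported_cpus and event types that map_event() does not know, then reserves the hardware (IRQs) when the first event becomes active, serialised by reserve_mutex. Roughly:

	if (event->cpu != -1 &&
	    !cpumask_test_cpu(event->cpu, &armpmu->supported_cpus))
		return -ENOENT;

	if (armpmu->map_event(event) == -ENOENT)
		return -ENOENT;

	event->destroy = hw_perf_event_destroy;

	if (!atomic_inc_not_zero(active_events)) {
		mutex_lock(&armpmu->reserve_mutex);
		if (atomic_read(active_events) == 0)
			err = armpmu_reserve_hardware(armpmu);
		if (!err)
			atomic_inc(active_events);
		mutex_unlock(&armpmu->reserve_mutex);
	}

	if (err)
		return err;

	return __hw_perf_event_init(event);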
501 struct arm_pmu *armpmu = to_arm_pmu(pmu); in armpmu_enable() local
502 struct pmu_hw_events *hw_events = this_cpu_ptr(armpmu->hw_events); in armpmu_enable()
503 int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events); in armpmu_enable()
506 if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus)) in armpmu_enable()
510 armpmu->start(armpmu); in armpmu_enable()
515 struct arm_pmu *armpmu = to_arm_pmu(pmu); in armpmu_disable() local
518 if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus)) in armpmu_disable()
521 armpmu->stop(armpmu); in armpmu_disable()
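The whole-PMU enable/disable hooks only touch the hardware on CPUs the PMU supports, and enable is further gated on at least one counter being in use (the bitmap_weight() test above). Together:

static void armpmu_enable(struct pmu *pmu)
{
	struct arm_pmu *armpmu = to_arm_pmu(pmu);
	struct pmu_hw_events *hw_events = this_cpu_ptr(armpmu->hw_events);
	int enabled = bitmap_weight(hw_events->used_mask, armpmu->num_events);

	/* Task-bound events may land on CPUs this PMU does not cover. */
	if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))
		return;

	if (enabled)
		armpmu->start(armpmu);
}

static void armpmu_disable(struct pmu *pmu)
{
	struct arm_pmu *armpmu = to_arm_pmu(pmu);

	if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))
		return;

	armpmu->stop(armpmu);
}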
531 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_filter_match() local
533 return cpumask_test_cpu(cpu, &armpmu->supported_cpus); in armpmu_filter_match()
536 static void armpmu_init(struct arm_pmu *armpmu) in armpmu_init() argument
538 atomic_set(&armpmu->active_events, 0); in armpmu_init()
539 mutex_init(&armpmu->reserve_mutex); in armpmu_init()
541 armpmu->pmu = (struct pmu) { in armpmu_init()
554 int armpmu_register(struct arm_pmu *armpmu, int type) in armpmu_register() argument
556 armpmu_init(armpmu); in armpmu_register()
558 armpmu->name, armpmu->num_events); in armpmu_register()
559 return perf_pmu_register(&armpmu->pmu, armpmu->name, type); in armpmu_register()
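Finally, armpmu_init() fills in the generic struct pmu callbacks and the reservation state, and armpmu_register() hands the result to perf_pmu_register(). A sketch of the pair; the exact callback list is assumed from the functions matched above:

static void armpmu_init(struct arm_pmu *armpmu)
{
	atomic_set(&armpmu->active_events, 0);
	mutex_init(&armpmu->reserve_mutex);

	armpmu->pmu = (struct pmu) {
		.pmu_enable	= armpmu_enable,
		.pmu_disable	= armpmu_disable,
		.event_init	= armpmu_event_init,
		.add		= armpmu_add,
		.del		= armpmu_del,
		.start		= armpmu_start,
		.stop		= armpmu_stop,
		.filter_match	= armpmu_filter_match,
	};
}

int armpmu_register(struct arm_pmu *armpmu, int type)
{
	armpmu_init(armpmu);
	pr_info("enabled with %s PMU driver, %d counters available\n",
		armpmu->name, armpmu->num_events);
	return perf_pmu_register(&armpmu->pmu, armpmu->name, type);
}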