/linux-4.4.14/arch/alpha/kernel/
D | perf_event.c | 390 static void maybe_change_configuration(struct cpu_hw_events *cpuc) in maybe_change_configuration() argument
394 if (cpuc->n_added == 0) in maybe_change_configuration()
398 for (j = 0; j < cpuc->n_events; j++) { in maybe_change_configuration()
399 struct perf_event *pe = cpuc->event[j]; in maybe_change_configuration()
401 if (cpuc->current_idx[j] != PMC_NO_INDEX && in maybe_change_configuration()
402 cpuc->current_idx[j] != pe->hw.idx) { in maybe_change_configuration()
403 alpha_perf_event_update(pe, &pe->hw, cpuc->current_idx[j], 0); in maybe_change_configuration()
404 cpuc->current_idx[j] = PMC_NO_INDEX; in maybe_change_configuration()
409 cpuc->idx_mask = 0; in maybe_change_configuration()
410 for (j = 0; j < cpuc->n_events; j++) { in maybe_change_configuration()
[all …]
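The alpha snippets above capture a rescheduling idiom: after new events are added, any event whose live counter index no longer matches its scheduled index is read out one last time, then marked unassigned so it gets reprogrammed. A minimal userspace sketch of that flush-on-move step; the struct layout and the printf stand in for the kernel's types and alpha_perf_event_update(), they are not the real definitions:

    #include <stdio.h>

    #define PMC_NO_INDEX -1
    #define MAX_EVENTS   4

    /* Simplified stand-ins for the kernel's per-CPU bookkeeping. */
    struct event  { int idx; };
    struct cpu_hw {
        int n_events;
        int n_added;
        int current_idx[MAX_EVENTS];
        struct event *event[MAX_EVENTS];
    };

    /* Flush-on-move: an event whose live counter differs from its
     * scheduled one is read out once more, then marked unassigned. */
    static void maybe_change_configuration(struct cpu_hw *cpuc)
    {
        if (cpuc->n_added == 0)
            return;
        for (int j = 0; j < cpuc->n_events; j++) {
            struct event *pe = cpuc->event[j];
            if (cpuc->current_idx[j] != PMC_NO_INDEX &&
                cpuc->current_idx[j] != pe->idx) {
                printf("flush event %d from counter %d\n",
                       j, cpuc->current_idx[j]);
                cpuc->current_idx[j] = PMC_NO_INDEX;
            }
        }
    }

    int main(void)
    {
        struct event e0 = { .idx = 1 }, e1 = { .idx = 0 };
        struct cpu_hw cpuc = { .n_events = 2, .n_added = 1,
                               .current_idx = { 0, 0 },
                               .event = { &e0, &e1 } };
        maybe_change_configuration(&cpuc);  /* event 0 moved: 0 -> 1 */
        return 0;
    }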
/linux-4.4.14/arch/x86/kernel/cpu/
D | perf_event_intel_lbr.c | 131 static void intel_pmu_lbr_filter(struct cpu_hw_events *cpuc);
140 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_lbr_enable() local
154 if (cpuc->lbr_sel) in __intel_pmu_lbr_enable()
155 lbr_select = cpuc->lbr_sel->config; in __intel_pmu_lbr_enable()
280 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_sched_task() local
292 cpuc->lbr_context = ctx; in intel_pmu_lbr_sched_task()
313 cpuc->lbr_context = ctx; in intel_pmu_lbr_sched_task()
324 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_lbr_enable() local
334 if (event->ctx->task && cpuc->lbr_context != event->ctx) { in intel_pmu_lbr_enable()
336 cpuc->lbr_context = event->ctx; in intel_pmu_lbr_enable()
[all …]
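The lbr_context checks above are stale-record hygiene: when a per-task event is enabled on a CPU whose last-branch-record stack was last filled by a different task context, the stack is cleared before the new context is remembered. A hedged userspace model; the types and the puts() standing in for the LBR reset are invented for illustration:

    #include <stdio.h>

    struct ctx    { int id; };
    struct cpu_hw { struct ctx *lbr_context; };

    static void lbr_reset(void) { puts("reset stale LBR records"); }

    /* Only per-task events trigger the ownership check; a CPU-wide
     * event has no task context to mismatch. */
    static void lbr_enable(struct cpu_hw *cpuc, struct ctx *event_ctx,
                           int is_task_event)
    {
        if (is_task_event && cpuc->lbr_context != event_ctx) {
            lbr_reset();
            cpuc->lbr_context = event_ctx;
        }
    }

    int main(void)
    {
        struct ctx a = { 1 }, b = { 2 };
        struct cpu_hw cpuc = { .lbr_context = &a };
        lbr_enable(&cpuc, &b, 1);  /* different context: resets first */
        lbr_enable(&cpuc, &b, 1);  /* same context: no reset */
        return 0;
    }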
D | perf_event.c | 580 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_disable_all() local
586 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_disable_all()
611 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_disable() local
616 if (!cpuc->enabled) in x86_pmu_disable()
619 cpuc->n_added = 0; in x86_pmu_disable()
620 cpuc->enabled = 0; in x86_pmu_disable()
628 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in x86_pmu_enable_all() local
632 struct hw_perf_event *hwc = &cpuc->events[idx]->hw; in x86_pmu_enable_all()
634 if (!test_bit(idx, cpuc->active_mask)) in x86_pmu_enable_all()
831 int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign) in x86_schedule_events() argument
[all …]
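The x86 core snippets show the per-CPU bookkeeping around global disable: bail out if the PMU is already disabled, drop the pending-add count, clear the enabled flag, and skip counters whose active_mask bit is clear. A small sketch of that shape under simplified assumptions (a plain bitmask instead of the kernel's bitops, four counters, and a printf standing in for the MSR write):

    #include <stdio.h>
    #include <stdbool.h>

    #define NUM_COUNTERS 4

    /* Simplified model of the per-CPU PMU state touched above. */
    struct cpu_hw {
        bool enabled;
        int  n_added;
        unsigned long active_mask;  /* bit set => counter is live */
    };

    static void counter_disable(int idx) { printf("disable counter %d\n", idx); }

    /* Mirrors the x86_pmu_disable()/x86_pmu_disable_all() shape: no-op
     * when already disabled, then stop only the active counters. */
    static void pmu_disable(struct cpu_hw *cpuc)
    {
        if (!cpuc->enabled)
            return;
        cpuc->n_added = 0;
        cpuc->enabled = false;
        for (int idx = 0; idx < NUM_COUNTERS; idx++) {
            if (!(cpuc->active_mask & (1UL << idx)))
                continue;
            counter_disable(idx);
        }
    }

    int main(void)
    {
        struct cpu_hw cpuc = { .enabled = true, .n_added = 2,
                               .active_mask = 0x5 };
        pmu_disable(&cpuc);  /* disables counters 0 and 2 only */
        return 0;
    }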
D | perf_event_amd.c | 207 static inline int amd_has_nb(struct cpu_hw_events *cpuc) in amd_has_nb() argument
209 struct amd_nb *nb = cpuc->amd_nb; in amd_has_nb()
235 static void __amd_put_nb_event_constraints(struct cpu_hw_events *cpuc, in __amd_put_nb_event_constraints() argument
238 struct amd_nb *nb = cpuc->amd_nb; in __amd_put_nb_event_constraints()
292 __amd_get_nb_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event, in __amd_get_nb_event_constraints() argument
296 struct amd_nb *nb = cpuc->amd_nb; in __amd_get_nb_event_constraints()
303 if (cpuc->is_fake) in __amd_get_nb_event_constraints()
368 struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu); in amd_pmu_cpu_prepare() local
370 WARN_ON_ONCE(cpuc->amd_nb); in amd_pmu_cpu_prepare()
375 cpuc->amd_nb = amd_alloc_nb(cpu); in amd_pmu_cpu_prepare()
[all …]
D | perf_event_intel.c | 1473 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_disable_all() local
1477 if (test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask)) in __intel_pmu_disable_all()
1493 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in __intel_pmu_enable_all() local
1498 x86_pmu.intel_ctrl & ~cpuc->intel_ctrl_guest_mask); in __intel_pmu_enable_all()
1500 if (test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask)) { in __intel_pmu_enable_all()
1502 cpuc->events[INTEL_PMC_IDX_FIXED_BTS]; in __intel_pmu_enable_all()
1533 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_nhm_workaround() local
1566 event = cpuc->events[i]; in intel_pmu_nhm_workaround()
1580 event = cpuc->events[i]; in intel_pmu_nhm_workaround()
1632 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_disable_event() local
[all …]
D | perf_event_intel_ds.c | 497 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_disable_bts() local
500 if (!cpuc->ds) in intel_pmu_disable_bts()
514 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_drain_bts_buffer() local
515 struct debug_store *ds = cpuc->ds; in intel_pmu_drain_bts_buffer()
521 struct perf_event *event = cpuc->events[INTEL_PMC_IDX_FIXED_BTS]; in intel_pmu_drain_bts_buffer()
769 static inline bool pebs_is_enabled(struct cpu_hw_events *cpuc) in pebs_is_enabled() argument
771 return (cpuc->pebs_enabled & ((1ULL << MAX_PEBS_EVENTS) - 1)); in pebs_is_enabled()
776 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in intel_pmu_pebs_enable() local
778 struct debug_store *ds = cpuc->ds; in intel_pmu_pebs_enable()
784 first_pebs = !pebs_is_enabled(cpuc); in intel_pmu_pebs_enable()
[all …]
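pebs_is_enabled() above masks pebs_enabled down to the low MAX_PEBS_EVENTS bits before testing, because the register can also carry non-counter control bits higher up. A standalone model of the same test; the MAX_PEBS_EVENTS value of 8 and the high-bit example are assumptions for illustration:

    #include <stdio.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define MAX_PEBS_EVENTS 8

    /* "Any PEBS counter armed?" must ignore control bits above the
     * per-counter enable field, so only the low bits are tested --
     * the same mask as in pebs_is_enabled() above. */
    static bool pebs_is_enabled(uint64_t pebs_enabled)
    {
        return pebs_enabled & ((1ULL << MAX_PEBS_EVENTS) - 1);
    }

    int main(void)
    {
        printf("%d\n", pebs_is_enabled(1ULL << 32)); /* 0: only a high control bit */
        printf("%d\n", pebs_is_enabled(0x3));        /* 1: counters 0 and 1 armed */
        return 0;
    }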
D | perf_event_intel_bts.c | 420 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_del() local
435 cpuc->ds->bts_index = bts->ds_back.bts_buffer_base; in bts_event_del()
436 cpuc->ds->bts_buffer_base = bts->ds_back.bts_buffer_base; in bts_event_del()
437 cpuc->ds->bts_absolute_maximum = bts->ds_back.bts_absolute_maximum; in bts_event_del()
438 cpuc->ds->bts_interrupt_threshold = bts->ds_back.bts_interrupt_threshold; in bts_event_del()
445 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_add() local
451 if (test_bit(INTEL_PMC_IDX_FIXED_BTS, cpuc->active_mask)) in bts_event_add()
467 bts->ds_back.bts_buffer_base = cpuc->ds->bts_buffer_base; in bts_event_add()
468 bts->ds_back.bts_absolute_maximum = cpuc->ds->bts_absolute_maximum; in bts_event_add()
469 bts->ds_back.bts_interrupt_threshold = cpuc->ds->bts_interrupt_threshold; in bts_event_add()
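bts_event_add()/bts_event_del() bracket the event's lifetime with a save and restore of the live debug-store fields, so deleting the event puts the buffer configuration back exactly as it was found. A sketch with a trimmed-down struct; the field set and the example values are illustrative, not the kernel's full debug_store layout:

    #include <stdio.h>
    #include <stdint.h>

    struct debug_store {
        uint64_t bts_buffer_base, bts_absolute_maximum, bts_interrupt_threshold;
    };
    struct ds_backup {
        uint64_t bts_buffer_base, bts_absolute_maximum, bts_interrupt_threshold;
    };

    /* Add: snapshot the live configuration before taking over. */
    static void bts_add(struct debug_store *ds, struct ds_backup *back)
    {
        back->bts_buffer_base         = ds->bts_buffer_base;
        back->bts_absolute_maximum    = ds->bts_absolute_maximum;
        back->bts_interrupt_threshold = ds->bts_interrupt_threshold;
        /* ... the event would point ds at its own buffer here ... */
    }

    /* Del: restore the snapshot, undoing whatever the event set up. */
    static void bts_del(struct debug_store *ds, const struct ds_backup *back)
    {
        ds->bts_buffer_base         = back->bts_buffer_base;
        ds->bts_absolute_maximum    = back->bts_absolute_maximum;
        ds->bts_interrupt_threshold = back->bts_interrupt_threshold;
    }

    int main(void)
    {
        struct debug_store ds = { 0x1000, 0x2000, 0x1800 };
        struct ds_backup back;
        bts_add(&ds, &back);
        ds.bts_buffer_base = 0x9000;   /* event installs its own buffer */
        bts_del(&ds, &back);
        printf("restored base %#lx\n", (unsigned long)ds.bts_buffer_base);
        return 0;
    }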
D | perf_event_knc.c | 215 struct cpu_hw_events *cpuc; in knc_pmu_handle_irq() local
220 cpuc = this_cpu_ptr(&cpu_hw_events); in knc_pmu_handle_irq()
242 struct perf_event *event = cpuc->events[bit]; in knc_pmu_handle_irq()
246 if (!test_bit(bit, cpuc->active_mask)) in knc_pmu_handle_irq()
267 if (cpuc->enabled) in knc_pmu_handle_irq()
D | perf_event.h | 518 int (*schedule_events)(struct cpu_hw_events *cpuc, int n, int *assign);
537 (*get_event_constraints)(struct cpu_hw_events *cpuc,
541 void (*put_event_constraints)(struct cpu_hw_events *cpuc,
544 void (*start_scheduling)(struct cpu_hw_events *cpuc);
546 void (*commit_scheduling)(struct cpu_hw_events *cpuc, int idx, int cntr);
548 void (*stop_scheduling)(struct cpu_hw_events *cpuc);
750 int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign);
830 x86_get_event_constraints(struct cpu_hw_events *cpuc, int idx,
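perf_event.h declares these scheduling hooks as function pointers that a given PMU flavor may or may not fill in, so callers null-check each one before invoking it. A minimal model of that optional-callback table; the hook names echo the header's, but the surrounding struct and stub are invented for illustration:

    #include <stdio.h>
    #include <stddef.h>

    struct cpu_hw;  /* opaque per-CPU state, as in the header above */

    /* Sketch of the hook-table pattern: optional scheduling callbacks
     * a PMU implementation can provide. */
    struct pmu_ops {
        int  (*schedule_events)(struct cpu_hw *cpuc, int n, int *assign);
        void (*start_scheduling)(struct cpu_hw *cpuc);
        void (*commit_scheduling)(struct cpu_hw *cpuc, int idx, int cntr);
        void (*stop_scheduling)(struct cpu_hw *cpuc);
    };

    static void my_start(struct cpu_hw *cpuc) { (void)cpuc; puts("start"); }

    int main(void)
    {
        /* This flavor only implements one hook. */
        struct pmu_ops ops = { .start_scheduling = my_start };

        /* Callers null-check every hook before calling. */
        if (ops.start_scheduling)
            ops.start_scheduling(NULL);
        if (ops.stop_scheduling)
            ops.stop_scheduling(NULL);   /* absent: silently skipped */
        return 0;
    }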
D | perf_event_p4.c | 918 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_disable_all() local
922 struct perf_event *event = cpuc->events[idx]; in p4_pmu_disable_all()
923 if (!test_bit(idx, cpuc->active_mask)) in p4_pmu_disable_all()
987 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_enable_all() local
991 struct perf_event *event = cpuc->events[idx]; in p4_pmu_enable_all()
992 if (!test_bit(idx, cpuc->active_mask)) in p4_pmu_enable_all()
1001 struct cpu_hw_events *cpuc; in p4_pmu_handle_irq() local
1007 cpuc = this_cpu_ptr(&cpu_hw_events); in p4_pmu_handle_irq()
1012 if (!test_bit(idx, cpuc->active_mask)) { in p4_pmu_handle_irq()
1014 if (__test_and_clear_bit(idx, cpuc->running)) in p4_pmu_handle_irq()
[all …]
/linux-4.4.14/arch/sparc/kernel/
D | perf_event.c | 825 static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in… in sparc_pmu_enable_event() argument
833 enc = perf_event_get_enc(cpuc->events[idx]); in sparc_pmu_enable_event()
835 val = cpuc->pcr[pcr_index]; in sparc_pmu_enable_event()
838 cpuc->pcr[pcr_index] = val; in sparc_pmu_enable_event()
840 pcr_ops->write_pcr(pcr_index, cpuc->pcr[pcr_index]); in sparc_pmu_enable_event()
843 static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i… in sparc_pmu_disable_event()
853 val = cpuc->pcr[pcr_index]; in sparc_pmu_disable_event()
856 cpuc->pcr[pcr_index] = val; in sparc_pmu_disable_event()
858 pcr_ops->write_pcr(pcr_index, cpuc->pcr[pcr_index]); in sparc_pmu_disable_event()
917 static void read_in_all_counters(struct cpu_hw_events *cpuc) in read_in_all_counters() argument
[all …]
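The sparc enable/disable pair works read-modify-write against a cached copy of the performance control register: update cpuc->pcr[] first, then push the cached value to hardware. A userspace sketch of that caching discipline; the register count, the bit values, and the write_pcr() stub are invented for illustration:

    #include <stdio.h>
    #include <stdint.h>

    #define NUM_PCRS 4

    /* Cached copies of the control registers, as cpuc->pcr[] above. */
    static uint64_t pcr_cache[NUM_PCRS];

    static void write_pcr(int i, uint64_t v)
    {
        printf("pcr[%d] <- %#lx\n", i, (unsigned long)v);
    }

    /* Enable: set this event's bits in the cache, then write it back. */
    static void enable_event(int pcr_index, uint64_t event_bits)
    {
        uint64_t val = pcr_cache[pcr_index];
        val |= event_bits;
        pcr_cache[pcr_index] = val;
        write_pcr(pcr_index, pcr_cache[pcr_index]);
    }

    /* Disable: clear only this event's bits, leaving others running. */
    static void disable_event(int pcr_index, uint64_t event_bits)
    {
        uint64_t val = pcr_cache[pcr_index];
        val &= ~event_bits;
        pcr_cache[pcr_index] = val;
        write_pcr(pcr_index, pcr_cache[pcr_index]);
    }

    int main(void)
    {
        enable_event(0, 0x30);
        disable_event(0, 0x10);  /* bit 0x20 stays set */
        return 0;
    }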
/linux-4.4.14/arch/tile/kernel/
D | perf_event.c | 593 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in tile_pmu_stop() local
597 if (__test_and_clear_bit(idx, cpuc->active_mask)) { in tile_pmu_stop()
599 cpuc->events[hwc->idx] = NULL; in tile_pmu_stop()
619 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in tile_pmu_start() local
635 cpuc->events[idx] = event; in tile_pmu_start()
636 __set_bit(idx, cpuc->active_mask); in tile_pmu_start()
653 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in tile_pmu_add() local
663 if (cpuc->n_events == tile_pmu->num_counters) in tile_pmu_add()
666 cpuc->event_list[cpuc->n_events] = event; in tile_pmu_add()
667 cpuc->n_events++; in tile_pmu_add()
[all …]
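tile_pmu_add() appends to a fixed-size per-CPU event list and refuses once every hardware counter is claimed. A minimal model of that capacity gate; the -1 return stands in for the kernel's error code, and the four-counter limit is an assumption:

    #include <stdio.h>

    #define NUM_COUNTERS 4

    struct cpu_hw {
        int   n_events;
        void *event_list[NUM_COUNTERS];
    };

    /* Reject once every counter is spoken for; otherwise append. */
    static int pmu_add(struct cpu_hw *cpuc, void *event)
    {
        if (cpuc->n_events == NUM_COUNTERS)
            return -1;
        cpuc->event_list[cpuc->n_events] = event;
        cpuc->n_events++;
        return 0;
    }

    int main(void)
    {
        struct cpu_hw cpuc = { 0 };
        int dummy;
        for (int i = 0; i < 5; i++)
            printf("add %d -> %d\n", i, pmu_add(&cpuc, &dummy));
        return 0;   /* the fifth add fails: list is full */
    }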
/linux-4.4.14/arch/sh/kernel/
D | perf_event.c | 222 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_stop() local
228 cpuc->events[idx] = NULL; in sh_pmu_stop()
240 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_start() local
250 cpuc->events[idx] = event; in sh_pmu_start()
257 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_del() local
260 __clear_bit(event->hw.idx, cpuc->used_mask); in sh_pmu_del()
267 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in sh_pmu_add() local
274 if (__test_and_set_bit(idx, cpuc->used_mask)) { in sh_pmu_add()
275 idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events); in sh_pmu_add()
279 __set_bit(idx, cpuc->used_mask); in sh_pmu_add()
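sh_pmu_add() tries the event's previous counter first and only then scans for a free one; the blackfin and metag entries below use the same used_mask idiom. A sketch over a plain unsigned long bitmask (the kernel uses __test_and_set_bit()/find_first_zero_bit() on a real bitmap):

    #include <stdio.h>

    #define NUM_EVENTS 8

    /* Retry the previous slot, else take the first free bit. */
    static int alloc_counter(unsigned long *used_mask, int prev_idx)
    {
        int idx = prev_idx;

        if (*used_mask & (1UL << idx)) {            /* previous slot taken */
            for (idx = 0; idx < NUM_EVENTS; idx++)  /* find_first_zero_bit */
                if (!(*used_mask & (1UL << idx)))
                    break;
            if (idx == NUM_EVENTS)
                return -1;                          /* all counters busy */
        }
        *used_mask |= 1UL << idx;
        return idx;
    }

    int main(void)
    {
        unsigned long used = 0;
        printf("%d\n", alloc_counter(&used, 0));  /* 0 */
        printf("%d\n", alloc_counter(&used, 0));  /* 1: slot 0 now busy */
        return 0;
    }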
/linux-4.4.14/arch/blackfin/kernel/
D | perf_event.c | 303 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bfin_pmu_stop() local
309 cpuc->events[idx] = NULL; in bfin_pmu_stop()
321 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bfin_pmu_start() local
331 cpuc->events[idx] = event; in bfin_pmu_start()
338 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bfin_pmu_del() local
341 __clear_bit(event->hw.idx, cpuc->used_mask); in bfin_pmu_del()
348 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bfin_pmu_add() local
355 if (__test_and_set_bit(idx, cpuc->used_mask)) { in bfin_pmu_add()
356 idx = find_first_zero_bit(cpuc->used_mask, MAX_HWEVENTS); in bfin_pmu_add()
360 __set_bit(idx, cpuc->used_mask); in bfin_pmu_add()
[all …]
/linux-4.4.14/arch/metag/kernel/perf/
D | perf_event.c | 261 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in metag_pmu_start() local
288 cpuc->events[idx] = event; in metag_pmu_start()
309 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in metag_pmu_add() local
318 cpuc->used_mask)) { in metag_pmu_add()
325 idx = find_first_zero_bit(cpuc->used_mask, in metag_pmu_add()
332 __set_bit(idx, cpuc->used_mask); in metag_pmu_add()
351 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in metag_pmu_del() local
357 cpuc->events[idx] = NULL; in metag_pmu_del()
358 __clear_bit(idx, cpuc->used_mask); in metag_pmu_del()
810 struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu); in metag_pmu_cpu_notify() local
[all …]
/linux-4.4.14/arch/arm/kernel/
D | perf_event_xscale.c | 149 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in xscale1pmu_handle_irq() local
174 struct perf_event *event = cpuc->events[idx]; in xscale1pmu_handle_irq()
275 xscale1pmu_get_event_idx(struct pmu_hw_events *cpuc, in xscale1pmu_get_event_idx() argument
280 if (test_and_set_bit(XSCALE_CYCLE_COUNTER, cpuc->used_mask)) in xscale1pmu_get_event_idx()
285 if (!test_and_set_bit(XSCALE_COUNTER1, cpuc->used_mask)) in xscale1pmu_get_event_idx()
288 if (!test_and_set_bit(XSCALE_COUNTER0, cpuc->used_mask)) in xscale1pmu_get_event_idx()
496 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in xscale2pmu_handle_irq() local
515 struct perf_event *event = cpuc->events[idx]; in xscale2pmu_handle_irq()
644 xscale2pmu_get_event_idx(struct pmu_hw_events *cpuc, in xscale2pmu_get_event_idx() argument
647 int idx = xscale1pmu_get_event_idx(cpuc, event); in xscale2pmu_get_event_idx()
[all …]
D | perf_event_v6.c | 311 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv6pmu_handle_irq() local
328 struct perf_event *event = cpuc->events[idx]; in armv6pmu_handle_irq()
389 armv6pmu_get_event_idx(struct pmu_hw_events *cpuc, in armv6pmu_get_event_idx() argument
395 if (test_and_set_bit(ARMV6_CYCLE_COUNTER, cpuc->used_mask)) in armv6pmu_get_event_idx()
404 if (!test_and_set_bit(ARMV6_COUNTER1, cpuc->used_mask)) in armv6pmu_get_event_idx()
407 if (!test_and_set_bit(ARMV6_COUNTER0, cpuc->used_mask)) in armv6pmu_get_event_idx()
D | perf_event_v7.c | 848 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv7pmu_handle_irq() local
869 struct perf_event *event = cpuc->events[idx]; in armv7pmu_handle_irq()
927 static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc, in armv7pmu_get_event_idx() argument
937 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask)) in armv7pmu_get_event_idx()
948 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv7pmu_get_event_idx()
1461 static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc, in krait_pmu_get_event_idx() argument
1481 if (test_and_set_bit(bit, cpuc->used_mask)) in krait_pmu_get_event_idx()
1485 idx = armv7pmu_get_event_idx(cpuc, event); in krait_pmu_get_event_idx()
1487 clear_bit(bit, cpuc->used_mask); in krait_pmu_get_event_idx()
1492 static void krait_pmu_clear_event_idx(struct pmu_hw_events *cpuc, in krait_pmu_clear_event_idx() argument
[all …]
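armv7pmu_get_event_idx() reserves the dedicated cycle counter for the cycles event and hands everything else a general-purpose counter; the ARMv6, XScale, and ARMv8 variants above and below follow the same policy. A model of that allocation rule; the counter indices and the event number are illustrative, not the real ARMV7_* values:

    #include <stdio.h>

    #define IDX_CYCLE_COUNTER 0
    #define IDX_COUNTER0      1
    #define NUM_COUNTERS      5
    #define EVT_CPU_CYCLES    0x11   /* illustrative event number */

    /* Cycles event gets the fixed cycle counter; anything else gets
     * the first free general-purpose counter. */
    static int get_event_idx(unsigned long *used_mask, unsigned evt)
    {
        if (evt == EVT_CPU_CYCLES) {
            if (*used_mask & (1UL << IDX_CYCLE_COUNTER))
                return -1;           /* cycle counter already taken */
            *used_mask |= 1UL << IDX_CYCLE_COUNTER;
            return IDX_CYCLE_COUNTER;
        }
        for (int idx = IDX_COUNTER0; idx < NUM_COUNTERS; idx++) {
            if (!(*used_mask & (1UL << idx))) {
                *used_mask |= 1UL << idx;
                return idx;
            }
        }
        return -1;                   /* no counters left */
    }

    int main(void)
    {
        unsigned long used = 0;
        printf("%d\n", get_event_idx(&used, EVT_CPU_CYCLES)); /* 0 */
        printf("%d\n", get_event_idx(&used, 0x08));           /* 1 */
        return 0;
    }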
/linux-4.4.14/arch/mips/kernel/
D | perf_event_mipsxx.c | 311 static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc, in mipsxx_pmu_alloc_counter() argument
334 !test_and_set_bit(i, cpuc->used_mask)) in mipsxx_pmu_alloc_counter()
343 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipsxx_pmu_enable_event() local
347 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0xff) | in mipsxx_pmu_enable_event()
353 cpuc->saved_ctrl[idx] |= in mipsxx_pmu_enable_event()
363 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipsxx_pmu_disable_event() local
369 cpuc->saved_ctrl[idx] = mipsxx_pmu_read_control(idx) & in mipsxx_pmu_disable_event()
371 mipsxx_pmu_write_control(idx, cpuc->saved_ctrl[idx]); in mipsxx_pmu_disable_event()
463 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in mipspmu_add() local
471 idx = mipsxx_pmu_alloc_counter(cpuc, hwc); in mipspmu_add()
[all …]
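mipsxx_pmu_disable_event() snapshots the live control word, with its count-enable bits cleared, into cpuc->saved_ctrl[] before writing it back, so the saved copy preserves the event configuration for later re-enabling. A sketch under an assumed register layout; the enable-bit mask and the register stubs here are invented for illustration:

    #include <stdio.h>
    #include <stdint.h>

    #define NUM_COUNTERS   4
    #define CTL_ENABLE_ALL 0x0000001fu  /* illustrative enable-bit mask */

    /* Stand-in for the real per-counter control registers. */
    static uint32_t hw_ctrl[NUM_COUNTERS];

    static uint32_t read_control(int idx)              { return hw_ctrl[idx]; }
    static void     write_control(int idx, uint32_t v) { hw_ctrl[idx] = v; }

    /* Snapshot the control word minus its enable bits, then write the
     * snapshot back: the counter stops but its configuration is kept. */
    static void disable_event(uint32_t *saved_ctrl, int idx)
    {
        saved_ctrl[idx] = read_control(idx) & ~CTL_ENABLE_ALL;
        write_control(idx, saved_ctrl[idx]);
    }

    int main(void)
    {
        uint32_t saved[NUM_COUNTERS] = { 0 };
        hw_ctrl[2] = 0x8000001f;   /* some event config + enable bits */
        disable_event(saved, 2);
        printf("saved=%#x live=%#x\n", (unsigned)saved[2], (unsigned)hw_ctrl[2]);
        return 0;
    }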
/linux-4.4.14/arch/arm64/kernel/
D | perf_event.c | 430 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv8pmu_handle_irq() local
451 struct perf_event *event = cpuc->events[idx]; in armv8pmu_handle_irq()
509 static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc, in armv8pmu_get_event_idx() argument
519 if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask)) in armv8pmu_get_event_idx()
530 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv8pmu_get_event_idx()