References to sparc_pmu in arch/sparc/kernel/perf_event.c:

  321  static const struct sparc_pmu ultra3_pmu = {
  459  static const struct sparc_pmu niagara1_pmu = {
  594  static const struct sparc_pmu niagara2_pmu = {
  751  static const struct sparc_pmu niagara4_pmu = {
  781  static const struct sparc_pmu sparc_m7_pmu = {
  804  static const struct sparc_pmu *sparc_pmu __read_mostly;
  809  event_id <<= sparc_pmu->upper_shift;
  811  event_id <<= sparc_pmu->lower_shift;
  817  return event_encoding(sparc_pmu->event_mask, idx);
  823  sparc_pmu->upper_nop :
  824  sparc_pmu->lower_nop, idx);
  832  if (sparc_pmu->num_pcrs > 1)
  852  if (sparc_pmu->num_pcrs > 1)
  872  new_raw_count = sparc_pmu->read_pmc(idx);
  916  sparc_pmu->write_pmc(idx, (u64)(-left) & 0xffffffff);
 1022  if (sparc_pmu->num_pcrs == 1) {
 1043  for (i = 0; i < sparc_pmu->num_pcrs; i++)
 1058  for (i = 0; i < sparc_pmu->num_pcrs; i++) {
 1061  val &= ~(sparc_pmu->user_bit | sparc_pmu->priv_bit |
 1062  sparc_pmu->hv_bit | sparc_pmu->irq_bit);
 1167  for (i = 0; i < sparc_pmu->num_pcrs; i++)
 1201  if (!sparc_pmu->cache_map)
 1216  pmap = &((*sparc_pmu->cache_map)[cache_type][cache_op][cache_result]);
 1254  if (n_ev > sparc_pmu->max_hw_events)
 1257  if (!(sparc_pmu->flags & SPARC_PMU_HAS_CONFLICTS)) {
 1320  if (!(sparc_pmu->flags & SPARC_PMU_ALL_EXCLUDES_SAME))
 1381  if (n0 >= sparc_pmu->max_hw_events)
 1434  if (attr->config >= sparc_pmu->max_events)
 1436  pmap = sparc_pmu->event_map(attr->config);
 1465  hwc->config_base = sparc_pmu->irq_bit;
 1467  hwc->config_base |= sparc_pmu->user_bit;
 1469  hwc->config_base |= sparc_pmu->priv_bit;
 1471  hwc->config_base |= sparc_pmu->hv_bit;
 1476  sparc_pmu->max_hw_events - 1,
 1555  if (!sparc_pmu)
 1595  if (!sparc_pmu)
 1603  for (i = 0; i < sparc_pmu->num_pcrs; i++)
 1606  for (i = 0; i < sparc_pmu->num_pic_regs; i++)
 1648  if (sparc_pmu->irq_bit &&
 1649  sparc_pmu->num_pcrs == 1)
 1658  if (sparc_pmu->irq_bit &&
 1659  sparc_pmu->num_pcrs > 1)
 1692  sparc_pmu = &ultra3_pmu;
 1696  sparc_pmu = &niagara1_pmu;
 1701  sparc_pmu = &niagara2_pmu;
 1706  sparc_pmu = &niagara4_pmu;
 1710  sparc_pmu = &sparc_m7_pmu;
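
Read together, these references show one pattern: every chip-specific difference lives in data, as one static const struct sparc_pmu per CPU type (ultra3, niagara1/2/4, M7), while the generic perf code only dereferences a single __read_mostly pointer that init code assigns exactly once from the detected CPU type. Below is a minimal standalone sketch of that dispatch pattern. The field names (read_pmc, write_pmc, num_pcrs, max_hw_events, irq_bit) are taken from the references above; the struct layout, the concrete values, the cpu_type strings, and the supported_pmu() helper are illustrative assumptions, not the kernel's actual definitions.

/*
 * Sketch of the per-chip PMU descriptor pattern seen above.
 * Field names follow the listing; everything else is hypothetical.
 */
#include <stdio.h>
#include <stdint.h>
#include <string.h>

struct sparc_pmu {
	const char	*name;
	uint64_t	(*read_pmc)(int idx);	/* read one perf counter */
	void		(*write_pmc)(int idx, uint64_t val);
	int		num_pcrs;		/* # of perf control regs */
	int		max_hw_events;
	uint64_t	irq_bit;		/* overflow-IRQ enable bit */
};

static uint64_t dummy_read_pmc(int idx)              { (void)idx; return 0; }
static void     dummy_write_pmc(int idx, uint64_t v) { (void)idx; (void)v; }

/* One descriptor per CPU type, as in the ultra3/niagara/M7 tables. */
static const struct sparc_pmu ultra3_pmu = {
	.name = "ultra3",   .read_pmc = dummy_read_pmc,
	.write_pmc = dummy_write_pmc, .num_pcrs = 1,
	.max_hw_events = 2, .irq_bit = 0,            /* placeholder values */
};
static const struct sparc_pmu niagara4_pmu = {
	.name = "niagara4", .read_pmc = dummy_read_pmc,
	.write_pmc = dummy_write_pmc, .num_pcrs = 4,
	.max_hw_events = 4, .irq_bit = 1ULL << 59,   /* placeholder bit */
};

/* The single global selected once at boot; NULL means "no PMU". */
static const struct sparc_pmu *sparc_pmu;

/* Hypothetical stand-in for the kernel's CPU-type switch. */
static int supported_pmu(const char *cpu_type)
{
	if (!strcmp(cpu_type, "ultra3"))
		sparc_pmu = &ultra3_pmu;
	else if (!strcmp(cpu_type, "niagara4"))
		sparc_pmu = &niagara4_pmu;
	return sparc_pmu != NULL;
}

int main(void)
{
	if (!supported_pmu("niagara4"))  /* mirrors the `if (!sparc_pmu)` guards */
		return 1;

	/* Generic code never names a CPU type again; it just
	 * dereferences sparc_pmu, e.g. one loop per control register. */
	for (int i = 0; i < sparc_pmu->num_pcrs; i++)
		sparc_pmu->write_pmc(i, 0);

	printf("%s: %d PCRs, up to %d hw events\n",
	       sparc_pmu->name, sparc_pmu->num_pcrs, sparc_pmu->max_hw_events);
	return 0;
}

The payoff of this table-of-constants approach is visible in the listing: generic paths branch only on descriptor fields (num_pcrs, irq_bit, flags), and supporting a new chip means adding one const table plus one assignment, with __read_mostly documenting that the pointer is written once at boot and read on every hot path thereafter.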