sh_pmu   26  arch/sh/include/asm/perf_event.h      extern int register_sh_pmu(struct sh_pmu *);
sh_pmu   28  arch/sh/kernel/cpu/sh4/perf_event.c   static struct sh_pmu sh7750_pmu;
sh_pmu  239  arch/sh/kernel/cpu/sh4/perf_event.c   static struct sh_pmu sh7750_pmu = {
sh_pmu   53  arch/sh/kernel/cpu/sh4a/perf_event.c  static struct sh_pmu sh4a_pmu;
sh_pmu  273  arch/sh/kernel/cpu/sh4a/perf_event.c  static struct sh_pmu sh4a_pmu = {
sh_pmu   36  arch/sh/kernel/perf_event.c           static struct sh_pmu *sh_pmu __read_mostly;
sh_pmu   57  arch/sh/kernel/perf_event.c           return !!sh_pmu;
sh_pmu   62  arch/sh/kernel/perf_event.c           if (!sh_pmu)
sh_pmu   65  arch/sh/kernel/perf_event.c           return sh_pmu->name;
sh_pmu   71  arch/sh/kernel/perf_event.c           if (!sh_pmu)
sh_pmu   74  arch/sh/kernel/perf_event.c           return sh_pmu->num_events;
sh_pmu   96  arch/sh/kernel/perf_event.c           if (!sh_pmu->cache_events)
sh_pmu  109  arch/sh/kernel/perf_event.c           ev = (*sh_pmu->cache_events)[type][op][result];
sh_pmu  153  arch/sh/kernel/perf_event.c           config = attr->config & sh_pmu->raw_event_mask;
sh_pmu  161  arch/sh/kernel/perf_event.c           if (attr->config >= sh_pmu->max_events)
sh_pmu  164  arch/sh/kernel/perf_event.c           config = sh_pmu->event_map(attr->config);
sh_pmu  197  arch/sh/kernel/perf_event.c           new_raw_count = sh_pmu->read(idx);
sh_pmu  224  arch/sh/kernel/perf_event.c           sh_pmu->disable(hwc, idx);
sh_pmu  249  arch/sh/kernel/perf_event.c           sh_pmu->enable(hwc, idx);
sh_pmu  272  arch/sh/kernel/perf_event.c           idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events);
sh_pmu  273  arch/sh/kernel/perf_event.c           if (idx == sh_pmu->num_events)
sh_pmu  280  arch/sh/kernel/perf_event.c           sh_pmu->disable(hwc, idx);
sh_pmu  330  arch/sh/kernel/perf_event.c           sh_pmu->enable_all();
sh_pmu  338  arch/sh/kernel/perf_event.c           sh_pmu->disable_all();
sh_pmu  360  arch/sh/kernel/perf_event.c           int register_sh_pmu(struct sh_pmu *_pmu)
sh_pmu  362  arch/sh/kernel/perf_event.c           if (sh_pmu)
sh_pmu  364  arch/sh/kernel/perf_event.c           sh_pmu = _pmu;
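
Taken together, these hits outline the SH perf-events backend contract: arch/sh/kernel/perf_event.c keeps a single global struct sh_pmu pointer and dispatches every counter operation through it, while the CPU-specific files each define one instance (sh7750_pmu for SH-4, sh4a_pmu for SH-4A) and hand it to register_sh_pmu(), which refuses a second registration once the pointer is set (presumably returning an error). The sketch below reconstructs the shape of struct sh_pmu purely from the call sites above; the field types and ordering are assumptions, and the authoritative definition lives in arch/sh/include/asm/perf_event.h.

/*
 * Sketch of struct sh_pmu, inferred from the call sites in the
 * cross-reference above. Types and field order are assumptions; see
 * arch/sh/include/asm/perf_event.h for the real definition.
 */
#include <linux/init.h>
#include <linux/perf_event.h>
#include <linux/types.h>

struct sh_pmu {
	const char	*name;			/* reported back by perf core       */
	unsigned int	num_events;		/* counters scanned via used_mask   */
	void		(*disable_all)(void);	/* stop every counter               */
	void		(*enable_all)(void);	/* restart every counter            */
	void		(*enable)(struct hw_perf_event *hwc, int idx);
	void		(*disable)(struct hw_perf_event *hwc, int idx);
	u64		(*read)(int idx);	/* new_raw_count = sh_pmu->read(idx) */
	int		(*event_map)(int config); /* generic event -> hw code      */
	unsigned int	max_events;		/* upper bound on attr->config      */
	unsigned long	raw_event_mask;		/* attr->config & raw_event_mask    */
	/* (*cache_events)[type][op][result] yields the hw event code */
	const int	(*cache_events)[PERF_COUNT_HW_CACHE_MAX]
				       [PERF_COUNT_HW_CACHE_OP_MAX]
				       [PERF_COUNT_HW_CACHE_RESULT_MAX];
};

A backend then registers one such description at boot, as sh7750_pmu and sh4a_pmu do. The names below (example_pmu, example_read) are illustrative, not kernel code:

/* Hypothetical counter-read callback for a two-counter PMU. */
static u64 example_read(int idx)
{
	/* would read hardware counter 'idx' here */
	return 0;
}

static struct sh_pmu example_pmu = {
	.name		= "example",
	.num_events	= 2,
	.read		= example_read,
	/* remaining callbacks elided for brevity */
};

static int __init example_pmu_init(void)
{
	/* Fails if another struct sh_pmu was registered first. */
	return register_sh_pmu(&example_pmu);
}
early_initcall(example_pmu_init);

The single-pointer design means exactly one PMU model can be active per kernel, which fits the hardware: each SH CPU family ships one fixed counter block, so there is never more than one candidate backend at runtime.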