References to the identifier 'hwc', grouped by file; the left-hand numbers are kernel source lines.

arch/alpha/kernel/perf_event.c
  253: struct hw_perf_event *hwc, int idx)
  255: long left = local64_read(&hwc->period_left);
  256: long period = hwc->sample_period;
  261: local64_set(&hwc->period_left, left);
  262: hwc->last_period = period;
  268: local64_set(&hwc->period_left, left);
  269: hwc->last_period = period;
  283: local64_set(&hwc->prev_count, (unsigned long)(-left));
  308: struct hw_perf_event *hwc, int idx, long ovf)
  314: prev_raw_count = local64_read(&hwc->prev_count);
  317: if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  331: local64_sub(delta, &hwc->period_left);
  413: struct hw_perf_event *hwc = &pe->hw;
  414: int idx = hwc->idx;
  417: alpha_perf_event_set_period(pe, hwc, idx);
  421: if (!(hwc->state & PERF_HES_STOPPED))
  436: struct hw_perf_event *hwc = &event->hw;
  469: hwc->state = PERF_HES_UPTODATE;
  471: hwc->state |= PERF_HES_STOPPED;
  488: struct hw_perf_event *hwc = &event->hw;
  510: alpha_perf_event_update(event, hwc, idx, 0);
  526: struct hw_perf_event *hwc = &event->hw;
  528: alpha_perf_event_update(event, hwc, hwc->idx, 0);
  534: struct hw_perf_event *hwc = &event->hw;
  537: if (!(hwc->state & PERF_HES_STOPPED)) {
  538: cpuc->idx_mask &= ~(1UL<<hwc->idx);
  539: hwc->state |= PERF_HES_STOPPED;
  542: if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
  543: alpha_perf_event_update(event, hwc, hwc->idx, 0);
  544: hwc->state |= PERF_HES_UPTODATE;
  548: wrperfmon(PERFMON_CMD_DISABLE, (1UL<<hwc->idx));
  554: struct hw_perf_event *hwc = &event->hw;
  557: if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
  561: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  562: alpha_perf_event_set_period(event, hwc, hwc->idx);
  565: hwc->state = 0;
  567: cpuc->idx_mask |= 1UL<<hwc->idx;
  569: wrperfmon(PERFMON_CMD_ENABLE, (1UL<<hwc->idx));
  605: struct hw_perf_event *hwc = &event->hw;
  642: hwc->event_base = ev;
  656: evtypes[n] = hwc->event_base;
  663: hwc->config_base = 0;
  664: hwc->idx = PMC_NO_INDEX;
  678: if (!hwc->sample_period) {
  679: hwc->sample_period = alpha_pmu->pmc_max_period[0];
  680: hwc->last_period = hwc->sample_period;
  681: local64_set(&hwc->period_left, hwc->sample_period);
  810: struct hw_perf_event *hwc;
  855: hwc = &event->hw;
  856: alpha_perf_event_update(event, hwc, idx, alpha_pmu->pmc_max_period[idx]+1);
  857: perf_sample_data_init(&data, 0, hwc->last_period);
  859: if (alpha_perf_event_set_period(event, hwc, idx)) {

arch/arc/kernel/perf_event.c
  119: struct hw_perf_event *hwc, int idx)
  121: u64 prev_raw_count = local64_read(&hwc->prev_count);
  129: local64_set(&hwc->prev_count, new_raw_count);
  131: local64_sub(delta, &hwc->period_left);
  169: struct hw_perf_event *hwc = &event->hw;
  173: hwc->sample_period = arc_pmu->max_period;
  174: hwc->last_period = hwc->sample_period;
  175: local64_set(&hwc->period_left, hwc->sample_period);
  178: hwc->config = 0;
  183: hwc->config |= ARC_REG_PCT_CONFIG_KERN;
  187: hwc->config |= ARC_REG_PCT_CONFIG_USER;
  196: hwc->config |= arc_pmu->ev_hw_idx[event->attr.config];
  198: (int)event->attr.config, (int)hwc->config,
  206: hwc->config |= arc_pmu->ev_hw_idx[ret];
  208: (int)hwc->config, arc_pmu_ev_hw_map[ret]);
  215: hwc->config |= event->attr.config;
  245: struct hw_perf_event *hwc = &event->hw;
  246: s64 left = local64_read(&hwc->period_left);
  247: s64 period = hwc->sample_period;
  248: int idx = hwc->idx;
  255: local64_set(&hwc->period_left, left);
  256: hwc->last_period = period;
  261: local64_set(&hwc->period_left, left);
  262: hwc->last_period = period;
  270: local64_set(&hwc->prev_count, value);
  291: struct hw_perf_event *hwc = &event->hw;
  292: int idx = hwc->idx;
  298: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  300: hwc->state = 0;
  311: write_aux_reg(ARC_REG_PCT_CONFIG, hwc->config); /* condition */
  316: struct hw_perf_event *hwc = &event->hw;
  317: int idx = hwc->idx;
  363: struct hw_perf_event *hwc = &event->hw;
  364: int idx = hwc->idx;
  371: hwc->idx = idx;
  388: local64_set(&hwc->prev_count, 0);
  390: hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
  418: struct hw_perf_event *hwc;
  434: hwc = &event->hw;
  436: WARN_ON_ONCE(hwc->idx != idx);
  439: perf_sample_data_init(&data, 0, hwc->last_period);
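The alpha and arc hits above are two instances of the same sample-period arming idiom that recurs in most files below: clamp period_left, record last_period, then seed the counter with the negative of the remaining period so the hardware interrupts after that many events. A minimal, hedged sketch of the idiom with generic names (example_set_period() and max_period are illustrative, not code from either file):

    /* Hedged sketch of the common *_event_set_period() pattern. */
    static int example_set_period(struct hw_perf_event *hwc, u64 max_period)
    {
            s64 left = local64_read(&hwc->period_left);
            s64 period = hwc->sample_period;
            int overflow = 0;

            if (unlikely(left <= -period)) {        /* missed an overflow */
                    left = period;
                    local64_set(&hwc->period_left, left);
                    hwc->last_period = period;
                    overflow = 1;
            }
            if (unlikely(left <= 0)) {              /* wrapped past zero */
                    left += period;
                    local64_set(&hwc->period_left, left);
                    hwc->last_period = period;
                    overflow = 1;
            }
            if (left > max_period)                  /* hardware limit */
                    left = max_period;

            /* Counter counts up and interrupts on overflow: start at -left. */
            local64_set(&hwc->prev_count, (u64)-left);
            return overflow;
    }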
arch/arm/kernel/perf_event_v6.c
  238: struct hw_perf_event *hwc = &event->hw;
  239: int counter = hwc->idx;
  256: struct hw_perf_event *hwc = &event->hw;
  257: int counter = hwc->idx;
  273: struct hw_perf_event *hwc = &event->hw;
  275: int idx = hwc->idx;
  282: evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT0_SHIFT) |
  286: evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT1_SHIFT) |
  328: struct hw_perf_event *hwc;
  341: hwc = &event->hw;
  343: perf_sample_data_init(&data, 0, hwc->last_period);
  391: struct hw_perf_event *hwc = &event->hw;
  393: if (ARMV6_PERFCTR_CPU_CYCLES == hwc->config_base) {
  424: struct hw_perf_event *hwc = &event->hw;
  426: int idx = hwc->idx;
  459: struct hw_perf_event *hwc = &event->hw;
  461: int idx = hwc->idx;
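The perf_sample_data_init(&data, 0, hwc->last_period) hits inside the v6 (and, below, v7/xscale) interrupt handlers all sit in the same overflow-handler shape. A hedged skeleton, with NR_COUNTERS, cpuc_events[], and the pmu_* helpers as stand-ins, reusing the example_set_period()/example_event_update() sketches near this listing:

    /* Illustrative overflow IRQ handler; driver details are stand-ins. */
    static irqreturn_t example_pmu_irq(int irq, void *dev)
    {
            struct perf_sample_data data;
            struct pt_regs *regs = get_irq_regs();
            int idx;

            for (idx = 0; idx < NR_COUNTERS; idx++) {
                    struct perf_event *event = cpuc_events[idx]; /* per-CPU table */
                    struct hw_perf_event *hwc;

                    if (!event || !pmu_counter_overflowed(idx))
                            continue;

                    hwc = &event->hw;
                    example_event_update(event, hwc, idx);
                    perf_sample_data_init(&data, 0, hwc->last_period);
                    if (!example_set_period(hwc, MAX_PERIOD))
                            continue;
                    if (perf_event_overflow(event, &data, regs))
                            pmu_disable_counter(idx);   /* throttled */
            }
            return IRQ_HANDLED;
    }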
arch/arm/kernel/perf_event_v7.c
  749: struct hw_perf_event *hwc = &event->hw;
  750: int idx = hwc->idx;
  769: struct hw_perf_event *hwc = &event->hw;
  770: int idx = hwc->idx;
  874: struct hw_perf_event *hwc = &event->hw;
  877: int idx = hwc->idx;
  902: armv7_pmnc_write_evtsel(idx, hwc->config_base);
  920: struct hw_perf_event *hwc = &event->hw;
  923: int idx = hwc->idx;
  975: struct hw_perf_event *hwc;
  988: hwc = &event->hw;
  990: perf_sample_data_init(&data, 0, hwc->last_period);
  1037: struct hw_perf_event *hwc = &event->hw;
  1038: unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT;
  1496: struct hw_perf_event *hwc = &event->hw;
  1497: int idx = hwc->idx;
  1510: if (hwc->config_base & KRAIT_EVENT_MASK)
  1511: krait_clearpmu(hwc->config_base);
  1522: struct hw_perf_event *hwc = &event->hw;
  1523: int idx = hwc->idx;
  1541: if (hwc->config_base & KRAIT_EVENT_MASK)
  1542: krait_evt_setup(idx, hwc->config_base);
  1544: armv7_pmnc_write_evtsel(idx, hwc->config_base);
  1584: struct hw_perf_event *hwc = &event->hw;
  1587: if (hwc->config_base & VENUM_EVENT)
  1611: struct hw_perf_event *hwc = &event->hw;
  1612: unsigned int region = EVENT_REGION(hwc->config_base);
  1613: unsigned int code = EVENT_CODE(hwc->config_base);
  1614: unsigned int group = EVENT_GROUP(hwc->config_base);
  1615: bool venum_event = EVENT_VENUM(hwc->config_base);
  1616: bool krait_event = EVENT_CPU(hwc->config_base);
  1641: struct hw_perf_event *hwc = &event->hw;
  1642: unsigned int region = EVENT_REGION(hwc->config_base);
  1643: unsigned int group = EVENT_GROUP(hwc->config_base);
  1644: bool venum_event = EVENT_VENUM(hwc->config_base);
  1645: bool krait_event = EVENT_CPU(hwc->config_base);
  1829: struct hw_perf_event *hwc = &event->hw;
  1830: int idx = hwc->idx;
  1843: if (hwc->config_base & KRAIT_EVENT_MASK)
  1844: scorpion_clearpmu(hwc->config_base);
  1855: struct hw_perf_event *hwc = &event->hw;
  1856: int idx = hwc->idx;
  1874: if (hwc->config_base & KRAIT_EVENT_MASK)
  1875: scorpion_evt_setup(idx, hwc->config_base);
  1877: armv7_pmnc_write_evtsel(idx, hwc->config_base);
  1917: struct hw_perf_event *hwc = &event->hw;
  1920: if (hwc->config_base & VENUM_EVENT)
  1944: struct hw_perf_event *hwc = &event->hw;
  1945: unsigned int region = EVENT_REGION(hwc->config_base);
  1946: unsigned int group = EVENT_GROUP(hwc->config_base);
  1947: bool venum_event = EVENT_VENUM(hwc->config_base);
  1948: bool scorpion_event = EVENT_CPU(hwc->config_base);
  1971: struct hw_perf_event *hwc = &event->hw;
  1972: unsigned int region = EVENT_REGION(hwc->config_base);
  1973: unsigned int group = EVENT_GROUP(hwc->config_base);
  1974: bool venum_event = EVENT_VENUM(hwc->config_base);
  1975: bool scorpion_event = EVENT_CPU(hwc->config_base);

arch/arm/kernel/perf_event_xscale.c
  175: struct hw_perf_event *hwc;
  183: hwc = &event->hw;
  185: perf_sample_data_init(&data, 0, hwc->last_period);
  208: struct hw_perf_event *hwc = &event->hw;
  210: int idx = hwc->idx;
  219: evt = (hwc->config_base << XSCALE1_COUNT0_EVT_SHFT) |
  224: evt = (hwc->config_base << XSCALE1_COUNT1_EVT_SHFT) |
  244: struct hw_perf_event *hwc = &event->hw;
  246: int idx = hwc->idx;
  278: struct hw_perf_event *hwc = &event->hw;
  279: if (XSCALE_PERFCTR_CCNT == hwc->config_base) {
  327: struct hw_perf_event *hwc = &event->hw;
  328: int counter = hwc->idx;
  348: struct hw_perf_event *hwc = &event->hw;
  349: int counter = hwc->idx;
  521: struct hw_perf_event *hwc;
  529: hwc = &event->hw;
  531: perf_sample_data_init(&data, 0, hwc->last_period);
  554: struct hw_perf_event *hwc = &event->hw;
  556: int idx = hwc->idx;
  568: evtsel |= hwc->config_base << XSCALE2_COUNT0_EVT_SHFT;
  573: evtsel |= hwc->config_base << XSCALE2_COUNT1_EVT_SHFT;
  578: evtsel |= hwc->config_base << XSCALE2_COUNT2_EVT_SHFT;
  583: evtsel |= hwc->config_base << XSCALE2_COUNT3_EVT_SHFT;
  600: struct hw_perf_event *hwc = &event->hw;
  602: int idx = hwc->idx;
  690: struct hw_perf_event *hwc = &event->hw;
  691: int counter = hwc->idx;
  717: struct hw_perf_event *hwc = &event->hw;
  718: int counter = hwc->idx;

arch/arm/mach-imx/mmdc.c
  307: struct hw_perf_event *hwc = &event->hw;
  311: prev_raw_count = local64_read(&hwc->prev_count);
  314: } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  325: struct hw_perf_event *hwc = &event->hw;
  339: local64_set(&hwc->prev_count, 0);
  361: struct hw_perf_event *hwc = &event->hw;
  374: local64_set(&hwc->prev_count, mmdc_pmu_read_counter(pmu_mmdc, cfg));

arch/arm64/kernel/perf_event.c
  427: struct hw_perf_event *hwc = &event->hw;
  428: int idx = hwc->idx;
  464: struct hw_perf_event *hwc = &event->hw;
  465: int idx = hwc->idx;
  493: struct hw_perf_event *hwc = &event->hw;
  494: int idx = hwc->idx;
  505: armv8pmu_write_evtype(idx - 1, hwc->config_base);
  508: armv8pmu_write_evtype(idx, hwc->config_base);
  547: struct hw_perf_event *hwc = &event->hw;
  549: int idx = hwc->idx;
  720: struct hw_perf_event *hwc;
  733: hwc = &event->hw;
  735: perf_sample_data_init(&data, 0, hwc->last_period);
  793: struct hw_perf_event *hwc = &event->hw;
  794: unsigned long evtype = hwc->config_base & ARMV8_PMU_EVTYPE_EVENT;
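The update-side counterpart, visible above in mmdc.c's do/while around local64_cmpxchg() and in the alpha/arc/arm64 hits, accumulates counter deltas without locking: re-read the hardware counter until prev_count can be swapped atomically, then fold the delta into the event. Hedged sketch (read_counter() and COUNTER_MASK are stand-ins):

    static u64 example_event_update(struct perf_event *event,
                                    struct hw_perf_event *hwc, int idx)
    {
            u64 prev_raw_count, new_raw_count;
            s64 delta;

            do {
                    prev_raw_count = local64_read(&hwc->prev_count);
                    new_raw_count = read_counter(idx);
            } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
                                     new_raw_count) != prev_raw_count);

            /* Mask to the counter width so a wrap still gives a positive delta. */
            delta = (new_raw_count - prev_raw_count) & COUNTER_MASK;
            local64_add(delta, &event->count);
            local64_sub(delta, &hwc->period_left);

            return new_raw_count;
    }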
arch/csky/kernel/perf_event.c
  880: struct hw_perf_event *hwc = &event->hw;
  881: s64 left = local64_read(&hwc->period_left);
  882: s64 period = hwc->sample_period;
  887: local64_set(&hwc->period_left, left);
  888: hwc->last_period = period;
  894: local64_set(&hwc->period_left, left);
  895: hwc->last_period = period;
  906: local64_set(&hwc->prev_count, (u64)(-left));
  908: if (hw_raw_write_mapping[hwc->idx] != NULL)
  909: hw_raw_write_mapping[hwc->idx]((u64)(-left) &
  912: cpwcr(HPOFSR, ~BIT(hwc->idx) & cprcr(HPOFSR));
  920: struct hw_perf_event *hwc)
  922: uint64_t prev_raw_count = local64_read(&hwc->prev_count);
  928: hw_raw_read_mapping[hwc->idx](), csky_pmu.count_width - 1);
  935: local64_set(&hwc->prev_count, new_raw_count);
  937: local64_sub(delta, &hwc->period_left);
  970: struct hw_perf_event *hwc = &event->hw;
  980: hwc->idx = ret;
  986: hwc->idx = ret;
  991: hwc->idx = event->attr.config;
  1024: struct hw_perf_event *hwc = &event->hw;
  1025: int idx = hwc->idx;
  1031: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  1033: hwc->state = 0;
  1048: struct hw_perf_event *hwc = &event->hw;
  1049: int idx = hwc->idx;
  1076: struct hw_perf_event *hwc = &event->hw;
  1080: hw_events->events[hwc->idx] = NULL;
  1089: struct hw_perf_event *hwc = &event->hw;
  1091: hw_events->events[hwc->idx] = event;
  1093: hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
  1125: struct hw_perf_event *hwc;
  1137: hwc = &event->hw;
  1139: perf_sample_data_init(&data, 0, hwc->last_period);
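The hwc->state hits (PERF_HES_STOPPED/PERF_HES_UPTODATE in csky above and in nearly every driver in this listing) implement one handshake between the perf core and the PMU callbacks: stop() marks the event stopped and folds the count in on PERF_EF_UPDATE; start() expects a stopped, up-to-date event before rearming. An illustrative skeleton only, reusing the sketches above:

    static void example_pmu_stop(struct perf_event *event, int flags)
    {
            struct hw_perf_event *hwc = &event->hw;

            if (!(hwc->state & PERF_HES_STOPPED)) {
                    pmu_disable_counter(hwc->idx);          /* stand-in */
                    hwc->state |= PERF_HES_STOPPED;
            }
            if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
                    example_event_update(event, hwc, hwc->idx);
                    hwc->state |= PERF_HES_UPTODATE;
            }
    }

    static void example_pmu_start(struct perf_event *event, int flags)
    {
            struct hw_perf_event *hwc = &event->hw;

            if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
                    return;
            if (flags & PERF_EF_RELOAD)
                    WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));

            hwc->state = 0;
            example_set_period(hwc, MAX_PERIOD);
            pmu_enable_counter(hwc->idx);                   /* stand-in */
    }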
arch/mips/kernel/perf_event_mipsxx.c
  286: struct hw_perf_event *hwc)
  294: unsigned long cntr_mask = (hwc->event_base >> 8) & 0xffff;
  371: struct hw_perf_event *hwc,
  374: u64 left = local64_read(&hwc->period_left);
  375: u64 period = hwc->sample_period;
  381: local64_set(&hwc->period_left, left);
  382: hwc->last_period = period;
  387: local64_set(&hwc->period_left, left);
  388: hwc->last_period = period;
  394: local64_set(&hwc->period_left, left);
  397: local64_set(&hwc->prev_count, mipspmu.overflow - left);
  407: struct hw_perf_event *hwc,
  414: prev_raw_count = local64_read(&hwc->prev_count);
  417: if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  424: local64_sub(delta, &hwc->period_left);
  429: struct hw_perf_event *hwc = &event->hw;
  432: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  434: hwc->state = 0;
  437: mipspmu_event_set_period(event, hwc, hwc->idx);
  440: mipsxx_pmu_enable_event(hwc, hwc->idx);
  445: struct hw_perf_event *hwc = &event->hw;
  447: if (!(hwc->state & PERF_HES_STOPPED)) {
  449: mipsxx_pmu_disable_event(hwc->idx);
  451: mipspmu_event_update(event, hwc, hwc->idx);
  452: hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
  459: struct hw_perf_event *hwc = &event->hw;
  466: idx = mipsxx_pmu_alloc_counter(cpuc, hwc);
  480: hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
  495: struct hw_perf_event *hwc = &event->hw;
  496: int idx = hwc->idx;
  509: struct hw_perf_event *hwc = &event->hw;
  512: if (hwc->idx < 0)
  515: mipspmu_event_update(event, hwc, hwc->idx);
  738: struct hw_perf_event *hwc = &event->hw;
  740: mipspmu_event_update(event, hwc, idx);
  742: if (!mipspmu_event_set_period(event, hwc, idx))
  1282: struct hw_perf_event *hwc = &event->hw;
  1312: hwc->config_base = MIPS_PERFCTRL_IE;
  1314: hwc->event_base = mipspmu_perf_event_encode(pev);
  1319: hwc->config_base |= MIPS_PERFCTRL_U;
  1321: hwc->config_base |= MIPS_PERFCTRL_K;
  1323: hwc->config_base |= MIPS_PERFCTRL_EXL;
  1326: hwc->config_base |= MIPS_PERFCTRL_S;
  1328: hwc->config_base &= M_PERFCTL_CONFIG_MASK;
  1333: hwc->idx = -1;
  1334: hwc->config = 0;
  1336: if (!hwc->sample_period) {
  1337: hwc->sample_period = mipspmu.max_period;
  1338: hwc->last_period = hwc->sample_period;
  1339: local64_set(&hwc->period_left, hwc->sample_period);

arch/nds32/kernel/perf_event_cpu.c
  186: struct hw_perf_event *hwc = &event->hw;
  187: s64 left = local64_read(&hwc->period_left);
  188: s64 period = hwc->sample_period;
  192: if (unlikely(period != hwc->last_period))
  193: left = period - (hwc->last_period - left);
  197: local64_set(&hwc->period_left, left);
  198: hwc->last_period = period;
  204: local64_set(&hwc->period_left, left);
  205: hwc->last_period = period;
  216: local64_set(&hwc->prev_count, (u64)(-left));
  252: struct hw_perf_event *hwc;
  265: hwc = &event->hw;
  267: perf_sample_data_init(&data, 0, hwc->last_period);
  416: struct hw_perf_event *hwc = &event->hw;
  419: int idx = hwc->idx;
  441: cpu_pmu->set_event_filter(hwc, &event->attr)) &&
  445: hwc->config_base = 0;
  448: evnum = hwc->config_base;
  467: struct hw_perf_event *hwc = &event->hw;
  470: int idx = hwc->idx;
  498: struct hw_perf_event *hwc = &event->hw;
  499: int idx = hwc->idx;
  527: struct hw_perf_event *hwc = &event->hw;
  528: int idx = hwc->idx;
  555: struct hw_perf_event *hwc = &event->hw;
  567: unsigned long evtype = hwc->config_base & SOFTWARE_EVENT_MASK;
  799: struct hw_perf_event *hwc = &event->hw;
  816: hwc->idx = -1;
  817: hwc->config_base = 0;
  818: hwc->config = 0;
  819: hwc->event_base = 0;
  825: nds32_pmu->set_event_filter(hwc, &event->attr)) &&
  835: hwc->config_base |= (unsigned long)mapping;
  837: if (!hwc->sample_period) {
  844: hwc->sample_period = nds32_pmu->max_period >> 1;
  845: hwc->last_period = hwc->sample_period;
  846: local64_set(&hwc->period_left, hwc->sample_period);
  891: struct hw_perf_event *hwc = &event->hw;
  897: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  899: hwc->state = 0;
  910: struct hw_perf_event *hwc = &event->hw;
  931: hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
  946: struct hw_perf_event *hwc = &event->hw;
  950: prev_raw_count = local64_read(&hwc->prev_count);
  953: if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  964: local64_sub(delta, &hwc->period_left);
  972: struct hw_perf_event *hwc = &event->hw;
  977: if (!(hwc->state & PERF_HES_STOPPED)) {
  980: hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
  988: struct hw_perf_event *hwc = &event->hw;
  989: int idx = hwc->idx;
arch/powerpc/include/asm/perf_event_server.h
  31: unsigned int hwc[], unsigned long mmcr[],

arch/powerpc/perf/isa207-common.c
  366: unsigned int hwc[], unsigned long mmcr[],
  463: hwc[i] = pmc - 1;

arch/powerpc/perf/isa207-common.h
  220: unsigned int hwc[], unsigned long mmcr[],

arch/powerpc/perf/mpc7450-pmu.c
  259: static int mpc7450_compute_mmcr(u64 event[], int n_ev, unsigned int hwc[],
  314: hwc[event_index[class][i]] = pmc - 1;

arch/powerpc/perf/power5+-pmu.c
  451: unsigned int hwc[], unsigned long mmcr[], struct perf_event *pevents[])
  585: hwc[i] = pmc;

arch/powerpc/perf/power5-pmu.c
  382: unsigned int hwc[], unsigned long mmcr[], struct perf_event *pevents[])
  527: hwc[i] = pmc;

arch/powerpc/perf/power6-pmu.c
  174: unsigned int hwc[], unsigned long mmcr[], struct perf_event *pevents[])
  207: hwc[i] = pmc;

arch/powerpc/perf/power7-pmu.c
  245: unsigned int hwc[], unsigned long mmcr[], struct perf_event *pevents[])
  297: hwc[i] = pmc;

arch/powerpc/perf/ppc970-pmu.c
  256: unsigned int hwc[], unsigned long mmcr[], struct perf_event *pevents[])
  377: hwc[i] = pmc;
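Note that the powerpc hits are different in kind from the rest of this listing: there hwc is not a struct hw_perf_event pointer but a plain output array in the per-chip compute_mmcr() hook, recording which hardware PMC was chosen for each event. A hedged sketch of that contract (pick_pmc_for() stands in for the per-chip constraint solving):

    static int example_compute_mmcr(u64 event[], int n_ev,
                                    unsigned int hwc[], unsigned long mmcr[],
                                    struct perf_event *pevents[])
    {
            int i;

            for (i = 0; i < n_ev; ++i) {
                    int pmc = pick_pmc_for(event[i]);   /* stand-in */

                    if (pmc < 0)
                            return -1;      /* no feasible counter assignment */
                    hwc[i] = pmc;           /* PMC selected for event i */
            }
            /* MMCR control-register values are filled into mmcr[] as well. */
            return 0;
    }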
arch/riscv/kernel/perf_event.c
  216: struct hw_perf_event *hwc = &event->hw;
  219: int idx = hwc->idx;
  223: prev_raw_count = local64_read(&hwc->prev_count);
  226: oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  253: struct hw_perf_event *hwc = &event->hw;
  255: WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
  256: hwc->state |= PERF_HES_STOPPED;
  258: if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
  260: hwc->state |= PERF_HES_UPTODATE;
  269: struct hw_perf_event *hwc = &event->hw;
  283: hwc->state = 0;
  291: local64_set(&hwc->prev_count, read_counter(hwc->idx));
  300: struct hw_perf_event *hwc = &event->hw;
  313: hwc->idx = hwc->config;
  314: cpuc->events[hwc->idx] = event;
  317: hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
  331: struct hw_perf_event *hwc = &event->hw;
  333: cpuc->events[hwc->idx] = NULL;
  387: struct hw_perf_event *hwc = &event->hw;
  427: hwc->config = code;
  428: hwc->idx = -1;

arch/s390/include/asm/perf_event.h
  67: #define OVERFLOW_REG(hwc) ((hwc)->extra_reg.config)
  68: #define SFB_ALLOC_REG(hwc) ((hwc)->extra_reg.alloc)
  69: #define TEAR_REG(hwc) ((hwc)->last_tag)
  70: #define SAMPL_RATE(hwc) ((hwc)->event_base)
  71: #define SAMPL_FLAGS(hwc) ((hwc)->config_base)
  72: #define SAMPL_DIAG_MODE(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_DIAG_MODE)
  73: #define SDB_FULL_BLOCKS(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FULL_BLOCKS)
  74: #define SAMPLE_FREQ_MODE(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FREQ_MODE)

arch/s390/kernel/perf_cpum_cf.c
  37: static int validate_ctr_version(const struct hw_perf_event *hwc)
  46: switch (hwc->config_base) {
  54: hwc->config > 79) ||
  55: (cpuhw->info.csvn >= 6 && hwc->config > 83))
  61: if ((cpuhw->info.csvn == 1 && hwc->config > 159) ||
  62: (cpuhw->info.csvn == 2 && hwc->config > 175) ||
  64: && hwc->config > 255) ||
  65: (cpuhw->info.csvn >= 6 && hwc->config > 287))
  95: static int validate_ctr_auth(const struct hw_perf_event *hwc)
  108: ctrs_state = cpumf_ctr_ctl[hwc->config_base];
  205: struct hw_perf_event *hwc = &event->hw;
  266: hwc->config = ev;
  267: hwc->config_base = set;
  288: err = validate_ctr_auth(hwc);
  290: err = validate_ctr_version(hwc);
  365: struct hw_perf_event *hwc = &event->hw;
  367: if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
  370: if (WARN_ON_ONCE(hwc->config == -1))
  374: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  376: hwc->state = 0;
  379: ctr_set_enable(&cpuhw->state, hwc->config_base);
  380: ctr_set_start(&cpuhw->state, hwc->config_base);
  390: atomic_inc(&cpuhw->ctr_set[hwc->config_base]);
  396: struct hw_perf_event *hwc = &event->hw;
  398: if (!(hwc->state & PERF_HES_STOPPED)) {
  403: if (!atomic_dec_return(&cpuhw->ctr_set[hwc->config_base]))
  404: ctr_set_stop(&cpuhw->state, hwc->config_base);
  408: if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {

arch/s390/kernel/perf_cpum_cf_diag.c
  525: struct hw_perf_event *hwc = &event->hw;
  529: __func__, event, event->cpu, flags, hwc->state);
  530: if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
  535: hwc->state = 0;
  536: ctr_set_multiple_enable(&cpuhw->state, hwc->config_base);
  540: ctr_set_multiple_start(&cpuhw->state, hwc->config_base);
  548: struct hw_perf_event *hwc = &event->hw;
  552: __func__, event, event->cpu, flags, hwc->state);
  555: ctr_set_multiple_stop(&cpuhw->state, hwc->config_base);
  561: hwc->state |= PERF_HES_STOPPED;
arch/s390/kernel/perf_cpum_sf.c
  329: static unsigned long sfb_max_limit(struct hw_perf_event *hwc)
  331: return SAMPL_DIAG_MODE(hwc) ? CPUM_SF_MAX_SDB * CPUM_SF_SDB_DIAG_FACTOR
  336: struct hw_perf_event *hwc)
  339: return SFB_ALLOC_REG(hwc);
  340: if (SFB_ALLOC_REG(hwc) > sfb->num_sdb)
  341: return SFB_ALLOC_REG(hwc) - sfb->num_sdb;
  346: struct hw_perf_event *hwc)
  348: return sfb_pending_allocs(sfb, hwc) > 0;
  351: static void sfb_account_allocs(unsigned long num, struct hw_perf_event *hwc)
  354: num = min_t(unsigned long, num, sfb_max_limit(hwc) - SFB_ALLOC_REG(hwc));
  356: SFB_ALLOC_REG(hwc) += num;
  359: static void sfb_init_allocs(unsigned long num, struct hw_perf_event *hwc)
  361: SFB_ALLOC_REG(hwc) = 0;
  362: sfb_account_allocs(num, hwc);
  371: static int allocate_buffers(struct cpu_hw_sf *cpuhw, struct hw_perf_event *hwc)
  401: freq = sample_rate_to_freq(&cpuhw->qsi, SAMPL_RATE(hwc));
  415: sfb_init_allocs(n_sdb, hwc);
  422: SAMPL_RATE(hwc), freq, n_sdb, sfb_max_limit(hwc),
  426: sfb_pending_allocs(&cpuhw->sfb, hwc));
  459: struct hw_perf_event *hwc)
  463: if (!OVERFLOW_REG(hwc))
  473: ratio = DIV_ROUND_UP(100 * OVERFLOW_REG(hwc) * cpuhw->sfb.num_sdb,
  474: sample_rate_to_freq(&cpuhw->qsi, SAMPL_RATE(hwc)));
  479: sfb_account_allocs(num, hwc);
  482: " num=%lu\n", OVERFLOW_REG(hwc), ratio, num);
  483: OVERFLOW_REG(hwc) = 0;
  498: struct hw_perf_event *hwc)
  503: num = sfb_pending_allocs(sfb, hwc);
  523: if (sfb_has_pending_allocs(sfb, hwc))
  527: sfb_pending_allocs(sfb, hwc));
  607: static void hw_init_period(struct hw_perf_event *hwc, u64 period)
  609: hwc->sample_period = period;
  610: hwc->last_period = hwc->sample_period;
  611: local64_set(&hwc->period_left, hwc->sample_period);
  614: static void hw_reset_registers(struct hw_perf_event *hwc,
  618: TEAR_REG(hwc) = (unsigned long) sdbt_origin;
  742: struct hw_perf_event *hwc = &event->hw;
  750: SAMPL_FLAGS(hwc) |= PERF_CPUM_SF_FREQ_MODE;
  757: SAMPL_RATE(hwc) = rate;
  758: hw_init_period(hwc, SAMPL_RATE(hwc));
  762: SAMPLE_FREQ_MODE(hwc));
  771: struct hw_perf_event *hwc = &event->hw;
  826: SAMPL_FLAGS(hwc) = PERF_CPUM_SF_BASIC_MODE;
  836: SAMPL_FLAGS(hwc) |= PERF_CPUM_SF_DIAG_MODE;
  841: SAMPL_FLAGS(hwc) |= PERF_CPUM_SF_FULL_BLOCKS;
  848: hwc->extra_reg.reg = REG_OVERFLOW;
  849: OVERFLOW_REG(hwc) = 0;
  862: err = allocate_buffers(cpuhw, hwc);
  869: err = allocate_buffers(cpuhw, hwc);
  937: struct hw_perf_event *hwc;
  958: hwc = &cpuhw->event->hw;
  959: if (!(SAMPL_DIAG_MODE(hwc))) {
  964: sfb_account_overflows(cpuhw, hwc);
  965: if (sfb_has_pending_allocs(&cpuhw->sfb, hwc))
  966: extend_sampling_buffer(&cpuhw->sfb, hwc);
  1245: struct hw_perf_event *hwc = &event->hw;
  1258: if (flush_all && SDB_FULL_BLOCKS(hwc))
  1261: sdbt = (unsigned long *) TEAR_REG(hwc);
  1309: TEAR_REG(hwc) = (unsigned long) sdbt;
  1320: OVERFLOW_REG(hwc) = DIV_ROUND_UP(OVERFLOW_REG(hwc) +
  1332: SAMPL_RATE(hwc) += DIV_ROUND_UP(SAMPL_RATE(hwc), 10);
  1335: DIV_ROUND_UP(SAMPL_RATE(hwc), 10));
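The s390 sampling-facility hits read best together with the arch/s390/include/asm/perf_event.h macros above: cpum_sf does not use the generic period fields directly, so SAMPL_RATE(), SAMPL_FLAGS(), TEAR_REG() and friends alias otherwise-spare hw_perf_event members. hw_init_period(), reconstructed from the indexed lines, then seeds the generic bookkeeping from the chosen sampling interval:

    /* As indexed at perf_cpum_sf.c lines 607-611; called as
     * hw_init_period(hwc, SAMPL_RATE(hwc)) once the rate is fixed.
     */
    static void hw_init_period(struct hw_perf_event *hwc, u64 period)
    {
            hwc->sample_period = period;
            hwc->last_period = hwc->sample_period;
            local64_set(&hwc->period_left, hwc->sample_period);
    }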
arch/sh/kernel/cpu/sh4/perf_event.c
  208: static void sh7750_pmu_disable(struct hw_perf_event *hwc, int idx)
  217: static void sh7750_pmu_enable(struct hw_perf_event *hwc, int idx)
  220: __raw_writew(hwc->config | PMCR_PMEN | PMCR_PMST, PMCR(idx));

arch/sh/kernel/cpu/sh4a/perf_event.c
  232: static void sh4a_pmu_disable(struct hw_perf_event *hwc, int idx)
  241: static void sh4a_pmu_enable(struct hw_perf_event *hwc, int idx)
  251: tmp |= (hwc->config << 6) | CCBR_CMDS | CCBR_PPCE;

arch/sh/kernel/perf_event.c
  121: struct hw_perf_event *hwc = &event->hw;
  171: hwc->config |= config;
  177: struct hw_perf_event *hwc, int idx)
  196: prev_raw_count = local64_read(&hwc->prev_count);
  199: if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  220: struct hw_perf_event *hwc = &event->hw;
  221: int idx = hwc->idx;
  224: sh_pmu->disable(hwc, idx);
  238: struct hw_perf_event *hwc = &event->hw;
  239: int idx = hwc->idx;
  249: sh_pmu->enable(hwc, idx);
  265: struct hw_perf_event *hwc = &event->hw;
  266: int idx = hwc->idx;
  277: hwc->idx = idx;
  280: sh_pmu->disable(hwc, idx);

arch/sparc/kernel/perf_event.c
  827: static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, int idx)
  845: static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, int idx)
  864: struct hw_perf_event *hwc, int idx)
  871: prev_raw_count = local64_read(&hwc->prev_count);
  874: if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  882: local64_sub(delta, &hwc->period_left);
  888: struct hw_perf_event *hwc, int idx)
  890: s64 left = local64_read(&hwc->period_left);
  891: s64 period = hwc->sample_period;
  895: if (unlikely(period != hwc->last_period))
  896: left = period - (hwc->last_period - left);
  900: local64_set(&hwc->period_left, left);
  901: hwc->last_period = period;
  907: local64_set(&hwc->period_left, left);
  908: hwc->last_period = period;
  914: local64_set(&hwc->prev_count, (u64)-left);
  957: struct hw_perf_event *hwc = &cp->hw;
  958: int idx = hwc->idx;
  964: sparc_perf_event_set_period(cp, hwc, idx);
  969: if (hwc->state & PERF_HES_ARCH) {
  973: hwc->state = 0;
  992: struct hw_perf_event *hwc = &cp->hw;
  993: int idx = hwc->idx;
  1153: struct hw_perf_event *hwc = &event->hw;
  1155: sparc_perf_event_update(event, hwc, idx);
  1419: struct hw_perf_event *hwc = &event->hw;
  1455: hwc->event_base = perf_event_encode(pmap);
  1461: hwc->event_base = attr->config;
  1465: hwc->config_base = sparc_pmu->irq_bit;
  1467: hwc->config_base |= sparc_pmu->user_bit;
  1469: hwc->config_base |= sparc_pmu->priv_bit;
  1471: hwc->config_base |= sparc_pmu->hv_bit;
  1481: events[n] = hwc->event_base;
  1490: hwc->idx = PIC_NO_INDEX;
  1498: if (!hwc->sample_period) {
  1499: hwc->sample_period = MAX_PERIOD;
  1500: hwc->last_period = hwc->sample_period;
  1501: local64_set(&hwc->period_left, hwc->sample_period);
  1655: struct hw_perf_event *hwc;
  1662: hwc = &event->hw;
  1663: val = sparc_perf_event_update(event, hwc, idx);
  1667: perf_sample_data_init(&data, 0, hwc->last_period);
  1668: if (!sparc_perf_event_set_period(event, hwc, idx))

arch/x86/events/amd/core.c
  308: static inline unsigned int amd_get_event_code(struct hw_perf_event *hwc)
  310: return ((hwc->config >> 24) & 0x0f00) | (hwc->config & 0x00ff);
  313: static inline bool amd_is_pair_event_code(struct hw_perf_event *hwc)
  318: switch (amd_get_event_code(hwc)) {
  342: static inline int amd_is_nb_event(struct hw_perf_event *hwc)
  344: return (hwc->config & 0xe0) == 0xe0;
  435: struct hw_perf_event *hwc = &event->hw;
  457: if (new == -1 || hwc->idx == idx)
  810: struct hw_perf_event *hwc = &event->hw;
  811: unsigned int event_code = amd_get_event_code(hwc);
  817: if (!(hwc->config & 0x0000F000ULL))
  819: if (!(hwc->config & 0x00000F00ULL))
  823: if (hweight_long(hwc->config & ARCH_PERFMON_EVENTSEL_UMASK) <= 1)
  848: if (hweight_long(hwc->config & ARCH_PERFMON_EVENTSEL_UMASK) <= 1)
  885: struct hw_perf_event *hwc = &event->hw;
  887: if (amd_is_pair_event_code(hwc))

arch/x86/events/amd/ibs.c
  111: perf_event_set_period(struct hw_perf_event *hwc, u64 min, u64 max, u64 *hw_period)
  113: s64 left = local64_read(&hwc->period_left);
  114: s64 period = hwc->sample_period;
  122: local64_set(&hwc->period_left, left);
  123: hwc->last_period = period;
  129: local64_set(&hwc->period_left, left);
  130: hwc->last_period = period;
  156: struct hw_perf_event *hwc = &event->hw;
  168: prev_raw_count = local64_read(&hwc->prev_count);
  169: if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  185: local64_sub(delta, &hwc->period_left);
  258: struct hw_perf_event *hwc = &event->hw;
  279: if (hwc->sample_period) {
  283: if (!event->attr.sample_freq && hwc->sample_period & 0x0f)
  290: hwc->sample_period &= ~0x0FULL;
  291: if (!hwc->sample_period)
  292: hwc->sample_period = 0x10;
  297: hwc->sample_period = event->attr.sample_period;
  300: if (!hwc->sample_period)
  307: hwc->last_period = hwc->sample_period;
  308: local64_set(&hwc->period_left, hwc->sample_period);
  310: hwc->config_base = perf_ibs->msr;
  311: hwc->config = config;
  317: struct hw_perf_event *hwc, u64 *period)
  322: overflow = perf_event_set_period(hwc, 1<<4, perf_ibs->max_period, period);
  323: local64_set(&hwc->prev_count, 0);
  364: struct hw_perf_event *hwc, u64 config)
  366: wrmsrl(hwc->config_base, hwc->config | config | perf_ibs->enable_mask);
  377: struct hw_perf_event *hwc, u64 config)
  381: wrmsrl(hwc->config_base, config);
  383: wrmsrl(hwc->config_base, config);
  394: struct hw_perf_event *hwc = &event->hw;
  399: if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
  402: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  403: hwc->state = 0;
  405: perf_ibs_set_period(perf_ibs, hwc, &period);
  412: perf_ibs_enable_event(perf_ibs, hwc, period >> 4);
  419: struct hw_perf_event *hwc = &event->hw;
  430: if (!stopping && (hwc->state & PERF_HES_UPTODATE))
  433: rdmsrl(hwc->config_base, config);
  443: perf_ibs_disable_event(perf_ibs, hwc, config);
  454: WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
  455: hwc->state |= PERF_HES_STOPPED;
  458: if (hwc->state & PERF_HES_UPTODATE)
  468: hwc->state |= PERF_HES_UPTODATE;
  573: struct hw_perf_event *hwc;
  599: hwc = &event->hw;
  600: msr = hwc->config_base;
  608: perf_sample_data_init(&data, 0, hwc->last_period);
  609: if (!perf_ibs_set_period(perf_ibs, hwc, &period))
  675: perf_ibs_enable_event(perf_ibs, hwc, period);

arch/x86/events/amd/iommu.c
  209: struct hw_perf_event *hwc = &event->hw;
  227: hwc->conf = event->attr.config;
  228: hwc->conf1 = event->attr.config1;
  241: struct hw_perf_event *hwc = &ev->hw;
  242: u8 bank = hwc->iommu_bank;
  243: u8 cntr = hwc->iommu_cntr;
  246: reg = GET_CSOURCE(hwc);
  249: reg = GET_DEVID_MASK(hwc);
  250: reg = GET_DEVID(hwc) | (reg << 32);
  255: reg = GET_PASID_MASK(hwc);
  256: reg = GET_PASID(hwc) | (reg << 32);
  261: reg = GET_DOMID_MASK(hwc);
  262: reg = GET_DOMID(hwc) | (reg << 32);
  271: struct hw_perf_event *hwc = &event->hw;
  274: amd_iommu_pc_set_reg(iommu, hwc->iommu_bank, hwc->iommu_cntr,
  280: struct hw_perf_event *hwc = &event->hw;
  282: if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
  285: WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
  286: hwc->state = 0;
  289: u64 prev_raw_count = local64_read(&hwc->prev_count);
  292: amd_iommu_pc_set_reg(iommu, hwc->iommu_bank, hwc->iommu_cntr,
  304: struct hw_perf_event *hwc = &event->hw;
  307: if (amd_iommu_pc_get_reg(iommu, hwc->iommu_bank, hwc->iommu_cntr,
  314: prev = local64_read(&hwc->prev_count);
  315: if (local64_cmpxchg(&hwc->prev_count, prev, count) != prev)
  326: struct hw_perf_event *hwc = &event->hw;
  328: if (hwc->state & PERF_HES_UPTODATE)
  332: WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
  333: hwc->state |= PERF_HES_STOPPED;
  335: if (hwc->state & PERF_HES_UPTODATE)
  339: hwc->state |= PERF_HES_UPTODATE;
  361: struct hw_perf_event *hwc = &event->hw;
  369: hwc->iommu_bank, hwc->iommu_cntr);
arch/x86/events/amd/power.c
  49: struct hw_perf_event *hwc = &event->hw;
  53: prev_pwr_acc = hwc->pwr_acc;
  54: prev_ptsc = hwc->ptsc;
  93: struct hw_perf_event *hwc = &event->hw;
  96: if (!(hwc->state & PERF_HES_STOPPED))
  97: hwc->state |= PERF_HES_STOPPED;
  100: if ((mode & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
  106: hwc->state |= PERF_HES_UPTODATE;
  112: struct hw_perf_event *hwc = &event->hw;
  114: hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;

arch/x86/events/amd/uncore.c
  84: struct hw_perf_event *hwc = &event->hw;
  93: prev = local64_read(&hwc->prev_count);
  94: rdpmcl(hwc->event_base_rdpmc, new);
  95: local64_set(&hwc->prev_count, new);
  103: struct hw_perf_event *hwc = &event->hw;
  106: wrmsrl(hwc->event_base, (u64)local64_read(&hwc->prev_count));
  108: hwc->state = 0;
  109: wrmsrl(hwc->config_base, (hwc->config | ARCH_PERFMON_EVENTSEL_ENABLE));
  115: struct hw_perf_event *hwc = &event->hw;
  117: wrmsrl(hwc->config_base, hwc->config);
  118: hwc->state |= PERF_HES_STOPPED;
  120: if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
  122: hwc->state |= PERF_HES_UPTODATE;
  130: struct hw_perf_event *hwc = &event->hw;
  133: if (hwc->idx != -1 && uncore->events[hwc->idx] == event)
  138: hwc->idx = i;
  144: hwc->idx = -1;
  147: hwc->idx = i;
  153: if (hwc->idx == -1)
  156: hwc->config_base = uncore->msr_base + (2 * hwc->idx);
  157: hwc->event_base = uncore->msr_base + 1 + (2 * hwc->idx);
  158: hwc->event_base_rdpmc = uncore->rdpmc_base + hwc->idx;
  159: hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
  171: struct hw_perf_event *hwc = &event->hw;
  180: hwc->idx = -1;
  186: struct hw_perf_event *hwc = &event->hw;
  199: hwc->config = event->attr.config & AMD64_RAW_EVENT_MASK_NB;
  200: hwc->idx = -1;
  215: hwc->config |= (1ULL << (AMD64_L3_THREAD_SHIFT + thread) &
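The amd/uncore hits show the MSR addressing behind config_base/event_base: event-select and counter registers are interleaved, so both derive from the counter index, and a separate rdpmc index is kept for fast userspace-style reads. A sketch of the assignment step, reconstructed from the indexed lines (field layout as shown there):

    static void example_uncore_assign(struct amd_uncore *uncore,
                                      struct hw_perf_event *hwc, int idx)
    {
            hwc->idx = idx;
            hwc->config_base = uncore->msr_base + (2 * idx);    /* PERF_CTL */
            hwc->event_base = uncore->msr_base + 1 + (2 * idx); /* PERF_CTR */
            hwc->event_base_rdpmc = uncore->rdpmc_base + idx;
            hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
    }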
arch/x86/events/core.c
  70: struct hw_perf_event *hwc = &event->hw;
  73: int idx = hwc->idx;
  87: prev_raw_count = local64_read(&hwc->prev_count);
  88: rdpmcl(hwc->event_base_rdpmc, new_raw_count);
  90: if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
  106: local64_sub(delta, &hwc->period_left);
  300: set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event)
  331: hwc->config |= val;
  415: struct hw_perf_event *hwc = &event->hw;
  419: hwc->sample_period = x86_pmu.max_period;
  420: hwc->last_period = hwc->sample_period;
  421: local64_set(&hwc->period_left, hwc->sample_period);
  428: return set_ext_hw_attr(hwc, event);
  446: hwc->config |= config;
  668: struct hw_perf_event *hwc = &cpuc->events[idx]->hw;
  673: __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
  877: struct hw_perf_event *hwc;
  922: hwc = &cpuc->event_list[i]->hw;
  926: if (hwc->idx == -1)
  930: if (!test_bit(hwc->idx, c->idxmsk))
  934: if (test_bit(hwc->idx, used_mask))
  937: __set_bit(hwc->idx, used_mask);
  939: assign[i] = hwc->idx;
  1061: struct hw_perf_event *hwc = &event->hw;
  1063: hwc->idx = cpuc->assign[i];
  1064: hwc->last_cpu = smp_processor_id();
  1065: hwc->last_tag = ++cpuc->tags[i];
  1067: if (hwc->idx == INTEL_PMC_IDX_FIXED_BTS) {
  1068: hwc->config_base = 0;
  1069: hwc->event_base = 0;
  1070: } else if (hwc->idx >= INTEL_PMC_IDX_FIXED) {
  1071: hwc->config_base = MSR_ARCH_PERFMON_FIXED_CTR_CTRL;
  1072: hwc->event_base = MSR_ARCH_PERFMON_FIXED_CTR0 + (hwc->idx - INTEL_PMC_IDX_FIXED);
  1073: hwc->event_base_rdpmc = (hwc->idx - INTEL_PMC_IDX_FIXED) | 1<<30;
  1075: hwc->config_base = x86_pmu_config_addr(hwc->idx);
  1076: hwc->event_base = x86_pmu_event_addr(hwc->idx);
  1077: hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx);
  1102: static inline int match_prev_assignment(struct hw_perf_event *hwc,
  1106: return hwc->idx == cpuc->assign[i] &&
  1107: hwc->last_cpu == smp_processor_id() &&
  1108: hwc->last_tag == cpuc->tags[i];
  1117: struct hw_perf_event *hwc;
  1136: hwc = &event->hw;
  1144: if (hwc->idx == -1 ||
  1145: match_prev_assignment(hwc, cpuc, i)
  1152: if (hwc->state & PERF_HES_STOPPED)
  1153: hwc->state |= PERF_HES_ARCH;
  1163: hwc = &event->hw;
  1165: if (!match_prev_assignment(hwc, cpuc, i))
  1170: if (hwc->state & PERF_HES_ARCH)
  1193: struct hw_perf_event *hwc = &event->hw;
  1194: s64 left = local64_read(&hwc->period_left);
  1195: s64 period = hwc->sample_period;
  1196: int ret = 0, idx = hwc->idx;
  1206: local64_set(&hwc->period_left, left);
  1207: hwc->last_period = period;
  1213: local64_set(&hwc->period_left, left);
  1214: hwc->last_period = period;
  1235: local64_set(&hwc->prev_count, (u64)-left);
  1237: wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask);
  1245: wrmsrl(hwc->event_base,
hwc 1274 arch/x86/events/core.c hwc = &event->hw;
hwc 1281 arch/x86/events/core.c hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
hwc 1283 arch/x86/events/core.c hwc->state |= PERF_HES_ARCH;
hwc 1415 arch/x86/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 1417 arch/x86/events/core.c if (test_bit(hwc->idx, cpuc->active_mask)) {
hwc 1419 arch/x86/events/core.c __clear_bit(hwc->idx, cpuc->active_mask);
hwc 1420 arch/x86/events/core.c cpuc->events[hwc->idx] = NULL;
hwc 1421 arch/x86/events/core.c WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
hwc 1422 arch/x86/events/core.c hwc->state |= PERF_HES_STOPPED;
hwc 1425 arch/x86/events/core.c if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
hwc 1431 arch/x86/events/core.c hwc->state |= PERF_HES_UPTODATE;
hwc 517 arch/x86/events/intel/bts.c struct hw_perf_event *hwc = &event->hw;
hwc 529 arch/x86/events/intel/bts.c if (hwc->state & PERF_HES_STOPPED)
hwc 2132 arch/x86/events/intel/core.c static void intel_pmu_disable_fixed(struct hw_perf_event *hwc)
hwc 2134 arch/x86/events/intel/core.c int idx = hwc->idx - INTEL_PMC_IDX_FIXED;
hwc 2139 arch/x86/events/intel/core.c rdmsrl(hwc->config_base, ctrl_val);
hwc 2141 arch/x86/events/intel/core.c wrmsrl(hwc->config_base, ctrl_val);
hwc 2151 arch/x86/events/intel/core.c struct hw_perf_event *hwc = &event->hw;
hwc 2154 arch/x86/events/intel/core.c if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) {
hwc 2160 arch/x86/events/intel/core.c cpuc->intel_ctrl_guest_mask &= ~(1ull << hwc->idx);
hwc 2161 arch/x86/events/intel/core.c cpuc->intel_ctrl_host_mask &= ~(1ull << hwc->idx);
hwc 2162 arch/x86/events/intel/core.c cpuc->intel_cp_status &= ~(1ull << hwc->idx);
hwc 2164 arch/x86/events/intel/core.c if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL))
hwc 2165 arch/x86/events/intel/core.c intel_pmu_disable_fixed(hwc);
hwc 2195 arch/x86/events/intel/core.c struct hw_perf_event *hwc = &event->hw;
hwc 2196 arch/x86/events/intel/core.c int idx = hwc->idx - INTEL_PMC_IDX_FIXED;
hwc 2206 arch/x86/events/intel/core.c if (hwc->config & ARCH_PERFMON_EVENTSEL_USR)
hwc 2208 arch/x86/events/intel/core.c if (hwc->config & ARCH_PERFMON_EVENTSEL_OS)
hwc 2214 arch/x86/events/intel/core.c if (x86_pmu.version > 2 && hwc->config & ARCH_PERFMON_EVENTSEL_ANY)
hwc 2225 arch/x86/events/intel/core.c rdmsrl(hwc->config_base, ctrl_val);
hwc 2228 arch/x86/events/intel/core.c wrmsrl(hwc->config_base, ctrl_val);
hwc 2233 arch/x86/events/intel/core.c struct hw_perf_event *hwc = &event->hw;
hwc 2236 arch/x86/events/intel/core.c if (unlikely(hwc->idx == INTEL_PMC_IDX_FIXED_BTS)) {
hwc 2240 arch/x86/events/intel/core.c intel_pmu_enable_bts(hwc->config);
hwc 2245 arch/x86/events/intel/core.c cpuc->intel_ctrl_guest_mask |= (1ull << hwc->idx);
hwc 2247 arch/x86/events/intel/core.c cpuc->intel_ctrl_host_mask |= (1ull << hwc->idx);
hwc 2250 arch/x86/events/intel/core.c cpuc->intel_cp_status |= (1ull << hwc->idx);
hwc 2255 arch/x86/events/intel/core.c if (unlikely(hwc->config_base == MSR_ARCH_PERFMON_FIXED_CTR_CTRL)) {
hwc 2260 arch/x86/events/intel/core.c __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
hwc 3045 arch/x86/events/intel/core.c struct hw_perf_event *hwc = &event->hw;
hwc 3059 arch/x86/events/intel/core.c if (hwc->flags & PERF_X86_EVENT_EXCL_ACCT) {
hwc 3060 arch/x86/events/intel/core.c hwc->flags &= ~PERF_X86_EVENT_EXCL_ACCT;
hwc 3069 arch/x86/events/intel/core.c if (hwc->idx >= 0) {
hwc 3080 arch/x86/events/intel/core.c xl->state[hwc->idx] = INTEL_EXCL_UNUSED;
hwc 3407 arch/x86/events/intel/core.c struct hw_perf_event *hwc = &cpuc->events[idx]->hw;
hwc 3413 arch/x86/events/intel/core.c __x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
hwc 360 arch/x86/events/intel/cstate.c struct hw_perf_event *hwc = &event->hw;
hwc 364 arch/x86/events/intel/cstate.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 367 arch/x86/events/intel/cstate.c if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
hwc 1062 arch/x86/events/intel/ds.c struct hw_perf_event *hwc = &event->hw;
hwc 1066 arch/x86/events/intel/ds.c if (hwc->flags & PERF_X86_EVENT_LARGE_PEBS)
hwc 1068 arch/x86/events/intel/ds.c if (hwc->flags & PERF_X86_EVENT_PEBS_VIA_PT)
hwc 1088 arch/x86/events/intel/ds.c struct hw_perf_event *hwc = &event->hw;
hwc 1099 arch/x86/events/intel/ds.c wrmsrl(MSR_RELOAD_PMC0 + hwc->idx, ds->pebs_event_reset[hwc->idx]);
hwc 1105 arch/x86/events/intel/ds.c struct hw_perf_event *hwc = &event->hw;
hwc 1108 arch/x86/events/intel/ds.c hwc->config &= ~ARCH_PERFMON_EVENTSEL_INT;
hwc 1110 arch/x86/events/intel/ds.c cpuc->pebs_enabled |= 1ULL << hwc->idx;
hwc 1113 arch/x86/events/intel/ds.c cpuc->pebs_enabled |= 1ULL << (hwc->idx + 32);
hwc 1118 arch/x86/events/intel/ds.c hwc->config |= ICL_EVENTSEL_ADAPTIVE;
hwc 1129 arch/x86/events/intel/ds.c if (hwc->flags & PERF_X86_EVENT_AUTO_RELOAD) {
hwc 1130 arch/x86/events/intel/ds.c unsigned int idx = hwc->idx;
hwc 1135 arch/x86/events/intel/ds.c (u64)(-hwc->sample_period) & x86_pmu.cntval_mask;
hwc 1137 arch/x86/events/intel/ds.c ds->pebs_event_reset[hwc->idx] = 0;
hwc 1146 arch/x86/events/intel/ds.c struct hw_perf_event *hwc = &event->hw;
hwc 1150 arch/x86/events/intel/ds.c if (hwc->flags & PERF_X86_EVENT_LARGE_PEBS)
hwc 1152 arch/x86/events/intel/ds.c if (hwc->flags & PERF_X86_EVENT_PEBS_VIA_PT)
hwc 1161 arch/x86/events/intel/ds.c struct hw_perf_event *hwc = &event->hw;
hwc 1167 arch/x86/events/intel/ds.c cpuc->pebs_enabled &= ~(1ULL << hwc->idx);
hwc 1171 arch/x86/events/intel/ds.c cpuc->pebs_enabled &= ~(1ULL << (hwc->idx + 32));
hwc 1180 arch/x86/events/intel/ds.c hwc->config |= ARCH_PERFMON_EVENTSEL_INT;
hwc 1668 arch/x86/events/intel/ds.c struct hw_perf_event *hwc = &event->hw;
hwc 1670 arch/x86/events/intel/ds.c u64 period = hwc->sample_period;
hwc 1681 arch/x86/events/intel/ds.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 1682 arch/x86/events/intel/ds.c rdpmcl(hwc->event_base_rdpmc, new_raw_count);
hwc 1683 arch/x86/events/intel/ds.c local64_set(&hwc->prev_count, new_raw_count);
hwc 1716 arch/x86/events/intel/ds.c local64_set(&hwc->period_left, -new);
hwc 1734 arch/x86/events/intel/ds.c struct hw_perf_event *hwc = &event->hw;
hwc 1740 arch/x86/events/intel/ds.c if (hwc->flags & PERF_X86_EVENT_AUTO_RELOAD) {
hwc 179 arch/x86/events/intel/knc.c struct hw_perf_event *hwc = &event->hw;
hwc 182 arch/x86/events/intel/knc.c val = hwc->config;
hwc 185 arch/x86/events/intel/knc.c (void)wrmsrl_safe(hwc->config_base + hwc->idx, val);
hwc 190 arch/x86/events/intel/knc.c struct hw_perf_event *hwc = &event->hw;
hwc 193 arch/x86/events/intel/knc.c val = hwc->config;
hwc 196 arch/x86/events/intel/knc.c (void)wrmsrl_safe(hwc->config_base + hwc->idx, val);
hwc 855 arch/x86/events/intel/p4.c static inline int p4_pmu_clear_cccr_ovf(struct hw_perf_event *hwc)
hwc 860 arch/x86/events/intel/p4.c rdmsrl(hwc->config_base, v);
hwc 862 arch/x86/events/intel/p4.c wrmsrl(hwc->config_base, v & ~P4_CCCR_OVF);
hwc 873 arch/x86/events/intel/p4.c rdmsrl(hwc->event_base, v);
hwc 905 arch/x86/events/intel/p4.c struct hw_perf_event *hwc = &event->hw;
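
[Editor's note] The intel/cstate.c hits (lines 360-367), like many drivers later in this index (arm-cci, arm_pmu, hisi_uncore_pmu, qcom_l2_pmu), use the same lock-free counter-update idiom: re-read the hardware counter until prev_count can be swapped atomically, so a concurrent NMI or interrupt cannot lose a delta. A hedged sketch; read_hw_counter() is a hypothetical stand-in for the driver-specific read.

static void sketch_event_update(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	u64 prev_raw_count, new_raw_count;
	s64 delta;

	do {
		prev_raw_count = local64_read(&hwc->prev_count);
		new_raw_count = read_hw_counter(event);	/* hypothetical */
		/* retry if someone updated prev_count under us */
	} while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
				 new_raw_count) != prev_raw_count);

	delta = new_raw_count - prev_raw_count;
	local64_add(delta, &event->count);
	local64_sub(delta, &hwc->period_left);
}
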
hwc 912 arch/x86/events/intel/p4.c (void)wrmsrl_safe(hwc->config_base,
hwc 913 arch/x86/events/intel/p4.c p4_config_unpack_cccr(hwc->config) & ~P4_CCCR_ENABLE & ~P4_CCCR_OVF & ~P4_CCCR_RESERVED);
hwc 951 arch/x86/events/intel/p4.c struct hw_perf_event *hwc = &event->hw;
hwc 952 arch/x86/events/intel/p4.c int thread = p4_ht_config_thread(hwc->config);
hwc 953 arch/x86/events/intel/p4.c u64 escr_conf = p4_config_unpack_escr(p4_clear_ht_bit(hwc->config));
hwc 954 arch/x86/events/intel/p4.c unsigned int idx = p4_config_unpack_event(hwc->config);
hwc 965 arch/x86/events/intel/p4.c WARN_ON_ONCE(p4_is_event_cascaded(hwc->config));
hwc 966 arch/x86/events/intel/p4.c WARN_ON_ONCE(hwc->idx == 1);
hwc 972 arch/x86/events/intel/p4.c cccr = p4_config_unpack_cccr(hwc->config);
hwc 978 arch/x86/events/intel/p4.c p4_pmu_enable_pebs(hwc->config);
hwc 981 arch/x86/events/intel/p4.c (void)wrmsrl_safe(hwc->config_base,
hwc 1003 arch/x86/events/intel/p4.c struct hw_perf_event *hwc;
hwc 1020 arch/x86/events/intel/p4.c hwc = &event->hw;
hwc 1022 arch/x86/events/intel/p4.c WARN_ON_ONCE(hwc->idx != idx);
hwc 1025 arch/x86/events/intel/p4.c overflow = p4_pmu_clear_cccr_ovf(hwc);
hwc 1034 arch/x86/events/intel/p4.c perf_sample_data_init(&data, 0, hwc->last_period);
hwc 1066 arch/x86/events/intel/p4.c static void p4_pmu_swap_config_ts(struct hw_perf_event *hwc, int cpu)
hwc 1073 arch/x86/events/intel/p4.c if (!p4_should_swap_ts(hwc->config, cpu))
hwc 1081 arch/x86/events/intel/p4.c escr = p4_config_unpack_escr(hwc->config);
hwc 1082 arch/x86/events/intel/p4.c cccr = p4_config_unpack_cccr(hwc->config);
hwc 1095 arch/x86/events/intel/p4.c hwc->config = p4_config_pack_escr(escr);
hwc 1096 arch/x86/events/intel/p4.c hwc->config |= p4_config_pack_cccr(cccr);
hwc 1097 arch/x86/events/intel/p4.c hwc->config |= P4_CONFIG_HT;
hwc 1109 arch/x86/events/intel/p4.c hwc->config = p4_config_pack_escr(escr);
hwc 1110 arch/x86/events/intel/p4.c hwc->config |= p4_config_pack_cccr(cccr);
hwc 1111 arch/x86/events/intel/p4.c hwc->config &= ~P4_CONFIG_HT;
hwc 1211 arch/x86/events/intel/p4.c struct hw_perf_event *hwc;
hwc 1223 arch/x86/events/intel/p4.c hwc = &cpuc->event_list[i]->hw;
hwc 1236 arch/x86/events/intel/p4.c bind = p4_config_get_bind(hwc->config);
hwc 1241 arch/x86/events/intel/p4.c if (hwc->idx != -1 && !p4_should_swap_ts(hwc->config, cpu)) {
hwc 1242 arch/x86/events/intel/p4.c cntr_idx = hwc->idx;
hwc 1244 arch/x86/events/intel/p4.c assign[i] = hwc->idx;
hwc 1253 arch/x86/events/intel/p4.c config_alias = p4_get_alias_event(hwc->config);
hwc 1256 arch/x86/events/intel/p4.c hwc->config = config_alias;
hwc 1276 arch/x86/events/intel/p4.c if (p4_should_swap_ts(hwc->config, cpu))
hwc 1277 arch/x86/events/intel/p4.c hwc->idx = -1;
hwc 1278 arch/x86/events/intel/p4.c p4_pmu_swap_config_ts(hwc, cpu);
hwc 161 arch/x86/events/intel/p6.c struct hw_perf_event *hwc = &event->hw;
hwc 164 arch/x86/events/intel/p6.c (void)wrmsrl_safe(hwc->config_base, val);
hwc 169 arch/x86/events/intel/p6.c struct hw_perf_event *hwc = &event->hw;
hwc 172 arch/x86/events/intel/p6.c val = hwc->config;
hwc 181 arch/x86/events/intel/p6.c (void)wrmsrl_safe(hwc->config_base, val);
hwc 1430 arch/x86/events/intel/pt.c struct hw_perf_event *hwc = &event->hw;
hwc 1445 arch/x86/events/intel/pt.c hwc->state = 0;
hwc 1456 arch/x86/events/intel/pt.c hwc->state = PERF_HES_STOPPED;
hwc 1507 arch/x86/events/intel/pt.c struct hw_perf_event *hwc = &event->hw;
hwc 1516 arch/x86/events/intel/pt.c if (hwc->state == PERF_HES_STOPPED)
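
[Editor's note] The knc.c and p6.c hits show the simplest enable/disable shape in this index: the event-select MSR is rewritten with or without an enable bit, using wrmsrl_safe() so a #GP fault on models lacking the MSR is tolerated. A sketch under the assumption of a single EVENTSEL-style control register; ENABLE_BIT is a placeholder for the model-specific constant.

static void sketch_pmu_enable_event(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;

	(void)wrmsrl_safe(hwc->config_base, hwc->config | ENABLE_BIT);
}

static void sketch_pmu_disable_event(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;

	(void)wrmsrl_safe(hwc->config_base, hwc->config & ~ENABLE_BIT);
}
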
hwc 1519 arch/x86/events/intel/pt.c hwc->state = PERF_HES_STOPPED;
hwc 178 arch/x86/events/intel/rapl.c struct hw_perf_event *hwc = &event->hw;
hwc 184 arch/x86/events/intel/rapl.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 187 arch/x86/events/intel/rapl.c if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
hwc 276 arch/x86/events/intel/rapl.c struct hw_perf_event *hwc = &event->hw;
hwc 282 arch/x86/events/intel/rapl.c if (!(hwc->state & PERF_HES_STOPPED)) {
hwc 290 arch/x86/events/intel/rapl.c WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
hwc 291 arch/x86/events/intel/rapl.c hwc->state |= PERF_HES_STOPPED;
hwc 295 arch/x86/events/intel/rapl.c if ((mode & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
hwc 301 arch/x86/events/intel/rapl.c hwc->state |= PERF_HES_UPTODATE;
hwc 310 arch/x86/events/intel/rapl.c struct hw_perf_event *hwc = &event->hw;
hwc 315 arch/x86/events/intel/rapl.c hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
hwc 218 arch/x86/events/intel/uncore.c struct hw_perf_event *hwc = &event->hw;
hwc 220 arch/x86/events/intel/uncore.c hwc->idx = idx;
hwc 221 arch/x86/events/intel/uncore.c hwc->last_tag = ++box->tags[idx];
hwc 223 arch/x86/events/intel/uncore.c if (uncore_pmc_fixed(hwc->idx)) {
hwc 224 arch/x86/events/intel/uncore.c hwc->event_base = uncore_fixed_ctr(box);
hwc 225 arch/x86/events/intel/uncore.c hwc->config_base = uncore_fixed_ctl(box);
hwc 229 arch/x86/events/intel/uncore.c hwc->config_base = uncore_event_ctl(box, hwc->idx);
hwc 230 arch/x86/events/intel/uncore.c hwc->event_base = uncore_perf_ctr(box, hwc->idx);
hwc 427 arch/x86/events/intel/uncore.c struct hw_perf_event *hwc;
hwc 440 arch/x86/events/intel/uncore.c hwc = &box->event_list[i]->hw;
hwc 444 arch/x86/events/intel/uncore.c if (hwc->idx == -1)
hwc 448 arch/x86/events/intel/uncore.c if (!test_bit(hwc->idx, c->idxmsk))
hwc 452 arch/x86/events/intel/uncore.c if (test_bit(hwc->idx, used_mask))
hwc 455 arch/x86/events/intel/uncore.c __set_bit(hwc->idx, used_mask);
hwc 457 arch/x86/events/intel/uncore.c assign[i] = hwc->idx;
hwc 512 arch/x86/events/intel/uncore.c struct hw_perf_event *hwc = &event->hw;
hwc 515 arch/x86/events/intel/uncore.c if (uncore_pmc_freerunning(hwc->idx)) {
hwc 523 arch/x86/events/intel/uncore.c if (__test_and_clear_bit(hwc->idx, box->active_mask)) {
hwc 526 arch/x86/events/intel/uncore.c box->events[hwc->idx] = NULL;
hwc 527 arch/x86/events/intel/uncore.c WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
hwc 528 arch/x86/events/intel/uncore.c hwc->state |= PERF_HES_STOPPED;
hwc 534 arch/x86/events/intel/uncore.c if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
hwc 540 arch/x86/events/intel/uncore.c hwc->state |= PERF_HES_UPTODATE;
hwc 547 arch/x86/events/intel/uncore.c struct hw_perf_event *hwc = &event->hw;
hwc 559 arch/x86/events/intel/uncore.c if (uncore_pmc_freerunning(hwc->idx)) {
hwc 569 arch/x86/events/intel/uncore.c hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
hwc 571 arch/x86/events/intel/uncore.c hwc->state |= PERF_HES_ARCH;
hwc 580 arch/x86/events/intel/uncore.c hwc = &event->hw;
hwc 582 arch/x86/events/intel/uncore.c if (hwc->idx == assign[i] &&
hwc 583 arch/x86/events/intel/uncore.c hwc->last_tag == box->tags[assign[i]])
hwc 589 arch/x86/events/intel/uncore.c if (hwc->state & PERF_HES_STOPPED)
hwc 590 arch/x86/events/intel/uncore.c hwc->state |= PERF_HES_ARCH;
hwc 598 arch/x86/events/intel/uncore.c hwc = &event->hw;
hwc 600 arch/x86/events/intel/uncore.c if (hwc->idx != assign[i] ||
hwc 601 arch/x86/events/intel/uncore.c hwc->last_tag != box->tags[assign[i]])
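
[Editor's note] The uncore.c stop path above (lines 512-540) is a textbook instance of the PERF_HES_* protocol: PERF_HES_STOPPED records that the counter is halted, PERF_HES_UPTODATE that event->count already reflects the hardware. A condensed sketch; hw_disable_counter() and fold_counter_delta() are hypothetical driver hooks, not kernel APIs.

static void sketch_pmu_stop(struct perf_event *event, int flags)
{
	struct hw_perf_event *hwc = &event->hw;

	if (!(hwc->state & PERF_HES_STOPPED)) {
		hw_disable_counter(hwc->idx);	/* hypothetical */
		hwc->state |= PERF_HES_STOPPED;
	}

	if ((flags & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
		fold_counter_delta(event);	/* hypothetical: sync event->count */
		hwc->state |= PERF_HES_UPTODATE;
	}
}
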
hwc 606 arch/x86/events/intel/uncore.c if (hwc->state & PERF_HES_ARCH)
hwc 700 arch/x86/events/intel/uncore.c struct hw_perf_event *hwc = &event->hw;
hwc 712 arch/x86/events/intel/uncore.c if (hwc->sample_period)
hwc 746 arch/x86/events/intel/uncore.c hwc->config = 0ULL;
hwc 748 arch/x86/events/intel/uncore.c hwc->config = event->attr.config;
hwc 760 arch/x86/events/intel/uncore.c hwc->config = event->attr.config &
hwc 247 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 249 arch/x86/events/intel/uncore_nhmex.c if (hwc->idx == UNCORE_PMC_IDX_FIXED)
hwc 250 arch/x86/events/intel/uncore_nhmex.c wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0);
hwc 252 arch/x86/events/intel/uncore_nhmex.c wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);
hwc 254 arch/x86/events/intel/uncore_nhmex.c wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);
hwc 352 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 353 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 354 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 357 arch/x86/events/intel/uncore_nhmex.c ctr = (hwc->config & NHMEX_B_PMON_CTR_MASK) >>
hwc 359 arch/x86/events/intel/uncore_nhmex.c ev_sel = (hwc->config & NHMEX_B_PMON_CTL_EV_SEL_MASK) >>
hwc 379 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 380 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 381 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 387 arch/x86/events/intel/uncore_nhmex.c wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0 |
hwc 388 arch/x86/events/intel/uncore_nhmex.c (hwc->config & NHMEX_B_PMON_CTL_EV_SEL_MASK));
hwc 443 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 444 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 445 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 448 arch/x86/events/intel/uncore_nhmex.c if ((hwc->config & NHMEX_PMON_CTL_EV_SEL_MASK) !=
hwc 464 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 465 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 466 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 474 arch/x86/events/intel/uncore_nhmex.c wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);
hwc 631 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 632 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 659 arch/x86/events/intel/uncore_nhmex.c hwc->config += idx << NHMEX_M_PMON_CTL_INC_SEL_SHIFT;
hwc 661 arch/x86/events/intel/uncore_nhmex.c hwc->config -= idx << NHMEX_M_PMON_CTL_INC_SEL_SHIFT;
hwc 837 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 838 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 839 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 862 arch/x86/events/intel/uncore_nhmex.c wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);
hwc 946 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 947 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 952 arch/x86/events/intel/uncore_nhmex.c hwc->config -= 1 << NHMEX_R_PMON_CTL_EV_SEL_SHIFT;
hwc 955 arch/x86/events/intel/uncore_nhmex.c hwc->config += 1 << NHMEX_R_PMON_CTL_EV_SEL_SHIFT;
hwc 980 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 981 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 982 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 1024 arch/x86/events/intel/uncore_nhmex.c (er->config == (hwc->config >> 32) &&
hwc 1028 arch/x86/events/intel/uncore_nhmex.c er->config = (hwc->config >> 32);
hwc 1088 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 1104 arch/x86/events/intel/uncore_nhmex.c hwc->config |= event->attr.config & (~0ULL << 32);
hwc 1113 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event *hwc = &event->hw;
hwc 1114 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 1115 arch/x86/events/intel/uncore_nhmex.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 1135 arch/x86/events/intel/uncore_nhmex.c hwc->config >> 32);
hwc 1141 arch/x86/events/intel/uncore_nhmex.c hwc->config >> 32);
hwc 1147 arch/x86/events/intel/uncore_nhmex.c wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0 |
hwc 1148 arch/x86/events/intel/uncore_nhmex.c (hwc->config & NHMEX_R_PMON_CTL_EV_SEL_MASK));
hwc 123 arch/x86/events/intel/uncore_snb.c struct hw_perf_event *hwc = &event->hw;
hwc 125 arch/x86/events/intel/uncore_snb.c if (hwc->idx < UNCORE_PMC_IDX_FIXED)
hwc 126 arch/x86/events/intel/uncore_snb.c wrmsrl(hwc->config_base, hwc->config | SNB_UNC_CTL_EN);
hwc 128 arch/x86/events/intel/uncore_snb.c wrmsrl(hwc->config_base, SNB_UNC_CTL_EN);
hwc 452 arch/x86/events/intel/uncore_snb.c struct hw_perf_event *hwc = &event->hw;
hwc 465 arch/x86/events/intel/uncore_snb.c if (hwc->sample_period)
hwc 935 arch/x86/events/intel/uncore_snb.c struct hw_perf_event *hwc = &event->hw;
hwc 937 arch/x86/events/intel/uncore_snb.c if (hwc->idx < UNCORE_PMC_IDX_FIXED)
hwc 938 arch/x86/events/intel/uncore_snb.c wrmsrl(hwc->config_base, hwc->config | SNB_UNC_CTL_EN);
hwc 940 arch/x86/events/intel/uncore_snb.c wrmsrl(hwc->config_base, NHM_UNC_FIXED_CTR_CTL_EN);
hwc 490 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 492 arch/x86/events/intel/uncore_snbep.c pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
hwc 498 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 500 arch/x86/events/intel/uncore_snbep.c pci_write_config_dword(pdev, hwc->config_base, hwc->config);
hwc 506 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 509 arch/x86/events/intel/uncore_snbep.c pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count);
hwc 510 arch/x86/events/intel/uncore_snbep.c pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1);
hwc 551 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 552 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 557 arch/x86/events/intel/uncore_snbep.c wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
hwc 563 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 565 arch/x86/events/intel/uncore_snbep.c wrmsrl(hwc->config_base, hwc->config);
hwc 972 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
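
[Editor's note] The uncore_snbep.c hits at lines 490-510 drive counters that live in PCI configuration space: enable is a single dword write of config | SNBEP_PMON_CTL_EN, and a 64-bit count is assembled from two 32-bit reads. The read side, reproduced as a self-contained sketch of that split-dword layout:

static u64 sketch_snbep_uncore_pci_read_counter(struct pci_dev *pdev,
						struct hw_perf_event *hwc)
{
	u64 count = 0;

	/* low dword into the bottom half, high dword into the top half */
	pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count);
	pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1);

	return count;
}
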
hwc 973 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 982 arch/x86/events/intel/uncore_snbep.c hwc->config += new_idx - reg1->idx;
hwc 1045 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 1046 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 1047 arch/x86/events/intel/uncore_snbep.c int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;
hwc 1101 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 1102 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 1103 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 1105 arch/x86/events/intel/uncore_snbep.c if ((hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK) == 0x38) {
hwc 1118 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 1119 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 1120 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
hwc 1139 arch/x86/events/intel/uncore_snbep.c pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
hwc 1633 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 1634 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 1642 arch/x86/events/intel/uncore_snbep.c wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
hwc 1735 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 1737 arch/x86/events/intel/uncore_snbep.c pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx],
hwc 1738 arch/x86/events/intel/uncore_snbep.c hwc->config | SNBEP_PMON_CTL_EN);
hwc 1744 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 1746 arch/x86/events/intel/uncore_snbep.c pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], hwc->config);
hwc 1752 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 1755 arch/x86/events/intel/uncore_snbep.c pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx], (u32 *)&count);
hwc 1756 arch/x86/events/intel/uncore_snbep.c pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1);
hwc 2149 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 2153 arch/x86/events/intel/uncore_snbep.c pci_write_config_dword(pdev, hwc->config_base,
hwc 2154 arch/x86/events/intel/uncore_snbep.c hwc->config | KNL_PMON_FIXED_CTL_EN);
hwc 2156 arch/x86/events/intel/uncore_snbep.c pci_write_config_dword(pdev, hwc->config_base,
hwc 2157 arch/x86/events/intel/uncore_snbep.c hwc->config | SNBEP_PMON_CTL_EN);
hwc 2617 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 2618 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 2626 arch/x86/events/intel/uncore_snbep.c wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
hwc 2712 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 2713 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 2714 arch/x86/events/intel/uncore_snbep.c int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;
hwc 2809 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 2812 arch/x86/events/intel/uncore_snbep.c pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx], (u32 *)&count);
hwc 2813 arch/x86/events/intel/uncore_snbep.c pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1);
hwc 3564 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 3566 arch/x86/events/intel/uncore_snbep.c wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
hwc 4082 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 4083 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 4088 arch/x86/events/intel/uncore_snbep.c wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
hwc 4177 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 4178 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
hwc 4179 arch/x86/events/intel/uncore_snbep.c int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;
hwc 4435 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 4440 arch/x86/events/intel/uncore_snbep.c writel(hwc->config | SNBEP_PMON_CTL_EN,
hwc 4441 arch/x86/events/intel/uncore_snbep.c box->io_addr + hwc->config_base);
hwc 4447 arch/x86/events/intel/uncore_snbep.c struct hw_perf_event *hwc = &event->hw;
hwc 4452 arch/x86/events/intel/uncore_snbep.c writel(hwc->config, box->io_addr + hwc->config_base);
hwc 835 arch/x86/events/perf_event.h static inline void __x86_pmu_enable_event(struct hw_perf_event *hwc,
hwc 840 arch/x86/events/perf_event.h if (hwc->extra_reg.reg)
hwc 841 arch/x86/events/perf_event.h wrmsrl(hwc->extra_reg.reg, hwc->extra_reg.config);
hwc 842 arch/x86/events/perf_event.h wrmsrl(hwc->config_base, (hwc->config | enable_mask) & ~disable_mask);
hwc 855 arch/x86/events/perf_event.h struct hw_perf_event *hwc = &event->hw;
hwc 857 arch/x86/events/perf_event.h wrmsrl(hwc->config_base, hwc->config);
hwc 927 arch/x86/events/perf_event.h struct hw_perf_event *hwc = &event->hw;
hwc 933 arch/x86/events/perf_event.h hw_event = hwc->config & INTEL_ARCH_EVENT_MASK;
hwc 941 arch/x86/events/perf_event.h struct hw_perf_event *hwc = &event->hw;
hwc 943 arch/x86/events/perf_event.h return intel_pmu_has_bts_period(event, hwc->sample_period);
hwc 137 arch/xtensa/kernel/perf_event.c struct hw_perf_event *hwc, int idx)
hwc 143 arch/xtensa/kernel/perf_event.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 145 arch/xtensa/kernel/perf_event.c } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
hwc 151 arch/xtensa/kernel/perf_event.c local64_sub(delta, &hwc->period_left);
hwc 155 arch/xtensa/kernel/perf_event.c struct hw_perf_event *hwc, int idx)
hwc 163 arch/xtensa/kernel/perf_event.c s64 period = hwc->sample_period;
hwc 165 arch/xtensa/kernel/perf_event.c left = local64_read(&hwc->period_left);
hwc 168 arch/xtensa/kernel/perf_event.c local64_set(&hwc->period_left, left);
hwc 169 arch/xtensa/kernel/perf_event.c hwc->last_period = period;
hwc 173 arch/xtensa/kernel/perf_event.c local64_set(&hwc->period_left, left);
hwc 174 arch/xtensa/kernel/perf_event.c hwc->last_period = period;
hwc 181 arch/xtensa/kernel/perf_event.c local64_set(&hwc->prev_count, -left);
hwc 242 arch/xtensa/kernel/perf_event.c struct hw_perf_event *hwc = &event->hw;
hwc 243 arch/xtensa/kernel/perf_event.c int idx = hwc->idx;
hwc 250 arch/xtensa/kernel/perf_event.c xtensa_perf_event_set_period(event, hwc, idx);
hwc 253 arch/xtensa/kernel/perf_event.c hwc->state = 0;
hwc 255 arch/xtensa/kernel/perf_event.c set_er(hwc->config, XTENSA_PMU_PMCTRL(idx));
hwc 260 arch/xtensa/kernel/perf_event.c struct hw_perf_event *hwc = &event->hw;
hwc 261 arch/xtensa/kernel/perf_event.c int idx = hwc->idx;
hwc 263 arch/xtensa/kernel/perf_event.c if (!(hwc->state & PERF_HES_STOPPED)) {
hwc 267 arch/xtensa/kernel/perf_event.c hwc->state |= PERF_HES_STOPPED;
hwc 284 arch/xtensa/kernel/perf_event.c struct hw_perf_event *hwc = &event->hw;
hwc 285 arch/xtensa/kernel/perf_event.c int idx = hwc->idx;
hwc 294 arch/xtensa/kernel/perf_event.c hwc->idx = idx;
hwc 298 arch/xtensa/kernel/perf_event.c hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
hwc 370 arch/xtensa/kernel/perf_event.c struct hw_perf_event *hwc = &event->hw;
hwc 377 arch/xtensa/kernel/perf_event.c xtensa_perf_event_update(event, hwc, i);
hwc 378 arch/xtensa/kernel/perf_event.c last_period = hwc->last_period;
hwc 379 arch/xtensa/kernel/perf_event.c if (xtensa_perf_event_set_period(event, hwc, i)) {
hwc 788 drivers/clk/clk-qoriq.c struct mux_hwclock *hwc = to_mux_hwclock(hw);
hwc 791 drivers/clk/clk-qoriq.c if (idx >= hwc->num_parents)
hwc 794 drivers/clk/clk-qoriq.c clksel = hwc->parent_to_clksel[idx];
hwc 795 drivers/clk/clk-qoriq.c cg_out(hwc->cg, (clksel << CLKSEL_SHIFT) & CLKSEL_MASK, hwc->reg);
hwc 802 drivers/clk/clk-qoriq.c struct mux_hwclock *hwc = to_mux_hwclock(hw);
hwc 806 drivers/clk/clk-qoriq.c clksel = (cg_in(hwc->cg, hwc->reg) & CLKSEL_MASK) >> CLKSEL_SHIFT;
hwc 808 drivers/clk/clk-qoriq.c ret = hwc->clksel_to_parent[clksel];
hwc 810 drivers/clk/clk-qoriq.c pr_err("%s: mux at %p has bad clksel\n", __func__, hwc->reg);
hwc 831 drivers/clk/clk-qoriq.c struct mux_hwclock *hwc,
hwc 836 drivers/clk/clk-qoriq.c if (!(hwc->info->clksel[idx].flags & CLKSEL_VALID))
hwc 839 drivers/clk/clk-qoriq.c pll = hwc->info->clksel[idx].pll;
hwc 840 drivers/clk/clk-qoriq.c div = hwc->info->clksel[idx].div;
hwc 846 drivers/clk/clk-qoriq.c struct mux_hwclock *hwc,
hwc 865 drivers/clk/clk-qoriq.c hwc->clksel_to_parent[i] = -1;
hwc 867 drivers/clk/clk-qoriq.c div = get_pll_div(cg, hwc, i);
hwc 873 drivers/clk/clk-qoriq.c if (hwc->info->clksel[i].flags & CLKSEL_80PCT &&
hwc 882 drivers/clk/clk-qoriq.c hwc->parent_to_clksel[j] = i;
hwc 883 drivers/clk/clk-qoriq.c hwc->clksel_to_parent[i] = j;
hwc 890 drivers/clk/clk-qoriq.c init.num_parents = hwc->num_parents = j;
hwc 892 drivers/clk/clk-qoriq.c hwc->hw.init = &init;
hwc 893 drivers/clk/clk-qoriq.c hwc->cg = cg;
hwc 895 drivers/clk/clk-qoriq.c clk = clk_register(NULL, &hwc->hw);
hwc 899 drivers/clk/clk-qoriq.c kfree(hwc);
hwc 908 drivers/clk/clk-qoriq.c struct mux_hwclock *hwc;
hwc 914 drivers/clk/clk-qoriq.c hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);
hwc 915 drivers/clk/clk-qoriq.c if (!hwc)
hwc 919 drivers/clk/clk-qoriq.c hwc->reg = cg->regs + 0x70000 + 0x20 * idx;
hwc 921 drivers/clk/clk-qoriq.c hwc->reg = cg->regs + 0x20 * idx;
hwc 923 drivers/clk/clk-qoriq.c hwc->info = cg->info.cmux_groups[cg->info.cmux_to_group[idx]];
hwc 932 drivers/clk/clk-qoriq.c clksel = (cg_in(cg, hwc->reg) & CLKSEL_MASK) >> CLKSEL_SHIFT;
hwc 933 drivers/clk/clk-qoriq.c div = get_pll_div(cg, hwc, clksel);
hwc 935 drivers/clk/clk-qoriq.c kfree(hwc);
hwc 950 drivers/clk/clk-qoriq.c return create_mux_common(cg, hwc, &cmux_ops, min_rate, max_rate,
hwc 956 drivers/clk/clk-qoriq.c struct mux_hwclock *hwc;
hwc 958 drivers/clk/clk-qoriq.c hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);
hwc 959 drivers/clk/clk-qoriq.c if (!hwc)
hwc 962 drivers/clk/clk-qoriq.c hwc->reg = cg->regs + 0x20 * idx + 0x10;
hwc 963 drivers/clk/clk-qoriq.c hwc->info = cg->info.hwaccel[idx];
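
[Editor's note] The clk-qoriq.c hits implement a clock mux whose parent index is translated to a hardware CLKSEL field through the parent_to_clksel[] and clksel_to_parent[] tables built in create_mux_common(). A sketch of the two ops assembled from the calls shown above; error handling for an invalid clksel is elided, so treat it as illustrative.

static int sketch_cmux_set_parent(struct clk_hw *hw, u8 idx)
{
	struct mux_hwclock *hwc = to_mux_hwclock(hw);
	u32 clksel;

	if (idx >= hwc->num_parents)
		return -EINVAL;

	clksel = hwc->parent_to_clksel[idx];
	cg_out(hwc->cg, (clksel << CLKSEL_SHIFT) & CLKSEL_MASK, hwc->reg);

	return 0;
}

static u8 sketch_cmux_get_parent(struct clk_hw *hw)
{
	struct mux_hwclock *hwc = to_mux_hwclock(hw);
	u32 clksel = (cg_in(hwc->cg, hwc->reg) & CLKSEL_MASK) >> CLKSEL_SHIFT;

	return hwc->clksel_to_parent[clksel];
}
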
hwc 965 drivers/clk/clk-qoriq.c return create_mux_common(cg, hwc, &hwaccel_ops, 0, ULONG_MAX, 0,
hwc 48 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 55 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c hwc->conf = event->attr.config;
hwc 63 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 68 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
hwc 71 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
hwc 72 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c hwc->state = 0;
hwc 77 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c pe->adev->df_funcs->pmc_start(pe->adev, hwc->conf, 1);
hwc 79 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c pe->adev->df_funcs->pmc_start(pe->adev, hwc->conf, 0);
hwc 92 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 100 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c prev = local64_read(&hwc->prev_count);
hwc 104 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c pe->adev->df_funcs->pmc_get_count(pe->adev, hwc->conf,
hwc 111 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c } while (local64_cmpxchg(&hwc->prev_count, prev, count) != prev);
hwc 119 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 124 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c if (hwc->state & PERF_HES_UPTODATE)
hwc 129 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c pe->adev->df_funcs->pmc_stop(pe->adev, hwc->conf, 0);
hwc 135 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
hwc 136 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c hwc->state |= PERF_HES_STOPPED;
hwc 138 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c if (hwc->state & PERF_HES_UPTODATE)
hwc 142 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c hwc->state |= PERF_HES_UPTODATE;
hwc 148 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 159 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c retval = pe->adev->df_funcs->pmc_start(pe->adev, hwc->conf, 1);
hwc 178 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 187 drivers/gpu/drm/amd/amdgpu/amdgpu_pmu.c pe->adev->df_funcs->pmc_stop(pe->adev, hwc->conf, 1);
hwc 576 drivers/gpu/drm/i915/i915_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 580 drivers/gpu/drm/i915/i915_pmu.c prev = local64_read(&hwc->prev_count);
hwc 583 drivers/gpu/drm/i915/i915_pmu.c if (local64_cmpxchg(&hwc->prev_count, prev, new) != prev)
hwc 392 drivers/hwtracing/coresight/coresight-etm-perf.c struct hw_perf_event *hwc = &event->hw;
hwc 396 drivers/hwtracing/coresight/coresight-etm-perf.c if (hwc->state & PERF_HES_STOPPED)
hwc 399 drivers/hwtracing/coresight/coresight-etm-perf.c hwc->state = PERF_HES_STOPPED;
hwc 79 drivers/iio/adc/stm32-dfsdm-adc.c struct iio_hw_consumer *hwc;
hwc 1004 drivers/iio/adc/stm32-dfsdm-adc.c if (adc->hwc) {
hwc 1005 drivers/iio/adc/stm32-dfsdm-adc.c ret = iio_hw_consumer_enable(adc->hwc);
hwc 1033 drivers/iio/adc/stm32-dfsdm-adc.c if (adc->hwc)
hwc 1034 drivers/iio/adc/stm32-dfsdm-adc.c iio_hw_consumer_disable(adc->hwc);
hwc 1072 drivers/iio/adc/stm32-dfsdm-adc.c if (adc->hwc)
hwc 1073 drivers/iio/adc/stm32-dfsdm-adc.c iio_hw_consumer_disable(adc->hwc);
hwc 1253 drivers/iio/adc/stm32-dfsdm-adc.c ret = iio_hw_consumer_enable(adc->hwc);
hwc 1262 drivers/iio/adc/stm32-dfsdm-adc.c iio_hw_consumer_disable(adc->hwc);
hwc 1473 drivers/iio/adc/stm32-dfsdm-adc.c adc->hwc = devm_iio_hw_consumer_alloc(&indio_dev->dev);
hwc 1474 drivers/iio/adc/stm32-dfsdm-adc.c if (IS_ERR(adc->hwc))
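
[Editor's note] stm32-dfsdm-adc.c is the main in-tree user of the iio_hw_consumer API indexed just above and continued below: allocate the consumer once at probe time, then bracket each conversion with enable/disable. A usage sketch; struct my_adc is a hypothetical container standing in for the driver's private state.

static int sketch_adc_probe(struct iio_dev *indio_dev, struct my_adc *adc)
{
	/* devm-managed: released automatically with the device */
	adc->hwc = devm_iio_hw_consumer_alloc(&indio_dev->dev);
	if (IS_ERR(adc->hwc))
		return PTR_ERR(adc->hwc);

	return 0;
}

static int sketch_adc_convert(struct my_adc *adc)
{
	int ret = iio_hw_consumer_enable(adc->hwc);

	if (ret)
		return ret;

	/* ... perform the conversion while upstream channels stream ... */

	iio_hw_consumer_disable(adc->hwc);
	return 0;
}
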
hwc 53 drivers/iio/buffer/industrialio-hw-consumer.c struct iio_hw_consumer *hwc, struct iio_dev *indio_dev)
hwc 58 drivers/iio/buffer/industrialio-hw-consumer.c list_for_each_entry(buf, &hwc->buffers, head) {
hwc 72 drivers/iio/buffer/industrialio-hw-consumer.c list_add_tail(&buf->head, &hwc->buffers);
hwc 86 drivers/iio/buffer/industrialio-hw-consumer.c struct iio_hw_consumer *hwc;
hwc 90 drivers/iio/buffer/industrialio-hw-consumer.c hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);
hwc 91 drivers/iio/buffer/industrialio-hw-consumer.c if (!hwc)
hwc 94 drivers/iio/buffer/industrialio-hw-consumer.c INIT_LIST_HEAD(&hwc->buffers);
hwc 96 drivers/iio/buffer/industrialio-hw-consumer.c hwc->channels = iio_channel_get_all(dev);
hwc 97 drivers/iio/buffer/industrialio-hw-consumer.c if (IS_ERR(hwc->channels)) {
hwc 98 drivers/iio/buffer/industrialio-hw-consumer.c ret = PTR_ERR(hwc->channels);
hwc 102 drivers/iio/buffer/industrialio-hw-consumer.c chan = &hwc->channels[0];
hwc 104 drivers/iio/buffer/industrialio-hw-consumer.c buf = iio_hw_consumer_get_buffer(hwc, chan->indio_dev);
hwc 113 drivers/iio/buffer/industrialio-hw-consumer.c return hwc;
hwc 116 drivers/iio/buffer/industrialio-hw-consumer.c list_for_each_entry(buf, &hwc->buffers, head)
hwc 118 drivers/iio/buffer/industrialio-hw-consumer.c iio_channel_release_all(hwc->channels);
hwc 120 drivers/iio/buffer/industrialio-hw-consumer.c kfree(hwc);
hwc 129 drivers/iio/buffer/industrialio-hw-consumer.c void iio_hw_consumer_free(struct iio_hw_consumer *hwc)
hwc 133 drivers/iio/buffer/industrialio-hw-consumer.c iio_channel_release_all(hwc->channels);
hwc 134 drivers/iio/buffer/industrialio-hw-consumer.c list_for_each_entry_safe(buf, n, &hwc->buffers, head)
hwc 136 drivers/iio/buffer/industrialio-hw-consumer.c kfree(hwc);
hwc 196 drivers/iio/buffer/industrialio-hw-consumer.c void devm_iio_hw_consumer_free(struct device *dev, struct iio_hw_consumer *hwc)
hwc 201 drivers/iio/buffer/industrialio-hw-consumer.c devm_iio_hw_consumer_match, hwc);
hwc 212 drivers/iio/buffer/industrialio-hw-consumer.c int iio_hw_consumer_enable(struct iio_hw_consumer *hwc)
hwc 217 drivers/iio/buffer/industrialio-hw-consumer.c list_for_each_entry(buf, &hwc->buffers, head) {
hwc 226 drivers/iio/buffer/industrialio-hw-consumer.c list_for_each_entry_continue_reverse(buf, &hwc->buffers, head)
hwc 236 drivers/iio/buffer/industrialio-hw-consumer.c void iio_hw_consumer_disable(struct iio_hw_consumer *hwc)
hwc 240 drivers/iio/buffer/industrialio-hw-consumer.c list_for_each_entry(buf, &hwc->buffers, head)
hwc 985 drivers/perf/arm-cci.c struct hw_perf_event *hwc = &event->hw;
hwc 989 drivers/perf/arm-cci.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 991 drivers/perf/arm-cci.c } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
hwc 1008 drivers/perf/arm-cci.c struct hw_perf_event *hwc = &event->hw;
hwc 1016 drivers/perf/arm-cci.c local64_set(&hwc->prev_count, val);
hwc 1024 drivers/perf/arm-cci.c hwc->state |= PERF_HES_ARCH;
hwc 1137 drivers/perf/arm-cci.c struct hw_perf_event *hwc = &event->hw;
hwc 1138 drivers/perf/arm-cci.c int idx = hwc->idx;
hwc 1146 drivers/perf/arm-cci.c WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
hwc 1148 drivers/perf/arm-cci.c hwc->state = 0;
hwc 1159 drivers/perf/arm-cci.c pmu_set_event(cci_pmu, idx, hwc->config_base);
hwc 1170 drivers/perf/arm-cci.c struct hw_perf_event *hwc = &event->hw;
hwc 1171 drivers/perf/arm-cci.c int idx = hwc->idx;
hwc 1173 drivers/perf/arm-cci.c if (hwc->state & PERF_HES_STOPPED)
hwc 1187 drivers/perf/arm-cci.c hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 1194 drivers/perf/arm-cci.c struct hw_perf_event *hwc = &event->hw;
hwc 1205 drivers/perf/arm-cci.c hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 1219 drivers/perf/arm-cci.c struct hw_perf_event *hwc = &event->hw;
hwc 1220 drivers/perf/arm-cci.c int idx = hwc->idx;
hwc 1283 drivers/perf/arm-cci.c struct hw_perf_event *hwc = &event->hw;
hwc 1299 drivers/perf/arm-cci.c hwc->idx = -1;
hwc 1300 drivers/perf/arm-cci.c hwc->config_base = 0;
hwc 1301 drivers/perf/arm-cci.c hwc->config = 0;
hwc 1302 drivers/perf/arm-cci.c hwc->event_base = 0;
hwc 1307 drivers/perf/arm-cci.c hwc->config_base |= (unsigned long)mapping;
hwc 351 drivers/perf/arm_dsu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 356 drivers/perf/arm_dsu_pmu.c prev_count = local64_read(&hwc->prev_count);
hwc 358 drivers/perf/arm_dsu_pmu.c } while (local64_cmpxchg(&hwc->prev_count, prev_count, new_count) !=
hwc 360 drivers/perf/arm_dsu_pmu.c delta = (new_count - prev_count) & DSU_PMU_COUNTER_MASK(hwc->idx);
hwc 444 drivers/perf/arm_dsu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 455 drivers/perf/arm_dsu_pmu.c hwc->idx = idx;
hwc 457 drivers/perf/arm_dsu_pmu.c hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 470 drivers/perf/arm_dsu_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 471 drivers/perf/arm_dsu_pmu.c int idx = hwc->idx;
hwc 123 drivers/perf/arm_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 124 drivers/perf/arm_pmu.c s64 left = local64_read(&hwc->period_left);
hwc 125 drivers/perf/arm_pmu.c s64 period = hwc->sample_period;
hwc 132 drivers/perf/arm_pmu.c local64_set(&hwc->period_left, left);
hwc 133 drivers/perf/arm_pmu.c hwc->last_period = period;
hwc 139 drivers/perf/arm_pmu.c local64_set(&hwc->period_left, left);
hwc 140 drivers/perf/arm_pmu.c hwc->last_period = period;
hwc 153 drivers/perf/arm_pmu.c local64_set(&hwc->prev_count, (u64)-left);
hwc 165 drivers/perf/arm_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 170 drivers/perf/arm_pmu.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 173 drivers/perf/arm_pmu.c if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
hwc 180 drivers/perf/arm_pmu.c local64_sub(delta, &hwc->period_left);
hwc 195 drivers/perf/arm_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 201 drivers/perf/arm_pmu.c if (!(hwc->state & PERF_HES_STOPPED)) {
hwc 204 drivers/perf/arm_pmu.c hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 211 drivers/perf/arm_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 218 drivers/perf/arm_pmu.c WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
hwc 220 drivers/perf/arm_pmu.c hwc->state = 0;
hwc 237 drivers/perf/arm_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 238 drivers/perf/arm_pmu.c int idx = hwc->idx;
hwc 245 drivers/perf/arm_pmu.c hwc->idx = -1;
hwc 253 drivers/perf/arm_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 273 drivers/perf/arm_pmu.c hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 364 drivers/perf/arm_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 367 drivers/perf/arm_pmu.c hwc->flags = 0;
hwc 382 drivers/perf/arm_pmu.c hwc->idx = -1;
hwc 383 drivers/perf/arm_pmu.c hwc->config_base = 0;
hwc 384 drivers/perf/arm_pmu.c hwc->config = 0;
hwc 385 drivers/perf/arm_pmu.c hwc->event_base = 0;
hwc 391 drivers/perf/arm_pmu.c armpmu->set_event_filter(hwc, &event->attr)) {
hwc 400 drivers/perf/arm_pmu.c hwc->config_base |= (unsigned long)mapping;
hwc 409 drivers/perf/arm_pmu.c hwc->sample_period = arm_pmu_event_max_period(event) >> 1;
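
[Editor's note] Note the arm_pmu.c hit at line 409: when userspace requests no sample period, the driver defaults to half the counter's maximum period, leaving enough headroom that the counter cannot wrap a second time before the overflow interrupt is handled. A sketch of that initialisation, reusing arm_pmu_event_max_period() from the listing above.

static void sketch_init_default_period(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;

	if (!hwc->sample_period) {
		/* half the max period: margin against a double wrap */
		hwc->sample_period = arm_pmu_event_max_period(event) >> 1;
		hwc->last_period = hwc->sample_period;
		local64_set(&hwc->period_left, hwc->sample_period);
	}
}
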
hwc 410 drivers/perf/arm_pmu.c hwc->last_period = hwc->sample_period;
hwc 411 drivers/perf/arm_pmu.c local64_set(&hwc->period_left, hwc->sample_period);
hwc 204 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 207 drivers/perf/arm_smmuv3_pmu.c u32 idx = hwc->idx;
hwc 210 drivers/perf/arm_smmuv3_pmu.c prev = local64_read(&hwc->prev_count);
hwc 212 drivers/perf/arm_smmuv3_pmu.c } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);
hwc 222 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc)
hwc 224 drivers/perf/arm_smmuv3_pmu.c u32 idx = hwc->idx;
hwc 247 drivers/perf/arm_smmuv3_pmu.c local64_set(&hwc->prev_count, new);
hwc 343 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 353 drivers/perf/arm_smmuv3_pmu.c if (hwc->sample_period) {
hwc 391 drivers/perf/arm_smmuv3_pmu.c hwc->idx = -1;
hwc 405 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 406 drivers/perf/arm_smmuv3_pmu.c int idx = hwc->idx;
hwc 408 drivers/perf/arm_smmuv3_pmu.c hwc->state = 0;
hwc 410 drivers/perf/arm_smmuv3_pmu.c smmu_pmu_set_period(smmu_pmu, hwc);
hwc 418 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 419 drivers/perf/arm_smmuv3_pmu.c int idx = hwc->idx;
hwc 421 drivers/perf/arm_smmuv3_pmu.c if (hwc->state & PERF_HES_STOPPED)
hwc 427 drivers/perf/arm_smmuv3_pmu.c hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 432 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 440 drivers/perf/arm_smmuv3_pmu.c hwc->idx = idx;
hwc 441 drivers/perf/arm_smmuv3_pmu.c hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 443 drivers/perf/arm_smmuv3_pmu.c local64_set(&hwc->prev_count, 0);
hwc 458 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 460 drivers/perf/arm_smmuv3_pmu.c int idx = hwc->idx;
hwc 619 drivers/perf/arm_smmuv3_pmu.c struct hw_perf_event *hwc;
hwc 625 drivers/perf/arm_smmuv3_pmu.c hwc = &event->hw;
hwc 627 drivers/perf/arm_smmuv3_pmu.c smmu_pmu_set_period(smmu_pmu, hwc);
hwc 716 drivers/perf/arm_spe_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 719 drivers/perf/arm_spe_pmu.c hwc->state = 0;
hwc 721 drivers/perf/arm_spe_pmu.c if (hwc->state)
hwc 737 drivers/perf/arm_spe_pmu.c reg = local64_read(&hwc->period_left);
hwc 749 drivers/perf/arm_spe_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 753 drivers/perf/arm_spe_pmu.c if (hwc->state & PERF_HES_STOPPED)
hwc 780 drivers/perf/arm_spe_pmu.c local64_set(&hwc->period_left, read_sysreg_s(SYS_PMSICR_EL1));
hwc 781 drivers/perf/arm_spe_pmu.c hwc->state |= PERF_HES_UPTODATE;
hwc 784 drivers/perf/arm_spe_pmu.c hwc->state |= PERF_HES_STOPPED;
hwc 791 drivers/perf/arm_spe_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 797 drivers/perf/arm_spe_pmu.c hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
hwc 801 drivers/perf/arm_spe_pmu.c if (hwc->state & PERF_HES_STOPPED)
hwc 259 drivers/perf/fsl_imx8_ddr_perf.c struct hw_perf_event *hwc = &event->hw;
hwc 298 drivers/perf/fsl_imx8_ddr_perf.c hwc->idx = -1;
hwc 307 drivers/perf/fsl_imx8_ddr_perf.c struct hw_perf_event *hwc = &event->hw;
hwc 309 drivers/perf/fsl_imx8_ddr_perf.c int counter = hwc->idx;
hwc 312 drivers/perf/fsl_imx8_ddr_perf.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 314 drivers/perf/fsl_imx8_ddr_perf.c } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
hwc 349 drivers/perf/fsl_imx8_ddr_perf.c struct hw_perf_event *hwc = &event->hw;
hwc 350 drivers/perf/fsl_imx8_ddr_perf.c int counter = hwc->idx;
hwc 352 drivers/perf/fsl_imx8_ddr_perf.c local64_set(&hwc->prev_count, 0);
hwc 356 drivers/perf/fsl_imx8_ddr_perf.c hwc->state = 0;
hwc 362 drivers/perf/fsl_imx8_ddr_perf.c struct hw_perf_event *hwc = &event->hw;
hwc 391 drivers/perf/fsl_imx8_ddr_perf.c hwc->idx = counter;
hwc 393 drivers/perf/fsl_imx8_ddr_perf.c hwc->state |= PERF_HES_STOPPED;
hwc 404 drivers/perf/fsl_imx8_ddr_perf.c struct hw_perf_event *hwc = &event->hw;
hwc 405 drivers/perf/fsl_imx8_ddr_perf.c int counter = hwc->idx;
hwc 410 drivers/perf/fsl_imx8_ddr_perf.c hwc->state |= PERF_HES_STOPPED;
hwc 416 drivers/perf/fsl_imx8_ddr_perf.c struct hw_perf_event *hwc = &event->hw;
hwc 417 drivers/perf/fsl_imx8_ddr_perf.c int counter = hwc->idx;
hwc 423 drivers/perf/fsl_imx8_ddr_perf.c hwc->idx = -1;
hwc 47 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c #define GET_DDRC_EVENTID(hwc) (hwc->config_base & 0x7)
hwc 65 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c struct hw_perf_event *hwc)
hwc 68 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c u32 idx = GET_DDRC_EVENTID(hwc);
hwc 79 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c struct hw_perf_event *hwc, u64 val)
hwc 81 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c u32 idx = GET_DDRC_EVENTID(hwc);
hwc 122 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c struct hw_perf_event *hwc)
hwc 128 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c val |= (1 << GET_DDRC_EVENTID(hwc));
hwc 133 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c struct hw_perf_event *hwc)
hwc 139 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c val &= ~(1 << GET_DDRC_EVENTID(hwc));
hwc 147 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 149 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c int idx = GET_DDRC_EVENTID(hwc);
hwc 160 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c struct hw_perf_event *hwc)
hwc 166 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c val &= ~(1 << GET_DDRC_EVENTID(hwc));
hwc 171 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c struct hw_perf_event *hwc)
hwc 177 drivers/perf/hisilicon/hisi_uncore_ddrc_pmu.c val |= (1 << GET_DDRC_EVENTID(hwc));
hwc 51 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c struct hw_perf_event *hwc)
hwc 53 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c u32 idx = hwc->idx;
hwc 65 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c struct hw_perf_event *hwc, u64 val)
hwc 67 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c u32 idx = hwc->idx;
hwc 128 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c struct hw_perf_event *hwc)
hwc 134 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c val |= (1 << hwc->idx);
hwc 139 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c struct hw_perf_event *hwc)
hwc 145 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c val &= ~(1 << hwc->idx);
hwc 150 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c struct hw_perf_event *hwc)
hwc 156 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c val &= ~(1 << hwc->idx);
hwc 161 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c struct hw_perf_event *hwc)
hwc 167 drivers/perf/hisilicon/hisi_uncore_hha_pmu.c val |= (1 << hwc->idx);
hwc 50 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c struct hw_perf_event *hwc)
hwc 52 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c u32 idx = hwc->idx;
hwc 64 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c struct hw_perf_event *hwc, u64 val)
hwc 66 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c u32 idx = hwc->idx;
hwc 127 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c struct hw_perf_event *hwc)
hwc 133 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c val |= (1 << hwc->idx);
hwc 138 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c struct hw_perf_event *hwc)
hwc 144 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c val &= ~(1 << hwc->idx);
hwc 149 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c struct hw_perf_event *hwc)
hwc 155 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c val &= ~(1 << hwc->idx);
hwc 160 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c struct hw_perf_event *hwc)
hwc 166 drivers/perf/hisilicon/hisi_uncore_l3c_pmu.c val |= (1 << hwc->idx);
hwc 128 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 167 drivers/perf/hisilicon/hisi_uncore_pmu.c hwc->idx = -1;
hwc 168 drivers/perf/hisilicon/hisi_uncore_pmu.c hwc->config_base = event->attr.config;
hwc 183 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 185 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->ops->write_evtype(hisi_pmu, hwc->idx,
hwc 188 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->ops->enable_counter_int(hisi_pmu, hwc);
hwc 189 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->ops->enable_counter(hisi_pmu, hwc);
hwc 198 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 200 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->ops->disable_counter(hisi_pmu, hwc);
hwc 201 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->ops->disable_counter_int(hisi_pmu, hwc);
hwc 207 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 218 drivers/perf/hisilicon/hisi_uncore_pmu.c local64_set(&hwc->prev_count, val);
hwc 220 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->ops->write_counter(hisi_pmu, hwc, val);
hwc 226 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 231 drivers/perf/hisilicon/hisi_uncore_pmu.c new_raw_count = hisi_pmu->ops->read_counter(hisi_pmu, hwc);
hwc 232 drivers/perf/hisilicon/hisi_uncore_pmu.c prev_raw_count = local64_read(&hwc->prev_count);
hwc 233 drivers/perf/hisilicon/hisi_uncore_pmu.c } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
hwc 246 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 248 drivers/perf/hisilicon/hisi_uncore_pmu.c if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
hwc 251 drivers/perf/hisilicon/hisi_uncore_pmu.c WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
hwc 252 drivers/perf/hisilicon/hisi_uncore_pmu.c hwc->state = 0;
hwc 256 drivers/perf/hisilicon/hisi_uncore_pmu.c u64 prev_raw_count = local64_read(&hwc->prev_count);
hwc 258 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->ops->write_counter(hisi_pmu, hwc, prev_raw_count);
hwc 267 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 270 drivers/perf/hisilicon/hisi_uncore_pmu.c WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
hwc 271 drivers/perf/hisilicon/hisi_uncore_pmu.c hwc->state |= PERF_HES_STOPPED;
hwc 273 drivers/perf/hisilicon/hisi_uncore_pmu.c if (hwc->state & PERF_HES_UPTODATE)
hwc 278 drivers/perf/hisilicon/hisi_uncore_pmu.c hwc->state |= PERF_HES_UPTODATE;
hwc 284 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 287 drivers/perf/hisilicon/hisi_uncore_pmu.c hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 306 drivers/perf/hisilicon/hisi_uncore_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 309 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_uncore_pmu_clear_event_idx(hisi_pmu, hwc->idx);
hwc 311 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->pmu_events.hw_events[hwc->idx] = NULL;
hwc 341 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
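
[Editor's note] The three HiSilicon uncore drivers above (ddrc, hha, l3c) route all operations through the shared ops table visible in hisi_uncore_pmu.c; each per-counter enable or disable is a read-modify-write of one bit in a control register. A sketch of that shape; ctrl_reg is a hypothetical mapped register, readl()/writel() are the usual MMIO accessors.

static void sketch_hisi_enable_counter(void __iomem *ctrl_reg,
				       struct hw_perf_event *hwc)
{
	u32 val = readl(ctrl_reg);

	val |= 1 << hwc->idx;		/* set this counter's enable bit */
	writel(val, ctrl_reg);
}

static void sketch_hisi_disable_counter(void __iomem *ctrl_reg,
					struct hw_perf_event *hwc)
{
	u32 val = readl(ctrl_reg);

	val &= ~(1 << hwc->idx);	/* clear it again */
	writel(val, ctrl_reg);
}
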
hwc 343 drivers/perf/qcom_l2_pmu.c u32 idx = hwc->idx;
hwc 346 drivers/perf/qcom_l2_pmu.c prev = local64_read(&hwc->prev_count);
hwc 348 drivers/perf/qcom_l2_pmu.c } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);
hwc 362 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc)
hwc 364 drivers/perf/qcom_l2_pmu.c u32 idx = hwc->idx;
hwc 377 drivers/perf/qcom_l2_pmu.c local64_set(&hwc->prev_count, new);
hwc 384 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 389 drivers/perf/qcom_l2_pmu.c if (hwc->config_base == L2CYCLE_CTR_RAW_CODE) {
hwc 406 drivers/perf/qcom_l2_pmu.c group = L2_EVT_GROUP(hwc->config_base);
hwc 419 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 420 drivers/perf/qcom_l2_pmu.c int idx = hwc->idx;
hwc 423 drivers/perf/qcom_l2_pmu.c if (hwc->config_base != L2CYCLE_CTR_RAW_CODE)
hwc 424 drivers/perf/qcom_l2_pmu.c clear_bit(L2_EVT_GROUP(hwc->config_base), cluster->used_groups);
hwc 440 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc;
hwc 449 drivers/perf/qcom_l2_pmu.c hwc = &event->hw;
hwc 451 drivers/perf/qcom_l2_pmu.c l2_cache_cluster_set_period(cluster, hwc);
hwc 482 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 492 drivers/perf/qcom_l2_pmu.c if (hwc->sample_period) {
hwc 571 drivers/perf/qcom_l2_pmu.c hwc->idx = -1;
hwc 572 drivers/perf/qcom_l2_pmu.c hwc->config_base = event->attr.config;
hwc 586 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 587 drivers/perf/qcom_l2_pmu.c int idx = hwc->idx;
hwc 591 drivers/perf/qcom_l2_pmu.c hwc->state = 0;
hwc 595 drivers/perf/qcom_l2_pmu.c l2_cache_cluster_set_period(cluster, hwc);
hwc 597 drivers/perf/qcom_l2_pmu.c if (hwc->config_base == L2CYCLE_CTR_RAW_CODE) {
hwc 600 drivers/perf/qcom_l2_pmu.c config = hwc->config_base;
hwc 616 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 617 drivers/perf/qcom_l2_pmu.c int idx = hwc->idx;
hwc 619 drivers/perf/qcom_l2_pmu.c if (hwc->state & PERF_HES_STOPPED)
hwc 627 drivers/perf/qcom_l2_pmu.c hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 632 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 643 drivers/perf/qcom_l2_pmu.c hwc->idx = idx;
hwc 644 drivers/perf/qcom_l2_pmu.c hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 646 drivers/perf/qcom_l2_pmu.c local64_set(&hwc->prev_count, 0);
hwc 659 drivers/perf/qcom_l2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 661 drivers/perf/qcom_l2_pmu.c int idx = hwc->idx;
hwc 481 drivers/perf/qcom_l3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 492 drivers/perf/qcom_l3_pmu.c if (hwc->sample_period)
hwc 506 drivers/perf/qcom_l3_pmu.c hwc->idx = -1;
hwc 526 drivers/perf/qcom_l3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 529 drivers/perf/qcom_l3_pmu.c hwc->state = 0;
hwc 535 drivers/perf/qcom_l3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 538 drivers/perf/qcom_l3_pmu.c if (hwc->state & PERF_HES_STOPPED)
hwc 544 drivers/perf/qcom_l3_pmu.c hwc->state |= PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 550 drivers/perf/qcom_l3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 562 drivers/perf/qcom_l3_pmu.c hwc->idx = idx;
hwc 563 drivers/perf/qcom_l3_pmu.c hwc->state = PERF_HES_STOPPED | PERF_HES_UPTODATE;
hwc 578 drivers/perf/qcom_l3_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 583 drivers/perf/qcom_l3_pmu.c l3pmu->events[hwc->idx] = NULL;
hwc 584 drivers/perf/qcom_l3_pmu.c bitmap_release_region(l3pmu->used_mask, hwc->idx, order);
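
[Editor's note] The qcom_l3_pmu.c entries end with bitmap_release_region(l3pmu->used_mask, hwc->idx, order): the driver hands out counters as power-of-two bitmap regions, so an event using a chained 64-bit counter can claim two adjacent 32-bit counters in one step. A sketch of the allocate/release pairing under that assumption.

static int sketch_l3_alloc_counter(unsigned long *used_mask, int nr_counters,
				   bool long_counter)
{
	/* order 1 reserves two aligned counters for a chained event */
	int order = long_counter ? 1 : 0;

	return bitmap_find_free_region(used_mask, nr_counters, order);
}

static void sketch_l3_free_counter(unsigned long *used_mask, int idx,
				   bool long_counter)
{
	bitmap_release_region(used_mask, idx, long_counter ? 1 : 0);
}
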
hwc 247 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 250 drivers/perf/thunderx2_pmu.c hwc->config_base = (unsigned long)tx2_pmu->base
hwc 252 drivers/perf/thunderx2_pmu.c hwc->event_base = (unsigned long)tx2_pmu->base
hwc 259 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 261 drivers/perf/thunderx2_pmu.c hwc->config_base = (unsigned long)tx2_pmu->base
hwc 264 drivers/perf/thunderx2_pmu.c hwc->event_base = (unsigned long)tx2_pmu->base
hwc 271 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 275 drivers/perf/thunderx2_pmu.c reg_writel(val, hwc->config_base);
hwc 276 drivers/perf/thunderx2_pmu.c local64_set(&hwc->prev_count, 0);
hwc 277 drivers/perf/thunderx2_pmu.c reg_writel(0, hwc->event_base);
hwc 288 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 295 drivers/perf/thunderx2_pmu.c val = reg_readl(hwc->config_base);
hwc 298 drivers/perf/thunderx2_pmu.c reg_writel(val, hwc->config_base);
hwc 299 drivers/perf/thunderx2_pmu.c local64_set(&hwc->prev_count, 0);
hwc 300 drivers/perf/thunderx2_pmu.c reg_writel(0, hwc->event_base);
hwc 306 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 310 drivers/perf/thunderx2_pmu.c val = reg_readl(hwc->config_base);
hwc 312 drivers/perf/thunderx2_pmu.c reg_writel(val, hwc->config_base);
hwc 318 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 327 drivers/perf/thunderx2_pmu.c new = reg_readl(hwc->event_base);
hwc 328 drivers/perf/thunderx2_pmu.c prev = local64_xchg(&hwc->prev_count, new);
hwc 412 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 439 drivers/perf/thunderx2_pmu.c hwc->config = event->attr.config;
hwc 450 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 453 drivers/perf/thunderx2_pmu.c hwc->state = 0;
hwc 470 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 473 drivers/perf/thunderx2_pmu.c if (hwc->state & PERF_HES_UPTODATE)
hwc 478 drivers/perf/thunderx2_pmu.c WARN_ON_ONCE(hwc->state & PERF_HES_STOPPED);
hwc 479 drivers/perf/thunderx2_pmu.c hwc->state |= PERF_HES_STOPPED;
hwc 482 drivers/perf/thunderx2_pmu.c hwc->state |= PERF_HES_UPTODATE;
hwc 488 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 494 drivers/perf/thunderx2_pmu.c hwc->idx = alloc_counter(tx2_pmu);
hwc 495 drivers/perf/thunderx2_pmu.c if (hwc->idx < 0)
hwc 498 drivers/perf/thunderx2_pmu.c tx2_pmu->events[hwc->idx] = event;
hwc 502 drivers/perf/thunderx2_pmu.c hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
hwc 512 drivers/perf/thunderx2_pmu.c struct hw_perf_event *hwc = &event->hw;
hwc 520 drivers/perf/thunderx2_pmu.c tx2_pmu->events[hwc->idx] = NULL;
hwc 521 drivers/perf/thunderx2_pmu.c hwc->idx = -1;
hwc 879 drivers/video/fbdev/au1200fb.c lcd->hwc.cursorctrl = 0;
hwc 880 drivers/video/fbdev/au1200fb.c lcd->hwc.cursorpos = 0;
hwc 881 drivers/video/fbdev/au1200fb.c lcd->hwc.cursorcolor0 = 0;
hwc 882 drivers/video/fbdev/au1200fb.c lcd->hwc.cursorcolor1 = 0;
hwc 883 drivers/video/fbdev/au1200fb.c lcd->hwc.cursorcolor2 = 0;
hwc 884 drivers/video/fbdev/au1200fb.c lcd->hwc.cursorcolor3 = 0;
hwc 928 drivers/video/fbdev/au1200fb.c D(lcd->hwc.cursorctrl);
hwc 929 drivers/video/fbdev/au1200fb.c D(lcd->hwc.cursorpos);
hwc 930 drivers/video/fbdev/au1200fb.c D(lcd->hwc.cursorcolor0);
hwc 931 drivers/video/fbdev/au1200fb.c D(lcd->hwc.cursorcolor1);
hwc 932 drivers/video/fbdev/au1200fb.c D(lcd->hwc.cursorcolor2);
hwc 933 drivers/video/fbdev/au1200fb.c D(lcd->hwc.cursorcolor3);
hwc 59 drivers/video/fbdev/au1200fb.h } hwc;
hwc 534 drivers/video/fbdev/mmp/hw/mmp_ctrl.h #define CFG_CSB_256x32(hwc) ((hwc)<<15) /* HWC */
hwc 309 drivers/video/fbdev/pxa168fb.h #define CFG_CSB_256x32(hwc) ((hwc) << 15) /* HWC */
hwc 15 include/linux/iio/hw-consumer.h void iio_hw_consumer_free(struct iio_hw_consumer *hwc);
hwc 17 include/linux/iio/hw-consumer.h void devm_iio_hw_consumer_free(struct device *dev, struct iio_hw_consumer *hwc);
hwc 18 include/linux/iio/hw-consumer.h int iio_hw_consumer_enable(struct iio_hw_consumer *hwc);
hwc 19 include/linux/iio/hw-consumer.h void iio_hw_consumer_disable(struct iio_hw_consumer *hwc);
hwc 3672 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 3678 kernel/events/core.c delta = (s64)(period - hwc->sample_period);
hwc 3681 kernel/events/core.c sample_period = hwc->sample_period + delta;
hwc 3686 kernel/events/core.c hwc->sample_period = sample_period;
hwc 3688 kernel/events/core.c if (local64_read(&hwc->period_left) > 8*sample_period) {
hwc 3692 kernel/events/core.c local64_set(&hwc->period_left, 0);
hwc 3708 kernel/events/core.c struct hw_perf_event *hwc;
hwc 3732 kernel/events/core.c hwc = &event->hw;
hwc 3734 kernel/events/core.c if (hwc->interrupts == MAX_INTERRUPTS) {
hwc 3735 kernel/events/core.c hwc->interrupts = 0;
hwc 3749 kernel/events/core.c delta = now - hwc->freq_count_stamp;
hwc 3750 kernel/events/core.c hwc->freq_count_stamp = now;
hwc 8140 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 8145 kernel/events/core.c if (seq != hwc->interrupts_seq) {
hwc 8146 kernel/events/core.c hwc->interrupts_seq = seq;
hwc 8147 kernel/events/core.c hwc->interrupts = 1;
hwc 8149 kernel/events/core.c hwc->interrupts++;
hwc 8151 kernel/events/core.c && hwc->interrupts >= max_samples_per_tick)) {
hwc 8154 kernel/events/core.c hwc->interrupts = MAX_INTERRUPTS;
hwc 8162 kernel/events/core.c s64 delta = now - hwc->freq_time_stamp;
hwc 8164 kernel/events/core.c hwc->freq_time_stamp = now;
hwc 8167 kernel/events/core.c perf_adjust_period(event, delta, hwc->last_period, true);
hwc 8252 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 8253 kernel/events/core.c u64 period = hwc->last_period;
hwc 8257 kernel/events/core.c hwc->last_period = hwc->sample_period;
hwc 8260 kernel/events/core.c old = val = local64_read(&hwc->period_left);
hwc 8267 kernel/events/core.c if (local64_cmpxchg(&hwc->period_left, old, val) != old)
hwc 8277 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 8283 kernel/events/core.c if (hwc->interrupts == MAX_INTERRUPTS)
hwc 8303 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 8319 kernel/events/core.c if (nr == 1 && hwc->sample_period == 1 && !event->attr.freq)
hwc 8322 kernel/events/core.c if (local64_add_negative(nr, &hwc->period_left))
hwc 8485 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 8489 kernel/events/core.c hwc->last_period = hwc->sample_period;
hwc 8493 kernel/events/core.c hwc->state = !(flags & PERF_EF_START);
hwc 9609 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 9615 kernel/events/core.c period = local64_read(&hwc->period_left);
hwc 9620 kernel/events/core.c local64_set(&hwc->period_left, 0);
hwc 9622 kernel/events/core.c period = max_t(u64, 10000, hwc->sample_period);
hwc 9624 kernel/events/core.c hrtimer_start(&hwc->hrtimer, ns_to_ktime(period),
hwc 9630 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 9633 kernel/events/core.c ktime_t remaining = hrtimer_get_remaining(&hwc->hrtimer);
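Around kernel/events/core.c:8319-8322 the software-event path decides when a sample is due with plain arithmetic on hwc->period_left: the field is kept below zero between samples, each event occurrence adds its count, and crossing zero means one sampling period has elapsed. A hedged sketch of that accounting, with a hypothetical emit_sample() standing in for perf's real overflow handling:

    #include <linux/perf_event.h>

    static void emit_sample(struct perf_event *event);  /* hypothetical */

    static void example_swevent(struct perf_event *event, u64 nr)
    {
            struct hw_perf_event *hwc = &event->hw;

            local64_add(nr, &event->count);

            if (!hwc->sample_period)
                    return;                 /* counting mode, no samples */

            /*
             * period_left is held negative between samples; while adding
             * nr keeps it negative, the period is not yet used up.
             */
            if (local64_add_negative(nr, &hwc->period_left))
                    return;

            emit_sample(event);             /* one sampling period elapsed */
    }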
hwc 9634 kernel/events/core.c local64_set(&hwc->period_left, ktime_to_ns(remaining));
hwc 9636 kernel/events/core.c hrtimer_cancel(&hwc->hrtimer);
hwc 9642 kernel/events/core.c struct hw_perf_event *hwc = &event->hw;
hwc 9647 kernel/events/core.c hrtimer_init(&hwc->hrtimer, CLOCK_MONOTONIC, HRTIMER_MODE_REL_HARD);
hwc 9648 kernel/events/core.c hwc->hrtimer.function = perf_swevent_hrtimer;
hwc 9658 kernel/events/core.c hwc->sample_period = event->attr.sample_period;
hwc 9659 kernel/events/core.c local64_set(&hwc->period_left, hwc->sample_period);
hwc 9660 kernel/events/core.c hwc->last_period = hwc->sample_period;
hwc 10429 kernel/events/core.c struct hw_perf_event *hwc;
hwc 10529 kernel/events/core.c hwc = &event->hw;
hwc 10530 kernel/events/core.c hwc->sample_period = attr->sample_period;
hwc 10532 kernel/events/core.c hwc->sample_period = 1;
hwc 10533 kernel/events/core.c hwc->last_period = hwc->sample_period;
hwc 10535 kernel/events/core.c local64_set(&hwc->period_left, hwc->sample_period);
hwc 11861 kernel/events/core.c struct hw_perf_event *hwc = &child_event->hw;
hwc 11863 kernel/events/core.c hwc->sample_period = sample_period;
hwc 11864 kernel/events/core.c hwc->last_period = sample_period;
hwc 11866 kernel/events/core.c local64_set(&hwc->period_left, sample_period);
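The core.c hunks at 9609-9660 belong to the hrtimer-backed software PMUs (cpu-clock/task-clock): on start, the timer is armed with either a leftover partial period or sample_period clamped to a floor; on stop, the remaining time is parked in period_left so the next start can resume it. A sketch of that start/cancel pairing, assuming the hrtimer was initialised as at line 9647 (names prefixed example_ are illustrative, not the kernel's):

    #include <linux/hrtimer.h>
    #include <linux/perf_event.h>

    static void example_timer_start(struct perf_event *event)
    {
            struct hw_perf_event *hwc = &event->hw;
            s64 period;

            /* resume a partial period left over from a previous stop */
            period = local64_read(&hwc->period_left);
            if (period) {
                    if (period < 0)
                            period = 10000;         /* 10 us floor */
                    local64_set(&hwc->period_left, 0);
            } else {
                    period = max_t(u64, 10000, hwc->sample_period);
            }
            hrtimer_start(&hwc->hrtimer, ns_to_ktime(period),
                          HRTIMER_MODE_REL_PINNED_HARD);
    }

    static void example_timer_cancel(struct perf_event *event)
    {
            struct hw_perf_event *hwc = &event->hw;

            if (hwc->sample_period) {
                    ktime_t remaining = hrtimer_get_remaining(&hwc->hrtimer);

                    /* park the remainder so the next start resumes it */
                    local64_set(&hwc->period_left, ktime_to_ns(remaining));
                    hrtimer_cancel(&hwc->hrtimer);
            }
    }

Reusing period_left as the parking spot for the unexpired timer slice is what keeps sampling intervals accurate across sched-out/sched-in of the event.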