Lines Matching refs:group
403 static u32 intel_cqm_xchg_rmid(struct perf_event *group, u32 rmid) in intel_cqm_xchg_rmid() argument
406 struct list_head *head = &group->hw.cqm_group_entry; in intel_cqm_xchg_rmid()
407 u32 old_rmid = group->hw.cqm_rmid; in intel_cqm_xchg_rmid()
422 local64_set(&group->count, atomic64_read(&rr.value)); in intel_cqm_xchg_rmid()
427 group->hw.cqm_rmid = rmid; in intel_cqm_xchg_rmid()
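
The matches at lines 403-427 fall in intel_cqm_xchg_rmid(), which swaps the RMID shared by a cache group of events and, when the old RMID is being taken away, folds its final occupancy count into the group leader. The listing looks like cross-reference output for the Intel CQM (Cache QoS Monitoring) perf driver of that era; the sketch below is a reconstruction consistent with the matched fragments, not verbatim source. The struct rmid_read reader, __intel_cqm_event_count(), cqm_cpumask and the cache_lock/cache_mutex locking are assumptions filled in from that driver.

        static u32 intel_cqm_xchg_rmid(struct perf_event *group, u32 rmid)
        {
                struct perf_event *event;
                struct list_head *head = &group->hw.cqm_group_entry;
                u32 old_rmid = group->hw.cqm_rmid;

                lockdep_assert_held(&cache_mutex);

                /*
                 * If the group's current RMID is being deallocated, read it
                 * one last time so the accumulated occupancy is not lost.
                 */
                if (__rmid_valid(old_rmid) && !__rmid_valid(rmid)) {
                        struct rmid_read rr = {
                                .value = ATOMIC64_INIT(0),
                                .rmid  = old_rmid,
                        };

                        on_each_cpu_mask(&cqm_cpumask, __intel_cqm_event_count,
                                         &rr, 1);
                        local64_set(&group->count, atomic64_read(&rr.value));
                }

                raw_spin_lock_irq(&cache_lock);

                /* Install the new RMID on the leader and every sibling. */
                group->hw.cqm_rmid = rmid;
                list_for_each_entry(event, head, hw.cqm_group_entry)
                        event->hw.cqm_rmid = rmid;

                raw_spin_unlock_irq(&cache_lock);

                return old_rmid;
        }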
646 struct perf_event *group, *g; in intel_cqm_sched_out_conflicting_events() local
651 list_for_each_entry_safe(group, g, &cache_groups, hw.cqm_groups_entry) { in intel_cqm_sched_out_conflicting_events()
652 if (group == event) in intel_cqm_sched_out_conflicting_events()
655 rmid = group->hw.cqm_rmid; in intel_cqm_sched_out_conflicting_events()
666 if (!__conflict_event(group, event)) in intel_cqm_sched_out_conflicting_events()
669 intel_cqm_xchg_rmid(group, INVALID_RMID); in intel_cqm_sched_out_conflicting_events()
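
Lines 646-669 are from intel_cqm_sched_out_conflicting_events(), which walks the global cache_groups list and strips the RMID from any group that conflicts with the event being scheduled in. The reconstruction below follows the matched lines; the __put_rmid() recycling step and the lockdep annotation are assumed from the surrounding driver.

        static void intel_cqm_sched_out_conflicting_events(struct perf_event *event)
        {
                struct perf_event *group, *g;
                u32 rmid;

                lockdep_assert_held(&cache_mutex);

                list_for_each_entry_safe(group, g, &cache_groups,
                                         hw.cqm_groups_entry) {
                        if (group == event)
                                continue;

                        rmid = group->hw.cqm_rmid;

                        /* Groups without a valid RMID have nothing to give up. */
                        if (!__rmid_valid(rmid))
                                continue;

                        /* Non-conflicting groups keep their RMID. */
                        if (!__conflict_event(group, event))
                                continue;

                        /* Take the RMID away from the group and recycle it. */
                        intel_cqm_xchg_rmid(group, INVALID_RMID);
                        __put_rmid(rmid);
                }
        }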
700 struct perf_event *group, *start = NULL; in __intel_cqm_rmid_rotate() local
716 list_for_each_entry(group, &cache_groups, hw.cqm_groups_entry) { in __intel_cqm_rmid_rotate()
717 if (!__rmid_valid(group->hw.cqm_rmid)) { in __intel_cqm_rmid_rotate()
719 start = group; in __intel_cqm_rmid_rotate()
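
Lines 700-719 sit at the start of __intel_cqm_rmid_rotate(), the RMID rotation worker. The matched fragment only covers the first step: scanning cache_groups for the first group whose RMID is no longer valid, which becomes the starting point for handing out recycled RMIDs. The sketch below shows just that step; the function signature, the nr_needed counter, the locking and the return value are assumptions, and the rest of the rotation logic is omitted.

        static bool __intel_cqm_rmid_rotate(void)
        {
                struct perf_event *group, *start = NULL;
                unsigned int nr_needed = 0;

                mutex_lock(&cache_mutex);

                /*
                 * Find the first cache group that lost its RMID (or never
                 * got one); rotation hands recycled RMIDs out starting here.
                 */
                list_for_each_entry(group, &cache_groups, hw.cqm_groups_entry) {
                        if (!__rmid_valid(group->hw.cqm_rmid)) {
                                if (!start)
                                        start = group;
                                nr_needed++;
                        }
                }

                /* ... steal, age and redistribute RMIDs from here on ... */

                mutex_unlock(&cache_mutex);

                return start != NULL;
        }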
846 struct perf_event **group) in intel_cqm_setup_event() argument
858 *group = iter; in intel_cqm_setup_event()
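
Lines 846-858 are in intel_cqm_setup_event(), which decides whether a new event can share an RMID with an existing cache group; when it finds a matching group it reports it back through the **group out-parameter. The reconstruction below is based on the matched lines; __match_event(), __conflict_event() and __get_rmid() are helpers assumed from the rest of the driver.

        static void intel_cqm_setup_event(struct perf_event *event,
                                          struct perf_event **group)
        {
                struct perf_event *iter;
                bool conflict = false;
                u32 rmid;

                list_for_each_entry(iter, &cache_groups, hw.cqm_groups_entry) {
                        rmid = iter->hw.cqm_rmid;

                        if (__match_event(iter, event)) {
                                /* Events monitoring the same target share an RMID. */
                                event->hw.cqm_rmid = rmid;
                                *group = iter;
                                return;
                        }

                        if (__conflict_event(iter, event) && __rmid_valid(rmid))
                                conflict = true;
                }

                /* No existing group matched: allocate a fresh RMID unless we conflict. */
                event->hw.cqm_rmid = conflict ? INVALID_RMID : __get_rmid();
        }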
1087 struct perf_event *group = NULL; in intel_cqm_event_init() local
1114 intel_cqm_setup_event(event, &group); in intel_cqm_event_init()
1116 if (group) { in intel_cqm_event_init()
1118 &group->hw.cqm_group_entry); in intel_cqm_event_init()
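
Lines 1087-1118 are the group-attach step in intel_cqm_event_init(): the local group pointer starts out NULL, intel_cqm_setup_event() fills it in when an existing cache group can be reused, and the new event is then linked either into that group or onto the global cache_groups list as a new leader. A sketch of that branch; the mutex name, the omitted attribute validation and the else-branch details are assumptions.

                struct perf_event *group = NULL;

                /* ... validation of event->attr omitted ... */

                mutex_lock(&cache_mutex);

                /* Reuse an existing cache group's RMID, or start a new group? */
                intel_cqm_setup_event(event, &group);

                if (group) {
                        /* Join the existing cache group and share its RMID. */
                        list_add_tail(&event->hw.cqm_group_entry,
                                      &group->hw.cqm_group_entry);
                } else {
                        /* Become the leader of a brand new cache group. */
                        list_add_tail(&event->hw.cqm_groups_entry,
                                      &cache_groups);
                }

                mutex_unlock(&cache_mutex);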