tx2_pmu            83 drivers/perf/thunderx2_pmu.c 			struct tx2_uncore_pmu *tx2_pmu);
tx2_pmu           183 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           185 drivers/perf/thunderx2_pmu.c 	tx2_pmu = pmu_to_tx2_pmu(dev_get_drvdata(dev));
tx2_pmu           186 drivers/perf/thunderx2_pmu.c 	return cpumap_print_to_pagebuf(true, buf, cpumask_of(tx2_pmu->cpu));
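The hits at file lines 183-186 are the sysfs cpumask attribute: each uncore PMU advertises the single CPU that services its events. A minimal sketch of that show routine, using the pmu_to_tx2_pmu() helper visible above; the DEVICE_ATTR_RO wiring is assumed and not shown in the listing:

static ssize_t cpumask_show(struct device *dev,
			    struct device_attribute *attr, char *buf)
{
	struct tx2_uncore_pmu *tx2_pmu;

	tx2_pmu = pmu_to_tx2_pmu(dev_get_drvdata(dev));

	/* Report the one CPU on which perf must open events for this PMU. */
	return cpumap_print_to_pagebuf(true, buf, cpumask_of(tx2_pmu->cpu));
}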
tx2_pmu           226 drivers/perf/thunderx2_pmu.c static int alloc_counter(struct tx2_uncore_pmu *tx2_pmu)
tx2_pmu           230 drivers/perf/thunderx2_pmu.c 	counter = find_first_zero_bit(tx2_pmu->active_counters,
tx2_pmu           231 drivers/perf/thunderx2_pmu.c 				tx2_pmu->max_counters);
tx2_pmu           232 drivers/perf/thunderx2_pmu.c 	if (counter == tx2_pmu->max_counters)
tx2_pmu           235 drivers/perf/thunderx2_pmu.c 	set_bit(counter, tx2_pmu->active_counters);
tx2_pmu           239 drivers/perf/thunderx2_pmu.c static inline void free_counter(struct tx2_uncore_pmu *tx2_pmu, int counter)
tx2_pmu           241 drivers/perf/thunderx2_pmu.c 	clear_bit(counter, tx2_pmu->active_counters);
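File lines 226-241 implement counter bookkeeping with a plain bitmap: find_first_zero_bit() picks the first free hardware counter, and a full bitmap means the PMU is out of counters. A sketch filling in the elided lines; the -ENOSPC return value is an assumption (the listing shows only the comparison against max_counters):

static int alloc_counter(struct tx2_uncore_pmu *tx2_pmu)
{
	int counter;

	counter = find_first_zero_bit(tx2_pmu->active_counters,
				      tx2_pmu->max_counters);
	if (counter == tx2_pmu->max_counters)
		return -ENOSPC;		/* every counter is in use */

	set_bit(counter, tx2_pmu->active_counters);
	return counter;
}

static inline void free_counter(struct tx2_uncore_pmu *tx2_pmu, int counter)
{
	clear_bit(counter, tx2_pmu->active_counters);
}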
tx2_pmu           245 drivers/perf/thunderx2_pmu.c 		struct tx2_uncore_pmu *tx2_pmu)
tx2_pmu           250 drivers/perf/thunderx2_pmu.c 	hwc->config_base = (unsigned long)tx2_pmu->base
tx2_pmu           252 drivers/perf/thunderx2_pmu.c 	hwc->event_base =  (unsigned long)tx2_pmu->base
tx2_pmu           257 drivers/perf/thunderx2_pmu.c 		struct tx2_uncore_pmu *tx2_pmu)
tx2_pmu           261 drivers/perf/thunderx2_pmu.c 	hwc->config_base = (unsigned long)tx2_pmu->base
tx2_pmu           264 drivers/perf/thunderx2_pmu.c 	hwc->event_base = (unsigned long)tx2_pmu->base
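Lines 245-264 are the two init_cntr_base helpers: they cache the MMIO addresses of a counter's control and data registers in the hw_perf_event, so the start/stop/read paths need no address arithmetic. A sketch of the L3C flavour; the register offsets and the 8-byte per-counter stride below are illustrative assumptions, not values taken from the listing:

/* Hypothetical register layout: per-counter control and data registers. */
#define EXAMPLE_COUNTER_CTL	0x300
#define EXAMPLE_COUNTER_DATA	0x400

static void init_cntr_base_l3c(struct perf_event *event,
			       struct tx2_uncore_pmu *tx2_pmu)
{
	struct hw_perf_event *hwc = &event->hw;

	hwc->config_base = (unsigned long)tx2_pmu->base
		+ EXAMPLE_COUNTER_CTL + (8 * hwc->idx);
	hwc->event_base = (unsigned long)tx2_pmu->base
		+ EXAMPLE_COUNTER_DATA + (8 * hwc->idx);
}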
tx2_pmu           319 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           323 drivers/perf/thunderx2_pmu.c 	tx2_pmu = pmu_to_tx2_pmu(event->pmu);
tx2_pmu           324 drivers/perf/thunderx2_pmu.c 	type = tx2_pmu->type;
tx2_pmu           325 drivers/perf/thunderx2_pmu.c 	prorate_factor = tx2_pmu->prorate_factor;
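Lines 319-325 open the event-update path, and prorate_factor is the key detail: only one L3C tile (or one DMC channel) is actually counted, so the read-back delta is multiplied up to estimate the whole socket. A sketch under stated assumptions: the 32-bit counter width and the readl() of the cached event_base are illustrative, neither is visible in the listing:

static void tx2_uncore_event_update(struct perf_event *event)
{
	u64 prev, delta;
	u32 new;
	struct hw_perf_event *hwc = &event->hw;
	struct tx2_uncore_pmu *tx2_pmu = pmu_to_tx2_pmu(event->pmu);

	new = readl((void __iomem *)hwc->event_base);	/* assumed 32-bit counter */
	prev = local64_xchg(&hwc->prev_count, new);

	/* Unsigned arithmetic absorbs a counter wraparound. */
	delta = (u32)(new - prev);

	/* Scale the one measured tile/channel up to the full socket. */
	local64_add(delta * tx2_pmu->prorate_factor, &event->count);
}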
tx2_pmu           413 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           430 drivers/perf/thunderx2_pmu.c 	tx2_pmu = pmu_to_tx2_pmu(event->pmu);
tx2_pmu           431 drivers/perf/thunderx2_pmu.c 	if (tx2_pmu->cpu >= nr_cpu_ids)
tx2_pmu           433 drivers/perf/thunderx2_pmu.c 	event->cpu = tx2_pmu->cpu;
tx2_pmu           435 drivers/perf/thunderx2_pmu.c 	if (event->attr.config >= tx2_pmu->max_events)
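Lines 413-435 belong to event_init. Two checks are visible: the PMU must still have a valid servicing CPU (cpu < nr_cpu_ids, which can fail after hotplug drains the node), and attr.config must name a known event. The sampling and per-task rejections in this sketch are the standard uncore-PMU pattern, assumed rather than shown:

static int tx2_uncore_event_init(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct tx2_uncore_pmu *tx2_pmu = pmu_to_tx2_pmu(event->pmu);

	if (event->attr.type != event->pmu->type)
		return -ENOENT;

	/* Uncore counters only count; no sampling, no per-task context. */
	if (is_sampling_event(event) || event->attach_state & PERF_ATTACH_TASK)
		return -EINVAL;

	if (tx2_pmu->cpu >= nr_cpu_ids)
		return -EINVAL;
	event->cpu = tx2_pmu->cpu;

	if (event->attr.config >= tx2_pmu->max_events)
		return -EINVAL;

	hwc->config = event->attr.config;
	return 0;
}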
tx2_pmu           451 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           454 drivers/perf/thunderx2_pmu.c 	tx2_pmu = pmu_to_tx2_pmu(event->pmu);
tx2_pmu           456 drivers/perf/thunderx2_pmu.c 	tx2_pmu->start_event(event, flags);
tx2_pmu           460 drivers/perf/thunderx2_pmu.c 	if (bitmap_weight(tx2_pmu->active_counters,
tx2_pmu           461 drivers/perf/thunderx2_pmu.c 				tx2_pmu->max_counters) == 1) {
tx2_pmu           462 drivers/perf/thunderx2_pmu.c 		hrtimer_start(&tx2_pmu->hrtimer,
tx2_pmu           463 drivers/perf/thunderx2_pmu.c 			ns_to_ktime(tx2_pmu->hrtimer_interval),
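Lines 451-463 show the interesting part of event_start: the hrtimer is armed only when the bitmap weight reaches one, i.e. when the first counter on this PMU goes live, so an idle PMU never ticks. A sketch; the perf_event_update_userpage() call is assumed boilerplate, and HRTIMER_MODE_REL matches the hrtimer_init() at line 583:

static void tx2_uncore_event_start(struct perf_event *event, int flags)
{
	struct tx2_uncore_pmu *tx2_pmu = pmu_to_tx2_pmu(event->pmu);

	tx2_pmu->start_event(event, flags);
	perf_event_update_userpage(event);

	/* First active counter on this PMU: begin periodic polling. */
	if (bitmap_weight(tx2_pmu->active_counters,
			  tx2_pmu->max_counters) == 1)
		hrtimer_start(&tx2_pmu->hrtimer,
			      ns_to_ktime(tx2_pmu->hrtimer_interval),
			      HRTIMER_MODE_REL);
}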
tx2_pmu           471 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           476 drivers/perf/thunderx2_pmu.c 	tx2_pmu = pmu_to_tx2_pmu(event->pmu);
tx2_pmu           477 drivers/perf/thunderx2_pmu.c 	tx2_pmu->stop_event(event);
tx2_pmu           489 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           491 drivers/perf/thunderx2_pmu.c 	tx2_pmu = pmu_to_tx2_pmu(event->pmu);
tx2_pmu           494 drivers/perf/thunderx2_pmu.c 	hwc->idx  = alloc_counter(tx2_pmu);
tx2_pmu           498 drivers/perf/thunderx2_pmu.c 	tx2_pmu->events[hwc->idx] = event;
tx2_pmu           500 drivers/perf/thunderx2_pmu.c 	tx2_pmu->init_cntr_base(event, tx2_pmu);
tx2_pmu           511 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu = pmu_to_tx2_pmu(event->pmu);
tx2_pmu           517 drivers/perf/thunderx2_pmu.c 	free_counter(tx2_pmu, GET_COUNTERID(event));
tx2_pmu           520 drivers/perf/thunderx2_pmu.c 	tx2_pmu->events[hwc->idx] = NULL;
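Lines 489-520 pair event_add with event_del: add claims a counter from the bitmap, caches the event pointer for the poll loop and resolves the counter's MMIO addresses; del undoes all three. A sketch of add; the -EAGAIN on exhaustion and the PERF_EF_START handling fill elided lines and are assumptions:

static int tx2_uncore_event_add(struct perf_event *event, int flags)
{
	struct hw_perf_event *hwc = &event->hw;
	struct tx2_uncore_pmu *tx2_pmu = pmu_to_tx2_pmu(event->pmu);

	/* Claim a free hardware counter for this event. */
	hwc->idx = alloc_counter(tx2_pmu);
	if (hwc->idx < 0)
		return -EAGAIN;

	tx2_pmu->events[hwc->idx] = event;
	hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;
	tx2_pmu->init_cntr_base(event, tx2_pmu);

	if (flags & PERF_EF_START)
		tx2_uncore_event_start(event, flags);

	return 0;
}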
tx2_pmu           531 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           534 drivers/perf/thunderx2_pmu.c 	tx2_pmu = container_of(timer, struct tx2_uncore_pmu, hrtimer);
tx2_pmu           535 drivers/perf/thunderx2_pmu.c 	max_counters = tx2_pmu->max_counters;
tx2_pmu           537 drivers/perf/thunderx2_pmu.c 	if (bitmap_empty(tx2_pmu->active_counters, max_counters))
tx2_pmu           540 drivers/perf/thunderx2_pmu.c 	for_each_set_bit(idx, tx2_pmu->active_counters, max_counters) {
tx2_pmu           541 drivers/perf/thunderx2_pmu.c 		struct perf_event *event = tx2_pmu->events[idx];
tx2_pmu           545 drivers/perf/thunderx2_pmu.c 	hrtimer_forward_now(timer, ns_to_ktime(tx2_pmu->hrtimer_interval));
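Lines 531-545 are the poll loop itself: walk every active counter, fold its delta into the perf count, then re-arm. When the bitmap has gone empty the callback declines to restart, which matches the lazy arming in event_start. A sketch, reusing the update helper sketched earlier:

static enum hrtimer_restart tx2_hrtimer_callback(struct hrtimer *timer)
{
	struct tx2_uncore_pmu *tx2_pmu;
	int max_counters, idx;

	tx2_pmu = container_of(timer, struct tx2_uncore_pmu, hrtimer);
	max_counters = tx2_pmu->max_counters;

	/* No counters left: let the timer lapse instead of re-arming. */
	if (bitmap_empty(tx2_pmu->active_counters, max_counters))
		return HRTIMER_NORESTART;

	for_each_set_bit(idx, tx2_pmu->active_counters, max_counters) {
		struct perf_event *event = tx2_pmu->events[idx];

		tx2_uncore_event_update(event);
	}

	hrtimer_forward_now(timer, ns_to_ktime(tx2_pmu->hrtimer_interval));
	return HRTIMER_RESTART;
}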
tx2_pmu           550 drivers/perf/thunderx2_pmu.c 		struct tx2_uncore_pmu *tx2_pmu)
tx2_pmu           552 drivers/perf/thunderx2_pmu.c 	struct device *dev = tx2_pmu->dev;
tx2_pmu           553 drivers/perf/thunderx2_pmu.c 	char *name = tx2_pmu->name;
tx2_pmu           556 drivers/perf/thunderx2_pmu.c 	tx2_pmu->pmu = (struct pmu) {
tx2_pmu           558 drivers/perf/thunderx2_pmu.c 		.attr_groups	= tx2_pmu->attr_groups,
tx2_pmu           569 drivers/perf/thunderx2_pmu.c 	tx2_pmu->pmu.name = devm_kasprintf(dev, GFP_KERNEL,
tx2_pmu           572 drivers/perf/thunderx2_pmu.c 	return perf_pmu_register(&tx2_pmu->pmu, tx2_pmu->pmu.name, -1);
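Lines 550-572 assemble the struct pmu and register it; task_ctx_nr = perf_invalid_context is what marks this as an uncore (CPU-bound, never per-task) PMU. A sketch; apart from attr_groups, the callback slots are assumed to point at the handlers above, and the "%s" format is an assumption for the elided devm_kasprintf() arguments:

static int tx2_uncore_pmu_register(struct tx2_uncore_pmu *tx2_pmu)
{
	struct device *dev = tx2_pmu->dev;
	char *name = tx2_pmu->name;

	tx2_pmu->pmu = (struct pmu) {
		.attr_groups	= tx2_pmu->attr_groups,
		.task_ctx_nr	= perf_invalid_context,	/* uncore: no task events */
		.event_init	= tx2_uncore_event_init,
		.add		= tx2_uncore_event_add,
		.del		= tx2_uncore_event_del,
		.start		= tx2_uncore_event_start,
		.stop		= tx2_uncore_event_stop,
		.read		= tx2_uncore_event_read,
	};

	tx2_pmu->pmu.name = devm_kasprintf(dev, GFP_KERNEL, "%s", name);
	if (!tx2_pmu->pmu.name)
		return -ENOMEM;

	/* type -1: let perf allocate a dynamic PMU type id. */
	return perf_pmu_register(&tx2_pmu->pmu, tx2_pmu->pmu.name, -1);
}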
tx2_pmu           575 drivers/perf/thunderx2_pmu.c static int tx2_uncore_pmu_add_dev(struct tx2_uncore_pmu *tx2_pmu)
tx2_pmu           579 drivers/perf/thunderx2_pmu.c 	cpu = cpumask_any_and(cpumask_of_node(tx2_pmu->node),
tx2_pmu           582 drivers/perf/thunderx2_pmu.c 	tx2_pmu->cpu = cpu;
tx2_pmu           583 drivers/perf/thunderx2_pmu.c 	hrtimer_init(&tx2_pmu->hrtimer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
tx2_pmu           584 drivers/perf/thunderx2_pmu.c 	tx2_pmu->hrtimer.function = tx2_hrtimer_callback;
tx2_pmu           586 drivers/perf/thunderx2_pmu.c 	ret = tx2_uncore_pmu_register(tx2_pmu);
tx2_pmu           588 drivers/perf/thunderx2_pmu.c 		dev_err(tx2_pmu->dev, "%s PMU: Failed to init driver\n",
tx2_pmu           589 drivers/perf/thunderx2_pmu.c 				tx2_pmu->name);
tx2_pmu           596 drivers/perf/thunderx2_pmu.c 			&tx2_pmu->hpnode);
tx2_pmu           598 drivers/perf/thunderx2_pmu.c 		dev_err(tx2_pmu->dev, "Error %d registering hotplug", ret);
tx2_pmu           603 drivers/perf/thunderx2_pmu.c 	list_add(&tx2_pmu->entry, &tx2_pmus);
tx2_pmu           605 drivers/perf/thunderx2_pmu.c 	dev_dbg(tx2_pmu->dev, "%s PMU UNCORE registered\n",
tx2_pmu           606 drivers/perf/thunderx2_pmu.c 			tx2_pmu->pmu.name);
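Lines 575-606: add_dev binds the PMU to any online CPU on its NUMA node, wires up the poll timer, registers with perf, and finally enrolls as a hotplug instance so the servicing CPU can be replaced later. A sketch; CPUHP_AP_PERF_ARM_CAVIUM_TX2_UNCORE_ONLINE is assumed to be the driver's hotplug state, since the listing elides the constant:

static int tx2_uncore_pmu_add_dev(struct tx2_uncore_pmu *tx2_pmu)
{
	int ret, cpu;

	/* Bind to any online CPU on this PMU's NUMA node. */
	cpu = cpumask_any_and(cpumask_of_node(tx2_pmu->node),
			      cpu_online_mask);

	tx2_pmu->cpu = cpu;
	hrtimer_init(&tx2_pmu->hrtimer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
	tx2_pmu->hrtimer.function = tx2_hrtimer_callback;

	ret = tx2_uncore_pmu_register(tx2_pmu);
	if (ret) {
		dev_err(tx2_pmu->dev, "%s PMU: Failed to init driver\n",
				tx2_pmu->name);
		return ret;
	}

	/* Enroll for hotplug callbacks so the servicing CPU can migrate. */
	ret = cpuhp_state_add_instance_nocalls(
			CPUHP_AP_PERF_ARM_CAVIUM_TX2_UNCORE_ONLINE,
			&tx2_pmu->hpnode);
	if (ret) {
		dev_err(tx2_pmu->dev, "Error %d registering hotplug", ret);
		return ret;
	}

	list_add(&tx2_pmu->entry, &tx2_pmus);

	dev_dbg(tx2_pmu->dev, "%s PMU UNCORE registered\n",
			tx2_pmu->pmu.name);
	return ret;
}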
tx2_pmu           613 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           644 drivers/perf/thunderx2_pmu.c 	tx2_pmu = devm_kzalloc(dev, sizeof(*tx2_pmu), GFP_KERNEL);
tx2_pmu           645 drivers/perf/thunderx2_pmu.c 	if (!tx2_pmu)
tx2_pmu           648 drivers/perf/thunderx2_pmu.c 	tx2_pmu->dev = dev;
tx2_pmu           649 drivers/perf/thunderx2_pmu.c 	tx2_pmu->type = type;
tx2_pmu           650 drivers/perf/thunderx2_pmu.c 	tx2_pmu->base = base;
tx2_pmu           651 drivers/perf/thunderx2_pmu.c 	tx2_pmu->node = dev_to_node(dev);
tx2_pmu           652 drivers/perf/thunderx2_pmu.c 	INIT_LIST_HEAD(&tx2_pmu->entry);
tx2_pmu           654 drivers/perf/thunderx2_pmu.c 	switch (tx2_pmu->type) {
tx2_pmu           656 drivers/perf/thunderx2_pmu.c 		tx2_pmu->max_counters = TX2_PMU_MAX_COUNTERS;
tx2_pmu           657 drivers/perf/thunderx2_pmu.c 		tx2_pmu->prorate_factor = TX2_PMU_L3_TILES;
tx2_pmu           658 drivers/perf/thunderx2_pmu.c 		tx2_pmu->max_events = L3_EVENT_MAX;
tx2_pmu           659 drivers/perf/thunderx2_pmu.c 		tx2_pmu->hrtimer_interval = TX2_PMU_HRTIMER_INTERVAL;
tx2_pmu           660 drivers/perf/thunderx2_pmu.c 		tx2_pmu->attr_groups = l3c_pmu_attr_groups;
tx2_pmu           661 drivers/perf/thunderx2_pmu.c 		tx2_pmu->name = devm_kasprintf(dev, GFP_KERNEL,
tx2_pmu           662 drivers/perf/thunderx2_pmu.c 				"uncore_l3c_%d", tx2_pmu->node);
tx2_pmu           663 drivers/perf/thunderx2_pmu.c 		tx2_pmu->init_cntr_base = init_cntr_base_l3c;
tx2_pmu           664 drivers/perf/thunderx2_pmu.c 		tx2_pmu->start_event = uncore_start_event_l3c;
tx2_pmu           665 drivers/perf/thunderx2_pmu.c 		tx2_pmu->stop_event = uncore_stop_event_l3c;
tx2_pmu           668 drivers/perf/thunderx2_pmu.c 		tx2_pmu->max_counters = TX2_PMU_MAX_COUNTERS;
tx2_pmu           669 drivers/perf/thunderx2_pmu.c 		tx2_pmu->prorate_factor = TX2_PMU_DMC_CHANNELS;
tx2_pmu           670 drivers/perf/thunderx2_pmu.c 		tx2_pmu->max_events = DMC_EVENT_MAX;
tx2_pmu           671 drivers/perf/thunderx2_pmu.c 		tx2_pmu->hrtimer_interval = TX2_PMU_HRTIMER_INTERVAL;
tx2_pmu           672 drivers/perf/thunderx2_pmu.c 		tx2_pmu->attr_groups = dmc_pmu_attr_groups;
tx2_pmu           673 drivers/perf/thunderx2_pmu.c 		tx2_pmu->name = devm_kasprintf(dev, GFP_KERNEL,
tx2_pmu           674 drivers/perf/thunderx2_pmu.c 				"uncore_dmc_%d", tx2_pmu->node);
tx2_pmu           675 drivers/perf/thunderx2_pmu.c 		tx2_pmu->init_cntr_base = init_cntr_base_dmc;
tx2_pmu           676 drivers/perf/thunderx2_pmu.c 		tx2_pmu->start_event = uncore_start_event_dmc;
tx2_pmu           677 drivers/perf/thunderx2_pmu.c 		tx2_pmu->stop_event = uncore_stop_event_dmc;
tx2_pmu           680 drivers/perf/thunderx2_pmu.c 		devm_kfree(dev, tx2_pmu);
tx2_pmu           684 drivers/perf/thunderx2_pmu.c 	return tx2_pmu;
tx2_pmu           690 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           703 drivers/perf/thunderx2_pmu.c 	tx2_pmu = tx2_uncore_pmu_init_dev((struct device *)data,
tx2_pmu           706 drivers/perf/thunderx2_pmu.c 	if (!tx2_pmu)
tx2_pmu           709 drivers/perf/thunderx2_pmu.c 	if (tx2_uncore_pmu_add_dev(tx2_pmu)) {
tx2_pmu           719 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           721 drivers/perf/thunderx2_pmu.c 	tx2_pmu = hlist_entry_safe(hpnode,
tx2_pmu           727 drivers/perf/thunderx2_pmu.c 	if ((tx2_pmu->cpu >= nr_cpu_ids) &&
tx2_pmu           728 drivers/perf/thunderx2_pmu.c 		(tx2_pmu->node == cpu_to_node(cpu)))
tx2_pmu           729 drivers/perf/thunderx2_pmu.c 		tx2_pmu->cpu = cpu;
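Lines 719-729 are the hotplug-online handler. It adopts an incoming CPU only when the PMU is currently orphaned (cpu >= nr_cpu_ids) and the new CPU sits on the PMU's node; otherwise the current servicing CPU is kept. Reconstructed around the visible lines:

static int tx2_uncore_pmu_online_cpu(unsigned int cpu,
				     struct hlist_node *hpnode)
{
	struct tx2_uncore_pmu *tx2_pmu;

	tx2_pmu = hlist_entry_safe(hpnode, struct tx2_uncore_pmu, hpnode);

	/* Adopt this CPU only if the PMU's node lost its last CPU earlier. */
	if ((tx2_pmu->cpu >= nr_cpu_ids) &&
	    (tx2_pmu->node == cpu_to_node(cpu)))
		tx2_pmu->cpu = cpu;

	return 0;
}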
tx2_pmu           738 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu;
tx2_pmu           741 drivers/perf/thunderx2_pmu.c 	tx2_pmu = hlist_entry_safe(hpnode,
tx2_pmu           744 drivers/perf/thunderx2_pmu.c 	if (cpu != tx2_pmu->cpu)
tx2_pmu           747 drivers/perf/thunderx2_pmu.c 	hrtimer_cancel(&tx2_pmu->hrtimer);
tx2_pmu           751 drivers/perf/thunderx2_pmu.c 			cpumask_of_node(tx2_pmu->node),
tx2_pmu           754 drivers/perf/thunderx2_pmu.c 	tx2_pmu->cpu = new_cpu;
tx2_pmu           757 drivers/perf/thunderx2_pmu.c 	perf_pmu_migrate_context(&tx2_pmu->pmu, cpu, new_cpu);
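Lines 738-757 are the offline side: if the dying CPU is the one servicing this PMU, cancel the poll timer, pick a surviving CPU on the same node, and migrate the perf context so active events keep counting. A sketch; the early return when the node has no online CPU left is an assumption filling elided lines:

static int tx2_uncore_pmu_offline_cpu(unsigned int cpu,
				      struct hlist_node *hpnode)
{
	unsigned int new_cpu;
	struct tx2_uncore_pmu *tx2_pmu;

	tx2_pmu = hlist_entry_safe(hpnode, struct tx2_uncore_pmu, hpnode);
	if (cpu != tx2_pmu->cpu)
		return 0;

	/* Quiesce polling before the context moves. */
	hrtimer_cancel(&tx2_pmu->hrtimer);

	new_cpu = cpumask_any_and(
			cpumask_of_node(tx2_pmu->node),
			cpu_online_mask);

	tx2_pmu->cpu = new_cpu;
	if (new_cpu >= nr_cpu_ids)
		return 0;	/* orphaned; the online handler re-adopts later */

	perf_pmu_migrate_context(&tx2_pmu->pmu, cpu, new_cpu);
	return 0;
}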
tx2_pmu           798 drivers/perf/thunderx2_pmu.c 	struct tx2_uncore_pmu *tx2_pmu, *temp;
tx2_pmu           802 drivers/perf/thunderx2_pmu.c 		list_for_each_entry_safe(tx2_pmu, temp, &tx2_pmus, entry) {
tx2_pmu           803 drivers/perf/thunderx2_pmu.c 			if (tx2_pmu->node == dev_to_node(dev)) {
tx2_pmu           806 drivers/perf/thunderx2_pmu.c 					&tx2_pmu->hpnode);
tx2_pmu           807 drivers/perf/thunderx2_pmu.c 				perf_pmu_unregister(&tx2_pmu->pmu);
tx2_pmu           808 drivers/perf/thunderx2_pmu.c 				list_del(&tx2_pmu->entry);
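Lines 798-808 are the teardown: walk the global tx2_pmus list and, for each PMU on the departing device's node, drop the hotplug instance, unregister from perf and unlink. The _safe iterator is required because entries are deleted mid-walk. A sketch, again assuming the hotplug state constant:

static int tx2_uncore_remove(struct platform_device *pdev)
{
	struct tx2_uncore_pmu *tx2_pmu, *temp;
	struct device *dev = &pdev->dev;

	if (!list_empty(&tx2_pmus)) {
		list_for_each_entry_safe(tx2_pmu, temp, &tx2_pmus, entry) {
			if (tx2_pmu->node == dev_to_node(dev)) {
				cpuhp_state_remove_instance_nocalls(
					CPUHP_AP_PERF_ARM_CAVIUM_TX2_UNCORE_ONLINE,
					&tx2_pmu->hpnode);
				perf_pmu_unregister(&tx2_pmu->pmu);
				list_del(&tx2_pmu->entry);
			}
		}
	}
	return 0;
}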