Lines Matching refs:per_cpu_ptr

5859 sibling = *per_cpu_ptr(sdd->sd, i); in build_group_mask()
5894 sibling = *per_cpu_ptr(sdd->sd, i); in build_overlap_sched_groups()
5914 sg->sgc = *per_cpu_ptr(sdd->sgc, i); in build_overlap_sched_groups()
5953 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in get_group()
5960 *sg = *per_cpu_ptr(sdd->sg, cpu); in get_group()
5961 (*sg)->sgc = *per_cpu_ptr(sdd->sgc, cpu); in get_group()
6134 WARN_ON_ONCE(*per_cpu_ptr(sdd->sd, cpu) != sd); in claim_allocations()
6135 *per_cpu_ptr(sdd->sd, cpu) = NULL; in claim_allocations()
6137 if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref)) in claim_allocations()
6138 *per_cpu_ptr(sdd->sg, cpu) = NULL; in claim_allocations()
6140 if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref)) in claim_allocations()
6141 *per_cpu_ptr(sdd->sgc, cpu) = NULL; in claim_allocations()
6174 struct sched_domain *sd = *per_cpu_ptr(tl->data.sd, cpu); in sd_init()
6617 *per_cpu_ptr(sdd->sd, j) = sd; in __sdt_alloc()
6626 *per_cpu_ptr(sdd->sg, j) = sg; in __sdt_alloc()
6633 *per_cpu_ptr(sdd->sgc, j) = sgc; in __sdt_alloc()
6652 sd = *per_cpu_ptr(sdd->sd, j); in __sdt_free()
6655 kfree(*per_cpu_ptr(sdd->sd, j)); in __sdt_free()
6659 kfree(*per_cpu_ptr(sdd->sg, j)); in __sdt_free()
6661 kfree(*per_cpu_ptr(sdd->sgc, j)); in __sdt_free()
6730 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
6740 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
6757 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
6766 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()
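
Every hit above follows the same shape: the sdd->sd, sdd->sg and sdd->sgc fields are per-CPU arrays of pointers, so *per_cpu_ptr(p, cpu) reads or writes the pointer slot belonging to that CPU. Below is a minimal sketch of that allocate/store/free cycle as it appears in the __sdt_alloc() and __sdt_free() hits; the names struct item, slots, slots_init() and slots_free() are illustrative, not taken from the scheduler code.

#include <linux/init.h>
#include <linux/percpu.h>
#include <linux/slab.h>

struct item {
	int val;
};

/* Per-CPU array of pointers, declared like the sdd->sd/sg/sgc fields above. */
static struct item **__percpu slots;

static void slots_free(void)
{
	int cpu;

	if (!slots)
		return;
	for_each_possible_cpu(cpu)
		kfree(*per_cpu_ptr(slots, cpu));	/* kfree(NULL) is a no-op */
	free_percpu(slots);
	slots = NULL;
}

static int __init slots_init(void)
{
	int cpu;

	slots = alloc_percpu(struct item *);	/* zeroed: every slot starts NULL */
	if (!slots)
		return -ENOMEM;

	for_each_possible_cpu(cpu) {
		/*
		 * Same shape as the __sdt_alloc() hits: allocate an object,
		 * then park its pointer in this CPU's slot via *per_cpu_ptr().
		 */
		struct item *it = kzalloc(sizeof(*it), GFP_KERNEL);

		if (!it) {
			slots_free();	/* safe: unfilled slots are still NULL */
			return -ENOMEM;
		}
		it->val = cpu;
		*per_cpu_ptr(slots, cpu) = it;
	}
	return 0;
}

The claim_allocations() hits (6134-6141) show the complementary step: slots whose objects have been handed off to the live topology are set to NULL, so the later kfree pass in __sdt_free() skips them.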