Lines Matching refs:sched_domain

600 struct sched_domain *sd; in get_nohz_timer_target()
1421 struct sched_domain *sd; in ttwu_stat()
5123 sd_alloc_ctl_domain_table(struct sched_domain *sd) in sd_alloc_ctl_domain_table()
5166 struct sched_domain *sd; in sd_alloc_ctl_cpu_table()
5407 static int sched_domain_debug_one(struct sched_domain *sd, int cpu, int level, in sched_domain_debug_one()
5480 static void sched_domain_debug(struct sched_domain *sd, int cpu) in sched_domain_debug()
5511 static int sd_degenerate(struct sched_domain *sd) in sd_degenerate()
5536 sd_parent_degenerate(struct sched_domain *sd, struct sched_domain *parent) in sd_parent_degenerate()
5698 struct sched_domain *sd = container_of(rcu, struct sched_domain, rcu); in free_sched_domain()
5713 static void destroy_sched_domain(struct sched_domain *sd, int cpu) in destroy_sched_domain()
5718 static void destroy_sched_domains(struct sched_domain *sd, int cpu) in destroy_sched_domains()
5733 DEFINE_PER_CPU(struct sched_domain *, sd_llc);
5736 DEFINE_PER_CPU(struct sched_domain *, sd_numa);
5737 DEFINE_PER_CPU(struct sched_domain *, sd_busy);
5738 DEFINE_PER_CPU(struct sched_domain *, sd_asym);
5742 struct sched_domain *sd; in update_top_cache_domain()
5743 struct sched_domain *busy_sd = NULL; in update_top_cache_domain()
5771 cpu_attach_domain(struct sched_domain *sd, struct root_domain *rd, int cpu) in cpu_attach_domain()
5774 struct sched_domain *tmp; in cpu_attach_domain()
5778 struct sched_domain *parent = tmp->parent; in cpu_attach_domain()
5827 struct sched_domain ** __percpu sd;
5851 static void build_group_mask(struct sched_domain *sd, struct sched_group *sg) in build_group_mask()
5855 struct sched_domain *sibling; in build_group_mask()
5877 build_overlap_sched_groups(struct sched_domain *sd, int cpu) in build_overlap_sched_groups()
5883 struct sched_domain *sibling; in build_overlap_sched_groups()
5953 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in get_group()
5954 struct sched_domain *child = sd->child; in get_group()
5976 build_sched_groups(struct sched_domain *sd, int cpu) in build_sched_groups()
6034 static void init_sched_groups_capacity(int cpu, struct sched_domain *sd) in init_sched_groups_capacity()
6069 static void set_domain_attribute(struct sched_domain *sd, in set_domain_attribute()
6116 d->sd = alloc_percpu(struct sched_domain *); in __visit_domain_allocation_hell()
6130 static void claim_allocations(int cpu, struct sched_domain *sd) in claim_allocations()
6171 static struct sched_domain *
6174 struct sched_domain *sd = *per_cpu_ptr(tl->data.sd, cpu); in sd_init()
6192 *sd = (struct sched_domain){ in sd_init()
6595 sdd->sd = alloc_percpu(struct sched_domain *); in __sdt_alloc()
6608 struct sched_domain *sd; in __sdt_alloc()
6612 sd = kzalloc_node(sizeof(struct sched_domain) + cpumask_size(), in __sdt_alloc()
6649 struct sched_domain *sd; in __sdt_free()
6672 struct sched_domain *build_sched_domain(struct sched_domain_topology_level *tl, in build_sched_domain()
6674 struct sched_domain *child, int cpu) in build_sched_domain()
6676 struct sched_domain *sd = sd_init(tl, cpu); in build_sched_domain()
6714 struct sched_domain *sd; in build_sched_domains()
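
The entries above are raw matches; the sketches below summarize what the main clusters of hits are doing. They are illustrative sketches, not copies of kernel/sched/core.c.

The cluster at lines 5698-5718 is the domain teardown path. Below is a hedged sketch of how it fits together, assuming the usual RCU deferral: free_sched_domain() is the RCU callback (the container_of() on sd->rcu at line 5698 is visible above), destroy_sched_domain() is assumed to defer the kfree() through call_rcu(), and destroy_sched_domains() to walk up the ->parent chain. Group reference-count cleanup is omitted.

/* Hedged sketch only; the real callback also drops sched_group references. */
static void free_sched_domain(struct rcu_head *rcu)
{
	struct sched_domain *sd = container_of(rcu, struct sched_domain, rcu);

	kfree(sd);
}

/* Assumed to defer freeing until RCU readers traversing the domain tree are done. */
static void destroy_sched_domain(struct sched_domain *sd, int cpu)
{
	call_rcu(&sd->rcu, free_sched_domain);
}

static void destroy_sched_domains(struct sched_domain *sd, int cpu)
{
	for (; sd; sd = sd->parent)
		destroy_sched_domain(sd, cpu);
}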
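
Lines 5733-5743 cover the per-CPU cached domain pointers (sd_llc, sd_numa, sd_busy, sd_asym) and update_top_cache_domain(), which refreshes them when domains are rebuilt. Consumers are expected to pick them up under RCU; the helper below, example_use_llc_domain(), is a hypothetical illustration of that reader pattern, not a function from core.c.

static int example_use_llc_domain(int cpu)
{
	struct sched_domain *sd;
	int weight = 0;

	rcu_read_lock();
	/* Assumed publication via rcu_assign_pointer() in update_top_cache_domain(). */
	sd = rcu_dereference(per_cpu(sd_llc, cpu));
	if (sd)
		weight = sd->span_weight;	/* CPUs sharing the last-level cache */
	rcu_read_unlock();

	return weight;
}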
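
The hits at 5511, 5536 and 5771-5778 belong to the degenerate-domain collapse in cpu_attach_domain(): walking tmp->parent and dropping levels that sd_parent_degenerate() says add nothing. A hedged sketch of that loop, with the surrounding rq/root_domain attachment left out:

	struct sched_domain *tmp;

	for (tmp = sd; tmp; ) {
		struct sched_domain *parent = tmp->parent;

		if (!parent)
			break;

		if (sd_parent_degenerate(tmp, parent)) {
			/* Unlink the redundant parent level and free it. */
			tmp->parent = parent->parent;
			if (parent->parent)
				parent->parent->child = tmp;
			destroy_sched_domain(parent, cpu);
		} else {
			tmp = parent;
		}
	}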
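
Finally, lines 6672-6714 are the construction side: build_sched_domain() calls sd_init() for one topology level and chains the result above the level built before it. The sketch below assumes the cpu_map and attr parameters and the span/level bookkeeping that fall outside the matches above.

struct sched_domain *build_sched_domain(struct sched_domain_topology_level *tl,
		const struct cpumask *cpu_map, struct sched_domain_attr *attr,
		struct sched_domain *child, int cpu)
{
	struct sched_domain *sd = sd_init(tl, cpu);

	if (!sd)
		return child;

	/* Restrict this level's span to the CPUs being built (assumed tl->mask()). */
	cpumask_and(sched_domain_span(sd), cpu_map, tl->mask(cpu));

	if (child) {
		sd->level = child->level + 1;
		child->parent = sd;	/* link this level above the previous one */
	}
	sd->child = child;
	set_domain_attribute(sd, attr);

	return sd;
}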