imbalance_pct      79 include/linux/sched/topology.h 	unsigned int imbalance_pct;	/* No balance until over watermark */
imbalance_pct     259 kernel/sched/debug.c 	set_table_entry(&table[3], "imbalance_pct",	  &sd->imbalance_pct,	    sizeof(int),  0644, proc_dointvec_minmax);
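The field is a percentage watermark: balancing between groups is only worth doing once the busier group exceeds the local one by the margin encoded above 100 (for example, 125 means "at least 25% busier"), and the debug.c sysctl entry exposes the per-domain value as a writable tunable. Below is a minimal standalone sketch of that gating check; the loads and the should_balance() helper are illustrative, not kernel code.

	#include <stdio.h>
	#include <stdbool.h>

	/* Hypothetical helper: true when the busiest group's load exceeds the
	 * local group's load by more than the imbalance_pct watermark
	 * (e.g. 117 => more than 17% higher). */
	static bool should_balance(unsigned long busiest_load,
				   unsigned long local_load,
				   unsigned int imbalance_pct)
	{
		return busiest_load * 100 > local_load * imbalance_pct;
	}

	int main(void)
	{
		/* Made-up loads against an MC-level watermark of 117. */
		printf("%d\n", should_balance(1200, 1000, 117)); /* 120000 > 117000 -> 1 */
		printf("%d\n", should_balance(1100, 1000, 117)); /* 110000 > 117000 -> 0 */
		return 0;
	}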
imbalance_pct    1512 kernel/sched/fair.c 	int imbalance_pct;
imbalance_pct    1745 kernel/sched/fair.c 		.imbalance_pct = 112,
imbalance_pct    1769 kernel/sched/fair.c 		env.imbalance_pct = 100 + (sd->imbalance_pct - 100) / 2;
imbalance_pct    5531 kernel/sched/fair.c 		prev_eff_load *= 100 + (sd->imbalance_pct - 100) / 2;
imbalance_pct    5589 kernel/sched/fair.c 	int imbalance_scale = 100 + (sd->imbalance_pct-100)/2;
imbalance_pct    5591 kernel/sched/fair.c 				(sd->imbalance_pct-100) / 100;
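Several of the fair.c hits above (1769, 5531, 5589) soften the domain's watermark rather than use it directly: they keep 100 as the baseline and halve only the margin above it, so the default of 125 becomes 112, which lines up with the literal 112 in the NUMA env at 1745. A small sketch of that computation, with a hypothetical helper name:

	#include <stdio.h>

	/* Hypothetical helper: keep 100 as the baseline and halve the margin. */
	static unsigned int halved_imbalance_pct(unsigned int imbalance_pct)
	{
		return 100 + (imbalance_pct - 100) / 2;
	}

	int main(void)
	{
		printf("%u\n", halved_imbalance_pct(125));	/* 112 */
		printf("%u\n", halved_imbalance_pct(117));	/* 108 */
		printf("%u\n", halved_imbalance_pct(110));	/* 105 */
		return 0;
	}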
imbalance_pct    7891 kernel/sched/fair.c 	return ((rq->cpu_capacity * sd->imbalance_pct) <
imbalance_pct    7960 kernel/sched/fair.c 			(sgs->group_util * env->sd->imbalance_pct))
imbalance_pct    7981 kernel/sched/fair.c 			(sgs->group_util * env->sd->imbalance_pct))
imbalance_pct    8618 kernel/sched/fair.c 				env->sd->imbalance_pct * local->avg_load)
imbalance_pct    8765 kernel/sched/fair.c 		    (capacity_of(env->src_cpu)*sd->imbalance_pct < capacity_of(env->dst_cpu)*100))
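The remaining fair.c comparisons share one integer-arithmetic idiom: instead of dividing, one side is scaled by 100 and the other by imbalance_pct, so "A is more than imbalance_pct% of B" becomes "A * 100 > B * imbalance_pct" (or the mirrored form for "A has fallen more than the margin below B", as at 7891 and 8765). A standalone sketch of that cross-multiplication; the capacities and the helper name are illustrative only.

	#include <stdio.h>
	#include <stdbool.h>

	/* Hypothetical helper: true when src capacity has dropped below dst
	 * capacity by more than the imbalance_pct margin (e.g. capacity lost
	 * to RT/IRQ pressure on the source CPU). */
	static bool src_capacity_reduced(unsigned long src_capacity,
					 unsigned long dst_capacity,
					 unsigned int imbalance_pct)
	{
		return src_capacity * imbalance_pct < dst_capacity * 100;
	}

	int main(void)
	{
		printf("%d\n", src_capacity_reduced(800, 1024, 125)); /* 100000 < 102400 -> 1 */
		printf("%d\n", src_capacity_reduced(900, 1024, 125)); /* 112500 < 102400 -> 0 */
		return 0;
	}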
imbalance_pct    1345 kernel/sched/topology.c 		.imbalance_pct		= 125,
imbalance_pct    1394 kernel/sched/topology.c 		sd->imbalance_pct = 110;
imbalance_pct    1397 kernel/sched/topology.c 		sd->imbalance_pct = 117;
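The topology.c hits show where the defaults come from: sd_init() starts every domain at 125 and then lowers the watermark for levels whose CPUs share resources, 110 where they share CPU capacity (SMT siblings) and 117 where they share the package cache (MC). The sketch below mirrors that selection; the enum and function are illustrative stand-ins, only the constants 110/117/125 come from the listing above.

	#include <stdio.h>

	/* Illustrative stand-in for the per-level choice made in sd_init(). */
	enum sd_level { SD_SMT, SD_MC, SD_OTHER };

	static unsigned int default_imbalance_pct(enum sd_level level)
	{
		switch (level) {
		case SD_SMT:
			return 110;	/* siblings share CPU capacity: balance eagerly */
		case SD_MC:
			return 117;	/* cores share the package cache */
		default:
			return 125;	/* crossing packages/nodes: require a 25% imbalance */
		}
	}

	int main(void)
	{
		printf("SMT=%u MC=%u other=%u\n",
		       default_imbalance_pct(SD_SMT),
		       default_imbalance_pct(SD_MC),
		       default_imbalance_pct(SD_OTHER));
		return 0;
	}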