Searched refs:inv_weight (Results 1 - 3 of 3) sorted by relevance

/linux-4.1.27/kernel/sched/
fair.c
   120  lw->inv_weight = 0;                                        update_load_add()
   126  lw->inv_weight = 0;                                        update_load_sub()
   132  lw->inv_weight = 0;                                        update_load_set()
   189  if (likely(lw->inv_weight))                                __update_inv_weight()
   195  lw->inv_weight = 1;                                        __update_inv_weight()
   197  lw->inv_weight = WMULT_CONST;                              __update_inv_weight()
   199  lw->inv_weight = WMULT_CONST / w;                          __update_inv_weight()
   205  * (delta_exec * (weight * lw->inv_weight)) >> WMULT_SHIFT
   208  * we're guaranteed shift stays positive because inv_weight is guaranteed to
   229  fact = (u64)(u32)fact * lw->inv_weight;                    __calc_delta()
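
The fair.c hits are two halves of one trick: __update_inv_weight() lazily fills in inv_weight as WMULT_CONST / w, and __calc_delta() then scales delta_exec by weight / lw->weight with a multiply and a shift instead of a 64-bit division. Below is a minimal userspace sketch of that arithmetic, assuming WMULT_SHIFT == 32 and WMULT_CONST == ~0U for this kernel series, dropping the leading underscores from the function names, and leaving out the kernel's extra overflow shifting; it illustrates the idea rather than reproducing the kernel code.

/* Sketch only: fixed-point "divide by weight" via a cached reciprocal. */
#include <stdio.h>
#include <stdint.h>

#define WMULT_CONST 0xffffffffU   /* assumed to match ~0U in this series */
#define WMULT_SHIFT 32

struct load_weight {
	unsigned long weight;
	uint32_t inv_weight;      /* cached WMULT_CONST / weight, 0 = stale */
};

/* Lazily rebuild the reciprocal, mirroring the fair.c hits at 189-199. */
static void update_inv_weight(struct load_weight *lw)
{
	unsigned long w = lw->weight;

	if (lw->inv_weight)
		return;
	if (w >= WMULT_CONST)
		lw->inv_weight = 1;
	else if (!w)
		lw->inv_weight = WMULT_CONST;
	else
		lw->inv_weight = WMULT_CONST / w;
}

/* Approximates delta_exec * weight / lw->weight with multiply + shift. */
static uint64_t calc_delta(uint64_t delta_exec, unsigned long weight,
			   struct load_weight *lw)
{
	update_inv_weight(lw);
	/* Unlike __calc_delta(), no intermediate down-shifting: the example
	 * values in main() stay far below the u64 overflow point. */
	return (delta_exec * weight * lw->inv_weight) >> WMULT_SHIFT;
}

int main(void)
{
	struct load_weight cfs_rq_load = { .weight = 1024, .inv_weight = 0 };

	/* 3 ms of runtime for a weight-335 entity on a weight-1024 queue. */
	printf("%llu\n", (unsigned long long)
	       calc_delta(3000000ULL, 335, &cfs_rq_load));
	return 0;
}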
core.c
   797  load->inv_weight = WMULT_IDLEPRIO;                         set_load_weight()
   802  load->inv_weight = prio_to_wmult[prio];                    set_load_weight()
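
In core.c the reciprocal is not computed at run time at all: set_load_weight() copies it from prio_to_wmult[], a per-nice-level table of precomputed ~2^32 / weight values, with WMULT_IDLEPRIO covering the SCHED_IDLE case. The sketch below shows where such numbers come from; the weights are a small hand-picked subset of the usual nice-to-weight mapping, and the array name and the derive-at-startup loop are illustrative, since the kernel ships both prio_to_weight[] and prio_to_wmult[] as constant tables.

/* Sketch: derive per-nice-level reciprocals of the kind prio_to_wmult[] holds. */
#include <stdio.h>
#include <stdint.h>

static const unsigned int nice_weight[] = {
	88761,   /* nice -20 */
	9548,    /* nice -10 */
	1024,    /* nice   0 */
	110,     /* nice  10 */
	15,      /* nice  19 */
};

int main(void)
{
	for (size_t i = 0; i < sizeof(nice_weight) / sizeof(nice_weight[0]); i++) {
		/* Same shape as WMULT_CONST / w in __update_inv_weight(). */
		uint32_t inv = (uint32_t)(0xffffffffULL / nice_weight[i]);

		printf("weight %6u -> inv_weight %10u\n", nice_weight[i], inv);
	}
	return 0;
}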
/linux-4.1.27/include/linux/
sched.h
  1116  u32 inv_weight;                                            member in struct load_weight
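
The sched.h hit is the field itself; together with the fair.c hits at 120/126/132 it spells out the caching protocol: inv_weight only ever caches the reciprocal, and anything that changes weight zeroes it so the next __calc_delta() recomputes it. A small sketch of that invariant, with the companion weight field and the helper bodies assumed from the zero-assignments in the fair.c hits (update_load_set() follows the same pattern and is omitted):

#include <stdio.h>
#include <stdint.h>

struct load_weight {
	unsigned long weight;     /* scaled entity/runqueue weight */
	uint32_t inv_weight;      /* cached reciprocal; 0 means "recompute" */
};

/* Every weight change invalidates the cached reciprocal. */
static void update_load_add(struct load_weight *lw, unsigned long inc)
{
	lw->weight += inc;
	lw->inv_weight = 0;
}

static void update_load_sub(struct load_weight *lw, unsigned long dec)
{
	lw->weight -= dec;
	lw->inv_weight = 0;
}

int main(void)
{
	struct load_weight rq_load = { .weight = 0, .inv_weight = 0 };

	update_load_add(&rq_load, 1024);   /* enqueue a nice-0 entity */
	update_load_sub(&rq_load, 1024);   /* dequeue it again */
	printf("weight=%lu inv_weight=%u\n", rq_load.weight, rq_load.inv_weight);
	return 0;
}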
