path: root/kernel/sched_fair.c
author     Peter Zijlstra <a.p.zijlstra@chello.nl>  2008-06-27 13:41:13 +0200
committer  Ingo Molnar <mingo@elte.hu>  2008-06-27 14:31:28 +0200
commit     ced8aa16e1db55c33c507174c1b1f9e107445865 (patch)
tree       87210bb14026a7a3d005b5339dae44d845f27708 /kernel/sched_fair.c
parent     c9c294a630e28eec5f2865f028ecfc58d45c0a5a (diff)
sched: fix calc_delta_asym, #2
Ok, so why are we in this mess, it was:

  1/w

but now we mixed that rw in the mix like:

  rw/w

rw being \Sum w suggests: fiddling w, we should also fiddle rw, humm?

Signed-off-by: Peter Zijlstra <a.p.zijlstra@chello.nl>
Cc: Srivatsa Vaddagiri <vatsa@linux.vnet.ibm.com>
Cc: Mike Galbraith <efault@gmx.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
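To make the "fiddle w, also fiddle rw" argument concrete, here is a minimal userspace sketch, not kernel code. It assumes calc_delta_mine(delta, weight, lw) behaves roughly like delta * weight / lw->weight and that NICE_0_LOAD is 1024; scale_delta() and the example weights are made up for illustration.

/*
 * Sketch only: shows why substituting NICE_0_LOAD for a light entity's
 * weight in the divisor must be mirrored in the runqueue sum rw.
 */
#include <stdio.h>

#define NICE_0_LOAD 1024UL

/* stand-in for calc_delta_mine(): delta * rw / w, rw being \Sum w */
static unsigned long scale_delta(unsigned long delta, unsigned long rw,
				 unsigned long w)
{
	return (unsigned long)((unsigned long long)delta * rw / w);
}

int main(void)
{
	unsigned long delta = 4000, w = 512, rw = 512 + 2048;

	/* pre-patch: w substituted by NICE_0_LOAD, rw left untouched */
	printf("old: %lu\n", scale_delta(delta, rw, NICE_0_LOAD));

	/* post-patch: rw grows by the same amount as w did */
	printf("new: %lu\n",
	       scale_delta(delta, rw + (NICE_0_LOAD - w), NICE_0_LOAD));

	return 0;
}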
Diffstat (limited to 'kernel/sched_fair.c')
-rw-r--r--  kernel/sched_fair.c  9
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/kernel/sched_fair.c b/kernel/sched_fair.c
index 2268e634812..2e197b8e43f 100644
--- a/kernel/sched_fair.c
+++ b/kernel/sched_fair.c
@@ -429,6 +429,7 @@ calc_delta_asym(unsigned long delta, struct sched_entity *se)
 
 	for_each_sched_entity(se) {
 		struct load_weight *se_lw = &se->load;
+		unsigned long rw = cfs_rq_of(se)->load.weight;
 
 #ifdef CONFIG_FAIR_SCHED_GROUP
 		struct cfs_rq *cfs_rq = se->my_q;
@@ -450,14 +451,16 @@ calc_delta_asym(unsigned long delta, struct sched_entity *se)
 			lw.inv_weight = 0;
 
 			se_lw = &lw;
+			rw += lw.weight - se->load.weight;
 		} else
 #endif
 
-		if (se->load.weight < NICE_0_LOAD)
+		if (se->load.weight < NICE_0_LOAD) {
 			se_lw = &lw;
+			rw += NICE_0_LOAD - se->load.weight;
+		}
 
-		delta = calc_delta_mine(delta,
-				cfs_rq_of(se)->load.weight, se_lw);
+		delta = calc_delta_mine(delta, rw, se_lw);
 	}
 
 	return delta;
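For context, a hedged userspace rendering of what the patched loop ends up doing as it walks up the entity hierarchy. The fake_entity struct, the calc_delta() helper and the example weights are invented here for illustration, and only the non-group branch of the diff is modelled.

#include <stdio.h>

#define NICE_0_LOAD 1024UL

struct fake_entity {
	unsigned long weight;		/* stands in for se->load.weight */
	unsigned long rq_weight;	/* stands in for cfs_rq_of(se)->load.weight */
};

/* stand-in for calc_delta_mine(): delta * rw / w */
static unsigned long calc_delta(unsigned long delta, unsigned long rw,
				unsigned long w)
{
	return (unsigned long)((unsigned long long)delta * rw / w);
}

static unsigned long calc_delta_asym_sketch(unsigned long delta,
					    struct fake_entity *path, int depth)
{
	int i;

	for (i = 0; i < depth; i++) {
		unsigned long w = path[i].weight;
		unsigned long rw = path[i].rq_weight;

		/* mirror of the patched branch: fiddle w and rw together */
		if (w < NICE_0_LOAD) {
			rw += NICE_0_LOAD - w;
			w = NICE_0_LOAD;
		}
		delta = calc_delta(delta, rw, w);
	}
	return delta;
}

int main(void)
{
	/* two-level hierarchy: a light task inside a heavier group */
	struct fake_entity path[] = { { 512, 2560 }, { 2048, 3072 } };

	printf("%lu\n", calc_delta_asym_sketch(4000, path, 2));
	return 0;
}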