author		Ingo Molnar <mingo@elte.hu>	2008-03-14 23:48:28 +0100
committer	Ingo Molnar <mingo@elte.hu>	2008-03-15 03:02:49 +0100
commit		e89996ae3f9e88d4fd75751a15c10b19d197e702 (patch)
tree		b4b754697995e3ebff4e987b46167263e86ff0d8 /kernel/sched.c
parent		sched: min_vruntime fix (diff)
download	linux-e89996ae3f9e88d4fd75751a15c10b19d197e702.tar.xz
		linux-e89996ae3f9e88d4fd75751a15c10b19d197e702.zip
sched: fix update_load_add()/sub()
Clear the cached inverse value when updating load. This is needed for
calc_delta_mine() to work correctly when using the rq load.

Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Peter Zijlstra <a.p.zijlstra@chello.nl>
Diffstat (limited to '')
-rw-r--r--	kernel/sched.c	2
1 file changed, 2 insertions(+), 0 deletions(-)
diff --git a/kernel/sched.c b/kernel/sched.c
index 9df9ba73cb7a..3a4ba3dc0f49 100644
--- a/kernel/sched.c
+++ b/kernel/sched.c
@@ -1108,11 +1108,13 @@ calc_delta_fair(unsigned long delta_exec, struct load_weight *lw)
 static inline void update_load_add(struct load_weight *lw, unsigned long inc)
 {
 	lw->weight += inc;
+	lw->inv_weight = 0;
 }
 
 static inline void update_load_sub(struct load_weight *lw, unsigned long dec)
 {
 	lw->weight -= dec;
+	lw->inv_weight = 0;
 }
 
 /*
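
For context: calc_delta_mine() caches a fixed-point inverse of the weight in ->inv_weight and only recomputes it when the field is zero, so any change to ->weight must also zero the cached inverse or later scaling keeps using a stale value. The user-space sketch below illustrates that lazy-inverse pattern; the names and constants only loosely mirror kernel/sched.c of that era and the math is simplified (no overflow guard), so treat it as an illustration, not the in-tree implementation.

/*
 * Minimal sketch (assumed names, simplified math): a stale cached
 * inverse makes the weighted division come out wrong until the cache
 * is zeroed and recomputed.
 */
#include <stdio.h>
#include <stdint.h>

#define WMULT_SHIFT	32
#define WMULT_CONST	(~0U)	/* ~2^32, basis for the fixed-point inverse */

struct load_weight {
	unsigned long weight;
	unsigned long inv_weight;	/* cached 2^32/weight, 0 means "recompute" */
};

/* delta * weight / lw->weight, done as a multiply by the cached inverse */
static uint64_t calc_delta(uint64_t delta, unsigned long weight,
			   struct load_weight *lw)
{
	if (!lw->inv_weight)	/* lazily rebuild the inverse when it is 0 */
		lw->inv_weight = ((uint64_t)WMULT_CONST + lw->weight / 2) /
				 lw->weight;

	return (delta * weight * lw->inv_weight) >> WMULT_SHIFT;
}

/* The fix: changing ->weight must also invalidate the cached inverse */
static void update_load_add(struct load_weight *lw, unsigned long inc)
{
	lw->weight += inc;
	lw->inv_weight = 0;
}

int main(void)
{
	struct load_weight lw = { .weight = 1024, .inv_weight = 0 };

	/* 1000 * 1024 / 1024 -> 1000 */
	printf("%llu\n", (unsigned long long)calc_delta(1000, 1024, &lw));

	update_load_add(&lw, 1024);	/* total weight is now 2048 */

	/*
	 * 1000 * 1024 / 2048 -> 500; without zeroing ->inv_weight the
	 * stale inverse for weight 1024 would still yield ~1000.
	 */
	printf("%llu\n", (unsigned long long)calc_delta(1000, 1024, &lw));
	return 0;
}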