author    Ingo Molnar <mingo@elte.hu>    2008-03-14 23:48:28 +0100
committer Ingo Molnar <mingo@elte.hu>    2008-03-15 03:02:49 +0100
commit    e89996ae3f9e88d4fd75751a15c10b19d197e702 (patch)
tree      b4b754697995e3ebff4e987b46167263e86ff0d8 /kernel
parent    3fe69747dab906cd6a8523230276a9820d6a514f (diff)
sched: fix update_load_add()/sub()
Clear the cached inverse value when updating load. This is needed for
calc_delta_mine() to work correctly when using the rq load.

Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Peter Zijlstra <a.p.zijlstra@chello.nl>
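For context, the cached inverse works roughly as follows: calc_delta_mine()
needs delta * weight / lw->weight, and rather than dividing every time it
keeps a fixed-point reciprocal in lw->inv_weight, recomputing it only when
that field is zero. The sketch below is illustrative only, not the kernel's
actual code: calc_delta_sketch(), the WMULT_SHIFT value, and the assumption
of a 64-bit unsigned long are all made up for this example, and the kernel's
overflow handling is omitted.

#include <stdint.h>

#define WMULT_SHIFT	32	/* fixed-point shift; value assumed for illustration */

struct load_weight {
	unsigned long weight;
	unsigned long inv_weight;	/* cached (1 << WMULT_SHIFT) / weight, 0 = invalid */
};

/* Hypothetical stand-in for calc_delta_mine(): delta * weight / lw->weight */
static unsigned long
calc_delta_sketch(unsigned long delta, unsigned long weight, struct load_weight *lw)
{
	/*
	 * Lazily recompute the reciprocal. If inv_weight is never cleared
	 * after lw->weight changes, this branch is skipped and a stale
	 * reciprocal is reused -- the bug this patch fixes.
	 */
	if (!lw->inv_weight)
		lw->inv_weight = (unsigned long)((1ULL << WMULT_SHIFT) / lw->weight);

	/* Overflow handling omitted for brevity. */
	return (unsigned long)(((uint64_t)delta * weight * lw->inv_weight) >> WMULT_SHIFT);
}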
Diffstat (limited to 'kernel')
-rw-r--r--    kernel/sched.c    2
1 files changed, 2 insertions, 0 deletions
diff --git a/kernel/sched.c b/kernel/sched.c
index 9df9ba7..3a4ba3d 100644
--- a/kernel/sched.c
+++ b/kernel/sched.c
@@ -1108,11 +1108,13 @@ calc_delta_fair(unsigned long delta_exec, struct load_weight *lw)
 static inline void update_load_add(struct load_weight *lw, unsigned long inc)
 {
 	lw->weight += inc;
+	lw->inv_weight = 0;
 }
 
 static inline void update_load_sub(struct load_weight *lw, unsigned long dec)
 {
 	lw->weight -= dec;
+	lw->inv_weight = 0;
 }
 
 /*
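With the patch applied, the first calc_delta_mine() call after a weight
update sees inv_weight == 0 and recomputes the reciprocal. A hypothetical
sequence against the sketch above (names and values are illustrative only):

static void example(void)
{
	struct load_weight lw = { .weight = 1024, .inv_weight = 0 };

	calc_delta_sketch(1000, 1024, &lw);	/* caches the reciprocal of 1024 */

	lw.weight += 2048;			/* what update_load_add() did before... */
	lw.inv_weight = 0;			/* ...plus the line this patch adds */

	calc_delta_sketch(1000, 1024, &lw);	/* recomputes for weight == 3072 */
}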