summaryrefslogtreecommitdiffstats
path: root/lib
diff options
context:
space:
mode:
authorFan Du <fan.du@windriver.com>2013-07-04 00:05:19 +0200
committerLinus Torvalds <torvalds@linux-foundation.org>2013-07-04 01:07:43 +0200
commit64df3071a97f20767f63b88c573791691a855b5c (patch)
treed7f8de21c425465f6e790c5de74d2dc54214b9e3 /lib
parentipw2200: convert __list_for_each usage to list_for_each (diff)
downloadlinux-64df3071a97f20767f63b88c573791691a855b5c.tar.xz
linux-64df3071a97f20767f63b88c573791691a855b5c.zip
lib/percpu_counter.c: __this_cpu_write() doesn't need to be protected by spinlock
__this_cpu_write doesn't need to be protected by spinlock, as we are doing a per-cpu write with preemption disabled. Another reason to move __this_cpu_write outside of the spinlock: __percpu_counter_sum is not an accurate counter anyway. Signed-off-by: Fan Du <fan.du@windriver.com> Signed-off-by: Andrew Morton <akpm@linux-foundation.org> Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'lib')
-rw-r--r--lib/percpu_counter.c2
1 file changed, 1 insertion, 1 deletion
diff --git a/lib/percpu_counter.c b/lib/percpu_counter.c
index ba6085d9c741..1fc23a3277e1 100644
--- a/lib/percpu_counter.c
+++ b/lib/percpu_counter.c
@@ -80,8 +80,8 @@ void __percpu_counter_add(struct percpu_counter *fbc, s64 amount, s32 batch)
if (count >= batch || count <= -batch) {
raw_spin_lock(&fbc->lock);
fbc->count += count;
- __this_cpu_write(*fbc->counters, 0);
raw_spin_unlock(&fbc->lock);
+ __this_cpu_write(*fbc->counters, 0);
} else {
__this_cpu_write(*fbc->counters, count);
}