author | Jan Beulich <JBeulich@novell.com> | 2010-04-21 15:21:51 +0100
committer | H. Peter Anvin <hpa@zytor.com> | 2010-04-28 16:58:49 -0700
commit | 402af0d7c692ddcfa2333e93d3f275ebd0487926 (patch)
tree | 08d016caa7dc404d7400281b57fb634108b0291f /arch/x86/include/asm/percpu.h
parent | 1d16b0f2f3edf05f12a9e3960588e0d4854157bb (diff)
x86, asm: Introduce and use percpu_inc()
... generating slightly smaller code.
Signed-off-by: Jan Beulich <jbeulich@novell.com>
LKML-Reference: <4BCF261F020000780003B33C@vpn.id2.novell.com>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
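The "slightly smaller code" comes from the instruction encoding: percpu_inc(var) emits a single inc{b,w,l,q} on the per-CPU slot, whereas the equivalent percpu_add(var, 1) goes through percpu_to_op() and emits an add with a $1 immediate, typically costing one extra byte at every call site. Below is a minimal usage sketch, not part of this patch; the counter and function names are invented for illustration:

```c
#include <linux/percpu.h>

/* Hypothetical per-CPU event counter, made up purely to show the call style. */
static DEFINE_PER_CPU(unsigned long, example_event_count);

static void note_example_event(void)
{
	/* Old style: percpu_add(example_event_count, 1);  -> add $1,<per-CPU slot> */
	/* New style: a plain inc on the per-CPU slot, no immediate operand.        */
	percpu_inc(example_event_count);
}
```

The subject's "and use" refers to call sites outside this header; the diffstat below is filtered to arch/x86/include/asm/percpu.h, so those conversions are not shown here.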
Diffstat (limited to 'arch/x86/include/asm/percpu.h')
-rw-r--r-- | arch/x86/include/asm/percpu.h | 24
1 file changed, 24 insertions, 0 deletions
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
index 66a272dfd8b..0ec6d12d84e 100644
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -190,6 +190,29 @@ do {							\
 	pfo_ret__;					\
 })
 
+#define percpu_unary_op(op, var)			\
+({							\
+	switch (sizeof(var)) {				\
+	case 1:						\
+		asm(op "b "__percpu_arg(0)		\
+		    : "+m" (var));			\
+		break;					\
+	case 2:						\
+		asm(op "w "__percpu_arg(0)		\
+		    : "+m" (var));			\
+		break;					\
+	case 4:						\
+		asm(op "l "__percpu_arg(0)		\
+		    : "+m" (var));			\
+		break;					\
+	case 8:						\
+		asm(op "q "__percpu_arg(0)		\
+		    : "+m" (var));			\
+		break;					\
+	default: __bad_percpu_size();			\
+	}						\
+})
+
 /*
  * percpu_read() makes gcc load the percpu variable every time it is
  * accessed while percpu_read_stable() allows the value to be cached.
@@ -207,6 +230,7 @@ do {							\
 #define percpu_and(var, val)		percpu_to_op("and", var, val)
 #define percpu_or(var, val)		percpu_to_op("or", var, val)
 #define percpu_xor(var, val)		percpu_to_op("xor", var, val)
+#define percpu_inc(var)			percpu_unary_op("inc", var)
 
 #define __this_cpu_read_1(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
 #define __this_cpu_read_2(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
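For reference, this is roughly what the new macro reduces to after preprocessing for a 4-byte variable. It is a sketch only: the variable name is invented, and the exact operand template is produced by __percpu_arg(), which on SMP builds prepends the per-CPU segment prefix (%gs on x86-64, %fs on 32-bit) and omits it on UP.

```c
#include <linux/percpu.h>

/* Hypothetical 4-byte per-CPU counter, defined only for this sketch. */
static DEFINE_PER_CPU(unsigned int, pcp_counter4);

static void bump_pcp_counter4(void)
{
	/*
	 * sizeof(pcp_counter4) == 4, so percpu_inc() takes the "case 4"
	 * arm of percpu_unary_op(), and this call is roughly equivalent to
	 *
	 *	asm("incl %%gs:%P0" : "+m" (pcp_counter4));
	 *
	 * on an x86-64 SMP build: a single read-modify-write inc on the
	 * per-CPU slot, with no immediate byte in the encoding.
	 */
	percpu_inc(pcp_counter4);
}
```

The switch on sizeof() mirrors percpu_to_op() and percpu_from_op() in the same header, so unary operations pick their operand-size suffix the same way the existing binary helpers already do.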