---
 arch/x86/include/asm/percpu.h |   14 +++----
 include/linux/percpu.h        |   81 ++++++++++++++++++++++++++++++++++--------
 2 files changed, 73 insertions(+), 22 deletions(-)

Index: linux-2.6/include/linux/percpu.h
===================================================================
--- linux-2.6.orig/include/linux/percpu.h	2009-10-02 18:05:37.000000000 -0500
+++ linux-2.6/include/linux/percpu.h	2009-10-02 18:15:31.000000000 -0500
@@ -250,13 +250,13 @@ do {							\
 
 extern void __bad_size_call_parameter(void);
 
-#define __size_call_return(stem, variable)			\
+#define __size_call_return(stem, variable, ...)			\
 ({	typeof(variable) ret__;					\
 	switch(sizeof(variable)) {				\
-	case 1: ret__ = stem##1(variable);break;		\
-	case 2: ret__ = stem##2(variable);break;		\
-	case 4: ret__ = stem##4(variable);break;		\
-	case 8: ret__ = stem##8(variable);break;		\
+	case 1: ret__ = stem##1(variable, __VA_ARGS__);break;	\
+	case 2: ret__ = stem##2(variable, __VA_ARGS__);break;	\
+	case 4: ret__ = stem##4(variable, __VA_ARGS__);break;	\
+	case 8: ret__ = stem##8(variable, __VA_ARGS__);break;	\
 	default:						\
 		__bad_size_call_parameter();break;		\
 	}							\
@@ -312,18 +312,18 @@ do {							\
 
 #ifndef this_cpu_read
 # ifndef this_cpu_read_1
-#  define this_cpu_read_1(pcp)		_this_cpu_generic_read(pcp)
+#  define this_cpu_read_1(pcp, x)	_this_cpu_generic_read(pcp)
 # endif
 # ifndef this_cpu_read_2
-#  define this_cpu_read_2(pcp)		_this_cpu_generic_read(pcp)
+#  define this_cpu_read_2(pcp, x)	_this_cpu_generic_read(pcp)
 # endif
 # ifndef this_cpu_read_4
-#  define this_cpu_read_4(pcp)		_this_cpu_generic_read(pcp)
+#  define this_cpu_read_4(pcp, x)	_this_cpu_generic_read(pcp)
 # endif
 # ifndef this_cpu_read_8
-#  define this_cpu_read_8(pcp)		_this_cpu_generic_read(pcp)
+#  define this_cpu_read_8(pcp, x)	_this_cpu_generic_read(pcp)
 # endif
-# define this_cpu_read(pcp)	__size_call_return(this_cpu_read_, (pcp))
+# define this_cpu_read(pcp)	__size_call_return(this_cpu_read_, (pcp),)
 #endif
 
 #define _this_cpu_generic_to_op(pcp, val, op)			\
@@ -425,6 +425,34 @@ do {							\
 # define this_cpu_xor(pcp, val)	__size_call(this_cpu_or_, (pcp), (val))
 #endif
 
+#define _this_cpu_generic_cmpxchg(pcp, val, new)		\
+({	typeof(pcp) ret__;					\
+	unsigned long flags;					\
+	local_irq_save(flags);					\
+	ret__ = *__this_cpu_ptr(&(pcp));			\
+	if (ret__ == (val))					\
+		*__this_cpu_ptr(&(pcp)) = (new);		\
+	local_irq_restore(flags);				\
+	ret__;							\
+})
+
+#ifndef this_cpu_cmpxchg
+# ifndef this_cpu_cmpxchg_1
+#  define this_cpu_cmpxchg_1(pcp, val, new)	_this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# ifndef this_cpu_cmpxchg_2
+#  define this_cpu_cmpxchg_2(pcp, val, new)	_this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# ifndef this_cpu_cmpxchg_4
+#  define this_cpu_cmpxchg_4(pcp, val, new)	_this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# ifndef this_cpu_cmpxchg_8
+#  define this_cpu_cmpxchg_8(pcp, val, new)	_this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# define this_cpu_cmpxchg(pcp, val, new)	__size_call_return(this_cpu_cmpxchg_, (pcp), (val), (new))
+#endif
+
+
 /*
  * Generic percpu operations that do not require preemption handling.
  * Either we do not care about races or the caller has the
@@ -441,18 +469,18 @@ do {							\
  */
 #ifndef __this_cpu_read
 # ifndef __this_cpu_read_1
-#  define __this_cpu_read_1(pcp)	(*__this_cpu_ptr(&(pcp)))
+#  define __this_cpu_read_1(pcp, x)	(*__this_cpu_ptr(&(pcp)))
 # endif
 # ifndef __this_cpu_read_2
-#  define __this_cpu_read_2(pcp)	(*__this_cpu_ptr(&(pcp)))
+#  define __this_cpu_read_2(pcp, x)	(*__this_cpu_ptr(&(pcp)))
 # endif
 # ifndef __this_cpu_read_4
-#  define __this_cpu_read_4(pcp)	(*__this_cpu_ptr(&(pcp)))
+#  define __this_cpu_read_4(pcp, x)	(*__this_cpu_ptr(&(pcp)))
 # endif
 # ifndef __this_cpu_read_8
-#  define __this_cpu_read_8(pcp)	(*__this_cpu_ptr(&(pcp)))
+#  define __this_cpu_read_8(pcp, x)	(*__this_cpu_ptr(&(pcp)))
 # endif
-# define __this_cpu_read(pcp)	__size_call_return(__this_cpu_read_, (pcp))
+# define __this_cpu_read(pcp)	__size_call_return(__this_cpu_read_, (pcp),)
 #endif
 
 #define __this_cpu_generic_to_op(pcp, val, op)			\
@@ -460,6 +488,13 @@ do {							\
 	*__this_cpu_ptr(&(pcp)) op val;				\
 } while (0)
 
+#define __this_cpu_generic_cmpxchg(pcp, val, new)		\
+({	typeof(pcp) ret__ = *__this_cpu_ptr(&(pcp));		\
+	if (ret__ == (val))					\
+		*__this_cpu_ptr(&(pcp)) = (new);		\
+	ret__;							\
+})
+
 #ifndef __this_cpu_write
 # ifndef __this_cpu_write_1
 #  define __this_cpu_write_1(pcp, val)	__this_cpu_generic_to_op((pcp), (val), =)
@@ -552,6 +587,22 @@ do {							\
 # define __this_cpu_xor(pcp, val)	__size_call(__this_cpu_xor_, (pcp), (val))
 #endif
 
+#ifndef __this_cpu_cmpxchg
+# ifndef __this_cpu_cmpxchg_1
+#  define __this_cpu_cmpxchg_1(pcp, val, new)	__this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# ifndef __this_cpu_cmpxchg_2
+#  define __this_cpu_cmpxchg_2(pcp, val, new)	__this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# ifndef __this_cpu_cmpxchg_4
+#  define __this_cpu_cmpxchg_4(pcp, val, new)	__this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# ifndef __this_cpu_cmpxchg_8
+#  define __this_cpu_cmpxchg_8(pcp, val, new)	__this_cpu_generic_cmpxchg((pcp), (val), (new))
+# endif
+# define __this_cpu_cmpxchg(pcp, val, new)	__size_call_return(__this_cpu_cmpxchg_, (pcp), (val), (new))
+#endif
+
 /*
  * IRQ safe versions of the per cpu RMW operations. Note that these operations
  * are *not* safe against modification of the same variable from another
Index: linux-2.6/arch/x86/include/asm/percpu.h
===================================================================
--- linux-2.6.orig/arch/x86/include/asm/percpu.h	2009-10-02 18:13:27.000000000 -0500
+++ linux-2.6/arch/x86/include/asm/percpu.h	2009-10-02 18:14:03.000000000 -0500
@@ -153,9 +153,9 @@ do {							\
 #define percpu_or(var, val)	percpu_to_op("or", per_cpu__##var, val)
 #define percpu_xor(var, val)	percpu_to_op("xor", per_cpu__##var, val)
 
-#define __this_cpu_read_1(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
-#define __this_cpu_read_2(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
-#define __this_cpu_read_4(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
+#define __this_cpu_read_1(pcp, x)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define __this_cpu_read_2(pcp, x)	percpu_from_op("mov", (pcp), "m"(pcp))
+#define __this_cpu_read_4(pcp, x)	percpu_from_op("mov", (pcp), "m"(pcp))
 
 #define __this_cpu_write_1(pcp, val)	percpu_to_op("mov", (pcp), val)
 #define __this_cpu_write_2(pcp, val)	percpu_to_op("mov", (pcp), val)
@@ -173,9 +173,9 @@ do {							\
 #define __this_cpu_xor_2(pcp, val)	percpu_to_op("xor", (pcp), val)
 #define __this_cpu_xor_4(pcp, val)	percpu_to_op("xor", (pcp), val)
 
-#define this_cpu_read_1(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
-#define this_cpu_read_2(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
-#define this_cpu_read_4(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
+#define this_cpu_read_1(pcp, x)		percpu_from_op("mov", (pcp), "m"(pcp))
+#define this_cpu_read_2(pcp, x)		percpu_from_op("mov", (pcp), "m"(pcp))
+#define this_cpu_read_4(pcp, x)		percpu_from_op("mov", (pcp), "m"(pcp))
 
 #define this_cpu_write_1(pcp, val)	percpu_to_op("mov", (pcp), val)
 #define this_cpu_write_2(pcp, val)	percpu_to_op("mov", (pcp), val)
 #define this_cpu_write_4(pcp, val)	percpu_to_op("mov", (pcp), val)
@@ -210,7 +210,7 @@ do {							\
  * 32 bit must fall back to generic operations.
  */
 #ifdef CONFIG_X86_64
-#define __this_cpu_read_8(pcp)		percpu_from_op("mov", (pcp), "m"(pcp))
+#define __this_cpu_read_8(pcp, x)	percpu_from_op("mov", (pcp), "m"(pcp))
 #define __this_cpu_write_8(pcp, val)	percpu_to_op("mov", (pcp), val)
 #define __this_cpu_add_8(pcp, val)	percpu_to_op("add", (pcp), val)
 #define __this_cpu_and_8(pcp, val)	percpu_to_op("and", (pcp), val)