---
 include/asm-x86/pda.h       |   11 ++---------
 include/asm-x86/percpu_64.h |   34 +++++++++++++++++++++++++++++++++-
 2 files changed, 35 insertions(+), 10 deletions(-)

Index: linux-2.6/include/asm-x86/pda.h
===================================================================
--- linux-2.6.orig/include/asm-x86/pda.h	2007-11-19 17:08:40.767874989 -0800
+++ linux-2.6/include/asm-x86/pda.h	2007-11-19 17:08:40.871625076 -0800
@@ -53,14 +53,7 @@ extern struct x8664_pda _proxy_pda;
 #define add_pda(field,val)	CPU_ADD(per_cpu_var(pda).field, val)
 #define sub_pda(field,val)	CPU_ADD(per_cpu_var(pda).field, val)
 #define or_pda(field,val)	CPU_OR(per_cpu_var(pda).field, val)
-
-/* This is not atomic against other CPUs -- CPU preemption needs to be off */
-#define test_and_clear_bit_pda(bit,field) ({		\
-	int old__;					\
-	asm volatile("btr %2,%%gs:%c3\n\tsbbl %0,%0"	\
-	    : "=r" (old__), "+m" (_proxy_pda.field)	\
-	    : "dIr" (bit), "i" (pda_offset(field)) : "memory");	\
-	old__;						\
-})
+#define test_and_clear_bit_pda(bit, field) \
+	CPU_TEST_AND_CLEAR(per_cpu_var(pda).field, bit)
 
 #endif
Index: linux-2.6/include/asm-x86/percpu_64.h
===================================================================
--- linux-2.6.orig/include/asm-x86/percpu_64.h	2007-11-19 17:08:40.767874989 -0800
+++ linux-2.6/include/asm-x86/percpu_64.h	2007-11-19 17:08:40.871625076 -0800
@@ -304,6 +304,8 @@ static inline unsigned long __cmpxchg_lo
 	((__typeof__(obj))__cmpxchg_local_gs(&(obj),(unsigned long)(o),\
 					(unsigned long)(n),sizeof(obj)))
 
+/* x86 only functions */
+
 static inline void __cpu_or_gs(volatile void *ptr,
 			long data, int size)
 {
@@ -331,6 +333,36 @@ static inline void __cpu_or_gs(volatile
 #define cpu_or_gs(obj, value)\
 	__cpu_or_gs(&(obj), (unsigned long)value, sizeof(obj))
 
+/* This is not atomic against other CPUs -- CPU preemption needs to be off */
+static inline int __cpu_test_and_clear_gs(volatile void *ptr,
+					int bit, int size)
+{
+	int result;
+
+	switch (size) {
+	case 1:
+		__asm__ __volatile__("btr %2,%%gs:%1\n\tsbbl %0,%0"
+			: "=r" (result), "+m" (*__xp(ptr))
+			: "dIr" (bit) : "memory");
+		return result;
+	case 2:
+		__asm__ __volatile__("btr %2,%%gs:%1\n\tsbbl %0,%0"
+			: "=r" (result), "+m" (*__xp(ptr))
+			: "dIr" (bit) : "memory");
+		return result;
+	case 4:
+		__asm__ __volatile__("btr %2,%%gs:%1\n\tsbbl %0,%0"
+			: "=r" (result), "+m" (*__xp(ptr))
+			: "dIr" (bit) : "memory");
+		return result;
+	case 8:
+		__asm__ __volatile__("btr %2,%%gs:%1\n\tsbbl %0,%0"
+			: "=r" (result), "+m" (*__xp(ptr))
+			: "dIr" (bit) : "memory");
+		return result;
+	}
+	BUG();
+}
+
+#define cpu_test_and_clear_gs(obj, bit)\
+	__cpu_test_and_clear_gs(&(obj), (int)(bit), sizeof(obj))
+
 #define CPU_READ(obj)		cpu_read_gs(obj)
 #define CPU_WRITE(obj,val)	cpu_write_gs(obj, val)
 #define CPU_ADD(obj,val)	cpu_add_gs(obj, val)
@@ -338,7 +370,7 @@ static inline void __cpu_or_gs(volatile
 #define CPU_INC(obj)		cpu_inc_gs(obj)
 #define CPU_DEC(obj)		cpu_dec_gs(obj)
 #define CPU_OR(obj, val)	cpu_or_gs(obj, val)
-
+#define CPU_TEST_AND_CLEAR(obj, bit)	cpu_test_and_clear_gs(obj, bit)
 #define CPU_XCHG(obj,val)	cpu_xchg_gs(obj, val)
 #define CPU_CMPXCHG(obj, old, new) cmpxchg_local_gs(obj, old, new)