---
 include/asm-x86/percpu.h |   30 ++++++++++++++++++------------
 1 file changed, 18 insertions(+), 12 deletions(-)

Index: linux-2.6/include/asm-x86/percpu.h
===================================================================
--- linux-2.6.orig/include/asm-x86/percpu.h	2007-11-27 21:07:50.600463718 -0800
+++ linux-2.6/include/asm-x86/percpu.h	2007-11-27 21:20:32.368963117 -0800
@@ -11,9 +11,14 @@
 
 #ifdef CONFIG_SMP
 #include <asm/pda.h>
-
 #define __my_cpu_offset read_pda(data_offset)
+#define __percpu_seg "%%gs:"
+/* Calculate the offset to use with the segment register */
+#define per_cpu_offset(name) *SHIFT_PTR(&per_cpu_var(name), -__per_cpu_start)
+#else
+#define __percpu_seg ""
+#define per_cpu_offset(name) per_cpu_var(name)
 #endif
 
 #include <asm-generic/percpu.h>
 
@@ -61,16 +66,11 @@ DECLARE_PER_CPU(struct x8664_pda, pda);
  *	PER_CPU(cpu_gdt_descr, %ebx)
  */
 #ifdef CONFIG_SMP
-
 #define __my_cpu_offset x86_read_percpu(this_cpu_off)
-
 /* fs segment starts at (positive) offset == __per_cpu_offset[cpu] */
 #define __percpu_seg "%%fs:"
-
 #else  /* !SMP */
-
 #define __percpu_seg ""
-
 #endif	/* SMP */
 
 #include <asm-generic/percpu.h>
@@ -78,6 +78,13 @@ DECLARE_PER_CPU(struct x8664_pda, pda);
 /* We can use this directly for local CPU (faster). */
 DECLARE_PER_CPU(unsigned long, this_cpu_off);
 
+#define per_cpu_offset(name) per_cpu_var(name)
+
+#endif /* __ASSEMBLY__ */
+#endif /* !CONFIG_X86_64 */
+
+#ifndef __ASSEMBLY__
+
 /* For arch-specific code, we can use direct single-insn ops (they
  * don't give an lvalue though). */
 extern void __bad_percpu_size(void);
@@ -129,11 +136,10 @@ extern void __bad_percpu_size(void);
 	}					\
 	ret__; })
 
-#define x86_read_percpu(var) percpu_from_op("mov", per_cpu__##var)
-#define x86_write_percpu(var,val) percpu_to_op("mov", per_cpu__##var, val)
-#define x86_add_percpu(var,val) percpu_to_op("add", per_cpu__##var, val)
-#define x86_sub_percpu(var,val) percpu_to_op("sub", per_cpu__##var, val)
-#define x86_or_percpu(var,val) percpu_to_op("or", per_cpu__##var, val)
+#define x86_read_percpu(var) percpu_from_op("mov", per_cpu_offset(var))
+#define x86_write_percpu(var,val) percpu_to_op("mov", per_cpu_offset(var), val)
+#define x86_add_percpu(var,val) percpu_to_op("add", per_cpu_offset(var), val)
+#define x86_sub_percpu(var,val) percpu_to_op("sub", per_cpu_offset(var), val)
+#define x86_or_percpu(var,val) percpu_to_op("or", per_cpu_offset(var), val)
 
 #endif /* !__ASSEMBLY__ */
-#endif /* !CONFIG_X86_64 */
 #endif /* _ASM_X86_PERCPU_H_ */
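
What the new per_cpu_offset() buys on SMP: the x86 ops stop addressing the
canonical per_cpu__##var symbol directly and instead emit its distance from
__per_cpu_start as a displacement behind the __percpu_seg prefix (%gs on
x86-64, %fs on i386). Since the segment base points at the current CPU's
per-cpu area, a single mov/add/sub/or reaches the right copy without going
through smp_processor_id(). The dereference in *SHIFT_PTR(...) keeps the
expression an lvalue, so it still works as the "m"/"+m" operand of
percpu_from_op()/percpu_to_op().

For illustration only, here is a user-space sketch of that address
arithmetic. SHIFT_PTR here is a local stand-in for the kernel macro, and
struct pcpu_vars, pcpu_template and cpu_area are invented for the demo; the
segment bases are simulated with plain base pointers:

#include <stdio.h>

/* Stand-in for the kernel's SHIFT_PTR: displace a pointer by a byte offset. */
#define SHIFT_PTR(ptr, off) \
	((typeof(ptr))((unsigned long)(ptr) + (unsigned long)(off)))

/* Fake .data.percpu section: the canonical copies, contiguous in memory. */
struct pcpu_vars {
	long irq_count;			/* some other per-cpu variable */
	long counter;			/* the one we access below */
};
static struct pcpu_vars pcpu_template = { .irq_count = 0, .counter = 42 };

#define per_cpu__counter (pcpu_template.counter)

int main(void)
{
	char *__per_cpu_start = (char *)&pcpu_template;
	struct pcpu_vars cpu_area[2];	/* each CPU's private copy; the
					 * %gs/%fs base would point here */
	unsigned long off;
	long *cpu1_counter;

	cpu_area[0] = pcpu_template;
	cpu_area[1] = pcpu_template;

	/*
	 * per_cpu_offset(counter): shift the symbol's address down by
	 * __per_cpu_start, as *SHIFT_PTR(&per_cpu_var(counter),
	 * -__per_cpu_start) does in the patch; the result is the
	 * variable's offset within the per-cpu area.
	 */
	off = (unsigned long)SHIFT_PTR(&per_cpu__counter,
				       -(unsigned long)__per_cpu_start);

	/* "%%gs:off" addressing, simulated as explicit base + offset. */
	cpu1_counter = (long *)((char *)&cpu_area[1] + off);
	*cpu1_counter += 1;

	printf("cpu0 counter = %ld\n",
	       *(long *)((char *)&cpu_area[0] + off));	/* 42 */
	printf("cpu1 counter = %ld\n", *cpu1_counter);	/* 43 */
	return 0;
}

Built with gcc (typeof is a GNU extension), this prints 42 for cpu0 and 43
for cpu1: the same offset applied to different bases selects each CPU's
private copy, which is exactly what the segment prefix achieves in one
instruction.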