Fix x86_64 _spin_lock_irqsave()

From: Edward Falk

Add __raw_spin_lock_string_flags and __raw_spin_lock_flags() to
asm-x86_64/spinlock.h so that _spin_lock_irqsave() has the same
semantics on x86_64 as it does on i386 and does *not* have interrupts
disabled while it is waiting for the lock.

This fix is courtesy of Michael Davidson.

Signed-off-by: Andi Kleen

---

 include/asm-x86_64/spinlock.h |   26 ++++++++++++++++++++++++--
 1 files changed, 24 insertions(+), 2 deletions(-)

Index: linux/include/asm-x86_64/spinlock.h
===================================================================
--- linux.orig/include/asm-x86_64/spinlock.h
+++ linux/include/asm-x86_64/spinlock.h
@@ -33,6 +33,23 @@
 #define __raw_spin_lock_string_up \
 	"\n\tdecl %0"
 
+#define __raw_spin_lock_string_flags \
+	"\n1:\t" \
+	"lock ; decb %0\n\t" \
+	"js 2f\n\t" \
+	LOCK_SECTION_START("") \
+	"2:\t" \
+	"test $0x200, %1\n\t" \
+	"jz 3f\n\t" \
+	"sti\n\t" \
+	"3:\t" \
+	"rep;nop\n\t" \
+	"cmpb $0, %0\n\t" \
+	"jle 3b\n\t" \
+	"cli\n\t" \
+	"jmp 1b\n" \
+	LOCK_SECTION_END
+
 #define __raw_spin_unlock_string \
 	"movl $1,%0" \
 		:"=m" (lock->slock) : : "memory"
@@ -45,8 +62,6 @@ static inline void __raw_spin_lock(raw_s
 		:"=m" (lock->slock) : : "memory");
 }
 
-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
-
 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
 	int oldval;
@@ -59,6 +74,13 @@ static inline int __raw_spin_trylock(raw
 	return oldval > 0;
 }
 
+static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
+{
+	__asm__ __volatile__(
+		__raw_spin_lock_string_flags
+		:"=m" (lock->slock) : "r" (flags) : "memory");
+}
+
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	__asm__ __volatile__(
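
For reference, the new wait loop is easier to follow in C. The sketch
below is a minimal model of what __raw_spin_lock_flags() now does; the
model_* helpers and model_spinlock_t are hypothetical stand-ins for the
sti/cli/"rep;nop" instructions and the kernel's byte lock, not actual
kernel API. 0x200 is the interrupt-enable (IF) bit of EFLAGS, which is
what the "test $0x200, %1" in the assembly checks.

#include <stdatomic.h>

#define X86_EFLAGS_IF 0x200UL	/* IF bit in the saved EFLAGS */

/* Hypothetical models of the sti, cli and rep;nop instructions. */
static void model_irq_enable(void)  { /* sti */ }
static void model_irq_disable(void) { /* cli */ }
static void model_cpu_relax(void)   { /* rep;nop */ }

typedef struct { atomic_int slock; } model_spinlock_t;	/* 1 = unlocked */

static void model_spin_lock_flags(model_spinlock_t *lock,
				  unsigned long flags)
{
	for (;;) {
		/* "lock ; decb %0 ; js 2f": atomically try to take the lock. */
		if (atomic_fetch_sub(&lock->slock, 1) > 0)
			return;		/* slock went 1 -> 0: lock taken */

		/*
		 * Slow path.  If the caller had interrupts enabled when it
		 * saved flags, re-enable them while spinning, so we no
		 * longer wait for the lock with interrupts disabled.
		 */
		if (flags & X86_EFLAGS_IF)
			model_irq_enable();

		/* "rep;nop ; cmpb $0,%0 ; jle 3b": spin until it looks free. */
		while (atomic_load(&lock->slock) <= 0)
			model_cpu_relax();

		/* "cli ; jmp 1b": interrupts off again, then retry the decb. */
		model_irq_disable();
	}
}

Note that the interrupt window only opens while the CPU is spinning; the
lock itself is always re-acquired with interrupts disabled, so the caller
still returns with interrupts off, exactly as _spin_lock_irqsave()
expects.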