Switch rwlocks over to patchable lock prefix

This way their lock prefix can be patched away on UP.

Signed-off-by: Andi Kleen

---
 include/asm-x86_64/spinlock.h |    6 +++---
 1 files changed, 3 insertions(+), 3 deletions(-)

Index: linux/include/asm-x86_64/spinlock.h
===================================================================
--- linux.orig/include/asm-x86_64/spinlock.h
+++ linux/include/asm-x86_64/spinlock.h
@@ -125,13 +125,13 @@ static inline int __raw_write_trylock(ra
 
 static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
-	asm volatile("lock ; incl %0" :"=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "incl %0" :"=m" (rw->lock) : : "memory");
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-	asm volatile("lock ; addl $" RW_LOCK_BIAS_STR ",%0"
-		: "=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "addl %1,%0"
+		: "=m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
 #endif /* __ASM_SPINLOCK_H */