Remove alternative_smp The .fill causes miscompilations with some binutils versions. Instead just patch the lock prefix in the lock constructs. That is the majority of the cost and should be good enough. Cc: kraxel@suse.de Signed-off-by: Andi Kleen --- include/asm-x86_64/alternative.h | 20 -------------------- include/asm-x86_64/spinlock.h | 12 +++--------- 2 files changed, 3 insertions(+), 29 deletions(-) Index: linux/include/asm-x86_64/alternative.h =================================================================== --- linux.orig/include/asm-x86_64/alternative.h +++ linux/include/asm-x86_64/alternative.h @@ -103,9 +103,6 @@ static inline void alternatives_smp_swit /* * Alternative inline assembly for SMP. * - * alternative_smp() takes two versions (SMP first, UP second) and is - * for more complex stuff such as spinlocks. - * * The LOCK_PREFIX macro defined here replaces the LOCK and * LOCK_PREFIX macros used everywhere in the source tree. * @@ -125,21 +122,6 @@ static inline void alternatives_smp_swit */ #ifdef CONFIG_SMP -#define alternative_smp(smpinstr, upinstr, args...) \ - asm volatile ("661:\n\t" smpinstr "\n662:\n" \ - ".section .smp_altinstructions,\"a\"\n" \ - " .align 8\n" \ - " .quad 661b\n" /* label */ \ - " .quad 663f\n" /* new instruction */ \ - " .byte " __stringify(X86_FEATURE_UP) "\n" \ - " .byte 662b-661b\n" /* sourcelen */ \ - " .byte 664f-663f\n" /* replacementlen */ \ - ".previous\n" \ - ".section .smp_altinstr_replacement,\"awx\"\n" \ - "663:\n\t" upinstr "\n" /* replacement */ \ - "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \ - ".previous" : args) - #define LOCK_PREFIX \ ".section .smp_locks,\"a\"\n" \ " .align 8\n" \ @@ -148,8 +130,6 @@ static inline void alternatives_smp_swit " .quad 661f\n" /* address */ \ ".previous\n" \ "661:\n\tlock; " #else /* ! CONFIG_SMP */ -#define alternative_smp(smpinstr, upinstr, args...) 
\ - asm volatile (upinstr : args) #define LOCK_PREFIX "" #endif Index: linux/include/asm-x86_64/spinlock.h =================================================================== --- linux.orig/include/asm-x86_64/spinlock.h +++ linux/include/asm-x86_64/spinlock.h @@ -21,7 +21,7 @@ #define __raw_spin_lock_string \ "\n0:\t" \ - "lock ; decl %0\n\t" \ + LOCK_PREFIX " ; decl %0\n\t" \ "jns 2f\n" \ "1:\n\t" \ "rep;nop\n\t" \ @@ -30,12 +30,9 @@ "jmp 0b\n" \ "2:\t" -#define __raw_spin_lock_string_up \ - "\n\tdecl %0" - #define __raw_spin_lock_string_flags \ "\n1:\t" \ - "lock ; decb %0\n\t" \ + LOCK_PREFIX "; decb %0\n\t" \ "js 2f\n\t" \ LOCK_SECTION_START("") \ "2:\t" \ @@ -56,10 +53,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock) { - alternative_smp( - __raw_spin_lock_string, - __raw_spin_lock_string_up, - "=m" (lock->slock) : : "memory"); + asm(__raw_spin_lock_string : "=m" (lock->slock) : : "memory"); } static inline int __raw_spin_trylock(raw_spinlock_t *lock)