diff --git a/include/asm-i386/alternative.h b/include/asm-i386/alternative.h
index bb3b6317c810..b01a7ec409ce 100644
--- a/include/asm-i386/alternative.h
+++ b/include/asm-i386/alternative.h
@@ -88,9 +88,6 @@ static inline void alternatives_smp_switch(int smp) {}
 /*
  * Alternative inline assembly for SMP.
  *
- * alternative_smp() takes two versions (SMP first, UP second) and is
- * for more complex stuff such as spinlocks.
- *
  * The LOCK_PREFIX macro defined here replaces the LOCK and
  * LOCK_PREFIX macros used everywhere in the source tree.
  *
@@ -110,21 +107,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 
 #ifdef CONFIG_SMP
-#define alternative_smp(smpinstr, upinstr, args...)			\
-	asm volatile ("661:\n\t" smpinstr "\n662:\n" 			\
-		      ".section .smp_altinstructions,\"a\"\n"		\
-		      "  .align 4\n"					\
-		      "  .long 661b\n"            /* label */		\
-		      "  .long 663f\n"		  /* new instruction */	\
-		      "  .byte " __stringify(X86_FEATURE_UP) "\n"	\
-		      "  .byte 662b-661b\n"       /* sourcelen */	\
-		      "  .byte 664f-663f\n"       /* replacementlen */	\
-		      ".previous\n"					\
-		      ".section .smp_altinstr_replacement,\"awx\"\n"   	\
-		      "663:\n\t" upinstr "\n"     /* replacement */	\
-		      "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
-		      ".previous" : args)
-
 #define LOCK_PREFIX \
 		".section .smp_locks,\"a\"\n"	\
 		"  .align 4\n"			\
@@ -133,8 +115,6 @@ static inline void alternatives_smp_switch(int smp) {}
 	       	"661:\n\tlock; "
 
 #else /* ! CONFIG_SMP */
-#define alternative_smp(smpinstr, upinstr, args...) \
-	asm volatile (upinstr : args)
 #define LOCK_PREFIX ""
 #endif
 
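
For context: with alternative_smp() gone, LOCK_PREFIX is the only SMP
rewriting left in this header. Each use of it records the address of the
emitted 0xf0 lock prefix in the .smp_locks section, and a UP boot
overwrites that single byte with a nop. A minimal sketch of the consumer,
modeled on alternatives_smp_unlock() in arch/i386/kernel/alternative.c
(the name smp_unlock_sketch and the local u8 typedef are illustrative
only):

typedef unsigned char u8;

/* For each recorded .smp_locks entry that points into this text
 * range, overwrite the 0xf0 lock prefix with a 0x90 nop. */
static void smp_unlock_sketch(u8 **start, u8 **end, u8 *text, u8 *text_end)
{
	u8 **ptr;

	for (ptr = start; ptr < end; ptr++) {
		if (*ptr < text || *ptr > text_end)
			continue;
		**ptr = 0x90;	/* nop */
	}
}

Because only the one-byte prefix is patched, the locked instructions can
stay in place, which is what makes the heavier alternative_smp()
machinery unnecessary for locks.
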
diff --git a/include/asm-i386/rwlock.h b/include/asm-i386/rwlock.h
index 96b0bef2ea56..3ac1ba98b1bc 100644
--- a/include/asm-i386/rwlock.h
+++ b/include/asm-i386/rwlock.h
@@ -21,22 +21,20 @@
 #define RW_LOCK_BIAS_STR	"0x01000000"
 
 #define __build_read_lock_ptr(rw, helper)   \
-	alternative_smp("lock; subl $1,(%0)\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $1,(%0)\n\t" \
 			"jns 1f\n" \
 			"call " helper "\n\t" \
-			"1:\n", \
-			"subl $1,(%0)\n\t", \
+			"1:\n" : \
 			:"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper)   \
-	alternative_smp("lock; subl $1,%0\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $1,%0\n\t" \
 			"jns 1f\n" \
 			"pushl %%eax\n\t" \
 			"leal %0,%%eax\n\t" \
 			"call " helper "\n\t" \
 			"popl %%eax\n\t" \
-			"1:\n", \
-			"subl $1,%0\n\t", \
+			"1:\n" : \
 			"+m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper)	do { \
@@ -47,20 +45,18 @@
 					} while (0)
 
 #define __build_write_lock_ptr(rw, helper) \
-	alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
 			"jz 1f\n" \
 			"call " helper "\n\t" \
-			"1:\n", \
-			"subl $" RW_LOCK_BIAS_STR ",(%0)\n\t", \
+			"1:\n" : \
 			:"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
-	alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
+	asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
 			"jz 1f\n" \
 			"pushl %%eax\n\t" \
 			"leal %0,%%eax\n\t" \
 			"call " helper "\n\t" \
 			"popl %%eax\n\t" \
-			"1:\n", \
-			"subl $" RW_LOCK_BIAS_STR ",%0\n\t", \
+			"1:\n" : \
 			"+m" (*(volatile int *)rw) : : "memory")
diff --git a/include/asm-i386/spinlock.h b/include/asm-i386/spinlock.h
index d816c62a7a1d..d1020363c41a 100644
--- a/include/asm-i386/spinlock.h
+++ b/include/asm-i386/spinlock.h
@@ -22,7 +22,7 @@
 
 #define __raw_spin_lock_string \
 	"\n1:\t" \
-	"lock ; decb %0\n\t" \
+	LOCK_PREFIX " ; decb %0\n\t" \
 	"jns 3f\n" \
 	"2:\t" \
 	"rep;nop\n\t" \
@@ -38,7 +38,7 @@
  */
 #define __raw_spin_lock_string_flags \
 	"\n1:\t" \
-	"lock ; decb %0\n\t" \
+	LOCK_PREFIX " ; decb %0\n\t" \
 	"jns 5f\n" \
 	"2:\t" \
 	"testl $0x200, %1\n\t" \
@@ -57,15 +57,9 @@
 	"jmp 4b\n" \
 	"5:\n\t"
 
-#define __raw_spin_lock_string_up \
-	"\n\tdecb %0"
-
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
-	alternative_smp(
-		__raw_spin_lock_string,
-		__raw_spin_lock_string_up,
-		"+m" (lock->slock) : : "memory");
+	asm(__raw_spin_lock_string : "+m" (lock->slock) : : "memory");
 }
 
 /*
@@ -76,10 +70,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 #ifndef CONFIG_PROVE_LOCKING
 static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
 {
-	alternative_smp(
-		__raw_spin_lock_string_flags,
-		__raw_spin_lock_string_up,
-		"+m" (lock->slock) : "r" (flags) : "memory");
+	asm(__raw_spin_lock_string_flags : "+m" (lock->slock) : "r" (flags) : "memory");
 }
 #endif
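
To make the retained fast path concrete, here is a user-space sketch of
the loop that __raw_spin_lock_string encodes, with LOCK_PREFIX folded
back to a literal lock prefix. The demo_* names and the main() driver
are illustrative only (assumes GCC inline asm on an x86 target); 1 means
unlocked, matching raw_spinlock_t:

#include <stdio.h>

static volatile char demo_slock = 1;	/* 1 == unlocked */

static void demo_spin_lock(volatile char *slock)
{
	asm volatile("\n1:\t"
		     "lock ; decb %0\n\t"	/* atomically grab the byte */
		     "jns 3f\n"			/* went 1 -> 0: acquired */
		     "2:\t"
		     "rep;nop\n\t"		/* pause while spinning */
		     "cmpb $0,%0\n\t"
		     "jle 2b\n\t"		/* still held, keep waiting */
		     "jmp 1b\n"			/* looks free, race again */
		     "3:\n\t"
		     : "+m" (*slock) : : "memory");
}

static void demo_spin_unlock(volatile char *slock)
{
	asm volatile("movb $1,%0" : "+m" (*slock) : : "memory");
}

int main(void)
{
	demo_spin_lock(&demo_slock);
	printf("locked: %d\n", demo_slock);	/* 0 */
	demo_spin_unlock(&demo_slock);
	printf("unlocked: %d\n", demo_slock);	/* 1 */
	return 0;
}

On SMP kernels LOCK_PREFIX additionally records the prefix byte in
.smp_locks so it can be nopped out when only one CPU is present; the
locking loop itself is identical.
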