[PATCH] fix i386 cmpxchg

(Note: Patch also attached because the inline version is certain to get
line wrapped.)

This adjusts i386's cmpxchg patterns so that
- for the word and long cmpxchg variants the compiler can utilize all
  general-purpose registers rather than only %eax/%ebx/%ecx/%edx (see
  the sketch after this list)
- cmpxchg8b gets disabled when the minimum specified hardware
  architecture doesn't support it (as was already happening for the
  byte, word, and long variants).
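
For illustration only (not part of the patch): a minimal standalone
sketch of what the relaxed 4-byte case boils down to. The name
cmpxchg4_sketch is made up for this example; in the kernel the real
implementation is the __cmpxchg() switch in the hunk below.

static inline unsigned long cmpxchg4_sketch(volatile unsigned long *ptr,
					    unsigned long old,
					    unsigned long new)
{
	unsigned long prev;

	/*
	 * With "r", 'new' may sit in any general register (%esi, %edi,
	 * %ebp, ...); the former "q" constraint restricted it to
	 * %eax/%ebx/%ecx/%edx, which is only needed for the byte-sized
	 * cmpxchgb.
	 */
	__asm__ __volatile__("lock; cmpxchgl %1,%2"
			     : "=a" (prev)
			     : "r" (new), "m" (*ptr), "0" (old)
			     : "memory");
	return prev;
}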

Signed-off-by: Jan Beulich <[email protected]>

diff -Npru 2.6.13/arch/i386/Kconfig 2.6.13-i386-cmpxchg/arch/i386/Kconfig
--- 2.6.13/arch/i386/Kconfig	2005-08-29 01:41:01.000000000 +0200
+++ 2.6.13-i386-cmpxchg/arch/i386/Kconfig	2005-09-01 11:45:29.000000000 +0200
@@ -412,6 +412,11 @@ config X86_POPAD_OK
 	depends on !M386
 	default y
 
+config X86_CMPXCHG64
+	bool
+	depends on !M386 && !M486 && !MCYRIXIII && !MGEODEGX1
+	default y
+
 config X86_ALIGNMENT_16
 	bool
 	depends on MWINCHIP3D || MWINCHIP2 || MWINCHIPC6 || MCYRIXIII || X86_ELAN || MK6 || M586MMX || M586TSC || M586 || M486 || MVIAC3_2 || MGEODEGX1
diff -Npru 2.6.13/include/asm-i386/system.h 2.6.13-i386-cmpxchg/include/asm-i386/system.h
--- 2.6.13/include/asm-i386/system.h	2005-08-29 01:41:01.000000000 +0200
+++ 2.6.13-i386-cmpxchg/include/asm-i386/system.h	2005-09-07 11:37:25.000000000 +0200
@@ -149,6 +149,8 @@ struct __xchg_dummy { unsigned long a[10
 #define __xg(x) ((struct __xchg_dummy *)(x))
 
 
+#ifdef CONFIG_X86_CMPXCHG64
+
 /*
  * The semantics of XCHGCMP8B are a bit strange, this is why
  * there is a loop and the loading of %%eax and %%edx has to
@@ -203,6 +205,8 @@ static inline void __set_64bit_var (unsi
  __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : \
  __set_64bit(ptr, ll_low(value), ll_high(value)) )
 
+#endif
+
 /*
  * Note: no "lock" prefix even on SMP: xchg always implies lock
anyway
  * Note 2: xchg has side effect, so that attribute volatile is
necessary,
@@ -241,7 +245,6 @@ static inline unsigned long __xchg(unsig
 
 #ifdef CONFIG_X86_CMPXCHG
 #define __HAVE_ARCH_CMPXCHG 1
-#endif
 
 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 				      unsigned long new, int size)
@@ -257,13 +260,13 @@ static inline unsigned long __cmpxchg(vo
 	case 2:
 		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
 				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
 				     : "memory");
 		return prev;
 	case 4:
 		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %1,%2"
 				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
 				     : "memory");
 		return prev;
 	}
@@ -273,6 +276,30 @@ static inline unsigned long __cmpxchg(vo
 #define cmpxchg(ptr,o,n)\
 	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
 					(unsigned long)(n),sizeof(*(ptr))))
+
+#endif
+
+#ifdef CONFIG_X86_CMPXCHG64
+
+static inline unsigned long long __cmpxchg64(volatile void *ptr, unsigned long long old,
+				      unsigned long long new)
+{
+	unsigned long long prev;
+	__asm__ __volatile__(LOCK_PREFIX "cmpxchg8b %3"
+			     : "=A"(prev)
+			     : "b"((unsigned long)new),
+			       "c"((unsigned long)(new >> 32)),
+			       "m"(*__xg(ptr)),
+			       "0"(old)
+			     : "memory");
+	return prev;
+}
+
+#define cmpxchg64(ptr,o,n)\
+	((__typeof__(*(ptr)))__cmpxchg64((ptr),(unsigned long long)(o),\
+					(unsigned long long)(n)))
+
+#endif
     
 #ifdef __KERNEL__
 struct alt_instr { 
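
For completeness, a hypothetical caller of the new cmpxchg64()
(illustration only, not part of the patch; bump64 is a made-up name):
it atomically increments a 64-bit counter on 32-bit x86, and only
builds when CONFIG_X86_CMPXCHG64 is set, which is exactly what the
Kconfig change above provides for.

static unsigned long long bump64(volatile unsigned long long *ctr)
{
	unsigned long long old, new;

	/*
	 * Classic cmpxchg loop: re-read, compute, and retry whenever
	 * another CPU changed the counter in the meantime.
	 */
	do {
		old = *ctr;
		new = old + 1;
	} while (cmpxchg64(ctr, old, new) != old);

	return new;
}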

Attachment: linux-2.6.13-i386-cmpxchg.patch
Description: Binary data

