Initial INC MR3 commit with EVO/BRAVO included and the majority of the compile warnings ...
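
This also carries the cmpxchg_64.h constraint fix: xchg() and cmpxchg()
modify their memory operand, so it moves from the input list (plain "m",
which tells gcc the asm only reads it) to the output list as "+m"
(read-write). The "memory" clobber has hidden the problem with some gcc
versions, but it is not a substitute for the correct constraint. While at
it, set_64bit() now takes u64 instead of unsigned long, the unused
_set_64bit alias is dropped, and __sync_cmpxchg() gains the previously
missing 8-byte case.

A minimal sketch of the constraint change, reduced to a single operand
width (xchg_u64_sketch is an illustrative name, not part of this tree):

	static inline unsigned long xchg_u64_sketch(volatile unsigned long *ptr,
						    unsigned long x)
	{
		/* "+m" marks *ptr as read AND written by the asm */
		asm volatile("xchgq %0,%1"
			     : "=r" (x), "+m" (*ptr)
			     : "0" (x)
			     : "memory");
		return x;	/* previous value of *ptr */
	}
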
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index 52de72e0de8c882c3673fcff25c315f270db977f..e8cb051b8681e4be97c803076716f8c287ada567
--- arch/x86/include/asm/cmpxchg_64.h
+++ arch/x86/include/asm/cmpxchg_64.h
@@ -8,13 +8,11 @@
#define __xg(x) ((volatile long *)(x))
-static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
+static inline void set_64bit(volatile u64 *ptr, u64 val)
{
*ptr = val;
}
-#define _set_64bit set_64bit
-
/*
* Note: no "lock" prefix even on SMP: xchg always implies lock anyway
* Note 2: xchg has side effect, so that attribute volatile is necessary,
@@ -26,26 +24,26 @@ static inline unsigned long __xchg(unsig
switch (size) {
case 1:
asm volatile("xchgb %b0,%1"
- : "=q" (x)
- : "m" (*__xg(ptr)), "0" (x)
+ : "=q" (x), "+m" (*__xg(ptr))
+ : "0" (x)
: "memory");
break;
case 2:
asm volatile("xchgw %w0,%1"
- : "=r" (x)
- : "m" (*__xg(ptr)), "0" (x)
+ : "=r" (x), "+m" (*__xg(ptr))
+ : "0" (x)
: "memory");
break;
case 4:
asm volatile("xchgl %k0,%1"
- : "=r" (x)
- : "m" (*__xg(ptr)), "0" (x)
+ : "=r" (x), "+m" (*__xg(ptr))
+ : "0" (x)
: "memory");
break;
case 8:
asm volatile("xchgq %0,%1"
- : "=r" (x)
- : "m" (*__xg(ptr)), "0" (x)
+ : "=r" (x), "+m" (*__xg(ptr))
+ : "0" (x)
: "memory");
break;
}
@@ -66,27 +64,27 @@ static inline unsigned long __cmpxchg(vo
unsigned long prev;
switch (size) {
case 1:
- asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
- : "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile(LOCK_PREFIX "cmpxchgb %b2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "q"(new), "0"(old)
: "memory");
return prev;
case 2:
- asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile(LOCK_PREFIX "cmpxchgw %w2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
case 4:
- asm volatile(LOCK_PREFIX "cmpxchgl %k1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile(LOCK_PREFIX "cmpxchgl %k2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
case 8:
- asm volatile(LOCK_PREFIX "cmpxchgq %1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile(LOCK_PREFIX "cmpxchgq %2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
}
@@ -105,21 +103,27 @@ static inline unsigned long __sync_cmpxc
unsigned long prev;
switch (size) {
case 1:
- asm volatile("lock; cmpxchgb %b1,%2"
- : "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile("lock; cmpxchgb %b2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "q"(new), "0"(old)
: "memory");
return prev;
case 2:
- asm volatile("lock; cmpxchgw %w1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile("lock; cmpxchgw %w2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
case 4:
- asm volatile("lock; cmpxchgl %1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile("lock; cmpxchgl %k2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
+ : "memory");
+ return prev;
+ case 8:
+ asm volatile("lock; cmpxchgq %2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
}
@@ -133,27 +137,27 @@ static inline unsigned long __cmpxchg_lo
unsigned long prev;
switch (size) {
case 1:
- asm volatile("cmpxchgb %b1,%2"
- : "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile("cmpxchgb %b2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "q"(new), "0"(old)
: "memory");
return prev;
case 2:
- asm volatile("cmpxchgw %w1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile("cmpxchgw %w2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
case 4:
- asm volatile("cmpxchgl %k1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile("cmpxchgl %k2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
case 8:
- asm volatile("cmpxchgq %1,%2"
- : "=a"(prev)
- : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ asm volatile("cmpxchgq %2,%1"
+ : "=a"(prev), "+m"(*__xg(ptr))
+ : "r"(new), "0"(old)
: "memory");
return prev;
}
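
Note on the added __sync_cmpxchg() case 8: before this change a 64-bit
operand fell through the switch, so no cmpxchgq was ever emitted for it.
A hedged sketch of what the new case boils down to (sync_cmpxchg64_sketch
is an illustrative name, not part of this tree):

	static inline unsigned long sync_cmpxchg64_sketch(volatile unsigned long *ptr,
							  unsigned long old,
							  unsigned long new)
	{
		unsigned long prev;

		/* if (*ptr == old) *ptr = new; rax returns the prior *ptr */
		asm volatile("lock; cmpxchgq %2,%1"
			     : "=a" (prev), "+m" (*ptr)
			     : "r" (new), "0" (old)
			     : "memory");
		return prev;	/* equals 'old' iff the store happened */
	}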