Diffstat (limited to 'include/asm-i386/system.h')
-rw-r--r--   include/asm-i386/system.h   59
1 file changed, 58 insertions(+), 1 deletion(-)
diff --git a/include/asm-i386/system.h b/include/asm-i386/system.h
index c3a58c0..e0454af 100644
--- a/include/asm-i386/system.h
+++ b/include/asm-i386/system.h
@@ -305,6 +305,9 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
#define sync_cmpxchg(ptr,o,n)\
((__typeof__(*(ptr)))__sync_cmpxchg((ptr),(unsigned long)(o),\
(unsigned long)(n),sizeof(*(ptr))))
+#define cmpxchg_local(ptr,o,n)\
+ ((__typeof__(*(ptr)))__cmpxchg_local((ptr),(unsigned long)(o),\
+ (unsigned long)(n),sizeof(*(ptr))))
#endif

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
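A note on what the new macro means: unlike cmpxchg() and sync_cmpxchg(), cmpxchg_local() promises atomicity only with respect to the CPU it runs on, which is enough for data that no other CPU ever touches (per-CPU structures, for instance) and avoids the cost of a locked bus cycle. The equivalent C semantics, as a sketch rather than the kernel code:

/*
 * Sketch of cmpxchg_local() semantics (illustrative, not the kernel
 * implementation): compare *ptr with old and store new only on a match;
 * the previous value is always returned.  The real version does this in
 * a single cmpxchg instruction, so an interrupt cannot split it on the
 * local CPU, but without a LOCK prefix it is not atomic against other
 * CPUs.
 */
static inline unsigned long cmpxchg_local_semantics(unsigned long *ptr,
						    unsigned long old,
						    unsigned long new)
{
	unsigned long prev = *ptr;

	if (prev == old)
		*ptr = new;
	return prev;
}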
@@ -367,6 +370,33 @@ static inline unsigned long __sync_cmpxchg(volatile void *ptr,
return old;
}

+static inline unsigned long __cmpxchg_local(volatile void *ptr,
+ unsigned long old, unsigned long new, int size)
+{
+ unsigned long prev;
+ switch (size) {
+ case 1:
+ __asm__ __volatile__("cmpxchgb %b1,%2"
+ : "=a"(prev)
+ : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ : "memory");
+ return prev;
+ case 2:
+ __asm__ __volatile__("cmpxchgw %w1,%2"
+ : "=a"(prev)
+ : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ : "memory");
+ return prev;
+ case 4:
+ __asm__ __volatile__("cmpxchgl %1,%2"
+ : "=a"(prev)
+ : "r"(new), "m"(*__xg(ptr)), "0"(old)
+ : "memory");
+ return prev;
+ }
+ return old;
+}
+
#ifndef CONFIG_X86_CMPXCHG
/*
* Building a kernel capable running on 80386. It may be necessary to
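The function bodies above match __cmpxchg() in this header except for one detail: the LOCK prefix is omitted. For contrast, a standalone sketch of the 4-byte case as the SMP-safe variant emits it (the helper name here is illustrative, not from the patch):

/*
 * Locked 4-byte compare-and-exchange, sketched for comparison: the
 * "lock" prefix makes the read-modify-write atomic across all CPUs,
 * at the price of a locked bus/cache transaction.  Dropping the
 * prefix is exactly the saving __cmpxchg_local() buys.
 */
static inline unsigned int cmpxchg_u32_locked_sketch(volatile unsigned int *ptr,
						     unsigned int old,
						     unsigned int new)
{
	unsigned int prev;

	__asm__ __volatile__("lock; cmpxchgl %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*ptr), "0"(old)
			     : "memory");
	return prev;
}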
@@ -403,6 +433,17 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
(unsigned long)(n), sizeof(*(ptr))); \
__ret; \
})
+#define cmpxchg_local(ptr,o,n) \
+({ \
+ __typeof__(*(ptr)) __ret; \
+ if (likely(boot_cpu_data.x86 > 3)) \
+ __ret = __cmpxchg_local((ptr), (unsigned long)(o), \
+ (unsigned long)(n), sizeof(*(ptr))); \
+ else \
+ __ret = cmpxchg_386((ptr), (unsigned long)(o), \
+ (unsigned long)(n), sizeof(*(ptr))); \
+ __ret; \
+})
#endif

#ifdef CONFIG_X86_CMPXCHG64
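On a genuine 80386 there is no cmpxchg instruction at all, hence the boot_cpu_data.x86 > 3 runtime test above: anything newer than a 386 takes the inline-asm path, while the 386 itself falls back to cmpxchg_386(), which emulates the operation in software. The idea behind that emulation, sketched under the assumption of a uniprocessor 386 (the kernel's actual helper lives elsewhere in the i386 tree):

/*
 * Illustrative sketch of the 80386 fallback's approach, not the exact
 * kernel helper: with local interrupts disabled, nothing on this CPU
 * can interleave with the compare-and-store, and since the 386 is a
 * uniprocessor-only target, that is sufficient.
 */
static inline unsigned long cmpxchg_386_sketch(volatile unsigned long *ptr,
					       unsigned long old,
					       unsigned long new)
{
	unsigned long prev, flags;

	local_irq_save(flags);		/* disable interrupts, save eflags */
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	local_irq_restore(flags);
	return prev;
}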
@@ -421,10 +462,26 @@ static inline unsigned long long __cmpxchg64(volatile void *ptr, unsigned long long old,
return prev;
}

+static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
+ unsigned long long old, unsigned long long new)
+{
+ unsigned long long prev;
+ __asm__ __volatile__("cmpxchg8b %3"
+ : "=A"(prev)
+ : "b"((unsigned long)new),
+ "c"((unsigned long)(new >> 32)),
+ "m"(*__xg(ptr)),
+ "0"(old)
+ : "memory");
+ return prev;
+}
+
#define cmpxchg64(ptr,o,n)\
((__typeof__(*(ptr)))__cmpxchg64((ptr),(unsigned long long)(o),\
(unsigned long long)(n)))
-
+#define cmpxchg64_local(ptr,o,n)\
+ ((__typeof__(*(ptr)))__cmpxchg64_local((ptr),(unsigned long long)(o),\
+ (unsigned long long)(n)))
#endif

/*
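In the 64-bit variant, the "=A" constraint names the edx:eax register pair that cmpxchg8b compares against the memory operand, while the "b" and "c" constraints feed the low and high halves of the new value into ebx and ecx. Taken together, a typical use of the new primitives is a lock-free retry loop on CPU-private state. A hypothetical caller (the names below are illustrative, not part of the patch):

/*
 * Usage sketch: retry-loop update of a counter that only the local CPU
 * ever writes.  cmpxchg_local() suffices because no other CPU races on
 * the value; cmpxchg64_local() plays the same role for 64-bit
 * quantities via cmpxchg8b.
 */
static void local_counter_add(unsigned long *counter, unsigned long delta)
{
	unsigned long old, new;

	do {
		old = *counter;
		new = old + delta;
	} while (cmpxchg_local(counter, old, new) != old);
}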