Diffstat (limited to 'include/cutils/atomic-x86.h')
-rw-r--r--  include/cutils/atomic-x86.h  139
1 file changed, 139 insertions, 0 deletions
diff --git a/include/cutils/atomic-x86.h b/include/cutils/atomic-x86.h
new file mode 100644
index 0000000..06b643f
--- /dev/null
+++ b/include/cutils/atomic-x86.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_CUTILS_ATOMIC_X86_H
+#define ANDROID_CUTILS_ATOMIC_X86_H
+
+#include <stdint.h>
+
+extern inline void android_compiler_barrier(void)
+{
+ __asm__ __volatile__ ("" : : : "memory");
+}
+
+#if ANDROID_SMP == 0
+extern inline void android_memory_barrier(void)
+{
+ android_compiler_barrier();
+}
+#else
+extern inline void android_memory_barrier(void)
+{
+ __asm__ __volatile__ ("mfence" : : : "memory");
+}
+#endif
+
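+/*
+ * x86 does not reorder loads with other loads or stores with other
+ * stores, so an acquire load or a release store needs only a compiler
+ * barrier; a real fence (mfence) is required only to order a store
+ * against a later load.
+ */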
+extern inline int32_t android_atomic_acquire_load(volatile int32_t *ptr) {
+ int32_t value = *ptr;
+ android_compiler_barrier();
+ return value;
+}
+
+extern inline int32_t android_atomic_release_load(volatile int32_t *ptr) {
+ android_memory_barrier();
+ return *ptr;
+}
+
+extern inline void android_atomic_acquire_store(int32_t value,
+ volatile int32_t *ptr) {
+ *ptr = value;
+ android_memory_barrier();
+}
+
+extern inline void android_atomic_release_store(int32_t value,
+ volatile int32_t *ptr) {
+ android_compiler_barrier();
+ *ptr = value;
+}
+
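+/*
+ * Compare-and-swap: returns 0 if *ptr matched old_value and was set to
+ * new_value, nonzero if the swap did not happen.
+ */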
+extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
+ volatile int32_t *ptr)
+{
+ int32_t prev;
+ __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
+ : "=a" (prev)
+ : "q" (new_value), "m" (*ptr), "0" (old_value)
+ : "memory");
+ return prev != old_value;
+}
+
+extern inline int android_atomic_acquire_cas(int32_t old_value,
+ int32_t new_value,
+ volatile int32_t *ptr)
+{
+ /* Loads are not reordered with other loads. */
+ return android_atomic_cas(old_value, new_value, ptr);
+}
+
+extern inline int android_atomic_release_cas(int32_t old_value,
+ int32_t new_value,
+ volatile int32_t *ptr)
+{
+ /* Stores are not reordered with other stores. */
+ return android_atomic_cas(old_value, new_value, ptr);
+}
+
+extern inline int32_t android_atomic_swap(int32_t new_value,
+ volatile int32_t *ptr)
+{
+ __asm__ __volatile__ ("xchgl %1, %0"
+ : "=r" (new_value)
+ : "m" (*ptr), "0" (new_value)
+ : "memory");
+ /* new_value now holds the old value of *ptr */
+ return new_value;
+}
+
+extern inline int32_t android_atomic_add(int32_t increment,
+ volatile int32_t *ptr)
+{
+ __asm__ __volatile__ ("lock; xaddl %0, %1"
+ : "+r" (increment), "+m" (*ptr)
+ : : "memory");
+ /* increment now holds the old value of *ptr */
+ return increment;
+}
+
+extern inline int32_t android_atomic_inc(volatile int32_t *addr) {
+ return android_atomic_add(1, addr);
+}
+
+extern inline int32_t android_atomic_dec(volatile int32_t *addr) {
+ return android_atomic_add(-1, addr);
+}
+
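+/*
+ * android_atomic_and/android_atomic_or return the previous value of *ptr,
+ * retrying the CAS until no concurrent update intervenes.
+ */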
+extern inline int32_t android_atomic_and(int32_t value,
+ volatile int32_t *ptr)
+{
+ int32_t prev, status;
+ do {
+ prev = *ptr;
+ status = android_atomic_cas(prev, prev & value, ptr);
+ } while (__builtin_expect(status != 0, 0));
+ return prev;
+}
+
+extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
+{
+ int32_t prev, status;
+ do {
+ prev = *ptr;
+ status = android_atomic_cas(prev, prev | value, ptr);
+ } while (__builtin_expect(status != 0, 0));
+ return prev;
+}
+
+#endif /* ANDROID_CUTILS_ATOMIC_X86_H */
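
For context, a minimal usage sketch (not part of the diff above) showing how these primitives compose into a spinlock. The my_spinlock_t type and my_spin_lock/my_spin_unlock functions are hypothetical illustrations; the sketch assumes ANDROID_SMP is normally supplied by the build and that libcutils provides the out-of-line copies of these extern inline functions.

    #define ANDROID_SMP 1                 /* assumption: normally set by the build */
    #include <cutils/atomic-x86.h>

    /* Hypothetical spinlock built on the CAS and release-store primitives. */
    typedef struct {
        volatile int32_t locked;          /* 0 = free, 1 = held */
    } my_spinlock_t;

    static void my_spin_lock(my_spinlock_t *l)
    {
        /* android_atomic_acquire_cas() returns 0 when it swaps 0 -> 1;
         * spin until this thread is the one that performs the swap. */
        while (android_atomic_acquire_cas(0, 1, &l->locked) != 0) {
            /* busy-wait */
        }
    }

    static void my_spin_unlock(my_spinlock_t *l)
    {
        /* The release store orders the critical section's writes before
         * the flag is cleared. */
        android_atomic_release_store(0, &l->locked);
    }

In the same spirit, android_atomic_inc()/android_atomic_dec() return the previous value, so a reference count has dropped to zero exactly when android_atomic_dec() returns 1.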