/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef ANDROID_CUTILS_ATOMIC_X86_H
#define ANDROID_CUTILS_ATOMIC_X86_H

#include <stdint.h>
/*
 * Compiler-only barrier: forbids the compiler from reordering memory
 * accesses across this point.  Emits no machine instruction.
 */
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
#if ANDROID_SMP == 0
/*
 * Uniprocessor build: the CPU observes its own program order, so a
 * compiler barrier is sufficient for both barrier flavors.
 */
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#else
/*
 * SMP build: a full barrier requires mfence.  x86 does not reorder
 * stores with other stores, so the store barrier stays compiler-only.
 */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#endif
/*
 * Load with acquire semantics: the load is ordered before all later
 * memory operations.  x86 does not reorder loads with other loads, so
 * a compiler barrier after the load suffices.
 */
extern inline int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t value = *ptr;
    android_compiler_barrier();
    return value;
}
/*
 * Load ordered after all earlier memory operations; a full hardware
 * barrier precedes the load.
 */
extern inline int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    android_memory_barrier();
    return *ptr;
}
/*
 * Store followed by a full barrier, so the store is visible before any
 * later memory operation.
 */
extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}
/*
 * Store with release semantics: earlier memory operations complete
 * before the store.  x86 does not reorder stores with other stores, so
 * a compiler barrier before the store suffices.
 */
extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}
/*
 * Atomic compare-and-swap: if *ptr == old_value, store new_value.
 *
 * Returns 0 if the swap was performed, nonzero if *ptr did not match
 * old_value.  cmpxchgl implicitly uses/updates EAX, hence the "=a"
 * output and "0" input tying old_value to the same register.
 */
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev;
    __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}
/*
 * CAS with acquire ordering.  On x86 the locked cmpxchg is already a
 * full barrier, so no extra fence is needed.
 */
extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Loads are not reordered with other loads. */
    return android_atomic_cas(old_value, new_value, ptr);
}
/*
 * CAS with release ordering.  On x86 the locked cmpxchg is already a
 * full barrier, so no extra fence is needed.
 */
extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Stores are not reordered with other stores. */
    return android_atomic_cas(old_value, new_value, ptr);
}
/*
 * Atomically add increment to *ptr; returns the previous value of *ptr.
 * xadd exchanges the register with memory while adding, so after the
 * asm the increment operand holds the old memory value.
 */
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (increment), "+m" (*ptr)
                          : : "memory");
    /* increment now holds the old value of *ptr */
    return increment;
}
/* Atomically increment *addr by 1; returns the previous value. */
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}
/* Atomically decrement *addr by 1; returns the previous value. */
extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}
/*
 * Atomically AND value into *ptr via a CAS retry loop; returns the
 * previous value of *ptr.  The loop retries only when another thread
 * changed *ptr between the read and the CAS (expected to be rare,
 * hence the __builtin_expect hint).
 */
extern inline int32_t android_atomic_and(int32_t value,
                                         volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
/*
 * Atomically OR value into *ptr via a CAS retry loop; returns the
 * previous value of *ptr.  Retries only on CAS contention.
 */
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
142 #endif /* ANDROID_CUTILS_ATOMIC_X86_H */