/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #ifndef ANDROID_CUTILS_ATOMIC_ARM_H
18 #define ANDROID_CUTILS_ATOMIC_ARM_H
#include <stdint.h>
#include <machine/cpu-features.h>
/*
 * Compiler-only barrier: prevents the compiler from moving memory accesses
 * across this point.  Emits no machine instructions ("" asm with a "memory"
 * clobber), so it does not order accesses as seen by other CPUs.
 */
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
/*
 * Full and store-only memory barriers, selected by target capability.
 * NOTE(review): the opening preprocessor condition was missing from the
 * damaged source; restored as ANDROID_SMP == 0 to match the surrounding
 * #elif chain — confirm against the project's build configuration.
 */
#if ANDROID_SMP == 0
/* Uniprocessor: a compiler barrier suffices; no hardware ordering needed. */
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#elif defined(__ARM_HAVE_DMB)
/* DMB instruction available: full barrier, and "dmb st" for stores only. */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("dmb" : : : "memory");
}
extern inline void android_memory_store_barrier(void)
{
    __asm__ __volatile__ ("dmb st" : : : "memory");
}
#elif defined(__ARM_HAVE_LDREX_STREX)
/* No DMB instruction: use the CP15 data-memory-barrier operation. */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory");
}
/* No store-only variant at this level; fall back to the full barrier. */
extern inline void android_memory_store_barrier(void)
{
    android_memory_barrier();
}
#else
/* Oldest targets: call the kernel-provided helper on the vector page. */
extern inline void android_memory_barrier(void)
{
    typedef void (kuser_memory_barrier)(void);
    (*(kuser_memory_barrier *)0xffff0fa0)();
}
extern inline void android_memory_store_barrier(void)
{
    android_memory_barrier();
}
#endif
67 extern inline int32_t android_atomic_acquire_load(volatile const int32_t *ptr
)
70 android_memory_barrier();
74 extern inline int32_t android_atomic_release_load(volatile const int32_t *ptr
)
76 android_memory_barrier();
80 extern inline void android_atomic_acquire_store(int32_t value
,
81 volatile int32_t *ptr
)
84 android_memory_barrier();
87 extern inline void android_atomic_release_store(int32_t value
,
88 volatile int32_t *ptr
)
90 android_memory_barrier();
94 #if defined(__thumb__)
95 extern int android_atomic_cas(int32_t old_value
, int32_t new_value
,
96 volatile int32_t *ptr
);
97 #elif defined(__ARM_HAVE_LDREX_STREX)
98 extern inline int android_atomic_cas(int32_t old_value
, int32_t new_value
,
99 volatile int32_t *ptr
)
101 int32_t prev
, status
;
103 __asm__
__volatile__ ("ldrex %0, [%3]\n"
106 "strexeq %1, %5, [%3]"
107 : "=&r" (prev
), "=&r" (status
), "+m"(*ptr
)
108 : "r" (ptr
), "Ir" (old_value
), "r" (new_value
)
110 } while (__builtin_expect(status
!= 0, 0));
111 return prev
!= old_value
;
114 extern inline int android_atomic_cas(int32_t old_value
, int32_t new_value
,
115 volatile int32_t *ptr
)
117 typedef int (kuser_cmpxchg
)(int32_t, int32_t, volatile int32_t *);
118 int32_t prev
, status
;
121 status
= (*(kuser_cmpxchg
*)0xffff0fc0)(old_value
, new_value
, ptr
);
122 if (__builtin_expect(status
== 0, 1))
125 } while (prev
== old_value
);
130 extern inline int android_atomic_acquire_cas(int32_t old_value
,
132 volatile int32_t *ptr
)
134 int status
= android_atomic_cas(old_value
, new_value
, ptr
);
135 android_memory_barrier();
139 extern inline int android_atomic_release_cas(int32_t old_value
,
141 volatile int32_t *ptr
)
143 android_memory_barrier();
144 return android_atomic_cas(old_value
, new_value
, ptr
);
148 #if defined(__thumb__)
149 extern int32_t android_atomic_add(int32_t increment
,
150 volatile int32_t *ptr
);
151 #elif defined(__ARM_HAVE_LDREX_STREX)
152 extern inline int32_t android_atomic_add(int32_t increment
,
153 volatile int32_t *ptr
)
155 int32_t prev
, tmp
, status
;
156 android_memory_barrier();
158 __asm__
__volatile__ ("ldrex %0, [%4]\n"
161 : "=&r" (prev
), "=&r" (tmp
),
162 "=&r" (status
), "+m" (*ptr
)
163 : "r" (ptr
), "Ir" (increment
)
165 } while (__builtin_expect(status
!= 0, 0));
169 extern inline int32_t android_atomic_add(int32_t increment
,
170 volatile int32_t *ptr
)
172 int32_t prev
, status
;
173 android_memory_barrier();
176 status
= android_atomic_cas(prev
, prev
+ increment
, ptr
);
177 } while (__builtin_expect(status
!= 0, 0));
182 extern inline int32_t android_atomic_inc(volatile int32_t *addr
)
184 return android_atomic_add(1, addr
);
187 extern inline int32_t android_atomic_dec(volatile int32_t *addr
)
189 return android_atomic_add(-1, addr
);
192 #if defined(__thumb__)
193 extern int32_t android_atomic_and(int32_t value
, volatile int32_t *ptr
);
194 #elif defined(__ARM_HAVE_LDREX_STREX)
195 extern inline int32_t android_atomic_and(int32_t value
, volatile int32_t *ptr
)
197 int32_t prev
, tmp
, status
;
198 android_memory_barrier();
200 __asm__
__volatile__ ("ldrex %0, [%4]\n"
203 : "=&r" (prev
), "=&r" (tmp
),
204 "=&r" (status
), "+m" (*ptr
)
205 : "r" (ptr
), "Ir" (value
)
207 } while (__builtin_expect(status
!= 0, 0));
211 extern inline int32_t android_atomic_and(int32_t value
, volatile int32_t *ptr
)
213 int32_t prev
, status
;
214 android_memory_barrier();
217 status
= android_atomic_cas(prev
, prev
& value
, ptr
);
218 } while (__builtin_expect(status
!= 0, 0));
223 #if defined(__thumb__)
224 extern int32_t android_atomic_or(int32_t value
, volatile int32_t *ptr
);
225 #elif defined(__ARM_HAVE_LDREX_STREX)
226 extern inline int32_t android_atomic_or(int32_t value
, volatile int32_t *ptr
)
228 int32_t prev
, tmp
, status
;
229 android_memory_barrier();
231 __asm__
__volatile__ ("ldrex %0, [%4]\n"
234 : "=&r" (prev
), "=&r" (tmp
),
235 "=&r" (status
), "+m" (*ptr
)
236 : "r" (ptr
), "Ir" (value
)
238 } while (__builtin_expect(status
!= 0, 0));
242 extern inline int32_t android_atomic_or(int32_t value
, volatile int32_t *ptr
)
244 int32_t prev
, status
;
245 android_memory_barrier();
248 status
= android_atomic_cas(prev
, prev
| value
, ptr
);
249 } while (__builtin_expect(status
!= 0, 0));
254 #endif /* ANDROID_CUTILS_ATOMIC_ARM_H */