Commit | Line | Data |
---|---|---|
1e494cf4 JB |
1 | /* |
2 | * Copyright (C) 2010 The Android Open Source Project | |
3 | * | |
4 | * Licensed under the Apache License, Version 2.0 (the "License"); | |
5 | * you may not use this file except in compliance with the License. | |
6 | * You may obtain a copy of the License at | |
7 | * | |
8 | * http://www.apache.org/licenses/LICENSE-2.0 | |
9 | * | |
10 | * Unless required by applicable law or agreed to in writing, software | |
11 | * distributed under the License is distributed on an "AS IS" BASIS, | |
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
13 | * See the License for the specific language governing permissions and | |
14 | * limitations under the License. | |
15 | */ | |
16 | ||
17 | #ifndef ANDROID_CUTILS_ATOMIC_X86_H | |
18 | #define ANDROID_CUTILS_ATOMIC_X86_H | |
19 | ||
20 | #include <stdint.h> | |
21 | ||
/*
 * Compiler-only barrier: the "memory" clobber stops the compiler from
 * reordering or caching memory accesses across this point.  Emits no
 * machine instruction, so it imposes no hardware ordering.
 */
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
26 | ||
/*
 * NOTE(review): if ANDROID_SMP is not defined at all, the preprocessor
 * evaluates it as 0 and the no-op uniprocessor barriers are silently
 * selected — confirm every includer defines ANDROID_SMP explicitly.
 */
#if ANDROID_SMP == 0
/* Uniprocessor: no cross-CPU ordering needed; only restrain the compiler. */
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#else
/* SMP: mfence serializes all prior loads and stores against later ones. */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}
/* Store-store ordering: x86 does not reorder stores with other stores,
 * so a compiler barrier is sufficient even on SMP. */
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#endif
46 | ||
/*
 * Load *ptr with acquire semantics: nothing after this load may be
 * reordered before it.  On x86 ordinary loads already provide acquire
 * ordering, so a compiler barrier after the read is all that is needed.
 */
extern inline int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t result = *ptr;
    android_compiler_barrier();
    return result;
}
53 | ||
/*
 * Load *ptr after a full memory barrier, so every memory operation
 * issued before this call is ordered before the load.
 */
extern inline int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    android_memory_barrier();
    return *ptr;
}
59 | ||
/*
 * Store value to *ptr, then issue a full memory barrier so the store is
 * ordered before any subsequent memory operation.
 */
extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}
66 | ||
/*
 * Store value to *ptr with release semantics: all earlier memory
 * operations are ordered before the store.  On x86 stores are not
 * reordered with earlier accesses, so a compiler barrier suffices.
 */
extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}
73 | ||
/*
 * Atomic compare-and-swap: if *ptr equals old_value, replace it with
 * new_value.  Returns 0 on success, nonzero if *ptr held a different
 * value (in which case *ptr is left unchanged).
 */
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t observed = old_value;
    /* cmpxchg compares EAX (observed) with *ptr; on match it stores
     * new_value, otherwise it loads the current *ptr into EAX.  The
     * "+a" read-write constraint replaces the "=a"/"0" pair. */
    __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
                          : "+a" (observed), "+m" (*ptr)
                          : "q" (new_value)
                          : "memory");
    return observed != old_value;
}
84 | ||
/*
 * CAS with acquire ordering.  The plain CAS already provides the needed
 * ordering on x86 (loads are not reordered with other loads), so this
 * simply delegates.
 */
extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int failed = android_atomic_cas(old_value, new_value, ptr);
    return failed;
}
92 | ||
/*
 * CAS with release ordering.  The plain CAS already provides the needed
 * ordering on x86 (stores are not reordered with other stores), so this
 * simply delegates.
 */
extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int failed = android_atomic_cas(old_value, new_value, ptr);
    return failed;
}
100 | ||
/*
 * Atomically add increment to *ptr and return the value *ptr held
 * before the addition (fetch-and-add via lock xadd).
 */
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    int32_t prior = increment;
    /* xadd exchanges prior with *ptr and stores their sum in *ptr,
     * leaving the old *ptr value in prior. */
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (prior), "+m" (*ptr)
                          : : "memory");
    return prior;
}
110 | ||
/* Atomically increment *addr; returns the pre-increment value. */
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    const int32_t delta = 1;
    return android_atomic_add(delta, addr);
}
115 | ||
/* Atomically decrement *addr; returns the pre-decrement value. */
extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    const int32_t delta = -1;
    return android_atomic_add(delta, addr);
}
120 | ||
/*
 * Atomically AND value into *ptr via a CAS retry loop; returns the
 * value *ptr held immediately before the successful update.
 */
extern inline int32_t android_atomic_and(int32_t value,
                                         volatile int32_t *ptr)
{
    int32_t observed;
    for (;;) {
        observed = *ptr;
        /* Retry only if another thread changed *ptr between the read
         * and the CAS. */
        if (android_atomic_cas(observed, observed & value, ptr) == 0)
            break;
    }
    return observed;
}
131 | ||
/*
 * Atomically OR value into *ptr via a CAS retry loop; returns the
 * value *ptr held immediately before the successful update.
 */
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t observed;
    for (;;) {
        observed = *ptr;
        /* Retry only if another thread changed *ptr between the read
         * and the CAS. */
        if (android_atomic_cas(observed, observed | value, ptr) == 0)
            break;
    }
    return observed;
}
141 | ||
142 | #endif /* ANDROID_CUTILS_ATOMIC_X86_H */ |