/*===-- atomic.c - Implement support functions for atomic operations.------===
 *
 *                     The LLVM Compiler Infrastructure
 *
 * This file is dual licensed under the MIT and the University of Illinois Open
 * Source Licenses. See LICENSE.TXT for details.
 *
 *===----------------------------------------------------------------------===
 *
 * atomic.c defines a set of functions for performing atomic accesses on
 * arbitrary-sized memory locations.  This design uses locks that should
 * be fast in the uncontended case, for two reasons:
 *
 * 1) This code must work with C programs that do not link to anything
 *    (including pthreads) and so it should not depend on any pthread
 *    functions.
 * 2) Atomic operations, rather than explicit mutexes, are most commonly used
 *    on code where contended operations are rare.
 *
 * To avoid needing a per-object lock, this code allocates an array of
 * locks and hashes the object pointers to find the one that it should use.
 * For operations that must be atomic on two locations, the lower lock is
 * always acquired first, to avoid deadlock.
 *
 *===----------------------------------------------------------------------===
 */
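
// Illustrative sketch (not part of the library): for an _Atomic object that is
// too large to be lock free, the compiler lowers C11 atomic operations to
// calls into the generic functions defined in this file, passing the object
// size and a memory-order value.  The names and types below are made up for
// the example; the call shape is approximate:
//
//   struct Pair { void *a, *b; };     // 16 bytes on LP64: not lock free
//   _Atomic struct Pair p;
//   struct Pair out;
//   // `out = atomic_load(&p);` becomes, approximately:
//   //   __atomic_load(sizeof(p), &p, &out, __ATOMIC_SEQ_CST);
//   // which is serviced by __atomic_load_c() below.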

#include <stdint.h>
#include <string.h>

#include "assembly.h"

// Clang objects if you redefine a builtin.  This little hack allows us to
// define a function with the same name as an intrinsic.
#pragma redefine_extname __atomic_load_c SYMBOL_NAME(__atomic_load)
#pragma redefine_extname __atomic_store_c SYMBOL_NAME(__atomic_store)
#pragma redefine_extname __atomic_exchange_c SYMBOL_NAME(__atomic_exchange)
#pragma redefine_extname __atomic_compare_exchange_c SYMBOL_NAME(__atomic_compare_exchange)
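
// The pragmas only change the assembler-level symbol of each function.  A
// roughly equivalent (GNU C) spelling, shown purely for illustration, would be
// an asm label on the declaration:
//
//   void __atomic_load_c(int size, void *src, void *dest, int model)
//       __asm__("__atomic_load");
//
// (SYMBOL_NAME() from assembly.h may additionally apply the platform's
// user-label prefix, e.g. a leading underscore on Mach-O targets.)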

/// Number of locks.  This allocates one page on 32-bit platforms, two on
/// 64-bit.  This can be specified externally if a different trade-off between
/// memory usage and contention probability is required for a given platform.
#ifndef SPINLOCK_COUNT
#define SPINLOCK_COUNT (1<<10)
#endif
static const long SPINLOCK_MASK = SPINLOCK_COUNT - 1;
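
// A build that wants a different lock-table size can, for example, predefine
// the macro on the compiler command line (hypothetical invocation, shown as a
// sketch):
//
//   cc -DSPINLOCK_COUNT='(1<<14)' -c atomic.c
//
// The value should remain a power of two so that SPINLOCK_MASK stays a valid
// bit mask.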

////////////////////////////////////////////////////////////////////////////////
// Platform-specific lock implementation.  Falls back to spinlocks if none is
// defined.  Each platform should define the Lock type, and corresponding
// lock() and unlock() functions.
////////////////////////////////////////////////////////////////////////////////
#ifdef __FreeBSD__
#include <errno.h>
#include <sys/types.h>
#include <machine/atomic.h>
#include <sys/umtx.h>
typedef struct _usem Lock;
__inline static void unlock(Lock *l) {
  __c11_atomic_store((_Atomic(uint32_t)*)&l->_count, 1, __ATOMIC_RELEASE);
  __c11_atomic_thread_fence(__ATOMIC_SEQ_CST);
  if (l->_has_waiters)
      _umtx_op(l, UMTX_OP_SEM_WAKE, 1, 0, 0);
}
__inline static void lock(Lock *l) {
  uint32_t old = 1;
  while (!__c11_atomic_compare_exchange_weak((_Atomic(uint32_t)*)&l->_count, &old,
        0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED)) {
    _umtx_op(l, UMTX_OP_SEM_WAIT, 0, 0, 0);
    old = 1;
  }
}
/// locks for atomic operations
static Lock locks[SPINLOCK_COUNT] = { [0 ... SPINLOCK_COUNT-1] = {0,1,0} };

#elif defined(__APPLE__)
#include <libkern/OSAtomic.h>
typedef OSSpinLock Lock;
__inline static void unlock(Lock *l) {
  OSSpinLockUnlock(l);
}
/// Locks a lock.  In the current implementation, this is potentially
/// unbounded in the contended case.
__inline static void lock(Lock *l) {
  OSSpinLockLock(l);
}
static Lock locks[SPINLOCK_COUNT]; // initialized to OS_SPINLOCK_INIT which is 0

#else
typedef _Atomic(uintptr_t) Lock;
/// Unlock a lock.  This is a release operation.
__inline static void unlock(Lock *l) {
  __c11_atomic_store(l, 0, __ATOMIC_RELEASE);
}
/// Locks a lock.  In the current implementation, this is potentially
/// unbounded in the contended case.
__inline static void lock(Lock *l) {
  uintptr_t old = 0;
  while (!__c11_atomic_compare_exchange_weak(l, &old, 1, __ATOMIC_ACQUIRE,
        __ATOMIC_RELAXED))
    old = 0;
}
/// locks for atomic operations
static Lock locks[SPINLOCK_COUNT];
#endif
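
// Porting sketch (illustrative only, not compiled here): a platform that
// provides C11 threads could satisfy the Lock/lock()/unlock() contract above
// with mtx_t, at the cost of linking against the C11 thread support:
//
//   #include <threads.h>
//   typedef mtx_t Lock;
//   __inline static void lock(Lock *l)   { mtx_lock(l); }
//   __inline static void unlock(Lock *l) { mtx_unlock(l); }
//   static Lock locks[SPINLOCK_COUNT];   // would still need mtx_init() calls
//
// The spinlock fallback above avoids exactly this kind of dependency.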


/// Returns a lock to use for a given pointer.
static __inline Lock *lock_for_pointer(void *ptr) {
  intptr_t hash = (intptr_t)ptr;
  // Disregard the lowest 4 bits.  We want all values that may be part of the
  // same memory operation to hash to the same value and therefore use the same
  // lock.
  hash >>= 4;
  // Use the next bits as the basis for the hash
  intptr_t low = hash & SPINLOCK_MASK;
  // Now use the high(er) set of bits to perturb the hash, so that we don't
  // get collisions from atomic fields in a single object
  hash >>= 16;
  hash ^= low;
  // Return a pointer to the word to use
  return locks + (hash & SPINLOCK_MASK);
}
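
// Worked example (assuming the default SPINLOCK_COUNT of 1<<10, so
// SPINLOCK_MASK == 0x3ff): for ptr == (void *)0x1008,
//
//   hash  = 0x1008 >> 4         = 0x100
//   low   = 0x100 & 0x3ff       = 0x100
//   hash  = (0x100 >> 16) ^ low = 0x100
//   index = 0x100 & 0x3ff       = 256
//
// A neighbouring address such as 0x100c shares the same 16-byte granule and
// therefore maps to the same lock, as intended.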

/// Macros for determining whether a size is lock free.  Clang can not yet
/// codegen __atomic_is_lock_free(16), so for now we assume 16-byte values are
/// not lock free.
#define IS_LOCK_FREE_1 __c11_atomic_is_lock_free(1)
#define IS_LOCK_FREE_2 __c11_atomic_is_lock_free(2)
#define IS_LOCK_FREE_4 __c11_atomic_is_lock_free(4)
#define IS_LOCK_FREE_8 __c11_atomic_is_lock_free(8)
#define IS_LOCK_FREE_16 0

/// Macro that calls the compiler-generated lock-free versions of functions
/// when they exist.  Each case ends with a break so that a size whose
/// lock-free check fails falls back to the locked path in the caller rather
/// than falling through to a wider lock-free access.
#define LOCK_FREE_CASES() \
  do {\
  switch (size) {\
    case 2:\
      if (IS_LOCK_FREE_2) {\
        LOCK_FREE_ACTION(uint16_t);\
      }\
      break;\
    case 4:\
      if (IS_LOCK_FREE_4) {\
        LOCK_FREE_ACTION(uint32_t);\
      }\
      break;\
    case 8:\
      if (IS_LOCK_FREE_8) {\
        LOCK_FREE_ACTION(uint64_t);\
      }\
      break;\
    case 16:\
      if (IS_LOCK_FREE_16) {\
        /* FIXME: __uint128_t isn't available on 32 bit platforms.
        LOCK_FREE_ACTION(__uint128_t);*/\
      }\
      break;\
  }\
  } while (0)
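
// Expansion sketch (illustrative): with LOCK_FREE_ACTION defined as it is in
// __atomic_load_c below, the size-4 arm of LOCK_FREE_CASES() expands to
// roughly:
//
//   case 4:
//     if (__c11_atomic_is_lock_free(4)) {
//       *((uint32_t *)dest) = __c11_atomic_load((_Atomic(uint32_t) *)src,
//                                               model);
//       return;
//     }
//     break;
//
// so execution only reaches the locked memcpy path when the requested size
// has no lock-free support.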


/// An atomic load operation.  This is atomic with respect to the source
/// pointer only.
void __atomic_load_c(int size, void *src, void *dest, int model) {
#define LOCK_FREE_ACTION(type) \
  *((type*)dest) = __c11_atomic_load((_Atomic(type)*)src, model);\
  return;
  LOCK_FREE_CASES();
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(src);
  lock(l);
  memcpy(dest, src, size);
  unlock(l);
}

/// An atomic store operation.  This is atomic with respect to the destination
/// pointer only.
void __atomic_store_c(int size, void *dest, void *src, int model) {
#define LOCK_FREE_ACTION(type) \
  __c11_atomic_store((_Atomic(type)*)dest, *(type*)src, model);\
  return;
  LOCK_FREE_CASES();
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(dest);
  lock(l);
  memcpy(dest, src, size);
  unlock(l);
}

/// Atomic compare and exchange operation.  If the value at *ptr is identical
/// to the value at *expected, then this copies the value at *desired to *ptr.
/// If they are not, then this stores the current value from *ptr in *expected.
///
/// This function returns 1 if the exchange takes place or 0 if it fails.
int __atomic_compare_exchange_c(int size, void *ptr, void *expected,
                                void *desired, int success, int failure) {
#define LOCK_FREE_ACTION(type) \
  return __c11_atomic_compare_exchange_strong((_Atomic(type)*)ptr, (type*)expected,\
                                              *(type*)desired, success, failure)
  LOCK_FREE_CASES();
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(ptr);
  lock(l);
  if (memcmp(ptr, expected, size) == 0) {
    memcpy(ptr, desired, size);
    unlock(l);
    return 1;
  }
  memcpy(expected, ptr, size);
  unlock(l);
  return 0;
}
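
// Usage sketch (illustrative, not part of the library): a caller-side CAS loop
// on an oversized type behaves the same way as for a small one; on failure the
// implementation has already refreshed *expected, so the loop simply retries.
// The type and variable names are made up for the example:
//
//   #include <stdatomic.h>
//   struct Pair { void *a, *b; };
//   _Atomic struct Pair p;
//
//   struct Pair expected = atomic_load(&p);
//   struct Pair desired;
//   do {
//     desired = expected;
//     desired.a = 0;                        // some update
//   } while (!atomic_compare_exchange_weak(&p, &expected, desired));
//
// Each failed attempt reaches __atomic_compare_exchange_c() above, which
// copies the current contents of p back into `expected` before returning 0.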

/// Performs an atomic exchange: stores the old contents of *ptr into *old and
/// copies *val into *ptr.  This is atomic with respect to the target address
/// (*ptr) only.
void __atomic_exchange_c(int size, void *ptr, void *val, void *old, int model) {
#define LOCK_FREE_ACTION(type) \
  *(type*)old = __c11_atomic_exchange((_Atomic(type)*)ptr, *(type*)val,\
                                      model);\
  return;
  LOCK_FREE_CASES();
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(ptr);
  lock(l);
  memcpy(old, ptr, size);
  memcpy(ptr, val, size);
  unlock(l);
}

////////////////////////////////////////////////////////////////////////////////
// Where the size is known at compile time, the compiler may emit calls to
// specialised versions of the above functions.
////////////////////////////////////////////////////////////////////////////////
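// For example, a `_Atomic(uint32_t)` load whose size is known at compile time
// may be lowered to a call such as `__atomic_load_4(&obj, model)` when the
// target cannot inline it.  As a sketch, one expansion of the load case below
// looks roughly like this after preprocessing:
//
//   uint32_t __atomic_load_4(uint32_t *src, int model) {
//     if (__c11_atomic_is_lock_free(4))
//       return __c11_atomic_load((_Atomic(uint32_t) *)src, model);
//     Lock *l = lock_for_pointer(src);
//     lock(l);
//     uint32_t val = *src;
//     unlock(l);
//     return val;
//   }
//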
#define OPTIMISED_CASES\
  OPTIMISED_CASE(1, IS_LOCK_FREE_1, uint8_t)\
  OPTIMISED_CASE(2, IS_LOCK_FREE_2, uint16_t)\
  OPTIMISED_CASE(4, IS_LOCK_FREE_4, uint32_t)\
  OPTIMISED_CASE(8, IS_LOCK_FREE_8, uint64_t)\
  /* FIXME: __uint128_t isn't available on 32 bit platforms.
  OPTIMISED_CASE(16, IS_LOCK_FREE_16, __uint128_t)*/\

#define OPTIMISED_CASE(n, lockfree, type)\
type __atomic_load_##n(type *src, int model) {\
  if (lockfree)\
    return __c11_atomic_load((_Atomic(type)*)src, model);\
  Lock *l = lock_for_pointer(src);\
  lock(l);\
  type val = *src;\
  unlock(l);\
  return val;\
}
OPTIMISED_CASES
#undef OPTIMISED_CASE

#define OPTIMISED_CASE(n, lockfree, type)\
void __atomic_store_##n(type *dest, type val, int model) {\
  if (lockfree) {\
    __c11_atomic_store((_Atomic(type)*)dest, val, model);\
    return;\
  }\
  Lock *l = lock_for_pointer(dest);\
  lock(l);\
  *dest = val;\
  unlock(l);\
  return;\
}
OPTIMISED_CASES
#undef OPTIMISED_CASE

#define OPTIMISED_CASE(n, lockfree, type)\
type __atomic_exchange_##n(type *dest, type val, int model) {\
  if (lockfree)\
    return __c11_atomic_exchange((_Atomic(type)*)dest, val, model);\
  Lock *l = lock_for_pointer(dest);\
  lock(l);\
  type tmp = *dest;\
  *dest = val;\
  unlock(l);\
  return tmp;\
}
OPTIMISED_CASES
#undef OPTIMISED_CASE

#define OPTIMISED_CASE(n, lockfree, type)\
int __atomic_compare_exchange_##n(type *ptr, type *expected, type desired,\
                                  int success, int failure) {\
  if (lockfree)\
    return __c11_atomic_compare_exchange_strong((_Atomic(type)*)ptr, expected, desired,\
                                                success, failure);\
  Lock *l = lock_for_pointer(ptr);\
  lock(l);\
  if (*ptr == *expected) {\
    *ptr = desired;\
    unlock(l);\
    return 1;\
  }\
  *expected = *ptr;\
  unlock(l);\
  return 0;\
}
OPTIMISED_CASES
#undef OPTIMISED_CASE

////////////////////////////////////////////////////////////////////////////////
// Atomic read-modify-write operations for integers of various sizes.
////////////////////////////////////////////////////////////////////////////////
#define ATOMIC_RMW(n, lockfree, type, opname, op) \
type __atomic_fetch_##opname##_##n(type *ptr, type val, int model) {\
  if (lockfree) \
    return __c11_atomic_fetch_##opname((_Atomic(type)*)ptr, val, model);\
  Lock *l = lock_for_pointer(ptr);\
  lock(l);\
  type tmp = *ptr;\
  *ptr = tmp op val;\
  unlock(l);\
  return tmp;\
}

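// Usage sketch (illustrative, names made up): on a target where 4-byte atomics
// cannot be inlined, a C11 call such as
//
//   _Atomic uint32_t counter;
//   atomic_fetch_add_explicit(&counter, 1, memory_order_relaxed);
//
// is lowered by the compiler to roughly
//
//   __atomic_fetch_add_4(&counter, 1, __ATOMIC_RELAXED);
//
// which either performs the lock-free operation or falls back to the hashed
// spinlock, returning the previous value of the counter in both cases.
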
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, add, +)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, sub, -)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, and, &)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, or, |)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, xor, ^)
OPTIMISED_CASES
#undef OPTIMISED_CASE