/* external/musl/arch/loongarch64/atomic_arch.h */
/* Load-linked: load *p and begin an LL/SC sequence on it (32-bit). */
#define a_ll a_ll
static inline int a_ll(volatile int *p)
{
	int v;
	__asm__ __volatile__ (
		"ll.w %0, %1"
		: "=r"(v)
		: "ZC"(*p));
	return v;
}

/* Store-conditional: store v to *p only if the LL/SC sequence started by
 * a_ll is still intact; returns nonzero on success, 0 on failure. */
#define a_sc a_sc
static inline int a_sc(volatile int *p, int v)
{
	int r;
	__asm__ __volatile__ (
		"sc.w %0, %1"
		: "=r"(r), "=ZC"(*p)
		: "0"(v) : "memory");
	return r;
}

/* Load-linked for pointer-sized (64-bit) objects. */
#define a_ll_p a_ll_p
static inline void *a_ll_p(volatile void *p)
{
	void *v;
	__asm__ __volatile__ (
		"ll.d %0, %1"
		: "=r"(v)
		: "ZC"(*(void *volatile *)p));
	return v;
}

/* Store-conditional for pointer-sized (64-bit) objects. */
#define a_sc_p a_sc_p
static inline int a_sc_p(volatile void *p, void *v)
{
	long r;
	__asm__ __volatile__ (
		"sc.d %0, %1"
		: "=r"(r), "=ZC"(*(void *volatile *)p)
		: "0"(v)
		: "memory");
	return r;
}

/* Full memory barrier. */
#define a_barrier a_barrier
static inline void a_barrier()
{
	__asm__ __volatile__ ("dbar 0" : : : "memory");
}

/* Barriers issued before and after each LL/SC retry loop. */
#define a_pre_llsc  a_barrier
#define a_post_llsc a_barrier
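/*
 * For reference: the hooks above are consumed by musl's generic atomic
 * layer (src/internal/atomic.h), which builds higher-level operations
 * from whatever primitives the arch defines. The following is only an
 * illustrative sketch (not part of this header, and cas_sketch is a
 * hypothetical name) of how a compare-and-swap loop is typically
 * composed from these LL/SC primitives and barrier hooks:
 *
 *	static inline int cas_sketch(volatile int *p, int t, int s)
 *	{
 *		int old;
 *		a_pre_llsc();                    // dbar 0 before the LL/SC loop
 *		do old = a_ll(p);                // ll.w: load *p, start monitoring it
 *		while (old == t && !a_sc(p, s)); // sc.w: store s only if *p is unchanged;
 *		                                 // retry the loop if the store fails
 *		a_post_llsc();                   // dbar 0 after the loop
 *		return old;                      // observed value; equals t on success
 *	}
 */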