// RUN: %clang_cc1 < %s -triple armv5e-none-linux-gnueabi -emit-llvm -O1 | FileCheck %s
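//
// armv5e has no exclusive load/store instructions, so the atomic builtins
// cannot be expanded inline; each test below checks that they are instead
// lowered to calls into the atomic runtime library (__atomic_fetch_<op>_4).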

enum memory_order {
  memory_order_relaxed, memory_order_consume, memory_order_acquire,
  memory_order_release, memory_order_acq_rel, memory_order_seq_cst
};
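// The enumerators mirror the __ATOMIC_* values 0..5, so the last i32
// argument of each libcall below is 5 for seq_cst and 0 for relaxed. For the
// _Atomic(int *) tests, the C11 builtins scale the operand by sizeof(int):
// 3 becomes 12 and 5 becomes 20.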

int *test_c11_atomic_fetch_add_int_ptr(_Atomic(int *) *p) {
  // CHECK: test_c11_atomic_fetch_add_int_ptr
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_add_4(i8* {{%[0-9]+}}, i32 12, i32 5)
  return __c11_atomic_fetch_add(p, 3, memory_order_seq_cst);
}

int *test_c11_atomic_fetch_sub_int_ptr(_Atomic(int *) *p) {
  // CHECK: test_c11_atomic_fetch_sub_int_ptr
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_sub_4(i8* {{%[0-9]+}}, i32 20, i32 5)
  return __c11_atomic_fetch_sub(p, 5, memory_order_seq_cst);
}

int test_c11_atomic_fetch_add_int(_Atomic(int) *p) {
  // CHECK: test_c11_atomic_fetch_add_int
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_add_4(i8* {{%[0-9]+}}, i32 3, i32 5)
  return __c11_atomic_fetch_add(p, 3, memory_order_seq_cst);
}

int test_c11_atomic_fetch_sub_int(_Atomic(int) *p) {
  // CHECK: test_c11_atomic_fetch_sub_int
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_sub_4(i8* {{%[0-9]+}}, i32 5, i32 5)
  return __c11_atomic_fetch_sub(p, 5, memory_order_seq_cst);
}

int *fp2a(int **p) {
  // CHECK: @fp2a
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_sub_4(i8* {{%[0-9]+}}, i32 4, i32 0)
  // Note, the GNU builtins do not multiply by sizeof(T)!
  return __atomic_fetch_sub(p, 4, memory_order_relaxed);
}

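// The remaining tests use the GNU builtins on a plain int; each
// __atomic_fetch_<op> lowers directly to the matching __atomic_fetch_<op>_4
// libcall.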
int test_atomic_fetch_add(int *p) {
  // CHECK: test_atomic_fetch_add
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_add_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  return __atomic_fetch_add(p, 55, memory_order_seq_cst);
}

int test_atomic_fetch_sub(int *p) {
  // CHECK: test_atomic_fetch_sub
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_sub_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  return __atomic_fetch_sub(p, 55, memory_order_seq_cst);
}

int test_atomic_fetch_and(int *p) {
  // CHECK: test_atomic_fetch_and
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_and_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  return __atomic_fetch_and(p, 55, memory_order_seq_cst);
}

int test_atomic_fetch_or(int *p) {
  // CHECK: test_atomic_fetch_or
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_or_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  return __atomic_fetch_or(p, 55, memory_order_seq_cst);
}

int test_atomic_fetch_xor(int *p) {
  // CHECK: test_atomic_fetch_xor
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_xor_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  return __atomic_fetch_xor(p, 55, memory_order_seq_cst);
}

int test_atomic_fetch_nand(int *p) {
  // CHECK: test_atomic_fetch_nand
  // CHECK: {{%[^ ]*}} = tail call i32 @__atomic_fetch_nand_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  return __atomic_fetch_nand(p, 55, memory_order_seq_cst);
}

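// The <op>_fetch builtins return the new value: they are lowered to the same
// fetch-and-op libcall with the operation reapplied to the returned old
// value, which is what the paired CHECK lines below verify.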
int test_atomic_add_fetch(int *p) {
  // CHECK: test_atomic_add_fetch
  // CHECK: [[CALL:%[^ ]*]] = tail call i32 @__atomic_fetch_add_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  // CHECK: {{%[^ ]*}} = add i32 [[CALL]], 55
  return __atomic_add_fetch(p, 55, memory_order_seq_cst);
}

int test_atomic_sub_fetch(int *p) {
  // CHECK: test_atomic_sub_fetch
  // CHECK: [[CALL:%[^ ]*]] = tail call i32 @__atomic_fetch_sub_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  // CHECK: {{%[^ ]*}} = add i32 [[CALL]], -55
  return __atomic_sub_fetch(p, 55, memory_order_seq_cst);
}

int test_atomic_and_fetch(int *p) {
  // CHECK: test_atomic_and_fetch
  // CHECK: [[CALL:%[^ ]*]] = tail call i32 @__atomic_fetch_and_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  // CHECK: {{%[^ ]*}} = and i32 [[CALL]], 55
  return __atomic_and_fetch(p, 55, memory_order_seq_cst);
}

int test_atomic_or_fetch(int *p) {
  // CHECK: test_atomic_or_fetch
  // CHECK: [[CALL:%[^ ]*]] = tail call i32 @__atomic_fetch_or_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  // CHECK: {{%[^ ]*}} = or i32 [[CALL]], 55
  return __atomic_or_fetch(p, 55, memory_order_seq_cst);
}

int test_atomic_xor_fetch(int *p) {
  // CHECK: test_atomic_xor_fetch
  // CHECK: [[CALL:%[^ ]*]] = tail call i32 @__atomic_fetch_xor_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  // CHECK: {{%[^ ]*}} = xor i32 [[CALL]], 55
  return __atomic_xor_fetch(p, 55, memory_order_seq_cst);
}

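// For nand_fetch the recomputed result is ~(old & 55); at -O1 this is folded
// to (old | ~55) ^ 55, i.e. the or/xor pair checked below (-56 == ~55).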
int test_atomic_nand_fetch(int *p) {
  // CHECK: test_atomic_nand_fetch
  // CHECK: [[CALL:%[^ ]*]] = tail call i32 @__atomic_fetch_nand_4(i8* {{%[0-9]+}}, i32 55, i32 5)
  // CHECK: [[OR:%[^ ]*]] = or i32 [[CALL]], -56
  // CHECK: {{%[^ ]*}} = xor i32 [[OR]], 55
  return __atomic_nand_fetch(p, 55, memory_order_seq_cst);
}