1 use std::sync::atomic::{AtomicUsize, Ordering};
2 use std::sync::mpsc::channel;
3 use std::sync::{Arc, TryLockError};
4 use std::thread;
5
6 use crossbeam_utils::sync::ShardedLock;
7 use rand::Rng;
8
// Non-`Copy` payload used to verify that `into_inner`/`get_mut` move the
// value out of the lock rather than copying it.
#[derive(Eq, PartialEq, Debug)]
struct NonCopy(i32);
11
#[test]
fn smoke() {
    // Basic sanity: every guard kind can be acquired and released in turn.
    let lock = ShardedLock::new(());
    let r = lock.read().unwrap();
    drop(r);
    let w = lock.write().unwrap();
    drop(w);
    // Two simultaneous readers are allowed.
    let readers = (lock.read().unwrap(), lock.read().unwrap());
    drop(readers);
    // And a writer can still get in afterwards.
    drop(lock.write().unwrap());
}
20
#[test]
fn frob() {
    // Stress test: N threads hammer the lock with a mix of reads and writes.
    const N: u32 = 10;
    #[cfg(miri)]
    const M: usize = 50;
    #[cfg(not(miri))]
    const M: usize = 1000;

    let lock = Arc::new(ShardedLock::new(()));
    let (tx, rx) = channel::<()>();

    for _ in 0..N {
        let tx = tx.clone();
        let lock = lock.clone();
        thread::spawn(move || {
            let mut rng = rand::thread_rng();
            for _ in 0..M {
                // On average one writer among the N lockers at a time.
                if rng.gen_bool(1.0 / (N as f64)) {
                    drop(lock.write().unwrap());
                } else {
                    drop(lock.read().unwrap());
                }
            }
            drop(tx);
        });
    }

    // After dropping our sender, recv() returns Err(Disconnected) only once
    // every worker thread has finished and dropped its clone.
    drop(tx);
    let _ = rx.recv();
}
50
#[test]
// Android aborts on panic and this test relies on stack unwinding.
#[cfg(not(target_os = "android"))]
fn arc_poison_wr() {
    let lock = Arc::new(ShardedLock::new(1));
    let remote = lock.clone();
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = remote.write().unwrap();
        panic!();
    })
    .join();
    // Panicking while holding the write guard poisons the lock for readers.
    assert!(lock.read().is_err());
}
64
#[test]
// Android aborts on panic and this test relies on stack unwinding.
#[cfg(not(target_os = "android"))]
fn arc_poison_ww() {
    let lock = Arc::new(ShardedLock::new(1));
    assert!(!lock.is_poisoned());
    let remote = lock.clone();
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = remote.write().unwrap();
        panic!();
    })
    .join();
    // Panicking while holding the write guard poisons the lock for writers too.
    assert!(lock.write().is_err());
    assert!(lock.is_poisoned());
}
80
#[test]
// Android aborts on panic and this test relies on stack unwinding.
#[cfg(not(target_os = "android"))]
fn arc_no_poison_rr() {
    let lock = Arc::new(ShardedLock::new(1));
    let remote = lock.clone();
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = remote.read().unwrap();
        panic!();
    })
    .join();
    // A panic under a *read* guard must not poison the lock for readers.
    let guard = lock.read().unwrap();
    assert_eq!(*guard, 1);
}
#[test]
// Android aborts on panic and this test relies on stack unwinding.
#[cfg(not(target_os = "android"))]
fn arc_no_poison_sl() {
    let lock = Arc::new(ShardedLock::new(1));
    let remote = lock.clone();
    let _: Result<(), _> = thread::spawn(move || {
        let _guard = remote.read().unwrap();
        panic!()
    })
    .join();
    // A panic under a *read* guard must not poison the lock for writers.
    let guard = lock.write().unwrap();
    assert_eq!(*guard, 1);
}
109
#[test]
fn arc() {
    let counter = Arc::new(ShardedLock::new(0));
    let writer_handle = counter.clone();
    let (done_tx, done_rx) = channel();

    // A single writer bumps the counter ten times, briefly exposing an
    // invalid (-1) value inside its critical section each iteration.
    thread::spawn(move || {
        let mut guard = writer_handle.write().unwrap();
        for _ in 0..10 {
            let saved = *guard;
            *guard = -1;
            thread::yield_now();
            *guard = saved + 1;
        }
        done_tx.send(()).unwrap();
    });

    // Readers try to catch the writer in the act: they must never observe
    // the transient -1.
    let readers: Vec<_> = (0..5)
        .map(|_| {
            let reader_handle = counter.clone();
            thread::spawn(move || {
                let guard = reader_handle.read().unwrap();
                assert!(*guard >= 0);
            })
        })
        .collect();

    // Wait for readers to pass their asserts.
    for handle in readers {
        assert!(handle.join().is_ok());
    }

    // Wait for the writer to finish, then check the final value.
    done_rx.recv().unwrap();
    assert_eq!(*counter.read().unwrap(), 10);
}
147
#[test]
// Android aborts on panic and this test relies on stack unwinding.
#[cfg(not(target_os = "android"))]
fn arc_access_in_unwind() {
    let lock = Arc::new(ShardedLock::new(1));
    let remote = lock.clone();
    let _ = thread::spawn(move || {
        // Increments the shared value from its destructor, which runs while
        // the panicking thread is unwinding — the lock must still work then.
        struct Unwinder {
            i: Arc<ShardedLock<isize>>,
        }
        impl Drop for Unwinder {
            fn drop(&mut self) {
                let mut guard = self.i.write().unwrap();
                *guard += 1;
            }
        }
        let _u = Unwinder { i: remote };
        panic!();
    })
    .join();
    let guard = lock.read().unwrap();
    assert_eq!(*guard, 2);
}
171
#[test]
fn unsized_type() {
    // ShardedLock supports unsized payloads such as slices via unsizing coercion.
    let sl: &ShardedLock<[i32]> = &ShardedLock::new([1, 2, 3]);
    {
        let slice = &mut *sl.write().unwrap();
        slice[0] = 4;
        slice[2] = 5;
    }
    let expected: &[i32] = &[4, 2, 5];
    assert_eq!(&*sl.read().unwrap(), expected);
}
183
#[test]
fn try_write() {
    let lock = ShardedLock::new(0isize);
    let read_guard = lock.read().unwrap();

    // While a reader holds the lock, try_write must fail with WouldBlock —
    // not succeed, and not report poisoning.
    let write_result = lock.try_write();
    match write_result {
        Err(TryLockError::WouldBlock) => (),
        Ok(_) => panic!("try_write should not succeed while read_guard is in scope"),
        Err(_) => panic!("unexpected error"),
    }

    drop(read_guard);

    // Once the last reader is gone, try_write should succeed immediately.
    // (The original test dropped the guard but never verified this half.)
    assert!(lock.try_write().is_ok());
}
198
#[test]
fn test_into_inner() {
    // into_inner consumes the lock and yields the wrapped value.
    let lock = ShardedLock::new(NonCopy(10));
    let inner = lock.into_inner().unwrap();
    assert_eq!(inner, NonCopy(10));
}
204
#[test]
fn test_into_inner_drop() {
    // Counts how many times a `Foo` value has been dropped.
    struct Foo(Arc<AtomicUsize>);
    impl Drop for Foo {
        fn drop(&mut self) {
            self.0.fetch_add(1, Ordering::SeqCst);
        }
    }

    let drops = Arc::new(AtomicUsize::new(0));
    let lock = ShardedLock::new(Foo(drops.clone()));
    assert_eq!(drops.load(Ordering::SeqCst), 0);

    let inner = lock.into_inner().unwrap();
    // into_inner moves the value out without dropping it...
    assert_eq!(drops.load(Ordering::SeqCst), 0);
    drop(inner);
    // ...and the value is dropped exactly once afterwards.
    assert_eq!(drops.load(Ordering::SeqCst), 1);
}
222
#[test]
// Android aborts on panic and this test relies on stack unwinding.
#[cfg(not(target_os = "android"))]
fn test_into_inner_poison() {
    let lock = Arc::new(ShardedLock::new(NonCopy(10)));
    let remote = lock.clone();
    let _ = thread::spawn(move || {
        let _guard = remote.write().unwrap();
        panic!("test panic in inner thread to poison ShardedLock");
    })
    .join();

    assert!(lock.is_poisoned());
    // into_inner on a poisoned lock reports the poison but still yields
    // the (intact) value through the error.
    let owned = Arc::try_unwrap(lock).unwrap();
    match owned.into_inner() {
        Err(e) => assert_eq!(e.into_inner(), NonCopy(10)),
        Ok(x) => panic!("into_inner of poisoned ShardedLock is Ok: {:?}", x),
    }
}
241
#[test]
fn test_get_mut() {
    // get_mut gives lock-free mutable access when the lock is owned uniquely.
    let mut lock = ShardedLock::new(NonCopy(10));
    let value = lock.get_mut().unwrap();
    *value = NonCopy(20);
    assert_eq!(lock.into_inner().unwrap(), NonCopy(20));
}
248
#[test]
// Android aborts on panic and this test relies on stack unwinding.
#[cfg(not(target_os = "android"))]
fn test_get_mut_poison() {
    let lock = Arc::new(ShardedLock::new(NonCopy(10)));
    let remote = lock.clone();
    let _ = thread::spawn(move || {
        let _guard = remote.write().unwrap();
        panic!("test panic in inner thread to poison ShardedLock");
    })
    .join();

    assert!(lock.is_poisoned());
    // get_mut on a poisoned lock reports the poison but the value inside
    // is still reachable through the error.
    let mut owned = Arc::try_unwrap(lock).unwrap();
    match owned.get_mut() {
        Err(e) => assert_eq!(*e.into_inner(), NonCopy(10)),
        Ok(x) => panic!("get_mut of poisoned ShardedLock is Ok: {:?}", x),
    }
}
267