// crossbeam-utils/src/atomic/seq_lock.rs

use core::mem;
use core::sync::atomic::{self, AtomicUsize, Ordering};

use crate::Backoff;
/// A simple stamped lock (sequence lock).
///
/// Readers proceed optimistically and re-check the stamp afterwards; a writer
/// holds the lock exclusively while it mutates the protected data.
pub(crate) struct SeqLock {
    /// The current state of the lock.
    ///
    /// The state encodes both the lock status and a version stamp: the special
    /// value 1 means a writer currently holds the lock, while any other value
    /// is the stamp of the last completed write. Stamps start at 0 and are
    /// bumped by 2 on each write, so they stay even and never collide with 1.
    state: AtomicUsize,
}
14
impl SeqLock {
    /// Creates a new `SeqLock`, unlocked and with the initial stamp of zero.
    pub(crate) const fn new() -> Self {
        Self {
            state: AtomicUsize::new(0),
        }
    }

    /// If the lock is not held, returns the current stamp.
    ///
    /// This method should be called before optimistic reads; the returned
    /// stamp is later passed to `validate_read` to check that no write
    /// happened in between.
    #[inline]
    pub(crate) fn optimistic_read(&self) -> Option<usize> {
        let state = self.state.load(Ordering::Acquire);
        if state == 1 {
            // A writer currently holds the lock — reading now would race.
            None
        } else {
            Some(state)
        }
    }

    /// Returns `true` if the current stamp is equal to `stamp`.
    ///
    /// This should be called after optimistic reads to check whether they are
    /// valid. The `stamp` argument must be the value previously returned by
    /// `optimistic_read`.
    #[inline]
    pub(crate) fn validate_read(&self, stamp: usize) -> bool {
        // The Acquire fence orders the caller's preceding optimistic reads
        // before the stamp re-check below, so the Relaxed load suffices.
        atomic::fence(Ordering::Acquire);
        self.state.load(Ordering::Relaxed) == stamp
    }

    /// Grabs the lock for writing, spinning (with backoff) until it is free.
    ///
    /// Requires `&'static self` because the returned guard stores a
    /// `&'static SeqLock`.
    #[inline]
    pub(crate) fn write(&'static self) -> SeqLockWriteGuard {
        let backoff = Backoff::new();
        loop {
            // Swap in the "locked" marker; `previous` is the stamp we must
            // restore (abort) or bump (drop) when the guard is released.
            let previous = self.state.swap(1, Ordering::Acquire);

            if previous != 1 {
                // NOTE(review): the Release fence appears intended to order
                // the lock acquisition before the writes the guard holder
                // performs next — confirm against the C++11 memory model.
                atomic::fence(Ordering::Release);

                return SeqLockWriteGuard {
                    lock: self,
                    state: previous,
                };
            }

            // Another writer holds the lock; back off and retry.
            backoff.snooze();
        }
    }
}
65
/// An RAII guard that releases the write lock when dropped.
pub(crate) struct SeqLockWriteGuard {
    /// The parent lock (must be `'static`, since the guard may outlive the
    /// scope that created it).
    lock: &'static SeqLock,

    /// The stamp that was in place before the lock was taken: restored as-is
    /// by `abort`, or bumped by 2 when the guard is dropped normally.
    state: usize,
}
74
75impl SeqLockWriteGuard {
76 #[inline]
78 pub(crate) fn abort(self) {
79 self.lock.state.store(self.state, Ordering::Release);
80
81 mem::forget(self);
84 }
85}
86
impl Drop for SeqLockWriteGuard {
    #[inline]
    fn drop(&mut self) {
        // Release the lock and bump the stamp: adding 2 keeps the value even
        // (so it can never equal the "locked" marker 1) while making it
        // differ from the stamp seen by any in-flight optimistic read, whose
        // subsequent `validate_read` will therefore fail.
        self.lock
            .state
            .store(self.state.wrapping_add(2), Ordering::Release);
    }
}
96
#[cfg(test)]
mod tests {
    use super::SeqLock;

    /// An aborted write must leave the stamp exactly as it was, so that
    /// optimistic readers started before the write remain valid.
    #[test]
    fn test_abort() {
        static LK: SeqLock = SeqLock::new();
        let stamp_before = LK.optimistic_read().unwrap();

        // Take the write lock, then abandon the write without touching data.
        LK.write().abort();

        let stamp_after = LK.optimistic_read().unwrap();
        assert_eq!(
            stamp_before, stamp_after,
            "aborted write does not update the stamp"
        );
    }
}