static_init/phase_locker/
unsync.rs1use super::{LockNature, LockResult, Mappable, MutPhaseLocker, PhaseGuard, PhaseLocker};
2use crate::phase::*;
3use crate::{Phase, Phased};
4use core::cell::Cell;
5use core::mem::forget;
6use core::ops::Deref;
7
8#[cfg(any(feature = "parking_lot_core", debug_mode))]
9use std::panic::RefUnwindSafe;
10
/// Single-threaded phase locker: one `Cell<u32>` packing the current phase
/// bits together with the lock state (`LOCKED_BIT` and the reader count in
/// `READER_BITS`).
pub(crate) struct UnSyncPhaseLocker(Cell<u32>);
13
/// Write (exclusive) guard: the protected reference, the lock cell, and the
/// phase that will be published when the guard commits or drops.
pub(crate) struct UnSyncPhaseGuard<'a, T: ?Sized>(&'a T, &'a Cell<u32>, Phase);
16
/// Read (shared) guard: the protected reference and the lock cell in which
/// this guard accounts for one `READER_UNITY` of the reader count.
pub(crate) struct UnSyncReadPhaseGuard<'a, T: ?Sized>(&'a T, &'a Cell<u32>);
19
20impl<'a, T> Deref for UnSyncPhaseGuard<'a, T> {
21 type Target = T;
22 #[inline(always)]
23 fn deref(&self) -> &T {
24 self.0
25 }
26}
27
28impl<'a, T> Phased for UnSyncPhaseGuard<'a, T> {
29 fn phase(this: &Self) -> Phase {
30 this.2
31 }
32}
33
34impl<'a, T: ?Sized> UnSyncPhaseGuard<'a, T> {
35 #[inline(always)]
36 pub(crate) fn new(r: &'a T, p: &'a Cell<u32>) -> Self {
37 Self(r, p, Phase::from_bits_truncate(p.get()))
38 }
39
40 #[inline(always)]
41 pub fn map<S: ?Sized>(self, f: impl FnOnce(&'a T) -> &'a S) -> UnSyncPhaseGuard<'a, S> {
42 let p = UnSyncPhaseGuard(f(self.0), self.1, self.2);
43 forget(self);
44 p
45 }
46}
47impl<'a, T: 'a, U: 'a> Mappable<T, U, UnSyncPhaseGuard<'a, U>> for UnSyncPhaseGuard<'a, T> {
48 #[inline(always)]
49 fn map<F: FnOnce(&'a T) -> &'a U>(self, f: F) -> UnSyncPhaseGuard<'a, U> {
50 Self::map(self, f)
51 }
52}
53
// SAFETY: `commit_phase` keeps LOCKED_BIT set while publishing the phase,
// and only the guard's `Drop` finally clears it, so the lock remains held
// for the guard's entire lifetime.
unsafe impl<'a, T: ?Sized> PhaseGuard<'a, T> for UnSyncPhaseGuard<'a, T> {
    // Record the phase to publish later; the lock cell is untouched here.
    #[inline(always)]
    fn set_phase(&mut self, p: Phase) {
        self.2 = p;
    }
    // Publish the recorded phase now, keeping LOCKED_BIT set because the
    // guard is still alive and therefore still owns the lock.
    #[inline(always)]
    fn commit_phase(&mut self) {
        self.1.set(self.2.bits() | LOCKED_BIT);
    }
    // Phase as recorded in the guard (not re-read from the cell).
    #[inline(always)]
    fn phase(&self) -> Phase {
        self.2
    }
    // Run `f` with panic-safety: `on_panic` is recorded *before* the call,
    // so if `f` unwinds, the guard's Drop publishes `on_panic`; on normal
    // return the record is replaced with `on_success`.
    #[inline(always)]
    fn transition<R>(
        &mut self,
        f: impl FnOnce(&'a T) -> R,
        on_success: Phase,
        on_panic: Phase,
    ) -> R {
        self.2 = on_panic;
        let res = f(self.0);
        self.2 = on_success;
        res
    }
}
80
// Opt back into unwind safety: the guard holds a `&Cell<u32>`, which makes
// the type !RefUnwindSafe by default; this is sound to assert only when the
// protected `T` is itself unwind safe, hence the bound.
#[cfg(any(feature = "parking_lot_core", debug_mode))]
impl<'a, T: RefUnwindSafe> RefUnwindSafe for UnSyncPhaseGuard<'a, T> {}
83
84impl<'a, T: ?Sized> From<UnSyncPhaseGuard<'a, T>> for UnSyncReadPhaseGuard<'a, T> {
85 #[inline(always)]
86 fn from(l: UnSyncPhaseGuard<'a, T>) -> UnSyncReadPhaseGuard<'a, T> {
87 l.1.set(l.2.bits() | READER_UNITY);
88 let r = UnSyncReadPhaseGuard(l.0, l.1);
89 forget(l);
90 r
91 }
92}
93
impl<'a, T: ?Sized> Drop for UnSyncPhaseGuard<'a, T> {
    // Release the lock: the whole cell is overwritten with the recorded
    // phase bits, which clears LOCKED_BIT (and any other non-phase bit).
    #[inline(always)]
    fn drop(&mut self) {
        self.1.set(self.2.bits());
    }
}
100
101impl<'a, T> Deref for UnSyncReadPhaseGuard<'a, T> {
102 type Target = T;
103 #[inline(always)]
104 fn deref(&self) -> &T {
105 self.0
106 }
107}
108
109impl<'a, T> Phased for UnSyncReadPhaseGuard<'a, T> {
110 fn phase(this: &Self) -> Phase {
111 this.phase()
112 }
113}
114impl<'a, T> Clone for UnSyncReadPhaseGuard<'a, T> {
115 fn clone(&self) -> Self {
116 self.1.set(self.1.get().checked_add(READER_UNITY).unwrap());
117 Self(self.0, self.1)
118 }
119}
120
121impl<'a, T: ?Sized> UnSyncReadPhaseGuard<'a, T> {
122 #[inline(always)]
123 pub(crate) fn new(r: &'a T, p: &'a Cell<u32>) -> Self {
124 Self(r, p)
125 }
126 #[inline(always)]
127 pub fn phase(&self) -> Phase {
128 Phase::from_bits_truncate(self.1.get())
129 }
130 #[inline(always)]
131 pub fn map<S: ?Sized>(self, f: impl FnOnce(&'a T) -> &'a S) -> UnSyncReadPhaseGuard<'a, S> {
132 let p = UnSyncReadPhaseGuard(f(self.0), self.1);
133 forget(self);
134 p
135 }
136}
137impl<'a, T: 'a, U: 'a> Mappable<T, U, UnSyncReadPhaseGuard<'a, U>> for UnSyncReadPhaseGuard<'a, T> {
138 #[inline(always)]
139 fn map<F: FnOnce(&'a T) -> &'a U>(self, f: F) -> UnSyncReadPhaseGuard<'a, U> {
140 Self::map(self, f)
141 }
142}
143
// Opt back into unwind safety despite the inner `&Cell<u32>`.
// NOTE(review): unlike the write-guard impl, this one has no
// `T: RefUnwindSafe` bound — confirm the asymmetry is intentional.
#[cfg(any(feature = "parking_lot_core", debug_mode))]
impl<'a, T> RefUnwindSafe for UnSyncReadPhaseGuard<'a, T> {}
146
impl<'a, T: ?Sized> Drop for UnSyncReadPhaseGuard<'a, T> {
    // Unregister this reader by subtracting one READER_UNITY. The plain
    // subtraction relies on the count having been incremented when the
    // guard was created (a debug build would panic on underflow otherwise).
    #[inline(always)]
    fn drop(&mut self) {
        self.1.set(self.1.get() - READER_UNITY);
    }
}
153
154unsafe impl MutPhaseLocker for UnSyncPhaseLocker {
159 #[inline(always)]
160 fn get_phase_unique(&mut self) -> Phase {
161 Phase::from_bits(self.0.get()).unwrap()
162 }
163
164 #[inline(always)]
165 fn set_phase(&mut self, p: Phase) {
166 *self.0.get_mut() = p.bits();
167 }
168
169 #[inline(always)]
170 fn transition<R>(&mut self, f: impl FnOnce() -> R, on_success: Phase, on_panic: Phase) -> R {
171 self.0.set(on_panic.bits());
172 let r = f();
173 self.0.set(on_success.bits());
174 r
175 }
176}
// Trait facade over the inherent methods below. The extra parameters
// (`_on_wake_nature`, `_hint`) are ignored: in this single-threaded locker
// there is never another thread to wait on or be woken by.
unsafe impl<'a, T: 'a> PhaseLocker<'a, T> for UnSyncPhaseLocker {
    type ReadGuard = UnSyncReadPhaseGuard<'a, T>;
    type WriteGuard = UnSyncPhaseGuard<'a, T>;

    // `Self::lock` resolves to the inherent method (inherent impls take
    // precedence over trait methods), not to a recursive trait call.
    #[inline(always)]
    fn lock<FL: Fn(Phase) -> LockNature, FW: Fn(Phase) -> LockNature>(
        &'a self,
        value: &'a T,
        lock_nature: FL,
        _on_wake_nature: FW,
        _hint: Phase,
    ) -> LockResult<Self::ReadGuard, Self::WriteGuard> {
        Self::lock(self, value, lock_nature)
    }
    // Delegates to the inherent `lock_mut` (infallible under `&mut self`).
    #[inline(always)]
    fn lock_mut(&'a mut self, value: &'a T) -> Self::WriteGuard {
        Self::lock_mut(self, value)
    }
    // Delegates to the inherent `try_lock`; the hint is irrelevant here.
    #[inline(always)]
    fn try_lock<F: Fn(Phase) -> LockNature>(
        &'a self,
        value: &'a T,
        lock_nature: F,
        _hint: Phase,
    ) -> Option<LockResult<Self::ReadGuard, Self::WriteGuard>> {
        Self::try_lock(self, value, lock_nature)
    }
    // Delegates to the inherent `phase`.
    #[inline(always)]
    fn phase(&self) -> Phase {
        Self::phase(self)
    }
}
209
210impl Phased for UnSyncPhaseLocker {
211 fn phase(this: &Self) -> Phase {
212 this.phase()
213 }
214}
215
216impl UnSyncPhaseLocker {
217 #[inline(always)]
218 pub const fn new(p: Phase) -> Self {
219 Self(Cell::new(p.bits()))
220 }
221 #[inline(always)]
222 pub fn phase(&self) -> Phase {
224 Phase::from_bits_truncate(self.0.get())
225 }
226 #[inline(always)]
227 pub fn try_lock<'a, T: ?Sized>(
235 &'a self,
236 v: &'a T,
237 lock_nature: impl Fn(Phase) -> LockNature,
238 ) -> Option<LockResult<UnSyncReadPhaseGuard<'_, T>, UnSyncPhaseGuard<'_, T>>> {
239 match lock_nature(self.phase()) {
240 LockNature::Write => {
241 if self.0.get() & (LOCKED_BIT | READER_BITS) != 0 {
242 None
243 } else {
244 self.0.set(self.0.get() | LOCKED_BIT);
245 Some(LockResult::Write(UnSyncPhaseGuard::new(v, &self.0)))
246 }
247 }
248 LockNature::Read => {
249 if self.0.get() & LOCKED_BIT != 0 || self.0.get() & READER_BITS == READER_BITS {
250 None
251 } else {
252 self.0.set(self.0.get().checked_add(READER_UNITY).unwrap());
253 Some(LockResult::Read(UnSyncReadPhaseGuard::new(v, &self.0)))
254 }
255 }
256 LockNature::None => Some(LockResult::None(self.phase())),
257 }
258 }
259 #[inline(always)]
260 pub fn lock_mut<'a, T: ?Sized>(&'a mut self, v: &'a T) -> UnSyncPhaseGuard<'_, T> {
262 self.0.set(self.0.get() | LOCKED_BIT);
263 UnSyncPhaseGuard::new(v, &self.0)
264 }
265 #[inline(always)]
266 pub fn lock<'a, T: ?Sized>(
274 &'a self,
275 v: &'a T,
276 lock_nature: impl Fn(Phase) -> LockNature,
277 ) -> LockResult<UnSyncReadPhaseGuard<'_, T>, UnSyncPhaseGuard<'_, T>> {
278 match lock_nature(self.phase()) {
279 LockNature::Write => {
280 assert_eq!(
281 self.0.get() & (LOCKED_BIT | READER_BITS),
282 0,
283 "Cannot get a mutable reference if it is already mutably borrowed"
284 );
285 self.0.set(self.0.get() | LOCKED_BIT);
286 LockResult::Write(UnSyncPhaseGuard::new(v, &self.0))
287 }
288 LockNature::Read => {
289 assert_eq!(
290 self.0.get() & LOCKED_BIT,
291 0,
292 "Cannot get a shared reference if it is alread mutably borrowed"
293 );
294 assert_ne!(
295 self.0.get() & (READER_BITS),
296 READER_BITS,
297 "Maximal number of shared borrow reached."
298 );
299 self.0.set(self.0.get().checked_add(READER_UNITY).unwrap());
300 LockResult::Read(UnSyncReadPhaseGuard::new(v, &self.0))
301 }
302 LockNature::None => LockResult::None(self.phase()),
303 }
304 }
305}
306
// Opt back into unwind safety: the inner `Cell<u32>` makes the locker
// !RefUnwindSafe by default, but a panic can only leave stale lock bits,
// which the guards' Drop/assert logic is built to tolerate.
#[cfg(any(feature = "parking_lot_core", debug_mode))]
impl RefUnwindSafe for UnSyncPhaseLocker {}