summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMica White <botahamec@gmail.com>2024-12-26 11:26:39 -0500
committerMica White <botahamec@gmail.com>2024-12-26 12:06:47 -0500
commitdc16634f4abdb1e830d2749e64b419740702b302 (patch)
treeeb51ba8293a1c719c7221d546185cfa7062c108c /src
parent096afea6f13692fddbfad0b07e5377cb2e81dd58 (diff)
Commenting
Diffstat (limited to 'src')
-rw-r--r--src/collection/boxed.rs9
-rw-r--r--src/collection/owned.rs12
-rw-r--r--src/collection/retry.rs47
-rw-r--r--src/collection/utils.rs8
-rw-r--r--src/handle_unwind.rs1
-rw-r--r--src/lockable.rs72
-rw-r--r--src/mutex/guard.rs5
-rw-r--r--src/mutex/mutex.rs3
-rw-r--r--src/poisonable/error.rs12
-rw-r--r--src/poisonable/poisonable.rs2
-rw-r--r--src/rwlock/read_guard.rs3
-rw-r--r--src/rwlock/read_lock.rs2
-rw-r--r--src/rwlock/rwlock.rs65
-rw-r--r--src/rwlock/write_guard.rs3
-rw-r--r--src/rwlock/write_lock.rs3
15 files changed, 160 insertions, 87 deletions
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index a048d2b..c0cc294 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -21,6 +21,7 @@ fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
}
unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
+ #[mutants::skip] // this should never be called
fn poison(&self) {
for lock in &self.locks {
lock.poison();
@@ -84,6 +85,9 @@ unsafe impl<L: Sharable> Sharable for BoxedLockCollection<L> {
unsafe impl<L: OwnedLockable> OwnedLockable for BoxedLockCollection<L> {}
+// LockableGetMut can't be implemented because that would create mutable and
+// immutable references to the same value at the same time.
+
impl<L: LockableIntoInner> LockableIntoInner for BoxedLockCollection<L> {
type Inner = L::Inner;
@@ -131,7 +135,7 @@ unsafe impl<L: Send> Send for BoxedLockCollection<L> {}
unsafe impl<L: Sync> Sync for BoxedLockCollection<L> {}
impl<L> Drop for BoxedLockCollection<L> {
- #[mutants::skip]
+	#[mutants::skip] // I can't test for a memory leak
fn drop(&mut self) {
unsafe {
// safety: this collection will never be locked again
@@ -203,6 +207,9 @@ impl<L> BoxedLockCollection<L> {
}
}
+ // child_mut is immediate UB because it leads to mutable and immutable
+ // references happening at the same time
+
/// Gets an immutable reference to the underlying data
#[must_use]
pub fn child(&self) -> &L {
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index 59e1ff8..3d6fa93 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -128,11 +128,9 @@ impl<E: OwnedLockable + Extend<L>, L: OwnedLockable> Extend<L> for OwnedLockColl
}
}
-impl<T, L: AsRef<T>> AsRef<T> for OwnedLockCollection<L> {
- fn as_ref(&self) -> &T {
- self.data.as_ref()
- }
-}
+// AsRef can't be implemented because an impl of AsRef<L> for L could break the
+// invariant that there is only one way to lock the collection. AsMut is fine,
+// because the collection can't be locked as long as the reference is valid.
impl<T, L: AsMut<T>> AsMut<T> for OwnedLockCollection<L> {
fn as_mut(&mut self) -> &mut T {
@@ -416,6 +414,10 @@ impl<L> OwnedLockCollection<L> {
pub fn into_child(self) -> L {
self.data
}
+
+ pub fn child_mut(&mut self) -> &mut L {
+ &mut self.data
+ }
}
impl<L: LockableGetMut> OwnedLockCollection<L> {
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index cb6a1fb..83842f8 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -48,17 +48,22 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // this probably prevents a panic later
return;
}
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
|| unsafe {
'outer: loop {
+ // This prevents us from entering a spin loop waiting for
+ // the same lock to be unlocked
// safety: we have the thread key
locks[first_index].raw_lock();
for (i, lock) in locks.iter().enumerate() {
if i == first_index {
+ // we've already locked this one
continue;
}
@@ -70,10 +75,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
if lock.raw_try_lock() {
locked.borrow_mut().push(*lock)
} else {
- for lock in locks.iter().take(i) {
+ for lock in locked.borrow().iter() {
// safety: we already locked all of these
lock.raw_unlock();
}
+ // these are no longer locked
+ locked.borrow_mut().clear();
if first_index >= i {
// safety: this is already locked and can't be unlocked
@@ -81,6 +88,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
locks[first_index].raw_unlock();
}
+ // call lock on this to prevent a spin loop
first_index = i;
continue 'outer;
}
@@ -98,9 +106,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // this is an interesting case, but it doesn't give us access to
+ // any data, and can't possibly cause a deadlock
return true;
}
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
|| unsafe {
@@ -136,6 +147,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // this probably prevents a panic later
return;
}
@@ -153,10 +165,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
if lock.raw_try_read() {
locked.borrow_mut().push(*lock);
} else {
- for lock in locks.iter().take(i) {
+ for lock in locked.borrow().iter() {
// safety: we already locked all of these
lock.raw_unlock_read();
}
+ // these are no longer locked
+ locked.borrow_mut().clear();
if first_index >= i {
// safety: this is already locked and can't be unlocked
@@ -164,6 +178,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
locks[first_index].raw_unlock_read();
}
+ // don't go into a spin loop, wait for this one to lock
first_index = i;
continue 'outer;
}
@@ -180,6 +195,8 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // this is an interesting case, but it doesn't give us access to
+ // any data, and can't possibly cause a deadlock
return true;
}
@@ -229,6 +246,19 @@ unsafe impl<L: Lockable> Lockable for RetryingLockCollection<L> {
}
}
+unsafe impl<L: Sharable> Sharable for RetryingLockCollection<L> {
+ type ReadGuard<'g>
+ = L::ReadGuard<'g>
+ where
+ Self: 'g;
+
+ unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
+ self.data.read_guard()
+ }
+}
+
+unsafe impl<L: OwnedLockable> OwnedLockable for RetryingLockCollection<L> {}
+
impl<L: LockableGetMut> LockableGetMut for RetryingLockCollection<L> {
type Inner<'a>
= L::Inner<'a>
@@ -248,19 +278,6 @@ impl<L: LockableIntoInner> LockableIntoInner for RetryingLockCollection<L> {
}
}
-unsafe impl<L: Sharable> Sharable for RetryingLockCollection<L> {
- type ReadGuard<'g>
- = L::ReadGuard<'g>
- where
- Self: 'g;
-
- unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
- self.data.read_guard()
- }
-}
-
-unsafe impl<L: OwnedLockable> OwnedLockable for RetryingLockCollection<L> {}
-
impl<L> IntoIterator for RetryingLockCollection<L>
where
L: IntoIterator,
diff --git a/src/collection/utils.rs b/src/collection/utils.rs
index 36b19be..d368773 100644
--- a/src/collection/utils.rs
+++ b/src/collection/utils.rs
@@ -3,7 +3,9 @@ use std::cell::RefCell;
use crate::handle_unwind::handle_unwind;
use crate::lockable::RawLock;
+/// Lock a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_lock(locks: &[&dyn RawLock]) {
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
@@ -17,6 +19,7 @@ pub unsafe fn ordered_lock(locks: &[&dyn RawLock]) {
)
}
+/// Acquire read locks on a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_read(locks: &[&dyn RawLock]) {
let locked = RefCell::new(Vec::with_capacity(locks.len()));
@@ -63,6 +66,7 @@ pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
/// Locks the locks in the order they are given. This causes deadlock if this
/// is called by multiple threads with the locks in different orders.
pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
@@ -88,6 +92,7 @@ pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
)
}
+/// Unlocks the already locked locks in order to recover from a panic
pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
handle_unwind(
|| {
@@ -96,10 +101,12 @@ pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawL
locked_lock.raw_unlock();
}
},
+ // if we get another panic in here, we'll just have to poison what remains
|| locked.borrow().iter().for_each(|l| l.poison()),
)
}
+/// Unlocks the already-acquired read locks in order to recover from a panic
pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
handle_unwind(
|| {
@@ -108,6 +115,7 @@ pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawL
locked_lock.raw_unlock_read();
}
},
+ // if we get another panic in here, we'll just have to poison what remains
|| locked.borrow().iter().for_each(|l| l.poison()),
)
}
diff --git a/src/handle_unwind.rs b/src/handle_unwind.rs
index d515449..220d379 100644
--- a/src/handle_unwind.rs
+++ b/src/handle_unwind.rs
@@ -1,5 +1,6 @@
use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};
+/// Runs `try_fn`. If it unwinds, it will run `catch` and then continue unwinding
pub fn handle_unwind<R, F: FnOnce() -> R, G: FnOnce()>(try_fn: F, catch: G) -> R {
let try_fn = AssertUnwindSafe(try_fn);
catch_unwind(try_fn).unwrap_or_else(|e| {
diff --git a/src/lockable.rs b/src/lockable.rs
index d599820..7ddc104 100644
--- a/src/lockable.rs
+++ b/src/lockable.rs
@@ -97,11 +97,10 @@ pub unsafe trait RawLock {
///
/// # Safety
///
-/// Acquiring the locks returned by `get_ptrs` must allow for the values
-/// returned by `guard` or `read_guard` to be safely used for exclusive or
-/// shared access, respectively.
+/// Acquiring the locks returned by `get_ptrs` must allow access to the values
+/// returned by `guard`.
///
-/// Dropping the `Guard` and `ReadGuard` types must unlock those same locks.
+/// Dropping the `Guard` must unlock those same locks.
///
/// The order of the resulting list from `get_ptrs` must be deterministic. As
/// long as the value is not mutated, the references must always be in the same
@@ -132,6 +131,41 @@ pub unsafe trait Lockable {
unsafe fn guard(&self) -> Self::Guard<'_>;
}
+/// Allows a lock to be accessed by multiple readers.
+///
+/// # Safety
+///
+/// Acquiring shared access to the locks returned by `get_ptrs` must allow
+/// shared access to the values returned by `read_guard`.
+///
+/// Dropping the `ReadGuard` must unlock those same locks.
+pub unsafe trait Sharable: Lockable {
+ /// The shared guard type that does not hold a key
+ type ReadGuard<'g>
+ where
+ Self: 'g;
+
+ /// Returns a guard that can be used to immutably access the underlying
+ /// data.
+ ///
+ /// # Safety
+ ///
+ /// All locks given by calling [`Lockable::get_ptrs`] must be locked using
+ /// [`RawLock::read`] before calling this function. The locks must not be
+ /// unlocked until this guard is dropped.
+ #[must_use]
+ unsafe fn read_guard(&self) -> Self::ReadGuard<'_>;
+}
+
+/// A type that may be locked and unlocked, and is known to be the only valid
+/// instance of the lock.
+///
+/// # Safety
+///
+/// There must not be any two values which can unlock the value at the same
+/// time, i.e., this must either be an owned value or a mutable reference.
+pub unsafe trait OwnedLockable: Lockable {}
+
/// A trait which indicates that `into_inner` is a valid operation for a
/// [`Lockable`].
///
@@ -170,34 +204,6 @@ pub trait LockableGetMut: Lockable {
fn get_mut(&mut self) -> Self::Inner<'_>;
}
-/// Allows a lock to be accessed by multiple readers.
-pub unsafe trait Sharable: Lockable {
- /// The shared guard type that does not hold a key
- type ReadGuard<'g>
- where
- Self: 'g;
-
- /// Returns a guard that can be used to immutably access the underlying
- /// data.
- ///
- /// # Safety
- ///
- /// All locks given by calling [`Lockable::get_ptrs`] must be locked using
- /// [`RawLock::read`] before calling this function. The locks must not be
- /// unlocked until this guard is dropped.
- #[must_use]
- unsafe fn read_guard(&self) -> Self::ReadGuard<'_>;
-}
-
-/// A type that may be locked and unlocked, and is known to be the only valid
-/// instance of the lock.
-///
-/// # Safety
-///
-/// There must not be any two values which can unlock the value at the same
-/// time, i.e., this must either be an owned value or a mutable reference.
-pub unsafe trait OwnedLockable: Lockable {}
-
unsafe impl<T: Lockable> Lockable for &T {
type Guard<'g>
= T::Guard<'g>
@@ -401,7 +407,7 @@ unsafe impl<T: Lockable> Lockable for Box<[T]> {
Self: 'g;
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
- for lock in self.iter() {
+ for lock in self {
lock.get_ptrs(ptrs);
}
}
diff --git a/src/mutex/guard.rs b/src/mutex/guard.rs
index e661230..0d35cf4 100644
--- a/src/mutex/guard.rs
+++ b/src/mutex/guard.rs
@@ -10,6 +10,9 @@ use crate::lockable::RawLock;
use super::{Mutex, MutexGuard, MutexRef};
+// These impls make things slightly easier because now you can use
+// `println!("{guard}")` instead of `println!("{}", *guard)`
+
impl<T: PartialEq + ?Sized, R: RawMutex> PartialEq for MutexRef<'_, T, R> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(&**other)
@@ -36,8 +39,6 @@ impl<T: Hash + ?Sized, R: RawMutex> Hash for MutexRef<'_, T, R> {
}
}
-// This makes things slightly easier because now you can use
-// `println!("{guard}")` instead of `println!("{}", *guard)`
impl<T: Debug + ?Sized, R: RawMutex> Debug for MutexRef<'_, T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
diff --git a/src/mutex/mutex.rs b/src/mutex/mutex.rs
index 2cf6bbf..505ff83 100644
--- a/src/mutex/mutex.rs
+++ b/src/mutex/mutex.rs
@@ -165,8 +165,7 @@ impl<T, R: RawMutex> From<T> for Mutex<T, R> {
}
// We don't need a `get_mut` because we don't have mutex poisoning. Hurray!
-// This is safe because you can't have a mutable reference to the lock if it's
-// locked. Being locked requires an immutable reference because of the guard.
+// We have it anyway for documentation
impl<T: ?Sized, R> AsMut<T> for Mutex<T, R> {
fn as_mut(&mut self) -> &mut T {
self.get_mut()
diff --git a/src/poisonable/error.rs b/src/poisonable/error.rs
index f27c6ab..d543294 100644
--- a/src/poisonable/error.rs
+++ b/src/poisonable/error.rs
@@ -16,6 +16,18 @@ impl<Guard> fmt::Display for PoisonError<Guard> {
}
}
+impl<Guard> AsRef<Guard> for PoisonError<Guard> {
+ fn as_ref(&self) -> &Guard {
+ self.get_ref()
+ }
+}
+
+impl<Guard> AsMut<Guard> for PoisonError<Guard> {
+ fn as_mut(&mut self) -> &mut Guard {
+ self.get_mut()
+ }
+}
+
impl<Guard> Error for PoisonError<Guard> {}
impl<Guard> PoisonError<Guard> {
diff --git a/src/poisonable/poisonable.rs b/src/poisonable/poisonable.rs
index 3ef1cdd..ea51dd5 100644
--- a/src/poisonable/poisonable.rs
+++ b/src/poisonable/poisonable.rs
@@ -81,6 +81,8 @@ unsafe impl<L: Sharable> Sharable for Poisonable<L> {
unsafe impl<L: OwnedLockable> OwnedLockable for Poisonable<L> {}
+// AsMut won't work here because we don't strictly return a &mut T
+// LockableGetMut is the next best thing
impl<L: LockableGetMut> LockableGetMut for Poisonable<L> {
type Inner<'a>
= PoisonResult<L::Inner<'a>>
diff --git a/src/rwlock/read_guard.rs b/src/rwlock/read_guard.rs
index 44b737e..8678a8e 100644
--- a/src/rwlock/read_guard.rs
+++ b/src/rwlock/read_guard.rs
@@ -10,6 +10,9 @@ use crate::lockable::RawLock;
use super::{RwLock, RwLockReadGuard, RwLockReadRef};
+// These impls make things slightly easier because now you can use
+// `println!("{guard}")` instead of `println!("{}", *guard)`
+
impl<T: PartialEq + ?Sized, R: RawRwLock> PartialEq for RwLockReadRef<'_, T, R> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(&**other)
diff --git a/src/rwlock/read_lock.rs b/src/rwlock/read_lock.rs
index 34e3f5e..d719a50 100644
--- a/src/rwlock/read_lock.rs
+++ b/src/rwlock/read_lock.rs
@@ -22,8 +22,6 @@ unsafe impl<T: Send, R: RawRwLock + Send + Sync> Lockable for ReadLock<'_, T, R>
}
}
-// Technically, the exclusive locks can also be shared, but there's currently
-// no way to express that. I don't think I want to ever express that.
unsafe impl<T: Send, R: RawRwLock + Send + Sync> Sharable for ReadLock<'_, T, R> {
type ReadGuard<'g>
= RwLockReadRef<'g, T, R>
diff --git a/src/rwlock/rwlock.rs b/src/rwlock/rwlock.rs
index 8bb170c..6432128 100644
--- a/src/rwlock/rwlock.rs
+++ b/src/rwlock/rwlock.rs
@@ -88,6 +88,19 @@ unsafe impl<T: Send, R: RawRwLock + Send + Sync> Lockable for RwLock<T, R> {
}
}
+unsafe impl<T: Send, R: RawRwLock + Send + Sync> Sharable for RwLock<T, R> {
+ type ReadGuard<'g>
+ = RwLockReadRef<'g, T, R>
+ where
+ Self: 'g;
+
+ unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
+ RwLockReadRef::new(self)
+ }
+}
+
+unsafe impl<T: Send, R: RawRwLock + Send + Sync> OwnedLockable for RwLock<T, R> {}
+
impl<T: Send, R: RawRwLock + Send + Sync> LockableIntoInner for RwLock<T, R> {
type Inner = T;
@@ -107,19 +120,6 @@ impl<T: Send, R: RawRwLock + Send + Sync> LockableGetMut for RwLock<T, R> {
}
}
-unsafe impl<T: Send, R: RawRwLock + Send + Sync> Sharable for RwLock<T, R> {
- type ReadGuard<'g>
- = RwLockReadRef<'g, T, R>
- where
- Self: 'g;
-
- unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
- RwLockReadRef::new(self)
- }
-}
-
-unsafe impl<T: Send, R: RawRwLock + Send + Sync> OwnedLockable for RwLock<T, R> {}
-
impl<T, R: RawRwLock> RwLock<T, R> {
/// Creates a new instance of an `RwLock<T>` which is unlocked.
///
@@ -138,20 +138,6 @@ impl<T, R: RawRwLock> RwLock<T, R> {
raw: R::INIT,
}
}
-
- /// Returns the underlying raw reader-writer lock object.
- ///
- /// Note that you will most likely need to import the [`RawRwLock`] trait
- /// from `lock_api` to be able to call functions on the raw reader-writer
- /// lock.
- ///
- /// # Safety
- ///
- /// This method is unsafe because it allows unlocking a mutex while
- /// still holding a reference to a lock guard.
- pub const unsafe fn raw(&self) -> &R {
- &self.raw
- }
}
impl<T: ?Sized + Debug, R: RawRwLock> Debug for RwLock<T, R> {
@@ -188,6 +174,9 @@ impl<T, R: RawRwLock> From<T> for RwLock<T, R> {
}
}
+// We don't need a `get_mut` because we don't have mutex poisoning. Hurray!
+// This is safe because you can't have a mutable reference to the lock if it's
+// locked. Being locked requires an immutable reference because of the guard.
impl<T: ?Sized, R> AsMut<T> for RwLock<T, R> {
fn as_mut(&mut self) -> &mut T {
self.data.get_mut()
@@ -216,6 +205,28 @@ impl<T, R> RwLock<T, R> {
}
}
+impl<T: ?Sized, R> RwLock<T, R> {
+ /// Returns a mutable reference to the underlying data.
+ ///
+ /// Since this call borrows `RwLock` mutably, no actual locking is taking
+ /// place. The mutable borrow statically guarantees that no locks exist.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use happylock::{ThreadKey, RwLock};
+ ///
+ /// let key = ThreadKey::get().unwrap();
+ /// let mut mutex = RwLock::new(0);
+ /// *mutex.get_mut() = 10;
+ /// assert_eq!(*mutex.read(key), 10);
+ /// ```
+ #[must_use]
+ pub fn get_mut(&mut self) -> &mut T {
+ self.data.get_mut()
+ }
+}
+
impl<T: ?Sized, R: RawRwLock> RwLock<T, R> {
/// Locks this `RwLock` with shared read access, blocking the current
/// thread until it can be acquired.
diff --git a/src/rwlock/write_guard.rs b/src/rwlock/write_guard.rs
index c22ebe1..aec3d3d 100644
--- a/src/rwlock/write_guard.rs
+++ b/src/rwlock/write_guard.rs
@@ -10,6 +10,9 @@ use crate::lockable::RawLock;
use super::{RwLock, RwLockWriteGuard, RwLockWriteRef};
+// These impls make things slightly easier because now you can use
+// `println!("{guard}")` instead of `println!("{}", *guard)`
+
impl<T: PartialEq + ?Sized, R: RawRwLock> PartialEq for RwLockWriteRef<'_, T, R> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(&**other)
diff --git a/src/rwlock/write_lock.rs b/src/rwlock/write_lock.rs
index 27fa1d2..e9be750 100644
--- a/src/rwlock/write_lock.rs
+++ b/src/rwlock/write_lock.rs
@@ -22,6 +22,9 @@ unsafe impl<T: Send, R: RawRwLock + Send + Sync> Lockable for WriteLock<'_, T, R
}
}
+// Technically, the exclusive locks can also be shared, but there's currently
+// no way to express that. I don't think I want to ever express that.
+
impl<T: ?Sized + Debug, R: RawRwLock> Debug for WriteLock<'_, T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// safety: this is just a try lock, and the value is dropped