From 7bd236853ef5ae705328c8fdc492cf60fc6887c1 Mon Sep 17 00:00:00 2001 From: Mica White Date: Wed, 13 Mar 2024 22:44:46 -0400 Subject: Lockable overhaul --- src/mutex/mutex.rs | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) (limited to 'src/mutex/mutex.rs') diff --git a/src/mutex/mutex.rs b/src/mutex/mutex.rs index 917ab78..3b8c221 100644 --- a/src/mutex/mutex.rs +++ b/src/mutex/mutex.rs @@ -24,6 +24,23 @@ impl Mutex { data: UnsafeCell::new(data), } } + + /// Returns the raw underlying mutex. + /// + /// Note that you will most likely need to import the [`RawMutex`] trait + /// from `lock_api` to be able to call functions on the raw mutex. + /// + /// # Safety + /// + /// This method is unsafe because it allows unlocking a mutex while still + /// holding a reference to a [`MutexGuard`], and locking a mutex without + /// holding the [`ThreadKey`]. + /// + /// [`ThreadKey`]: `crate::ThreadKey` + #[must_use] + pub const unsafe fn raw(&self) -> &R { + &self.raw + } } impl Default for Mutex { @@ -138,15 +155,6 @@ impl Mutex { } } - /// Lock without a [`ThreadKey`]. You must exclusively own the - /// [`ThreadKey`] as long as the [`MutexRef`] is alive. This may cause - /// deadlock if called multiple times without unlocking first. - pub(crate) unsafe fn lock_no_key(&self) -> MutexRef<'_, T, R> { - self.raw.lock(); - - MutexRef(self, PhantomData) - } - /// Attempts to lock the `Mutex` without blocking. 
/// /// # Errors -- cgit v1.2.3 From ebbe3cfce28914d776f3e5f89894fab50911c57e Mon Sep 17 00:00:00 2001 From: Botahamec Date: Wed, 22 May 2024 15:53:49 -0400 Subject: Implemented necessary traits --- .gitignore | 2 + examples/basic.rs | 2 +- examples/double_mutex.rs | 4 +- examples/list.rs | 2 +- src/collection.rs | 9 +- src/collection/boxed.rs | 293 +++++++++++++++++++++++++++++++++++++++++----- src/collection/guard.rs | 25 ++++ src/collection/owned.rs | 50 ++++++++ src/collection/ref.rs | 33 +++--- src/collection/retry.rs | 102 ++++++++++++++++ src/mutex/guard.rs | 57 +++++++++ src/mutex/mutex.rs | 12 +- src/rwlock/read_guard.rs | 43 +++++++ src/rwlock/rwlock.rs | 12 +- src/rwlock/write_guard.rs | 28 +++++ 15 files changed, 612 insertions(+), 62 deletions(-) (limited to 'src/mutex/mutex.rs') diff --git a/.gitignore b/.gitignore index 4fffb2f..8fa03a9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,4 @@ +/.idea + /target /Cargo.lock diff --git a/examples/basic.rs b/examples/basic.rs index 1d611d3..b9c5641 100644 --- a/examples/basic.rs +++ b/examples/basic.rs @@ -23,5 +23,5 @@ fn main() { let key = ThreadKey::get().unwrap(); let data = DATA.lock(key); - println!("{}", *data); + println!("{data}"); } diff --git a/examples/double_mutex.rs b/examples/double_mutex.rs index e9a9c77..882fc46 100644 --- a/examples/double_mutex.rs +++ b/examples/double_mutex.rs @@ -27,6 +27,6 @@ fn main() { let key = ThreadKey::get().unwrap(); let data = RefLockCollection::new(&DATA); let data = data.lock(key); - println!("{}", *data.0); - println!("{}", *data.1); + println!("{}", data.0); + println!("{}", data.1); } diff --git a/examples/list.rs b/examples/list.rs index dda468a..a649eeb 100644 --- a/examples/list.rs +++ b/examples/list.rs @@ -59,6 +59,6 @@ fn main() { let data = RefLockCollection::new(&DATA); let data = data.lock(key); for val in &*data { - println!("{}", **val); + println!("{val}"); } } diff --git a/src/collection.rs b/src/collection.rs index 6623c8a..a84c1ce 100644 
--- a/src/collection.rs +++ b/src/collection.rs @@ -8,6 +8,7 @@ mod owned; mod r#ref; mod retry; +#[derive(Debug)] pub struct OwnedLockCollection { data: L, } @@ -17,12 +18,16 @@ pub struct OwnedLockCollection { /// This could be a tuple of [`Lockable`] types, an array, or a `Vec`. But it /// can be safely locked without causing a deadlock. pub struct RefLockCollection<'a, L> { - locks: Vec<&'a dyn Lock>, data: &'a L, + locks: Vec<&'a dyn Lock>, } -pub struct BoxedLockCollection<'a, L: 'a>(RefLockCollection<'a, L>); +pub struct BoxedLockCollection { + data: Box, + locks: Vec<&'static dyn Lock>, +} +#[derive(Debug)] pub struct RetryingLockCollection { data: L, } diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs index 8b67ee9..a62a33d 100644 --- a/src/collection/boxed.rs +++ b/src/collection/boxed.rs @@ -1,60 +1,295 @@ -use std::ops::{Deref, DerefMut}; +use std::fmt::Debug; +use std::marker::PhantomData; -use crate::{Lockable, OwnedLockable}; +use crate::lockable::Lock; +use crate::{Keyable, Lockable, OwnedLockable, Sharable}; -use super::{BoxedLockCollection, RefLockCollection}; +use super::{BoxedLockCollection, LockGuard}; -impl<'a, L: 'a> Deref for BoxedLockCollection<'a, L> { - type Target = RefLockCollection<'a, L>; +/// returns `true` if the sorted list contains a duplicate +#[must_use] +fn contains_duplicates(l: &[&dyn Lock]) -> bool { + l.windows(2) + .any(|window| std::ptr::eq(window[0], window[1])) +} + +unsafe impl Lockable for BoxedLockCollection { + type Guard<'g> = L::Guard<'g> where Self: 'g; + + type ReadGuard<'g> = L::ReadGuard<'g> where Self: 'g; + + fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) { + self.data.get_ptrs(ptrs) + } + + unsafe fn guard(&self) -> Self::Guard<'_> { + self.data.guard() + } + + unsafe fn read_guard(&self) -> Self::ReadGuard<'_> { + self.data.read_guard() + } +} + +unsafe impl Sharable for BoxedLockCollection {} + +unsafe impl OwnedLockable for BoxedLockCollection {} + +impl IntoIterator for 
BoxedLockCollection +where + L: IntoIterator, +{ + type Item = ::Item; + type IntoIter = ::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} + +impl<'a, L> IntoIterator for &'a BoxedLockCollection +where + &'a L: IntoIterator, +{ + type Item = <&'a L as IntoIterator>::Item; + type IntoIter = <&'a L as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} + +impl<'a, L> IntoIterator for &'a mut BoxedLockCollection +where + &'a mut L: IntoIterator, +{ + type Item = <&'a mut L as IntoIterator>::Item; + type IntoIter = <&'a mut L as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} + +impl + OwnedLockable> FromIterator + for BoxedLockCollection +{ + fn from_iter>(iter: T) -> Self { + let iter: I = iter.into_iter().collect(); + Self::new(iter) + } +} + +impl, L: OwnedLockable> Extend for BoxedLockCollection { + fn extend>(&mut self, iter: T) { + self.data.extend(iter) + } +} + +impl AsRef for BoxedLockCollection { + fn as_ref(&self) -> &L { + &self.data + } +} + +impl AsMut for BoxedLockCollection { + fn as_mut(&mut self) -> &mut L { + &mut self.data + } +} - fn deref(&self) -> &Self::Target { - &self.0 +impl Debug for BoxedLockCollection { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct(stringify!(BoxedLockCollection)) + .field("data", &self.data) + .finish_non_exhaustive() } } -impl<'a, L: 'a> DerefMut for BoxedLockCollection<'a, L> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 +impl Default for BoxedLockCollection { + fn default() -> Self { + Self::new(L::default()) } } -impl<'a, L: 'a> Drop for BoxedLockCollection<'a, L> { - fn drop(&mut self) { - // this was created with Box::new - let boxed = unsafe { Box::from_raw((self.0.data as *const L).cast_mut()) }; - drop(boxed); +impl From for BoxedLockCollection { + fn from(value: L) -> Self { + Self::new(value) } } -impl<'a, L: 
OwnedLockable + 'a> BoxedLockCollection<'a, L> { +impl BoxedLockCollection { #[must_use] pub fn new(data: L) -> Self { - let boxed = Box::leak(Box::new(data)); - Self(RefLockCollection::new(boxed)) + // safety: owned lockable types cannot contain duplicates + unsafe { Self::new_unchecked(data) } } } -impl<'a, L: OwnedLockable + 'a> BoxedLockCollection<'a, L> { +impl<'a, L: OwnedLockable> BoxedLockCollection<&'a L> { #[must_use] pub fn new_ref(data: &'a L) -> Self { - let boxed = Box::leak(Box::new(data)); - - // safety: this is a reference to an OwnedLockable, which can't - // possibly contain inner duplicates - Self(unsafe { RefLockCollection::new_unchecked(boxed) }) + // safety: owned lockable types cannot contain duplicates + unsafe { Self::new_unchecked(data) } } } -impl<'a, L: Lockable + 'a> BoxedLockCollection<'a, L> { +impl BoxedLockCollection { #[must_use] pub unsafe fn new_unchecked(data: L) -> Self { - let boxed = Box::leak(Box::new(data)); - Self(RefLockCollection::new_unchecked(boxed)) + let data = Box::new(data); + let mut locks = Vec::new(); + data.get_ptrs(&mut locks); + + // safety: the box will be dropped after the lock references, so it's + // safe to just pretend they're static + let locks = std::mem::transmute(locks); + Self { data, locks } } #[must_use] pub fn try_new(data: L) -> Option { - let boxed = Box::leak(Box::new(data)); - RefLockCollection::try_new(boxed).map(Self) + // safety: we are checking for duplicates before returning + unsafe { + let this = Self::new_unchecked(data); + if contains_duplicates(&this.locks) { + return None; + } + Some(this) + } + } + + #[must_use] + pub fn into_inner(self) -> Box { + self.data + } + + pub fn lock<'g, 'key: 'g, Key: Keyable + 'key>( + &'g self, + key: Key, + ) -> LockGuard<'key, L::Guard<'g>, Key> { + for lock in &self.locks { + // safety: we have the thread key + unsafe { lock.lock() }; + } + + LockGuard { + // safety: we've already acquired the lock + guard: unsafe { self.data.guard() }, + key, + 
_phantom: PhantomData, + } + } + + pub fn try_lock<'g, 'key: 'g, Key: Keyable + 'key>( + &'g self, + key: Key, + ) -> Option, Key>> { + let guard = unsafe { + for (i, lock) in self.locks.iter().enumerate() { + // safety: we have the thread key + let success = lock.try_lock(); + + if !success { + for lock in &self.locks[0..i] { + // safety: this lock was already acquired + lock.unlock(); + } + return None; + } + } + + // safety: we've acquired the locks + self.data.guard() + }; + + Some(LockGuard { + guard, + key, + _phantom: PhantomData, + }) + } + + pub fn unlock<'key, Key: Keyable + 'key>(guard: LockGuard<'key, L::Guard<'_>, Key>) -> Key { + drop(guard.guard); + guard.key + } +} + +impl BoxedLockCollection { + pub fn read<'g, 'key: 'g, Key: Keyable + 'key>( + &'g self, + key: Key, + ) -> LockGuard<'key, L::ReadGuard<'g>, Key> { + for lock in &self.locks { + // safety: we have the thread key + unsafe { lock.read() }; + } + + LockGuard { + // safety: we've already acquired the lock + guard: unsafe { self.data.read_guard() }, + key, + _phantom: PhantomData, + } + } + + pub fn try_read<'g, 'key: 'g, Key: Keyable + 'key>( + &'g self, + key: Key, + ) -> Option, Key>> { + let guard = unsafe { + for (i, lock) in self.locks.iter().enumerate() { + // safety: we have the thread key + let success = lock.try_read(); + + if !success { + for lock in &self.locks[0..i] { + // safety: this lock was already acquired + lock.unlock_read(); + } + return None; + } + } + + // safety: we've acquired the locks + self.data.read_guard() + }; + + Some(LockGuard { + guard, + key, + _phantom: PhantomData, + }) + } + + pub fn unlock_read<'key, Key: Keyable + 'key>( + guard: LockGuard<'key, L::ReadGuard<'_>, Key>, + ) -> Key { + drop(guard.guard); + guard.key + } +} + +impl<'a, L: 'a> BoxedLockCollection +where + &'a L: IntoIterator, +{ + /// Returns an iterator over references to each value in the collection. 
+ #[must_use] + pub fn iter(&'a self) -> <&'a L as IntoIterator>::IntoIter { + self.into_iter() + } +} + +impl<'a, L: 'a> BoxedLockCollection +where + &'a mut L: IntoIterator, +{ + /// Returns an iterator over mutable references to each value in the + /// collection. + #[must_use] + pub fn iter_mut(&'a mut self) -> <&'a mut L as IntoIterator>::IntoIter { + self.into_iter() } } diff --git a/src/collection/guard.rs b/src/collection/guard.rs index b8561eb..8857c5f 100644 --- a/src/collection/guard.rs +++ b/src/collection/guard.rs @@ -1,9 +1,22 @@ +use std::fmt::{Debug, Display}; use std::ops::{Deref, DerefMut}; use crate::key::Keyable; use super::LockGuard; +impl<'key, Guard: Debug, Key: Keyable> Debug for LockGuard<'key, Guard, Key> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'key, Guard: Display, Key: Keyable> Display for LockGuard<'key, Guard, Key> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt(&**self, f) + } +} + impl<'key, Guard, Key: Keyable> Deref for LockGuard<'key, Guard, Key> { type Target = Guard; @@ -17,3 +30,15 @@ impl<'key, Guard, Key: Keyable> DerefMut for LockGuard<'key, Guard, Key> { &mut self.guard } } + +impl<'key, Guard, Key: Keyable> AsRef for LockGuard<'key, Guard, Key> { + fn as_ref(&self) -> &Guard { + &self.guard + } +} + +impl<'key, Guard, Key: Keyable> AsMut for LockGuard<'key, Guard, Key> { + fn as_mut(&mut self) -> &mut Guard { + &mut self.guard + } +} diff --git a/src/collection/owned.rs b/src/collection/owned.rs index 3415ac4..eb5e03a 100644 --- a/src/collection/owned.rs +++ b/src/collection/owned.rs @@ -32,12 +32,62 @@ unsafe impl Sharable for OwnedLockCollection {} unsafe impl OwnedLockable for OwnedLockCollection {} +impl IntoIterator for OwnedLockCollection +where + L: IntoIterator, +{ + type Item = ::Item; + type IntoIter = ::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} + +impl + 
OwnedLockable> FromIterator + for OwnedLockCollection +{ + fn from_iter>(iter: T) -> Self { + let iter: I = iter.into_iter().collect(); + Self::new(iter) + } +} + +impl, L: OwnedLockable> Extend for OwnedLockCollection { + fn extend>(&mut self, iter: T) { + self.data.extend(iter) + } +} + +impl AsMut for OwnedLockCollection { + fn as_mut(&mut self) -> &mut L { + &mut self.data + } +} + +impl Default for OwnedLockCollection { + fn default() -> Self { + Self::new(L::default()) + } +} + +impl From for OwnedLockCollection { + fn from(value: L) -> Self { + Self::new(value) + } +} + impl OwnedLockCollection { #[must_use] pub const fn new(data: L) -> Self { Self { data } } + #[must_use] + pub fn into_inner(self) -> L { + self.data + } + pub fn lock<'g, 'key, Key: Keyable + 'key>( &'g self, key: Key, diff --git a/src/collection/ref.rs b/src/collection/ref.rs index da18c62..329f0ae 100644 --- a/src/collection/ref.rs +++ b/src/collection/ref.rs @@ -1,3 +1,4 @@ +use std::fmt::Debug; use std::marker::PhantomData; use crate::{key::Keyable, lockable::Lock, Lockable, OwnedLockable, Sharable}; @@ -5,7 +6,7 @@ use crate::{key::Keyable, lockable::Lock, Lockable, OwnedLockable, Sharable}; use super::{LockGuard, RefLockCollection}; #[must_use] -fn get_locks(data: &L) -> Vec<&dyn Lock> { +pub fn get_locks(data: &L) -> Vec<&dyn Lock> { let mut locks = Vec::new(); data.get_ptrs(&mut locks); locks.sort_by_key(|lock| std::ptr::from_ref(*lock)); @@ -19,24 +20,12 @@ fn contains_duplicates(l: &[&dyn Lock]) -> bool { .any(|window| std::ptr::eq(window[0], window[1])) } -impl<'a, L: Lockable> AsRef for RefLockCollection<'a, L> { +impl<'a, L> AsRef for RefLockCollection<'a, L> { fn as_ref(&self) -> &L { self.data } } -impl<'a, L: Lockable> AsRef for RefLockCollection<'a, L> { - fn as_ref(&self) -> &Self { - self - } -} - -impl<'a, L: Lockable> AsMut for RefLockCollection<'a, L> { - fn as_mut(&mut self) -> &mut Self { - self - } -} - impl<'a, L> IntoIterator for &'a RefLockCollection<'a, L> where 
&'a L: IntoIterator, @@ -69,6 +58,20 @@ unsafe impl<'c, L: Lockable> Lockable for RefLockCollection<'c, L> { unsafe impl<'c, L: Sharable> Sharable for RefLockCollection<'c, L> {} +impl<'a, L: Debug> Debug for RefLockCollection<'a, L> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct(stringify!(RefLockCollection)) + .field("data", self.data) + .finish_non_exhaustive() + } +} + +impl<'a, L: OwnedLockable + Default> From<&'a L> for RefLockCollection<'a, L> { + fn from(value: &'a L) -> Self { + Self::new(value) + } +} + impl<'a, L: OwnedLockable> RefLockCollection<'a, L> { /// Creates a new collection of owned locks. /// @@ -142,7 +145,7 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> { return None; } - Some(Self { locks, data }) + Some(Self { data, locks }) } /// Locks the collection diff --git a/src/collection/retry.rs b/src/collection/retry.rs index 3000f8b..58a0642 100644 --- a/src/collection/retry.rs +++ b/src/collection/retry.rs @@ -41,6 +41,81 @@ unsafe impl Sharable for RetryingLockCollection {} unsafe impl OwnedLockable for RetryingLockCollection {} +impl IntoIterator for RetryingLockCollection +where + L: IntoIterator, +{ + type Item = ::Item; + type IntoIter = ::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} + +impl<'a, L> IntoIterator for &'a RetryingLockCollection +where + &'a L: IntoIterator, +{ + type Item = <&'a L as IntoIterator>::Item; + type IntoIter = <&'a L as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} + +impl<'a, L> IntoIterator for &'a mut RetryingLockCollection +where + &'a mut L: IntoIterator, +{ + type Item = <&'a mut L as IntoIterator>::Item; + type IntoIter = <&'a mut L as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} + +impl + OwnedLockable> FromIterator + for RetryingLockCollection +{ + fn from_iter>(iter: T) -> Self { + let iter: I = 
iter.into_iter().collect(); + Self::new(iter) + } +} + +impl, L: OwnedLockable> Extend for RetryingLockCollection { + fn extend>(&mut self, iter: T) { + self.data.extend(iter) + } +} + +impl AsRef for RetryingLockCollection { + fn as_ref(&self) -> &L { + &self.data + } +} + +impl AsMut for RetryingLockCollection { + fn as_mut(&mut self) -> &mut L { + &mut self.data + } +} + +impl Default for RetryingLockCollection { + fn default() -> Self { + Self::new(L::default()) + } +} + +impl From for RetryingLockCollection { + fn from(value: L) -> Self { + Self::new(value) + } +} + impl RetryingLockCollection { #[must_use] pub const fn new(data: L) -> Self { @@ -65,6 +140,10 @@ impl RetryingLockCollection { contains_duplicates(&data).then_some(Self { data }) } + pub fn into_inner(self) -> L { + self.data + } + pub fn lock<'g, 'key: 'g, Key: Keyable + 'key>( &'g self, key: Key, @@ -269,3 +348,26 @@ impl RetryingLockCollection { guard.key } } + +impl<'a, L: 'a> RetryingLockCollection +where + &'a L: IntoIterator, +{ + /// Returns an iterator over references to each value in the collection. + #[must_use] + pub fn iter(&'a self) -> <&'a L as IntoIterator>::IntoIter { + self.into_iter() + } +} + +impl<'a, L: 'a> RetryingLockCollection +where + &'a mut L: IntoIterator, +{ + /// Returns an iterator over mutable references to each value in the + /// collection. 
+ #[must_use] + pub fn iter_mut(&'a mut self) -> <&'a mut L as IntoIterator>::IntoIter { + self.into_iter() + } +} diff --git a/src/mutex/guard.rs b/src/mutex/guard.rs index 38ea125..35fe1f2 100644 --- a/src/mutex/guard.rs +++ b/src/mutex/guard.rs @@ -1,3 +1,4 @@ +use std::fmt::{Debug, Display}; use std::marker::PhantomData; use std::ops::{Deref, DerefMut}; @@ -7,6 +8,18 @@ use crate::key::Keyable; use super::{Mutex, MutexGuard, MutexRef}; +impl<'a, T: Debug + ?Sized + 'a, R: RawMutex> Debug for MutexRef<'a, T, R> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'a, T: Display + ?Sized + 'a, R: RawMutex> Display for MutexRef<'a, T, R> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt(&**self, f) + } +} + impl<'a, T: ?Sized + 'a, R: RawMutex> Drop for MutexRef<'a, T, R> { fn drop(&mut self) { // safety: this guard is being destroyed, so the data cannot be @@ -35,12 +48,40 @@ impl<'a, T: ?Sized + 'a, R: RawMutex> DerefMut for MutexRef<'a, T, R> { } } +impl<'a, T: ?Sized + 'a, R: RawMutex> AsRef for MutexRef<'a, T, R> { + fn as_ref(&self) -> &T { + self + } +} + +impl<'a, T: ?Sized + 'a, R: RawMutex> AsMut for MutexRef<'a, T, R> { + fn as_mut(&mut self) -> &mut T { + self + } +} + impl<'a, T: ?Sized + 'a, R: RawMutex> MutexRef<'a, T, R> { pub unsafe fn new(mutex: &'a Mutex) -> Self { Self(mutex, PhantomData) } } +impl<'a, 'key, T: Debug + ?Sized + 'a, Key: Keyable + 'key, R: RawMutex> Debug + for MutexGuard<'a, 'key, T, Key, R> +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'a, 'key, T: Display + ?Sized + 'a, Key: Keyable + 'key, R: RawMutex> Display + for MutexGuard<'a, 'key, T, Key, R> +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt(&**self, f) + } +} + impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawMutex> Deref for MutexGuard<'a, 'key, T, Key, R> { @@ 
-59,6 +100,22 @@ impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawMutex> DerefMut } } +impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawMutex> AsRef + for MutexGuard<'a, 'key, T, Key, R> +{ + fn as_ref(&self) -> &T { + self + } +} + +impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawMutex> AsMut + for MutexGuard<'a, 'key, T, Key, R> +{ + fn as_mut(&mut self) -> &mut T { + self + } +} + impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawMutex> MutexGuard<'a, 'key, T, Key, R> { /// Create a guard to the given mutex. Undefined if multiple guards to the /// same mutex exist at once. diff --git a/src/mutex/mutex.rs b/src/mutex/mutex.rs index 3b8c221..52b6081 100644 --- a/src/mutex/mutex.rs +++ b/src/mutex/mutex.rs @@ -43,12 +43,6 @@ impl Mutex { } } -impl Default for Mutex { - fn default() -> Self { - Self::new(T::default()) - } -} - impl Debug for Mutex { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // safety: this is just a try lock, and the value is dropped @@ -71,6 +65,12 @@ impl Debug for Mutex { } } +impl Default for Mutex { + fn default() -> Self { + Self::new(T::default()) + } +} + impl From for Mutex { fn from(value: T) -> Self { Self::new(value) diff --git a/src/rwlock/read_guard.rs b/src/rwlock/read_guard.rs index 8428987..da3d101 100644 --- a/src/rwlock/read_guard.rs +++ b/src/rwlock/read_guard.rs @@ -1,3 +1,4 @@ +use std::fmt::{Debug, Display}; use std::marker::PhantomData; use std::ops::Deref; @@ -7,6 +8,18 @@ use crate::key::Keyable; use super::{RwLock, RwLockReadGuard, RwLockReadRef}; +impl<'a, T: Debug + ?Sized + 'a, R: RawRwLock> Debug for RwLockReadRef<'a, T, R> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'a, T: Display + ?Sized + 'a, R: RawRwLock> Display for RwLockReadRef<'a, T, R> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt(&**self, f) + } +} + impl<'a, T: ?Sized + 'a, R: RawRwLock> Deref for 
RwLockReadRef<'a, T, R> { type Target = T; @@ -18,6 +31,12 @@ impl<'a, T: ?Sized + 'a, R: RawRwLock> Deref for RwLockReadRef<'a, T, R> { } } +impl<'a, T: ?Sized + 'a, R: RawRwLock> AsRef for RwLockReadRef<'a, T, R> { + fn as_ref(&self) -> &T { + self + } +} + impl<'a, T: ?Sized + 'a, R: RawRwLock> Drop for RwLockReadRef<'a, T, R> { fn drop(&mut self) { // safety: this guard is being destroyed, so the data cannot be @@ -32,6 +51,22 @@ impl<'a, T: ?Sized + 'a, R: RawRwLock> RwLockReadRef<'a, T, R> { } } +impl<'a, 'key, T: Debug + ?Sized + 'a, Key: Keyable + 'key, R: RawRwLock> Debug + for RwLockReadGuard<'a, 'key, T, Key, R> +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'a, 'key, T: Display + ?Sized + 'a, Key: Keyable + 'key, R: RawRwLock> Display + for RwLockReadGuard<'a, 'key, T, Key, R> +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt(&**self, f) + } +} + impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> Deref for RwLockReadGuard<'a, 'key, T, Key, R> { @@ -42,6 +77,14 @@ impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> Deref } } +impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> AsRef + for RwLockReadGuard<'a, 'key, T, Key, R> +{ + fn as_ref(&self) -> &T { + self + } +} + impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> RwLockReadGuard<'a, 'key, T, Key, R> { diff --git a/src/rwlock/rwlock.rs b/src/rwlock/rwlock.rs index 7070b0e..9a7590a 100644 --- a/src/rwlock/rwlock.rs +++ b/src/rwlock/rwlock.rs @@ -40,12 +40,6 @@ impl RwLock { } } -impl Default for RwLock { - fn default() -> Self { - Self::new(T::default()) - } -} - impl Debug for RwLock { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // safety: this is just a try lock, and the value is dropped @@ -68,6 +62,12 @@ impl Debug for RwLock { } } +impl Default for RwLock { + fn default() -> Self { + Self::new(T::default()) + } +} + impl 
From for RwLock { fn from(value: T) -> Self { Self::new(value) diff --git a/src/rwlock/write_guard.rs b/src/rwlock/write_guard.rs index 16b474e..c8dd58b 100644 --- a/src/rwlock/write_guard.rs +++ b/src/rwlock/write_guard.rs @@ -27,6 +27,18 @@ impl<'a, T: ?Sized + 'a, R: RawRwLock> DerefMut for RwLockWriteRef<'a, T, R> { } } +impl<'a, T: ?Sized + 'a, R: RawRwLock> AsRef for RwLockWriteRef<'a, T, R> { + fn as_ref(&self) -> &T { + self + } +} + +impl<'a, T: ?Sized + 'a, R: RawRwLock> AsMut for RwLockWriteRef<'a, T, R> { + fn as_mut(&mut self) -> &mut T { + self + } +} + impl<'a, T: ?Sized + 'a, R: RawRwLock> Drop for RwLockWriteRef<'a, T, R> { fn drop(&mut self) { // safety: this guard is being destroyed, so the data cannot be @@ -59,6 +71,22 @@ impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> DerefMut } } +impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> AsRef + for RwLockWriteGuard<'a, 'key, T, Key, R> +{ + fn as_ref(&self) -> &T { + self + } +} + +impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> AsMut + for RwLockWriteGuard<'a, 'key, T, Key, R> +{ + fn as_mut(&mut self) -> &mut T { + self + } +} + impl<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable, R: RawRwLock> RwLockWriteGuard<'a, 'key, T, Key, R> { -- cgit v1.2.3 From f81d4b40a007fecf6502a36b4c24a1e31807a731 Mon Sep 17 00:00:00 2001 From: Botahamec Date: Thu, 23 May 2024 19:50:32 -0400 Subject: Comments --- src/collection.rs | 20 +++++++++++++++++++- src/collection/boxed.rs | 30 +++++++----------------------- src/collection/owned.rs | 28 +++++----------------------- src/collection/ref.rs | 28 +++++----------------------- src/collection/utils.rs | 44 ++++++++++++++++++++++++++++++++++++++++++++ src/key.rs | 7 +++++++ src/lib.rs | 3 +++ src/lockable.rs | 28 ++++++++++++++++++++++++++-- src/mutex.rs | 5 ++++- src/mutex/guard.rs | 10 ++++++++++ src/mutex/mutex.rs | 4 ++++ src/rwlock/write_lock.rs | 11 +++++------ 12 files changed, 139 insertions(+), 79 deletions(-) create mode 
100644 src/collection/utils.rs (limited to 'src/mutex/mutex.rs') diff --git a/src/collection.rs b/src/collection.rs index 5dc6946..27ec1c4 100644 --- a/src/collection.rs +++ b/src/collection.rs @@ -7,6 +7,7 @@ mod guard; mod owned; mod r#ref; mod retry; +mod utils; /// Locks a collection of locks, which cannot be shared immutably. /// @@ -24,6 +25,9 @@ mod retry; /// /// [`Lockable`]: `crate::lockable::Lockable` /// [`OwnedLockable`]: `crate::lockable::OwnedLockable` + +// this type caches the idea that no immutable references to the underlying +// collection exist #[derive(Debug)] pub struct OwnedLockCollection { data: L, @@ -47,6 +51,13 @@ pub struct OwnedLockCollection { /// /// [`Lockable`]: `crate::lockable::Lockable` /// [`OwnedLockable`]: `crate::lockable::OwnedLockable` + +// This type was born when I eventually realized that I needed a self +// referential structure. That used boxing, so I elected to make a more +// efficient implementation (polonius please save us) + +// This type caches the sorting order of the locks and the fact that it doesn't +// contain any duplicates. pub struct RefLockCollection<'a, L> { data: &'a L, locks: Vec<&'a dyn RawLock>, @@ -70,9 +81,14 @@ pub struct RefLockCollection<'a, L> { /// /// [`Lockable`]: `crate::lockable::Lockable` /// [`OwnedLockable`]: `crate::lockable::OwnedLockable` + +// This type caches the sorting order of the locks and the fact that it doesn't +// contain any duplicates. pub struct BoxedLockCollection { data: Box, - locks: Vec<&'static dyn RawLock>, + locks: Vec<&'static dyn RawLock>, // As far as you know, it's static. + // Believe it or not, saying the lifetime + // is static when it's not isn't UB } /// Locks a collection of locks using a retrying algorithm. 
@@ -97,6 +113,8 @@ pub struct BoxedLockCollection { /// [`Lockable`]: `crate::lockable::Lockable` /// [`OwnedLockable`]: `crate::lockable::OwnedLockable` /// [livelocking]: https://en.wikipedia.org/wiki/Deadlock#Livelock + +// This type caches the fact that there are no duplicates #[derive(Debug)] pub struct RetryingLockCollection { data: L, diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs index 224eedb..5ced6d1 100644 --- a/src/collection/boxed.rs +++ b/src/collection/boxed.rs @@ -4,7 +4,7 @@ use std::marker::PhantomData; use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable}; use crate::Keyable; -use super::{BoxedLockCollection, LockGuard}; +use super::{utils, BoxedLockCollection, LockGuard}; /// returns `true` if the sorted list contains a duplicate #[must_use] @@ -185,6 +185,8 @@ impl BoxedLockCollection { let data = Box::new(data); let mut locks = Vec::new(); data.get_ptrs(&mut locks); + + // cast to *const () because fat pointers can't be converted to usize locks.sort_by_key(|lock| std::ptr::from_ref(*lock).cast::<()>() as usize); // safety: the box will be dropped after the lock references, so it's @@ -310,17 +312,8 @@ impl BoxedLockCollection { key: Key, ) -> Option, Key>> { let guard = unsafe { - for (i, lock) in self.locks.iter().enumerate() { - // safety: we have the thread key - let success = lock.try_lock(); - - if !success { - for lock in &self.locks[0..i] { - // safety: this lock was already acquired - lock.unlock(); - } - return None; - } + if !utils::ordered_try_lock(&self.locks) { + return None; } // safety: we've acquired the locks @@ -424,17 +417,8 @@ impl BoxedLockCollection { key: Key, ) -> Option, Key>> { let guard = unsafe { - for (i, lock) in self.locks.iter().enumerate() { - // safety: we have the thread key - let success = lock.try_read(); - - if !success { - for lock in &self.locks[0..i] { - // safety: this lock was already acquired - lock.unlock_read(); - } - return None; - } + if 
!utils::ordered_try_read(&self.locks) { + return None; } // safety: we've acquired the locks diff --git a/src/collection/owned.rs b/src/collection/owned.rs index e1549b2..919c403 100644 --- a/src/collection/owned.rs +++ b/src/collection/owned.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable}; use crate::Keyable; -use super::{LockGuard, OwnedLockCollection}; +use super::{utils, LockGuard, OwnedLockCollection}; fn get_locks(data: &L) -> Vec<&dyn RawLock> { let mut locks = Vec::new(); @@ -191,17 +191,8 @@ impl OwnedLockCollection { ) -> Option, Key>> { let locks = get_locks(&self.data); let guard = unsafe { - for (i, lock) in locks.iter().enumerate() { - // safety: we have the thread key - let success = lock.try_lock(); - - if !success { - for lock in &locks[0..i] { - // safety: this lock was already acquired - lock.unlock(); - } - return None; - } + if !utils::ordered_try_lock(&locks) { + return None; } // safety: we've acquired the locks @@ -315,17 +306,8 @@ impl OwnedLockCollection { ) -> Option, Key>> { let locks = get_locks(&self.data); let guard = unsafe { - for (i, lock) in locks.iter().enumerate() { - // safety: we have the thread key - let success = lock.try_read(); - - if !success { - for lock in &locks[0..i] { - // safety: this lock was already acquired - lock.unlock(); - } - return None; - } + if !utils::ordered_try_read(&locks) { + return None; } // safety: we've acquired the locks diff --git a/src/collection/ref.rs b/src/collection/ref.rs index e5c548f..d8c7f2e 100644 --- a/src/collection/ref.rs +++ b/src/collection/ref.rs @@ -4,7 +4,7 @@ use std::marker::PhantomData; use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable}; use crate::Keyable; -use super::{LockGuard, RefLockCollection}; +use super::{utils, LockGuard, RefLockCollection}; #[must_use] pub fn get_locks(data: &L) -> Vec<&dyn RawLock> { @@ -221,17 +221,8 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> { key: Key, ) -> 
Option, Key>> { let guard = unsafe { - for (i, lock) in self.locks.iter().enumerate() { - // safety: we have the thread key - let success = lock.try_lock(); - - if !success { - for lock in &self.locks[0..i] { - // safety: this lock was already acquired - lock.unlock(); - } - return None; - } + if !utils::ordered_try_lock(&self.locks) { + return None; } // safety: we've acquired the locks @@ -339,17 +330,8 @@ impl<'a, L: Sharable> RefLockCollection<'a, L> { key: Key, ) -> Option, Key>> { let guard = unsafe { - for (i, lock) in self.locks.iter().enumerate() { - // safety: we have the thread key - let success = lock.try_read(); - - if !success { - for lock in &self.locks[0..i] { - // safety: this lock was already acquired - lock.unlock_read(); - } - return None; - } + if !utils::ordered_try_read(&self.locks) { + return None; } // safety: we've acquired the locks diff --git a/src/collection/utils.rs b/src/collection/utils.rs new file mode 100644 index 0000000..dc58399 --- /dev/null +++ b/src/collection/utils.rs @@ -0,0 +1,44 @@ +use crate::lockable::RawLock; + +/// Locks the locks in the order they are given. This causes deadlock if the +/// locks contain duplicates, or if this is called by multiple threads with the +/// locks in different orders. +pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool { + unsafe { + for (i, lock) in locks.iter().enumerate() { + // safety: we have the thread key + let success = lock.try_lock(); + + if !success { + for lock in &locks[0..i] { + // safety: this lock was already acquired + lock.unlock(); + } + return false; + } + } + + true + } +} + +/// Locks the locks in the order they are given. This causes deadlock if this is +/// called by multiple threads with the locks in different orders. 
+pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool { + unsafe { + for (i, lock) in locks.iter().enumerate() { + // safety: we have the thread key + let success = lock.try_read(); + + if !success { + for lock in &locks[0..i] { + // safety: this lock was already acquired + lock.unlock_read(); + } + return false; + } + } + + true + } +} diff --git a/src/key.rs b/src/key.rs index 875f4be..4d6504f 100644 --- a/src/key.rs +++ b/src/key.rs @@ -7,6 +7,8 @@ use thread_local::ThreadLocal; use sealed::Sealed; +// Sealed to prevent other key types from being implemented. Otherwise, this +// would almost instantly cause undefined behavior. mod sealed { use super::ThreadKey; @@ -15,6 +17,9 @@ mod sealed { impl Sealed for &mut ThreadKey {} } +// I am concerned that having multiple crates linked together with different +// static variables could break my key system. Library code probably shouldn't +// be creating keys at all. static KEY: Lazy> = Lazy::new(ThreadLocal::new); /// The key for the current thread. @@ -34,6 +39,7 @@ pub struct ThreadKey { /// values invalid. pub unsafe trait Keyable: Sealed {} unsafe impl Keyable for ThreadKey {} +// the ThreadKey can't be moved while a mutable reference to it exists unsafe impl Keyable for &mut ThreadKey {} impl Debug for ThreadKey { @@ -42,6 +48,7 @@ impl Debug for ThreadKey { } } +// If you lose the thread key, you can get it back by calling ThreadKey::get impl Drop for ThreadKey { fn drop(&mut self) { unsafe { KEY.get().unwrap().force_unlock() } diff --git a/src/lib.rs b/src/lib.rs index 9c39c6d..643c3e7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -176,6 +176,9 @@ pub use key::{Keyable, ThreadKey}; #[cfg(feature = "spin")] pub use mutex::SpinLock; +// Personally, I think re-exports look ugly in the rust documentation, so I +// went with type aliases instead. + /// A collection of locks that can be acquired simultaneously. /// /// This re-exports [`BoxedLockCollection`] as a sensible default. 
diff --git a/src/lockable.rs b/src/lockable.rs index 6b9c7c6..9f44981 100644 --- a/src/lockable.rs +++ b/src/lockable.rs @@ -14,6 +14,13 @@ use lock_api::{RawMutex, RawRwLock}; /// A deadlock must never occur. The `unlock` method must correctly unlock the /// data. The `get_ptrs` method must be implemented correctly. The `Output` /// must be unlocked when it is dropped. + +// Why not use a RawRwLock? Because that would be semantically incorrect, and I +// don't want an INIT or GuardMarker associated item. +// Originally, RawLock had a sister trait: RawSharableLock. I removed it +// because it'd be difficult to implement a separate type that takes a +// different kind of RawLock. But now the Sharable marker trait is needed to +// indicate if reads can be used. pub unsafe trait RawLock: Send + Sync { /// Blocks until the lock is acquired /// @@ -175,6 +182,8 @@ unsafe impl RawLock for Mutex { self.raw().unlock() } + // this is the closest thing to a read we can get, but Sharable isn't + // implemented for this unsafe fn read(&self) { self.raw().lock() } @@ -291,8 +300,13 @@ unsafe impl<'l, T: Send, R: RawRwLock + Send + Sync> Lockable for WriteLock<'l, } } +// Technically, the exclusive locks can also be shared, but there's currently +// no way to express that. I don't think I want to ever express that. unsafe impl<'l, T: Send, R: RawRwLock + Send + Sync> Sharable for ReadLock<'l, T, R> {} +// Because both ReadLock and WriteLock hold references to RwLocks, they can't +// implement OwnedLockable + unsafe impl Lockable for &T { type Guard<'g> = T::Guard<'g> where Self: 'g; @@ -335,6 +349,8 @@ unsafe impl Sharable for &mut T {} unsafe impl OwnedLockable for &mut T {} +/// Implements `Lockable`, `Sharable`, and `OwnedLockable` for tuples +/// ex: `tuple_impls!(A B C, 0 1 2);` macro_rules! tuple_impls { ($($generic:ident)*, $($value:tt)*) => { unsafe impl<$($generic: Lockable,)*> Lockable for ($($generic,)*) { @@ -347,6 +363,8 @@ macro_rules! 
tuple_impls { } unsafe fn guard(&self) -> Self::Guard<'_> { + // It's weird that this works + // I don't think any other way of doing it compiles ($(self.$value.guard(),)*) } @@ -381,6 +399,8 @@ unsafe impl Lockable for [T; N] { } unsafe fn guard<'g>(&'g self) -> Self::Guard<'g> { + // The MaybeInit helper functions for arrays aren't stable yet, so + // we'll just have to implement it ourselves let mut guards = MaybeUninit::<[MaybeUninit>; N]>::uninit().assume_init(); for i in 0..N { guards[i].write(self[i].guard()); @@ -430,7 +450,8 @@ unsafe impl Lockable for Box<[T]> { } unsafe impl Lockable for Vec { - type Guard<'g> = Vec> where Self: 'g; + // There's no reason why I'd ever want to extend a list of lock guards + type Guard<'g> = Box<[T::Guard<'g>]> where Self: 'g; type ReadGuard<'g> = Box<[T::ReadGuard<'g>]> where Self: 'g; @@ -446,7 +467,7 @@ unsafe impl Lockable for Vec { guards.push(lock.guard()); } - guards + guards.into_boxed_slice() } unsafe fn read_guard(&self) -> Self::ReadGuard<'_> { @@ -459,6 +480,9 @@ unsafe impl Lockable for Vec { } } +// I'd make a generic impl> Lockable for I +// but I think that'd require sealing up this trait + unsafe impl Sharable for [T; N] {} unsafe impl Sharable for Box<[T]> {} unsafe impl Sharable for Vec {} diff --git a/src/mutex.rs b/src/mutex.rs index a3baa00..b30c2b1 100644 --- a/src/mutex.rs +++ b/src/mutex.rs @@ -49,8 +49,11 @@ pub struct MutexRef<'a, T: ?Sized + 'a, R: RawMutex>( /// /// [`lock`]: `Mutex::lock` /// [`try_lock`]: `Mutex::try_lock` + +// This is the most lifetime-intensive thing I've ever written. Can I graduate +// from borrow checker university now? 
pub struct MutexGuard<'a, 'key: 'a, T: ?Sized + 'a, Key: Keyable + 'key, R: RawMutex> { - mutex: MutexRef<'a, T, R>, + mutex: MutexRef<'a, T, R>, // this way we don't need to re-implement Drop thread_key: Key, _phantom: PhantomData<&'key ()>, } diff --git a/src/mutex/guard.rs b/src/mutex/guard.rs index 9e8e2e6..f9324ad 100644 --- a/src/mutex/guard.rs +++ b/src/mutex/guard.rs @@ -8,6 +8,9 @@ use crate::key::Keyable; use super::{Mutex, MutexGuard, MutexRef}; +// This makes things slightly easier because now you can use +// `println!("{guard}")` instead of `println!("{}", *guard)`. I wonder if I +// should implement some other standard library traits like this too? impl<'a, T: Debug + ?Sized + 'a, R: RawMutex> Debug for MutexRef<'a, T, R> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { Debug::fmt(&**self, f) @@ -63,11 +66,18 @@ impl<'a, T: ?Sized + 'a, R: RawMutex> AsMut for MutexRef<'a, T, R> { impl<'a, T: ?Sized + 'a, R: RawMutex> MutexRef<'a, T, R> { /// Creates a reference to the underlying data of a mutex without /// attempting to lock it or take ownership of the key. + + // This might be useful to export, because it makes it easier to express + // the concept of: "Get the data out of the mutex but don't lock it or take + // the key". But it's also quite dangerous to drop. 
pub(crate) unsafe fn new(mutex: &'a Mutex) -> Self { Self(mutex, PhantomData) } } +// it's kinda annoying to re-implement some of this stuff on guards +// there's nothing i can do about that + impl<'a, 'key, T: Debug + ?Sized + 'a, Key: Keyable + 'key, R: RawMutex> Debug for MutexGuard<'a, 'key, T, Key, R> { diff --git a/src/mutex/mutex.rs b/src/mutex/mutex.rs index 52b6081..89dfef9 100644 --- a/src/mutex/mutex.rs +++ b/src/mutex/mutex.rs @@ -48,6 +48,7 @@ impl Debug for Mutex { // safety: this is just a try lock, and the value is dropped // immediately after, so there's no risk of blocking ourselves // or any other threads + // when i implement try_clone this code will become less unsafe if let Some(value) = unsafe { self.try_lock_no_key() } { f.debug_struct("Mutex").field("data", &&*value).finish() } else { @@ -77,6 +78,9 @@ impl From for Mutex { } } +// We don't need a `get_mut` because we don't have mutex poisoning. Hurray! +// This is safe because you can't have a mutable reference to the lock if it's +// locked. Being locked requires an immutable reference because of the guard. impl AsMut for Mutex { fn as_mut(&mut self) -> &mut T { self.get_mut() diff --git a/src/rwlock/write_lock.rs b/src/rwlock/write_lock.rs index 8501cd8..2cf73cd 100644 --- a/src/rwlock/write_lock.rs +++ b/src/rwlock/write_lock.rs @@ -11,7 +11,9 @@ impl<'l, T: ?Sized + Debug, R: RawRwLock> Debug for WriteLock<'l, T, R> { // safety: this is just a try lock, and the value is dropped // immediately after, so there's no risk of blocking ourselves // or any other threads - if let Some(value) = unsafe { self.try_lock_no_key() } { + // It makes zero sense to try using an exclusive lock for this, so this + // is the only time when WriteLock does a read. 
+ if let Some(value) = unsafe { self.0.try_read_no_key() } { f.debug_struct("WriteLock").field("data", &&*value).finish() } else { struct LockedPlaceholder; @@ -75,11 +77,8 @@ impl<'l, T: ?Sized, R: RawRwLock> WriteLock<'l, T, R> { self.0.try_write(key) } - /// Attempts to create an exclusive lock without a key. Locking this - /// without exclusive access to the key is undefined behavior. - pub(crate) unsafe fn try_lock_no_key(&self) -> Option> { - self.0.try_write_no_key() - } + // There's no `try_lock_no_key`. Instead, `try_read_no_key` is called on + // the referenced `RwLock`. /// Immediately drops the guard, and consequently releases the exclusive /// lock. -- cgit v1.2.3