summaryrefslogtreecommitdiff
path: root/src/collection
diff options
context:
space:
mode:
Diffstat (limited to 'src/collection')
-rw-r--r--src/collection/boxed.rs9
-rw-r--r--src/collection/owned.rs12
-rw-r--r--src/collection/retry.rs47
-rw-r--r--src/collection/utils.rs8
4 files changed, 55 insertions, 21 deletions
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index a048d2b..c0cc294 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -21,6 +21,7 @@ fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
}
unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
+ #[mutants::skip] // this should never be called
fn poison(&self) {
for lock in &self.locks {
lock.poison();
@@ -84,6 +85,9 @@ unsafe impl<L: Sharable> Sharable for BoxedLockCollection<L> {
unsafe impl<L: OwnedLockable> OwnedLockable for BoxedLockCollection<L> {}
+// LockableGetMut can't be implemented because that would create mutable and
+// immutable references to the same value at the same time.
+
impl<L: LockableIntoInner> LockableIntoInner for BoxedLockCollection<L> {
type Inner = L::Inner;
@@ -131,7 +135,7 @@ unsafe impl<L: Send> Send for BoxedLockCollection<L> {}
unsafe impl<L: Sync> Sync for BoxedLockCollection<L> {}
impl<L> Drop for BoxedLockCollection<L> {
- #[mutants::skip]
+ #[mutants::skip] // I can't test for a memory leak
fn drop(&mut self) {
unsafe {
// safety: this collection will never be locked again
@@ -203,6 +207,9 @@ impl<L> BoxedLockCollection<L> {
}
}
+ // child_mut can't be provided because it would be immediate UB: it would
+ // create mutable and immutable references to the same value at the same time
+
/// Gets an immutable reference to the underlying data
#[must_use]
pub fn child(&self) -> &L {
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index 59e1ff8..3d6fa93 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -128,11 +128,9 @@ impl<E: OwnedLockable + Extend<L>, L: OwnedLockable> Extend<L> for OwnedLockColl
}
}
-impl<T, L: AsRef<T>> AsRef<T> for OwnedLockCollection<L> {
- fn as_ref(&self) -> &T {
- self.data.as_ref()
- }
-}
+// AsRef can't be implemented because an impl of AsRef<L> for L could break the
+// invariant that there is only one way to lock the collection. AsMut is fine,
+// because the collection can't be locked as long as the reference is valid.
impl<T, L: AsMut<T>> AsMut<T> for OwnedLockCollection<L> {
fn as_mut(&mut self) -> &mut T {
@@ -416,6 +414,10 @@ impl<L> OwnedLockCollection<L> {
pub fn into_child(self) -> L {
self.data
}
+
+ pub fn child_mut(&mut self) -> &mut L {
+ &mut self.data
+ }
}
impl<L: LockableGetMut> OwnedLockCollection<L> {
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index cb6a1fb..83842f8 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -48,17 +48,22 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // return early: indexing locks[first_index] below would panic on an
+ // empty slice, and there's nothing to lock anyway
return;
}
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
|| unsafe {
'outer: loop {
+ // This prevents us from entering a spin loop waiting for
+ // the same lock to be unlocked
// safety: we have the thread key
locks[first_index].raw_lock();
for (i, lock) in locks.iter().enumerate() {
if i == first_index {
+ // we've already locked this one
continue;
}
@@ -70,10 +75,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
if lock.raw_try_lock() {
locked.borrow_mut().push(*lock)
} else {
- for lock in locks.iter().take(i) {
+ for lock in locked.borrow().iter() {
// safety: we already locked all of these
lock.raw_unlock();
}
+ // these are no longer locked
+ locked.borrow_mut().clear();
if first_index >= i {
// safety: this is already locked and can't be unlocked
@@ -81,6 +88,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
locks[first_index].raw_unlock();
}
+ // call lock on this to prevent a spin loop
first_index = i;
continue 'outer;
}
@@ -98,9 +106,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // this is an interesting case, but it doesn't give us access to
+ // any data, and can't possibly cause a deadlock
return true;
}
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
|| unsafe {
@@ -136,6 +147,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // this probably prevents a panic later
return;
}
@@ -153,10 +165,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
if lock.raw_try_read() {
locked.borrow_mut().push(*lock);
} else {
- for lock in locks.iter().take(i) {
+ for lock in locked.borrow().iter() {
// safety: we already locked all of these
lock.raw_unlock_read();
}
+ // these are no longer locked
+ locked.borrow_mut().clear();
if first_index >= i {
// safety: this is already locked and can't be unlocked
@@ -164,6 +178,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
locks[first_index].raw_unlock_read();
}
+ // don't go into a spin loop, wait for this one to lock
first_index = i;
continue 'outer;
}
@@ -180,6 +195,8 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let locks = get_locks(&self.data);
if locks.is_empty() {
+ // this is an interesting case, but it doesn't give us access to
+ // any data, and can't possibly cause a deadlock
return true;
}
@@ -229,6 +246,19 @@ unsafe impl<L: Lockable> Lockable for RetryingLockCollection<L> {
}
}
+unsafe impl<L: Sharable> Sharable for RetryingLockCollection<L> {
+ type ReadGuard<'g>
+ = L::ReadGuard<'g>
+ where
+ Self: 'g;
+
+ unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
+ self.data.read_guard()
+ }
+}
+
+unsafe impl<L: OwnedLockable> OwnedLockable for RetryingLockCollection<L> {}
+
impl<L: LockableGetMut> LockableGetMut for RetryingLockCollection<L> {
type Inner<'a>
= L::Inner<'a>
@@ -248,19 +278,6 @@ impl<L: LockableIntoInner> LockableIntoInner for RetryingLockCollection<L> {
}
}
-unsafe impl<L: Sharable> Sharable for RetryingLockCollection<L> {
- type ReadGuard<'g>
- = L::ReadGuard<'g>
- where
- Self: 'g;
-
- unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
- self.data.read_guard()
- }
-}
-
-unsafe impl<L: OwnedLockable> OwnedLockable for RetryingLockCollection<L> {}
-
impl<L> IntoIterator for RetryingLockCollection<L>
where
L: IntoIterator,
diff --git a/src/collection/utils.rs b/src/collection/utils.rs
index 36b19be..d368773 100644
--- a/src/collection/utils.rs
+++ b/src/collection/utils.rs
@@ -3,7 +3,9 @@ use std::cell::RefCell;
use crate::handle_unwind::handle_unwind;
use crate::lockable::RawLock;
+/// Lock a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_lock(locks: &[&dyn RawLock]) {
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
@@ -17,6 +19,7 @@ pub unsafe fn ordered_lock(locks: &[&dyn RawLock]) {
)
}
+/// Acquire read locks on a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_read(locks: &[&dyn RawLock]) {
let locked = RefCell::new(Vec::with_capacity(locks.len()));
@@ -63,6 +66,7 @@ pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
/// Locks the locks in the order they are given. This causes deadlock if this
/// is called by multiple threads with the locks in different orders.
pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
+ // these will be unlocked in case of a panic
let locked = RefCell::new(Vec::with_capacity(locks.len()));
handle_unwind(
@@ -88,6 +92,7 @@ pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
)
}
+/// Unlocks the locks that were already acquired, in order to recover from a panic
pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
handle_unwind(
|| {
@@ -96,10 +101,12 @@ pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawL
locked_lock.raw_unlock();
}
},
+ // if we get another panic in here, we'll just have to poison what remains
|| locked.borrow().iter().for_each(|l| l.poison()),
)
}
+/// Unlocks the already locked locks in order to recover from a panic
pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
handle_unwind(
|| {
@@ -108,6 +115,7 @@ pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawL
locked_lock.raw_unlock_read();
}
},
+ // if we get another panic in here, we'll just have to poison what remains
|| locked.borrow().iter().for_each(|l| l.poison()),
)
}