summary refs log tree commit diff
path: root/src/collection
diff options
context:
space:
mode:
Diffstat (limited to 'src/collection')
-rw-r--r--src/collection/boxed.rs55
-rw-r--r--src/collection/owned.rs57
-rw-r--r--src/collection/ref.rs50
-rw-r--r--src/collection/retry.rs42
-rw-r--r--src/collection/utils.rs42
5 files changed, 176 insertions, 70 deletions
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index bef3df2..3cfc336 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -1,5 +1,5 @@
use std::alloc::Layout;
-use std::cell::UnsafeCell;
+use std::cell::{RefCell, UnsafeCell};
use std::fmt::Debug;
use std::marker::PhantomData;
@@ -20,7 +20,7 @@ fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
.any(|window| std::ptr::eq(window[0], window[1]))
}
-unsafe impl<L: Lockable + Send + Sync> RawLock for BoxedLockCollection<L> {
+unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
fn kill(&self) {
for lock in &self.locks {
lock.kill();
@@ -28,8 +28,15 @@ unsafe impl<L: Lockable + Send + Sync> RawLock for BoxedLockCollection<L> {
}
unsafe fn raw_lock(&self) {
+ let locks = self.locks();
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_locks_from_panic(&locked)
+ };
+
for lock in self.locks() {
lock.raw_lock();
+ locked.borrow_mut().push(*lock);
}
}
@@ -44,8 +51,15 @@ unsafe impl<L: Lockable + Send + Sync> RawLock for BoxedLockCollection<L> {
}
unsafe fn raw_read(&self) {
+ let locks = self.locks();
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_reads_from_panic(&locked)
+ };
+
for lock in self.locks() {
lock.raw_read();
+ locked.borrow_mut().push(*lock);
}
}
@@ -335,16 +349,16 @@ impl<L: Lockable> BoxedLockCollection<L> {
&'g self,
key: Key,
) -> LockGuard<'key, L::Guard<'g>, Key> {
- for lock in self.locks() {
+ unsafe {
// safety: we have the thread key
- unsafe { lock.raw_lock() };
- }
+ self.raw_lock();
- LockGuard {
- // safety: we've already acquired the lock
- guard: unsafe { self.data().guard() },
- key,
- _phantom: PhantomData,
+ LockGuard {
+ // safety: we've already acquired the lock
+ guard: self.data().guard(),
+ key,
+ _phantom: PhantomData,
+ }
}
}
@@ -377,7 +391,7 @@ impl<L: Lockable> BoxedLockCollection<L> {
key: Key,
) -> Option<LockGuard<'key, L::Guard<'g>, Key>> {
let guard = unsafe {
- if !utils::ordered_try_lock(self.locks()) {
+ if !self.raw_try_lock() {
return None;
}
@@ -439,16 +453,16 @@ impl<L: Sharable> BoxedLockCollection<L> {
&'g self,
key: Key,
) -> LockGuard<'key, L::ReadGuard<'g>, Key> {
- for lock in self.locks() {
+ unsafe {
// safety: we have the thread key
- unsafe { lock.raw_read() };
- }
+ self.raw_read();
- LockGuard {
- // safety: we've already acquired the lock
- guard: unsafe { self.data().read_guard() },
- key,
- _phantom: PhantomData,
+ LockGuard {
+ // safety: we've already acquired the lock
+ guard: self.data().read_guard(),
+ key,
+ _phantom: PhantomData,
+ }
}
}
@@ -482,7 +496,8 @@ impl<L: Sharable> BoxedLockCollection<L> {
key: Key,
) -> Option<LockGuard<'key, L::ReadGuard<'g>, Key>> {
let guard = unsafe {
- if !utils::ordered_try_read(self.locks()) {
+ // safety: we have the thread key
+ if !self.raw_try_read() {
return None;
}
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index 69347cc..3ea93b6 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -1,3 +1,4 @@
+use std::cell::RefCell;
use std::marker::PhantomData;
use crate::lockable::{Lockable, LockableIntoInner, OwnedLockable, RawLock, Sharable};
@@ -11,7 +12,7 @@ fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
locks
}
-unsafe impl<L: Lockable + Send + Sync> RawLock for OwnedLockCollection<L> {
+unsafe impl<L: Lockable> RawLock for OwnedLockCollection<L> {
fn kill(&self) {
let locks = get_locks(&self.data);
for lock in locks {
@@ -21,8 +22,14 @@ unsafe impl<L: Lockable + Send + Sync> RawLock for OwnedLockCollection<L> {
unsafe fn raw_lock(&self) {
let locks = get_locks(&self.data);
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_locks_from_panic(&locked)
+ };
+
for lock in locks {
lock.raw_lock();
+ locked.borrow_mut().push(lock);
}
}
@@ -40,8 +47,14 @@ unsafe impl<L: Lockable + Send + Sync> RawLock for OwnedLockCollection<L> {
unsafe fn raw_read(&self) {
let locks = get_locks(&self.data);
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_reads_from_panic(&locked)
+ };
+
for lock in locks {
lock.raw_read();
+ locked.borrow_mut().push(lock);
}
}
@@ -205,15 +218,15 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
&'g self,
key: Key,
) -> LockGuard<'key, L::Guard<'g>, Key> {
- let locks = get_locks(&self.data);
- for lock in locks {
+ let guard = unsafe {
// safety: we have the thread key, and these locks happen in a
// predetermined order
- unsafe { lock.raw_lock() };
- }
+ self.raw_lock();
+
+ // safety: we've locked all of this already
+ self.data.guard()
+ };
- // safety: we've locked all of this already
- let guard = unsafe { self.data.guard() };
LockGuard {
guard,
key,
@@ -250,9 +263,8 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
&'g self,
key: Key,
) -> Option<LockGuard<'key, L::Guard<'g>, Key>> {
- let locks = get_locks(&self.data);
let guard = unsafe {
- if !utils::ordered_try_lock(&locks) {
+ if !self.raw_try_lock() {
return None;
}
@@ -319,19 +331,16 @@ impl<L: Sharable> OwnedLockCollection<L> {
&'g self,
key: Key,
) -> LockGuard<'key, L::ReadGuard<'g>, Key> {
- let locks = get_locks(&self.data);
- for lock in locks {
- // safety: we have the thread key, and these locks happen in a
- // predetermined order
- unsafe { lock.raw_read() };
- }
-
- // safety: we've locked all of this already
- let guard = unsafe { self.data.read_guard() };
- LockGuard {
- guard,
- key,
- _phantom: PhantomData,
+ unsafe {
+ // safety: we have the thread key
+ self.raw_read();
+
+ LockGuard {
+ // safety: we've already acquired the lock
+ guard: self.data.read_guard(),
+ key,
+ _phantom: PhantomData,
+ }
}
}
@@ -365,9 +374,9 @@ impl<L: Sharable> OwnedLockCollection<L> {
&'g self,
key: Key,
) -> Option<LockGuard<'key, L::ReadGuard<'g>, Key>> {
- let locks = get_locks(&self.data);
let guard = unsafe {
- if !utils::ordered_try_read(&locks) {
+ // safety: we have the thread key
+ if !self.raw_try_read() {
return None;
}
diff --git a/src/collection/ref.rs b/src/collection/ref.rs
index 0e2f057..31ef173 100644
--- a/src/collection/ref.rs
+++ b/src/collection/ref.rs
@@ -1,3 +1,4 @@
+use std::cell::RefCell;
use std::fmt::Debug;
use std::marker::PhantomData;
@@ -44,7 +45,7 @@ where
}
}
-unsafe impl<L: Lockable + Send + Sync> RawLock for RefLockCollection<'_, L> {
+unsafe impl<L: Lockable> RawLock for RefLockCollection<'_, L> {
fn kill(&self) {
for lock in &self.locks {
lock.kill();
@@ -52,8 +53,15 @@ unsafe impl<L: Lockable + Send + Sync> RawLock for RefLockCollection<'_, L> {
}
unsafe fn raw_lock(&self) {
+ let locks = &self.locks;
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_locks_from_panic(&locked)
+ };
+
for lock in &self.locks {
lock.raw_lock();
+ locked.borrow_mut().push(*lock);
}
}
@@ -68,8 +76,15 @@ unsafe impl<L: Lockable + Send + Sync> RawLock for RefLockCollection<'_, L> {
}
unsafe fn raw_read(&self) {
+ let locks = &self.locks;
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_reads_from_panic(&locked)
+ };
+
for lock in &self.locks {
lock.raw_read();
+ locked.borrow_mut().push(*lock);
}
}
@@ -229,14 +244,16 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
&'a self,
key: Key,
) -> LockGuard<'key, L::Guard<'a>, Key> {
- for lock in &self.locks {
+ let guard = unsafe {
// safety: we have the thread key
- unsafe { lock.raw_lock() };
- }
+ self.raw_lock();
+
+ // safety: we've locked all of this already
+ self.data.guard()
+ };
LockGuard {
- // safety: we've already acquired the lock
- guard: unsafe { self.data.guard() },
+ guard,
key,
_phantom: PhantomData,
}
@@ -272,7 +289,7 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
key: Key,
) -> Option<LockGuard<'key, L::Guard<'a>, Key>> {
let guard = unsafe {
- if !utils::ordered_try_lock(&self.locks) {
+ if !self.raw_try_lock() {
return None;
}
@@ -337,16 +354,16 @@ impl<'a, L: Sharable> RefLockCollection<'a, L> {
&'a self,
key: Key,
) -> LockGuard<'key, L::ReadGuard<'a>, Key> {
- for lock in &self.locks {
+ unsafe {
// safety: we have the thread key
- unsafe { lock.raw_read() };
- }
+ self.raw_read();
- LockGuard {
- // safety: we've already acquired the lock
- guard: unsafe { self.data.read_guard() },
- key,
- _phantom: PhantomData,
+ LockGuard {
+ // safety: we've already acquired the lock
+ guard: self.data.read_guard(),
+ key,
+ _phantom: PhantomData,
+ }
}
}
@@ -381,7 +398,8 @@ impl<'a, L: Sharable> RefLockCollection<'a, L> {
key: Key,
) -> Option<LockGuard<'key, L::ReadGuard<'a>, Key>> {
let guard = unsafe {
- if !utils::ordered_try_read(&self.locks) {
+ // safety: we have the thread key
+ if !self.raw_try_read() {
return None;
}
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index 05adc3e..e5246cd 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -1,8 +1,10 @@
+use crate::collection::utils;
use crate::lockable::{
Lockable, LockableAsMut, LockableIntoInner, OwnedLockable, RawLock, Sharable,
};
use crate::Keyable;
+use std::cell::RefCell;
use std::collections::HashSet;
use std::marker::PhantomData;
@@ -47,6 +49,11 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
return;
}
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_locks_from_panic(&locked)
+ };
+
unsafe {
'outer: loop {
// safety: we have the thread key
@@ -61,7 +68,9 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
// it does return false, then the lock function is called
// immediately after, causing a panic
// safety: we have the thread key
- if !lock.raw_try_lock() {
+ if lock.raw_try_lock() {
+ locked.borrow_mut().push(*lock)
+ } else {
for lock in locks.iter().take(i) {
// safety: we already locked all of these
lock.raw_unlock();
@@ -91,10 +100,17 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
return true;
}
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_locks_from_panic(&locked)
+ };
+
unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
- if !lock.raw_try_lock() {
+ if lock.raw_try_lock() {
+ locked.borrow_mut().push(*lock);
+ } else {
for lock in locks.iter().take(i) {
// safety: we already locked all of these
lock.raw_unlock();
@@ -119,6 +135,15 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
let mut first_index = 0;
let locks = get_locks(&self.data);
+ if locks.is_empty() {
+ return;
+ }
+
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_reads_from_panic(&locked)
+ };
+
'outer: loop {
// safety: we have the thread key
locks[first_index].raw_read();
@@ -128,7 +153,9 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
// safety: we have the thread key
- if !lock.raw_try_read() {
+ if lock.raw_try_read() {
+ locked.borrow_mut().push(*lock);
+ } else {
for lock in locks.iter().take(i) {
// safety: we already locked all of these
lock.raw_unlock_read();
@@ -154,10 +181,17 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
return true;
}
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ utils::attempt_to_recover_reads_from_panic(&locked)
+ };
+
unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
- if !lock.raw_try_read() {
+ if lock.raw_try_read() {
+ locked.borrow_mut().push(*lock);
+ } else {
for lock in locks.iter().take(i) {
// safety: we already locked all of these
lock.raw_unlock_read();
diff --git a/src/collection/utils.rs b/src/collection/utils.rs
index c114541..d845450 100644
--- a/src/collection/utils.rs
+++ b/src/collection/utils.rs
@@ -1,15 +1,23 @@
+use std::cell::RefCell;
+
use crate::lockable::RawLock;
/// Locks the locks in the order they are given. This causes deadlock if the
/// locks contain duplicates, or if this is called by multiple threads with the
/// locks in different orders.
pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ // safety: everything in locked is locked
+ attempt_to_recover_locks_from_panic(&locked)
+ };
+
unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
- let success = lock.raw_try_lock();
-
- if !success {
+ if lock.raw_try_lock() {
+ locked.borrow_mut().push(*lock);
+ } else {
for lock in &locks[0..i] {
// safety: this lock was already acquired
lock.raw_unlock();
@@ -25,12 +33,18 @@ pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
/// Locks the locks in the order they are given. This causes deadlock if this
/// is called by multiple threads with the locks in different orders.
pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
+ let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ scopeguard::defer_on_unwind! {
+ // safety: everything in locked is locked
+ attempt_to_recover_reads_from_panic(&locked)
+ };
+
unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
- let success = lock.raw_try_read();
-
- if !success {
+ if lock.raw_try_read() {
+ locked.borrow_mut().push(*lock);
+ } else {
for lock in &locks[0..i] {
// safety: this lock was already acquired
lock.raw_unlock_read();
@@ -42,3 +56,19 @@ pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
true
}
}
+
/// Best-effort cleanup invoked while a multi-lock acquisition is unwinding
/// from a panic: releases every exclusive lock recorded in `locked`, most
/// recently acquired first (reverse acquisition order via `pop`).
///
/// If an unlock itself unwinds, the inner `defer_on_unwind!` guard `kill`s
/// every lock still listed in `locked`, poisoning them rather than leaving
/// them silently held.
///
/// # Safety
///
/// Every lock in `locked` must currently be exclusively held by the calling
/// thread; each entry has `raw_unlock` called on it exactly once.
+pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
scopeguard::defer_on_unwind! { locked.borrow().iter().for_each(|l| l.kill()); };
let mut locked = locked.borrow_mut();
while let Some(locked_lock) = locked.pop() {
locked_lock.raw_unlock();
}
}
+
/// Read-lock counterpart of `attempt_to_recover_locks_from_panic`: during a
/// panic unwind, releases every shared (read) lock recorded in `locked`,
/// most recently acquired first.
///
/// If a `raw_unlock_read` itself unwinds, the inner `defer_on_unwind!` guard
/// `kill`s every lock still listed in `locked`, poisoning them instead of
/// leaving them held.
///
/// # Safety
///
/// Every lock in `locked` must currently be read-locked by the calling
/// thread; each entry has `raw_unlock_read` called on it exactly once.
+pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
scopeguard::defer_on_unwind! { locked.borrow().iter().for_each(|l| l.kill()); };
let mut locked = locked.borrow_mut();
while let Some(locked_lock) = locked.pop() {
locked_lock.raw_unlock_read();
}
}