summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/collection.rs78
-rw-r--r--src/collection/boxed.rs8
-rw-r--r--src/collection/owned.rs7
-rw-r--r--src/collection/ref.rs9
-rw-r--r--src/collection/retry.rs8
-rw-r--r--src/lib.rs11
-rw-r--r--src/lockable.rs123
7 files changed, 201 insertions, 43 deletions
diff --git a/src/collection.rs b/src/collection.rs
index a84c1ce..c51e3cf 100644
--- a/src/collection.rs
+++ b/src/collection.rs
@@ -1,6 +1,6 @@
use std::marker::PhantomData;
-use crate::{key::Keyable, lockable::Lock};
+use crate::{key::Keyable, lockable::RawLock};
mod boxed;
mod guard;
@@ -8,25 +8,95 @@ mod owned;
mod r#ref;
mod retry;
+/// Locks a collection of locks, which cannot be shared immutably.
+///
+/// This could be a tuple of [`Lockable`] types, an array, or a `Vec`. But it
+/// can be safely locked without causing a deadlock.
+///
+/// The data in this collection is guaranteed to not contain duplicates because
+/// `L` must always implement [`OwnedLockable`]. The underlying data may not be
+/// immutably referenced and locked. Because of this, there is no need for
+/// sorting the locks in the collection, or checking for duplicates, because it
+/// can be guaranteed that until the underlying collection is mutated (which
+/// requires releasing all acquired locks in the collection to do), then the
+/// locks will stay in the same order and be locked in that order, preventing
+/// cyclic wait.
+///
+/// [`Lockable`]: `crate::lockable::Lockable`
+/// [`OwnedLockable`]: `crate::lockable::OwnedLockable`
#[derive(Debug)]
pub struct OwnedLockCollection<L> {
data: L,
}
-/// A type which can be locked.
+/// Locks a reference to a collection of locks, by sorting them by memory
+/// address.
///
/// This could be a tuple of [`Lockable`] types, an array, or a `Vec`. But it
/// can be safely locked without causing a deadlock.
+///
+/// Upon construction, it must be confirmed that the collection contains no
+/// duplicate locks. This can be done either by using [`OwnedLockable`] or by
+/// checking for duplicates. Regardless of how this is done, the locks will be
+/// sorted by their
+/// memory address before locking them. The sorted order of the locks is stored
+/// within this collection.
+///
+/// Unlike [`BoxedLockCollection`], this type does not allocate memory for the
+/// data, although it does allocate memory for the sorted list of lock
+/// references. This makes it slightly faster, but lifetimes must be handled.
+///
+/// [`Lockable`]: `crate::lockable::Lockable`
+/// [`OwnedLockable`]: `crate::lockable::OwnedLockable`
pub struct RefLockCollection<'a, L> {
data: &'a L,
- locks: Vec<&'a dyn Lock>,
+ locks: Vec<&'a dyn RawLock>,
}
+/// Locks a collection of locks, stored in the heap, by sorting them by memory
+/// address.
+///
+/// This could be a tuple of [`Lockable`] types, an array, or a `Vec`. But it
+/// can be safely locked without causing a deadlock.
+///
+/// Upon construction, it must be confirmed that the collection contains no
+/// duplicate locks. This can be done either by using [`OwnedLockable`] or by
+/// checking for duplicates. Regardless of how this is done, the locks will be
+/// sorted by their
+/// memory address before locking them. The sorted order of the locks is stored
+/// within this collection.
+///
+/// Unlike [`RefLockCollection`], this is a self-referential type which boxes
+/// the data that is given to it. This means no lifetimes are necessary on the
+/// type itself, but it is slightly slower because of the memory allocation.
+///
+/// [`Lockable`]: `crate::lockable::Lockable`
+/// [`OwnedLockable`]: `crate::lockable::OwnedLockable`
pub struct BoxedLockCollection<L> {
data: Box<L>,
- locks: Vec<&'static dyn Lock>,
+ locks: Vec<&'static dyn RawLock>,
}
+/// Locks a collection of locks using a retrying algorithm.
+///
+/// This could be a tuple of [`Lockable`] types, an array, or a `Vec`. But it
+/// can be safely locked without causing a deadlock.
+///
+/// The data in this collection is guaranteed to not contain duplicates, but
+/// it may not be sorted. In some cases the lack of sorting can increase
+/// performance. However, in most cases, this collection will be slower. The
+/// absence of cyclic wait is not guaranteed here, so the locking algorithm
+/// must release all of its locks if one of the lock attempts blocks. This
+/// results in wasted time and potential [livelocking].
+///
+/// However, one case where this might be faster than [`RefLockCollection`] is
+/// when the first lock in the collection is always the first to be locked in
+/// any collection containing it, and the other locks in the collection are
+/// always locked after that first lock is acquired. This means that as soon
+/// as it is locked, there
+/// will be no need to unlock it later on subsequent lock attempts, because
+/// they will always succeed.
+///
+/// [`Lockable`]: `crate::lockable::Lockable`
+/// [`OwnedLockable`]: `crate::lockable::OwnedLockable`
+/// [livelocking]: https://en.wikipedia.org/wiki/Deadlock#Livelock
#[derive(Debug)]
pub struct RetryingLockCollection<L> {
data: L,
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index a62a33d..ea840ab 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -1,14 +1,14 @@
use std::fmt::Debug;
use std::marker::PhantomData;
-use crate::lockable::Lock;
-use crate::{Keyable, Lockable, OwnedLockable, Sharable};
+use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable};
+use crate::Keyable;
use super::{BoxedLockCollection, LockGuard};
/// returns `true` if the sorted list contains a duplicate
#[must_use]
-fn contains_duplicates(l: &[&dyn Lock]) -> bool {
+fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
l.windows(2)
.any(|window| std::ptr::eq(window[0], window[1]))
}
@@ -18,7 +18,7 @@ unsafe impl<L: Lockable> Lockable for BoxedLockCollection<L> {
type ReadGuard<'g> = L::ReadGuard<'g> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.data.get_ptrs(ptrs)
}
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index eb5e03a..d77d568 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -1,10 +1,11 @@
use std::marker::PhantomData;
-use crate::{lockable::Lock, Keyable, Lockable, OwnedLockable, Sharable};
+use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable};
+use crate::Keyable;
use super::{LockGuard, OwnedLockCollection};
-fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn Lock> {
+fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
let mut locks = Vec::new();
data.get_ptrs(&mut locks);
locks
@@ -15,7 +16,7 @@ unsafe impl<L: Lockable> Lockable for OwnedLockCollection<L> {
type ReadGuard<'g> = L::ReadGuard<'g> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.data.get_ptrs(ptrs)
}
diff --git a/src/collection/ref.rs b/src/collection/ref.rs
index 329f0ae..2e2883a 100644
--- a/src/collection/ref.rs
+++ b/src/collection/ref.rs
@@ -1,12 +1,13 @@
use std::fmt::Debug;
use std::marker::PhantomData;
-use crate::{key::Keyable, lockable::Lock, Lockable, OwnedLockable, Sharable};
+use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable};
+use crate::Keyable;
use super::{LockGuard, RefLockCollection};
#[must_use]
-pub fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn Lock> {
+pub fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
let mut locks = Vec::new();
data.get_ptrs(&mut locks);
locks.sort_by_key(|lock| std::ptr::from_ref(*lock));
@@ -15,7 +16,7 @@ pub fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn Lock> {
/// returns `true` if the sorted list contains a duplicate
#[must_use]
-fn contains_duplicates(l: &[&dyn Lock]) -> bool {
+fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
l.windows(2)
.any(|window| std::ptr::eq(window[0], window[1]))
}
@@ -43,7 +44,7 @@ unsafe impl<'c, L: Lockable> Lockable for RefLockCollection<'c, L> {
type ReadGuard<'g> = L::ReadGuard<'g> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
ptrs.extend_from_slice(&self.locks);
}
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index 58a0642..d15d7d6 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -1,4 +1,6 @@
-use crate::{lockable::Lock, Keyable, Lockable, OwnedLockable, Sharable};
+use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable};
+use crate::Keyable;
+
use std::collections::HashSet;
use std::marker::PhantomData;
@@ -7,7 +9,7 @@ use super::{LockGuard, RetryingLockCollection};
fn contains_duplicates<L: Lockable>(data: L) -> bool {
let mut locks = Vec::new();
data.get_ptrs(&mut locks);
- let locks = locks.into_iter().map(|l| l as *const dyn Lock);
+ let locks = locks.into_iter().map(|l| l as *const dyn RawLock);
let mut locks_set = HashSet::new();
for lock in locks {
@@ -24,7 +26,7 @@ unsafe impl<L: Lockable> Lockable for RetryingLockCollection<L> {
type ReadGuard<'g> = L::ReadGuard<'g> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.data.get_ptrs(ptrs)
}
diff --git a/src/lib.rs b/src/lib.rs
index 668f3db..673d279 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -107,19 +107,24 @@
//! ```
mod key;
-mod lockable;
pub mod collection;
+pub mod lockable;
pub mod mutex;
pub mod rwlock;
-pub use collection::BoxedLockCollection as LockCollection;
pub use key::{Keyable, ThreadKey};
-pub use lockable::{Lockable, OwnedLockable, Sharable};
#[cfg(feature = "spin")]
pub use mutex::SpinLock;
+/// A collection of locks that can be acquired simultaneously.
+///
+/// This re-exports [`BoxedLockCollection`] as a sensible default.
+///
+/// [`BoxedLockCollection`]: collection::BoxedLockCollection
+pub type LockCollection<L> = collection::BoxedLockCollection<L>;
+
/// A mutual exclusion primitive useful for protecting shared data, which cannot deadlock.
///
/// By default, this uses `parking_lot` as a backend.
diff --git a/src/lockable.rs b/src/lockable.rs
index 23aeb4c..2f98d3a 100644
--- a/src/lockable.rs
+++ b/src/lockable.rs
@@ -7,14 +7,14 @@ use crate::{
use lock_api::{RawMutex, RawRwLock};
-/// A type that may be locked and unlocked
+/// A raw lock type that may be locked and unlocked
///
/// # Safety
///
/// A deadlock must never occur. The `unlock` method must correctly unlock the
/// data. The `get_ptrs` method must be implemented correctly. The `Output`
/// must be unlocked when it is dropped.
-pub unsafe trait Lock: Send + Sync {
+pub unsafe trait RawLock: Send + Sync {
/// Blocks until the lock is acquired
///
/// # Safety
@@ -46,32 +46,111 @@ pub unsafe trait Lock: Send + Sync {
/// It is undefined behavior to use this if the lock is not acquired
unsafe fn unlock(&self);
+ /// Blocks until the data the lock protects can be safely read.
+ ///
+ /// Some locks, but not all, will allow multiple readers at once. If
+ /// multiple readers are allowed for a [`Lockable`] type, then the
+ /// [`Sharable`] marker trait should be implemented.
+ ///
+ /// # Safety
+ ///
+ /// It is undefined behavior to use this without ownership or mutable
+ /// access to the [`ThreadKey`], which should last as long as the return
+ /// value is alive.
+ ///
+ /// [`ThreadKey`]: `crate::ThreadKey`
unsafe fn read(&self);
+ /// Attempts to read without blocking.
+ ///
+ /// Returns `true` if successful, `false` otherwise.
+ ///
+ /// Some locks, but not all, will allow multiple readers at once. If
+ /// multiple readers are allowed for a [`Lockable`] type, then the
+ /// [`Sharable`] marker trait should be implemented.
+ ///
+ /// # Safety
+ ///
+ /// It is undefined behavior to use this without ownership or mutable
+ /// access to the [`ThreadKey`], which should last as long as the return
+ /// value is alive.
+ ///
+ /// [`ThreadKey`]: `crate::ThreadKey`
unsafe fn try_read(&self) -> bool;
+ /// Releases the lock after calling `read`.
+ ///
+ /// # Safety
+ ///
+ /// It is undefined behavior to use this if the read lock is not acquired
unsafe fn unlock_read(&self);
}
+/// A type that may be locked and unlocked.
+///
+/// This trait is usually implemented on collections of [`RawLock`]s. For
+/// example, a `Vec<Mutex<i32>>`.
+///
+/// # Safety
+///
+/// Acquiring the locks returned by `get_ptrs` must allow for the values
+/// returned by `guard` or `read_guard` to be safely used for exclusive or
+/// shared access, respectively.
+///
+/// Dropping the `Guard` and `ReadGuard` types must unlock those same locks.
+///
+/// The order of the resulting list from `get_ptrs` must be deterministic. As
+/// long as the value is not mutated, the references must always be in the same
+/// order.
pub unsafe trait Lockable {
- /// The guard returned that does not hold a key
+ /// The exclusive guard that does not hold a key
type Guard<'g>
where
Self: 'g;
+ /// The shared guard type that does not hold a key
type ReadGuard<'g>
where
Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>);
+ /// Yields a list of references to the [`RawLock`]s contained within this
+ /// value.
+ ///
+ /// These reference locks which must be locked before acquiring a guard,
+ /// and unlocked when the guard is dropped. The order of the resulting list
+ /// is deterministic. As long as the value is not mutated, the references
+ /// will always be in the same order.
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>);
+ /// Returns a guard that can be used to access the underlying data mutably.
+ ///
+ /// # Safety
+ ///
+ /// All locks given by calling [`Lockable::get_ptrs`] must be locked
+ /// exclusively before calling this function. The locks must not be
+ /// unlocked until this guard is dropped.
#[must_use]
unsafe fn guard(&self) -> Self::Guard<'_>;
+ /// Returns a guard that can be used to immutably access the underlying
+ /// data.
+ ///
+ /// # Safety
+ ///
+ /// All locks given by calling [`Lockable::get_ptrs`] must be locked using
+ /// [`RawLock::read`] before calling this function. The locks must not be
+ /// unlocked until this guard is dropped.
#[must_use]
unsafe fn read_guard(&self) -> Self::ReadGuard<'_>;
}
+/// A marker trait to indicate that multiple readers can access the lock at a
+/// time.
+///
+/// # Safety
+///
+/// This type must only be implemented if the lock can be safely shared between
+/// multiple readers.
pub unsafe trait Sharable: Lockable {}
/// A type that may be locked and unlocked, and is known to be the only valid
@@ -83,7 +162,7 @@ pub unsafe trait Sharable: Lockable {}
/// time, i.e., this must either be an owned value or a mutable reference.
pub unsafe trait OwnedLockable: Lockable {}
-unsafe impl<T: Send, R: RawMutex + Send + Sync> Lock for Mutex<T, R> {
+unsafe impl<T: Send, R: RawMutex + Send + Sync> RawLock for Mutex<T, R> {
unsafe fn lock(&self) {
self.raw().lock()
}
@@ -109,7 +188,7 @@ unsafe impl<T: Send, R: RawMutex + Send + Sync> Lock for Mutex<T, R> {
}
}
-unsafe impl<T: Send, R: RawRwLock + Send + Sync> Lock for RwLock<T, R> {
+unsafe impl<T: Send, R: RawRwLock + Send + Sync> RawLock for RwLock<T, R> {
unsafe fn lock(&self) {
self.raw().lock_exclusive()
}
@@ -139,7 +218,7 @@ unsafe impl<T: Send, R: RawMutex + Send + Sync> Lockable for Mutex<T, R> {
type Guard<'g> = MutexRef<'g, T, R> where Self: 'g;
type ReadGuard<'g> = MutexRef<'g, T, R> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
ptrs.push(self);
}
@@ -157,7 +236,7 @@ unsafe impl<T: Send, R: RawRwLock + Send + Sync> Lockable for RwLock<T, R> {
type ReadGuard<'g> = RwLockReadRef<'g, T, R> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
ptrs.push(self);
}
@@ -181,7 +260,7 @@ unsafe impl<'l, T: Send, R: RawRwLock + Send + Sync> Lockable for ReadLock<'l, T
type ReadGuard<'g> = RwLockReadRef<'g, T, R> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
ptrs.push(self.as_ref());
}
@@ -199,7 +278,7 @@ unsafe impl<'l, T: Send, R: RawRwLock + Send + Sync> Lockable for WriteLock<'l,
type ReadGuard<'g> = RwLockWriteRef<'g, T, R> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
ptrs.push(self.as_ref());
}
@@ -219,7 +298,7 @@ unsafe impl<T: Lockable> Lockable for &T {
type ReadGuard<'g> = T::ReadGuard<'g> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
(*self).get_ptrs(ptrs);
}
@@ -237,7 +316,7 @@ unsafe impl<T: Lockable> Lockable for &mut T {
type ReadGuard<'g> = T::ReadGuard<'g> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
(**self).get_ptrs(ptrs)
}
@@ -257,7 +336,7 @@ unsafe impl<A: Lockable> Lockable for (A,) {
type ReadGuard<'g> = (A::ReadGuard<'g>,) where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.0.get_ptrs(ptrs);
}
@@ -275,7 +354,7 @@ unsafe impl<A: Lockable, B: Lockable> Lockable for (A, B) {
type ReadGuard<'g> = (A::ReadGuard<'g>, B::ReadGuard<'g>) where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.0.get_ptrs(ptrs);
self.1.get_ptrs(ptrs);
}
@@ -294,7 +373,7 @@ unsafe impl<A: Lockable, B: Lockable, C: Lockable> Lockable for (A, B, C) {
type ReadGuard<'g> = (A::ReadGuard<'g>, B::ReadGuard<'g>, C::ReadGuard<'g>) where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.0.get_ptrs(ptrs);
self.1.get_ptrs(ptrs);
self.2.get_ptrs(ptrs);
@@ -323,7 +402,7 @@ unsafe impl<A: Lockable, B: Lockable, C: Lockable, D: Lockable> Lockable for (A,
D::ReadGuard<'g>,
) where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.0.get_ptrs(ptrs);
self.1.get_ptrs(ptrs);
self.2.get_ptrs(ptrs);
@@ -368,7 +447,7 @@ unsafe impl<A: Lockable, B: Lockable, C: Lockable, D: Lockable, E: Lockable> Loc
E::ReadGuard<'g>,
) where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.0.get_ptrs(ptrs);
self.1.get_ptrs(ptrs);
self.2.get_ptrs(ptrs);
@@ -418,7 +497,7 @@ unsafe impl<A: Lockable, B: Lockable, C: Lockable, D: Lockable, E: Lockable, F:
F::ReadGuard<'g>,
) where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.0.get_ptrs(ptrs);
self.1.get_ptrs(ptrs);
self.2.get_ptrs(ptrs);
@@ -473,7 +552,7 @@ unsafe impl<A: Lockable, B: Lockable, C: Lockable, D: Lockable, E: Lockable, F:
G::ReadGuard<'g>,
) where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.0.get_ptrs(ptrs);
self.1.get_ptrs(ptrs);
self.2.get_ptrs(ptrs);
@@ -573,7 +652,7 @@ unsafe impl<T: Lockable, const N: usize> Lockable for [T; N] {
type ReadGuard<'g> = [T::ReadGuard<'g>; N] where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
for lock in self {
lock.get_ptrs(ptrs);
}
@@ -603,7 +682,7 @@ unsafe impl<T: Lockable> Lockable for Box<[T]> {
type ReadGuard<'g> = Box<[T::ReadGuard<'g>]> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
for lock in self.iter() {
lock.get_ptrs(ptrs);
}
@@ -633,7 +712,7 @@ unsafe impl<T: Lockable> Lockable for Vec<T> {
type ReadGuard<'g> = Box<[T::ReadGuard<'g>]> where Self: 'g;
- fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn Lock>) {
+ fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
for lock in self {
lock.get_ptrs(ptrs);
}