From 2f331ada10745bf93fdd80c30142d90c5b2bf9b5 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 30 Jul 2022 16:33:01 -0400 Subject: [PATCH 01/47] Switch UnprotectedStorage::get_mut to accept &self This is the first step for addressing a soundness issue where parallel joins create aliasing mutable references to the storage and where in regular joins for some storages previously returned references will be invalidated by calling `get_mut` (at least under stacked borrows afaict). The internals of each storage are adjusted to store components within a `SyncUnsafeCell` to allow handing out mutable references. `SyncUnsafeCell` is a wrapper of `UnsafeCell` that provides `Sync` by default. Other various details: * Edge cases with `as` casts on 16-bit and 32-bit platforms addressed to avoid UB in `UnprotectedStorage` impls. * Safety documentation added to unsafe code usage within `UnprotectedStorage` impls. * Safety documentation added to unsafe impls of DistinctStorage (only in storages.rs) * Started introduction of `#[deny(unsafe_op_in_unsafe_fn)]` lint in various modules. * Safety requirements on `UnprotectedStorage::get/get_mut` updated. * `NullStorage` internals updated to handle ZSTs better (including properly dropping them when `clean` is called and not dropping them in `insert`) and not require `T: Default`. * In `Storage::insert` add the `id` to the mask after calling `inner.insert()` to protect against unwinding from the `insert` call. --- src/storage/flagged.rs | 61 +++-- src/storage/mod.rs | 53 +++- src/storage/storages.rs | 432 ++++++++++++++++++++++---------- src/storage/sync_unsafe_cell.rs | 53 ++++ 4 files changed, 431 insertions(+), 168 deletions(-) create mode 100644 src/storage/sync_unsafe_cell.rs diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index 4adb86828..54512499f 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -1,9 +1,14 @@ +// TODO: promote to the whole crate +#![deny(unsafe_op_in_unsafe_fn)] + use std::marker::PhantomData; use hibitset::BitSetLike; use crate::{ - storage::{ComponentEvent, DenseVecStorage, Tracked, TryDefault, UnprotectedStorage}, + storage::{ + ComponentEvent, DenseVecStorage, SyncUnsafeCell, Tracked, TryDefault, UnprotectedStorage, + }, world::{Component, Index}, }; @@ -165,7 +170,7 @@ use shrev::EventChannel; /// } /// ``` pub struct FlaggedStorage> { - channel: EventChannel, + channel: SyncUnsafeCell>, storage: T, #[cfg(feature = "storage-event-control")] event_emission: bool, @@ -190,7 +195,7 @@ where { fn default() -> Self { FlaggedStorage { - channel: EventChannel::::default(), + channel: SyncUnsafeCell::new(EventChannel::::default()), storage: T::unwrap_default(), #[cfg(feature = "storage-event-control")] event_emission: true, @@ -210,51 +215,73 @@ impl> UnprotectedStorage for FlaggedSt where B: BitSetLike, { - self.storage.clean(has); + // SAFETY: Requirements passed to caller. + unsafe { self.storage.clean(has) }; } unsafe fn get(&self, id: Index) -> &C { - self.storage.get(id) + // SAFETY: Requirements passed to caller. + unsafe { self.storage.get_mut(id) } } #[cfg(feature = "nightly")] - unsafe fn get_mut(&mut self, id: Index) -> >::AccessMut<'_> { + unsafe fn get_mut(&self, id: Index) -> >::AccessMut<'_> { if self.emit_event() { - self.channel.single_write(ComponentEvent::Modified(id)); + let channel_ptr = self.channel.get(); + // SAFETY: Caller required to ensure references returned from other + // safe methods such as Tracked::channel are no longer alive. 
+ unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); } - self.storage.get_mut(id) + // SAFETY: Requirements passed to caller. + unsafe { self.storage.get_mut(id) } } #[cfg(not(feature = "nightly"))] - unsafe fn get_mut(&mut self, id: Index) -> &mut C { + unsafe fn get_mut(&self, id: Index) -> &mut C { if self.emit_event() { - self.channel.single_write(ComponentEvent::Modified(id)); + let channel_ptr = self.channel.get(); + // SAFETY: Caller required to ensure references returned from other + // safe methods such as Tracked::channel are no longer alive. + unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); } - self.storage.get_mut(id) + // SAFETY: Requirements passed to caller. + unsafe { self.storage.get_mut(id) } } unsafe fn insert(&mut self, id: Index, comp: C) { if self.emit_event() { - self.channel.single_write(ComponentEvent::Inserted(id)); + self.channel + .get_mut() + .single_write(ComponentEvent::Inserted(id)); } - self.storage.insert(id, comp); + // SAFETY: Requirements passed to caller. + unsafe { self.storage.insert(id, comp) }; } unsafe fn remove(&mut self, id: Index) -> C { if self.emit_event() { - self.channel.single_write(ComponentEvent::Removed(id)); + self.channel + .get_mut() + .single_write(ComponentEvent::Removed(id)); } - self.storage.remove(id) + // SAFETY: Requirements passed to caller. + unsafe { self.storage.remove(id) } } } impl Tracked for FlaggedStorage { fn channel(&self) -> &EventChannel { - &self.channel + let channel_ptr = self.channel.get(); + // SAFETY: The only place that mutably accesses the channel via a shared + // reference is the impl of `UnprotectedStorage::get_mut` which requires + // callers to avoid calling safe methods with `&self` while those + // mutable references are in use and to ensure any references from those + // safe methods are no longer alive. + unsafe { &*channel_ptr } } fn channel_mut(&mut self) -> &mut EventChannel { - &mut self.channel + self.channel.get_mut() } #[cfg(feature = "storage-event-control")] diff --git a/src/storage/mod.rs b/src/storage/mod.rs index e08365ca2..4c4632512 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -37,6 +37,7 @@ use crate::{ }; use self::drain::Drain; +use self::sync_unsafe_cell::SyncUnsafeCell; mod data; #[cfg(feature = "nightly")] @@ -47,6 +48,7 @@ mod flagged; mod generic; mod restrict; mod storages; +mod sync_unsafe_cell; #[cfg(test)] mod tests; mod track; @@ -351,9 +353,9 @@ where std::mem::swap(&mut v, unsafe { self.data.inner.get_mut(id).deref_mut() }); Ok(Some(v)) } else { - self.data.mask.add(id); // SAFETY: The mask was previously empty, so it is safe to insert. unsafe { self.data.inner.insert(id, v) }; + self.data.mask.add(id); Ok(None) } } else { @@ -512,7 +514,8 @@ pub trait UnprotectedStorage: TryDefault { Self: 'a; /// Clean the storage given a bitset with bits set for valid indices. - /// Allows us to safely drop the storage. + /// + /// Allows us to drop the storage without leaking components. /// /// # Safety /// @@ -529,10 +532,13 @@ pub trait UnprotectedStorage: TryDefault { /// # Safety /// /// May only be called after a call to `insert` with `id` and - /// no following call to `remove` with `id`. + /// no following call to `remove` with `id` or to `clean`. /// /// A mask should keep track of those states, and an `id` being contained /// in the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing mutable reference to this component + /// (i.e. 
obtained from `get_mut` with the same `id`). unsafe fn get(&self, id: Index) -> &T; /// Tries mutating the data associated with an `Index`. @@ -542,12 +548,22 @@ pub trait UnprotectedStorage: TryDefault { /// # Safety /// /// May only be called after a call to `insert` with `id` and - /// no following call to `remove` with `id`. + /// no following call to `remove` with `id` or to `clean`. /// - /// A mask should keep track of those states, and an `id` being contained - /// in the tracking mask is sufficient to call this method. + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. + /// obtained with the same `id` from `get` or `get_mut`). Additionally, + /// while the references returned here are in use, safe methods on this + /// type that take `&self` (e.g. [`SliceAccess::as_slice`], + /// [`Tracked::channel`]) must not be called and any references returned by + /// such methods must no longer be alive when `get_mut` is called. + /// + /// Unless this type implements `DistinctStorage`, calling this from + /// multiple threads at once is unsound. #[cfg(feature = "nightly")] - unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_>; + unsafe fn get_mut(&self, id: Index) -> Self::AccessMut<'_>; /// Tries mutating the data associated with an `Index`. /// This is unsafe because the external set used @@ -556,22 +572,35 @@ pub trait UnprotectedStorage: TryDefault { /// # Safety /// /// May only be called after a call to `insert` with `id` and - /// no following call to `remove` with `id`. + /// no following call to `remove` with `id` or to `clean`. /// - /// A mask should keep track of those states, and an `id` being contained - /// in the tracking mask is sufficient to call this method. + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. + /// obtained with the same `id` from `get` or `get_mut`). + /// + /// Unless this type implements `DistinctStorage`, calling this from + /// multiple threads at once is unsound. #[cfg(not(feature = "nightly"))] - unsafe fn get_mut(&mut self, id: Index) -> &mut T; + unsafe fn get_mut(&self, id: Index) -> &mut T; /// Inserts new data for a given `Index`. /// /// # Safety /// + // TODO: does anything rely on `insert` not having been called before or is + // this just trying to make sure things are dropped (if so it should not be + // listed under the safety requirements)? /// May only be called if `insert` was not called with `id` before, or - /// was reverted by a call to `remove` with `id. + /// was reverted by a call to `remove` with `id` or a call to `clean`. /// /// A mask should keep track of those states, and an `id` missing from the /// mask is sufficient to call `insert`. + /// + /// If this call unwinds the insertion should be considered to have failed + /// and not be included in the mask or count as having called `insert` for + /// the safety requirements of other methods here. unsafe fn insert(&mut self, id: Index, value: T); /// Removes the data associated with an `Index`. 
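NOTE (illustrative sketch with hypothetical names, not items from this patch):
the `&self` receiver on `get_mut` is made possible by wrapping each stored
component in an `UnsafeCell`-based cell, so an exclusive reference can be
produced from a shared borrow once the caller upholds the aliasing rules
documented above; the patch's `SyncUnsafeCell` newtype additionally restores
`Sync` so storages remain shareable across threads. A minimal form of that
pattern, assuming per-slot exclusivity is enforced by the caller:

    use core::cell::UnsafeCell;

    struct Slot<T>(UnsafeCell<T>);

    impl<T> Slot<T> {
        fn new(value: T) -> Self {
            Slot(UnsafeCell::new(value))
        }

        /// # Safety
        ///
        /// No other reference to this slot's value may be alive, and
        /// concurrent callers must target distinct slots.
        unsafe fn get_mut(&self) -> &mut T {
            // Routing the access through the `UnsafeCell` raw pointer is what
            // allows handing out `&mut T` from `&self`; exclusivity remains
            // the caller's obligation, mirroring the requirements above.
            unsafe { &mut *self.0.get() }
        }
    }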
diff --git a/src/storage/storages.rs b/src/storage/storages.rs index 073a98387..161663847 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -1,12 +1,15 @@ +// TODO: promote to the whole crate +#![deny(unsafe_op_in_unsafe_fn)] //! Different types of storages you can use for your components. -use std::{collections::BTreeMap, mem::MaybeUninit}; +use core::{marker::PhantomData, mem::MaybeUninit, ptr, ptr::NonNull}; +use std::collections::BTreeMap; use ahash::AHashMap as HashMap; use hibitset::BitSetLike; use crate::{ - storage::{DistinctStorage, UnprotectedStorage}, + storage::{DistinctStorage, SyncUnsafeCell, UnprotectedStorage}, world::Index, }; @@ -23,7 +26,7 @@ pub trait SliceAccess { } /// BTreeMap-based storage. -pub struct BTreeStorage(BTreeMap); +pub struct BTreeStorage(BTreeMap>); impl Default for BTreeStorage { fn default() -> Self { @@ -33,41 +36,44 @@ impl Default for BTreeStorage { impl UnprotectedStorage for BTreeStorage { #[cfg(feature = "nightly")] - type AccessMut<'a> - where - T: 'a, - = &'a mut T; + type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) where B: BitSetLike, { - // nothing to do + // nothing to do (components will be dropped with the storage) } unsafe fn get(&self, id: Index) -> &T { - &self.0[&id] + let ptr = self.0[&id].get(); + // SAFETY: See `VecStorage` impl. + unsafe { &*ptr } } - unsafe fn get_mut(&mut self, id: Index) -> &mut T { - self.0.get_mut(&id).unwrap() + unsafe fn get_mut(&self, id: Index) -> &mut T { + let ptr = self.0[&id].get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } } unsafe fn insert(&mut self, id: Index, v: T) { - self.0.insert(id, v); + self.0.insert(id, SyncUnsafeCell::new(v)); } unsafe fn remove(&mut self, id: Index) -> T { - self.0.remove(&id).unwrap() + self.0.remove(&id).unwrap().0.into_inner() } } +// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when +// provided distinct indices. unsafe impl DistinctStorage for BTreeStorage {} /// `HashMap`-based storage. Best suited for rare components. /// /// This uses the [std::collections::HashMap] internally. -pub struct HashMapStorage(HashMap); +pub struct HashMapStorage(HashMap>); impl Default for HashMapStorage { fn default() -> Self { @@ -77,35 +83,38 @@ impl Default for HashMapStorage { impl UnprotectedStorage for HashMapStorage { #[cfg(feature = "nightly")] - type AccessMut<'a> - where - T: 'a, - = &'a mut T; + type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) where B: BitSetLike, { - //nothing to do + // nothing to do (components will be dropped with the storage) } unsafe fn get(&self, id: Index) -> &T { - &self.0[&id] + let ptr = self.0[&id].get(); + // SAFETY: See `VecStorage` impl. + unsafe { &*ptr } } - unsafe fn get_mut(&mut self, id: Index) -> &mut T { - self.0.get_mut(&id).unwrap() + unsafe fn get_mut(&self, id: Index) -> &mut T { + let ptr = self.0[&id].get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } } unsafe fn insert(&mut self, id: Index, v: T) { - self.0.insert(id, v); + self.0.insert(id, SyncUnsafeCell::new(v)); } unsafe fn remove(&mut self, id: Index) -> T { - self.0.remove(&id).unwrap() + self.0.remove(&id).unwrap().0.into_inner() } } +// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when +// provided distinct indices. unsafe impl DistinctStorage for HashMapStorage {} /// Dense vector storage. 
Has a redirection 2-way table @@ -121,7 +130,7 @@ unsafe impl DistinctStorage for HashMapStorage {} /// a particular entity's position within this slice may change /// over time. pub struct DenseVecStorage { - data: Vec, + data: Vec>, entity_id: Vec, data_id: Vec>, } @@ -145,7 +154,13 @@ impl SliceAccess for DenseVecStorage { /// and especially do not correspond with entity IDs. #[inline] fn as_slice(&self) -> &[Self::Element] { - self.data.as_slice() + let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.data.as_slice()).get(); + // SAFETY: The only place that mutably accesses these elements via a + // shared reference is the impl of `UnprotectedStorage::get_mut` which + // requires callers to avoid calling safe methods with `&self` while + // those mutable references are in use and to ensure any references + // from those safe methods are no longer alive. + unsafe { &*unsafe_cell_slice_ptr } } /// Returns a mutable slice of all the components in this storage. @@ -154,112 +169,179 @@ impl SliceAccess for DenseVecStorage { /// and especially do not correspond with entity IDs. #[inline] fn as_mut_slice(&mut self) -> &mut [Self::Element] { - self.data.as_mut_slice() + SyncUnsafeCell::as_slice_mut(self.data.as_mut_slice()) } } impl UnprotectedStorage for DenseVecStorage { #[cfg(feature = "nightly")] - type AccessMut<'a> - where - T: 'a, - = &'a mut T; + type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) where B: BitSetLike, { - // nothing to do + // nothing to do (components will be dropped with the storage) } unsafe fn get(&self, id: Index) -> &T { - let did = self.data_id.get_unchecked(id as usize).assume_init(); - self.data.get_unchecked(did as usize) - } - - unsafe fn get_mut(&mut self, id: Index) -> &mut T { - let did = self.data_id.get_unchecked(id as usize).assume_init(); - self.data.get_unchecked_mut(did as usize) + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY (get_unchecked and assume_init): Caller required to call + // `insert` with this `id` (with no following call to `remove` with that + // id or to `clean`). + let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() }; + // SAFETY: Indices retrieved from `data_id` with a valid `id` will + // always correspond to an element in `data`. + let ptr = unsafe { self.data.get_unchecked(did as usize) }.get(); + // SAFETY: See `VecStorage` impl. + unsafe { &*ptr } + } + + unsafe fn get_mut(&self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY (get_unchecked and assume_init): Caller required to call + // `insert` with this `id` (with no following call to `remove` with that + // id or to `clean`). + let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() }; + // SAFETY: Indices retrieved from `data_id` with a valid `id` will + // always correspond to an element in `data`. + let ptr = unsafe { self.data.get_unchecked(did as usize) }.get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } } unsafe fn insert(&mut self, id: Index, v: T) { - let id = id as usize; + let id = if Index::BITS > usize::BITS { + // Saturate the cast to usize::MAX so if this overflows usize the + // allocation below will fail. 
+ core::cmp::min(id, usize::MAX as Index) as usize + } else { + id as usize + }; + if self.data_id.len() <= id { - let delta = id + 1 - self.data_id.len(); + // NOTE: saturating add ensures that if this computation would + // overflow it will instead fail the allocation when calling + // reserve. + let delta = if Index::BITS >= usize::BITS { + id.saturating_add(1) + } else { + id + 1 + } - self.data_id.len(); self.data_id.reserve(delta); - self.data_id.set_len(id + 1); + // NOTE: Allocation would have failed if this addition would overflow + // SAFETY: MaybeUninit elements don't require initialization and + // the reserve call ensures the capacity will be sufficient for this + // new length. + unsafe { self.data_id.set_len(id + 1) }; } - self.data_id - .get_unchecked_mut(id) - .as_mut_ptr() - .write(self.data.len() as Index); + // NOTE: `as` cast here is not lossy since the length will be at most + // `Index::MAX` if there is still an entity without this component. + unsafe { self.data_id.get_unchecked_mut(id) }.write(self.data.len() as Index); + // NOTE: `id` originally of the type `Index` so the cast back won't + // overflow. self.entity_id.push(id as Index); - self.data.push(v); + self.data.push(SyncUnsafeCell::new(v)); } unsafe fn remove(&mut self, id: Index) -> T { - let did = self.data_id.get_unchecked(id as usize).assume_init(); + // NOTE: cast to usize won't overflow since `insert` would have failed + // to allocate. + // SAFETY (get_unchecked and assume_init): Caller required to have + // called `insert` with this `id`. + let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() }; let last = *self.entity_id.last().unwrap(); - self.data_id - .get_unchecked_mut(last as usize) - .as_mut_ptr() - .write(did); + // NOTE: cast to usize won't overflow since `insert` would have failed + // to allocate. + // SAFETY: indices in `self.entity_id` correspond to components present + // in this storage so this will be in-bounds. + unsafe { self.data_id.get_unchecked_mut(last as usize) }.write(did); + // NOTE: casting the index in the dense data array to usize won't + // overflow since the maximum number of components if limited to + // `Index::MAX + 1`. self.entity_id.swap_remove(did as usize); - self.data.swap_remove(did as usize) + self.data.swap_remove(did as usize).0.into_inner() } } +// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when +// provided distinct indices. unsafe impl DistinctStorage for DenseVecStorage {} /// A null storage type, used for cases where the component /// doesn't contain any data and instead works as a simple flag. -pub struct NullStorage(T); +pub struct NullStorage(PhantomData); -impl UnprotectedStorage for NullStorage -where - T: Default, -{ +impl Default for NullStorage { + fn default() -> Self { + use core::mem::size_of; + + assert_eq!(size_of::(), 0, "NullStorage can only be used with ZST"); + + NullStorage(PhantomData) + } +} + +impl UnprotectedStorage for NullStorage { #[cfg(feature = "nightly")] - type AccessMut<'a> - where - T: 'a, - = &'a mut T; + type AccessMut<'a> = &'a mut T where T: 'a; - unsafe fn clean(&mut self, _has: B) + unsafe fn clean(&mut self, has: B) where B: BitSetLike, { + for id in has.iter() { + // SAFETY: Caller required to provide mask that keeps track of the + // existing elements, so every `id` is valid to use with `remove`. 
+ unsafe { self.remove(id) }; + } } unsafe fn get(&self, _: Index) -> &T { - &self.0 - } - - unsafe fn get_mut(&mut self, _: Index) -> &mut T { - &mut self.0 + // SAFETY: Because the caller is required by the safety docs to first + // insert a component with this index, this corresponds to an instance + // of the ZST we conceptually own. The caller also must manage the + // aliasing of accesses via get/get_mut. + // + // Self::default asserts that `T` is a ZST which makes generating a + // reference from a dangling pointer not UB. + unsafe { &*NonNull::dangling().as_ptr() } + } + + unsafe fn get_mut(&self, _: Index) -> &mut T { + // SAFETY: Because the caller is required by the safety docs to first + // insert a component with this index, this corresponds to an instance + // of the ZST we conceptually own. The caller also must manage the + // aliasing of accesses via get/get_mut. + // + // Self::default asserts that `T` is a ZST which makes generating a + // reference from a dangling pointer not UB. + unsafe { &mut *NonNull::dangling().as_ptr() } + } + + unsafe fn insert(&mut self, _: Index, v: T) { + // We rely on the caller tracking the presence of the ZST via the mask. + // + // We need to forget this to avoid the drop impl from running so the + // storage logically is taking ownership of this instance of the ZST. + core::mem::forget(v) } - unsafe fn insert(&mut self, _: Index, _: T) {} - unsafe fn remove(&mut self, _: Index) -> T { - Default::default() + // SAFETY: Because the caller is required by the safety docs to first + // insert a component with this index, this corresponds to an instance + // of the ZST we conceptually own. + // + // Self::default asserts that `T` is a ZST which makes reading from a + // dangling pointer not UB. + unsafe { ptr::read(NonNull::dangling().as_ptr()) } } } -impl Default for NullStorage -where - T: Default, -{ - fn default() -> Self { - use std::mem::size_of; - - assert_eq!(size_of::(), 0, "NullStorage can only be used with ZST"); - - NullStorage(Default::default()) - } -} - -/// This is safe because you cannot mutate ZSTs. +// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when +// provided distinct indices. unsafe impl DistinctStorage for NullStorage {} /// Vector storage. Uses a simple `Vec`. Supposed to have maximum @@ -269,7 +351,7 @@ unsafe impl DistinctStorage for NullStorage {} /// entity IDs. These can be compared to other `VecStorage`s, to /// other `DefaultVecStorage`s, and to `Entity::id()`s for live /// entities. -pub struct VecStorage(Vec>); +pub struct VecStorage(Vec>>); impl Default for VecStorage { fn default() -> Self { @@ -282,62 +364,117 @@ impl SliceAccess for VecStorage { #[inline] fn as_slice(&self) -> &[Self::Element] { - self.0.as_slice() + let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get(); + // SAFETY: The only place that mutably accesses these elements via a + // shared reference is the impl of `UnprotectedStorage::get_mut` which + // requires callers to avoid calling safe methods with `&self` while + // those mutable references are in use. 
+ unsafe { &*unsafe_cell_slice_ptr } } #[inline] fn as_mut_slice(&mut self) -> &mut [Self::Element] { - self.0.as_mut_slice() + SyncUnsafeCell::as_slice_mut(self.0.as_mut_slice()) } } impl UnprotectedStorage for VecStorage { #[cfg(feature = "nightly")] - type AccessMut<'a> - where - T: 'a, - = &'a mut T; + type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, has: B) where B: BitSetLike, { - use std::ptr; for (i, v) in self.0.iter_mut().enumerate() { + // NOTE: `as` cast is safe since the index used for insertion is a + // `u32` so the indices will never be over `u32::MAX`. + const _: Index = 0u32; if has.contains(i as u32) { // drop in place - ptr::drop_in_place(&mut *v.as_mut_ptr()); + let v_inner = v.get_mut(); + // SAFETY: Present in the provided mask. All components are + // considered removed after a call to `clean`. + unsafe { v_inner.assume_init_drop() }; } } - self.0.set_len(0); } unsafe fn get(&self, id: Index) -> &T { - &*self.0.get_unchecked(id as usize).as_ptr() - } - - unsafe fn get_mut(&mut self, id: Index) -> &mut T { - &mut *self.0.get_unchecked_mut(id as usize).as_mut_ptr() + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: Caller required to call `insert` with this `id` (with no + // following call to `remove` with that id or to `clean`). + let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); + // SAFETY: Caller required to manage aliasing between this and + // `get_mut`. + let maybe_uninit = unsafe { &*ptr }; + // SAFETY: Requirement to have `insert`ed this component ensures that it + // will be initialized. + unsafe { maybe_uninit.assume_init_ref() } + } + + unsafe fn get_mut(&self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: Caller required to call `insert` with this `id` (with no + // following call to `remove` with that id or to `clean`). + let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); + // SAFETY: Caller required to manage aliasing (both ensuring + // `get_mut`/`get` is called without aliasing refs returned here, and + // ensuring other safe methods that take `&self` aren't called while the + // returned mutable references are alive). + let maybe_uninit = unsafe { &mut *ptr }; + // SAFETY: Requirement to have `insert`ed this component ensures that it + // will be initialized. + unsafe { maybe_uninit.assume_init_mut() } } unsafe fn insert(&mut self, id: Index, v: T) { - let id = id as usize; + let id = if Index::BITS > usize::BITS { + // Saturate the cast to usize::MAX so if this overflows usize the + // allocation below will fail. + core::cmp::min(id, usize::MAX as Index) as usize + } else { + id as usize + }; + if self.0.len() <= id { - let delta = id + 1 - self.0.len(); + // NOTE: saturating add ensures that if this computation would + // overflow it will instead fail the allocation when calling + // reserve. + let delta = if Index::BITS >= usize::BITS { + id.saturating_add(1) + } else { + id + 1 + } - self.0.len(); self.0.reserve(delta); - self.0.set_len(id + 1); + // NOTE: Allocation would have failed if this addition would overflow + // SAFETY: MaybeUninit elements don't require initialization and + // the reserve call ensures the capacity will be sufficient for this + // new length. + unsafe { self.0.set_len(id + 1) }; } // Write the value without reading or dropping // the (currently uninitialized) memory. 
- *self.0.get_unchecked_mut(id as usize) = MaybeUninit::new(v); + // SAFETY: The length of the vec was extended to contain this index + // above. + unsafe { self.0.get_unchecked_mut(id) }.get_mut().write(v); } unsafe fn remove(&mut self, id: Index) -> T { - use std::ptr; - ptr::read(self.get(id)) + // SAFETY: Caller required to have called `insert` with this `id`. + // Exclusive `&mut self` ensures no aliasing is occuring. + let component_ref = unsafe { self.get(id) }; + // SAFETY: Caller not allowed to call other methods that access this + // `id` as an initialized value after this call to `remove` so it is + // safe to move out of this. + unsafe { ptr::read(component_ref) } } } +// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when +// provided distinct indices. unsafe impl DistinctStorage for VecStorage {} /// Vector storage, like `VecStorage`, but allows safe access to the @@ -348,7 +485,7 @@ unsafe impl DistinctStorage for VecStorage {} /// `as_slice()` and `as_mut_slice()` indices correspond to entity IDs. /// These can be compared to other `DefaultVecStorage`s, to other /// `VecStorage`s, and to `Entity::id()`s for live entities. -pub struct DefaultVecStorage(Vec); +pub struct DefaultVecStorage(Vec>); impl Default for DefaultVecStorage { fn default() -> Self { @@ -356,15 +493,33 @@ impl Default for DefaultVecStorage { } } +impl SliceAccess for DefaultVecStorage { + type Element = T; + + /// Returns a slice of all the components in this storage. + #[inline] + fn as_slice(&self) -> &[Self::Element] { + let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get(); + // SAFETY: The only place that mutably accesses these elements via a + // shared reference is the impl of `UnprotectedStorage::get_mut` which + // requires callers to avoid calling safe methods with `&self` while + // those mutable references are in use. + unsafe { &*unsafe_cell_slice_ptr } + } + + /// Returns a mutable slice of all the components in this storage. + #[inline] + fn as_mut_slice(&mut self) -> &mut [Self::Element] { + SyncUnsafeCell::as_slice_mut(self.0.as_mut_slice()) + } +} + impl UnprotectedStorage for DefaultVecStorage where T: Default, { #[cfg(feature = "nightly")] - type AccessMut<'a> - where - T: 'a, - = &'a mut T; + type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) where @@ -374,51 +529,50 @@ where } unsafe fn get(&self, id: Index) -> &T { - self.0.get_unchecked(id as usize) + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: See `VecStorage` impl. + let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); + // SAFETY: See `VecStorage` impl. + unsafe { &*ptr } } - unsafe fn get_mut(&mut self, id: Index) -> &mut T { - self.0.get_unchecked_mut(id as usize) + unsafe fn get_mut(&self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: See `VecStorage` impl. + let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } } unsafe fn insert(&mut self, id: Index, v: T) { - let id = id as usize; + let id = if Index::BITS > usize::BITS { + // Saturate the cast to usize::MAX so if this overflows usize the + // allocation below will fail. 
+ core::cmp::min(id, usize::MAX as Index) as usize + } else { + id as usize + }; if self.0.len() <= id { // fill all the empty slots with default values self.0.resize_with(id, Default::default); // store the desired value - self.0.push(v) + self.0.push(SyncUnsafeCell::new(v)) } else { // store the desired value directly - self.0[id] = v; + *self.0[id].get_mut() = v; } } unsafe fn remove(&mut self, id: Index) -> T { - // make a new default value - let mut v = T::default(); - // swap it into the vec - std::ptr::swap(self.0.get_unchecked_mut(id as usize), &mut v); - // return the old value - v + // Take value leaving a default instance behind + // SAFETY: Caller required to have called `insert` with this `id`. + core::mem::take(unsafe { self.0.get_unchecked_mut(id as usize) }.get_mut()) } } +// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when +// provided distinct indices. unsafe impl DistinctStorage for DefaultVecStorage {} - -impl SliceAccess for DefaultVecStorage { - type Element = T; - - /// Returns a slice of all the components in this storage. - #[inline] - fn as_slice(&self) -> &[Self::Element] { - self.0.as_slice() - } - - /// Returns a mutable slice of all the components in this storage. - #[inline] - fn as_mut_slice(&mut self) -> &mut [Self::Element] { - self.0.as_mut_slice() - } -} diff --git a/src/storage/sync_unsafe_cell.rs b/src/storage/sync_unsafe_cell.rs new file mode 100644 index 000000000..aa89f0cd6 --- /dev/null +++ b/src/storage/sync_unsafe_cell.rs @@ -0,0 +1,53 @@ +// TODO: promote to the whole crate +#![deny(unsafe_op_in_unsafe_fn)] +//! Stand in for core::cell::SyncUnsafeCell since that is still unstable. +//! +//! TODO: Remove when core::cell::SyncUnsafeCell is stabilized + +use core::cell::UnsafeCell; +use core::ops::{Deref, DerefMut}; + +#[repr(transparent)] +pub struct SyncUnsafeCell(pub UnsafeCell); + +// SAFETY: Proper synchronization is left to the user of the unsafe `get` call. +// `UnsafeCell` itself doesn't implement `Sync` to prevent accidental mis-use. +unsafe impl Sync for SyncUnsafeCell {} + +impl SyncUnsafeCell { + pub fn new(value: T) -> Self { + Self(UnsafeCell::new(value)) + } + + pub fn as_cell_of_slice(slice: &[Self]) -> &SyncUnsafeCell<[T]> { + // SAFETY: `T` has the same memory layout as `SyncUnsafeCell`. + unsafe { &*(slice as *const [Self] as *const SyncUnsafeCell<[T]>) } + } + + pub fn as_slice_mut(slice: &mut [Self]) -> &mut [T] { + // SAFETY: `T` has the same memory layout as `SyncUnsafeCell` and we + // have a mutable reference which means the `SyncUnsafeCell` can be + // safely removed since we have exclusive access here. + unsafe { &mut *(slice as *mut [Self] as *mut [T]) } + } +} + +impl Deref for SyncUnsafeCell { + type Target = UnsafeCell; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for SyncUnsafeCell { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Default for SyncUnsafeCell { + fn default() -> Self { + Self::new(Default::default()) + } +} From 8366ddbb85ee743709cbd790b928616d838af127 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 30 Jul 2022 23:23:37 -0400 Subject: [PATCH 02/47] Update safety comments on uses of `UnprotectedStorage::get_mut` and change how `UnprotectedStorage::clean` works. * `clean` now always clears all components even if dropping the storage would have dropped them automatically (this helps address an edge case with `DenseVecStorage::insert` overflowing a `u32`). 
* Add safety requirement to `clean` that indicates the caller should ensure the mask has been cleared even if unwinding occurs from the `clean` call. * Ensured uses of `clean` met this requirement. * Also continue expanding application of `unsafe_op_in_unsafe_fn` lint. * Fixed typo from previous commit where FlaggedStorage::get was using get_mut internally. --- src/changeset.rs | 56 +++++++++++++++--------------- src/storage/entry.rs | 14 +++++--- src/storage/flagged.rs | 5 +-- src/storage/mod.rs | 60 +++++++++++++++++++++++---------- src/storage/restrict.rs | 5 ++- src/storage/storages.rs | 13 ++++--- src/storage/sync_unsafe_cell.rs | 2 -- 7 files changed, 93 insertions(+), 62 deletions(-) diff --git a/src/changeset.rs b/src/changeset.rs index 3cbbe9b7e..5a5098055 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -1,3 +1,6 @@ +// TODO: promote to the whole crate +#![deny(unsafe_op_in_unsafe_fn)] + //! Provides a changeset that can be collected from an iterator. use std::{iter::FromIterator, ops::AddAssign}; @@ -62,28 +65,28 @@ impl ChangeSet { T: AddAssign, { if self.mask.contains(entity.id()) { - // SAFETY: we checked the mask, thus it's safe to call - unsafe { - *self.inner.get_mut(entity.id()) += value; - } + // SAFETY: We have exclusive access (which ensures no aliasing or + // concurrent calls from other threads) and we checked the mask, + // thus it's safe to call. + unsafe { *self.inner.get_mut(entity.id()) += value }; } else { - // SAFETY: we checked the mask, thus it's safe to call - unsafe { - self.inner.insert(entity.id(), value); - } + // SAFETY: We checked the mask, thus it's safe to call. + unsafe { self.inner.insert(entity.id(), value) }; self.mask.add(entity.id()); } } /// Clear the changeset pub fn clear(&mut self) { - for id in &self.mask { - // SAFETY: we checked the mask, thus it's safe to call - unsafe { - self.inner.remove(id); - } - } - self.mask.clear(); + // NOTE: We replace with default empty mask temporarily to protect against + // unwinding from `Drop` of components. + let mut mask_temp = core::mem::take(&mut self.mask); + // SAFETY: `self.mask` is the correct mask as specified. We swap in a + // temporary empty mask to ensure if this unwinds that the mask will be + // cleared. + unsafe { self.inner.clean(&mask_temp) }; + mask_temp.clear(); + self.mask = mask_temp; } } @@ -116,17 +119,16 @@ impl<'a, T> Join for &'a mut ChangeSet { type Type = &'a mut T; type Value = &'a mut DenseVecStorage; - // SAFETY: No unsafe code and no invariants to meet. unsafe fn open(self) -> (Self::Mask, Self::Value) { (&self.mask, &mut self.inner) } - // SAFETY: No unsafe code and no invariants to meet. - // `DistinctStorage` invariants are also met, but no `ParJoin` implementation - // exists yet. + // `DistinctStorage` invariants are also met, but no `ParJoin` + // implementation exists yet. unsafe fn get(v: &mut Self::Value, id: Index) -> Self::Type { let value: *mut Self::Value = v as *mut Self::Value; - (*value).get_mut(id) + // SAFETY: S-TODO modify Join trait + unsafe { (*value).get_mut(id) } } } @@ -135,16 +137,15 @@ impl<'a, T> Join for &'a ChangeSet { type Type = &'a T; type Value = &'a DenseVecStorage; - // SAFETY: No unsafe code and no invariants to meet. unsafe fn open(self) -> (Self::Mask, Self::Value) { (&self.mask, &self.inner) } - // SAFETY: No unsafe code and no invariants to meet. - // `DistinctStorage` invariants are also met, but no `ParJoin` implementation + // `DistinctStorage` invariants are met, but no `ParJoin` implementation // exists yet. 
unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - value.get(id) + // SAFETY: S-TODO + unsafe { value.get(id) } } } @@ -155,16 +156,13 @@ impl Join for ChangeSet { type Type = T; type Value = DenseVecStorage; - // SAFETY: No unsafe code and no invariants to meet. unsafe fn open(self) -> (Self::Mask, Self::Value) { (self.mask, self.inner) } - // SAFETY: No unsafe code and no invariants to meet. - // `DistinctStorage` invariants are also met, but no `ParJoin` implementation - // exists yet. unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - value.remove(id) + // SAFETY: S-TODO + unsafe { value.remove(id) } } } diff --git a/src/storage/entry.rs b/src/storage/entry.rs index 55ee1c745..5cde07443 100644 --- a/src/storage/entry.rs +++ b/src/storage/entry.rs @@ -147,15 +147,18 @@ where // This is HACK. See implementation of Join for &'a mut Storage<'e, T, D> for // details why it is necessary. let storage: *mut Storage<'b, T, D> = *value as *mut Storage<'b, T, D>; - if (*storage).data.mask.contains(id) { + // SAFETY: S-TODO redo when updating join trait + if unsafe { &*storage }.data.mask.contains(id) { StorageEntry::Occupied(OccupiedEntry { id, - storage: &mut *storage, + // SAFETY: S-TODO redo when updating join trait + storage: unsafe { &mut *storage }, }) } else { StorageEntry::Vacant(VacantEntry { id, - storage: &mut *storage, + // SAFETY: S-TODO redo when updating join trait + storage: unsafe { &mut *storage }, }) } } @@ -192,6 +195,7 @@ where { /// Get a mutable reference to the component associated with the entity. pub fn get_mut(&mut self) -> AccessMutReturn<'_, T> { + // S-TODO update safety comment after changing Join // SAFETY: This is safe since `OccupiedEntry` is only constructed // after checking the mask. unsafe { self.storage.data.inner.get_mut(self.id) } @@ -200,6 +204,7 @@ where /// Converts the `OccupiedEntry` into a mutable reference bounded by /// the storage's lifetime. pub fn into_mut(self) -> AccessMutReturn<'a, T> { + // S-TODO update safety comment after changing Join // SAFETY: This is safe since `OccupiedEntry` is only constructed // after checking the mask. unsafe { self.storage.data.inner.get_mut(self.id) } @@ -231,10 +236,11 @@ where { /// Inserts a value into the storage. pub fn insert(self, component: T) -> AccessMutReturn<'a, T> { - self.storage.data.mask.add(self.id); + // S-TODO safety comment incomplete // SAFETY: This is safe since we added `self.id` to the mask. unsafe { self.storage.data.inner.insert(self.id, component); + self.storage.data.mask.add(self.id); self.storage.data.inner.get_mut(self.id) } } diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index 54512499f..f74c3cb8b 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -1,6 +1,3 @@ -// TODO: promote to the whole crate -#![deny(unsafe_op_in_unsafe_fn)] - use std::marker::PhantomData; use hibitset::BitSetLike; @@ -221,7 +218,7 @@ impl> UnprotectedStorage for FlaggedSt unsafe fn get(&self, id: Index) -> &C { // SAFETY: Requirements passed to caller. - unsafe { self.storage.get_mut(id) } + unsafe { self.storage.get(id) } } #[cfg(feature = "nightly")] diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 4c4632512..4a9fa8e50 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -1,3 +1,6 @@ +// TODO: promote to the whole crate +#![deny(unsafe_op_in_unsafe_fn)] + //! Component storage types, implementations for component joins, etc. 
#[cfg(feature = "nightly")] @@ -175,11 +178,15 @@ impl MaskedStorage { /// Clear the contents of this storage. pub fn clear(&mut self) { - // SAFETY: `self.mask` is the correct mask as specified. - unsafe { - self.inner.clean(&self.mask); - } - self.mask.clear(); + // NOTE: We replace with default empty mask temporarily to protect against + // unwinding from `Drop` of components. + let mut mask_temp = core::mem::take(&mut self.mask); + // SAFETY: `self.mask` is the correct mask as specified. We swap in a + // temporary empty mask to ensure if this unwinds that the mask will be + // cleared. + unsafe { self.inner.clean(&mask_temp) }; + mask_temp.clear(); + self.mask = mask_temp; } /// Remove an element by a given index. @@ -195,7 +202,8 @@ impl MaskedStorage { /// Drop an element by a given index. pub fn drop(&mut self, id: Index) { if self.mask.remove(id) { - // SAFETY: We checked the mask (`remove` returned `true`) + // SAFETY: We checked the mask and removed the id before calling + // drop (`remove` returned `true`). unsafe { self.inner.drop(id); } @@ -332,7 +340,9 @@ where /// Tries to mutate the data associated with an `Entity`. pub fn get_mut(&mut self, e: Entity) -> Option> { if self.data.mask.contains(e.id()) && self.entities.is_alive(e) { - // SAFETY: We checked the mask, so all invariants are met. + // SAFETY: We have exclusive access (which ensures no aliasing or + // concurrent calls from other threads) and we checked the mask, + // thus it's safe to call. Some(unsafe { self.data.inner.get_mut(e.id()) }) } else { None @@ -349,8 +359,10 @@ where if self.entities.is_alive(e) { let id = e.id(); if self.data.mask.contains(id) { - // SAFETY: We checked the mask, so all invariants are met. - std::mem::swap(&mut v, unsafe { self.data.inner.get_mut(id).deref_mut() }); + // SAFETY: We have exclusive access (which ensures no aliasing or + // concurrent calls from other threads) and we checked the mask, so + // all invariants are met. + std::mem::swap(&mut v, unsafe { self.data.inner.get_mut(id) }.deref_mut()); Ok(Some(v)) } else { // SAFETY: The mask was previously empty, so it is safe to insert. @@ -412,15 +424,15 @@ where type Type = &'a T; type Value = &'a T::Storage; - // SAFETY: No unsafe code and no invariants. unsafe fn open(self) -> (Self::Mask, Self::Value) { (&self.data.mask, &self.data.inner) } - // SAFETY: Since we require that the mask was checked, an element for `i` must - // have been inserted without being removed. unsafe fn get(v: &mut Self::Value, i: Index) -> &'a T { - v.get(i) + // S-TODO probably more to add to this comment + // SAFETY: Since we require that the mask was checked, an element for + // `i` must have been inserted without being removed. + unsafe { v.get(i) } } } @@ -467,7 +479,8 @@ where // to abstract mutable/immutable state at the moment, so we have to hack // our way through it. let value: *mut Self::Value = v as *mut Self::Value; - (*value).get_mut(i) + // SAFETY: S-TODO modify Join trait + unsafe { (*value).get_mut(i) } } } @@ -513,7 +526,8 @@ pub trait UnprotectedStorage: TryDefault { where Self: 'a; - /// Clean the storage given a bitset with bits set for valid indices. + /// Clean the storage given a bitset with bits set for valid indices + /// dropping all existing components. /// /// Allows us to drop the storage without leaking components. /// @@ -521,6 +535,9 @@ pub trait UnprotectedStorage: TryDefault { /// /// May only be called with the mask which keeps track of the elements /// existing in this storage. + /// + /// If this unwinds (e.g. 
due to a drop impl panicing), the mask should + /// still be cleared. unsafe fn clean(&mut self, has: B) where B: BitSetLike; @@ -578,7 +595,11 @@ pub trait UnprotectedStorage: TryDefault { /// the tracking mask is sufficient to call this method. /// /// There must be no extant aliasing references to this component (i.e. - /// obtained with the same `id` from `get` or `get_mut`). + /// obtained with the same `id` from `get` or `get_mut`). Additionally, + /// while the references returned here are in use, safe methods on this + /// type that take `&self` (e.g. [`SliceAccess::as_slice`], + /// [`Tracked::channel`]) must not be called and any references returned by + /// such methods must no longer be alive when `get_mut` is called. /// /// Unless this type implements `DistinctStorage`, calling this from /// multiple threads at once is unsound. @@ -620,8 +641,13 @@ pub trait UnprotectedStorage: TryDefault { /// /// May only be called if an element with `id` was `insert`ed and not yet /// removed / dropped. + /// + /// Caller must ensure this is cleared from the mask even if the drop impl + /// of the component panics and this unwinds. Usually, this can be + /// accomplished by removing the id from the mask just before calling this. unsafe fn drop(&mut self, id: Index) { - self.remove(id); + // SAFETY: Requirements passed to the caller. + unsafe { self.remove(id) }; } } diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index 6ec1c5f79..b60091a30 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -147,7 +147,8 @@ where } unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - let value: &'rf mut Self::Value = &mut *(value as *mut Self::Value); + // SAFETY: S-TODO update when changing Join trait + let value: &'rf mut Self::Value = unsafe { &mut *(value as *mut Self::Value) }; PairedStorage { index: id, storage: value.0, @@ -249,6 +250,7 @@ where /// Gets the component related to the current entry without checking whether /// the storage has it or not. pub fn get_mut_unchecked(&mut self) -> AccessMutReturn<'_, C> { + // SAFETY: S-TODO update comment when Join trait is fixed. unsafe { self.storage.borrow_mut().get_mut(self.index) } } } @@ -291,6 +293,7 @@ where /// threads. pub fn get_mut(&mut self, entity: Entity) -> Option> { if self.bitset.borrow().contains(entity.id()) && self.entities.is_alive(entity) { + // SAFETY: S-TODO update comment when Join trait is fixed. Some(unsafe { self.storage.borrow_mut().get_mut(entity.id()) }) } else { None diff --git a/src/storage/storages.rs b/src/storage/storages.rs index 161663847..9f588737a 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -1,5 +1,3 @@ -// TODO: promote to the whole crate -#![deny(unsafe_op_in_unsafe_fn)] //! Different types of storages you can use for your components. use core::{marker::PhantomData, mem::MaybeUninit, ptr, ptr::NonNull}; @@ -42,7 +40,7 @@ impl UnprotectedStorage for BTreeStorage { where B: BitSetLike, { - // nothing to do (components will be dropped with the storage) + self.0.clear(); } unsafe fn get(&self, id: Index) -> &T { @@ -89,7 +87,7 @@ impl UnprotectedStorage for HashMapStorage { where B: BitSetLike, { - // nothing to do (components will be dropped with the storage) + self.0.clear(); } unsafe fn get(&self, id: Index) -> &T { @@ -181,7 +179,12 @@ impl UnprotectedStorage for DenseVecStorage { where B: BitSetLike, { - // nothing to do (components will be dropped with the storage) + // NOTE: clearing `data` may panic due to drop impls. 
So to makes sure + // everything is cleared and ensure `remove` is sound we clear `data` + // last. + self.data_id.clear(); + self.entity_id.clear(); + self.data.clear(); } unsafe fn get(&self, id: Index) -> &T { diff --git a/src/storage/sync_unsafe_cell.rs b/src/storage/sync_unsafe_cell.rs index aa89f0cd6..3e0f6b31e 100644 --- a/src/storage/sync_unsafe_cell.rs +++ b/src/storage/sync_unsafe_cell.rs @@ -1,5 +1,3 @@ -// TODO: promote to the whole crate -#![deny(unsafe_op_in_unsafe_fn)] //! Stand in for core::cell::SyncUnsafeCell since that is still unstable. //! //! TODO: Remove when core::cell::SyncUnsafeCell is stabilized From ba8b976c7fb4063b3a57ba98dfa4d5c7f73ca291 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 30 Jul 2022 23:44:15 -0400 Subject: [PATCH 03/47] Change GAT where clause location to match new formatting recommendation --- src/storage/deref_flagged.rs | 8 +++----- src/storage/flagged.rs | 5 +---- src/storage/generic.rs | 12 ++++-------- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/src/storage/deref_flagged.rs b/src/storage/deref_flagged.rs index f4317fb71..0391a5e57 100644 --- a/src/storage/deref_flagged.rs +++ b/src/storage/deref_flagged.rs @@ -56,10 +56,8 @@ where } impl> UnprotectedStorage for DerefFlaggedStorage { - type AccessMut<'a> - where - T: 'a, - = FlaggedAccessMut<'a, >::AccessMut<'a>, C>; + type AccessMut<'a> = FlaggedAccessMut<'a, >::AccessMut<'a>, C> + where T: 'a; unsafe fn clean(&mut self, has: B) where @@ -72,7 +70,7 @@ impl> UnprotectedStorage for DerefFlag self.storage.get(id) } - unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_> { + unsafe fn get_mut(&self, id: Index) -> Self::AccessMut<'_> { let emit = self.emit_event(); FlaggedAccessMut { channel: &mut self.channel, diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index f74c3cb8b..56014cde3 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -203,10 +203,7 @@ where impl> UnprotectedStorage for FlaggedStorage { #[cfg(feature = "nightly")] - type AccessMut<'a> - where - T: 'a, - = >::AccessMut<'a>; + type AccessMut<'a> = >::AccessMut<'a> where T: 'a; unsafe fn clean(&mut self, has: B) where diff --git a/src/storage/generic.rs b/src/storage/generic.rs index 68bfbefe6..b674d6da6 100644 --- a/src/storage/generic.rs +++ b/src/storage/generic.rs @@ -121,10 +121,8 @@ where T: Component, { #[cfg(feature = "nightly")] - type AccessMut<'b> - where - Self: 'b, - = <::Storage as UnprotectedStorage>::AccessMut<'b>; + type AccessMut<'b> = <::Storage as UnprotectedStorage>::AccessMut<'b> + where Self: 'b; type Component = T; fn get_mut(&mut self, entity: Entity) -> Option> { @@ -162,10 +160,8 @@ where T: Component, { #[cfg(feature = "nightly")] - type AccessMut<'c> - where - Self: 'c, - = <::Storage as UnprotectedStorage>::AccessMut<'c>; + type AccessMut<'c> = <::Storage as UnprotectedStorage>::AccessMut<'c> + where Self: 'c; type Component = T; fn get_mut(&mut self, entity: Entity) -> Option> { From ac2146ded48234efe18eeb665da6ab4c56f5237f Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 30 Jul 2022 23:48:59 -0400 Subject: [PATCH 04/47] Update DerefFlaggedStorage to account for changes, get_mut impl postponed since we most likely want to transition this to a streaming only storage (which is thus given &mut access). 
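For context, the blocker is that the access wrapper returned by `get_mut`
needs `&mut` access to the event channel in order to emit `Modified` lazily,
which cannot be handed out from the new `&self` receiver without further
interior-mutability work. A rough sketch of the streaming/lending shape this
could take (hypothetical names; a plain `Vec` stands in for the event channel;
not code in this patch):

    struct Channel(Vec<u32>);

    struct LendingStorage<T> {
        channel: Channel,
        items: Vec<T>,
    }

    struct AccessMut<'a, T> {
        channel: &'a mut Channel,
        id: u32,
        item: &'a mut T,
    }

    impl<T> LendingStorage<T> {
        // Exclusive receiver: the returned wrapper reborrows both the
        // component and the channel, so no interior mutability is needed;
        // the trade-off is that only one access can be alive at a time.
        fn get_mut(&mut self, id: u32) -> AccessMut<'_, T> {
            AccessMut {
                channel: &mut self.channel,
                id,
                item: &mut self.items[id as usize],
            }
        }
    }

    impl<'a, T> AccessMut<'a, T> {
        fn write(&mut self, value: T) {
            // Emitting the modification event requires `&mut Channel`, which
            // the wrapper holds directly because the whole storage was
            // borrowed exclusively for this access.
            self.channel.0.push(self.id);
            *self.item = value;
        }
    }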
--- src/storage/deref_flagged.rs | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/storage/deref_flagged.rs b/src/storage/deref_flagged.rs index 0391a5e57..a484749c4 100644 --- a/src/storage/deref_flagged.rs +++ b/src/storage/deref_flagged.rs @@ -63,36 +63,41 @@ impl> UnprotectedStorage for DerefFlag where B: BitSetLike, { - self.storage.clean(has); + // SAFETY: Requirements passed to caller. + unsafe { self.storage.clean(has) }; } unsafe fn get(&self, id: Index) -> &C { - self.storage.get(id) + // SAFETY: Requirements passed to caller. + unsafe { self.storage.get(id) } } - unsafe fn get_mut(&self, id: Index) -> Self::AccessMut<'_> { - let emit = self.emit_event(); + unsafe fn get_mut(&self, _id: Index) -> Self::AccessMut<'_> { + /*let emit = self.emit_event(); FlaggedAccessMut { channel: &mut self.channel, emit, id, access: self.storage.get_mut(id), phantom: PhantomData, - } + }*/ + todo!("adapt to streaming only") } unsafe fn insert(&mut self, id: Index, comp: C) { if self.emit_event() { self.channel.single_write(ComponentEvent::Inserted(id)); } - self.storage.insert(id, comp); + // SAFETY: Requirements passed to caller. + unsafe { self.storage.insert(id, comp) }; } unsafe fn remove(&mut self, id: Index) -> C { if self.emit_event() { self.channel.single_write(ComponentEvent::Removed(id)); } - self.storage.remove(id) + // SAFETY: Requirements passed to caller. + unsafe { self.storage.remove(id) } } } @@ -116,6 +121,7 @@ impl Tracked for DerefFlaggedStorage { } } +/// S-TODO document pub struct FlaggedAccessMut<'a, A, C> { channel: &'a mut EventChannel, emit: bool, From bf620e1d9a060d6627533ca49fe63fa722ba6603 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 31 Jul 2022 18:00:03 -0400 Subject: [PATCH 05/47] Refactor `UnprotectedStorage` and add `SharedGetAccessMut` trait Implementations and uses of these traits are not yet changed. However, hopefully this is the final form needed to have storages that only support lending/streaming joins while also allowing some storages to support regular `Iterator` like joins and parallel joins as well as allowing storages where `UnprotectedStorage::AccessMut` doesn't implement `DerefMut` (like a planned variant of the flagged storage). --- src/storage/mod.rs | 137 ++++++++++++++++++++++++++++----------------- 1 file changed, 87 insertions(+), 50 deletions(-) diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 4a9fa8e50..e4f54be89 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -118,8 +118,8 @@ where /// This is a marker trait which requires you to uphold the following guarantee: /// -/// > Multiple threads may call `get_mut()` with distinct indices without -/// causing > undefined behavior. +/// > Multiple threads may call `shared_get_access_mut()` with distinct indices +/// without causing > undefined behavior. /// /// This is for example valid for `Vec`: /// @@ -127,15 +127,14 @@ where /// vec![1, 2, 3]; /// ``` /// -/// We may modify both element 1 and 2 at the same time; indexing the vector -/// mutably does not modify anything else than the respective elements. +/// We may modify both element 1 and 2 at the same time. /// /// As a counter example, we may have some kind of cached storage; it caches /// elements when they're retrieved, so pushes a new element to some /// cache-vector. This storage is not allowed to implement `DistinctStorage`. /// /// Implementing this trait marks the storage safe for concurrent mutation (of -/// distinct elements), thus allows `join_par()`. 
+/// distinct elements), thus allows `par_join()`. pub unsafe trait DistinctStorage {} /// The status of an `insert()`ion into a storage. @@ -518,11 +517,29 @@ where } } +macro_rules! get_access_mut_docs { + ($fn_definition:item) => { + /// Gets mutable access to the the data associated with an `Index`. + /// + /// This is unsafe because the external set used to protect this storage is + /// absent. + /// + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + $fn_definition + }; +} + /// Used by the framework to quickly join components. pub trait UnprotectedStorage: TryDefault { /// The wrapper through with mutable access of a component is performed. #[cfg(feature = "nightly")] - type AccessMut<'a>: DerefMut + type AccessMut<'a> where Self: 'a; @@ -542,9 +559,10 @@ pub trait UnprotectedStorage: TryDefault { where B: BitSetLike; - /// Tries reading the data associated with an `Index`. - /// This is unsafe because the external set used - /// to protect this storage is absent. + /// Gets a shared reference to the data associated with an `Index`. + /// + /// This is unsafe because the external set used to protect this storage is + /// absent. /// /// # Safety /// @@ -558,61 +576,34 @@ pub trait UnprotectedStorage: TryDefault { /// (i.e. obtained from `get_mut` with the same `id`). unsafe fn get(&self, id: Index) -> &T; - /// Tries mutating the data associated with an `Index`. - /// This is unsafe because the external set used - /// to protect this storage is absent. + /// Gets an exclusive reference to the data associated with an `Index`. + /// + /// This is unsafe because the external set used to protect this storage is + /// absent. /// /// # Safety /// - /// May only be called after a call to `insert` with `id` and - /// no following call to `remove` with `id` or to `clean`. + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. /// /// A mask should keep track of those states, and an `id` being contained in /// the tracking mask is sufficient to call this method. - /// - /// There must be no extant aliasing references to this component (i.e. - /// obtained with the same `id` from `get` or `get_mut`). Additionally, - /// while the references returned here are in use, safe methods on this - /// type that take `&self` (e.g. [`SliceAccess::as_slice`], - /// [`Tracked::channel`]) must not be called and any references returned by - /// such methods must no longer be alive when `get_mut` is called. - /// - /// Unless this type implements `DistinctStorage`, calling this from - /// multiple threads at once is unsound. + unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_>; + #[cfg(feature = "nightly")] - unsafe fn get_mut(&self, id: Index) -> Self::AccessMut<'_>; + get_access_mut_docs! { + unsafe fn get_access_mut(&self, id: Index) -> Self::AccessMut<'_>; + } - /// Tries mutating the data associated with an `Index`. - /// This is unsafe because the external set used - /// to protect this storage is absent. - /// - /// # Safety - /// - /// May only be called after a call to `insert` with `id` and - /// no following call to `remove` with `id` or to `clean`. 
- /// - /// A mask should keep track of those states, and an `id` being contained in - /// the tracking mask is sufficient to call this method. - /// - /// There must be no extant aliasing references to this component (i.e. - /// obtained with the same `id` from `get` or `get_mut`). Additionally, - /// while the references returned here are in use, safe methods on this - /// type that take `&self` (e.g. [`SliceAccess::as_slice`], - /// [`Tracked::channel`]) must not be called and any references returned by - /// such methods must no longer be alive when `get_mut` is called. - /// - /// Unless this type implements `DistinctStorage`, calling this from - /// multiple threads at once is unsound. #[cfg(not(feature = "nightly"))] - unsafe fn get_mut(&self, id: Index) -> &mut T; + get_access_mut_docs! { + unsafe fn get_access_mut(&mut self, id: Index) -> &mut T; + } /// Inserts new data for a given `Index`. /// /// # Safety /// - // TODO: does anything rely on `insert` not having been called before or is - // this just trying to make sure things are dropped (if so it should not be - // listed under the safety requirements)? /// May only be called if `insert` was not called with `id` before, or /// was reverted by a call to `remove` with `id` or a call to `clean`. /// @@ -651,6 +642,52 @@ pub trait UnprotectedStorage: TryDefault { } } +macro_rules! shared_get_access_mut_docs { + ($fn_definition:item) => { + /// Gets mutable access to the the data associated with an `Index`. + /// + /// This is unsafe because the external set used to protect this storage is + /// absent and because it doesn't protect against concurrent calls from + /// multiple threads and aliasing must manually be managed. + /// + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. + /// obtained with the same `id`). Additionally, references obtained from + /// methods on this type that take `&self` (e.g. + /// [`UnprotectedStorage::get`], [`SliceAccess::as_slice`], + /// [`Tracked::channel`]) must no longer be alive when + /// `shared_get_access_mut` is called and these methods must not be + /// called while the references returned here are alive. Essentially, + /// the `unsafe` code calling this must hold exclusive access of the + /// storage at some level to ensure only known code is calling `&self` + /// methods during the usage of this method and the references it + /// produces. + /// + /// Unless this type implements `DistinctStorage`, calling this from + /// multiple threads at once is unsound. + $fn_definition + }; +} + +trait SharedGetAccessMutStorage: UnprotectedStorage { + #[cfg(feature = "nightly")] + shared_get_access_mut_docs! { + fn shared_get_access_mut(&self, id: Index) -> Self::AccessMut<'_> {} + } + + #[cfg(not(feature = "nightly"))] + shared_get_access_mut_docs! 
{ + fn shared_get_access_mut(&self, id: Index) -> &mut T {} + } +} + #[cfg(test)] #[cfg(feature = "parallel")] mod tests_inline { From e7d0aa155442834251b12207959898316abd908d Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 13 Aug 2022 14:47:35 -0400 Subject: [PATCH 06/47] Refactor `Join` family of traits so they can be more safely implemented/used: (NOTE: nothing compiling yet since implementation of these traits have not been updated) * Introduced `LendJoin` trait that is like the lending iterator version of `Join`. This is useful for types that need to return aliasing mutable references from `get` calls with distinct `id`s (e.g. `Entries`, `DerefFlaggedStorage`, `RestrictStoraged`). `LendJoin` uses `nougat` crate to provide a GAT based API on stable rust. * Removed unsound `JoinIter::get`/`JoinIter::get_unchecked` but these methods are present on `JoinLendIter` where they can be soundly implemented. * Since there is a single `MaybeJoin` type used for all joins, the convenient `.maybe()` method was moved to `LendJoin` which should be the common denominator of implemented join traits (if we put this method on multiple traits, rust might start wondering which one you want to call, which isn't convenient...). * `ParJoin` trait is no an longer empty trait that relies on the implementation in `Join`. The new `ParJoin::get` takes a shared reference so the `ParallelIterator` implementation no longer creates aliasing exclusive references to call `Join::get`. * `Join` is now an `unsafe` trait to require that the mask/values returned from `Join::open` are properly associated. * Extended application of `deny(unsafe_op_in_unsafe_fn)` to the `join` module and added safety documentation to calls to unsafe functions there. * Removed `Clone` implementation for `JoinIter where J::Mask: Clone, J::Value: Clone`. Nothing, in `Join::get` safety requirements implies that this is safe, in the cases where this is safe, the user can just call `.join()` twice for similar effect. Other misc changes: * `BitAnd` helper trait and `MaybeJoin` struct moved to their own files to declutter `join/mod.rs`. --- Cargo.toml | 1 + src/join/bit_and.rs | 57 ++++ src/join/lend_join.rs | 258 +++++++++++++++++ src/join/maybe.rs | 121 ++++++++ src/join/mod.rs | 634 ++++++++++++++++++------------------------ src/join/par_join.rs | 104 ++++--- src/lib.rs | 2 +- src/prelude.rs | 2 +- src/storage/mod.rs | 1 - 9 files changed, 770 insertions(+), 410 deletions(-) create mode 100644 src/join/bit_and.rs create mode 100644 src/join/lend_join.rs create mode 100644 src/join/maybe.rs diff --git a/Cargo.toml b/Cargo.toml index a72d012ac..28de53213 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ log = "0.4.8" shred = { version = "0.14.1", default-features = false } shrev = "1.1.1" tuple_utils = "0.4.0" +nougat = "0.2.3" rayon = { version = "1.5.1", optional = true } serde = { version = "1.0.104", optional = true, features = ["serde_derive"] } diff --git a/src/join/bit_and.rs b/src/join/bit_and.rs new file mode 100644 index 000000000..26d8ec425 --- /dev/null +++ b/src/join/bit_and.rs @@ -0,0 +1,57 @@ +use hibitset::{BitSetAnd, BitSetLike}; +use tuple_utils::Split; + +/// `BitAnd` is a helper method to & bitsets together resulting in a tree. +pub trait BitAnd { + /// The combined bitsets. + type Value: BitSetLike; + /// Combines `Self` into a single `BitSetLike` through `BitSetAnd`. 
+ fn and(self) -> Self::Value; +} + +/// This needs to be special cased +impl BitAnd for (A,) +where + A: BitSetLike, +{ + type Value = A; + + fn and(self) -> Self::Value { + self.0 + } +} + +macro_rules! bitset_and { + // use variables to indicate the arity of the tuple + ($($from:ident),*) => { + impl<$($from),*> BitAnd for ($($from),*) + where $($from: BitSetLike),* + { + type Value = BitSetAnd< + <::Left as BitAnd>::Value, + <::Right as BitAnd>::Value + >; + + fn and(self) -> Self::Value { + let (l, r) = self.split(); + BitSetAnd(l.and(), r.and()) + } + } + } +} + +bitset_and! {A, B} +bitset_and! {A, B, C} +bitset_and! {A, B, C, D} +bitset_and! {A, B, C, D, E} +bitset_and! {A, B, C, D, E, F} +bitset_and! {A, B, C, D, E, F, G} +bitset_and! {A, B, C, D, E, F, G, H} +bitset_and! {A, B, C, D, E, F, G, H, I} +bitset_and! {A, B, C, D, E, F, G, H, I, J} +bitset_and! {A, B, C, D, E, F, G, H, I, J, K} +bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L} +bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M} +bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M, N} +bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M, N, O} +bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P} diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs new file mode 100644 index 000000000..5b1ce2e7c --- /dev/null +++ b/src/join/lend_join.rs @@ -0,0 +1,258 @@ +/// Like the `Join` trait except this is similar to a `LendingIterator` in that +/// only one item can be accessed at once. +/// +/// # Safety +/// +/// The `Self::Mask` value returned with the `Self::Value` must correspond such +/// that it is safe to retrieve items from `Self::Value` whose presence is +/// indicated in the mask. +#[nougat::gat] +pub unsafe trait LendJoin { + /// Type of joined components. + /// + /// # Note + /// + /// This type is using macro magic to emulate GATs on stable. So to refer to + /// it you need to use the [`LendJoinType<'next, J>`](LendJoinType) type + /// alias. + type Type<'next> + where + Self: 'next; + /// Type of joined storages. + type Value; + /// Type of joined bit mask. + type Mask: BitSetLike; + + /// Create a joined lending iterator over the contents. + fn lend_join(self) -> JoinLendIter + where + Self: Sized, + { + JoinLendIter::new(self) + } + + /// Returns a structure that implements `Join`/`LendJoin`/`MaybeJoin` if the + /// contained `T` does and that yields all indices, returning `None` for all + /// missing elements and `Some(T)` for found elements. + /// + /// To join over and optional component mutably this pattern can be used: + /// `(&mut storage).maybe()`. + /// + /// WARNING: Do not have a join of only `MaybeJoin`s. Otherwise the join + /// will iterate over every single index of the bitset. If you want a + /// join with all `MaybeJoin`s, add an `EntitiesRes` to the join as well + /// to bound the join to all entities that are alive. 
+ /// + /// ``` + /// # use specs::prelude::*; + /// # #[derive(Debug, PartialEq)] + /// # struct Pos { x: i32, y: i32 } impl Component for Pos { type Storage = VecStorage; } + /// # #[derive(Debug, PartialEq)] + /// # struct Vel { x: i32, y: i32 } impl Component for Vel { type Storage = VecStorage; } + /// struct ExampleSystem; + /// impl<'a> System<'a> for ExampleSystem { + /// type SystemData = ( + /// WriteStorage<'a, Pos>, + /// ReadStorage<'a, Vel>, + /// ); + /// fn run(&mut self, (mut positions, velocities): Self::SystemData) { + /// let mut join = (&mut positions, velocities.maybe()).lend_join(); + /// while let Some ((mut position, maybe_velocity)) = join.next() { + /// if let Some(velocity) = maybe_velocity { + /// position.x += velocity.x; + /// position.y += velocity.y; + /// } + /// } + /// } + /// } + /// + /// fn main() { + /// let mut world = World::new(); + /// let mut dispatcher = DispatcherBuilder::new() + /// .with(ExampleSystem, "example_system", &[]) + /// .build(); + /// + /// dispatcher.setup(&mut world); + /// + /// let e1 = world.create_entity() + /// .with(Pos { x: 0, y: 0 }) + /// .with(Vel { x: 5, y: 2 }) + /// .build(); + /// + /// let e2 = world.create_entity() + /// .with(Pos { x: 0, y: 0 }) + /// .build(); + /// + /// dispatcher.dispatch(&mut world); + /// + /// let positions = world.read_storage::(); + /// assert_eq!(positions.get(e1), Some(&Pos { x: 5, y: 2 })); + /// assert_eq!(positions.get(e2), Some(&Pos { x: 0, y: 0 })); + /// } + /// ``` + fn maybe(self) -> MaybeJoin + where + Self: Sized, + { + MaybeJoin(self) + } + + /// Open this join by returning the mask and the storages. + /// + /// # Safety + /// + /// This is unsafe because implementations of this trait can permit the + /// `Value` to be mutated independently of the `Mask`. If the `Mask` does + /// not correctly report the status of the `Value` then illegal memory + /// access can occur. + unsafe fn open(self) -> (Self::Mask, Self::Value); + + /// Get a joined component value by a given index. + /// + /// # Safety + /// + /// * A call to `get` must be preceded by a check if `id` is part of + /// `Self::Mask` + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_>; + + /// If this `LendJoin` typically returns all indices in the mask, then + /// iterating over only it or combined with other joins that are also + /// dangerous will cause the `JoinLendIter` to go through all indices which + /// is usually not what is wanted and will kill performance. + #[inline] + fn is_unconstrained() -> bool { + false + } +} + +/// Type alias to refer to the `::Type<'next>` (except this +/// doesn't actually exist in this form so the `nougat::Gat!` macro is needed). +pub type LendJoinType<'next, J> = nougat::Gat!(::Type<'next>); + +/// `JoinLendIter` is an is a lending/streaming iterator over components from a +/// group of storages. +#[must_use] +pub struct JoinLendIter { + keys: BitIter, + values: J::Value, +} + +impl JoinLendIter { + /// Create a new lending join iterator. + pub fn new(j: J) -> Self { + if ::is_unconstrained() { + log::warn!( + "`LendJoin` possibly iterating through all indices, \ + you might've made a join with all `MaybeJoin`s, \ + which is unbounded in length." + ); + } + + // SAFETY: We do not swap out the mask or the values, nor do we allow it + // by exposing them. + let (keys, values) = unsafe { j.open() }; + JoinLendIter { + keys: keys.iter(), + values, + } + } +} + +impl JoinLendIter { + /// Lending `next`. 
+ /// + /// Can be used to iterate with this pattern: + /// + /// `while let Some(components) = join_lending_iter.next() {` + fn next(&mut self) -> Option> { + // SAFETY: since `idx` is yielded from `keys` (the mask), it is necessarily a + // part of it. Thus, requirements are fulfilled for calling `get`. + self.keys + .next() + .map(|idx| unsafe { J::get(&mut self.values, idx) }) + } + + fn for_each(mut self, mut f: impl FnMut(LendJoinType<'_, J>)) { + self.keys.for_each(|idx| { + // SAFETY: since `idx` is yielded from `keys` (the mask), it is + // necessarily a part of it. Thus, requirements are fulfilled for + // calling `get`. + let item = unsafe { J::get(&mut self.values, idx) }; + f(item); + }) + } + + /// Allows getting joined values for specific entity. + /// + /// ## Example + /// + /// ``` + /// # use specs::prelude::*; + /// # #[derive(Debug, PartialEq)] + /// # struct Pos; impl Component for Pos { type Storage = VecStorage; } + /// # #[derive(Debug, PartialEq)] + /// # struct Vel; impl Component for Vel { type Storage = VecStorage; } + /// let mut world = World::new(); + /// + /// world.register::(); + /// world.register::(); + /// + /// // This entity could be stashed anywhere (into `Component`, `Resource`, `System`s data, etc.) as it's just a number. + /// let entity = world + /// .create_entity() + /// .with(Pos) + /// .with(Vel) + /// .build(); + /// + /// // Later + /// { + /// let mut pos = world.write_storage::(); + /// let vel = world.read_storage::(); + /// + /// assert_eq!( + /// Some((&mut Pos, &Vel)), + /// (&mut pos, &vel).lend_join().get(entity, &world.entities()), + /// "The entity that was stashed still has the needed components and is alive." + /// ); + /// } + /// + /// // The entity has found nice spot and doesn't need to move anymore. + /// world.write_storage::().remove(entity); + /// + /// // Even later + /// { + /// let mut pos = world.write_storage::(); + /// let vel = world.read_storage::(); + /// + /// assert_eq!( + /// None, + /// (&mut pos, &vel).lend_join().get(entity, &world.entities()), + /// "The entity doesn't have velocity anymore." + /// ); + /// } + /// ``` + pub fn get(&mut self, entity: Entity, entities: &Entities) -> Option> { + if self.keys.contains(entity.id()) && entities.is_alive(entity) { + // SAFETY: the mask (`keys`) is checked as specified in the docs of `get`. + Some(unsafe { J::get(&mut self.values, entity.id()) }) + } else { + None + } + } + + /// Allows getting joined values for specific raw index. + /// + /// The raw index for an `Entity` can be retrieved using `Entity::id` + /// method. + /// + /// As this method operates on raw indices, there is no check to see if the + /// entity is still alive, so the caller should ensure it instead. + pub fn get_unchecked(&mut self, index: Index) -> Option> { + if self.keys.contains(index) { + // SAFETY: the mask (`keys`) is checked as specified in the docs of `get`. + Some(unsafe { J::get(&mut self.values, index) }) + } else { + None + } + } +} diff --git a/src/join/maybe.rs b/src/join/maybe.rs new file mode 100644 index 000000000..d9bc654e9 --- /dev/null +++ b/src/join/maybe.rs @@ -0,0 +1,121 @@ +use super::{Join, LendJoin, ParJoin}; + +/// Returns a structure that implements `Join`/`LendJoin`/`MaybeJoin` if the +/// contained `T` does and that yields all indices, returning `None` for all +/// missing elements and `Some(T)` for found elements. +/// +/// For usage see [`LendJoin::maybe()`](LendJoin::Maybe). +/// +/// WARNING: Do not have a join of only `MaybeJoin`s. 
Otherwise the join will +/// iterate over every single index of the bitset. If you want a join with +/// all `MaybeJoin`s, add an `EntitiesRes` to the join as well to bound the +/// join to all entities that are alive. +pub struct MaybeJoin(pub J); + +// SAFETY: We return a mask containing all items, but check the original mask in +// the `get` implementation. +unsafe impl LendJoin for MaybeJoin +where + T: LendJoin, +{ + type Mask = BitSetAll; + type Type = Option<::Type>; + type Value = (::Mask, ::Value); + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // SAFETY: While we do expose the mask and the values and therefore + // would allow swapping them, this method is `unsafe` and relies on the + // same invariants. + let (mask, value) = unsafe { self.0.open() }; + (BitSetAll, (mask, value)) + } + + unsafe fn get((mask, value): &mut Self::Value, id: Index) -> Self::Type { + if mask.contains(id) { + // SAFETY: The mask was just checked for `id`. + Some(unsafe { ::get(value, id) }) + } else { + None + } + } + + #[inline] + fn is_unconstrained() -> bool { + true + } +} + +// SAFETY: We return a mask containing all items, but check the original mask in +// the `get` implementation. +unsafe impl Join for MaybeJoin +where + T: Join, +{ + type Mask = BitSetAll; + type Type = Option<::Type>; + type Value = (::Mask, ::Value); + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // SAFETY: While we do expose the mask and the values and therefore + // would allow swapping them, this method is `unsafe` and relies on the + // same invariants. + let (mask, value) = unsafe { self.0.open() }; + (BitSetAll, (mask, value)) + } + + unsafe fn get((mask, value): &mut Self::Value, id: Index) -> Self::Type { + if mask.contains(id) { + // SAFETY: The mask was just checked for `id`. This has the same + // requirements on the caller to not call with the same `id` until + // the previous value is no longer in use. + Some(unsafe { ::get(value, id) }) + } else { + None + } + } + + #[inline] + fn is_unconstrained() -> bool { + true + } +} + +// SAFETY: This is safe as long as `T` implements `ParJoin` safely. The `get` +// implementation here makes no assumptions about being called from a single +// thread. +// +// We return a mask containing all items, but check the original mask in +// the `get` implementation. +#[cfg(feature = "parallel")] +unsafe impl ParJoin for MaybeJoin +where + T: ParJoin, +{ + type Mask = BitSetAll; + type Type = Option<::Type>; + type Value = (::Mask, ::Value); + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // SAFETY: While we do expose the mask and the values and therefore + // would allow swapping them, this method is `unsafe` and relies on the + // same invariants. + let (mask, value) = unsafe { self.0.open() }; + (BitSetAll, (mask, value)) + } + + unsafe fn get((mask, value): &Self::Value, id: Index) -> Self::Type { + if mask.contains(id) { + // SAFETY: The mask was just checked for `id`. This has the same + // requirements on the caller to not call with the same `id` until + // the previous value is no longer in use. + Some(unsafe { ::get(value, id) }) + } else { + None + } + } + + #[inline] + fn is_unconstrained() -> bool { + true + } +} diff --git a/src/join/mod.rs b/src/join/mod.rs index 41eede95c..94ef8064b 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -1,72 +1,25 @@ //! Joining of components for iteration over entities with specific components. 
+// TODO: promote to the whole crate +#![deny(unsafe_op_in_unsafe_fn)] + use hibitset::{BitIter, BitSetAll, BitSetAnd, BitSetLike}; use shred::{Fetch, FetchMut, Read, ReadExpect, Resource, Write, WriteExpect}; use std::ops::{Deref, DerefMut}; -use tuple_utils::Split; use crate::world::{Entities, Entity, Index}; +mod bit_and; +mod lend_join; +mod maybe; #[cfg(feature = "parallel")] mod par_join; +pub use bit_and::BitAnd; +pub use lend_join::{JoinLendIter, LendJoin, LendJoinType}; +pub use maybe::MaybeJoin; #[cfg(feature = "parallel")] -pub use self::par_join::{JoinParIter, ParJoin}; - -/// `BitAnd` is a helper method to & bitsets together resulting in a tree. -pub trait BitAnd { - /// The combined bitsets. - type Value: BitSetLike; - /// Combines `Self` into a single `BitSetLike` through `BitSetAnd`. - fn and(self) -> Self::Value; -} - -/// This needs to be special cased -impl BitAnd for (A,) -where - A: BitSetLike, -{ - type Value = A; - - fn and(self) -> Self::Value { - self.0 - } -} - -macro_rules! bitset_and { - // use variables to indicate the arity of the tuple - ($($from:ident),*) => { - impl<$($from),*> BitAnd for ($($from),*) - where $($from: BitSetLike),* - { - type Value = BitSetAnd< - <::Left as BitAnd>::Value, - <::Right as BitAnd>::Value - >; - - fn and(self) -> Self::Value { - let (l, r) = self.split(); - BitSetAnd(l.and(), r.and()) - } - } - } -} - -bitset_and! {A, B} -bitset_and! {A, B, C} -bitset_and! {A, B, C, D} -bitset_and! {A, B, C, D, E} -bitset_and! {A, B, C, D, E, F} -bitset_and! {A, B, C, D, E, F, G} -bitset_and! {A, B, C, D, E, F, G, H} -bitset_and! {A, B, C, D, E, F, G, H, I} -bitset_and! {A, B, C, D, E, F, G, H, I, J} -bitset_and! {A, B, C, D, E, F, G, H, I, J, K} -bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L} -bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M} -bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M, N} -bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M, N, O} -bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P} +pub use par_join::{JoinParIter, ParJoin}; /// The purpose of the `Join` trait is to provide a way /// to access multiple storages at the same time with @@ -137,7 +90,13 @@ bitset_and! {A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P} /// /// `Join` can also be used to iterate over a single /// storage, just by writing `(&storage).join()`. -pub trait Join { +/// +/// # Safety +/// +/// The `Self::Mask` value returned with the `Self::Value` must correspond such +/// that it is safe to retrieve items from `Self::Value` whose presence is +/// indicated in the mask. +pub unsafe trait Join { /// Type of joined components. type Type; /// Type of joined storages. @@ -153,149 +112,39 @@ pub trait Join { JoinIter::new(self) } - /// Returns a `Join`-able structure that yields all indices, returning - /// `None` for all missing elements and `Some(T)` for found elements. - /// - /// WARNING: Do not have a join of only `MaybeJoin`s. Otherwise the join - /// will iterate over every single index of the bitset. If you want a - /// join with all `MaybeJoin`s, add an `EntitiesRes` to the join as well - /// to bound the join to all entities that are alive. 
- /// - /// ``` - /// # use specs::prelude::*; - /// # #[derive(Debug, PartialEq)] - /// # struct Pos { x: i32, y: i32 } impl Component for Pos { type Storage = VecStorage; } - /// # #[derive(Debug, PartialEq)] - /// # struct Vel { x: i32, y: i32 } impl Component for Vel { type Storage = VecStorage; } - /// struct ExampleSystem; - /// impl<'a> System<'a> for ExampleSystem { - /// type SystemData = ( - /// WriteStorage<'a, Pos>, - /// ReadStorage<'a, Vel>, - /// ); - /// fn run(&mut self, (mut positions, velocities): Self::SystemData) { - /// for (mut position, maybe_velocity) in (&mut positions, velocities.maybe()).join() { - /// if let Some(velocity) = maybe_velocity { - /// position.x += velocity.x; - /// position.y += velocity.y; - /// } - /// } - /// } - /// } - /// - /// fn main() { - /// let mut world = World::new(); - /// let mut dispatcher = DispatcherBuilder::new() - /// .with(ExampleSystem, "example_system", &[]) - /// .build(); - /// - /// dispatcher.setup(&mut world); - /// - /// let e1 = world.create_entity() - /// .with(Pos { x: 0, y: 0 }) - /// .with(Vel { x: 5, y: 2 }) - /// .build(); - /// - /// let e2 = world.create_entity() - /// .with(Pos { x: 0, y: 0 }) - /// .build(); - /// - /// dispatcher.dispatch(&mut world); - /// - /// let positions = world.read_storage::(); - /// assert_eq!(positions.get(e1), Some(&Pos { x: 5, y: 2 })); - /// assert_eq!(positions.get(e2), Some(&Pos { x: 0, y: 0 })); - /// } - /// ``` - fn maybe(self) -> MaybeJoin - where - Self: Sized, - { - MaybeJoin(self) - } - /// Open this join by returning the mask and the storages. /// /// # Safety /// - /// This is unsafe because implementations of this trait can permit - /// the `Value` to be mutated independently of the `Mask`. - /// If the `Mask` does not correctly report the status of the `Value` - /// then illegal memory access can occur. + /// This is unsafe because implementations of this trait can permit the + /// `Value` to be mutated independently of the `Mask`. If the `Mask` does + /// not correctly report the status of the `Value` then illegal memory + /// access can occur. unsafe fn open(self) -> (Self::Mask, Self::Value); /// Get a joined component value by a given index. /// + // S-TODO: evaluate all impls + /// /// # Safety /// /// * A call to `get` must be preceded by a check if `id` is part of - /// `Self::Mask` - /// * The implementation of this method may use unsafe code, but has no - /// invariants to meet + /// `Self::Mask`. + /// * The use of the mutable reference returned from this method must end + /// before subsequent calls with the same `id`. unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type; /// If this `Join` typically returns all indices in the mask, then iterating - /// over only it or combined with other joins that are also dangerous - /// will cause the `JoinIter`/`ParJoin` to go through all indices which - /// is usually not what is wanted and will kill performance. + /// over only it or combined with other joins that are also dangerous will + /// cause the `JoinIter` to go through all indices which is usually not what + /// is wanted and will kill performance. #[inline] fn is_unconstrained() -> bool { false } } -/// A `Join`-able structure that yields all indices, returning `None` for all -/// missing elements and `Some(T)` for found elements. -/// -/// For usage see [`Join::maybe()`]. -/// -/// WARNING: Do not have a join of only `MaybeJoin`s. Otherwise the join will -/// iterate over every single index of the bitset. 
If you want a join with -/// all `MaybeJoin`s, add an `EntitiesRes` to the join as well to bound the -/// join to all entities that are alive. -/// -/// [`Join::maybe()`]: ../join/trait.Join.html#method.maybe -pub struct MaybeJoin(pub J); - -impl Join for MaybeJoin -where - T: Join, -{ - type Mask = BitSetAll; - type Type = Option<::Type>; - type Value = (::Mask, ::Value); - - // SAFETY: This wraps another implementation of `open`, making it dependent on - // `J`'s correctness. We can safely assume `J` is valid, thus this must be - // valid, too. No invariants to meet. - unsafe fn open(self) -> (Self::Mask, Self::Value) { - let (mask, value) = self.0.open(); - (BitSetAll, (mask, value)) - } - - // SAFETY: No invariants to meet and the unsafe code checks the mask, thus - // fulfills the requirements for calling `get` - unsafe fn get((mask, value): &mut Self::Value, id: Index) -> Self::Type { - if mask.contains(id) { - Some(::get(value, id)) - } else { - None - } - } - - #[inline] - fn is_unconstrained() -> bool { - true - } -} - -// SAFETY: This is safe as long as `T` implements `ParJoin` safely. `MaybeJoin` -// relies on `T as Join` for all storage access and safely wraps the inner -// `Join` API, so it should also be able to implement `ParJoin`. -#[cfg(feature = "parallel")] -unsafe impl ParJoin for MaybeJoin where T: ParJoin {} - -/// `JoinIter` is an `Iterator` over a group of `Storages`. +/// `JoinIter` is an `Iterator` over a group of storages. #[must_use] pub struct JoinIter { keys: BitIter, @@ -307,12 +156,14 @@ impl JoinIter { pub fn new(j: J) -> Self { if ::is_unconstrained() { log::warn!( - "`Join` possibly iterating through all indices, you might've made a join with all `MaybeJoin`s, which is unbounded in length." + "`Join` possibly iterating through all indices, \ + you might've made a join with all `MaybeJoin`s, \ + which is unbounded in length." ); } - // SAFETY: We do not swap out the mask or the values, nor do we allow it by - // exposing them. + // SAFETY: We do not swap out the mask or the values, nor do we allow it + // by exposing them. let (keys, values) = unsafe { j.open() }; JoinIter { keys: keys.iter(), @@ -321,82 +172,6 @@ impl JoinIter { } } -impl JoinIter { - /// Allows getting joined values for specific entity. - /// - /// ## Example - /// - /// ``` - /// # use specs::prelude::*; - /// # #[derive(Debug, PartialEq)] - /// # struct Pos; impl Component for Pos { type Storage = VecStorage; } - /// # #[derive(Debug, PartialEq)] - /// # struct Vel; impl Component for Vel { type Storage = VecStorage; } - /// let mut world = World::new(); - /// - /// world.register::(); - /// world.register::(); - /// - /// // This entity could be stashed anywhere (into `Component`, `Resource`, `System`s data, etc.) as it's just a number. - /// let entity = world - /// .create_entity() - /// .with(Pos) - /// .with(Vel) - /// .build(); - /// - /// // Later - /// { - /// let mut pos = world.write_storage::(); - /// let vel = world.read_storage::(); - /// - /// assert_eq!( - /// Some((&mut Pos, &Vel)), - /// (&mut pos, &vel).join().get(entity, &world.entities()), - /// "The entity that was stashed still has the needed components and is alive." - /// ); - /// } - /// - /// // The entity has found nice spot and doesn't need to move anymore. 
- /// world.write_storage::().remove(entity); - /// - /// // Even later - /// { - /// let mut pos = world.write_storage::(); - /// let vel = world.read_storage::(); - /// - /// assert_eq!( - /// None, - /// (&mut pos, &vel).join().get(entity, &world.entities()), - /// "The entity doesn't have velocity anymore." - /// ); - /// } - /// ``` - pub fn get(&mut self, entity: Entity, entities: &Entities) -> Option { - if self.keys.contains(entity.id()) && entities.is_alive(entity) { - // SAFETY: the mask (`keys`) is checked as specified in the docs of `get`. - Some(unsafe { J::get(&mut self.values, entity.id()) }) - } else { - None - } - } - - /// Allows getting joined values for specific raw index. - /// - /// The raw index for an `Entity` can be retrieved using `Entity::id` - /// method. - /// - /// As this method operates on raw indices, there is no check to see if the - /// entity is still alive, so the caller should ensure it instead. - pub fn get_unchecked(&mut self, index: Index) -> Option { - if self.keys.contains(index) { - // SAFETY: the mask (`keys`) is checked as specified in the docs of `get`. - Some(unsafe { J::get(&mut self.values, index) }) - } else { - None - } - } -} - impl std::iter::Iterator for JoinIter { type Item = J::Type; @@ -409,104 +184,84 @@ impl std::iter::Iterator for JoinIter { } } -/// Clones the `JoinIter`. -/// -/// # Examples -/// -/// ``` -/// # use specs::prelude::*; -/// # #[derive(Debug)] -/// # struct Position; impl Component for Position { type Storage = VecStorage; } -/// # #[derive(Debug)] -/// # struct Collider; impl Component for Collider { type Storage = VecStorage; } -/// let mut world = World::new(); -/// -/// world.register::(); -/// world.register::(); -/// -/// // add some entities to our world -/// for _ in 0..10 { -/// let entity = world.create_entity().with(Position).with(Collider).build(); -/// } -/// -/// // check for collisions between entities -/// let positions = world.read_storage::(); -/// let colliders = world.read_storage::(); -/// -/// let mut join_iter = (&positions, &colliders).join(); -/// while let Some(a) = join_iter.next() { -/// for b in join_iter.clone() { -/// # let check_collision = |a, b| true; -/// if check_collision(a, b) { -/// // do stuff -/// } -/// } -/// } -/// ``` -/// -/// It is *not* possible to clone a `JoinIter` which allows for -/// mutation of its content, as this would lead to shared mutable -/// access. -/// -/// ```compile_fail -/// # use specs::prelude::*; -/// # #[derive(Debug)] -/// # struct Position; impl Component for Position { type Storage = VecStorage; } -/// # let mut world = World::new(); -/// # world.register::(); -/// # let entity = world.create_entity().with(Position).build(); -/// // .. previous example -/// -/// let mut positions = world.write_storage::(); -/// -/// let mut join_iter = (&mut positions).join(); -/// // this must not compile, as the following line would cause -/// // undefined behavior! -/// let mut cloned_iter = join_iter.clone(); -/// let (mut alias_one, mut alias_two) = (join_iter.next(), cloned_iter.next()); -/// ``` -impl Clone for JoinIter -where - J::Mask: Clone, - J::Value: Clone, -{ - fn clone(&self) -> Self { - Self { - keys: self.keys.clone(), - values: self.values.clone(), - } - } -} +// Implementations of `LendJoin`, `Join`, and `ParJoin` for tuples, `Fetch`, +// `Read`, `ReadExpect`, `FetchMut`, `Write`, and `WriteExpect`. macro_rules! 
define_open { // use variables to indicate the arity of the tuple ($($from:ident),*) => { - impl<$($from,)*> Join for ($($from),*,) + // SAFETY: The returned mask in `open` is the intersection of the masks + // from each type in this tuple. So if an `id` is present in the + // combined mask, it will be safe to retrieve the corresponding items. + unsafe impl<$($from,)*> LendJoin for ($($from),*,) + where $($from: LendJoin),*, + ($(<$from as LendJoin>::Mask,)*): BitAnd, + { + type Type = ($($from::Type),*,); + type Value = ($($from::Value),*,); + type Mask = <($($from::Mask,)*) as BitAnd>::Value; + + #[allow(non_snake_case)] + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let ($($from,)*) = self; + // SAFETY: While we do expose the mask and the values and + // therefore would allow swapping them, this method is `unsafe` + // and relies on the same invariants. + let ($($from,)*) = unsafe { ($($from.open(),)*) }; + ( + ($($from.0),*,).and(), + ($($from.1),*,) + ) + } + + #[allow(non_snake_case)] + unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { + let &mut ($(ref mut $from,)*) = v; + // SAFETY: `get` is safe to call as the caller must have checked + // the mask, which only has a key that exists in all of the + // storages. + unsafe { ($($from::get($from, i),)*) } + } + + #[inline] + fn is_unconstrained() -> bool { + let mut unconstrained = true; + $( unconstrained = unconstrained && $from::is_unconstrained(); )* + unconstrained + } + } + + // SAFETY: The returned mask in `open` is the intersection of the masks + // from each type in this tuple. So if an `id` is present in the + // combined mask, it will be safe to retrieve the corresponding items. + unsafe impl<$($from,)*> Join for ($($from),*,) where $($from: Join),*, ($(<$from as Join>::Mask,)*): BitAnd, { type Type = ($($from::Type),*,); type Value = ($($from::Value),*,); type Mask = <($($from::Mask,)*) as BitAnd>::Value; - #[allow(non_snake_case)] - // SAFETY: While we do expose the mask and the values and therefore would allow swapping them, - // this method is `unsafe` and relies on the same invariants. + #[allow(non_snake_case)] unsafe fn open(self) -> (Self::Mask, Self::Value) { let ($($from,)*) = self; - let ($($from,)*) = ($($from.open(),)*); + // SAFETY: While we do expose the mask and the values and + // therefore would allow swapping them, this method is `unsafe` + // and relies on the same invariants. + let ($($from,)*) = unsafe { ($($from.open(),)*) }; ( ($($from.0),*,).and(), ($($from.1),*,) ) } - // SAFETY: No invariants to meet and `get` is safe to call as the caller must have checked the mask, - // which only has a key that exists in all of the storages. #[allow(non_snake_case)] unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { let &mut ($(ref mut $from,)*) = v; - ($($from::get($from, i),)*) + // SAFETY: `get` is safe to call as the caller must have checked + // the mask, which only has a key that exists in all of the + // storages. + unsafe { ($($from::get($from, i),)*) } } #[inline] @@ -517,16 +272,51 @@ macro_rules! define_open { } } - // SAFETY: This is safe to implement since all components implement `ParJoin`. - // If the access of every individual `get` leads to disjoint memory access, calling - // all of them after another does in no case lead to access of common memory. + // SAFETY: This is safe to implement since all components implement + // `ParJoin`. If the access of every individual `get` is callable from + // multiple threads, then this `get` method will be as well. 
+ // + // The returned mask in `open` is the intersection of the masks + // from each type in this tuple. So if an `id` is present in the + // combined mask, it will be safe to retrieve the corresponding items. #[cfg(feature = "parallel")] unsafe impl<$($from,)*> ParJoin for ($($from),*,) where $($from: ParJoin),*, - ($(<$from as Join>::Mask,)*): BitAnd, - {} + ($(<$from as ParJoin>::Mask,)*): BitAnd, + { + type Type = ($($from::Type),*,); + type Value = ($($from::Value),*,); + type Mask = <($($from::Mask,)*) as BitAnd>::Value; - } + #[allow(non_snake_case)] + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let ($($from,)*) = self; + // SAFETY: While we do expose the mask and the values and + // therefore would allow swapping them, this method is `unsafe` + // and relies on the same invariants. + let ($($from,)*) = unsafe { ($($from.open(),)*) }; + ( + ($($from.0),*,).and(), + ($($from.1),*,) + ) + } + + #[allow(non_snake_case)] + unsafe fn get(v: &Self::Value, i: Index) -> Self::Type { + let &mut ($(ref mut $from,)*) = v; + // SAFETY: `get` is safe to call as the caller must have checked + // the mask, which only has a key that exists in all of the + // storages. + unsafe { ($($from::get($from, i),)*) } + } + + #[inline] + fn is_unconstrained() -> bool { + let mut unconstrained = true; + $( unconstrained = unconstrained && $from::is_unconstrained(); )* + unconstrained + } + } } define_open! {A} @@ -559,7 +349,40 @@ define_open!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R); macro_rules! immutable_resource_join { ($($ty:ty),*) => { $( - impl<'a, 'b, T> Join for &'a $ty + // SAFETY: Since `T` implements `LendJoin` it is safe to deref and defer + // to its implementation. + unsafe impl<'a, 'b, T> LendJoin for &'a $ty + where + &'a T: LendJoin, + T: Resource, + { + type Type = <&'a T as LendJoin>::Type; + type Value = <&'a T as LendJoin>::Value; + type Mask = <&'a T as LendJoin>::Mask; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // SAFETY: This only wraps `T` and, while exposing the mask and + // the values, requires the same invariants as the original + // implementation and is thus safe. + unsafe { self.deref().open() } + } + + unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { + // SAFETY: The mask of `Self` and `T` are identical, thus a + // check to `Self`'s mask (which is required) is equal to a + // check of `T`'s mask, which makes `get` safe to call. + unsafe { <&'a T as LendJoin>::get(v, i) } + } + + #[inline] + fn is_unconstrained() -> bool { + <&'a T as LendJoin>::is_unconstrained() + } + } + + // SAFETY: Since `T` implements `Join` it is safe to deref and defer to + // its implementation. + unsafe impl<'a, 'b, T> Join for &'a $ty where &'a T: Join, T: Resource, @@ -568,16 +391,18 @@ macro_rules! immutable_resource_join { type Value = <&'a T as Join>::Value; type Mask = <&'a T as Join>::Mask; - // SAFETY: This only wraps `T` and, while exposing the mask and the values, - // requires the same invariants as the original implementation and is thus safe. unsafe fn open(self) -> (Self::Mask, Self::Value) { - self.deref().open() + // SAFETY: This only wraps `T` and, while exposing the mask and + // the values, requires the same invariants as the original + // implementation and is thus safe. + unsafe { self.deref().open() } } - // SAFETY: The mask of `Self` and `T` are identical, thus a check to `Self`'s mask (which is required) - // is equal to a check of `T`'s mask, which makes `get` safe to call. 
unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { - <&'a T as Join>::get(v, i) + // SAFETY: The mask of `Self` and `T` are identical, thus a + // check to `Self`'s mask (which is required) is equal to a + // check of `T`'s mask, which makes `get` safe to call. + unsafe { <&'a T as Join>::get(v, i) } } #[inline] @@ -586,14 +411,37 @@ macro_rules! immutable_resource_join { } } - // SAFETY: This is just a wrapper of `T`'s implementation for `ParJoin` and can - // in no case lead to other memory access patterns. + // SAFETY: Since `T` implements `ParJoin` it is safe to deref and defer to + // its implementation. S-TODO we can rely on errors if $ty is not sync? #[cfg(feature = "parallel")] unsafe impl<'a, 'b, T> ParJoin for &'a $ty where &'a T: ParJoin, - T: Resource - {} + T: Resource, + { + type Type = <&'a T as ParJoin>::Type; + type Value = <&'a T as ParJoin>::Value; + type Mask = <&'a T as ParJoin>::Mask; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // SAFETY: This only wraps `T` and, while exposing the mask and + // the values, requires the same invariants as the original + // implementation and is thus safe. + unsafe { self.deref().open() } + } + + unsafe fn get(v: &Self::Value, i: Index) -> Self::Type { + // SAFETY: The mask of `Self` and `T` are identical, thus a + // check to `Self`'s mask (which is required) is equal to a + // check of `T`'s mask, which makes `get` safe to call. + unsafe { <&'a T as ParJoin>::get(v, i) } + } + + #[inline] + fn is_unconstrained() -> bool { + <&'a T as ParJoin>::is_unconstrained() + } + } )* }; } @@ -601,7 +449,40 @@ macro_rules! immutable_resource_join { macro_rules! mutable_resource_join { ($($ty:ty),*) => { $( - impl<'a, 'b, T> Join for &'a mut $ty + // SAFETY: Since `T` implements `LendJoin` it is safe to deref and defer + // to its implementation. + unsafe impl<'a, 'b, T> LendJoin for &'a mut $ty + where + &'a mut T: LendJoin, + T: Resource, + { + type Type = <&'a mut T as LendJoin>::Type; + type Value = <&'a mut T as LendJoin>::Value; + type Mask = <&'a mut T as LendJoin>::Mask; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // SAFETY: This only wraps `T` and, while exposing the mask and + // the values, requires the same invariants as the original + // implementation and is thus safe. + unsafe { self.deref_mut().open() } + } + + unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { + // SAFETY: The mask of `Self` and `T` are identical, thus a check to + // `Self`'s mask (which is required) is equal to a check of `T`'s + // mask, which makes `get_mut` safe to call. + unsafe { <&'a mut T as LendJoin>::get(v, i) } + } + + #[inline] + fn is_unconstrained() -> bool { + <&'a mut T as LendJoin>::is_unconstrained() + } + } + + // SAFETY: Since `T` implements `Join` it is safe to deref and defer to + // its implementation. + unsafe impl<'a, 'b, T> Join for &'a mut $ty where &'a mut T: Join, T: Resource, @@ -610,16 +491,18 @@ macro_rules! mutable_resource_join { type Value = <&'a mut T as Join>::Value; type Mask = <&'a mut T as Join>::Mask; - // SAFETY: This only wraps `T` and, while exposing the mask and the values, - // requires the same invariants as the original implementation and is thus safe. unsafe fn open(self) -> (Self::Mask, Self::Value) { - self.deref_mut().open() + // SAFETY: This only wraps `T` and, while exposing the mask and + // the values, requires the same invariants as the original + // implementation and is thus safe. 
+ unsafe { self.deref_mut().open() } } - // SAFETY: The mask of `Self` and `T` are identical, thus a check to `Self`'s mask (which is required) - // is equal to a check of `T`'s mask, which makes `get_mut` safe to call. unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { - <&'a mut T as Join>::get(v, i) + // SAFETY: The mask of `Self` and `T` are identical, thus a check to + // `Self`'s mask (which is required) is equal to a check of `T`'s + // mask, which makes `get_mut` safe to call. + unsafe { <&'a mut T as Join>::get(v, i) } } #[inline] @@ -628,14 +511,37 @@ macro_rules! mutable_resource_join { } } - // SAFETY: This is just a wrapper of `T`'s implementation for `ParJoin` and can - // in no case lead to other memory access patterns. + // SAFETY: Since `T` implements `ParJoin` it is safe to deref and defer + // its implementation. S-TODO we can rely on errors if $ty is not sync? #[cfg(feature = "parallel")] unsafe impl<'a, 'b, T> ParJoin for &'a mut $ty where &'a mut T: ParJoin, - T: Resource - {} + T: Resource, + { + type Type = <&'a mut T as ParJoin>::Type; + type Value = <&'a mut T as ParJoin>::Value; + type Mask = <&'a mut T as ParJoin>::Mask; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // SAFETY: This only wraps `T` and, while exposing the mask and + // the values, requires the same invariants as the original + // implementation and is thus safe. + unsafe { self.deref_mut().open() } + } + + unsafe fn get(v: &Self::Value, i: Index) -> Self::Type { + // SAFETY: The mask of `Self` and `T` are identical, thus a check to + // `Self`'s mask (which is required) is equal to a check of `T`'s + // mask, which makes `get_mut` safe to call. + unsafe { <&'a mut T as ParJoin>::get(v, i) } + } + + #[inline] + fn is_unconstrained() -> bool { + <&'a mut T as ParJoin>::is_unconstrained() + } + } )* }; } diff --git a/src/join/par_join.rs b/src/join/par_join.rs index 40159d833..e996eaf0f 100644 --- a/src/join/par_join.rs +++ b/src/join/par_join.rs @@ -15,34 +15,74 @@ use rayon::iter::{ /// # Safety /// /// The implementation of `ParallelIterator` for `ParJoin` makes multiple -/// assumptions on the structure of `Self`. In particular, `::get` -/// must be callable from multiple threads, simultaneously, without mutating -/// values not exclusively associated with `id`. -// NOTE: This is currently unspecified behavior. It seems very unlikely that it -// breaks in the future, but technically it's not specified as valid Rust code. -pub unsafe trait ParJoin: Join { +/// assumptions on the structure of `Self`. In particular, `ParJoin::get` must +/// be callable from multiple threads, simultaneously, without creating mutable +/// references not exclusively associated with `id`. +/// +/// The `Self::Mask` value returned with the `Self::Value` must correspond such +/// that it is safe to retrieve items from `Self::Value` whose presence is +/// indicated in the mask. +pub unsafe trait ParJoin { + /// Type of joined components. + type Type; + /// Type of joined storages. + type Value; + /// Type of joined bit mask. + type Mask: BitSetLike; + /// Create a joined parallel iterator over the contents. fn par_join(self) -> JoinParIter where Self: Sized, { - if ::is_unconstrained() { + if Self::is_unconstrained() { log::warn!( - "`ParJoin` possibly iterating through all indices, you might've made a join with all `MaybeJoin`s, which is unbounded in length." + "`ParJoin` possibly iterating through all indices, \ + you might've made a join with all `MaybeJoin`s, \ + which is unbounded in length." 
); } JoinParIter(self) } + + /// Open this join by returning the mask and the storages. + /// + /// # Safety + /// + /// This is unsafe because implementations of this trait can permit the + /// `Value` to be mutated independently of the `Mask`. If the `Mask` does + /// not correctly report the status of the `Value` then illegal memory + /// access can occur. + unsafe fn open(self) -> (Self::Mask, Self::Value); + + /// Get a joined component value by a given index. + /// + /// # Safety + /// + /// * A call to `get` must be preceded by a check if `id` is part of + /// `Self::Mask`. + /// * The use of the mutable reference returned from this method must end + /// before subsequent calls with the same `id`. + unsafe fn get(value: &Self::Value, id: Index) -> Self::Type; + + /// If this `LendJoin` typically returns all indices in the mask, then + /// iterating over only it or combined with other joins that are also + /// dangerous will cause the `JoinLendIter` to go through all indices which + /// is usually not what is wanted and will kill performance. + #[inline] + fn is_unconstrained() -> bool { + false + } } -/// `JoinParIter` is a `ParallelIterator` over a group of `Storages`. +/// `JoinParIter` is a `ParallelIterator` over a group of storages. #[must_use] pub struct JoinParIter(J); impl ParallelIterator for JoinParIter where - J: Join + Send, + J: ParJoin + Send, J::Mask: Send + Sync, J::Type: Send, J::Value: Send, @@ -53,12 +93,11 @@ where where C: UnindexedConsumer, { + // SAFETY: `keys` and `values` are not exposed outside this module and + // we only use `values` for calling `ParJoin::get`. let (keys, values) = unsafe { self.0.open() }; // Create a bit producer which splits on up to three levels let producer = BitProducer((&keys).iter(), 3); - // HACK: use `UnsafeCell` to share `values` between threads; - // this is the unspecified behavior referred to above. - let values = UnsafeCell::new(values); bridge_unindexed(JoinProducer::::new(producer, &values), consumer) } @@ -66,47 +105,30 @@ where struct JoinProducer<'a, J> where - J: Join + Send, + J: ParJoin + Send, J::Mask: Send + Sync + 'a, J::Type: Send, J::Value: Send + 'a, { keys: BitProducer<'a, J::Mask>, - values: &'a UnsafeCell, + values: &'a J::Value, } impl<'a, J> JoinProducer<'a, J> where - J: Join + Send, + J: ParJoin + Send, J::Type: Send, J::Value: 'a + Send, J::Mask: 'a + Send + Sync, { - fn new(keys: BitProducer<'a, J::Mask>, values: &'a UnsafeCell) -> Self { + fn new(keys: BitProducer<'a, J::Mask>, values: &'a J::Value) -> Self { JoinProducer { keys, values } } } -// SAFETY: `Send` is safe to implement if all components of `Self` are logically -// `Send`. `keys` already has `Send` implemented, thus no reasoning is required. -// `values` is a reference to an `UnsafeCell` wrapping `J::Value`; -// `J::Value` is constrained to implement `Send`. -// `UnsafeCell` provides interior mutability, but the specification of it allows -// sharing as long as access does not happen simultaneously; this makes it -// generally safe to `Send`, but we are accessing it simultaneously, which is -// technically not allowed. 
Also see https://github.com/slide-rs/specs/issues/220 -unsafe impl<'a, J> Send for JoinProducer<'a, J> -where - J: Join + Send, - J::Type: Send, - J::Value: 'a + Send, - J::Mask: 'a + Send + Sync, -{ -} - impl<'a, J> UnindexedProducer for JoinProducer<'a, J> where - J: Join + Send, + J: ParJoin + Send, J::Type: Send, J::Value: 'a + Send, J::Mask: 'a + Send + Sync, @@ -127,14 +149,10 @@ where F: Folder, { let JoinProducer { values, keys, .. } = self; - let iter = keys.0.map(|idx| unsafe { - // This unsafe block should be safe if the `J::get` - // can be safely called from different threads with distinct indices. - - // The indices here are guaranteed to be distinct because of the fact - // that the bit set is split. - J::get(&mut *values.get(), idx) - }); + // SAFETY: `idx` is obtained from the `Mask` returned by + // `ParJoin::open`. The indices here are guaranteed to be distinct + // because of the fact that the bit set is split. + let iter = keys.0.map(|idx| unsafe { J::get(values, idx) }); folder.consume_iter(iter) } diff --git a/src/lib.rs b/src/lib.rs index 65a876921..036aed845 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -227,7 +227,7 @@ pub use specs_derive::{Component, ConvertSaveload}; pub use crate::join::ParJoin; pub use crate::{ changeset::ChangeSet, - join::Join, + join::{Join, LendJoin}, storage::{ DefaultVecStorage, DenseVecStorage, FlaggedStorage, HashMapStorage, NullStorage, ReadStorage, Storage, Tracked, VecStorage, WriteStorage, diff --git a/src/prelude.rs b/src/prelude.rs index 8865fec86..cbbf871c8 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -2,9 +2,9 @@ //! //! Contains all of the most common traits, structures, -pub use crate::join::Join; #[cfg(feature = "parallel")] pub use crate::join::ParJoin; +pub use crate::join::{Join, LendJoin}; pub use hibitset::BitSet; pub use shred::{ Accessor, Dispatcher, DispatcherBuilder, Read, ReadExpect, Resource, ResourceId, RunNow, diff --git a/src/storage/mod.rs b/src/storage/mod.rs index e4f54be89..20d08d66d 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -467,7 +467,6 @@ where type Type = AccessMutReturn<'a, T>; type Value = &'a mut T::Storage; - // SAFETY: No unsafe code and no invariants to fulfill. unsafe fn open(self) -> (Self::Mask, Self::Value) { self.data.open_mut() } From cc97588e8dee83d2b29b59d4ef10914ee691f171 Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 16 Aug 2022 22:24:00 -0400 Subject: [PATCH 07/47] Update Join/ParJoin/Unprotected implementations to match changes in these traits and add LendJoin implementations. Compiles again!!! * Several Join implementors where commented out (marked with `D-TODO`) so that I can update them in a separate batch. Want to make sure the changes were working first. * Remove `where Self: 'next'` bound from `LendJoin::Type<'next>'` since this was causing issues and an unnecessary bound. * Fix several other errors related to usage of `LendJoin`'s GAT. * Fix other misc errors from the last few commits * `deny(unsafe_op_in_unsafe_fn)` now covers the whole crate. * Add safety comments to unsafe code used in `Generation` methods. Still need to: * Implement `SharedGetAccessMutStorage` for relevant storages. * Update commented out types that implement `Join`. * Update some safety comments. 
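
As background for the `deny(unsafe_op_in_unsafe_fn)` bullet above, here is a
minimal standalone sketch of what the lint enforces (illustrative only;
`first_byte` and `main` are made-up names, not code from this series): with
the lint enabled, the body of an `unsafe fn` is no longer an implicit
`unsafe` block, so every unsafe operation must be wrapped explicitly and can
carry its own `// SAFETY:` justification, which is the pattern the hunks
below follow.

```
#![deny(unsafe_op_in_unsafe_fn)]

/// Illustrative example only, not part of the specs crate.
///
/// # Safety
///
/// `ptr` must be non-null, aligned, and valid for a one-byte read.
unsafe fn first_byte(ptr: *const u8) -> u8 {
    // SAFETY: the caller guarantees `ptr` is valid for a one-byte read.
    unsafe { *ptr }
}

fn main() {
    let x = 7u8;
    // SAFETY: `&x` is a valid, aligned pointer to a live `u8`.
    assert_eq!(unsafe { first_byte(&x) }, 7);
}
```

Without the inner `unsafe { ... }` block this fails to compile under the
lint, which is why the implementations updated here gain per-call `unsafe`
blocks, each with its own SAFETY comment.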
--- src/bitset.rs | 46 ++++++- src/changeset.rs | 8 +- src/join/lend_join.rs | 16 ++- src/join/maybe.rs | 16 ++- src/join/mod.rs | 37 ++--- src/join/par_join.rs | 25 ++-- src/lib.rs | 5 +- src/prelude.rs | 2 +- src/storage/flagged.rs | 33 ++++- src/storage/mod.rs | 290 +++++++++++++++++++++++++++++++--------- src/storage/storages.rs | 110 +++++++++++++-- src/storage/track.rs | 4 +- src/world/entity.rs | 64 ++++++++- 13 files changed, 519 insertions(+), 137 deletions(-) diff --git a/src/bitset.rs b/src/bitset.rs index cbad9ed32..b18b045a3 100644 --- a/src/bitset.rs +++ b/src/bitset.rs @@ -2,39 +2,75 @@ //! //! Normally used for `Join`s and filtering entities. -#![cfg_attr(rustfmt, rustfmt_skip)] +// TODO: rustfmt bug (probably fixed in next rust release) +// #![cfg_attr(rustfmt, rustfmt::skip)] use hibitset::{AtomicBitSet, BitSet, BitSetAnd, BitSetLike, BitSetNot, BitSetOr, BitSetXor}; use crate::join::Join; +#[nougat::gat(Type)] +use crate::join::LendJoin; #[cfg(feature = "parallel")] use crate::join::ParJoin; use crate::world::Index; macro_rules! define_bit_join { ( impl < ( $( $lifetime:tt )* ) ( $( $arg:ident ),* ) > for $bitset:ty ) => { - impl<$( $lifetime, )* $( $arg ),*> Join for $bitset + // SAFETY: `get` just returns the provided `id` (`Self::Value` is `()` + // and corresponds with any mask instance). + #[nougat::gat] + unsafe impl<$( $lifetime, )* $( $arg ),*> LendJoin for $bitset + where $( $arg: BitSetLike ),* + { + type Type<'next> = Index; + type Value = (); + type Mask = $bitset; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (self, ()) + } + + unsafe fn get(_: &mut Self::Value, id: Index) -> Self::Type<'_> { + id + } + } + // SAFETY: `get` just returns the provided `id` (`Self::Value` is `()` + // and corresponds with any mask instance). + unsafe impl<$( $lifetime, )* $( $arg ),*> Join for $bitset where $( $arg: BitSetLike ),* { type Type = Index; type Value = (); type Mask = $bitset; - // SAFETY: This just moves a `BitSet`; invariants of `Join` are fulfilled, since `Self::Value` cannot be mutated. unsafe fn open(self) -> (Self::Mask, Self::Value) { (self, ()) } - // SAFETY: No unsafe code and no invariants to meet. unsafe fn get(_: &mut Self::Value, id: Index) -> Self::Type { id } } + // SAFETY: `get` is safe to call concurrently and just returns the + // provided `id` (`Self::Value` is `()` and corresponds with any mask + // instance). #[cfg(feature = "parallel")] unsafe impl<$( $lifetime, )* $( $arg ),*> ParJoin for $bitset where $( $arg: BitSetLike ),* - { } + { + type Type = Index; + type Value = (); + type Mask = $bitset; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (self, ()) + } + + unsafe fn get(_: &Self::Value, id: Index) -> Self::Type { + id + } + } } } diff --git a/src/changeset.rs b/src/changeset.rs index 5a5098055..732627d41 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -1,6 +1,3 @@ -// TODO: promote to the whole crate -#![deny(unsafe_op_in_unsafe_fn)] - //! Provides a changeset that can be collected from an iterator. use std::{iter::FromIterator, ops::AddAssign}; @@ -161,6 +158,11 @@ impl Join for ChangeSet { } unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + // NOTE: Following the safety requirements of `Join::get`, users can get + // this to panic by calling `get` dropping the returned value and + // calling `get` with the same `id`. However, such a panic isn't + // unsound. Also, the current `JoinIter` implementation will never do + // this. 
// SAFETY: S-TODO unsafe { value.remove(id) } } diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index 5b1ce2e7c..0b25600e2 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -1,4 +1,9 @@ -/// Like the `Join` trait except this is similar to a `LendingIterator` in that +use super::MaybeJoin; +use hibitset::{BitIter, BitSetLike}; + +use crate::world::{Entities, Entity, Index}; + +/// Like the `Join` trait except this is similar to a lending iterator in that /// only one item can be accessed at once. /// /// # Safety @@ -15,9 +20,7 @@ pub unsafe trait LendJoin { /// This type is using macro magic to emulate GATs on stable. So to refer to /// it you need to use the [`LendJoinType<'next, J>`](LendJoinType) type /// alias. - type Type<'next> - where - Self: 'next; + type Type<'next>; /// Type of joined storages. type Value; /// Type of joined bit mask. @@ -164,7 +167,7 @@ impl JoinLendIter { /// Can be used to iterate with this pattern: /// /// `while let Some(components) = join_lending_iter.next() {` - fn next(&mut self) -> Option> { + pub fn next(&mut self) -> Option> { // SAFETY: since `idx` is yielded from `keys` (the mask), it is necessarily a // part of it. Thus, requirements are fulfilled for calling `get`. self.keys @@ -172,7 +175,8 @@ impl JoinLendIter { .map(|idx| unsafe { J::get(&mut self.values, idx) }) } - fn for_each(mut self, mut f: impl FnMut(LendJoinType<'_, J>)) { + /// Calls a closure on each entity in the join. + pub fn for_each(mut self, mut f: impl FnMut(LendJoinType<'_, J>)) { self.keys.for_each(|idx| { // SAFETY: since `idx` is yielded from `keys` (the mask), it is // necessarily a part of it. Thus, requirements are fulfilled for diff --git a/src/join/maybe.rs b/src/join/maybe.rs index d9bc654e9..ede41e4f2 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -1,4 +1,9 @@ -use super::{Join, LendJoin, ParJoin}; +#[nougat::gat(Type)] +use super::LendJoin; +use super::{Join, ParJoin}; +use hibitset::{BitSetAll, BitSetLike}; + +use crate::world::Index; /// Returns a structure that implements `Join`/`LendJoin`/`MaybeJoin` if the /// contained `T` does and that yields all indices, returning `None` for all @@ -10,16 +15,17 @@ use super::{Join, LendJoin, ParJoin}; /// iterate over every single index of the bitset. If you want a join with /// all `MaybeJoin`s, add an `EntitiesRes` to the join as well to bound the /// join to all entities that are alive. -pub struct MaybeJoin(pub J); +pub struct MaybeJoin(pub J); // SAFETY: We return a mask containing all items, but check the original mask in // the `get` implementation. -unsafe impl LendJoin for MaybeJoin +#[nougat::gat] +unsafe impl LendJoin for MaybeJoin where T: LendJoin, { type Mask = BitSetAll; - type Type = Option<::Type>; + type Type<'next> = Option<::Type<'next>>; type Value = (::Mask, ::Value); unsafe fn open(self) -> (Self::Mask, Self::Value) { @@ -30,7 +36,7 @@ where (BitSetAll, (mask, value)) } - unsafe fn get((mask, value): &mut Self::Value, id: Index) -> Self::Type { + unsafe fn get((mask, value): &mut Self::Value, id: Index) -> Self::Type<'_> { if mask.contains(id) { // SAFETY: The mask was just checked for `id`. Some(unsafe { ::get(value, id) }) diff --git a/src/join/mod.rs b/src/join/mod.rs index 94ef8064b..218a56917 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -1,13 +1,10 @@ //! Joining of components for iteration over entities with specific components. 
-// TODO: promote to the whole crate -#![deny(unsafe_op_in_unsafe_fn)] - -use hibitset::{BitIter, BitSetAll, BitSetAnd, BitSetLike}; +use hibitset::{BitIter, BitSetLike}; use shred::{Fetch, FetchMut, Read, ReadExpect, Resource, Write, WriteExpect}; use std::ops::{Deref, DerefMut}; -use crate::world::{Entities, Entity, Index}; +use crate::world::Index; mod bit_and; mod lend_join; @@ -16,7 +13,9 @@ mod maybe; mod par_join; pub use bit_and::BitAnd; -pub use lend_join::{JoinLendIter, LendJoin, LendJoinType}; +#[nougat::gat(Type)] +pub use lend_join::LendJoin; +pub use lend_join::{JoinLendIter, LendJoinType}; pub use maybe::MaybeJoin; #[cfg(feature = "parallel")] pub use par_join::{JoinParIter, ParJoin}; @@ -130,8 +129,8 @@ pub unsafe trait Join { /// /// * A call to `get` must be preceded by a check if `id` is part of /// `Self::Mask`. - /// * The use of the mutable reference returned from this method must end - /// before subsequent calls with the same `id`. + /// * The value returned from this method must be dropped before subsequent + /// calls with the same `id`. (S-TODO update callers to match edit) unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type; /// If this `Join` typically returns all indices in the mask, then iterating @@ -193,11 +192,12 @@ macro_rules! define_open { // SAFETY: The returned mask in `open` is the intersection of the masks // from each type in this tuple. So if an `id` is present in the // combined mask, it will be safe to retrieve the corresponding items. + #[nougat::gat] unsafe impl<$($from,)*> LendJoin for ($($from),*,) where $($from: LendJoin),*, ($(<$from as LendJoin>::Mask,)*): BitAnd, { - type Type = ($($from::Type),*,); + type Type<'next> = ($(<$from as LendJoin>::Type<'next>),*,); type Value = ($($from::Value),*,); type Mask = <($($from::Mask,)*) as BitAnd>::Value; @@ -215,7 +215,7 @@ macro_rules! define_open { } #[allow(non_snake_case)] - unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { + unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type<'_> { let &mut ($(ref mut $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked // the mask, which only has a key that exists in all of the @@ -303,7 +303,7 @@ macro_rules! define_open { #[allow(non_snake_case)] unsafe fn get(v: &Self::Value, i: Index) -> Self::Type { - let &mut ($(ref mut $from,)*) = v; + let &($(ref $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked // the mask, which only has a key that exists in all of the // storages. @@ -317,6 +317,7 @@ macro_rules! define_open { unconstrained } } + } } define_open! {A} @@ -351,12 +352,13 @@ macro_rules! immutable_resource_join { $( // SAFETY: Since `T` implements `LendJoin` it is safe to deref and defer // to its implementation. + #[nougat::gat] unsafe impl<'a, 'b, T> LendJoin for &'a $ty where &'a T: LendJoin, T: Resource, { - type Type = <&'a T as LendJoin>::Type; + type Type<'next> = <&'a T as LendJoin>::Type<'next>; type Value = <&'a T as LendJoin>::Value; type Mask = <&'a T as LendJoin>::Mask; @@ -367,7 +369,7 @@ macro_rules! immutable_resource_join { unsafe { self.deref().open() } } - unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { + unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type<'_> { // SAFETY: The mask of `Self` and `T` are identical, thus a // check to `Self`'s mask (which is required) is equal to a // check of `T`'s mask, which makes `get` safe to call. @@ -412,7 +414,7 @@ macro_rules! 
immutable_resource_join { } // SAFETY: Since `T` implements `ParJoin` it is safe to deref and defer to - // its implementation. S-TODO we can rely on errors if $ty is not sync? + // its implementation. S-TODO we can rely on errors if $ty is not sync? #[cfg(feature = "parallel")] unsafe impl<'a, 'b, T> ParJoin for &'a $ty where @@ -451,12 +453,13 @@ macro_rules! mutable_resource_join { $( // SAFETY: Since `T` implements `LendJoin` it is safe to deref and defer // to its implementation. + #[nougat::gat] unsafe impl<'a, 'b, T> LendJoin for &'a mut $ty where &'a mut T: LendJoin, T: Resource, { - type Type = <&'a mut T as LendJoin>::Type; + type Type<'next> = <&'a mut T as LendJoin>::Type<'next>; type Value = <&'a mut T as LendJoin>::Value; type Mask = <&'a mut T as LendJoin>::Mask; @@ -467,7 +470,7 @@ macro_rules! mutable_resource_join { unsafe { self.deref_mut().open() } } - unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { + unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type<'_> { // SAFETY: The mask of `Self` and `T` are identical, thus a check to // `Self`'s mask (which is required) is equal to a check of `T`'s // mask, which makes `get_mut` safe to call. @@ -512,7 +515,7 @@ macro_rules! mutable_resource_join { } // SAFETY: Since `T` implements `ParJoin` it is safe to deref and defer - // its implementation. S-TODO we can rely on errors if $ty is not sync? + // its implementation. S-TODO we can rely on errors if $ty is not sync? #[cfg(feature = "parallel")] unsafe impl<'a, 'b, T> ParJoin for &'a mut $ty where diff --git a/src/join/par_join.rs b/src/join/par_join.rs index e996eaf0f..f641f3d83 100644 --- a/src/join/par_join.rs +++ b/src/join/par_join.rs @@ -1,25 +1,20 @@ -use std::cell::UnsafeCell; - use hibitset::{BitProducer, BitSetLike}; - -use crate::join::Join; use rayon::iter::{ plumbing::{bridge_unindexed, Folder, UnindexedConsumer, UnindexedProducer}, ParallelIterator, }; +use crate::world::Index; + /// The purpose of the `ParJoin` trait is to provide a way /// to access multiple storages in parallel at the same time with /// the merged bit set. /// /// # Safety /// -/// The implementation of `ParallelIterator` for `ParJoin` makes multiple -/// assumptions on the structure of `Self`. In particular, `ParJoin::get` must -/// be callable from multiple threads, simultaneously, without creating mutable -/// references not exclusively associated with `id`. +/// `ParJoin::get` must be callable from multiple threads, simultaneously. /// -/// The `Self::Mask` value returned with the `Self::Value` must correspond such +/// The Self::Mask` value returned with the `Self::Value` must correspond such /// that it is safe to retrieve items from `Self::Value` whose presence is /// indicated in the mask. pub unsafe trait ParJoin { @@ -62,8 +57,8 @@ pub unsafe trait ParJoin { /// /// * A call to `get` must be preceded by a check if `id` is part of /// `Self::Mask`. - /// * The use of the mutable reference returned from this method must end - /// before subsequent calls with the same `id`. + /// * The value returned from this method must be dropped before subsequent + /// calls with the same `id`. 
(S-TODO update callers to match edit) unsafe fn get(value: &Self::Value, id: Index) -> Self::Type; /// If this `LendJoin` typically returns all indices in the mask, then @@ -85,7 +80,7 @@ where J: ParJoin + Send, J::Mask: Send + Sync, J::Type: Send, - J::Value: Send, + J::Value: Send + Sync, { type Item = J::Type; @@ -108,7 +103,7 @@ where J: ParJoin + Send, J::Mask: Send + Sync + 'a, J::Type: Send, - J::Value: Send + 'a, + J::Value: Send + Sync + 'a, { keys: BitProducer<'a, J::Mask>, values: &'a J::Value, @@ -118,7 +113,7 @@ impl<'a, J> JoinProducer<'a, J> where J: ParJoin + Send, J::Type: Send, - J::Value: 'a + Send, + J::Value: 'a + Send + Sync, J::Mask: 'a + Send + Sync, { fn new(keys: BitProducer<'a, J::Mask>, values: &'a J::Value) -> Self { @@ -130,7 +125,7 @@ impl<'a, J> UnindexedProducer for JoinProducer<'a, J> where J: ParJoin + Send, J::Type: Send, - J::Value: 'a + Send, + J::Value: 'a + Send + Sync, J::Mask: 'a + Send + Sync, { type Item = J::Type; diff --git a/src/lib.rs b/src/lib.rs index 036aed845..4f66cc624 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,5 @@ #![warn(missing_docs)] +#![deny(unsafe_op_in_unsafe_fn)] #![deny(clippy::disallowed_types)] #![cfg_attr( feature = "nightly", @@ -202,7 +203,7 @@ pub extern crate uuid; pub mod saveload; mod bitset; -pub mod changeset; +// D-TODO pub mod changeset; pub mod error; pub mod join; pub mod prelude; @@ -226,7 +227,7 @@ pub use specs_derive::{Component, ConvertSaveload}; #[cfg(feature = "parallel")] pub use crate::join::ParJoin; pub use crate::{ - changeset::ChangeSet, + // D-TODO changeset::ChangeSet, join::{Join, LendJoin}, storage::{ DefaultVecStorage, DenseVecStorage, FlaggedStorage, HashMapStorage, NullStorage, diff --git a/src/prelude.rs b/src/prelude.rs index cbbf871c8..4b0d4fee3 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -18,7 +18,7 @@ pub use rayon::iter::ParallelIterator; pub use shred::AsyncDispatcher; pub use crate::{ - changeset::ChangeSet, + // D-TODO changeset::ChangeSet, storage::{ ComponentEvent, DefaultVecStorage, DenseVecStorage, FlaggedStorage, HashMapStorage, NullStorage, ReadStorage, Storage, Tracked, VecStorage, WriteStorage, diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index 56014cde3..bbe577339 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -218,8 +218,35 @@ impl> UnprotectedStorage for FlaggedSt unsafe { self.storage.get(id) } } + unsafe fn get_mut(&mut self, id: Index) -> &mut C { + if self.emit_event() { + self.channel + .get_mut() + .single_write(ComponentEvent::Modified(id)); + } + // SAFETY: Requirements passed to caller. + unsafe { self.storage.get_mut(id) } + } + #[cfg(feature = "nightly")] - unsafe fn get_mut(&self, id: Index) -> >::AccessMut<'_> { + unsafe fn get_access_mut(&mut self, id: Index) -> >::AccessMut<'_> { + if self.emit_event() { + self.channel + .get_mut() + .single_write(ComponentEvent::Modified(id)); + } + // SAFETY: Requirements passed to caller. + unsafe { self.storage.get_access_mut(id) } + } + + #[cfg(not(feature = "nightly"))] + unsafe fn get_access_mut(&mut self, id: Index) -> &mut C { + // SAFETY: Requirements passed to caller. 
+ unsafe { self.get_mut(id) } + } + + /*#[cfg(feature = "nightly")] + unsafe fn shared_get_access_mut(&self, id: Index) -> >::AccessMut<'_> { if self.emit_event() { let channel_ptr = self.channel.get(); // SAFETY: Caller required to ensure references returned from other @@ -231,7 +258,7 @@ impl> UnprotectedStorage for FlaggedSt } #[cfg(not(feature = "nightly"))] - unsafe fn get_mut(&self, id: Index) -> &mut C { + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut C { if self.emit_event() { let channel_ptr = self.channel.get(); // SAFETY: Caller required to ensure references returned from other @@ -240,7 +267,7 @@ impl> UnprotectedStorage for FlaggedSt } // SAFETY: Requirements passed to caller. unsafe { self.storage.get_mut(id) } - } + }*/ unsafe fn insert(&mut self, id: Index, comp: C) { if self.emit_event() { diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 20d08d66d..089566a85 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -1,19 +1,16 @@ -// TODO: promote to the whole crate -#![deny(unsafe_op_in_unsafe_fn)] - //! Component storage types, implementations for component joins, etc. #[cfg(feature = "nightly")] pub use self::deref_flagged::{DerefFlaggedStorage, FlaggedAccessMut}; pub use self::{ data::{ReadStorage, WriteStorage}, - entry::{Entries, OccupiedEntry, StorageEntry, VacantEntry}, + // D-TODO entry::{Entries, OccupiedEntry, StorageEntry, VacantEntry}, flagged::FlaggedStorage, generic::{GenericReadStorage, GenericWriteStorage}, - restrict::{ - ImmutableParallelRestriction, MutableParallelRestriction, PairedStorage, RestrictedStorage, - SequentialRestriction, - }, + // D-TODO restrict::{ + // ImmutableParallelRestriction, MutableParallelRestriction, PairedStorage, RestrictedStorage, + // SequentialRestriction, + //}, storages::{ BTreeStorage, DefaultVecStorage, DenseVecStorage, HashMapStorage, NullStorage, VecStorage, }, @@ -31,25 +28,27 @@ use std::{ use hibitset::{BitSet, BitSetLike, BitSetNot}; use shred::{CastFrom, Fetch}; +#[nougat::gat(Type)] +use crate::join::LendJoin; #[cfg(feature = "parallel")] use crate::join::ParJoin; use crate::{ error::{Error, WrongGeneration}, join::Join, - world::{Component, EntitiesRes, Entity, Generation, Index}, + world::{Component, EntitiesRes, Entity, Index}, }; -use self::drain::Drain; +// D-TODO use self::drain::Drain; use self::sync_unsafe_cell::SyncUnsafeCell; mod data; #[cfg(feature = "nightly")] mod deref_flagged; -mod drain; -mod entry; +// D-TODO mod drain; +// D-TODO mod entry; mod flagged; mod generic; -mod restrict; +// D-TODO mod restrict; mod storages; mod sync_unsafe_cell; #[cfg(test)] @@ -65,26 +64,49 @@ type AccessMutReturn<'a, T> = &'a mut T; /// that do not have a particular component type. pub struct AntiStorage<'a>(pub &'a BitSet); -impl<'a> Join for AntiStorage<'a> { +// SAFETY: Items are just `()` and it is always safe to retrieve them regardless +// of the mask and value returned by `open`. +#[nougat::gat] +unsafe impl<'a> LendJoin for AntiStorage<'a> { type Mask = BitSetNot<&'a BitSet>; - type Type = (); + type Type<'next> = (); type Value = (); - // SAFETY: No invariants to meet and no unsafe code. unsafe fn open(self) -> (Self::Mask, ()) { (BitSetNot(self.0), ()) } - // SAFETY: No invariants to meet and no unsafe code. unsafe fn get(_: &mut (), _: Index) {} } -// SAFETY: Since `get` does not do any memory access, this is safe to implement. 
-unsafe impl<'a> DistinctStorage for AntiStorage<'a> {}
+// SAFETY: Items are just `()` and it is always safe to retrieve them regardless
+// of the mask and value returned by `open`.
+unsafe impl<'a> Join for AntiStorage<'a> {
+    type Mask = BitSetNot<&'a BitSet>;
+    type Type = ();
+    type Value = ();
+
+    unsafe fn open(self) -> (Self::Mask, ()) {
+        (BitSetNot(self.0), ())
+    }
+
+    unsafe fn get(_: &mut (), _: Index) {}
+}

-// SAFETY: Since `get` does not do any memory access, this is safe to implement.
+// SAFETY: Since `get` does not do anything, it is safe to call concurrently.
+// Items are just `()` and it is always safe to retrieve them regardless of the
+// mask and value returned by `open`.
 #[cfg(feature = "parallel")]
-unsafe impl<'a> ParJoin for AntiStorage<'a> {}
+unsafe impl<'a> ParJoin for AntiStorage<'a> {
+    type Mask = BitSetNot<&'a BitSet>;
+    type Type = ();
+    type Value = ();
+
+    unsafe fn open(self) -> (Self::Mask, ()) {
+        (BitSetNot(self.0), ())
+    }
+
+    unsafe fn get(_: &(), _: Index) {}
+}

 /// A dynamic storage.
 pub trait AnyStorage {
@@ -392,13 +414,13 @@ where
         self.data.clear();
     }

-    /// Creates a draining storage wrapper which can be `.join`ed
-    /// to get a draining iterator.
-    pub fn drain(&mut self) -> Drain<T> {
-        Drain {
-            data: &mut self.data,
-        }
-    }
+    // /// Creates a draining storage wrapper which can be `.join`ed
+    // /// to get a draining iterator.
+    // D-TODO pub fn drain(&mut self) -> Drain<T> {
+    //    Drain {
+    //        data: &mut self.data,
+    //    }
+    //}
 }

 impl<'a, T, D: Clone> Clone for Storage<'a, T, D> {
@@ -407,20 +429,29 @@ impl<'a, T, D: Clone> Clone for Storage<'a, T, D> {
     }
 }

-// SAFETY: This is safe, since `T::Storage` is `DistinctStorage` and `Join::get`
-// only accesses the storage and nothing else.
-unsafe impl<'a, T: Component, D> DistinctStorage for Storage<'a, T, D> where
-    T::Storage: DistinctStorage
+impl<'a, 'e, T, D> Not for &'a Storage<'e, T, D>
+where
+    T: Component,
+    D: Deref<Target = MaskedStorage<T>>,
 {
+    type Output = AntiStorage<'a>;
+
+    fn not(self) -> Self::Output {
+        AntiStorage(&self.data.mask)
+    }
 }

-impl<'a, 'e, T, D> Join for &'a Storage<'e, T, D>
+// SAFETY: The mask and unprotected storage contained in `MaskedStorage`
+// correspond and `open` returns references to them from the same
+// `MaskedStorage` instance.
+#[nougat::gat]
+unsafe impl<'a, 'e, T, D> LendJoin for &'a Storage<'e, T, D>
 where
     T: Component,
     D: Deref<Target = MaskedStorage<T>>,
 {
     type Mask = &'a BitSet;
-    type Type = &'a T;
+    type Type<'next> = &'a T;
     type Value = &'a T::Storage;

     unsafe fn open(self) -> (Self::Mask, Self::Value) {
@@ -428,27 +459,40 @@ where
     }

     unsafe fn get(v: &mut Self::Value, i: Index) -> &'a T {
-        // S-TODO probably more to add to this comment
         // SAFETY: Since we require that the mask was checked, an element for
         // `i` must have been inserted without being removed.
         unsafe { v.get(i) }
     }
 }

-impl<'a, 'e, T, D> Not for &'a Storage<'e, T, D>
+// SAFETY: The mask and unprotected storage contained in `MaskedStorage`
+// correspond and `open` returns references to them from the same
+// `MaskedStorage` instance.
+unsafe impl<'a, 'e, T, D> Join for &'a Storage<'e, T, D> where T: Component, D: Deref>, { - type Output = AntiStorage<'a>; + type Mask = &'a BitSet; + type Type = &'a T; + type Value = &'a T::Storage; - fn not(self) -> Self::Output { - AntiStorage(&self.data.mask) + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (&self.data.mask, &self.data.inner) + } + + unsafe fn get(v: &mut Self::Value, i: Index) -> &'a T { + // SAFETY: Since we require that the mask was checked, an element for + // `i` must have been inserted without being removed. + unsafe { v.get(i) } } } -// SAFETY: This is always safe because immutable access can in no case cause -// memory issues, even if access to common memory occurs. +// SAFETY: It is safe to call `::get` from +// multiple threads at once since `T::Storage: Sync`. +// +// The mask and unprotected storage contained in `MaskedStorage` correspond and +// `open` returns references to them from the same `MaskedStorage` instance. #[cfg(feature = "parallel")] unsafe impl<'a, 'e, T, D> ParJoin for &'a Storage<'e, T, D> where @@ -456,40 +500,161 @@ where D: Deref>, T::Storage: Sync, { + type Mask = &'a BitSet; + type Type = &'a T; + type Value = &'a T::Storage; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (&self.data.mask, &self.data.inner) + } + + unsafe fn get(v: &Self::Value, i: Index) -> &'a T { + // SAFETY: Since we require that the mask was checked, an element for + // `i` must have been inserted without being removed. + unsafe { v.get(i) } + } } -impl<'a, 'e, T, D> Join for &'a mut Storage<'e, T, D> +// SAFETY: The mask and unprotected storage contained in `MaskedStorage` +// correspond and `open` returns references to them from the same +// `MaskedStorage` instance. +#[nougat::gat] +unsafe impl<'a, 'e, T, D> LendJoin for &'a mut Storage<'e, T, D> where T: Component, D: DerefMut>, { type Mask = &'a BitSet; - type Type = AccessMutReturn<'a, T>; + type Type<'next> = AccessMutReturn<'next, T>; type Value = &'a mut T::Storage; unsafe fn open(self) -> (Self::Mask, Self::Value) { self.data.open_mut() } - // TODO: audit unsafe - unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { - // This is horribly unsafe. Unfortunately, Rust doesn't provide a way - // to abstract mutable/immutable state at the moment, so we have to hack - // our way through it. - let value: *mut Self::Value = v as *mut Self::Value; - // SAFETY: S-TODO modify Join trait - unsafe { (*value).get_mut(i) } + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_> { + // SAFETY: Since we require that the mask was checked, an element for + // `id` must have been inserted without being removed. + unsafe { value.get_access_mut(id) } + } +} + +mod shared_get_access_mut_only { + use super::{AccessMutReturn, Component, Index, SharedGetAccessMutStorage}; + + /// This type provides a way to ensure only `shared_get_access_mut` can be + /// called for the lifetime `'a` and that no references previously obtained + /// from the storage exist when it is created. While internally this is a + /// shared reference, constructing it requires an exclusive borrow for the + /// lifetime `'a`. + pub struct SharedGetAccessMutOnly<'a, T: Component>(&'a T::Storage); + + impl<'a, T: Component> SharedGetAccessMutOnly<'a, T> { + pub(super) fn new(storage: &'a mut T::Storage) -> Self { + Self(storage) + } + + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. 
+ /// + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. + /// obtained with the same `id`). + /// + /// Unless `T::Storage` implements `DistinctStorage`, calling this from + /// multiple threads at once is unsound. + pub(super) unsafe fn get(&self, i: Index) -> AccessMutReturn<'a, T> + where + T::Storage: SharedGetAccessMutStorage, + { + // SAFETY: `Self::new` takes an exclusive reference to this storage, + // ensuring there are no extant references to its content at the + // time `self` is created and ensuring that only `self` has access + // to the storage for its lifetime and the lifetime of the produced + // `AccessMutReturn`s (the reference we hold to the storage is not + // exposed outside of this module). + // + // This means we only have to worry about aliasing references being + // produced by calling `shared_get_access_mut`. Ensuring these don't + // alias and the remaining safety requirements are passed on to the + // caller. + unsafe { self.0.shared_get_access_mut(i) } + } + } +} +use shared_get_access_mut_only::SharedGetAccessMutOnly; + +// SAFETY: The mask and unprotected storage contained in `MaskedStorage` +// correspond and `open` returns references to them from the same +// `MaskedStorage` instance (the storage is wrapped in +// `SharedGetAccessMutOnly`). +unsafe impl<'a, 'e, T, D> Join for &'a mut Storage<'e, T, D> +where + T: Component, + D: DerefMut>, + T::Storage: SharedGetAccessMutStorage, +{ + type Mask = &'a BitSet; + type Type = AccessMutReturn<'a, T>; + type Value = SharedGetAccessMutOnly<'a, T>; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let (mask, value) = self.data.open_mut(); + let value = SharedGetAccessMutOnly::new(value); + (mask, value) + } + + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + // SAFETY: + // * Since we require that the mask was checked, an element for + // `id` must have been inserted without being removed. + // * We also require that the caller drop the value returned before + // subsequent calls with the same `id`, so there are no extant + // references that were obtained with the same `id`. + // * Since we have an exclusive reference to `Self::Value`, we know this + // isn't being called from multiple threads at once. + unsafe { value.get(id) } } } -// SAFETY: This is safe because of the `DistinctStorage` guarantees. +// SAFETY: It is safe to call `SharedGetAccessMutOnly<'a, T>::get` from +// multiple threads at once since `T::Storage: DistinctStorage`. +// +// The mask and unprotected storage contained in `MaskedStorage` correspond and +// `open` returns references to them from the same `MaskedStorage` instance (the +// storage is wrapped in `SharedGetAccessMutOnly`). 
#[cfg(feature = "parallel")] unsafe impl<'a, 'e, T, D> ParJoin for &'a mut Storage<'e, T, D> where T: Component, D: DerefMut>, - T::Storage: Sync + DistinctStorage, + T::Storage: Sync + SharedGetAccessMutStorage + DistinctStorage, { + type Mask = &'a BitSet; + type Type = AccessMutReturn<'a, T>; + type Value = SharedGetAccessMutOnly<'a, T>; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let (mask, value) = self.data.open_mut(); + let value = SharedGetAccessMutOnly::new(value); + (mask, value) + } + + unsafe fn get(value: &Self::Value, id: Index) -> Self::Type { + // SAFETY: + // * Since we require that the mask was checked, an element for + // `id` must have been inserted without being removed. + // * We also require that the caller drop the value returned before + // subsequent calls with the same `id`, so there are no extant + // references that were obtained with the same `id`. + // * `T::Storage` implements the unsafe trait `DistinctStorage` so it is + // safe to call this from multiple threads at once. + unsafe { value.get(id) } + } } /// Tries to create a default value, returns an `Err` with the name of the @@ -520,6 +685,12 @@ macro_rules! get_access_mut_docs { ($fn_definition:item) => { /// Gets mutable access to the the data associated with an `Index`. /// + /// Unlike `get_mut` this doesn't necessarily directly return a `&mut` + /// reference (at least with `nightly` feature). This allows storages + /// more flexibility. For example, some flagged storages utilize this to + /// defer generation of mutation events until the user obtains an `&mut` + /// reference out of the returned wrapper type. + /// /// This is unsafe because the external set used to protect this storage is /// absent. /// @@ -570,9 +741,6 @@ pub trait UnprotectedStorage: TryDefault { /// /// A mask should keep track of those states, and an `id` being contained /// in the tracking mask is sufficient to call this method. - /// - /// There must be no extant aliasing mutable reference to this component - /// (i.e. obtained from `get_mut` with the same `id`). unsafe fn get(&self, id: Index) -> &T; /// Gets an exclusive reference to the data associated with an `Index`. @@ -587,11 +755,11 @@ pub trait UnprotectedStorage: TryDefault { /// /// A mask should keep track of those states, and an `id` being contained in /// the tracking mask is sufficient to call this method. - unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_>; + unsafe fn get_mut(&mut self, id: Index) -> &mut T; #[cfg(feature = "nightly")] get_access_mut_docs! { - unsafe fn get_access_mut(&self, id: Index) -> Self::AccessMut<'_>; + unsafe fn get_access_mut(&mut self, id: Index) -> Self::AccessMut<'_>; } #[cfg(not(feature = "nightly"))] @@ -675,15 +843,15 @@ macro_rules! shared_get_access_mut_docs { }; } -trait SharedGetAccessMutStorage: UnprotectedStorage { +trait SharedGetAccessMutStorage: UnprotectedStorage { #[cfg(feature = "nightly")] shared_get_access_mut_docs! { - fn shared_get_access_mut(&self, id: Index) -> Self::AccessMut<'_> {} + unsafe fn shared_get_access_mut(&self, id: Index) -> ::AccessMut<'_>; } #[cfg(not(feature = "nightly"))] shared_get_access_mut_docs! { - fn shared_get_access_mut(&self, id: Index) -> &mut T {} + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T; } } diff --git a/src/storage/storages.rs b/src/storage/storages.rs index 9f588737a..c3ee7307b 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -1,5 +1,8 @@ //! Different types of storages you can use for your components. 
+// S-TODO redo distinct storage safety comments to point to proper method, check +// trait docs too + use core::{marker::PhantomData, mem::MaybeUninit, ptr, ptr::NonNull}; use std::collections::BTreeMap; @@ -49,11 +52,19 @@ impl UnprotectedStorage for BTreeStorage { unsafe { &*ptr } } - unsafe fn get_mut(&self, id: Index) -> &mut T { + unsafe fn get_mut(&mut self, id: Index) -> &mut T { + self.0.get_mut(&id).unwrap().get_mut() + } + + unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { + self.0.get_mut(&id).unwrap().get_mut() + } + + /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { let ptr = self.0[&id].get(); // SAFETY: See `VecStorage` impl. unsafe { &mut *ptr } - } + }*/ unsafe fn insert(&mut self, id: Index, v: T) { self.0.insert(id, SyncUnsafeCell::new(v)); @@ -96,11 +107,19 @@ impl UnprotectedStorage for HashMapStorage { unsafe { &*ptr } } - unsafe fn get_mut(&self, id: Index) -> &mut T { + unsafe fn get_mut(&mut self, id: Index) -> &mut T { + self.0.get_mut(&id).unwrap().get_mut() + } + + unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { + self.0.get_mut(&id).unwrap().get_mut() + } + + /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { let ptr = self.0[&id].get(); // SAFETY: See `VecStorage` impl. unsafe { &mut *ptr } - } + }*/ unsafe fn insert(&mut self, id: Index, v: T) { self.0.insert(id, SyncUnsafeCell::new(v)); @@ -188,6 +207,7 @@ impl UnprotectedStorage for DenseVecStorage { } unsafe fn get(&self, id: Index) -> &T { + // S-TODO recheck these safety notes (and all others in this module) // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY (get_unchecked and assume_init): Caller required to call @@ -201,7 +221,25 @@ impl UnprotectedStorage for DenseVecStorage { unsafe { &*ptr } } - unsafe fn get_mut(&self, id: Index) -> &mut T { + unsafe fn get_mut(&mut self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY (get_unchecked and assume_init): Caller required to call + // `insert` with this `id` (with no following call to `remove` with that + // id or to `clean`). + let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() }; + // SAFETY: Indices retrieved from `data_id` with a valid `id` will + // always correspond to an element in `data`. + unsafe { self.data.get_unchecked_mut(did as usize) }.get_mut() + } + + unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { + // SAFETY: Requirements passed to caller + unsafe { self.get_mut(id) } + } + + /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { + // S-TODO recheck these safety notes // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY (get_unchecked and assume_init): Caller required to call @@ -213,9 +251,10 @@ impl UnprotectedStorage for DenseVecStorage { let ptr = unsafe { self.data.get_unchecked(did as usize) }.get(); // SAFETY: See `VecStorage` impl. unsafe { &mut *ptr } - } + }*/ unsafe fn insert(&mut self, id: Index, v: T) { + // S-TODO recheck these safety notes let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the // allocation below will fail. 
@@ -250,6 +289,7 @@ impl UnprotectedStorage for DenseVecStorage { } unsafe fn remove(&mut self, id: Index) -> T { + // S-TODO recheck these safety notes // NOTE: cast to usize won't overflow since `insert` would have failed // to allocate. // SAFETY (get_unchecked and assume_init): Caller required to have @@ -313,7 +353,8 @@ impl UnprotectedStorage for NullStorage { unsafe { &*NonNull::dangling().as_ptr() } } - unsafe fn get_mut(&self, _: Index) -> &mut T { + unsafe fn get_mut(&mut self, _: Index) -> &mut T { + // S-TODO: can we defer to shared_get_access_mut // SAFETY: Because the caller is required by the safety docs to first // insert a component with this index, this corresponds to an instance // of the ZST we conceptually own. The caller also must manage the @@ -324,6 +365,22 @@ impl UnprotectedStorage for NullStorage { unsafe { &mut *NonNull::dangling().as_ptr() } } + unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { + // SAFETY: Requirements passed to caller. + unsafe { self.get_mut(id) } + } + + /*unsafe fn shared_get_access_mut(&self, _: Index) -> &mut T { + // SAFETY: Because the caller is required by the safety docs to first + // insert a component with this index, this corresponds to an instance + // of the ZST we conceptually own. The caller also must manage the + // aliasing of accesses via get/get_mut. + // + // Self::default asserts that `T` is a ZST which makes generating a + // reference from a dangling pointer not UB. + unsafe { &mut *NonNull::dangling().as_ptr() } + }*/ + unsafe fn insert(&mut self, _: Index, v: T) { // We rely on the caller tracking the presence of the ZST via the mask. // @@ -409,6 +466,9 @@ impl UnprotectedStorage for VecStorage { // SAFETY: Caller required to call `insert` with this `id` (with no // following call to `remove` with that id or to `clean`). let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); + // S-TODO update these since we tweaked safety docs to focus + // requirements on `get_mut`. (also there are the shared_get_mut methods + // to consider, well those replace get_mut) // SAFETY: Caller required to manage aliasing between this and // `get_mut`. let maybe_uninit = unsafe { &*ptr }; @@ -417,7 +477,23 @@ impl UnprotectedStorage for VecStorage { unsafe { maybe_uninit.assume_init_ref() } } - unsafe fn get_mut(&self, id: Index) -> &mut T { + unsafe fn get_mut(&mut self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: Caller required to call `insert` with this `id` (with no + // following call to `remove` with that id or to `clean`). + let maybe_uninit = unsafe { self.0.get_unchecked_mut(id as usize) }.get_mut(); + // SAFETY: Requirement to have `insert`ed this component ensures that it + // will be initialized. + unsafe { maybe_uninit.assume_init_mut() } + } + + unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { + // SAFETY: Requirements passed to caller. + unsafe { self.get_mut(id) } + } + + /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY: Caller required to call `insert` with this `id` (with no @@ -431,7 +507,7 @@ impl UnprotectedStorage for VecStorage { // SAFETY: Requirement to have `insert`ed this component ensures that it // will be initialized. 
unsafe { maybe_uninit.assume_init_mut() } - } + }*/ unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { @@ -540,14 +616,26 @@ where unsafe { &*ptr } } - unsafe fn get_mut(&self, id: Index) -> &mut T { + unsafe fn get_mut(&mut self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: See `VecStorage` impl. + unsafe { self.0.get_unchecked_mut(id as usize) }.get_mut() + } + + unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { + // SAFETY: Requirements passed to caller. + unsafe { self.get_mut(id) } + } + + /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY: See `VecStorage` impl. let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); // SAFETY: See `VecStorage` impl. unsafe { &mut *ptr } - } + }*/ unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { diff --git a/src/storage/track.rs b/src/storage/track.rs index 98c7e0185..58f8450f5 100644 --- a/src/storage/track.rs +++ b/src/storage/track.rs @@ -69,7 +69,7 @@ where /// Returns the event channel for insertions/removals/modifications of this /// storage's components. pub fn channel_mut(&mut self) -> &mut EventChannel { - unsafe { self.open() }.1.channel_mut() + self.data.inner.channel_mut() } /// Starts tracking component events. Note that this reader id should be @@ -89,6 +89,6 @@ where /// not emitted. #[cfg(feature = "storage-event-control")] pub fn set_event_emission(&mut self, emit: bool) { - unsafe { self.open() }.1.set_event_emission(emit); + self.data.inner.set_event_emission(emit); } } diff --git a/src/world/entity.rs b/src/world/entity.rs index 03ea8d388..508c07b62 100644 --- a/src/world/entity.rs +++ b/src/world/entity.rs @@ -7,6 +7,8 @@ use std::{ use hibitset::{AtomicBitSet, BitSet, BitSetOr}; use shred::Read; +#[nougat::gat(Type)] +use crate::join::LendJoin; #[cfg(feature = "parallel")] use crate::join::ParJoin; use crate::{error::WrongGeneration, join::Join, storage::WriteStorage, world::Component}; @@ -317,27 +319,70 @@ impl EntitiesRes { } } -impl<'a> Join for &'a EntitiesRes { +// SAFETY: It is safe to retrieve elements with any `id` regardless of the mask. +#[nougat::gat] +unsafe impl<'a> LendJoin for &'a EntitiesRes { + type Mask = BitSetOr<&'a BitSet, &'a AtomicBitSet>; + type Type<'next> = Entity; + type Value = Self; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (BitSetOr(&self.alloc.alive, &self.alloc.raised), self) + } + + unsafe fn get(v: &mut &'a EntitiesRes, id: Index) -> Entity { + let gen = v + .alloc + .generation(id) + .map(|gen| if gen.is_alive() { gen } else { gen.raised() }) + .unwrap_or_else(Generation::one); + Entity(id, gen) + } +} + +// SAFETY: It is safe to retrieve elements with any `id` regardless of the mask. 
+unsafe impl<'a> Join for &'a EntitiesRes { type Mask = BitSetOr<&'a BitSet, &'a AtomicBitSet>; type Type = Entity; type Value = Self; - unsafe fn open(self) -> (Self::Mask, Self) { + unsafe fn open(self) -> (Self::Mask, Self::Value) { (BitSetOr(&self.alloc.alive, &self.alloc.raised), self) } - unsafe fn get(v: &mut &'a EntitiesRes, idx: Index) -> Entity { + unsafe fn get(v: &mut &'a EntitiesRes, id: Index) -> Entity { let gen = v .alloc - .generation(idx) + .generation(id) .map(|gen| if gen.is_alive() { gen } else { gen.raised() }) .unwrap_or_else(Generation::one); - Entity(idx, gen) + Entity(id, gen) } } +// SAFETY: No unsafe code is used and it is safe to call `get` from multiple +// threads at once. +// +// It is safe to retrieve elements with any `id` regardless of the mask. #[cfg(feature = "parallel")] -unsafe impl<'a> ParJoin for &'a EntitiesRes {} +unsafe impl<'a> ParJoin for &'a EntitiesRes { + type Mask = BitSetOr<&'a BitSet, &'a AtomicBitSet>; + type Type = Entity; + type Value = Self; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (BitSetOr(&self.alloc.alive, &self.alloc.raised), self) + } + + unsafe fn get(v: &&'a EntitiesRes, id: Index) -> Entity { + let gen = v + .alloc + .generation(id) + .map(|gen| if gen.is_alive() { gen } else { gen.raised() }) + .unwrap_or_else(Generation::one); + Entity(id, gen) + } +} /// An entity builder from `EntitiesRes`. Allows building an entity with its /// components if you have mutable access to the component storages. @@ -388,6 +433,7 @@ impl fmt::Debug for Generation { impl Generation { pub(crate) fn one() -> Self { + // SAFETY: `1` is not zero. Generation(unsafe { NonZeroI32::new_unchecked(1) }) } @@ -415,6 +461,10 @@ impl Generation { /// Panics if it is alive. fn raised(self) -> Generation { assert!(!self.is_alive()); + // SAFETY: Since `self` is not alive, `self.id()` will be negative so + // subtracting it from `1` will give us a value `>= 2`. If this + // overflows it will at most wrap to `i32::MIN + 1` (so it will never be + // zero). unsafe { Generation(NonZeroI32::new_unchecked(1 - self.id())) } } } @@ -455,6 +505,8 @@ impl ZeroableGeneration { fn raised(self) -> Generation { assert!(!self.is_alive()); let gen = 1i32.checked_sub(self.id()).expect("generation overflow"); + // SAFETY: Since `self` is not alive, `self.id()` will be negative so + // subtracting it from `1` will give us a value `>= 2`. Generation(unsafe { NonZeroI32::new_unchecked(gen) }) } From 483914bce20283f36e04d5d8661d9bd921df4a6a Mon Sep 17 00:00:00 2001 From: Imbris Date: Fri, 19 Aug 2022 02:02:11 -0400 Subject: [PATCH 08/47] Implement SharedGetAccessMutStorage for applicable storages and update storage safety comments. 
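As background, here is a minimal standalone sketch (not part of this patch) of
the interior-mutability pattern the `shared_get_access_mut` implementations
rely on. The `ToyStorage` type is illustrative only and uses
`std::cell::UnsafeCell` directly instead of the crate's `SyncUnsafeCell`
wrapper (so it is not `Sync`); the point is just that distinct indices address
distinct cells, so exclusive references handed out through `&self` never
overlap as long as callers uphold the documented contract:

    #![deny(unsafe_op_in_unsafe_fn)]

    use std::cell::UnsafeCell;

    struct ToyStorage<T>(Vec<UnsafeCell<T>>);

    impl<T> ToyStorage<T> {
        fn new(items: Vec<T>) -> Self {
            ToyStorage(items.into_iter().map(UnsafeCell::new).collect())
        }

        /// # Safety
        ///
        /// `index` must be in bounds, concurrent calls must use distinct
        /// indices, and no other reference to that element may be alive.
        unsafe fn shared_get_access_mut(&self, index: usize) -> &mut T {
            // SAFETY: each element lives in its own cell, so references to
            // distinct indices never overlap; the remaining requirements are
            // passed to the caller.
            unsafe { &mut *self.0[index].get() }
        }
    }

    fn main() {
        let storage = ToyStorage::new(vec![1, 2, 3]);
        // SAFETY: indices 0 and 2 are distinct and in bounds, and no other
        // references to those elements are alive.
        let (a, b) = unsafe {
            (storage.shared_get_access_mut(0), storage.shared_get_access_mut(2))
        };
        *a += 10;
        *b += 10;
        // SAFETY: `a` is no longer used, so no aliasing reference remains.
        assert_eq!(unsafe { *storage.shared_get_access_mut(0) }, 11);
    }

Wrapping the real storages in `SharedGetAccessMutOnly` (added in the previous
patch) is what guarantees the "no extant references" part of this contract for
the `Join`/`ParJoin` implementations.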
--- src/storage/deref_flagged.rs | 2 +- src/storage/flagged.rs | 71 ++++++----- src/storage/generic.rs | 2 +- src/storage/mod.rs | 6 +- src/storage/storages.rs | 231 ++++++++++++++++++----------------- 5 files changed, 168 insertions(+), 144 deletions(-) diff --git a/src/storage/deref_flagged.rs b/src/storage/deref_flagged.rs index a484749c4..a46c9fd0e 100644 --- a/src/storage/deref_flagged.rs +++ b/src/storage/deref_flagged.rs @@ -72,7 +72,7 @@ impl> UnprotectedStorage for DerefFlag unsafe { self.storage.get(id) } } - unsafe fn get_mut(&self, _id: Index) -> Self::AccessMut<'_> { + unsafe fn get_mut(&mut self, _id: Index) -> Self::AccessMut<'_> { /*let emit = self.emit_event(); FlaggedAccessMut { channel: &mut self.channel, diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index bbe577339..345a84ebb 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -4,7 +4,8 @@ use hibitset::BitSetLike; use crate::{ storage::{ - ComponentEvent, DenseVecStorage, SyncUnsafeCell, Tracked, TryDefault, UnprotectedStorage, + ComponentEvent, DenseVecStorage, SharedGetAccessMutStorage, SyncUnsafeCell, Tracked, + TryDefault, UnprotectedStorage, }, world::{Component, Index}, }; @@ -245,30 +246,6 @@ impl> UnprotectedStorage for FlaggedSt unsafe { self.get_mut(id) } } - /*#[cfg(feature = "nightly")] - unsafe fn shared_get_access_mut(&self, id: Index) -> >::AccessMut<'_> { - if self.emit_event() { - let channel_ptr = self.channel.get(); - // SAFETY: Caller required to ensure references returned from other - // safe methods such as Tracked::channel are no longer alive. - unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); - } - // SAFETY: Requirements passed to caller. - unsafe { self.storage.get_mut(id) } - } - - #[cfg(not(feature = "nightly"))] - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut C { - if self.emit_event() { - let channel_ptr = self.channel.get(); - // SAFETY: Caller required to ensure references returned from other - // safe methods such as Tracked::channel are no longer alive. - unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); - } - // SAFETY: Requirements passed to caller. - unsafe { self.storage.get_mut(id) } - }*/ - unsafe fn insert(&mut self, id: Index, comp: C) { if self.emit_event() { self.channel @@ -290,14 +267,48 @@ impl> UnprotectedStorage for FlaggedSt } } +impl> SharedGetAccessMutStorage + for FlaggedStorage +{ + #[cfg(feature = "nightly")] + unsafe fn shared_get_access_mut( + &self, + id: Index, + ) -> >::AccessMut<'_> { + if self.emit_event() { + let channel_ptr = self.channel.get(); + // SAFETY: Caller required to ensure references returned from other + // safe methods such as Tracked::channel are no longer alive. This + // storage is not marked as `DistincStorage`. + unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); + } + // SAFETY: Requirements passed to caller. + unsafe { self.storage.shared_get_access_mut(id) } + } + + #[cfg(not(feature = "nightly"))] + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut C { + if self.emit_event() { + let channel_ptr = self.channel.get(); + // SAFETY: Caller required to ensure references returned from other + // safe methods such as Tracked::channel are no longer alive. This + // storage is not marked as `DistincStorage`. + unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); + } + // SAFETY: Requirements passed to caller. 
+ unsafe { self.storage.shared_get_access_mut(id) } + } +} + impl Tracked for FlaggedStorage { fn channel(&self) -> &EventChannel { let channel_ptr = self.channel.get(); - // SAFETY: The only place that mutably accesses the channel via a shared - // reference is the impl of `UnprotectedStorage::get_mut` which requires - // callers to avoid calling safe methods with `&self` while those - // mutable references are in use and to ensure any references from those - // safe methods are no longer alive. + // SAFETY: The only place that mutably accesses the channel via a + // shared reference is the impl of + // `SharedGetAccessMut::shared_get_access_mut` which requires callers to + // avoid calling other methods with `&self` while references returned there + // are still in use (and to ensure references from methods like this no + // longer exist). unsafe { &*channel_ptr } } diff --git a/src/storage/generic.rs b/src/storage/generic.rs index b674d6da6..f5e93f2a9 100644 --- a/src/storage/generic.rs +++ b/src/storage/generic.rs @@ -88,7 +88,7 @@ pub trait GenericWriteStorage { type Component: Component; /// The wrapper through with mutable access of a component is performed. #[cfg(feature = "nightly")] - type AccessMut<'a>: DerefMut + type AccessMut<'a> where Self: 'a; diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 089566a85..3a47fd997 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -140,8 +140,8 @@ where /// This is a marker trait which requires you to uphold the following guarantee: /// -/// > Multiple threads may call `shared_get_access_mut()` with distinct indices -/// without causing > undefined behavior. +/// > Multiple threads may call `SharedGetAccessMutStorage::shared_get_access_mut()` +/// with distinct indices without causing > undefined behavior. /// /// This is for example valid for `Vec`: /// @@ -846,7 +846,7 @@ macro_rules! shared_get_access_mut_docs { trait SharedGetAccessMutStorage: UnprotectedStorage { #[cfg(feature = "nightly")] shared_get_access_mut_docs! { - unsafe fn shared_get_access_mut(&self, id: Index) -> ::AccessMut<'_>; + unsafe fn shared_get_access_mut(&self, id: Index) -> >::AccessMut<'_>; } #[cfg(not(feature = "nightly"))] diff --git a/src/storage/storages.rs b/src/storage/storages.rs index c3ee7307b..07e6eddd2 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -1,8 +1,5 @@ //! Different types of storages you can use for your components. -// S-TODO redo distinct storage safety comments to point to proper method, check -// trait docs too - use core::{marker::PhantomData, mem::MaybeUninit, ptr, ptr::NonNull}; use std::collections::BTreeMap; @@ -10,7 +7,7 @@ use ahash::AHashMap as HashMap; use hibitset::BitSetLike; use crate::{ - storage::{DistinctStorage, SyncUnsafeCell, UnprotectedStorage}, + storage::{DistinctStorage, SharedGetAccessMutStorage, SyncUnsafeCell, UnprotectedStorage}, world::Index, }; @@ -60,12 +57,6 @@ impl UnprotectedStorage for BTreeStorage { self.0.get_mut(&id).unwrap().get_mut() } - /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { - let ptr = self.0[&id].get(); - // SAFETY: See `VecStorage` impl. - unsafe { &mut *ptr } - }*/ - unsafe fn insert(&mut self, id: Index, v: T) { self.0.insert(id, SyncUnsafeCell::new(v)); } @@ -75,8 +66,17 @@ impl UnprotectedStorage for BTreeStorage { } } -// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when -// provided distinct indices. 
+impl SharedGetAccessMutStorage for BTreeStorage { + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { + let ptr = self.0[&id].get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } + } +} + +// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// accesses when provided distinct indices and is safe to call from multiple +// threads at once. unsafe impl DistinctStorage for BTreeStorage {} /// `HashMap`-based storage. Best suited for rare components. @@ -115,12 +115,6 @@ impl UnprotectedStorage for HashMapStorage { self.0.get_mut(&id).unwrap().get_mut() } - /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { - let ptr = self.0[&id].get(); - // SAFETY: See `VecStorage` impl. - unsafe { &mut *ptr } - }*/ - unsafe fn insert(&mut self, id: Index, v: T) { self.0.insert(id, SyncUnsafeCell::new(v)); } @@ -130,8 +124,17 @@ impl UnprotectedStorage for HashMapStorage { } } -// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when -// provided distinct indices. +impl SharedGetAccessMutStorage for HashMapStorage { + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { + let ptr = self.0[&id].get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } + } +} + +// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// accesses when provided distinct indices and is safe to call from multiple +// threads at once. unsafe impl DistinctStorage for HashMapStorage {} /// Dense vector storage. Has a redirection 2-way table @@ -173,10 +176,11 @@ impl SliceAccess for DenseVecStorage { fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.data.as_slice()).get(); // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of `UnprotectedStorage::get_mut` which - // requires callers to avoid calling safe methods with `&self` while - // those mutable references are in use and to ensure any references - // from those safe methods are no longer alive. + // shared reference is the impl of + // `SharedGetAccessMut::shared_get_access_mut` which requires callers to + // avoid calling other methods with `&self` while references returned there + // are still in use (and to ensure references from methods like this no + // longer exist). unsafe { &*unsafe_cell_slice_ptr } } @@ -207,7 +211,6 @@ impl UnprotectedStorage for DenseVecStorage { } unsafe fn get(&self, id: Index) -> &T { - // S-TODO recheck these safety notes (and all others in this module) // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY (get_unchecked and assume_init): Caller required to call @@ -238,23 +241,7 @@ impl UnprotectedStorage for DenseVecStorage { unsafe { self.get_mut(id) } } - /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { - // S-TODO recheck these safety notes - // NOTE: `as` cast is not lossy since insert would have encountered an - // allocation failure if this would overflow `usize.` - // SAFETY (get_unchecked and assume_init): Caller required to call - // `insert` with this `id` (with no following call to `remove` with that - // id or to `clean`). - let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() }; - // SAFETY: Indices retrieved from `data_id` with a valid `id` will - // always correspond to an element in `data`. - let ptr = unsafe { self.data.get_unchecked(did as usize) }.get(); - // SAFETY: See `VecStorage` impl. 
- unsafe { &mut *ptr } - }*/ - unsafe fn insert(&mut self, id: Index, v: T) { - // S-TODO recheck these safety notes let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the // allocation below will fail. @@ -289,7 +276,6 @@ impl UnprotectedStorage for DenseVecStorage { } unsafe fn remove(&mut self, id: Index) -> T { - // S-TODO recheck these safety notes // NOTE: cast to usize won't overflow since `insert` would have failed // to allocate. // SAFETY (get_unchecked and assume_init): Caller required to have @@ -302,15 +288,32 @@ impl UnprotectedStorage for DenseVecStorage { // in this storage so this will be in-bounds. unsafe { self.data_id.get_unchecked_mut(last as usize) }.write(did); // NOTE: casting the index in the dense data array to usize won't - // overflow since the maximum number of components if limited to + // overflow since the maximum number of components is limited to // `Index::MAX + 1`. self.entity_id.swap_remove(did as usize); self.data.swap_remove(did as usize).0.into_inner() } } -// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when -// provided distinct indices. +impl SharedGetAccessMutStorage for DenseVecStorage { + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY (get_unchecked and assume_init): Caller required to call + // `insert` with this `id` (with no following call to `remove` with that + // id or to `clean`). + let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() }; + // SAFETY: Indices retrieved from `data_id` with a valid `id` will + // always correspond to an element in `data`. + let ptr = unsafe { self.data.get_unchecked(did as usize) }.get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } + } +} + +// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// accesses when provided distinct indices and is safe to call from multiple +// threads at once. unsafe impl DistinctStorage for DenseVecStorage {} /// A null storage type, used for cases where the component @@ -353,16 +356,11 @@ impl UnprotectedStorage for NullStorage { unsafe { &*NonNull::dangling().as_ptr() } } - unsafe fn get_mut(&mut self, _: Index) -> &mut T { - // S-TODO: can we defer to shared_get_access_mut - // SAFETY: Because the caller is required by the safety docs to first - // insert a component with this index, this corresponds to an instance - // of the ZST we conceptually own. The caller also must manage the - // aliasing of accesses via get/get_mut. - // - // Self::default asserts that `T` is a ZST which makes generating a - // reference from a dangling pointer not UB. - unsafe { &mut *NonNull::dangling().as_ptr() } + unsafe fn get_mut(&mut self, id: Index) -> &mut T { + // SAFETY: Exclusive reference to `self` guarantees that that are no + // extant references to components and that we aren't calling this from + // multiple threads at once. Remaining requirements passed to caller. 
+ unsafe { self.shared_get_access_mut(id) } } unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { @@ -370,17 +368,6 @@ impl UnprotectedStorage for NullStorage { unsafe { self.get_mut(id) } } - /*unsafe fn shared_get_access_mut(&self, _: Index) -> &mut T { - // SAFETY: Because the caller is required by the safety docs to first - // insert a component with this index, this corresponds to an instance - // of the ZST we conceptually own. The caller also must manage the - // aliasing of accesses via get/get_mut. - // - // Self::default asserts that `T` is a ZST which makes generating a - // reference from a dangling pointer not UB. - unsafe { &mut *NonNull::dangling().as_ptr() } - }*/ - unsafe fn insert(&mut self, _: Index, v: T) { // We rely on the caller tracking the presence of the ZST via the mask. // @@ -400,8 +387,22 @@ impl UnprotectedStorage for NullStorage { } } -// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when -// provided distinct indices. +impl SharedGetAccessMutStorage for NullStorage { + unsafe fn shared_get_access_mut(&self, _: Index) -> &mut T { + // SAFETY: Because the caller is required by the safety docs to first + // insert a component with this index, this corresponds to an instance + // of the ZST we conceptually own. The caller also must manage the + // aliasing of accesses via get/get_mut. + // + // Self::default asserts that `T` is a ZST which makes generating a + // reference from a dangling pointer not UB. + unsafe { &mut *NonNull::dangling().as_ptr() } + } +} + +// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// accesses when provided distinct indices and is safe to call from multiple +// threads at once. unsafe impl DistinctStorage for NullStorage {} /// Vector storage. Uses a simple `Vec`. Supposed to have maximum @@ -426,9 +427,11 @@ impl SliceAccess for VecStorage { fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get(); // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of `UnprotectedStorage::get_mut` which - // requires callers to avoid calling safe methods with `&self` while - // those mutable references are in use. + // shared reference is the impl of + // `SharedGetAccessMut::shared_get_access_mut` which requires callers to + // avoid calling other methods with `&self` while references returned there + // are still in use (and to ensure references from methods like this no + // longer exist). unsafe { &*unsafe_cell_slice_ptr } } @@ -466,11 +469,10 @@ impl UnprotectedStorage for VecStorage { // SAFETY: Caller required to call `insert` with this `id` (with no // following call to `remove` with that id or to `clean`). let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); - // S-TODO update these since we tweaked safety docs to focus - // requirements on `get_mut`. (also there are the shared_get_mut methods - // to consider, well those replace get_mut) - // SAFETY: Caller required to manage aliasing between this and - // `get_mut`. + // SAFETY: Only method that obtains exclusive references from this + // unsafe cell is `shared_get_access_mut` and callers are required to + // managed aliasing there and prevent other methods from being called + // while those exclusive references are alive. let maybe_uninit = unsafe { &*ptr }; // SAFETY: Requirement to have `insert`ed this component ensures that it // will be initialized. 
@@ -478,7 +480,7 @@ impl UnprotectedStorage for VecStorage { } unsafe fn get_mut(&mut self, id: Index) -> &mut T { - // NOTE: `as` cast is not lossy since insert would have encountered an + // NOTE: `as` cast is not lossy since `insert` would have encountered an // allocation failure if this would overflow `usize.` // SAFETY: Caller required to call `insert` with this `id` (with no // following call to `remove` with that id or to `clean`). @@ -493,22 +495,6 @@ impl UnprotectedStorage for VecStorage { unsafe { self.get_mut(id) } } - /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { - // NOTE: `as` cast is not lossy since insert would have encountered an - // allocation failure if this would overflow `usize.` - // SAFETY: Caller required to call `insert` with this `id` (with no - // following call to `remove` with that id or to `clean`). - let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); - // SAFETY: Caller required to manage aliasing (both ensuring - // `get_mut`/`get` is called without aliasing refs returned here, and - // ensuring other safe methods that take `&self` aren't called while the - // returned mutable references are alive). - let maybe_uninit = unsafe { &mut *ptr }; - // SAFETY: Requirement to have `insert`ed this component ensures that it - // will be initialized. - unsafe { maybe_uninit.assume_init_mut() } - }*/ - unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the @@ -552,8 +538,27 @@ impl UnprotectedStorage for VecStorage { } } -// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when -// provided distinct indices. +impl SharedGetAccessMutStorage for VecStorage { + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: Caller required to call `insert` with this `id` (with no + // following call to `remove` with that id or to `clean`). + let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); + // SAFETY: Caller required to manage aliasing (ensuring there are no + // extant shared references into the storage, this is called with + // distinct ids, and that other methods that take `&self` aren't called + // while the exclusive references returned here are alive). + let maybe_uninit = unsafe { &mut *ptr }; + // SAFETY: Requirement to have `insert`ed this component ensures that it + // will be initialized. + unsafe { maybe_uninit.assume_init_mut() } + } +} + +// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// accesses when provided distinct indices and is safe to call from multiple +// threads at once. unsafe impl DistinctStorage for VecStorage {} /// Vector storage, like `VecStorage`, but allows safe access to the @@ -580,9 +585,11 @@ impl SliceAccess for DefaultVecStorage { fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get(); // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of `UnprotectedStorage::get_mut` which - // requires callers to avoid calling safe methods with `&self` while - // those mutable references are in use. 
+ // shared reference is the impl of + // `SharedGetAccessMut::shared_get_access_mut` which requires callers to + // avoid calling other methods with `&self` while references returned there + // are still in use (and to ensure references from methods like this no + // longer exist). unsafe { &*unsafe_cell_slice_ptr } } @@ -628,15 +635,6 @@ where unsafe { self.get_mut(id) } } - /*unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { - // NOTE: `as` cast is not lossy since insert would have encountered an - // allocation failure if this would overflow `usize.` - // SAFETY: See `VecStorage` impl. - let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); - // SAFETY: See `VecStorage` impl. - unsafe { &mut *ptr } - }*/ - unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the @@ -664,6 +662,21 @@ where } } -// SAFETY: `get_mut` doesn't perform any overlapping mutable accesses when -// provided distinct indices. +impl SharedGetAccessMutStorage for DefaultVecStorage +where + T: Default, +{ + unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { + // NOTE: `as` cast is not lossy since insert would have encountered an + // allocation failure if this would overflow `usize.` + // SAFETY: See `VecStorage` impl. + let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); + // SAFETY: See `VecStorage` impl. + unsafe { &mut *ptr } + } +} + +// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// accesses when provided distinct indices and is safe to call from multiple +// threads at once. unsafe impl DistinctStorage for DefaultVecStorage {} From 57e929cbedf79ae314417d691c8a373fb07dd3d4 Mon Sep 17 00:00:00 2001 From: Imbris Date: Fri, 19 Aug 2022 02:16:48 -0400 Subject: [PATCH 09/47] Remove `get_mut`, rename `get_access_mut` -> `get_mut`, rename `SharedGetAccessMutStorage` -> `SharedGetMutStorage`, rename `shared_get_access_mut` -> `shared_get_mut`. --- src/storage/flagged.rs | 49 ++++++++------------- src/storage/mod.rs | 96 +++++++++++++++++----------------------- src/storage/storages.rs | 97 ++++++++++++++--------------------------- 3 files changed, 93 insertions(+), 149 deletions(-) diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index 345a84ebb..221caf530 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -4,8 +4,8 @@ use hibitset::BitSetLike; use crate::{ storage::{ - ComponentEvent, DenseVecStorage, SharedGetAccessMutStorage, SyncUnsafeCell, Tracked, - TryDefault, UnprotectedStorage, + ComponentEvent, DenseVecStorage, SharedGetMutStorage, SyncUnsafeCell, Tracked, TryDefault, + UnprotectedStorage, }, world::{Component, Index}, }; @@ -219,7 +219,8 @@ impl> UnprotectedStorage for FlaggedSt unsafe { self.storage.get(id) } } - unsafe fn get_mut(&mut self, id: Index) -> &mut C { + #[cfg(feature = "nightly")] + unsafe fn get_mut(&mut self, id: Index) -> >::AccessMut<'_> { if self.emit_event() { self.channel .get_mut() @@ -229,21 +230,15 @@ impl> UnprotectedStorage for FlaggedSt unsafe { self.storage.get_mut(id) } } - #[cfg(feature = "nightly")] - unsafe fn get_access_mut(&mut self, id: Index) -> >::AccessMut<'_> { + #[cfg(not(feature = "nightly"))] + unsafe fn get_mut(&mut self, id: Index) -> &mut C { if self.emit_event() { self.channel .get_mut() .single_write(ComponentEvent::Modified(id)); } // SAFETY: Requirements passed to caller. 
- unsafe { self.storage.get_access_mut(id) } - } - - #[cfg(not(feature = "nightly"))] - unsafe fn get_access_mut(&mut self, id: Index) -> &mut C { - // SAFETY: Requirements passed to caller. - unsafe { self.get_mut(id) } + unsafe { self.storage.get_mut(id) } } unsafe fn insert(&mut self, id: Index, comp: C) { @@ -267,48 +262,42 @@ impl> UnprotectedStorage for FlaggedSt } } -impl> SharedGetAccessMutStorage - for FlaggedStorage -{ +impl> SharedGetMutStorage for FlaggedStorage { #[cfg(feature = "nightly")] - unsafe fn shared_get_access_mut( - &self, - id: Index, - ) -> >::AccessMut<'_> { + unsafe fn shared_get_mut(&self, id: Index) -> >::AccessMut<'_> { if self.emit_event() { let channel_ptr = self.channel.get(); // SAFETY: Caller required to ensure references returned from other // safe methods such as Tracked::channel are no longer alive. This - // storage is not marked as `DistincStorage`. + // storage is not marked with a `DistinctStorage` impl. unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); } // SAFETY: Requirements passed to caller. - unsafe { self.storage.shared_get_access_mut(id) } + unsafe { self.storage.shared_get_mut(id) } } #[cfg(not(feature = "nightly"))] - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut C { + unsafe fn shared_get_mut(&self, id: Index) -> &mut C { if self.emit_event() { let channel_ptr = self.channel.get(); // SAFETY: Caller required to ensure references returned from other // safe methods such as Tracked::channel are no longer alive. This - // storage is not marked as `DistincStorage`. + // storage is not marked with a `DistinctStorage` impl. unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); } // SAFETY: Requirements passed to caller. - unsafe { self.storage.shared_get_access_mut(id) } + unsafe { self.storage.shared_get_mut(id) } } } impl Tracked for FlaggedStorage { fn channel(&self) -> &EventChannel { let channel_ptr = self.channel.get(); - // SAFETY: The only place that mutably accesses the channel via a - // shared reference is the impl of - // `SharedGetAccessMut::shared_get_access_mut` which requires callers to - // avoid calling other methods with `&self` while references returned there - // are still in use (and to ensure references from methods like this no - // longer exist). + // SAFETY: The only place that mutably accesses the channel via a shared + // reference is the impl of `SharedGetMut::shared_get_mut` which + // requires callers to avoid calling other methods with `&self` while + // references returned there are still in use (and to ensure references + // from methods like this no longer exist). unsafe { &*channel_ptr } } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 3a47fd997..c8da1ec2f 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -140,7 +140,7 @@ where /// This is a marker trait which requires you to uphold the following guarantee: /// -/// > Multiple threads may call `SharedGetAccessMutStorage::shared_get_access_mut()` +/// > Multiple threads may call `SharedGetMutStorage::shared_get_mut()` /// with distinct indices without causing > undefined behavior. /// /// This is for example valid for `Vec`: @@ -535,21 +535,21 @@ where unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_> { // SAFETY: Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. 
- unsafe { value.get_access_mut(id) } + unsafe { value.get_mut(id) } } } -mod shared_get_access_mut_only { - use super::{AccessMutReturn, Component, Index, SharedGetAccessMutStorage}; +mod shared_get_mut_only { + use super::{AccessMutReturn, Component, Index, SharedGetMutStorage}; - /// This type provides a way to ensure only `shared_get_access_mut` can be - /// called for the lifetime `'a` and that no references previously obtained - /// from the storage exist when it is created. While internally this is a - /// shared reference, constructing it requires an exclusive borrow for the - /// lifetime `'a`. - pub struct SharedGetAccessMutOnly<'a, T: Component>(&'a T::Storage); + /// This type provides a way to ensure only `shared_get_mut` can be called + /// for the lifetime `'a` and that no references previously obtained from + /// the storage exist when it is created. While internally this is a shared + /// reference, constructing it requires an exclusive borrow for the lifetime + /// `'a`. + pub struct SharedGetMutOnly<'a, T: Component>(&'a T::Storage); - impl<'a, T: Component> SharedGetAccessMutOnly<'a, T> { + impl<'a, T: Component> SharedGetMutOnly<'a, T> { pub(super) fn new(storage: &'a mut T::Storage) -> Self { Self(storage) } @@ -569,7 +569,7 @@ mod shared_get_access_mut_only { /// multiple threads at once is unsound. pub(super) unsafe fn get(&self, i: Index) -> AccessMutReturn<'a, T> where - T::Storage: SharedGetAccessMutStorage, + T::Storage: SharedGetMutStorage, { // SAFETY: `Self::new` takes an exclusive reference to this storage, // ensuring there are no extant references to its content at the @@ -579,32 +579,32 @@ mod shared_get_access_mut_only { // exposed outside of this module). // // This means we only have to worry about aliasing references being - // produced by calling `shared_get_access_mut`. Ensuring these don't - // alias and the remaining safety requirements are passed on to the + // produced by calling `shared_get_mut`. Ensuring these don't alias + // and the remaining safety requirements are passed on to the // caller. - unsafe { self.0.shared_get_access_mut(i) } + unsafe { self.0.shared_get_mut(i) } } } } -use shared_get_access_mut_only::SharedGetAccessMutOnly; +use shared_get_mut_only::SharedGetMutOnly; // SAFETY: The mask and unprotected storage contained in `MaskedStorage` // correspond and `open` returns references to them from the same // `MaskedStorage` instance (the storage is wrapped in -// `SharedGetAccessMutOnly`). +// `SharedGetMutOnly`). unsafe impl<'a, 'e, T, D> Join for &'a mut Storage<'e, T, D> where T: Component, D: DerefMut>, - T::Storage: SharedGetAccessMutStorage, + T::Storage: SharedGetMutStorage, { type Mask = &'a BitSet; type Type = AccessMutReturn<'a, T>; - type Value = SharedGetAccessMutOnly<'a, T>; + type Value = SharedGetMutOnly<'a, T>; unsafe fn open(self) -> (Self::Mask, Self::Value) { let (mask, value) = self.data.open_mut(); - let value = SharedGetAccessMutOnly::new(value); + let value = SharedGetMutOnly::new(value); (mask, value) } @@ -621,26 +621,26 @@ where } } -// SAFETY: It is safe to call `SharedGetAccessMutOnly<'a, T>::get` from -// multiple threads at once since `T::Storage: DistinctStorage`. +// SAFETY: It is safe to call `SharedGetMutOnly<'a, T>::get` from multiple +// threads at once since `T::Storage: DistinctStorage`. 
// // The mask and unprotected storage contained in `MaskedStorage` correspond and // `open` returns references to them from the same `MaskedStorage` instance (the -// storage is wrapped in `SharedGetAccessMutOnly`). +// storage is wrapped in `SharedGetMutOnly`). #[cfg(feature = "parallel")] unsafe impl<'a, 'e, T, D> ParJoin for &'a mut Storage<'e, T, D> where T: Component, D: DerefMut>, - T::Storage: Sync + SharedGetAccessMutStorage + DistinctStorage, + T::Storage: Sync + SharedGetMutStorage + DistinctStorage, { type Mask = &'a BitSet; type Type = AccessMutReturn<'a, T>; - type Value = SharedGetAccessMutOnly<'a, T>; + type Value = SharedGetMutOnly<'a, T>; unsafe fn open(self) -> (Self::Mask, Self::Value) { let (mask, value) = self.data.open_mut(); - let value = SharedGetAccessMutOnly::new(value); + let value = SharedGetMutOnly::new(value); (mask, value) } @@ -681,13 +681,13 @@ where } } -macro_rules! get_access_mut_docs { +macro_rules! get_mut_docs { ($fn_definition:item) => { /// Gets mutable access to the the data associated with an `Index`. /// - /// Unlike `get_mut` this doesn't necessarily directly return a `&mut` - /// reference (at least with `nightly` feature). This allows storages - /// more flexibility. For example, some flagged storages utilize this to + /// This doesn't necessarily directly return a `&mut` reference (at + /// least with `nightly` feature). This allows storages more + /// flexibility. For example, some flagged storages utilize this to /// defer generation of mutation events until the user obtains an `&mut` /// reference out of the returned wrapper type. /// @@ -743,28 +743,14 @@ pub trait UnprotectedStorage: TryDefault { /// in the tracking mask is sufficient to call this method. unsafe fn get(&self, id: Index) -> &T; - /// Gets an exclusive reference to the data associated with an `Index`. - /// - /// This is unsafe because the external set used to protect this storage is - /// absent. - /// - /// # Safety - /// - /// May only be called after a call to `insert` with `id` and no following - /// call to `remove` with `id` or to `clean`. - /// - /// A mask should keep track of those states, and an `id` being contained in - /// the tracking mask is sufficient to call this method. - unsafe fn get_mut(&mut self, id: Index) -> &mut T; - #[cfg(feature = "nightly")] - get_access_mut_docs! { - unsafe fn get_access_mut(&mut self, id: Index) -> Self::AccessMut<'_>; + get_mut_docs! { + unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_>; } #[cfg(not(feature = "nightly"))] - get_access_mut_docs! { - unsafe fn get_access_mut(&mut self, id: Index) -> &mut T; + get_mut_docs! { + unsafe fn get_mut(&mut self, id: Index) -> &mut T; } /// Inserts new data for a given `Index`. @@ -809,7 +795,7 @@ pub trait UnprotectedStorage: TryDefault { } } -macro_rules! shared_get_access_mut_docs { +macro_rules! shared_get_mut_docs { ($fn_definition:item) => { /// Gets mutable access to the the data associated with an `Index`. /// @@ -830,7 +816,7 @@ macro_rules! shared_get_access_mut_docs { /// methods on this type that take `&self` (e.g. /// [`UnprotectedStorage::get`], [`SliceAccess::as_slice`], /// [`Tracked::channel`]) must no longer be alive when - /// `shared_get_access_mut` is called and these methods must not be + /// `shared_get_mut` is called and these methods must not be /// called while the references returned here are alive. 
Essentially, /// the `unsafe` code calling this must hold exclusive access of the /// storage at some level to ensure only known code is calling `&self` @@ -843,15 +829,15 @@ macro_rules! shared_get_access_mut_docs { }; } -trait SharedGetAccessMutStorage: UnprotectedStorage { +trait SharedGetMutStorage: UnprotectedStorage { #[cfg(feature = "nightly")] - shared_get_access_mut_docs! { - unsafe fn shared_get_access_mut(&self, id: Index) -> >::AccessMut<'_>; + shared_get_mut_docs! { + unsafe fn shared_get_ut(&self, id: Index) -> >::AccessMut<'_>; } #[cfg(not(feature = "nightly"))] - shared_get_access_mut_docs! { - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T; + shared_get_mut_docs! { + unsafe fn shared_get_mut(&self, id: Index) -> &mut T; } } diff --git a/src/storage/storages.rs b/src/storage/storages.rs index 07e6eddd2..1423ff622 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -7,7 +7,7 @@ use ahash::AHashMap as HashMap; use hibitset::BitSetLike; use crate::{ - storage::{DistinctStorage, SharedGetAccessMutStorage, SyncUnsafeCell, UnprotectedStorage}, + storage::{DistinctStorage, SharedGetMutStorage, SyncUnsafeCell, UnprotectedStorage}, world::Index, }; @@ -53,10 +53,6 @@ impl UnprotectedStorage for BTreeStorage { self.0.get_mut(&id).unwrap().get_mut() } - unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { - self.0.get_mut(&id).unwrap().get_mut() - } - unsafe fn insert(&mut self, id: Index, v: T) { self.0.insert(id, SyncUnsafeCell::new(v)); } @@ -66,15 +62,15 @@ impl UnprotectedStorage for BTreeStorage { } } -impl SharedGetAccessMutStorage for BTreeStorage { - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { +impl SharedGetMutStorage for BTreeStorage { + unsafe fn shared_get_mut(&self, id: Index) -> &mut T { let ptr = self.0[&id].get(); // SAFETY: See `VecStorage` impl. unsafe { &mut *ptr } } } -// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// SAFETY: `shared_get_mut` doesn't perform any overlapping mutable // accesses when provided distinct indices and is safe to call from multiple // threads at once. unsafe impl DistinctStorage for BTreeStorage {} @@ -111,10 +107,6 @@ impl UnprotectedStorage for HashMapStorage { self.0.get_mut(&id).unwrap().get_mut() } - unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { - self.0.get_mut(&id).unwrap().get_mut() - } - unsafe fn insert(&mut self, id: Index, v: T) { self.0.insert(id, SyncUnsafeCell::new(v)); } @@ -124,15 +116,15 @@ impl UnprotectedStorage for HashMapStorage { } } -impl SharedGetAccessMutStorage for HashMapStorage { - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { +impl SharedGetMutStorage for HashMapStorage { + unsafe fn shared_get_mut(&self, id: Index) -> &mut T { let ptr = self.0[&id].get(); // SAFETY: See `VecStorage` impl. unsafe { &mut *ptr } } } -// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// SAFETY: `shared_get_mut` doesn't perform any overlapping mutable // accesses when provided distinct indices and is safe to call from multiple // threads at once. 
unsafe impl DistinctStorage for HashMapStorage {} @@ -176,11 +168,10 @@ impl SliceAccess for DenseVecStorage { fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.data.as_slice()).get(); // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of - // `SharedGetAccessMut::shared_get_access_mut` which requires callers to - // avoid calling other methods with `&self` while references returned there - // are still in use (and to ensure references from methods like this no - // longer exist). + // shared reference is the impl of `SharedGetMut::shared_get_mut` which + // requires callers to avoid calling other methods with `&self` while + // references returned there are still in use (and to ensure references + // from methods like this no longer exist). unsafe { &*unsafe_cell_slice_ptr } } @@ -236,11 +227,6 @@ impl UnprotectedStorage for DenseVecStorage { unsafe { self.data.get_unchecked_mut(did as usize) }.get_mut() } - unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { - // SAFETY: Requirements passed to caller - unsafe { self.get_mut(id) } - } - unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the @@ -295,8 +281,8 @@ impl UnprotectedStorage for DenseVecStorage { } } -impl SharedGetAccessMutStorage for DenseVecStorage { - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { +impl SharedGetMutStorage for DenseVecStorage { + unsafe fn shared_get_mut(&self, id: Index) -> &mut T { // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY (get_unchecked and assume_init): Caller required to call @@ -311,7 +297,7 @@ impl SharedGetAccessMutStorage for DenseVecStorage { } } -// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// SAFETY: `shared_get_mut` doesn't perform any overlapping mutable // accesses when provided distinct indices and is safe to call from multiple // threads at once. unsafe impl DistinctStorage for DenseVecStorage {} @@ -360,12 +346,7 @@ impl UnprotectedStorage for NullStorage { // SAFETY: Exclusive reference to `self` guarantees that that are no // extant references to components and that we aren't calling this from // multiple threads at once. Remaining requirements passed to caller. - unsafe { self.shared_get_access_mut(id) } - } - - unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { - // SAFETY: Requirements passed to caller. - unsafe { self.get_mut(id) } + unsafe { self.shared_get_mut(id) } } unsafe fn insert(&mut self, _: Index, v: T) { @@ -387,8 +368,8 @@ impl UnprotectedStorage for NullStorage { } } -impl SharedGetAccessMutStorage for NullStorage { - unsafe fn shared_get_access_mut(&self, _: Index) -> &mut T { +impl SharedGetMutStorage for NullStorage { + unsafe fn shared_get_mut(&self, _: Index) -> &mut T { // SAFETY: Because the caller is required by the safety docs to first // insert a component with this index, this corresponds to an instance // of the ZST we conceptually own. The caller also must manage the @@ -400,7 +381,7 @@ impl SharedGetAccessMutStorage for NullStorage { } } -// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// SAFETY: `shared_get_mut` doesn't perform any overlapping mutable // accesses when provided distinct indices and is safe to call from multiple // threads at once. 
unsafe impl DistinctStorage for NullStorage {} @@ -427,11 +408,10 @@ impl SliceAccess for VecStorage { fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get(); // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of - // `SharedGetAccessMut::shared_get_access_mut` which requires callers to - // avoid calling other methods with `&self` while references returned there - // are still in use (and to ensure references from methods like this no - // longer exist). + // shared reference is the impl of `SharedGetMut::shared_get_mut` which + // requires callers to avoid calling other methods with `&self` while + // references returned there are still in use (and to ensure references + // from methods like this no longer exist). unsafe { &*unsafe_cell_slice_ptr } } @@ -470,7 +450,7 @@ impl UnprotectedStorage for VecStorage { // following call to `remove` with that id or to `clean`). let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); // SAFETY: Only method that obtains exclusive references from this - // unsafe cell is `shared_get_access_mut` and callers are required to + // unsafe cell is `shared_get_mut` and callers are required to // managed aliasing there and prevent other methods from being called // while those exclusive references are alive. let maybe_uninit = unsafe { &*ptr }; @@ -490,11 +470,6 @@ impl UnprotectedStorage for VecStorage { unsafe { maybe_uninit.assume_init_mut() } } - unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { - // SAFETY: Requirements passed to caller. - unsafe { self.get_mut(id) } - } - unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the @@ -538,8 +513,8 @@ impl UnprotectedStorage for VecStorage { } } -impl SharedGetAccessMutStorage for VecStorage { - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { +impl SharedGetMutStorage for VecStorage { + unsafe fn shared_get_mut(&self, id: Index) -> &mut T { // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY: Caller required to call `insert` with this `id` (with no @@ -556,7 +531,7 @@ impl SharedGetAccessMutStorage for VecStorage { } } -// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// SAFETY: `shared_get_mut` doesn't perform any overlapping mutable // accesses when provided distinct indices and is safe to call from multiple // threads at once. unsafe impl DistinctStorage for VecStorage {} @@ -585,11 +560,10 @@ impl SliceAccess for DefaultVecStorage { fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get(); // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of - // `SharedGetAccessMut::shared_get_access_mut` which requires callers to - // avoid calling other methods with `&self` while references returned there - // are still in use (and to ensure references from methods like this no - // longer exist). + // shared reference is the impl of `SharedGetMut::shared_get_mut` which + // requires callers to avoid calling other methods with `&self` while + // references returned there are still in use (and to ensure references + // from methods like this no longer exist). 
unsafe { &*unsafe_cell_slice_ptr } } @@ -630,11 +604,6 @@ where unsafe { self.0.get_unchecked_mut(id as usize) }.get_mut() } - unsafe fn get_access_mut(&mut self, id: Index) -> &mut T { - // SAFETY: Requirements passed to caller. - unsafe { self.get_mut(id) } - } - unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the @@ -662,11 +631,11 @@ where } } -impl SharedGetAccessMutStorage for DefaultVecStorage +impl SharedGetMutStorage for DefaultVecStorage where T: Default, { - unsafe fn shared_get_access_mut(&self, id: Index) -> &mut T { + unsafe fn shared_get_mut(&self, id: Index) -> &mut T { // NOTE: `as` cast is not lossy since insert would have encountered an // allocation failure if this would overflow `usize.` // SAFETY: See `VecStorage` impl. @@ -676,7 +645,7 @@ where } } -// SAFETY: `shared_get_access_mut` doesn't perform any overlapping mutable +// SAFETY: `shared_get_mut` doesn't perform any overlapping mutable // accesses when provided distinct indices and is safe to call from multiple // threads at once. unsafe impl DistinctStorage for DefaultVecStorage {} From 09680894162783841071cf7c660f1e6d042560a3 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 22 Jan 2023 01:16:29 -0500 Subject: [PATCH 10/47] Various next steps: * Start work on implementing LendJoin and safely re-implementing Join for `&ChangeSet`, `&mut ChangeSet`, and `ChangeSet`. * Add `AccessMut` trait as a replacement for a few cases that were using `DerefMut` (since we don't want to require that `UnprotectedStorage::AccessMut<'a>' has to implement `DerefMut`). IIRC the cases were originally missed because they are behind feature flags. * Modify `SharedGetMutOnly` to also be generic over the storage type so that we don't have to require `T: Component` (since we were getting the storage type from the associated `Component::Storage`). IIRC this is to support use in `ChangeSet` which doesn't require `T: Component`. --- src/changeset.rs | 118 ++++++++++++++++++++++++++++++----- src/lib.rs | 4 +- src/prelude.rs | 4 +- src/saveload/marker.rs | 3 +- src/storage/deref_flagged.rs | 8 ++- src/storage/generic.rs | 6 +- src/storage/mod.rs | 52 ++++++++++----- 7 files changed, 154 insertions(+), 41 deletions(-) diff --git a/src/changeset.rs b/src/changeset.rs index 732627d41..b5b147e68 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -2,7 +2,11 @@ use std::{iter::FromIterator, ops::AddAssign}; -use crate::{prelude::*, storage::UnprotectedStorage, world::Index}; +use crate::{ + prelude::*, + storage::{SharedGetMutOnly, UnprotectedStorage}, + world::Index, +}; /// Change set that can be collected from an iterator, and joined on for easy /// application to components. @@ -111,25 +115,75 @@ where } } -impl<'a, T> Join for &'a mut ChangeSet { +// TODO: lifetime issues +// SAFETY: `open` returns references to a mask and storage which are contained +// together in the `ChangeSet` and correspond. +/*#[nougat::gat] +unsafe impl<'a, T> LendJoin for &'a mut ChangeSet { type Mask = &'a BitSet; - type Type = &'a mut T; + type Type<'next> = &'next mut T; type Value = &'a mut DenseVecStorage; unsafe fn open(self) -> (Self::Mask, Self::Value) { (&self.mask, &mut self.inner) } - // `DistinctStorage` invariants are also met, but no `ParJoin` - // implementation exists yet. 
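// --- Illustrative aside (not part of the patch): a standalone sketch of the
// `AccessMut` idea described in the commit message above, i.e. "DerefMut
// without autoderef", so the mutable access is an explicit call site that a
// flagged wrapper can hook into. `Flagged` and `bump` are hypothetical names
// local to this sketch.
use core::ops::{Deref, DerefMut};

trait AccessMut: Deref {
    fn access_mut(&mut self) -> &mut Self::Target;
}

// Everything that is already `DerefMut` gets the explicit form for free.
impl<T: DerefMut> AccessMut for T {
    fn access_mut(&mut self) -> &mut Self::Target {
        &mut *self
    }
}

// A wrapper can record the mutation at the point exclusive access is taken.
struct Flagged<'a, T> {
    value: &'a mut T,
    modified: &'a mut bool,
}

impl<'a, T> Deref for Flagged<'a, T> {
    type Target = T;

    fn deref(&self) -> &T {
        self.value
    }
}

impl<'a, T> DerefMut for Flagged<'a, T> {
    fn deref_mut(&mut self) -> &mut T {
        *self.modified = true;
        self.value
    }
}

fn bump(mut counter: Flagged<'_, u32>) {
    // The write is visible as a call, not an implicit autoderef.
    *counter.access_mut() += 1;
}
// --- End of aside.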
- unsafe fn get(v: &mut Self::Value, id: Index) -> Self::Type { - let value: *mut Self::Value = v as *mut Self::Value; - // SAFETY: S-TODO modify Join trait - unsafe { (*value).get_mut(id) } + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_> { + // SAFETY: Since we require that the mask was checked, an element for + // `id` must have been inserted without being removed. + unsafe { value.get_mut(id) } + } +}*/ + +// SAFETY: `open` returns references to a mask and storage which are contained +// together in the `ChangeSet` and correspond. +unsafe impl<'a, T> Join for &'a mut ChangeSet { + type Mask = &'a BitSet; + type Type = &'a mut T; + type Value = SharedGetMutOnly<'a, T, DenseVecStorage>; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (&self.mask, SharedGetMutOnly::new(&mut self.inner)) + } + + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + // SAFETY: + // * Since we require that the mask was checked, an element for + // `id` must have been inserted without being removed. + // * We also require that the caller drop the value returned before + // subsequent calls with the same `id`, so there are no extant + // references that were obtained with the same `id`. + // * Since we have an exclusive reference to `Self::Value`, we know this + // isn't being called from multiple threads at once. + unsafe { value.get(id) } } } -impl<'a, T> Join for &'a ChangeSet { +// NOTE: could implement ParJoin for `&'a mut ChangeSet`/`&'a ChangeSet` + +// TODO: lifetime issues +// SAFETY: `open` returns references to a mask and storage which are contained +// together in the `ChangeSet` and correspond. +/*#[nougat::gat] +unsafe impl<'a, T> LendJoin for &'a ChangeSet { + type Mask = &'a BitSet; + type Type<'next> = &'next T; + type Value = &'a DenseVecStorage; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (&self.mask, &self.inner) + } + + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_> { + // SAFETY: Since we require that the mask was checked, an element for + // `i` must have been inserted without being removed. + unsafe { value.get(id) } + } +}*/ + +// SAFETY: `open` returns references to a mask and storage which are contained +// together in the `ChangeSet` and correspond. +unsafe impl<'a, T> Join for &'a ChangeSet { type Mask = &'a BitSet; type Type = &'a T; type Value = &'a DenseVecStorage; @@ -138,17 +192,20 @@ impl<'a, T> Join for &'a ChangeSet { (&self.mask, &self.inner) } - // `DistinctStorage` invariants are met, but no `ParJoin` implementation - // exists yet. unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - // SAFETY: S-TODO + // SAFETY: Since we require that the mask was checked, an element for + // `i` must have been inserted without being removed. unsafe { value.get(id) } } } +// S-TODO: implement LendJoin for ChangeSet +/* /// A `Join` implementation for `ChangeSet` that simply removes all the entries /// on a call to `get`. -impl Join for ChangeSet { +// SAFETY: `open` returns references to a mask and storage which are contained +// together in the `ChangeSet` and correspond. +unsafe impl Join for ChangeSet { type Mask = BitSet; type Type = T; type Value = DenseVecStorage; @@ -158,12 +215,41 @@ impl Join for ChangeSet { } unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + // S-TODO: Following the safety requirements of `Join::get`, users can get + // this to be UB by calling `get` dropping the returned value and + // calling `get` with the same `id`. 
+ // + // Note: the current `JoinIter` implementation will never do + // this. `LendJoinIter` does expose an API to do this, but it is useful + // to implement `LendJoin` so this can be joined with other types that + // only implement `LendJoin`. + // SAFETY: S-TODO + unsafe { value.remove(id) } + } +}*/ + +/// A `Join` implementation for `ChangeSet` that simply removes all the entries +/// on a call to `get`. +// SAFETY: `open` returns references to a mask and storage which are contained +// together in the `ChangeSet` and correspond. +unsafe impl Join for ChangeSet { + type Mask = BitSet; + type Type = T; + type Value = DenseVecStorage; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + (self.mask, self.inner) + } + + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + // S-TODO this may not actually be safe, see the documentation on `remove` call // NOTE: Following the safety requirements of `Join::get`, users can get // this to panic by calling `get` dropping the returned value and // calling `get` with the same `id`. However, such a panic isn't // unsound. Also, the current `JoinIter` implementation will never do - // this. - // SAFETY: S-TODO + // this. `LendJoinIter` does expose an API to do this, but it is useful + // to implement `LendJoin` so this can be joined with other types that + // only implement `LendJoin`. unsafe { value.remove(id) } } } diff --git a/src/lib.rs b/src/lib.rs index 4f66cc624..4038ea5ec 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -203,7 +203,7 @@ pub extern crate uuid; pub mod saveload; mod bitset; -// D-TODO pub mod changeset; +pub mod changeset; pub mod error; pub mod join; pub mod prelude; @@ -227,7 +227,7 @@ pub use specs_derive::{Component, ConvertSaveload}; #[cfg(feature = "parallel")] pub use crate::join::ParJoin; pub use crate::{ - // D-TODO changeset::ChangeSet, + changeset::ChangeSet, join::{Join, LendJoin}, storage::{ DefaultVecStorage, DenseVecStorage, FlaggedStorage, HashMapStorage, NullStorage, diff --git a/src/prelude.rs b/src/prelude.rs index 4b0d4fee3..91e815cfe 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -2,9 +2,11 @@ //! //! 
Contains all of the most common traits, structures, +pub use crate::join::Join; +#[nougat::gat(Type)] +pub use crate::join::LendJoin; #[cfg(feature = "parallel")] pub use crate::join::ParJoin; -pub use crate::join::{Join, LendJoin}; pub use hibitset::BitSet; pub use shred::{ Accessor, Dispatcher, DispatcherBuilder, Read, ReadExpect, Resource, ResourceId, RunNow, diff --git a/src/saveload/marker.rs b/src/saveload/marker.rs index 5aebdd85c..203e7324c 100644 --- a/src/saveload/marker.rs +++ b/src/saveload/marker.rs @@ -11,6 +11,7 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use crate::{ prelude::*, + storage::AccessMut, world::{EntitiesRes, EntityResBuilder, LazyBuilder}, }; @@ -324,7 +325,7 @@ pub trait MarkerAllocator: Resource { ) -> Entity { if let Some(entity) = self.retrieve_entity_internal(marker.id()) { if let Some(mut marker_comp) = storage.get_mut(entity) { - marker_comp.update(marker); + marker_comp.access_mut().update(marker); return entity; } diff --git a/src/storage/deref_flagged.rs b/src/storage/deref_flagged.rs index a46c9fd0e..f2f53046d 100644 --- a/src/storage/deref_flagged.rs +++ b/src/storage/deref_flagged.rs @@ -6,7 +6,9 @@ use std::{ use hibitset::BitSetLike; use crate::{ - storage::{ComponentEvent, DenseVecStorage, Tracked, TryDefault, UnprotectedStorage}, + storage::{ + AccessMut, ComponentEvent, DenseVecStorage, Tracked, TryDefault, UnprotectedStorage, + }, world::{Component, Index}, }; @@ -143,12 +145,12 @@ where impl<'a, A, C> DerefMut for FlaggedAccessMut<'a, A, C> where - A: DerefMut, + A: AccessMut, { fn deref_mut(&mut self) -> &mut Self::Target { if self.emit { self.channel.single_write(ComponentEvent::Modified(self.id)); } - self.access.deref_mut() + self.access.access_mut() } } diff --git a/src/storage/generic.rs b/src/storage/generic.rs index f5e93f2a9..6a4e8d348 100644 --- a/src/storage/generic.rs +++ b/src/storage/generic.rs @@ -1,11 +1,9 @@ #[cfg(feature = "nightly")] -use crate::storage::UnprotectedStorage; +use crate::storage::{AccessMut, UnprotectedStorage}; use crate::{ storage::{AccessMutReturn, InsertResult, ReadStorage, WriteStorage}, world::{Component, Entity}, }; -#[cfg(feature = "nightly")] -use std::ops::DerefMut; pub struct Seal; @@ -88,7 +86,7 @@ pub trait GenericWriteStorage { type Component: Component; /// The wrapper through with mutable access of a component is performed. #[cfg(feature = "nightly")] - type AccessMut<'a> + type AccessMut<'a>: AccessMut where Self: 'a; diff --git a/src/storage/mod.rs b/src/storage/mod.rs index c8da1ec2f..8b3d1fc30 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -383,7 +383,7 @@ where // SAFETY: We have exclusive access (which ensures no aliasing or // concurrent calls from other threads) and we checked the mask, so // all invariants are met. - std::mem::swap(&mut v, unsafe { self.data.inner.get_mut(id) }.deref_mut()); + std::mem::swap(&mut v, unsafe { self.data.inner.get_mut(id) }.access_mut()); Ok(Some(v)) } else { // SAFETY: The mask was previously empty, so it is safe to insert. @@ -540,18 +540,22 @@ where } mod shared_get_mut_only { - use super::{AccessMutReturn, Component, Index, SharedGetMutStorage}; + use super::{AccessMutReturn, Index, SharedGetMutStorage}; + use core::marker::PhantomData; /// This type provides a way to ensure only `shared_get_mut` can be called /// for the lifetime `'a` and that no references previously obtained from /// the storage exist when it is created. 
While internally this is a shared /// reference, constructing it requires an exclusive borrow for the lifetime /// `'a`. - pub struct SharedGetMutOnly<'a, T: Component>(&'a T::Storage); + /// + /// This is useful for implementations of [`Join`](super::Join) and + /// [`ParJoin`](super::ParJoin). + pub struct SharedGetMutOnly<'a, T, S>(&'a S, PhantomData); - impl<'a, T: Component> SharedGetMutOnly<'a, T> { - pub(super) fn new(storage: &'a mut T::Storage) -> Self { - Self(storage) + impl<'a, T, S> SharedGetMutOnly<'a, T, S> { + pub fn new(storage: &'a mut S) -> Self { + Self(storage, PhantomData) } /// # Safety @@ -567,9 +571,9 @@ mod shared_get_mut_only { /// /// Unless `T::Storage` implements `DistinctStorage`, calling this from /// multiple threads at once is unsound. - pub(super) unsafe fn get(&self, i: Index) -> AccessMutReturn<'a, T> + pub unsafe fn get(&self, i: Index) -> AccessMutReturn<'a, T> where - T::Storage: SharedGetMutStorage, + S: SharedGetMutStorage, { // SAFETY: `Self::new` takes an exclusive reference to this storage, // ensuring there are no extant references to its content at the @@ -586,7 +590,7 @@ mod shared_get_mut_only { } } } -use shared_get_mut_only::SharedGetMutOnly; +pub use shared_get_mut_only::SharedGetMutOnly; // SAFETY: The mask and unprotected storage contained in `MaskedStorage` // correspond and `open` returns references to them from the same @@ -600,7 +604,7 @@ where { type Mask = &'a BitSet; type Type = AccessMutReturn<'a, T>; - type Value = SharedGetMutOnly<'a, T>; + type Value = SharedGetMutOnly<'a, T, T::Storage>; unsafe fn open(self) -> (Self::Mask, Self::Value) { let (mask, value) = self.data.open_mut(); @@ -636,7 +640,7 @@ where { type Mask = &'a BitSet; type Type = AccessMutReturn<'a, T>; - type Value = SharedGetMutOnly<'a, T>; + type Value = SharedGetMutOnly<'a, T, T::Storage>; unsafe fn open(self) -> (Self::Mask, Self::Value) { let (mask, value) = self.data.open_mut(); @@ -705,11 +709,31 @@ macro_rules! get_mut_docs { }; } +/// DerefMut without autoderefing. +/// +/// Allows forcing mutable access to be explicit. Useful to implement a flagged +/// storage where it is easier to discover sites where components are marked as +/// mutated. Of course, individual storages can use an associated `AccessMut` +/// type that also implements `DerefMut`, but this provides the common denominator. +pub trait AccessMut: core::ops::Deref { + /// This may generate a mutation event for certain flagged storages. + fn access_mut(&mut self) -> &mut Self::Target; +} + +impl AccessMut for T +where + T: core::ops::DerefMut, +{ + fn access_mut(&mut self) -> &mut Self::Target { + &mut *self + } +} + /// Used by the framework to quickly join components. pub trait UnprotectedStorage: TryDefault { /// The wrapper through with mutable access of a component is performed. #[cfg(feature = "nightly")] - type AccessMut<'a> + type AccessMut<'a>: AccessMut where Self: 'a; @@ -829,10 +853,10 @@ macro_rules! shared_get_mut_docs { }; } -trait SharedGetMutStorage: UnprotectedStorage { +pub trait SharedGetMutStorage: UnprotectedStorage { #[cfg(feature = "nightly")] shared_get_mut_docs! 
{ - unsafe fn shared_get_ut(&self, id: Index) -> >::AccessMut<'_>; + unsafe fn shared_get_mut(&self, id: Index) -> >::AccessMut<'_>; } #[cfg(not(feature = "nightly"))] From 25b27e581b1d17da64c43f31ae4d7e1446872e3c Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 22 Jan 2023 01:37:56 -0500 Subject: [PATCH 11/47] Modify LendJoin to support ChangeSet impl of LendJoin with non 'static T --- src/bitset.rs | 5 ++++- src/join/lend_join.rs | 8 ++++++-- src/join/maybe.rs | 5 ++++- src/join/mod.rs | 15 ++++++++++++--- src/storage/mod.rs | 16 +++++++++++++--- src/world/entity.rs | 5 ++++- 6 files changed, 43 insertions(+), 11 deletions(-) diff --git a/src/bitset.rs b/src/bitset.rs index b18b045a3..5b935dc10 100644 --- a/src/bitset.rs +++ b/src/bitset.rs @@ -30,7 +30,10 @@ macro_rules! define_bit_join { (self, ()) } - unsafe fn get(_: &mut Self::Value, id: Index) -> Self::Type<'_> { + unsafe fn get<'next>(_: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { id } } diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index 0b25600e2..8bff5d2bd 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -20,7 +20,9 @@ pub unsafe trait LendJoin { /// This type is using macro magic to emulate GATs on stable. So to refer to /// it you need to use the [`LendJoinType<'next, J>`](LendJoinType) type /// alias. - type Type<'next>; + type Type<'next> + where + Self: 'next; /// Type of joined storages. type Value; /// Type of joined bit mask. @@ -116,7 +118,9 @@ pub unsafe trait LendJoin { /// /// * A call to `get` must be preceded by a check if `id` is part of /// `Self::Mask` - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_>; + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next; /// If this `LendJoin` typically returns all indices in the mask, then /// iterating over only it or combined with other joins that are also diff --git a/src/join/maybe.rs b/src/join/maybe.rs index ede41e4f2..d28f39427 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -36,7 +36,10 @@ where (BitSetAll, (mask, value)) } - unsafe fn get((mask, value): &mut Self::Value, id: Index) -> Self::Type<'_> { + unsafe fn get<'next>((mask, value): &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { if mask.contains(id) { // SAFETY: The mask was just checked for `id`. Some(unsafe { ::get(value, id) }) diff --git a/src/join/mod.rs b/src/join/mod.rs index 218a56917..aaeb11fa9 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -215,7 +215,10 @@ macro_rules! define_open { } #[allow(non_snake_case)] - unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type<'_> { + unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> Self::Type<'next> + where + Self: 'next, + { let &mut ($(ref mut $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked // the mask, which only has a key that exists in all of the @@ -369,7 +372,10 @@ macro_rules! immutable_resource_join { unsafe { self.deref().open() } } - unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type<'_> { + unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> Self::Type<'next> + where + Self: 'next, + { // SAFETY: The mask of `Self` and `T` are identical, thus a // check to `Self`'s mask (which is required) is equal to a // check of `T`'s mask, which makes `get` safe to call. @@ -470,7 +476,10 @@ macro_rules! 
mutable_resource_join { unsafe { self.deref_mut().open() } } - unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type<'_> { + unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> Self::Type<'next> + where + Self: 'next, + { // SAFETY: The mask of `Self` and `T` are identical, thus a check to // `Self`'s mask (which is required) is equal to a check of `T`'s // mask, which makes `get_mut` safe to call. diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 8b3d1fc30..0efbcaa34 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -76,7 +76,11 @@ unsafe impl<'a> LendJoin for AntiStorage<'a> { (BitSetNot(self.0), ()) } - unsafe fn get(_: &mut (), _: Index) {} + unsafe fn get<'next>(_: &'next mut (), _: Index) + where + Self: 'next, + { + } } // SAFETY: Items are just `()` and it is always safe to retrieve them regardless @@ -458,7 +462,10 @@ where (&self.data.mask, &self.data.inner) } - unsafe fn get(v: &mut Self::Value, i: Index) -> &'a T { + unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> &'a T + where + Self: 'next, + { // SAFETY: Since we require that the mask was checked, an element for // `i` must have been inserted without being removed. unsafe { v.get(i) } @@ -532,7 +539,10 @@ where self.data.open_mut() } - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_> { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { // SAFETY: Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. unsafe { value.get_mut(id) } diff --git a/src/world/entity.rs b/src/world/entity.rs index 508c07b62..27daa20d0 100644 --- a/src/world/entity.rs +++ b/src/world/entity.rs @@ -330,7 +330,10 @@ unsafe impl<'a> LendJoin for &'a EntitiesRes { (BitSetOr(&self.alloc.alive, &self.alloc.raised), self) } - unsafe fn get(v: &mut &'a EntitiesRes, id: Index) -> Entity { + unsafe fn get<'next>(v: &'next mut &'a EntitiesRes, id: Index) -> Entity + where + Self: 'next, + { let gen = v .alloc .generation(id) From 448c9dedb1a1e5f096d23027caadc581ddcc5986 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 22 Jan 2023 01:44:24 -0500 Subject: [PATCH 12/47] Implement LendJoin for owned and referenced ChangeSet --- src/changeset.rs | 36 +++++++++++++++++++++--------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/src/changeset.rs b/src/changeset.rs index b5b147e68..ea656389f 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -115,10 +115,9 @@ where } } -// TODO: lifetime issues // SAFETY: `open` returns references to a mask and storage which are contained // together in the `ChangeSet` and correspond. -/*#[nougat::gat] +#[nougat::gat] unsafe impl<'a, T> LendJoin for &'a mut ChangeSet { type Mask = &'a BitSet; type Type<'next> = &'next mut T; @@ -128,12 +127,15 @@ unsafe impl<'a, T> LendJoin for &'a mut ChangeSet { (&self.mask, &mut self.inner) } - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_> { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { // SAFETY: Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. unsafe { value.get_mut(id) } } -}*/ +} // SAFETY: `open` returns references to a mask and storage which are contained // together in the `ChangeSet` and correspond. 
@@ -161,25 +163,27 @@ unsafe impl<'a, T> Join for &'a mut ChangeSet { // NOTE: could implement ParJoin for `&'a mut ChangeSet`/`&'a ChangeSet` -// TODO: lifetime issues // SAFETY: `open` returns references to a mask and storage which are contained // together in the `ChangeSet` and correspond. -/*#[nougat::gat] +#[nougat::gat] unsafe impl<'a, T> LendJoin for &'a ChangeSet { type Mask = &'a BitSet; - type Type<'next> = &'next T; + type Type<'next> = &'a T; type Value = &'a DenseVecStorage; unsafe fn open(self) -> (Self::Mask, Self::Value) { (&self.mask, &self.inner) } - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type<'_> { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { // SAFETY: Since we require that the mask was checked, an element for // `i` must have been inserted without being removed. unsafe { value.get(id) } } -}*/ +} // SAFETY: `open` returns references to a mask and storage which are contained // together in the `ChangeSet` and correspond. @@ -199,22 +203,24 @@ unsafe impl<'a, T> Join for &'a ChangeSet { } } -// S-TODO: implement LendJoin for ChangeSet -/* /// A `Join` implementation for `ChangeSet` that simply removes all the entries /// on a call to `get`. // SAFETY: `open` returns references to a mask and storage which are contained // together in the `ChangeSet` and correspond. -unsafe impl Join for ChangeSet { +#[nougat::gat] +unsafe impl LendJoin for ChangeSet { type Mask = BitSet; - type Type = T; + type Type<'next> = T; type Value = DenseVecStorage; unsafe fn open(self) -> (Self::Mask, Self::Value) { (self.mask, self.inner) } - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { // S-TODO: Following the safety requirements of `Join::get`, users can get // this to be UB by calling `get` dropping the returned value and // calling `get` with the same `id`. @@ -226,7 +232,7 @@ unsafe impl Join for ChangeSet { // SAFETY: S-TODO unsafe { value.remove(id) } } -}*/ +} /// A `Join` implementation for `ChangeSet` that simply removes all the entries /// on a call to `get`. From 919434b11e1cfb82f29df8cfb7fea8a0d1cc2b9c Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 24 Jan 2023 01:19:22 -0500 Subject: [PATCH 13/47] Changes to allow soundly implementing Join/LendJoin for the owned ChangeSet where iterating it removes items. * Added additional requirement to Join::get/LendJoin::get that it can not be called multiple times with the same ID. * Added unsafe `RepeatableLendGet` trait to allow opt-out of this requirement so that a safe `JoinLendIter::get` method can remain exposed. * Updated relevant safety comments for uses/impls of `LendJoin::get`. * TODO for next commit: update all uses/impls of `Join::get` to ensure they correspond with the requirement changes. 
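As a rough standalone sketch of the distinction `RepeatableLendGet` is meant
to express (the `TakeOnce`/`Reborrow` types below are illustrative only, not
part of the crate): a getter that moves the value out can only be called once
per id, while a getter that merely reborrows can be repeated safely, which is
why the safe `JoinLendIter::get`/`get_unchecked` methods can stay exposed
behind that marker bound:

    use std::collections::HashMap;

    /// Analogous to `LendJoin::get` for the owned `ChangeSet`: the value is
    /// moved out of the backing storage, so a second call with the same id
    /// has nothing left to return (and would be UB in the unchecked case).
    struct TakeOnce<T>(HashMap<u32, T>);

    impl<T> TakeOnce<T> {
        fn get(&mut self, id: u32) -> T {
            self.0.remove(&id).expect("each id is taken at most once")
        }
    }

    /// Analogous to storages whose `get` only reborrows: repeating an id is
    /// fine, which is what a `RepeatableLendGet` impl advertises.
    struct Reborrow<T>(HashMap<u32, T>);

    impl<T> Reborrow<T> {
        fn get(&mut self, id: u32) -> &mut T {
            self.0.get_mut(&id).expect("id present in the mask")
        }
    }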
--- src/bitset.rs | 8 ++++++- src/changeset.rs | 55 ++++++++++++++++++++++++++----------------- src/join/lend_join.rs | 49 ++++++++++++++++++++++++++++++-------- src/join/maybe.rs | 12 +++++++--- src/join/mod.rs | 49 ++++++++++++++++++++++++++++++++------ src/storage/mod.rs | 27 ++++++++++++++++++--- src/world/entity.rs | 11 ++++++++- 7 files changed, 165 insertions(+), 46 deletions(-) diff --git a/src/bitset.rs b/src/bitset.rs index 5b935dc10..ab06bc0e9 100644 --- a/src/bitset.rs +++ b/src/bitset.rs @@ -7,11 +7,11 @@ use hibitset::{AtomicBitSet, BitSet, BitSetAnd, BitSetLike, BitSetNot, BitSetOr, BitSetXor}; -use crate::join::Join; #[nougat::gat(Type)] use crate::join::LendJoin; #[cfg(feature = "parallel")] use crate::join::ParJoin; +use crate::join::{Join, RepeatableLendGet}; use crate::world::Index; macro_rules! define_bit_join { @@ -37,6 +37,12 @@ macro_rules! define_bit_join { id } } + + // SAFETY: <$biset as LendJoin>::get does not rely on only being called + // once with a particular ID + unsafe impl<$( $lifetime, )* $( $arg ),*> RepeatableLendGet for $bitset + where $( $arg: BitSetLike ),* {} + // SAFETY: `get` just returns the provided `id` (`Self::Value` is `()` // and corresponds with any mask instance). unsafe impl<$( $lifetime, )* $( $arg ),*> Join for $bitset diff --git a/src/changeset.rs b/src/changeset.rs index ea656389f..122baa2b9 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -3,6 +3,7 @@ use std::{iter::FromIterator, ops::AddAssign}; use crate::{ + join::RepeatableLendGet, prelude::*, storage::{SharedGetMutOnly, UnprotectedStorage}, world::Index, @@ -116,7 +117,8 @@ where } // SAFETY: `open` returns references to a mask and storage which are contained -// together in the `ChangeSet` and correspond. +// together in the `ChangeSet` and correspond. Iterating mask does not repeat +// indices. #[nougat::gat] unsafe impl<'a, T> LendJoin for &'a mut ChangeSet { type Mask = &'a BitSet; @@ -137,6 +139,10 @@ unsafe impl<'a, T> LendJoin for &'a mut ChangeSet { } } +// SAFETY: LendJoin::get impl for this type can safely be called multiple times +// with the same ID. +unsafe impl<'a, T> RepeatableLendGet for &'a mut ChangeSet {} + // SAFETY: `open` returns references to a mask and storage which are contained // together in the `ChangeSet` and correspond. unsafe impl<'a, T> Join for &'a mut ChangeSet { @@ -164,7 +170,8 @@ unsafe impl<'a, T> Join for &'a mut ChangeSet { // NOTE: could implement ParJoin for `&'a mut ChangeSet`/`&'a ChangeSet` // SAFETY: `open` returns references to a mask and storage which are contained -// together in the `ChangeSet` and correspond. +// together in the `ChangeSet` and correspond. Iterating mask does not repeat +// indices. #[nougat::gat] unsafe impl<'a, T> LendJoin for &'a ChangeSet { type Mask = &'a BitSet; @@ -180,11 +187,15 @@ unsafe impl<'a, T> LendJoin for &'a ChangeSet { Self: 'next, { // SAFETY: Since we require that the mask was checked, an element for - // `i` must have been inserted without being removed. + // `id` must have been inserted without being removed. unsafe { value.get(id) } } } +// SAFETY: LendJoin::get impl for this type can safely be called multiple times +// with the same ID. +unsafe impl<'a, T> RepeatableLendGet for &'a ChangeSet {} + // SAFETY: `open` returns references to a mask and storage which are contained // together in the `ChangeSet` and correspond. 
unsafe impl<'a, T> Join for &'a ChangeSet { @@ -198,7 +209,7 @@ unsafe impl<'a, T> Join for &'a ChangeSet { unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { // SAFETY: Since we require that the mask was checked, an element for - // `i` must have been inserted without being removed. + // `id` must have been inserted without being removed. unsafe { value.get(id) } } } @@ -206,7 +217,8 @@ unsafe impl<'a, T> Join for &'a ChangeSet { /// A `Join` implementation for `ChangeSet` that simply removes all the entries /// on a call to `get`. // SAFETY: `open` returns references to a mask and storage which are contained -// together in the `ChangeSet` and correspond. +// together in the `ChangeSet` and correspond. Iterating mask does not repeat +// indices. #[nougat::gat] unsafe impl LendJoin for ChangeSet { type Mask = BitSet; @@ -221,15 +233,17 @@ unsafe impl LendJoin for ChangeSet { where Self: 'next, { - // S-TODO: Following the safety requirements of `Join::get`, users can get - // this to be UB by calling `get` dropping the returned value and - // calling `get` with the same `id`. + // NOTE: This impl is the main reason that `RepeatableLendGet` exists + // since it moves the value out of the backing storage and thus can't + // be called multiple times with the same ID! // - // Note: the current `JoinIter` implementation will never do - // this. `LendJoinIter` does expose an API to do this, but it is useful - // to implement `LendJoin` so this can be joined with other types that - // only implement `LendJoin`. - // SAFETY: S-TODO + // SAFETY: Since we require that the mask was checked, an element for + // `id` must have been inserted without being removed. Note, this + // removes the element without effecting the mask. However, the caller + // is also required to not call this multiple times with the same `id` + // value and mask instance. Because `open` takes ownership we don't have + // to update the mask for futures uses since the `ChangeSet` is + // consumed. unsafe { value.remove(id) } } } @@ -248,14 +262,13 @@ unsafe impl Join for ChangeSet { } unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - // S-TODO this may not actually be safe, see the documentation on `remove` call - // NOTE: Following the safety requirements of `Join::get`, users can get - // this to panic by calling `get` dropping the returned value and - // calling `get` with the same `id`. However, such a panic isn't - // unsound. Also, the current `JoinIter` implementation will never do - // this. `LendJoinIter` does expose an API to do this, but it is useful - // to implement `LendJoin` so this can be joined with other types that - // only implement `LendJoin`. + // SAFETY: Since we require that the mask was checked, an element for + // `id` must have been inserted without being removed. Note, this + // removes the element without effecting the mask. However, the caller + // is also required to not call this multiple times with the same `id` + // value and mask instance. Because `open` takes ownership we don't have + // to update the mask for futures uses since the `ChangeSet` is + // consumed. 
unsafe { value.remove(id) } } } diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index 8bff5d2bd..2d0dcd9f6 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -10,7 +10,9 @@ use crate::world::{Entities, Entity, Index}; /// /// The `Self::Mask` value returned with the `Self::Value` must correspond such /// that it is safe to retrieve items from `Self::Value` whose presence is -/// indicated in the mask. +/// indicated in the mask. As part of this, `BitSetLike::iter` must not produce +/// an iterator that repeats an `Index` value if the `LendJoin::get` impl relies +/// on not being called twice with the same `Index`. #[nougat::gat] pub unsafe trait LendJoin { /// Type of joined components. @@ -118,6 +120,11 @@ pub unsafe trait LendJoin { /// /// * A call to `get` must be preceded by a check if `id` is part of /// `Self::Mask` + /// * Multiple calls with the same `id` are not allowed, for a particular + /// instance of the values from [`open`](Join::open). Unless this type + /// implements the unsafe trait [`RepeatableLendGet`]. + /// (S-TODO update callers to match edit) + /// (S-TODO update immplemetors to match edit) unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> where Self: 'next; @@ -132,6 +139,12 @@ pub unsafe trait LendJoin { } } +/// # Safety +/// +/// Implementing this trait guarantees that `::get` can soundly be called +/// multiple times with the same ID. +pub unsafe trait RepeatableLendGet: LendJoin {} + /// Type alias to refer to the `::Type<'next>` (except this /// doesn't actually exist in this form so the `nougat::Gat!` macro is needed). pub type LendJoinType<'next, J> = nougat::Gat!(::Type<'next>); @@ -172,8 +185,10 @@ impl JoinLendIter { /// /// `while let Some(components) = join_lending_iter.next() {` pub fn next(&mut self) -> Option> { - // SAFETY: since `idx` is yielded from `keys` (the mask), it is necessarily a - // part of it. Thus, requirements are fulfilled for calling `get`. + // SAFETY: Since `idx` is yielded from `keys` (the mask), it is + // necessarily a part of it. `LendJoin` requires that the iterator + // doesn't repeat indices and we advance the iterator for each `get` + // call in all methods that don't require `RepeatableLendGet`. self.keys .next() .map(|idx| unsafe { J::get(&mut self.values, idx) }) @@ -182,9 +197,10 @@ impl JoinLendIter { /// Calls a closure on each entity in the join. pub fn for_each(mut self, mut f: impl FnMut(LendJoinType<'_, J>)) { self.keys.for_each(|idx| { - // SAFETY: since `idx` is yielded from `keys` (the mask), it is - // necessarily a part of it. Thus, requirements are fulfilled for - // calling `get`. + // SAFETY: Since `idx` is yielded from `keys` (the mask), it is + // necessarily a part of it. `LendJoin` requires that the iterator + // doesn't repeat indices and we advance the iterator for each `get` + // call in all methods that don't require `RepeatableLendGet`. let item = unsafe { J::get(&mut self.values, idx) }; f(item); }) @@ -239,9 +255,14 @@ impl JoinLendIter { /// ); /// } /// ``` - pub fn get(&mut self, entity: Entity, entities: &Entities) -> Option> { + pub fn get(&mut self, entity: Entity, entities: &Entities) -> Option> + where + J: RepeatableLendGet, + { if self.keys.contains(entity.id()) && entities.is_alive(entity) { - // SAFETY: the mask (`keys`) is checked as specified in the docs of `get`. + // SAFETY: the mask (`keys`) is checked as specified in the docs of + // `get`. 
We require `J: RepeatableJoinGet` so this can be safely + // called multiple time with the same ID. Some(unsafe { J::get(&mut self.values, entity.id()) }) } else { None @@ -255,9 +276,17 @@ impl JoinLendIter { /// /// As this method operates on raw indices, there is no check to see if the /// entity is still alive, so the caller should ensure it instead. - pub fn get_unchecked(&mut self, index: Index) -> Option> { + /// + /// Note: Not checking is still sound (thus this method is safe to call), + /// but this can return data from deleted entities! + pub fn get_unchecked(&mut self, index: Index) -> Option> + where + J: RepeatableLendGet, + { if self.keys.contains(index) { - // SAFETY: the mask (`keys`) is checked as specified in the docs of `get`. + // SAFETY: the mask (`keys`) is checked as specified in the docs of + // `get`. We require `J: RepeatableJoinGet` so this can be safely + // called multiple time with the same ID. Some(unsafe { J::get(&mut self.values, index) }) } else { None diff --git a/src/join/maybe.rs b/src/join/maybe.rs index d28f39427..834842447 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -1,6 +1,6 @@ #[nougat::gat(Type)] use super::LendJoin; -use super::{Join, ParJoin}; +use super::{Join, ParJoin, RepeatableLendGet}; use hibitset::{BitSetAll, BitSetLike}; use crate::world::Index; @@ -18,7 +18,7 @@ use crate::world::Index; pub struct MaybeJoin(pub J); // SAFETY: We return a mask containing all items, but check the original mask in -// the `get` implementation. +// the `get` implementation. Iterating the mask does not repeat indices. #[nougat::gat] unsafe impl LendJoin for MaybeJoin where @@ -41,7 +41,9 @@ where Self: 'next, { if mask.contains(id) { - // SAFETY: The mask was just checked for `id`. + // SAFETY: The mask was just checked for `id`. Requirement to not + // call with the same ID more than once (unless `RepeatableLendGet` + // is implemented) is passed to the caller. Some(unsafe { ::get(value, id) }) } else { None @@ -54,6 +56,10 @@ where } } +// SAFETY: ::get does not rely on only being called once +// with a particular ID. +unsafe impl RepeatableLendGet for MaybeJoin where T: RepeatableLendGet {} + // SAFETY: We return a mask containing all items, but check the original mask in // the `get` implementation. unsafe impl Join for MaybeJoin diff --git a/src/join/mod.rs b/src/join/mod.rs index aaeb11fa9..cff7065fd 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -15,7 +15,7 @@ mod par_join; pub use bit_and::BitAnd; #[nougat::gat(Type)] pub use lend_join::LendJoin; -pub use lend_join::{JoinLendIter, LendJoinType}; +pub use lend_join::{JoinLendIter, LendJoinType, RepeatableLendGet}; pub use maybe::MaybeJoin; #[cfg(feature = "parallel")] pub use par_join::{JoinParIter, ParJoin}; @@ -129,8 +129,10 @@ pub unsafe trait Join { /// /// * A call to `get` must be preceded by a check if `id` is part of /// `Self::Mask`. - /// * The value returned from this method must be dropped before subsequent - /// calls with the same `id`. (S-TODO update callers to match edit) + /// * Multiple calls with the same `id` are not allowed, for a particular + /// instance of the values from [`open`](Join::open). + /// (S-TODO update callers to match edit) + /// (S-TODO update immplemetors to match edit) unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type; /// If this `Join` typically returns all indices in the mask, then iterating @@ -192,6 +194,7 @@ macro_rules! 
define_open { // SAFETY: The returned mask in `open` is the intersection of the masks // from each type in this tuple. So if an `id` is present in the // combined mask, it will be safe to retrieve the corresponding items. + // Iterating the mask does not repeat indices. #[nougat::gat] unsafe impl<$($from,)*> LendJoin for ($($from),*,) where $($from: LendJoin),*, @@ -222,7 +225,9 @@ macro_rules! define_open { let &mut ($(ref mut $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked // the mask, which only has a key that exists in all of the - // storages. + // storages. Requirement to Requirement to not call with the + // same ID more than once (unless `RepeatableLendGet` is + // implemented) is passed to the caller. unsafe { ($($from::get($from, i),)*) } } @@ -234,6 +239,13 @@ macro_rules! define_open { } } + // SAFETY: Tuple impls of `LendJoin` simply defer to the individual + // storages. Thus, if all of them implement this, it is safe to call + // `LendJoin::get` multiple times with the same ID. + unsafe impl<$($from,)*> RepeatableLendGet for ($($from),*,) + where $($from: RepeatableLendGet),*, + ($(<$from as LendJoin>::Mask,)*): BitAnd, {} + // SAFETY: The returned mask in `open` is the intersection of the masks // from each type in this tuple. So if an `id` is present in the // combined mask, it will be safe to retrieve the corresponding items. @@ -379,6 +391,9 @@ macro_rules! immutable_resource_join { // SAFETY: The mask of `Self` and `T` are identical, thus a // check to `Self`'s mask (which is required) is equal to a // check of `T`'s mask, which makes `get` safe to call. + // Requirement to not call with the same ID more than once + // (unless `RepeatableLendGet` is implemented) is passed to the + // caller. unsafe { <&'a T as LendJoin>::get(v, i) } } @@ -388,6 +403,14 @@ macro_rules! immutable_resource_join { } } + // SAFETY: <&'a $ty as LendJoin>::get does not rely on only being called + // once with a particular ID as long as `&'a T` does not rely on this. + unsafe impl<'a, 'b, T> RepeatableLendGet for &'a $ty + where + &'a T: RepeatableLendGet, + T: Resource, + {} + // SAFETY: Since `T` implements `Join` it is safe to deref and defer to // its implementation. unsafe impl<'a, 'b, T> Join for &'a $ty @@ -480,9 +503,12 @@ macro_rules! mutable_resource_join { where Self: 'next, { - // SAFETY: The mask of `Self` and `T` are identical, thus a check to - // `Self`'s mask (which is required) is equal to a check of `T`'s - // mask, which makes `get_mut` safe to call. + // SAFETY: The mask of `Self` and `T` are identical, thus a + // check to `Self`'s mask (which is required) is equal to a + // check of `T`'s mask, which makes `get_mut` safe to call. + // Requirement to not call with the same ID more than once + // (unless `RepeatableLendGet` is implemented) is passed to the + // caller. unsafe { <&'a mut T as LendJoin>::get(v, i) } } @@ -492,6 +518,15 @@ macro_rules! mutable_resource_join { } } + // SAFETY: <&'a mut $ty as LendJoin>::get does not rely on only being + // called once with a particular ID as long as `&'a mut T` does not rely + // on this. + unsafe impl<'a, 'b, T> RepeatableLendGet for &'a mut $ty + where + &'a mut T: RepeatableLendGet, + T: Resource, + {} + // SAFETY: Since `T` implements `Join` it is safe to deref and defer to // its implementation. 
unsafe impl<'a, 'b, T> Join for &'a mut $ty diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 0efbcaa34..82c34a35c 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -34,7 +34,7 @@ use crate::join::LendJoin; use crate::join::ParJoin; use crate::{ error::{Error, WrongGeneration}, - join::Join, + join::{Join, RepeatableLendGet}, world::{Component, EntitiesRes, Entity, Index}, }; @@ -83,6 +83,9 @@ unsafe impl<'a> LendJoin for AntiStorage<'a> { } } +// SAFETY: ::get does nothing. +unsafe impl RepeatableLendGet for AntiStorage<'_> {} + // SAFETY: Items are just `()` and it is always safe to retrieve them regardless // of the mask and value returned by `open`. unsafe impl<'a> Join for AntiStorage<'a> { @@ -447,7 +450,7 @@ where // SAFETY: The mask and unprotected storage contained in `MaskedStorage` // correspond and `open` returns references to them from the same -// `MaskedStorage` instance. +// `MaskedStorage` instance. Iterating the mask does not repeat indices. #[nougat::gat] unsafe impl<'a, 'e, T, D> LendJoin for &'a Storage<'e, T, D> where @@ -472,6 +475,15 @@ where } } +// SAFETY: LendJoin::get impl for this type is safe to call multiple times with +// the same ID. +unsafe impl<'a, 'e, T, D> RepeatableLendGet for &'a Storage<'e, T, D> +where + T: Component, + D: Deref>, +{ +} + // SAFETY: The mask and unprotected storage contained in `MaskedStorage` // correspond and `open` returns references to them from the same // `MaskedStorage` instance. @@ -524,7 +536,7 @@ where // SAFETY: The mask and unprotected storage contained in `MaskedStorage` // correspond and `open` returns references to them from the same -// `MaskedStorage` instance. +// `MaskedStorage` instance. Iterating the mask does not repeat indices. #[nougat::gat] unsafe impl<'a, 'e, T, D> LendJoin for &'a mut Storage<'e, T, D> where @@ -549,6 +561,15 @@ where } } +// SAFETY: LendJoin::get impl for this type is safe to call multiple times with +// the same ID. +unsafe impl<'a, 'e, T, D> RepeatableLendGet for &'a mut Storage<'e, T, D> +where + T: Component, + D: DerefMut>, +{ +} + mod shared_get_mut_only { use super::{AccessMutReturn, Index, SharedGetMutStorage}; use core::marker::PhantomData; diff --git a/src/world/entity.rs b/src/world/entity.rs index 27daa20d0..b4a132d62 100644 --- a/src/world/entity.rs +++ b/src/world/entity.rs @@ -11,7 +11,12 @@ use shred::Read; use crate::join::LendJoin; #[cfg(feature = "parallel")] use crate::join::ParJoin; -use crate::{error::WrongGeneration, join::Join, storage::WriteStorage, world::Component}; +use crate::{ + error::WrongGeneration, + join::{Join, RepeatableLendGet}, + storage::WriteStorage, + world::Component, +}; /// An index is basically the id of an `Entity`. pub type Index = u32; @@ -343,6 +348,10 @@ unsafe impl<'a> LendJoin for &'a EntitiesRes { } } +// SAFETY: ::get does not rely on only being called once +// with a particular ID. +unsafe impl<'a> RepeatableLendGet for &'a EntitiesRes {} + // SAFETY: It is safe to retrieve elements with any `id` regardless of the mask. 
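// For illustration (a sketch; `Pos` and its storage are as in the
// `lend_join` example added later in this series): joining `Entities` only
// hands out `Entity` values for each live index, so nothing is moved or
// mutated and repeated `get`s with the same index are trivially fine:
//
//     let entities = world.entities();
//     let positions = world.read_storage::<Pos>();
//     for (e, pos) in (&entities, &positions).join() {
//         println!("{:?} is at {}", e, pos.0);
//     }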
unsafe impl<'a> Join for &'a EntitiesRes { type Mask = BitSetOr<&'a BitSet, &'a AtomicBitSet>; From d2d05f24df3a0727f214d2e403157484dd790b26 Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 24 Jan 2023 02:01:58 -0500 Subject: [PATCH 14/47] Updated safety comments for impls/uses of Join (see previous commit for addtional details) --- src/changeset.rs | 15 +++++++++------ src/join/lend_join.rs | 2 -- src/join/maybe.rs | 2 +- src/join/mod.rs | 33 ++++++++++++++++++++------------- src/storage/mod.rs | 12 ++++++------ 5 files changed, 36 insertions(+), 28 deletions(-) diff --git a/src/changeset.rs b/src/changeset.rs index 122baa2b9..fec5a77b9 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -144,7 +144,8 @@ unsafe impl<'a, T> LendJoin for &'a mut ChangeSet { unsafe impl<'a, T> RepeatableLendGet for &'a mut ChangeSet {} // SAFETY: `open` returns references to a mask and storage which are contained -// together in the `ChangeSet` and correspond. +// together in the `ChangeSet` and correspond. Iterating mask does not repeat +// indices. unsafe impl<'a, T> Join for &'a mut ChangeSet { type Mask = &'a BitSet; type Type = &'a mut T; @@ -158,9 +159,9 @@ unsafe impl<'a, T> Join for &'a mut ChangeSet { // SAFETY: // * Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. - // * We also require that the caller drop the value returned before - // subsequent calls with the same `id`, so there are no extant - // references that were obtained with the same `id`. + // * We also require that there are no subsequent calls with the same + // `id` for this instance of the values from `open`, so there are no + // extant references for the element corresponding to this `id`. // * Since we have an exclusive reference to `Self::Value`, we know this // isn't being called from multiple threads at once. unsafe { value.get(id) } @@ -197,7 +198,8 @@ unsafe impl<'a, T> LendJoin for &'a ChangeSet { unsafe impl<'a, T> RepeatableLendGet for &'a ChangeSet {} // SAFETY: `open` returns references to a mask and storage which are contained -// together in the `ChangeSet` and correspond. +// together in the `ChangeSet` and correspond. Iterating mask does not repeat +// indices. unsafe impl<'a, T> Join for &'a ChangeSet { type Mask = &'a BitSet; type Type = &'a T; @@ -251,7 +253,8 @@ unsafe impl LendJoin for ChangeSet { /// A `Join` implementation for `ChangeSet` that simply removes all the entries /// on a call to `get`. // SAFETY: `open` returns references to a mask and storage which are contained -// together in the `ChangeSet` and correspond. +// together in the `ChangeSet` and correspond. Iterating mask does not repeat +// indices. unsafe impl Join for ChangeSet { type Mask = BitSet; type Type = T; diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index 2d0dcd9f6..b25358953 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -123,8 +123,6 @@ pub unsafe trait LendJoin { /// * Multiple calls with the same `id` are not allowed, for a particular /// instance of the values from [`open`](Join::open). Unless this type /// implements the unsafe trait [`RepeatableLendGet`]. 
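// As a rough sketch of what this distinction means for users (`Pos`, `world`,
// and the setup are as in the `lend_join` example added later in this
// series):
//
//     let e = world.create_entity().with(Pos(0.0)).build();
//     let entities = world.entities();
//     let mut pos = world.write_storage::<Pos>();
//
//     // Plain lending iteration never repeats an index:
//     let mut lending = (&mut pos).lend_join();
//     while let Some(p) = lending.next() { p.0 += 1.0; }
//
//     // Random access can hit the same element twice, so it is gated on
//     // `J: RepeatableLendGet` (true for storages, but not for the by-value
//     // `ChangeSet` join):
//     let mut lending = (&mut pos).lend_join();
//     lending.get(e, &entities).unwrap().0 += 1.0;
//     lending.get(e, &entities).unwrap().0 += 1.0;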
- /// (S-TODO update callers to match edit) - /// (S-TODO update immplemetors to match edit) unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> where Self: 'next; diff --git a/src/join/maybe.rs b/src/join/maybe.rs index 834842447..81a1142e8 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -61,7 +61,7 @@ where unsafe impl RepeatableLendGet for MaybeJoin where T: RepeatableLendGet {} // SAFETY: We return a mask containing all items, but check the original mask in -// the `get` implementation. +// the `get` implementation. Iterating the mask does not repeat indices. unsafe impl Join for MaybeJoin where T: Join, diff --git a/src/join/mod.rs b/src/join/mod.rs index cff7065fd..520251bab 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -94,7 +94,9 @@ pub use par_join::{JoinParIter, ParJoin}; /// /// The `Self::Mask` value returned with the `Self::Value` must correspond such /// that it is safe to retrieve items from `Self::Value` whose presence is -/// indicated in the mask. +/// indicated in the mask. As part of this, `BitSetLike::iter` must not produce +/// an iterator that repeats an `Index` value if the `LendJoin::get` impl relies +/// on not being called twice with the same `Index`. (S-TODO update impls) pub unsafe trait Join { /// Type of joined components. type Type; @@ -123,7 +125,7 @@ pub unsafe trait Join { /// Get a joined component value by a given index. /// - // S-TODO: evaluate all impls + // S-TODO: evaluate all impls (TODO: probably restrict, entry, and drain) /// /// # Safety /// @@ -131,8 +133,6 @@ pub unsafe trait Join { /// `Self::Mask`. /// * Multiple calls with the same `id` are not allowed, for a particular /// instance of the values from [`open`](Join::open). - /// (S-TODO update callers to match edit) - /// (S-TODO update immplemetors to match edit) unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type; /// If this `Join` typically returns all indices in the mask, then iterating @@ -177,8 +177,9 @@ impl std::iter::Iterator for JoinIter { type Item = J::Type; fn next(&mut self) -> Option { - // SAFETY: since `idx` is yielded from `keys` (the mask), it is necessarily a - // part of it. Thus, requirements are fulfilled for calling `get`. + // SAFETY: Since `idx` is yielded from `keys` (the mask), it is + // necessarily a part of it. `Join` requires that the iterator doesn't + // repeat indices and we advance the iterator for each `get` call. self.keys .next() .map(|idx| unsafe { J::get(&mut self.values, idx) }) @@ -225,9 +226,9 @@ macro_rules! define_open { let &mut ($(ref mut $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked // the mask, which only has a key that exists in all of the - // storages. Requirement to Requirement to not call with the - // same ID more than once (unless `RepeatableLendGet` is - // implemented) is passed to the caller. + // storages. Requirement to not call with the same ID more than + // once (unless `RepeatableLendGet` is implemented) is passed to + // the caller. unsafe { ($($from::get($from, i),)*) } } @@ -249,6 +250,7 @@ macro_rules! define_open { // SAFETY: The returned mask in `open` is the intersection of the masks // from each type in this tuple. So if an `id` is present in the // combined mask, it will be safe to retrieve the corresponding items. + // Iterating the mask does not repeat indices. unsafe impl<$($from,)*> Join for ($($from),*,) where $($from: Join),*, ($(<$from as Join>::Mask,)*): BitAnd, @@ -275,7 +277,8 @@ macro_rules! 
define_open { let &mut ($(ref mut $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked // the mask, which only has a key that exists in all of the - // storages. + // storages. Requirement to not use the same ID multiple times + // is also passed to the caller. unsafe { ($($from::get($from, i),)*) } } @@ -433,6 +436,8 @@ macro_rules! immutable_resource_join { // SAFETY: The mask of `Self` and `T` are identical, thus a // check to `Self`'s mask (which is required) is equal to a // check of `T`'s mask, which makes `get` safe to call. + // Requirement to not use the same ID multiple times is passed + // to the caller. unsafe { <&'a T as Join>::get(v, i) } } @@ -546,9 +551,11 @@ macro_rules! mutable_resource_join { } unsafe fn get(v: &mut Self::Value, i: Index) -> Self::Type { - // SAFETY: The mask of `Self` and `T` are identical, thus a check to - // `Self`'s mask (which is required) is equal to a check of `T`'s - // mask, which makes `get_mut` safe to call. + // SAFETY: The mask of `Self` and `T` are identical, thus a + // check to `Self`'s mask (which is required) is equal to a + // check of `T`'s mask, which makes `get_mut` safe to call. + // Requirement to not use the same ID multiple times is passed + // to the caller. unsafe { <&'a mut T as Join>::get(v, i) } } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 82c34a35c..e067ae807 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -486,7 +486,7 @@ where // SAFETY: The mask and unprotected storage contained in `MaskedStorage` // correspond and `open` returns references to them from the same -// `MaskedStorage` instance. +// `MaskedStorage` instance. Iterating the mask does not repeat indices. unsafe impl<'a, 'e, T, D> Join for &'a Storage<'e, T, D> where T: Component, @@ -625,8 +625,8 @@ pub use shared_get_mut_only::SharedGetMutOnly; // SAFETY: The mask and unprotected storage contained in `MaskedStorage` // correspond and `open` returns references to them from the same -// `MaskedStorage` instance (the storage is wrapped in -// `SharedGetMutOnly`). +// `MaskedStorage` instance (the storage is wrapped in `SharedGetMutOnly`). +// Iterating the mask does not repeat indices. unsafe impl<'a, 'e, T, D> Join for &'a mut Storage<'e, T, D> where T: Component, @@ -647,9 +647,9 @@ where // SAFETY: // * Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. - // * We also require that the caller drop the value returned before - // subsequent calls with the same `id`, so there are no extant - // references that were obtained with the same `id`. + // * We also require that there are no subsequent calls with the same + // `id` for this instance of the values from `open`, so there are no + // extant references for the element corresponding to this `id`. // * Since we have an exclusive reference to `Self::Value`, we know this // isn't being called from multiple threads at once. unsafe { value.get(id) } From ce90ce7370a1eb93f7d3e7a250c75c2047dfaa3a Mon Sep 17 00:00:00 2001 From: Imbris Date: Fri, 3 Feb 2023 00:52:21 -0500 Subject: [PATCH 15/47] Address a few compile errors that snuck through when enabling additional features. 
--- src/saveload/marker.rs | 6 +++-- src/storage/mod.rs | 50 ++++++++++++++++++++++++++++-------------- 2 files changed, 38 insertions(+), 18 deletions(-) diff --git a/src/saveload/marker.rs b/src/saveload/marker.rs index 203e7324c..61b5b7baa 100644 --- a/src/saveload/marker.rs +++ b/src/saveload/marker.rs @@ -348,7 +348,9 @@ pub trait MarkerAllocator: Resource { entity: Entity, storage: &'m mut WriteStorage, ) -> Option<(&'m M, bool)> { - let new = if let Ok(entry) = storage.entry(entity) { + todo!() + // D-TODO (uncomment when entry API is re-enabled!) + /*let new = if let Ok(entry) = storage.entry(entity) { let mut new = false; let _marker = entry.or_insert_with(|| { new = true; @@ -359,7 +361,7 @@ pub trait MarkerAllocator: Resource { } else { return None; }; - Some((storage.get(entity).unwrap(), new)) + Some((storage.get(entity).unwrap(), new))*/ } /// Maintain internal data. Cleanup if necessary. diff --git a/src/storage/mod.rs b/src/storage/mod.rs index e067ae807..fe37a0e5d 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -571,7 +571,7 @@ where } mod shared_get_mut_only { - use super::{AccessMutReturn, Index, SharedGetMutStorage}; + use super::{Index, SharedGetMutStorage, UnprotectedStorage}; use core::marker::PhantomData; /// This type provides a way to ensure only `shared_get_mut` can be called @@ -584,25 +584,33 @@ mod shared_get_mut_only { /// [`ParJoin`](super::ParJoin). pub struct SharedGetMutOnly<'a, T, S>(&'a S, PhantomData); + macro_rules! get_docs { + ($fn_definition:item) => { + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. + /// obtained with the same `id`). + /// + /// Unless `T::Storage` implements `DistinctStorage`, calling this from + /// multiple threads at once is unsound. + $fn_definition + }; + } + impl<'a, T, S> SharedGetMutOnly<'a, T, S> { pub fn new(storage: &'a mut S) -> Self { Self(storage, PhantomData) } - /// # Safety - /// - /// May only be called after a call to `insert` with `id` and no following - /// call to `remove` with `id` or to `clean`. - /// - /// A mask should keep track of those states, and an `id` being contained in - /// the tracking mask is sufficient to call this method. - /// - /// There must be no extant aliasing references to this component (i.e. - /// obtained with the same `id`). - /// - /// Unless `T::Storage` implements `DistinctStorage`, calling this from - /// multiple threads at once is unsound. - pub unsafe fn get(&self, i: Index) -> AccessMutReturn<'a, T> + #[cfg(not(feature = "nightly"))] + get_docs! { + pub unsafe fn get(&self, i: Index) -> &'a mut T where S: SharedGetMutStorage, { @@ -618,7 +626,17 @@ mod shared_get_mut_only { // and the remaining safety requirements are passed on to the // caller. unsafe { self.0.shared_get_mut(i) } - } + }} + + #[cfg(feature = "nightly")] + get_docs! { + pub unsafe fn get(&self, i: Index) -> >::AccessMut<'a> + where + S: SharedGetMutStorage, + { + // SAFETY: Same as above. 
+ unsafe { self.0.shared_get_mut(i) } + }} } } pub use shared_get_mut_only::SharedGetMutOnly; From bea1ea914a9060b10b415788a274d7ac0db7693c Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 5 Feb 2023 14:12:41 -0500 Subject: [PATCH 16/47] Add lend_join example and make Miri run it without errors by fixing an issue in `shred` --- Cargo.toml | 8 ++++++-- examples/lend_join.rs | 25 +++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 examples/lend_join.rs diff --git a/Cargo.toml b/Cargo.toml index 28de53213..d58ae80e4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,7 +24,9 @@ ahash = "0.7.6" crossbeam-queue = "0.3" hibitset = { version = "0.6.3", default-features = false } log = "0.4.8" -shred = { version = "0.14.1", default-features = false } +# waiting on https://github.com/amethyst/shred/pull/223 +# shred = { version = "0.14.1", default-features = false } +shred = { path = "../shred", default-features = false } shrev = "1.1.1" tuple_utils = "0.4.0" nougat = "0.2.3" @@ -54,7 +56,9 @@ criterion = "0.3.1" ron = "0.7.1" rand = "0.8" serde_json = "1.0.48" -shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } +# waiting on https://github.com/amethyst/shred/pull/223 +# shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } +shred = { path = "../shred", default-features = false, features = ["shred-derive"] } specs-derive = { path = "specs-derive", version = "0.4.1" } [[example]] diff --git a/examples/lend_join.rs b/examples/lend_join.rs new file mode 100644 index 000000000..b9a25610c --- /dev/null +++ b/examples/lend_join.rs @@ -0,0 +1,25 @@ +use specs::prelude::*; +struct Pos(f32); + +impl Component for Pos { + type Storage = VecStorage; +} + +fn main() { + let mut world = World::new(); + + world.register::(); + + world.create_entity().with(Pos(0.0)).build(); + world.create_entity().with(Pos(1.6)).build(); + world.create_entity().with(Pos(5.4)).build(); + + let mut pos = world.write_storage::(); + + let mut lending = (&mut pos).lend_join(); + + let a = lending.next().unwrap().0; + let b = lending.next().unwrap(); + // let d = lending.next().unwrap(); (this rightly fails to compile) + let _c = a + b.0; +} From e22d22a96f0d0eeefad2f6a8e7bd3f11bfa5fa00 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 5 Feb 2023 15:18:51 -0500 Subject: [PATCH 17/47] Remove "nightly" feature now that generic associated types have stabilized and bump the MSRV to 1.65.0 --- CHANGELOG.md | 10 ++- Cargo.toml | 1 - src/lib.rs | 5 -- src/storage/flagged.rs | 27 ------ src/storage/generic.rs | 4 - src/storage/mod.rs | 177 ++++++++++++++-------------------------- src/storage/storages.rs | 6 -- 7 files changed, 70 insertions(+), 160 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e4318517e..762ef3186 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,13 +1,19 @@ +# Unreleased + +* MSRV to 1.65.0 [#7xx]. + +[#7xx]: https://github.com/amethyst/specs/pull/7xx + # 0.19.0 (2023-06-10) * Bump MSRV to 1.65.0 ([#766]) * Added index where entity deletion stopped to the error returned from `WorldExt::delete_entities` ([#766]) * Fix bug where deleting an entity with the wrong generation could clear the components of an existing entity. 
([#766]) -* Bump shred to version `0.14.1`, MSRV to 1.60.0 ([shred changelog][shred-changelog], [#756]) +* Bump shred to version `0.14.1`, MSRV to 1.60.0 ([shred changelog][shred_changelog], [#756]) +[shred_changelog]: https://github.com/amethyst/shred/blob/master/CHANGELOG.md#0141-2022-07-14 [#756]: https://github.com/amethyst/specs/pull/756 [#766]: https://github.com/amethyst/specs/pull/766 -[shred-changelog]: https://github.com/amethyst/shred/blob/6b754812e304cf6c63ba0364a82a7e0e5025aaa4/CHANGELOG.md#0140-2022-07-12 # 0.18.0 (2022-07-02) diff --git a/Cargo.toml b/Cargo.toml index d58ae80e4..285f458b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,6 @@ uuid_entity = ["uuid", "serde"] stdweb = ["uuid/js"] storage-event-control = [] derive = ["shred-derive", "specs-derive"] -nightly = [] shred-derive = ["shred/shred-derive"] diff --git a/src/lib.rs b/src/lib.rs index 4038ea5ec..819e3af4a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,10 +1,6 @@ #![warn(missing_docs)] #![deny(unsafe_op_in_unsafe_fn)] #![deny(clippy::disallowed_types)] -#![cfg_attr( - feature = "nightly", - feature(generic_associated_types, associated_type_defaults) -)] //! # SPECS Parallel ECS //! @@ -236,5 +232,4 @@ pub use crate::{ world::{Builder, Component, Entities, Entity, EntityBuilder, LazyUpdate, WorldExt}, }; -#[cfg(feature = "nightly")] pub use crate::storage::DerefFlaggedStorage; diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index 221caf530..528594cca 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -203,7 +203,6 @@ where } impl> UnprotectedStorage for FlaggedStorage { - #[cfg(feature = "nightly")] type AccessMut<'a> = >::AccessMut<'a> where T: 'a; unsafe fn clean(&mut self, has: B) @@ -219,7 +218,6 @@ impl> UnprotectedStorage for FlaggedSt unsafe { self.storage.get(id) } } - #[cfg(feature = "nightly")] unsafe fn get_mut(&mut self, id: Index) -> >::AccessMut<'_> { if self.emit_event() { self.channel @@ -230,17 +228,6 @@ impl> UnprotectedStorage for FlaggedSt unsafe { self.storage.get_mut(id) } } - #[cfg(not(feature = "nightly"))] - unsafe fn get_mut(&mut self, id: Index) -> &mut C { - if self.emit_event() { - self.channel - .get_mut() - .single_write(ComponentEvent::Modified(id)); - } - // SAFETY: Requirements passed to caller. - unsafe { self.storage.get_mut(id) } - } - unsafe fn insert(&mut self, id: Index, comp: C) { if self.emit_event() { self.channel @@ -263,7 +250,6 @@ impl> UnprotectedStorage for FlaggedSt } impl> SharedGetMutStorage for FlaggedStorage { - #[cfg(feature = "nightly")] unsafe fn shared_get_mut(&self, id: Index) -> >::AccessMut<'_> { if self.emit_event() { let channel_ptr = self.channel.get(); @@ -275,19 +261,6 @@ impl> SharedGetMutStorage for Flagged // SAFETY: Requirements passed to caller. unsafe { self.storage.shared_get_mut(id) } } - - #[cfg(not(feature = "nightly"))] - unsafe fn shared_get_mut(&self, id: Index) -> &mut C { - if self.emit_event() { - let channel_ptr = self.channel.get(); - // SAFETY: Caller required to ensure references returned from other - // safe methods such as Tracked::channel are no longer alive. This - // storage is not marked with a `DistinctStorage` impl. - unsafe { &mut *channel_ptr }.single_write(ComponentEvent::Modified(id)); - } - // SAFETY: Requirements passed to caller. 
- unsafe { self.storage.shared_get_mut(id) } - } } impl Tracked for FlaggedStorage { diff --git a/src/storage/generic.rs b/src/storage/generic.rs index 6a4e8d348..84f81a196 100644 --- a/src/storage/generic.rs +++ b/src/storage/generic.rs @@ -1,4 +1,3 @@ -#[cfg(feature = "nightly")] use crate::storage::{AccessMut, UnprotectedStorage}; use crate::{ storage::{AccessMutReturn, InsertResult, ReadStorage, WriteStorage}, @@ -85,7 +84,6 @@ pub trait GenericWriteStorage { /// The component type of the storage type Component: Component; /// The wrapper through with mutable access of a component is performed. - #[cfg(feature = "nightly")] type AccessMut<'a>: AccessMut where Self: 'a; @@ -118,7 +116,6 @@ impl<'a, T> GenericWriteStorage for WriteStorage<'a, T> where T: Component, { - #[cfg(feature = "nightly")] type AccessMut<'b> = <::Storage as UnprotectedStorage>::AccessMut<'b> where Self: 'b; type Component = T; @@ -157,7 +154,6 @@ impl<'a: 'b, 'b, T> GenericWriteStorage for &'b mut WriteStorage<'a, T> where T: Component, { - #[cfg(feature = "nightly")] type AccessMut<'c> = <::Storage as UnprotectedStorage>::AccessMut<'c> where Self: 'c; type Component = T; diff --git a/src/storage/mod.rs b/src/storage/mod.rs index fe37a0e5d..1e9f4beb2 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -1,6 +1,5 @@ //! Component storage types, implementations for component joins, etc. -#[cfg(feature = "nightly")] pub use self::deref_flagged::{DerefFlaggedStorage, FlaggedAccessMut}; pub use self::{ data::{ReadStorage, WriteStorage}, @@ -42,7 +41,6 @@ use crate::{ use self::sync_unsafe_cell::SyncUnsafeCell; mod data; -#[cfg(feature = "nightly")] mod deref_flagged; // D-TODO mod drain; // D-TODO mod entry; @@ -55,10 +53,7 @@ mod sync_unsafe_cell; mod tests; mod track; -#[cfg(feature = "nightly")] type AccessMutReturn<'a, T> = <::Storage as UnprotectedStorage>::AccessMut<'a>; -#[cfg(not(feature = "nightly"))] -type AccessMutReturn<'a, T> = &'a mut T; /// An inverted storage type, only useful to iterate entities /// that do not have a particular component type. @@ -584,33 +579,25 @@ mod shared_get_mut_only { /// [`ParJoin`](super::ParJoin). pub struct SharedGetMutOnly<'a, T, S>(&'a S, PhantomData); - macro_rules! get_docs { - ($fn_definition:item) => { - /// # Safety - /// - /// May only be called after a call to `insert` with `id` and no following - /// call to `remove` with `id` or to `clean`. - /// - /// A mask should keep track of those states, and an `id` being contained in - /// the tracking mask is sufficient to call this method. - /// - /// There must be no extant aliasing references to this component (i.e. - /// obtained with the same `id`). - /// - /// Unless `T::Storage` implements `DistinctStorage`, calling this from - /// multiple threads at once is unsound. - $fn_definition - }; - } - impl<'a, T, S> SharedGetMutOnly<'a, T, S> { pub fn new(storage: &'a mut S) -> Self { Self(storage, PhantomData) } - #[cfg(not(feature = "nightly"))] - get_docs! { - pub unsafe fn get(&self, i: Index) -> &'a mut T + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. + /// obtained with the same `id`). 
+ /// + /// Unless `T::Storage` implements `DistinctStorage`, calling this from + /// multiple threads at once is unsound. + pub unsafe fn get(&self, i: Index) -> >::AccessMut<'a> where S: SharedGetMutStorage, { @@ -626,17 +613,7 @@ mod shared_get_mut_only { // and the remaining safety requirements are passed on to the // caller. unsafe { self.0.shared_get_mut(i) } - }} - - #[cfg(feature = "nightly")] - get_docs! { - pub unsafe fn get(&self, i: Index) -> >::AccessMut<'a> - where - S: SharedGetMutStorage, - { - // SAFETY: Same as above. - unsafe { self.0.shared_get_mut(i) } - }} + } } } pub use shared_get_mut_only::SharedGetMutOnly; @@ -734,30 +711,6 @@ where } } -macro_rules! get_mut_docs { - ($fn_definition:item) => { - /// Gets mutable access to the the data associated with an `Index`. - /// - /// This doesn't necessarily directly return a `&mut` reference (at - /// least with `nightly` feature). This allows storages more - /// flexibility. For example, some flagged storages utilize this to - /// defer generation of mutation events until the user obtains an `&mut` - /// reference out of the returned wrapper type. - /// - /// This is unsafe because the external set used to protect this storage is - /// absent. - /// - /// # Safety - /// - /// May only be called after a call to `insert` with `id` and no following - /// call to `remove` with `id` or to `clean`. - /// - /// A mask should keep track of those states, and an `id` being contained in - /// the tracking mask is sufficient to call this method. - $fn_definition - }; -} - /// DerefMut without autoderefing. /// /// Allows forcing mutable access to be explicit. Useful to implement a flagged @@ -781,7 +734,6 @@ where /// Used by the framework to quickly join components. pub trait UnprotectedStorage: TryDefault { /// The wrapper through with mutable access of a component is performed. - #[cfg(feature = "nightly")] type AccessMut<'a>: AccessMut where Self: 'a; @@ -816,15 +768,24 @@ pub trait UnprotectedStorage: TryDefault { /// in the tracking mask is sufficient to call this method. unsafe fn get(&self, id: Index) -> &T; - #[cfg(feature = "nightly")] - get_mut_docs! { - unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_>; - } - - #[cfg(not(feature = "nightly"))] - get_mut_docs! { - unsafe fn get_mut(&mut self, id: Index) -> &mut T; - } + /// Gets mutable access to the the data associated with an `Index`. + /// + /// This doesn't necessarily directly return a `&mut` reference. This + /// allows storages more flexibility. For example, some flagged storages + /// utilize this to defer generation of mutation events until the user + /// obtains an `&mut` reference out of the returned wrapper type. + /// + /// This is unsafe because the external set used to protect this storage is + /// absent. + /// + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_>; /// Inserts new data for a given `Index`. /// @@ -868,50 +829,36 @@ pub trait UnprotectedStorage: TryDefault { } } -macro_rules! shared_get_mut_docs { - ($fn_definition:item) => { - /// Gets mutable access to the the data associated with an `Index`. 
- /// - /// This is unsafe because the external set used to protect this storage is - /// absent and because it doesn't protect against concurrent calls from - /// multiple threads and aliasing must manually be managed. - /// - /// # Safety - /// - /// May only be called after a call to `insert` with `id` and no following - /// call to `remove` with `id` or to `clean`. - /// - /// A mask should keep track of those states, and an `id` being contained in - /// the tracking mask is sufficient to call this method. - /// - /// There must be no extant aliasing references to this component (i.e. - /// obtained with the same `id`). Additionally, references obtained from - /// methods on this type that take `&self` (e.g. - /// [`UnprotectedStorage::get`], [`SliceAccess::as_slice`], - /// [`Tracked::channel`]) must no longer be alive when - /// `shared_get_mut` is called and these methods must not be - /// called while the references returned here are alive. Essentially, - /// the `unsafe` code calling this must hold exclusive access of the - /// storage at some level to ensure only known code is calling `&self` - /// methods during the usage of this method and the references it - /// produces. - /// - /// Unless this type implements `DistinctStorage`, calling this from - /// multiple threads at once is unsound. - $fn_definition - }; -} - pub trait SharedGetMutStorage: UnprotectedStorage { - #[cfg(feature = "nightly")] - shared_get_mut_docs! { - unsafe fn shared_get_mut(&self, id: Index) -> >::AccessMut<'_>; - } - - #[cfg(not(feature = "nightly"))] - shared_get_mut_docs! { - unsafe fn shared_get_mut(&self, id: Index) -> &mut T; - } + /// Gets mutable access to the the data associated with an `Index`. + /// + /// This is unsafe because the external set used to protect this storage is + /// absent and because it doesn't protect against concurrent calls from + /// multiple threads and aliasing must manually be managed. + /// + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no following + /// call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being contained in + /// the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. + /// obtained with the same `id`). Additionally, references obtained from + /// methods on this type that take `&self` (e.g. + /// [`UnprotectedStorage::get`], [`SliceAccess::as_slice`], + /// [`Tracked::channel`]) must no longer be alive when + /// `shared_get_mut` is called and these methods must not be + /// called while the references returned here are alive. Essentially, + /// the `unsafe` code calling this must hold exclusive access of the + /// storage at some level to ensure only known code is calling `&self` + /// methods during the usage of this method and the references it + /// produces. + /// + /// Unless this type implements `DistinctStorage`, calling this from + /// multiple threads at once is unsound. 
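// As a rough sketch of how this contract is upheld in practice (mirroring the
// `Join` impl for `&mut Storage` elsewhere in this series): the caller holds
// `&mut Storage`, wraps the inner storage in `SharedGetMutOnly`, and then only
// calls in with ids drawn from the mask, each id at most once, e.g.
//
//     // inside `Join::get` for `&mut Storage` (simplified)
//     unsafe { SharedGetMutOnly::get_mut(value, id) }
//
// so the returned mutable borrows never alias. `DistinctStorage` is what
// additionally makes such calls sound from multiple threads in the parallel
// join.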
+ unsafe fn shared_get_mut(&self, id: Index) -> >::AccessMut<'_>; } #[cfg(test)] diff --git a/src/storage/storages.rs b/src/storage/storages.rs index 1423ff622..1b403ec94 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -33,7 +33,6 @@ impl Default for BTreeStorage { } impl UnprotectedStorage for BTreeStorage { - #[cfg(feature = "nightly")] type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) @@ -87,7 +86,6 @@ impl Default for HashMapStorage { } impl UnprotectedStorage for HashMapStorage { - #[cfg(feature = "nightly")] type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) @@ -186,7 +184,6 @@ impl SliceAccess for DenseVecStorage { } impl UnprotectedStorage for DenseVecStorage { - #[cfg(feature = "nightly")] type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) @@ -317,7 +314,6 @@ impl Default for NullStorage { } impl UnprotectedStorage for NullStorage { - #[cfg(feature = "nightly")] type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, has: B) @@ -422,7 +418,6 @@ impl SliceAccess for VecStorage { } impl UnprotectedStorage for VecStorage { - #[cfg(feature = "nightly")] type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, has: B) @@ -578,7 +573,6 @@ impl UnprotectedStorage for DefaultVecStorage where T: Default, { - #[cfg(feature = "nightly")] type AccessMut<'a> = &'a mut T where T: 'a; unsafe fn clean(&mut self, _has: B) From 738dc16e5bb1383a2b06790e86bd9f906bd3b8ed Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 5 Feb 2023 20:35:26 -0500 Subject: [PATCH 18/47] Uncomment `entry` module and rework implementation: * Replace `Join` impl with `LendJoin` (to avoid creating aliasing mutable references to the storage). * Create new `Storage::not_present_insert` method that requires that the `id` not be present in the mask. This is used by both `Storage::insert` and `VacantEntry::insert` so we can centralize documenting the safety of calling `UnprotectedStorage::insert` and the handling of potential unwinding from `BitSet::add`. --- src/join/mod.rs | 3 +- src/storage/entry.rs | 82 ++++++++++++++++++++------------------------ src/storage/mod.rs | 48 +++++++++++++++++++++----- 3 files changed, 79 insertions(+), 54 deletions(-) diff --git a/src/join/mod.rs b/src/join/mod.rs index 520251bab..25989d12e 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -96,7 +96,8 @@ pub use par_join::{JoinParIter, ParJoin}; /// that it is safe to retrieve items from `Self::Value` whose presence is /// indicated in the mask. As part of this, `BitSetLike::iter` must not produce /// an iterator that repeats an `Index` value if the `LendJoin::get` impl relies -/// on not being called twice with the same `Index`. (S-TODO update impls) +/// on not being called twice with the same `Index`. (S-TODO update impls: +/// probably restrict, entry, and drain) pub unsafe trait Join { /// Type of joined components. type Type; diff --git a/src/storage/entry.rs b/src/storage/entry.rs index 5cde07443..fad2ca174 100644 --- a/src/storage/entry.rs +++ b/src/storage/entry.rs @@ -1,7 +1,8 @@ use hibitset::BitSetAll; use super::*; -use crate::join::Join; +use crate::join::LendJoin; +use crate::world::Generation; impl<'e, T, D> Storage<'e, T, D> where @@ -38,14 +39,7 @@ where 'e: 'a, { if self.entities.is_alive(e) { - let entries = self.entries(); - unsafe { - // SAFETY: This is safe since we're not swapping out the mask or the values. 
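// For illustration, the entry API that this commit re-enables is typically
// used like this (a sketch; `Counter`, `entity`, and the `WriteStorage<Counter>`
// named `counters` are placeholder names, and `Counter` is assumed to live in
// an ordinary, non-flagged storage):
//
//     if let Ok(entry) = counters.entry(entity) {
//         match entry {
//             StorageEntry::Occupied(mut occupied) => *occupied.get_mut() = Counter(1),
//             StorageEntry::Vacant(vacant) => { vacant.insert(Counter(0)); }
//         }
//     }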
- let (_, mut value): (BitSetAll, _) = entries.open(); - // SAFETY: We did check the mask, because the mask is `BitSetAll` and every - // index is part of it. - Ok(Entries::get(&mut value, e.id())) - } + Ok(self.entry_inner(e.id())) } else { let gen = self .entities @@ -107,7 +101,8 @@ where /// # } /// # /// # world.exec(|(mut counters, marker): (WriteStorage, ReadStorage)| { - /// for (mut counter, _) in (counters.entries(), &marker).join() { + /// let mut join = (counter.entries(), &marker).lend_join(); + /// while let Some((mut counter, _)) = join.next() { /// let counter = counter.or_insert_with(Default::default); /// counter.increase(); /// @@ -121,46 +116,44 @@ where pub fn entries<'a>(&'a mut self) -> Entries<'a, 'e, T, D> { Entries(self) } + + /// Returns an entry to the component associated with the provided index. + /// + /// Does not check whether an entity is alive! + pub fn entry_inner<'a>(&'a mut self, id: Index) -> StorageEntry<'a, 'e, T, D> { + if self.data.mask.contains(id) { + StorageEntry::Occupied(OccupiedEntry { id, storage: self }) + } else { + StorageEntry::Vacant(VacantEntry { id, storage: self }) + } + } } /// `Join`-able structure that yields all indices, returning `Entry` for all -/// elements +/// elements. pub struct Entries<'a, 'b: 'a, T: 'a, D: 'a>(&'a mut Storage<'b, T, D>); -impl<'a, 'b: 'a, T: 'a, D: 'a> Join for Entries<'a, 'b, T, D> +// SAFETY: We return a mask containing all items, but check the original mask in +// the `get` implementation. Iterating the mask does not repeat indices. +#[nougat::gat] +unsafe impl<'a, 'b: 'a, T: 'a, D: 'a> LendJoin for Entries<'a, 'b, T, D> where T: Component, - D: Deref>, + D: DerefMut>, { type Mask = BitSetAll; - type Type = StorageEntry<'a, 'b, T, D>; + type Type<'next> = StorageEntry<'next, 'b, T, D>; type Value = &'a mut Storage<'b, T, D>; - // SAFETY: No invariants to meet and no unsafe code. unsafe fn open(self) -> (Self::Mask, Self::Value) { (BitSetAll, self.0) } - // SAFETY: We are lengthening the lifetime of `value` to `'a`; - // TODO: how to prove this is safe? - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - // This is HACK. See implementation of Join for &'a mut Storage<'e, T, D> for - // details why it is necessary. - let storage: *mut Storage<'b, T, D> = *value as *mut Storage<'b, T, D>; - // SAFETY: S-TODO redo when updating join trait - if unsafe { &*storage }.data.mask.contains(id) { - StorageEntry::Occupied(OccupiedEntry { - id, - // SAFETY: S-TODO redo when updating join trait - storage: unsafe { &mut *storage }, - }) - } else { - StorageEntry::Vacant(VacantEntry { - id, - // SAFETY: S-TODO redo when updating join trait - storage: unsafe { &mut *storage }, - }) - } + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { + value.entry_inner(id) } #[inline] @@ -195,7 +188,6 @@ where { /// Get a mutable reference to the component associated with the entity. pub fn get_mut(&mut self) -> AccessMutReturn<'_, T> { - // S-TODO update safety comment after changing Join // SAFETY: This is safe since `OccupiedEntry` is only constructed // after checking the mask. unsafe { self.storage.data.inner.get_mut(self.id) } @@ -204,7 +196,6 @@ where /// Converts the `OccupiedEntry` into a mutable reference bounded by /// the storage's lifetime. pub fn into_mut(self) -> AccessMutReturn<'a, T> { - // S-TODO update safety comment after changing Join // SAFETY: This is safe since `OccupiedEntry` is only constructed // after checking the mask. 
unsafe { self.storage.data.inner.get_mut(self.id) } @@ -212,7 +203,7 @@ where /// Inserts a value into the storage and returns the old one. pub fn insert(&mut self, mut component: T) -> T { - std::mem::swap(&mut component, self.get_mut().deref_mut()); + core::mem::swap(&mut component, self.get_mut().access_mut()); component } @@ -236,13 +227,14 @@ where { /// Inserts a value into the storage. pub fn insert(self, component: T) -> AccessMutReturn<'a, T> { - // S-TODO safety comment incomplete - // SAFETY: This is safe since we added `self.id` to the mask. - unsafe { - self.storage.data.inner.insert(self.id, component); - self.storage.data.mask.add(self.id); - self.storage.data.inner.get_mut(self.id) - } + // Note, this method adds `id` to the mask. + // SAFETY: `VacantEntry` is only constructed after checking that `id` is + // not present in the mask and we consume `VacantEntry` here. + unsafe { self.storage.not_present_insert(self.id, component) }; + // TODO (perf): We could potentially have an insert method that directly + // produces a reference to the just inserted value. + // SAFETY: We just inserted the component above. + unsafe { self.storage.data.inner.get_mut(self.id) } } } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 1e9f4beb2..e37ffd2e0 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -3,7 +3,7 @@ pub use self::deref_flagged::{DerefFlaggedStorage, FlaggedAccessMut}; pub use self::{ data::{ReadStorage, WriteStorage}, - // D-TODO entry::{Entries, OccupiedEntry, StorageEntry, VacantEntry}, + entry::{Entries, OccupiedEntry, StorageEntry, VacantEntry}, flagged::FlaggedStorage, generic::{GenericReadStorage, GenericWriteStorage}, // D-TODO restrict::{ @@ -43,7 +43,7 @@ use self::sync_unsafe_cell::SyncUnsafeCell; mod data; mod deref_flagged; // D-TODO mod drain; -// D-TODO mod entry; +mod entry; mod flagged; mod generic; // D-TODO mod restrict; @@ -382,15 +382,13 @@ where if self.entities.is_alive(e) { let id = e.id(); if self.data.mask.contains(id) { - // SAFETY: We have exclusive access (which ensures no aliasing or - // concurrent calls from other threads) and we checked the mask, so - // all invariants are met. + // SAFETY: `id` is in the mask. std::mem::swap(&mut v, unsafe { self.data.inner.get_mut(id) }.access_mut()); Ok(Some(v)) } else { - // SAFETY: The mask was previously empty, so it is safe to insert. - unsafe { self.data.inner.insert(id, v) }; - self.data.mask.add(id); + // SAFETY: The mask was previously empty, so this is safe to + // call. + unsafe { self.not_present_insert(id, v) } Ok(None) } } else { @@ -402,6 +400,40 @@ where } } + /// Insert the provided value at `id` and adds `id` to the mask. + /// + /// # Safety + /// + /// May only be called if `id` is not present in the mask. + #[inline(always)] + unsafe fn not_present_insert(&mut self, id: Index, value: T) { + // SAFETY: The mask was previously empty, so it is safe to + // insert. We immediately add the value to the mask below and + // unwinding from the `insert` call means that we don't need to + // include the value in the mask. `BitSet::add` won't unwind on 32-bit + // and 64-bit platforms since OOM aborts and any overflow in capacity + // calculations (which panics) won't occur for resizing to hold the bit + // at `id = u32::MAX`. We rely on `BitSet::add` not having any other + // cases where it panics. On 16-bit platforms we insert a guard to abort + // if a panic occurs (although I suspect we will run out of memory + // before that). 
+ unsafe { self.data.inner.insert(id, value) }; + const _ASSERT_INDEX_IS_U32: Index = 0u32; + if cfg!(panic = "abort") || usize::BITS >= 32 { + self.data.mask.add(id); + } else { + struct AbortOnDrop; + impl Drop for AbortOnDrop { + fn drop(&mut self) { + std::process::abort() + } + } + let guard = AbortOnDrop; + self.data.mask.add(id); + core::mem::forget(guard); + } + } + /// Removes the data associated with an `Entity`. pub fn remove(&mut self, e: Entity) -> Option { if self.entities.is_alive(e) { From e4c36559430443fc72c1a4536216718bc5ad079b Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 12 Feb 2023 23:14:11 -0500 Subject: [PATCH 19/47] Implement refactored Join traits for `RestrictedStorage` and other related changes: * SharedGetMutOnly::get_mut changed from method to associated function to make its use more apparent (e.g. compared to calling UnprotectedStorage::get_mut). * New requirement added to ParJoin trait implementation to facilitate callers of ParJoin::get that need to ensure they don't repeat indices. * `ShareGetMutStorage::shared_get_mut` requirements tweaked to allow calling this in conjuction with `UnprotectedStorage::get` when the `id`s used don't overlap. This facilitates `Join`/`ParJoin` impls for `RestrictedStorage` which can allow getting a component for one entity mutably while immutably getting the component for another entity at the same time. * Marker types used for restricted storage implementation replaced with producing distinct types for different types of joins: `PairedStorageRead` (for any read only join), `PariedStorageWriteExclusive` (for mutable LendJoin), and `PairedStorageWriteShare` (for mutable Join/ParJoin). * Renamed `PairedStorage` (which was replaced with the 3 types above) methods `get_unchecked`/`get_unchecked_mut` to `get`/`get_mut` since `unchecked` often is used to indicate some safety requirement hasn't been checked which isn't the case here. Renamed existing `get`/`get_mut` methods to `get_other`/`get_mut_other`. * Other misc changes that were missed in previous commits. --- src/changeset.rs | 2 +- src/join/mod.rs | 5 +- src/join/par_join.rs | 8 +- src/prelude.rs | 2 +- src/saveload/marker.rs | 6 +- src/storage/drain.rs | 7 + src/storage/entry.rs | 9 + src/storage/mod.rs | 57 ++-- src/storage/restrict.rs | 567 +++++++++++++++++++++++++++++----------- src/storage/storages.rs | 23 +- 10 files changed, 486 insertions(+), 200 deletions(-) diff --git a/src/changeset.rs b/src/changeset.rs index fec5a77b9..7129862ce 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -164,7 +164,7 @@ unsafe impl<'a, T> Join for &'a mut ChangeSet { // extant references for the element corresponding to this `id`. // * Since we have an exclusive reference to `Self::Value`, we know this // isn't being called from multiple threads at once. - unsafe { value.get(id) } + unsafe { SharedGetMutOnly::get_mut(value, id) } } } diff --git a/src/join/mod.rs b/src/join/mod.rs index 25989d12e..0e35bc9b2 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -95,9 +95,8 @@ pub use par_join::{JoinParIter, ParJoin}; /// The `Self::Mask` value returned with the `Self::Value` must correspond such /// that it is safe to retrieve items from `Self::Value` whose presence is /// indicated in the mask. As part of this, `BitSetLike::iter` must not produce -/// an iterator that repeats an `Index` value if the `LendJoin::get` impl relies -/// on not being called twice with the same `Index`. 
(S-TODO update impls: -/// probably restrict, entry, and drain) +/// an iterator that repeats an `Index` value. (S-TODO update impls: probably +/// drain) pub unsafe trait Join { /// Type of joined components. type Type; diff --git a/src/join/par_join.rs b/src/join/par_join.rs index f641f3d83..31755293c 100644 --- a/src/join/par_join.rs +++ b/src/join/par_join.rs @@ -14,9 +14,10 @@ use crate::world::Index; /// /// `ParJoin::get` must be callable from multiple threads, simultaneously. /// -/// The Self::Mask` value returned with the `Self::Value` must correspond such +/// The `Self::Mask` value returned with the `Self::Value` must correspond such /// that it is safe to retrieve items from `Self::Value` whose presence is -/// indicated in the mask. +/// indicated in the mask. As part of this, `BitSetLike::iter` must not produce +/// an iterator that repeats an `Index` value. (S-TODO update impls) pub unsafe trait ParJoin { /// Type of joined components. type Type; @@ -146,7 +147,8 @@ where let JoinProducer { values, keys, .. } = self; // SAFETY: `idx` is obtained from the `Mask` returned by // `ParJoin::open`. The indices here are guaranteed to be distinct - // because of the fact that the bit set is split. + // because of the fact that the bit set is split and because `ParJoin` + // requires that the bit set iterator doesn't repeat indices. let iter = keys.0.map(|idx| unsafe { J::get(values, idx) }); folder.consume_iter(iter) diff --git a/src/prelude.rs b/src/prelude.rs index 91e815cfe..d13d6863a 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -20,7 +20,7 @@ pub use rayon::iter::ParallelIterator; pub use shred::AsyncDispatcher; pub use crate::{ - // D-TODO changeset::ChangeSet, + changeset::ChangeSet, storage::{ ComponentEvent, DefaultVecStorage, DenseVecStorage, FlaggedStorage, HashMapStorage, NullStorage, ReadStorage, Storage, Tracked, VecStorage, WriteStorage, diff --git a/src/saveload/marker.rs b/src/saveload/marker.rs index 61b5b7baa..203e7324c 100644 --- a/src/saveload/marker.rs +++ b/src/saveload/marker.rs @@ -348,9 +348,7 @@ pub trait MarkerAllocator: Resource { entity: Entity, storage: &'m mut WriteStorage, ) -> Option<(&'m M, bool)> { - todo!() - // D-TODO (uncomment when entry API is re-enabled!) - /*let new = if let Ok(entry) = storage.entry(entity) { + let new = if let Ok(entry) = storage.entry(entity) { let mut new = false; let _marker = entry.or_insert_with(|| { new = true; @@ -361,7 +359,7 @@ pub trait MarkerAllocator: Resource { } else { return None; }; - Some((storage.get(entity).unwrap(), new))*/ + Some((storage.get(entity).unwrap(), new)) } /// Maintain internal data. Cleanup if necessary. diff --git a/src/storage/drain.rs b/src/storage/drain.rs index e86b52a5d..8946fcff4 100644 --- a/src/storage/drain.rs +++ b/src/storage/drain.rs @@ -13,6 +13,9 @@ pub struct Drain<'a, T: Component> { pub data: &'a mut MaskedStorage, } +// S-TODO implement LendJoin +// S-TODO implement RepeatableLendGet + impl<'a, T> Join for Drain<'a, T> where T: Component, @@ -23,6 +26,10 @@ where // SAFETY: No invariants to meet and no unsafe code. unsafe fn open(self) -> (Self::Mask, Self::Value) { + // TODO: Cloning the whole bitset here seems expensive, and it is + // hidden from the user, but there is no obvious way to restructure + // things to avoid this with the way that bitsets are composed together + // for iteration. 
let mask = self.data.mask.clone(); (mask, self.data) diff --git a/src/storage/entry.rs b/src/storage/entry.rs index fad2ca174..c51875a8d 100644 --- a/src/storage/entry.rs +++ b/src/storage/entry.rs @@ -162,6 +162,15 @@ where } } +// SAFETY: LendJoin::get impl for this type is safe to call multiple times with +// the same ID. +unsafe impl<'a, 'b: 'a, T: 'a, D: 'a> RepeatableLendGet for Entries<'a, 'b, T, D> +where + T: Component, + D: DerefMut>, +{ +} + /// An entry to a storage which has a component associated to the entity. pub struct OccupiedEntry<'a, 'b: 'a, T: 'a, D: 'a> { id: Index, diff --git a/src/storage/mod.rs b/src/storage/mod.rs index e37ffd2e0..bbecc4f2c 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -6,10 +6,10 @@ pub use self::{ entry::{Entries, OccupiedEntry, StorageEntry, VacantEntry}, flagged::FlaggedStorage, generic::{GenericReadStorage, GenericWriteStorage}, - // D-TODO restrict::{ - // ImmutableParallelRestriction, MutableParallelRestriction, PairedStorage, RestrictedStorage, - // SequentialRestriction, - //}, + restrict::{ + PairedStorageRead, PairedStorageWriteExclusive, PairedStorageWriteShared, + RestrictedStorage, SharedGetOnly, + }, storages::{ BTreeStorage, DefaultVecStorage, DenseVecStorage, HashMapStorage, NullStorage, VecStorage, }, @@ -46,7 +46,7 @@ mod deref_flagged; mod entry; mod flagged; mod generic; -// D-TODO mod restrict; +mod restrict; mod storages; mod sync_unsafe_cell; #[cfg(test)] @@ -612,24 +612,27 @@ mod shared_get_mut_only { pub struct SharedGetMutOnly<'a, T, S>(&'a S, PhantomData); impl<'a, T, S> SharedGetMutOnly<'a, T, S> { - pub fn new(storage: &'a mut S) -> Self { + pub(crate) fn new(storage: &'a mut S) -> Self { Self(storage, PhantomData) } /// # Safety /// - /// May only be called after a call to `insert` with `id` and no following - /// call to `remove` with `id` or to `clean`. + /// May only be called after a call to `insert` with `id` and no + /// following call to `remove` with `id` or to `clean`. /// - /// A mask should keep track of those states, and an `id` being contained in - /// the tracking mask is sufficient to call this method. + /// A mask should keep track of those states, and an `id` being + /// contained in the tracking mask is sufficient to call this method. /// /// There must be no extant aliasing references to this component (i.e. /// obtained with the same `id`). /// - /// Unless `T::Storage` implements `DistinctStorage`, calling this from - /// multiple threads at once is unsound. - pub unsafe fn get(&self, i: Index) -> >::AccessMut<'a> + /// Unless `S: DistinctStorage`, calling this from multiple threads at + /// once is unsound. + pub(crate) unsafe fn get_mut( + this: &Self, + id: Index, + ) -> >::AccessMut<'a> where S: SharedGetMutStorage, { @@ -641,10 +644,10 @@ mod shared_get_mut_only { // exposed outside of this module). // // This means we only have to worry about aliasing references being - // produced by calling `shared_get_mut`. Ensuring these don't alias - // and the remaining safety requirements are passed on to the - // caller. - unsafe { self.0.shared_get_mut(i) } + // produced by calling `SharedGetMutStorage::shared_get_mut`. + // Ensuring these don't alias and the remaining safety requirements + // are passed on to the caller. + unsafe { this.0.shared_get_mut(id) } } } } @@ -679,11 +682,11 @@ where // extant references for the element corresponding to this `id`. // * Since we have an exclusive reference to `Self::Value`, we know this // isn't being called from multiple threads at once. 
- unsafe { value.get(id) } + unsafe { SharedGetMutOnly::get_mut(value, id) } } } -// SAFETY: It is safe to call `SharedGetMutOnly<'a, T>::get` from multiple +// SAFETY: It is safe to call `SharedGetMutOnly<'a, T>::get_mut` from multiple // threads at once since `T::Storage: DistinctStorage`. // // The mask and unprotected storage contained in `MaskedStorage` correspond and @@ -715,7 +718,7 @@ where // references that were obtained with the same `id`. // * `T::Storage` implements the unsafe trait `DistinctStorage` so it is // safe to call this from multiple threads at once. - unsafe { value.get(id) } + unsafe { SharedGetMutOnly::get_mut(value, id) } } } @@ -878,14 +881,16 @@ pub trait SharedGetMutStorage: UnprotectedStorage { /// /// There must be no extant aliasing references to this component (i.e. /// obtained with the same `id`). Additionally, references obtained from - /// methods on this type that take `&self` (e.g. - /// [`UnprotectedStorage::get`], [`SliceAccess::as_slice`], + /// methods on this type that take `&self` (e.g. [`SliceAccess::as_slice`], /// [`Tracked::channel`]) must no longer be alive when /// `shared_get_mut` is called and these methods must not be - /// called while the references returned here are alive. Essentially, - /// the `unsafe` code calling this must hold exclusive access of the - /// storage at some level to ensure only known code is calling `&self` - /// methods during the usage of this method and the references it + /// called while the references returned here are alive. An exception is + /// made for [`UnprotectedStorage::get`] as long as the live references it + /// has returned do not alias with live references returned here. + /// + /// Essentially, the `unsafe` code calling this must hold exclusive access + /// of the storage at some level to ensure only known code is calling + /// `&self` methods during the usage of this method and the references it /// produces. /// /// Unless this type implements `DistinctStorage`, calling this from diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index b60091a30..6026e49c5 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -7,36 +7,20 @@ use std::{ use hibitset::BitSet; use shred::Fetch; -use crate::join::Join; +#[nougat::gat(Type)] +use crate::join::LendJoin; +use crate::join::{Join, RepeatableLendGet}; #[cfg(feature = "parallel")] use crate::join::ParJoin; use crate::{ - storage::{AccessMutReturn, MaskedStorage, Storage, UnprotectedStorage}, + storage::{ + AccessMutReturn, DistinctStorage, MaskedStorage, SharedGetMutStorage, Storage, + UnprotectedStorage, + }, world::{Component, EntitiesRes, Entity, Index}, }; -/// Specifies that the `RestrictedStorage` cannot run in parallel. -/// -/// A mutable `RestrictedStorage` can call `get`, `get_mut`, `get_unchecked` and -/// `get_mut_unchecked` for deferred/restricted access while an immutable -/// version can only call the immutable accessors. -pub enum SequentialRestriction {} -/// Specifies that the `RestrictedStorage` can run in parallel mutably. -/// -/// This means the storage can only call `get_mut_unchecked` and -/// `get_unchecked`. -pub enum MutableParallelRestriction {} -/// Specifies that the `RestrictedStorage` can run in parallel immutably. -/// -/// This means that the storage can call `get`, `get_unchecked`. -pub enum ImmutableParallelRestriction {} - -/// Restrictions that are allowed to access `RestrictedStorage::get`. 
-pub trait ImmutableAliasing: Sized {} -impl ImmutableAliasing for SequentialRestriction {} -impl ImmutableAliasing for ImmutableParallelRestriction {} - /// Similar to a `MaskedStorage` and a `Storage` combined, but restricts usage /// to only getting and modifying the components. That means it's not possible /// to modify the inner bitset so the iteration cannot be invalidated. In other @@ -58,56 +42,73 @@ impl ImmutableAliasing for ImmutableParallelRestriction {} /// fn run(&mut self, (entities, mut some_comps): Self::SystemData) { /// for (entity, mut comps) in (&entities, &mut some_comps.restrict_mut()).join() { /// // Check if the reference is fine to mutate. -/// if comps.get_unchecked().0 < 5 { +/// if comps.get().0 < 5 { /// // Get a mutable reference now. -/// let mut mutable = comps.get_mut_unchecked(); +/// let mut mutable = comps.get_mut(); /// mutable.0 += 1; /// } /// } /// } /// } /// ``` -pub struct RestrictedStorage<'rf, 'st: 'rf, C, S, B, Restrict> -where - C: Component, - S: Borrow + 'rf, - B: Borrow + 'rf, -{ - bitset: B, +pub struct RestrictedStorage<'rf, 'st: 'rf, C, S> { + bitset: &'rf BitSet, data: S, entities: &'rf Fetch<'st, EntitiesRes>, - phantom: PhantomData<(C, Restrict)>, + phantom: PhantomData, } -#[cfg(feature = "parallel")] -unsafe impl<'rf, 'st: 'rf, C, S, B> ParJoin - for &'rf mut RestrictedStorage<'rf, 'st, C, S, B, MutableParallelRestriction> +impl<'st, T, D> Storage<'st, T, D> where - C: Component, - S: BorrowMut + 'rf, - B: Borrow + 'rf, + T: Component, + D: Deref>, { + /// Builds an immutable `RestrictedStorage` out of a `Storage`. Allows + /// deferred unchecked access to the entity's component. + /// + /// This is returned as a `ParallelRestriction` version since you can only + /// get immutable components with this which is safe for parallel by + /// default. + pub fn restrict<'rf>(&'rf self) -> RestrictedStorage<'rf, 'st, T, &T::Storage> { + RestrictedStorage { + bitset: &self.data.mask, + data: &self.data.inner, + entities: &self.entities, + phantom: PhantomData, + } + } } -#[cfg(feature = "parallel")] -unsafe impl<'rf, 'st: 'rf, C, S, B, Restrict> ParJoin - for &'rf RestrictedStorage<'rf, 'st, C, S, B, Restrict> +impl<'st, T, D> Storage<'st, T, D> where - C: Component, - S: Borrow + 'rf, - B: Borrow + 'rf, - Restrict: ImmutableAliasing, + T: Component, + D: DerefMut>, { + /// Builds a mutable `RestrictedStorage` out of a `Storage`. Allows + /// restricted access to the inner components without allowing + /// invalidating the bitset for iteration in `Join`. + pub fn restrict_mut<'rf>(&'rf mut self) -> RestrictedStorage<'rf, 'st, T, &mut T::Storage> { + let (mask, data) = self.data.open_mut(); + RestrictedStorage { + bitset: mask, + data, + entities: &self.entities, + phantom: PhantomData, + } + } } -impl<'rf, 'st: 'rf, C, S, B, Restrict> Join for &'rf RestrictedStorage<'rf, 'st, C, S, B, Restrict> +// SAFETY: `open` returns references to corresponding mask and storage values +// contained in the wrapped `Storage`. Iterating the mask does not repeat +// indices. 
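For orientation, a minimal read-only sketch of the API above, assuming a hypothetical `Pos(f32)` component, a `positions: ReadStorage<Pos>`, an `entities: Entities` handle, and a `target: Entity`, all inside a system's `run`. Joining `&positions.restrict()` yields `PairedStorageRead`, whose `get` returns the current entity's component and whose `get_other` can inspect an arbitrary entity:

    for (_ent, pos) in (&entities, &positions.restrict()).join() {
        // `get_other` may return `None` if `target` has no `Pos`.
        if let Some(other) = pos.get_other(target) {
            let _distance = (pos.get().0 - other.0).abs();
        }
    }
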
+#[nougat::gat] +unsafe impl<'rf, 'st: 'rf, C, S> LendJoin for &'rf RestrictedStorage<'rf, 'st, C, S> where C: Component, S: Borrow, - B: Borrow, { type Mask = &'rf BitSet; - type Type = PairedStorage<'rf, 'st, C, &'rf C::Storage, &'rf BitSet, Restrict>; + type Type<'next> = PairedStorageRead<'rf, 'st, C>; type Value = (&'rf C::Storage, &'rf Fetch<'st, EntitiesRes>, &'rf BitSet); unsafe fn open(self) -> (Self::Mask, Self::Value) { @@ -115,26 +116,40 @@ where (bitset, (self.data.borrow(), self.entities, bitset)) } - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - PairedStorage { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { + // NOTE: Methods on this type rely on safety requiments of this method. + PairedStorageRead { index: id, storage: value.0, entities: value.1, bitset: value.2, - phantom: PhantomData, } } } -impl<'rf, 'st: 'rf, C, S, B, Restrict> Join - for &'rf mut RestrictedStorage<'rf, 'st, C, S, B, Restrict> +// SAFETY: LendJoin::get impl for this type can safely be called multiple times +// with the same ID. +unsafe impl<'rf, 'st: 'rf, C, S> RepeatableLendGet for &'rf RestrictedStorage<'rf, 'st, C, S> +where + C: Component, + S: Borrow, +{ +} + +// SAFETY: `open` returns references to corresponding mask and storage values +// contained in the wrapped `Storage`. Iterating the mask does not repeat +// indices. +#[nougat::gat] +unsafe impl<'rf, 'st: 'rf, C, S> LendJoin for &'rf mut RestrictedStorage<'rf, 'st, C, S> where C: Component, S: BorrowMut, - B: Borrow, { type Mask = &'rf BitSet; - type Type = PairedStorage<'rf, 'st, C, &'rf mut C::Storage, &'rf BitSet, Restrict>; + type Type<'next> = PairedStorageWriteExclusive<'next, 'st, C>; type Value = ( &'rf mut C::Storage, &'rf Fetch<'st, EntitiesRes>, @@ -146,155 +161,411 @@ where (bitset, (self.data.borrow_mut(), self.entities, bitset)) } - unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { - // SAFETY: S-TODO update when changing Join trait - let value: &'rf mut Self::Value = unsafe { &mut *(value as *mut Self::Value) }; - PairedStorage { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> + where + Self: 'next, + { + // NOTE: Methods on this type rely on safety requiments of this method. + PairedStorageWriteExclusive { index: id, storage: value.0, entities: value.1, bitset: value.2, - phantom: PhantomData, } } } -impl<'st, T, D> Storage<'st, T, D> +// SAFETY: LendJoin::get impl for this type can safely be called multiple times +// with the same ID. +unsafe impl<'rf, 'st: 'rf, C, S> RepeatableLendGet for &'rf mut RestrictedStorage<'rf, 'st, C, S> where - T: Component, - D: Deref>, + C: Component, + S: BorrowMut, { - /// Builds an immutable `RestrictedStorage` out of a `Storage`. Allows - /// deferred unchecked access to the entity's component. - /// - /// This is returned as a `ParallelRestriction` version since you can only - /// get immutable components with this which is safe for parallel by - /// default. - pub fn restrict<'rf>( - &'rf self, - ) -> RestrictedStorage<'rf, 'st, T, &T::Storage, &BitSet, ImmutableParallelRestriction> { - RestrictedStorage { - bitset: &self.data.mask, - data: &self.data.inner, - entities: &self.entities, - phantom: PhantomData, - } - } } -impl<'st, T, D> Storage<'st, T, D> +// SAFETY: `open` returns references to corresponding mask and storage values +// contained in the wrapped `Storage`. Iterating the mask does not repeat +// indices. 
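Correspondingly, a minimal sketch of the lending (exclusive) variant just defined, again with a hypothetical `Pos(f32)`, `positions: WriteStorage<Pos>`, `entities: Entities`, and an `other_entity: Entity`. `lend_join` over `&mut positions.restrict_mut()` yields `PairedStorageWriteExclusive`, which also offers `get_other`/`get_other_mut` for arbitrary entities:

    let mut restricted = positions.restrict_mut();
    let mut join = (&entities, &mut restricted).lend_join();
    while let Some((_ent, mut pos)) = join.next() {
        // Check the current entity's component before mutating it.
        if pos.get().0 < 5.0 {
            let mut p = pos.get_mut();
            p.0 += 1.0;
        }
        // Only the lending variant can mutate another entity's component.
        if let Some(mut other) = pos.get_other_mut(other_entity) {
            other.0 = 0.0;
        }
    }
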
+unsafe impl<'rf, 'st: 'rf, C, S> Join for &'rf RestrictedStorage<'rf, 'st, C, S> where - T: Component, - D: DerefMut>, + C: Component, + S: Borrow, { - /// Builds a mutable `RestrictedStorage` out of a `Storage`. Allows - /// restricted access to the inner components without allowing - /// invalidating the bitset for iteration in `Join`. - pub fn restrict_mut<'rf>( - &'rf mut self, - ) -> RestrictedStorage<'rf, 'st, T, &mut T::Storage, &BitSet, SequentialRestriction> { - let (mask, data) = self.data.open_mut(); - RestrictedStorage { - bitset: mask, - data, - entities: &self.entities, - phantom: PhantomData, + type Mask = &'rf BitSet; + type Type = PairedStorageRead<'rf, 'st, C>; + type Value = (&'rf C::Storage, &'rf Fetch<'st, EntitiesRes>, &'rf BitSet); + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let bitset = self.bitset.borrow(); + (bitset, (self.data.borrow(), self.entities, bitset)) + } + + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + // NOTE: Methods on this type rely on safety requiments of this method. + PairedStorageRead { + index: id, + storage: value.0, + entities: value.1, + bitset: value.2, } } +} - /// Builds a mutable, parallel `RestrictedStorage`, - /// does not allow mutably getting other components - /// aside from the current iteration. - pub fn par_restrict_mut<'rf>( - &'rf mut self, - ) -> RestrictedStorage<'rf, 'st, T, &mut T::Storage, &BitSet, MutableParallelRestriction> { - let (mask, data) = self.data.open_mut(); - RestrictedStorage { - bitset: mask, - data, - entities: &self.entities, - phantom: PhantomData, +mod shared_get_only { + use super::{DistinctStorage, Index, SharedGetMutStorage, UnprotectedStorage}; + use core::marker::PhantomData; + + /// This type provides a way to ensure only `shared_get_mut` and `get` can + /// be called for the lifetime `'a` and that no references previously + /// obtained from the storage exist when it is created. While internally + /// this is a shared reference, constructing it requires an exclusive borrow + /// for the lifetime `'a`. + /// + /// This is useful for implementation of [`Join`](super::Join) and + /// [`ParJoin`](super::ParJoin) for `&mut RestrictedStorage`. + pub struct SharedGetOnly<'a, T, S>(&'a S, PhantomData); + + // SAFETY: All fields are required to be `Send` in the where clause. This + // also requires `S: DistinctStorage` so that we can freely duplicate + // `ShareGetOnly` while preventing `get_mut` from being called from multiple + // threads at once. + unsafe impl<'a, T, S> Send for SharedGetOnly<'a, T, S> + where + for<'b> &'b S: Send, + PhantomData: Send, + S: DistinctStorage, + { + } + // SAFETY: See above. + // NOTE: A limitation of this is that `PairedStorageWrite` is not `Sync` in + // some cases where it would be fine (we can address this if it is an issue). + unsafe impl<'a, T, S> Sync for SharedGetOnly<'a, T, S> + where + for<'b> &'b S: Sync, + PhantomData: Sync, + S: DistinctStorage, + { + } + + impl<'a, T, S> SharedGetOnly<'a, T, S> { + pub(super) fn new(storage: &'a mut S) -> Self { + Self(storage, PhantomData) + } + + pub(crate) fn duplicate(this: &Self) -> Self { + Self(this.0, this.1) + } + + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no + /// following call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being + /// contained in the tracking mask is sufficient to call this method. + /// + /// There must be no extant aliasing references to this component (i.e. 
+ /// obtained with the same `id` via this method or [`Self::get`]). + pub(super) unsafe fn get_mut( + this: &Self, + id: Index, + ) -> >::AccessMut<'a> + where + S: SharedGetMutStorage, + { + // SAFETY: `Self::new` takes an exclusive reference to this storage, + // ensuring there are no extant references to its content at the + // time `self` is created and ensuring that only `self` has access + // to the storage for its lifetime and the lifetime of the produced + // `AccessMutReturn`s (the reference we hold to the storage is not + // exposed outside of this module). + // + // This means we only have to worry about aliasing references being + // produced by calling `SharedGetMutStorage::shared_get_mut` (via + // this method) or `UnprotectedStorage::get` (via `Self::get`). + // Ensuring these don't alias is enforced by the requirements on + // this method and `Self::get`. + // + // `Self` is only `Send`/`Sync` when `S: DistinctStorage`. Note, + // that multiple instances of `Self` can be created via `duplicate` + // but they can't be sent between threads (nor can shared references + // be sent) unless `S: DistinctStorage`. These factors, along with + // `Self::new` taking an exclusive reference to the storage, prevent + // calling `shared_get_mut` from multiple threads at once unless `S: + // DistinctStorage`. + // + // The remaining safety requirements are passed on to the caller. + unsafe { this.0.shared_get_mut(id) } + } + + /// # Safety + /// + /// May only be called after a call to `insert` with `id` and no + /// following call to `remove` with `id` or to `clean`. + /// + /// A mask should keep track of those states, and an `id` being + /// contained in the tracking mask is sufficient to call this method. + /// + /// There must be no extant references obtained from [`Self::get_mut`] + /// using the same `id`. + pub(super) unsafe fn get(this: &Self, id: Index) -> &'a T + where + S: UnprotectedStorage, + { + // SAFETY: Safety requirements passed to the caller. + unsafe { this.0.get(id) } } } } +pub use shared_get_only::SharedGetOnly; -/// Pairs a storage with an index, meaning that the index is guaranteed to exist -/// as long as the `PairedStorage` exists. -pub struct PairedStorage<'rf, 'st: 'rf, C, S, B, Restrict> { - index: Index, - storage: S, - bitset: B, - entities: &'rf Fetch<'st, EntitiesRes>, - phantom: PhantomData<(C, Restrict)>, +// SAFETY: `open` returns references to corresponding mask and storage values +// contained in the wrapped `Storage`. Iterating the mask does not repeat +// indices. +unsafe impl<'rf, 'st: 'rf, C, S> Join for &'rf mut RestrictedStorage<'rf, 'st, C, S> +where + C: Component, + S: BorrowMut, + C::Storage: SharedGetMutStorage, +{ + type Mask = &'rf BitSet; + type Type = PairedStorageWriteShared<'rf, C>; + type Value = SharedGetOnly<'rf, C, C::Storage>; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let bitset = &self.bitset; + let storage = SharedGetOnly::new(self.data.borrow_mut()); + (bitset, storage) + } + + unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { + // NOTE: Methods on this type rely on safety requiments of this method. + PairedStorageWriteShared { + index: id, + storage: SharedGetOnly::duplicate(value), + } + } } -impl<'rf, 'st, C, S, B, Restrict> PairedStorage<'rf, 'st, C, S, B, Restrict> +// SAFETY: It is safe to call `get` from multiple threads at once since +// `T::Storage: Sync`. We construct a `PairedStorageRead` which can be used to +// call `UnprotectedStorage::get` which is safe to call concurrently. 
+// +// `open` returns references to corresponding mask and storage values contained +// in the wrapped `Storage`. +unsafe impl<'rf, 'st: 'rf, C, S> ParJoin for &'rf RestrictedStorage<'rf, 'st, C, S> where C: Component, S: Borrow, - B: Borrow, + C::Storage: Sync, { - /// Gets the component related to the current entry without checking whether - /// the storage has it or not. - pub fn get_unchecked(&self) -> &C { - unsafe { self.storage.borrow().get(self.index) } + type Mask = &'rf BitSet; + type Type = PairedStorageRead<'rf, 'st, C>; + type Value = (&'rf C::Storage, &'rf Fetch<'st, EntitiesRes>, &'rf BitSet); + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let bitset = self.bitset.borrow(); + (bitset, (self.data.borrow(), self.entities, bitset)) + } + + unsafe fn get(value: &Self::Value, id: Index) -> Self::Type { + // NOTE: Methods on this type rely on safety requiments of this method. + PairedStorageRead { + index: id, + storage: value.0, + entities: value.1, + bitset: value.2, + } } } -impl<'rf, 'st, C, S, B, Restrict> PairedStorage<'rf, 'st, C, S, B, Restrict> +// SAFETY: It is safe to call `get` from multiple threads at once since +// `T::Storage: Sync`. We construct a `PairedStorageSharedWrite` which can be +// used to call `UnprotectedStorage::get` which is safe to call concurrently and +// `SharedGetOnly::get_mut` which is safe to call concurrently since we require +// `C::Storage: DistinctStorage` here. +// +// `open` returns references to corresponding mask and storage values contained +// in the wrapped `Storage`. +#[cfg(feature = "parallel")] +unsafe impl<'rf, 'st: 'rf, C, S> ParJoin for &'rf mut RestrictedStorage<'rf, 'st, C, S> where C: Component, S: BorrowMut, - B: Borrow, + C::Storage: Sync + SharedGetMutStorage + DistinctStorage, { - /// Gets the component related to the current entry without checking whether - /// the storage has it or not. - pub fn get_mut_unchecked(&mut self) -> AccessMutReturn<'_, C> { - // SAFETY: S-TODO update comment when Join trait is fixed. - unsafe { self.storage.borrow_mut().get_mut(self.index) } + type Mask = &'rf BitSet; + type Type = PairedStorageWriteShared<'rf, C>; + type Value = SharedGetOnly<'rf, C, C::Storage>; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + let bitset = &self.bitset; + let storage = SharedGetOnly::new(self.data.borrow_mut()); + (bitset, storage) } + + unsafe fn get(value: &Self::Value, id: Index) -> Self::Type { + // NOTE: Methods on this type rely on safety requiments of this method. + PairedStorageWriteShared { + index: id, + storage: SharedGetOnly::duplicate(value), + } + } +} + +/// Pairs a storage with an index, meaning that the index is guaranteed to exist +/// as long as the `PairedStorage` exists. +/// +/// Yielded by `lend_join`/`join`/`par_join` on `&storage.restrict()`. +pub struct PairedStorageRead<'rf, 'st: 'rf, C: Component> { + index: Index, + storage: &'rf C::Storage, + bitset: &'rf BitSet, + entities: &'rf Fetch<'st, EntitiesRes>, } -impl<'rf, 'st, C, S, B, Restrict> PairedStorage<'rf, 'st, C, S, B, Restrict> +/// Pairs a storage with an index, meaning that the index is guaranteed to +/// exist. +/// +/// Yielded by `join`/`par_join` on `&mut storage.restrict_mut()`. +pub struct PairedStorageWriteShared<'rf, C: Component> { + index: Index, + storage: SharedGetOnly<'rf, C, C::Storage>, +} + +// SAFETY: All fields are required to implement `Send` in the where clauses. 
We +// also require `C::Storage: DistinctStorage` so that this cannot be sent +// between threads and then used to call `get_mut` from multiple threads at +// once. +unsafe impl Send for PairedStorageWriteShared<'_, C> where C: Component, - S: Borrow, - B: Borrow, - // Only non parallel and immutable parallel storages can access this. - Restrict: ImmutableAliasing, + Index: Send, + for<'a> SharedGetOnly<'a, C, C::Storage>: Send, + C::Storage: DistinctStorage, { - /// Attempts to get the component related to the entity. +} + +/// Pairs a storage with an index, meaning that the index is guaranteed to +/// exist. +/// +/// Yielded by `lend_join` on `&mut storage.restrict_mut()`. +pub struct PairedStorageWriteExclusive<'rf, 'st: 'rf, C: Component> { + index: Index, + storage: &'rf mut C::Storage, + bitset: &'rf BitSet, + entities: &'rf Fetch<'st, EntitiesRes>, +} + +impl<'rf, 'st, C> PairedStorageRead<'rf, 'st, C> +where + C: Component, +{ + /// Gets the component related to the current entity. + /// + /// Note, unlike `get_other` this doesn't need to check whether the + /// component is present. + pub fn get(&self) -> &C { + // SAFETY: This is constructed in the `get` methods of + // `LendJoin`/`Join`/`ParJoin` above. These all require that the mask + // has been checked. + unsafe { self.storage.get(self.index) } + } + + /// Attempts to get the component related to an arbitrary entity. /// /// Functions similar to the normal `Storage::get` implementation. /// /// This only works for non-parallel or immutably parallel /// `RestrictedStorage`. - pub fn get(&self, entity: Entity) -> Option<&C> { - if self.bitset.borrow().contains(entity.id()) && self.entities.is_alive(entity) { - Some(unsafe { self.storage.borrow().get(entity.id()) }) + pub fn get_other(&self, entity: Entity) -> Option<&C> { + if self.bitset.contains(entity.id()) && self.entities.is_alive(entity) { + // SAFETY:We just checked the mask. + Some(unsafe { self.storage.get(entity.id()) }) } else { None } } } -impl<'rf, 'st, C, S, B> PairedStorage<'rf, 'st, C, S, B, SequentialRestriction> +impl<'rf, C> PairedStorageWriteShared<'rf, C> where C: Component, - S: BorrowMut, - B: Borrow, + C::Storage: SharedGetMutStorage, { - /// Attempts to get the component related to the entity mutably. + /// Gets the component related to the current entity. + pub fn get(&self) -> &C { + // SAFETY: See note in `Self::get_mut` below. The only difference is + // that here we take a shared reference which prevents `get_mut` from + // being called while the return value is alive, but also allows this + // method to still be called again (which is fine). + unsafe { SharedGetOnly::get(&self.storage, self.index) } + } + + /// Gets the component related to the current entity. + pub fn get_mut(&mut self) -> AccessMutReturn<'_, C> { + // SAFETY: + // * This is constructed in the `get` methods of `Join`/`ParJoin` above. + // These all require that the mask has been checked. + // * We also require that either there are no subsequent calls with the + // same `id` (`Join`) or that there are not extant references from a + // call with the same `id` (`ParJoin`). Thus, `id` is unique among + // the instances of `Self` created by the join `get` methods. We then + // tie the lifetime of the returned value to the exclusive borrow of + // self which prevents this or `Self::get` from being called while the + // returned reference is still alive. 
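        // For intuition at the call site (sketch with a hypothetical
        // `Pos(f32)` component and a `comps: PairedStorageWriteShared<Pos>`):
        // the `&mut self` receiver is what enforces this exclusivity.
        //
        //     let mut pos = comps.get_mut(); // exclusively borrows `comps`
        //     // comps.get();                // rejected while `pos` is alive
        //     pos.0 += 1.0;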
+ unsafe { SharedGetOnly::get_mut(&self.storage, self.index) } + } +} + +impl<'rf, 'st, C> PairedStorageWriteExclusive<'rf, 'st, C> +where + C: Component, +{ + /// Gets the component related to the current entity. /// - /// Functions similar to the normal `Storage::get_mut` implementation. + /// Note, unlike `get_other` this doesn't need to check whether the + /// component is present. + pub fn get(&self) -> &C { + // SAFETY: This is constructed in `LendJoin::get` which requires that + // the mask has been checked. + unsafe { self.storage.get(self.index) } + } + + /// Gets the component related to the current entity. /// - /// This only works if this is a non-parallel `RestrictedStorage`, - /// otherwise you could access the same component mutably in two different - /// threads. - pub fn get_mut(&mut self, entity: Entity) -> Option> { + /// Note, unlike `get_other_mut` this doesn't need to check whether the + /// component is present. + pub fn get_mut(&mut self) -> AccessMutReturn<'_, C> { + // SAFETY: This is constructed in `LendJoin::get` which requires that + // the mask has been checked. + unsafe { self.storage.get_mut(self.index) } + } + + /// Attempts to get the component related to an arbitrary entity. + /// + /// Functions similar to the normal `Storage::get` implementation. + pub fn get_other(&self, entity: Entity) -> Option<&C> { if self.bitset.borrow().contains(entity.id()) && self.entities.is_alive(entity) { - // SAFETY: S-TODO update comment when Join trait is fixed. - Some(unsafe { self.storage.borrow_mut().get_mut(entity.id()) }) + // SAFETY:We just checked the mask. + Some(unsafe { self.storage.get(entity.id()) }) + } else { + None + } + } + + /// Attempts to mutably get the component related to an arbitrary entity. + /// + /// Functions similar to the normal `Storage::get_mut` implementation. + /// + /// This only works if this is a lending `RestrictedStorage`, otherwise you + /// could access the same component mutably via two different + /// `PairedStorage`s at the same time. + pub fn get_other_mut(&mut self, entity: Entity) -> Option> { + if self.bitset.contains(entity.id()) && self.entities.is_alive(entity) { + // SAFETY:We just checked the mask. + Some(unsafe { self.storage.get_mut(entity.id()) }) } else { None } diff --git a/src/storage/storages.rs b/src/storage/storages.rs index 1b403ec94..8c1eb2a96 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -165,11 +165,7 @@ impl SliceAccess for DenseVecStorage { #[inline] fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.data.as_slice()).get(); - // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of `SharedGetMut::shared_get_mut` which - // requires callers to avoid calling other methods with `&self` while - // references returned there are still in use (and to ensure references - // from methods like this no longer exist). + // SAFETY: See `VecStorage` impl. unsafe { &*unsafe_cell_slice_ptr } } @@ -445,9 +441,9 @@ impl UnprotectedStorage for VecStorage { // following call to `remove` with that id or to `clean`). let ptr = unsafe { self.0.get_unchecked(id as usize) }.get(); // SAFETY: Only method that obtains exclusive references from this - // unsafe cell is `shared_get_mut` and callers are required to - // managed aliasing there and prevent other methods from being called - // while those exclusive references are alive. 
+ // unsafe cell is `shared_get_mut` and callers of that method are + // required to manually ensure that those references don't alias + // references from this method. let maybe_uninit = unsafe { &*ptr }; // SAFETY: Requirement to have `insert`ed this component ensures that it // will be initialized. @@ -518,7 +514,10 @@ impl SharedGetMutStorage for VecStorage { // SAFETY: Caller required to manage aliasing (ensuring there are no // extant shared references into the storage, this is called with // distinct ids, and that other methods that take `&self` aren't called - // while the exclusive references returned here are alive). + // while the exclusive references returned here are alive (except for + // `UnprotectedStorage::get` which may be used with this provided the + // caller avoids creating aliasing references from both that live at the + // same time)). let maybe_uninit = unsafe { &mut *ptr }; // SAFETY: Requirement to have `insert`ed this component ensures that it // will be initialized. @@ -554,11 +553,7 @@ impl SliceAccess for DefaultVecStorage { #[inline] fn as_slice(&self) -> &[Self::Element] { let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get(); - // SAFETY: The only place that mutably accesses these elements via a - // shared reference is the impl of `SharedGetMut::shared_get_mut` which - // requires callers to avoid calling other methods with `&self` while - // references returned there are still in use (and to ensure references - // from methods like this no longer exist). + // SAFETY: See `VecStorage` impl. unsafe { &*unsafe_cell_slice_ptr } } From 6ca2338b29fbd109028bca84651c9ed0eed70bd0 Mon Sep 17 00:00:00 2001 From: Imbris Date: Mon, 13 Feb 2023 00:06:16 -0500 Subject: [PATCH 20/47] Update implementations of ParJoin and callers of ParJoin::get to reflect changes in safety requirements. --- src/join/maybe.rs | 5 ++--- src/join/mod.rs | 16 ++++++++++++---- src/join/par_join.rs | 6 +++--- src/storage/mod.rs | 9 ++++++--- src/storage/restrict.rs | 4 ++++ 5 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/join/maybe.rs b/src/join/maybe.rs index 81a1142e8..cce4bb04b 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -81,8 +81,7 @@ where unsafe fn get((mask, value): &mut Self::Value, id: Index) -> Self::Type { if mask.contains(id) { // SAFETY: The mask was just checked for `id`. This has the same - // requirements on the caller to not call with the same `id` until - // the previous value is no longer in use. + // requirements on the caller to only call with the same `id` once. Some(unsafe { ::get(value, id) }) } else { None @@ -100,7 +99,7 @@ where // thread. // // We return a mask containing all items, but check the original mask in -// the `get` implementation. +// the `get` implementation. Iterating the mask does not repeat indices. #[cfg(feature = "parallel")] unsafe impl ParJoin for MaybeJoin where diff --git a/src/join/mod.rs b/src/join/mod.rs index 0e35bc9b2..40ee5a001 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -297,6 +297,7 @@ macro_rules! define_open { // The returned mask in `open` is the intersection of the masks // from each type in this tuple. So if an `id` is present in the // combined mask, it will be safe to retrieve the corresponding items. + // Iterating the mask does not repeat indices. #[cfg(feature = "parallel")] unsafe impl<$($from,)*> ParJoin for ($($from),*,) where $($from: ParJoin),*, @@ -324,7 +325,9 @@ macro_rules! 
define_open { let &($(ref $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked // the mask, which only has a key that exists in all of the - // storages. + // storages. Requirement for the return value to no longer be + // alive before subsequent calls with the same `id` is passed to + // the caller. unsafe { ($($from::get($from, i),)*) } } @@ -449,6 +452,7 @@ macro_rules! immutable_resource_join { // SAFETY: Since `T` implements `ParJoin` it is safe to deref and defer to // its implementation. S-TODO we can rely on errors if $ty is not sync? + // Iterating the mask does not repeat indices. #[cfg(feature = "parallel")] unsafe impl<'a, 'b, T> ParJoin for &'a $ty where @@ -470,6 +474,8 @@ macro_rules! immutable_resource_join { // SAFETY: The mask of `Self` and `T` are identical, thus a // check to `Self`'s mask (which is required) is equal to a // check of `T`'s mask, which makes `get` safe to call. + // Requirement for the return value to no longer be alive before + // subsequent calls with the same ID is passed to the caller. unsafe { <&'a T as ParJoin>::get(v, i) } } @@ -585,9 +591,11 @@ macro_rules! mutable_resource_join { } unsafe fn get(v: &Self::Value, i: Index) -> Self::Type { - // SAFETY: The mask of `Self` and `T` are identical, thus a check to - // `Self`'s mask (which is required) is equal to a check of `T`'s - // mask, which makes `get_mut` safe to call. + // SAFETY: The mask of `Self` and `T` are identical, thus a + // check to `Self`'s mask (which is required) is equal to a + // check of `T`'s mask, which makes `get_mut` safe to call. + // Requirement for the return value to no longer be alive before + // subsequent calls with the same ID is passed to the caller. unsafe { <&'a mut T as ParJoin>::get(v, i) } } diff --git a/src/join/par_join.rs b/src/join/par_join.rs index 31755293c..9a1a7f9c3 100644 --- a/src/join/par_join.rs +++ b/src/join/par_join.rs @@ -17,7 +17,7 @@ use crate::world::Index; /// The `Self::Mask` value returned with the `Self::Value` must correspond such /// that it is safe to retrieve items from `Self::Value` whose presence is /// indicated in the mask. As part of this, `BitSetLike::iter` must not produce -/// an iterator that repeats an `Index` value. (S-TODO update impls) +/// an iterator that repeats an `Index` value. pub unsafe trait ParJoin { /// Type of joined components. type Type; @@ -58,8 +58,8 @@ pub unsafe trait ParJoin { /// /// * A call to `get` must be preceded by a check if `id` is part of /// `Self::Mask`. - /// * The value returned from this method must be dropped before subsequent - /// calls with the same `id`. (S-TODO update callers to match edit) + /// * The value returned from this method must no longer be alive before + /// subsequent calls with the same `id`. unsafe fn get(value: &Self::Value, id: Index) -> Self::Type; /// If this `LendJoin` typically returns all indices in the mask, then diff --git a/src/storage/mod.rs b/src/storage/mod.rs index bbecc4f2c..9c72f7184 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -96,7 +96,8 @@ unsafe impl<'a> Join for AntiStorage<'a> { } // SAFETY: Since `get` does not do anything it is safe to concurrently call. -// Items are just `()` and it is always safe to retrieve them regardless +// Items are just `()` and it is always safe to retrieve them regardless of the +// mask and value returned by `open`. 
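As a usage sketch of the anti-storage join enabled by the impl below, assuming a hypothetical `Pos` component, `positions: ReadStorage<Pos>`, `entities: Entities`, and the crate prelude in scope for `ParallelIterator`; `!&positions` is the usual negation operator that produces an `AntiStorage`. The yielded item is just `()`, so the join simply filters to entities without the component:

    (&entities, !&positions)
        .par_join()
        .for_each(|(_ent, ())| {
            // `_ent` has no `Pos` component
        });
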
#[cfg(feature = "parallel")] unsafe impl<'a> ParJoin for AntiStorage<'a> { type Mask = BitSetNot<&'a BitSet>; @@ -539,6 +540,7 @@ where // // The mask and unprotected storage contained in `MaskedStorage` correspond and // `open` returns references to them from the same `MaskedStorage` instance. +// Iterating the mask does not repeat indices. #[cfg(feature = "parallel")] unsafe impl<'a, 'e, T, D> ParJoin for &'a Storage<'e, T, D> where @@ -691,7 +693,8 @@ where // // The mask and unprotected storage contained in `MaskedStorage` correspond and // `open` returns references to them from the same `MaskedStorage` instance (the -// storage is wrapped in `SharedGetMutOnly`). +// storage is wrapped in `SharedGetMutOnly`). Iterating the mask does not repeat +// indices. #[cfg(feature = "parallel")] unsafe impl<'a, 'e, T, D> ParJoin for &'a mut Storage<'e, T, D> where @@ -713,7 +716,7 @@ where // SAFETY: // * Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. - // * We also require that the caller drop the value returned before + // * We also require that the returned value is no longer alive before // subsequent calls with the same `id`, so there are no extant // references that were obtained with the same `id`. // * `T::Storage` implements the unsafe trait `DistinctStorage` so it is diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index 6026e49c5..76d9f0ca9 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -354,6 +354,8 @@ where // // `open` returns references to corresponding mask and storage values contained // in the wrapped `Storage`. +// +// Iterating the mask does not repeat indices. unsafe impl<'rf, 'st: 'rf, C, S> ParJoin for &'rf RestrictedStorage<'rf, 'st, C, S> where C: Component, @@ -388,6 +390,8 @@ where // // `open` returns references to corresponding mask and storage values contained // in the wrapped `Storage`. +// +// Iterating the mask does not repeat indices. #[cfg(feature = "parallel")] unsafe impl<'rf, 'st: 'rf, C, S> ParJoin for &'rf mut RestrictedStorage<'rf, 'st, C, S> where From 0fad41d1e2ce6aece709559032796432fca48d57 Mon Sep 17 00:00:00 2001 From: Imbris Date: Mon, 13 Feb 2023 00:34:32 -0500 Subject: [PATCH 21/47] Uncomment `drain` module, update safety comments and implement `LendJoin` for `Drain`. --- src/join/lend_join.rs | 4 ++-- src/join/mod.rs | 4 +--- src/storage/drain.rs | 43 +++++++++++++++++++++++++++++++++++++------ src/storage/mod.rs | 18 +++++++++--------- 4 files changed, 49 insertions(+), 20 deletions(-) diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index b25358953..4d0fac6c3 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -139,8 +139,8 @@ pub unsafe trait LendJoin { /// # Safety /// -/// Implementing this trait guarantees that `::get` can soundly be called -/// multiple times with the same ID. +/// Implementing this trait guarantees that `::get` can +/// soundly be called multiple times with the same ID. pub unsafe trait RepeatableLendGet: LendJoin {} /// Type alias to refer to the `::Type<'next>` (except this diff --git a/src/join/mod.rs b/src/join/mod.rs index 40ee5a001..7b41d6ca2 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -95,8 +95,7 @@ pub use par_join::{JoinParIter, ParJoin}; /// The `Self::Mask` value returned with the `Self::Value` must correspond such /// that it is safe to retrieve items from `Self::Value` whose presence is /// indicated in the mask. 
As part of this, `BitSetLike::iter` must not produce -/// an iterator that repeats an `Index` value. (S-TODO update impls: probably -/// drain) +/// an iterator that repeats an `Index` value. pub unsafe trait Join { /// Type of joined components. type Type; @@ -125,7 +124,6 @@ pub unsafe trait Join { /// Get a joined component value by a given index. /// - // S-TODO: evaluate all impls (TODO: probably restrict, entry, and drain) /// /// # Safety /// diff --git a/src/storage/drain.rs b/src/storage/drain.rs index 8946fcff4..294b996f5 100644 --- a/src/storage/drain.rs +++ b/src/storage/drain.rs @@ -1,7 +1,9 @@ use hibitset::BitSet; +#[nougat::gat(Type)] +use crate::join::LendJoin; use crate::{ - join::Join, + join::{Join, RepeatableLendGet}, storage::MaskedStorage, world::{Component, Index}, }; @@ -13,10 +15,41 @@ pub struct Drain<'a, T: Component> { pub data: &'a mut MaskedStorage, } -// S-TODO implement LendJoin -// S-TODO implement RepeatableLendGet +// SAFETY: Calling `get` is always safe! Iterating the mask does not repeat +// indices. +#[nougat::gat] +unsafe impl<'a, T> LendJoin for Drain<'a, T> +where + T: Component, +{ + type Mask = BitSet; + type Type<'next> = T; + type Value = &'a mut MaskedStorage; + + unsafe fn open(self) -> (Self::Mask, Self::Value) { + // TODO: Cloning the whole bitset here seems expensive, and it is + // hidden from the user, but there is no obvious way to restructure + // things to avoid this with the way that bitsets are composed together + // for iteration. + let mask = self.data.mask.clone(); + + (mask, self.data) + } + + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> T + where + Self: 'next, + { + value.remove(id).expect("Tried to access same index twice") + } +} + +// SAFETY: Calling `get` is always safe! +unsafe impl<'a, T> RepeatableLendGet for Drain<'a, T> where T: Component {} -impl<'a, T> Join for Drain<'a, T> +// SAFETY: Calling `get` is always safe! Iterating the mask does not repeat +// indices. +unsafe impl<'a, T> Join for Drain<'a, T> where T: Component, { @@ -24,7 +57,6 @@ where type Type = T; type Value = &'a mut MaskedStorage; - // SAFETY: No invariants to meet and no unsafe code. unsafe fn open(self) -> (Self::Mask, Self::Value) { // TODO: Cloning the whole bitset here seems expensive, and it is // hidden from the user, but there is no obvious way to restructure @@ -35,7 +67,6 @@ where (mask, self.data) } - // SAFETY: No invariants to meet and no unsafe code. unsafe fn get(value: &mut Self::Value, id: Index) -> T { value.remove(id).expect("Tried to access same index twice") } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 9c72f7184..0fccdbedb 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -37,12 +37,12 @@ use crate::{ world::{Component, EntitiesRes, Entity, Index}, }; -// D-TODO use self::drain::Drain; +use self::drain::Drain; use self::sync_unsafe_cell::SyncUnsafeCell; mod data; mod deref_flagged; -// D-TODO mod drain; +mod drain; mod entry; mod flagged; mod generic; @@ -449,13 +449,13 @@ where self.data.clear(); } - // /// Creates a draining storage wrapper which can be `.join`ed - // /// to get a draining iterator. - // D-TODO pub fn drain(&mut self) -> Drain { - // Drain { - // data: &mut self.data, - // } - //} + /// Creates a draining storage wrapper which can be `.join`ed + /// to get a draining iterator. 
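A minimal usage sketch for the method below, assuming a hypothetical `positions: WriteStorage<Pos>`. Joining the drain wrapper yields each component by value and removes it from the storage as it is produced:

    for pos in positions.drain().join() {
        // `pos` is owned here; it has already been removed from the storage.
        drop(pos);
    }
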
+ pub fn drain(&mut self) -> Drain { + Drain { + data: &mut self.data, + } + } } impl<'a, T, D: Clone> Clone for Storage<'a, T, D> { From 2c7d5e635088577691092df8ff98c448d769c5b3 Mon Sep 17 00:00:00 2001 From: Imbris Date: Mon, 13 Feb 2023 01:07:21 -0500 Subject: [PATCH 22/47] Fix errors and warnings in tests and examples --- benches/parallel.rs | 4 ++++ examples/track.rs | 2 +- src/storage/tests.rs | 28 +++++++++++++++------------- tests/saveload.rs | 2 ++ tests/tests.rs | 14 ++++++++++---- 5 files changed, 32 insertions(+), 18 deletions(-) diff --git a/benches/parallel.rs b/benches/parallel.rs index 27a03b98f..588693c84 100644 --- a/benches/parallel.rs +++ b/benches/parallel.rs @@ -55,6 +55,7 @@ impl Component for Lifetime { } #[derive(Clone, Copy, Debug)] +#[allow(dead_code)] struct Ball { radius: f32, } @@ -64,6 +65,7 @@ impl Component for Ball { } #[derive(Clone, Copy, Debug)] +#[allow(dead_code)] struct Rect { a: f32, b: f32, @@ -91,6 +93,7 @@ impl Component for SpawnRequests { } #[derive(Clone, Copy, Debug)] +#[allow(dead_code)] struct Collision { a: Entity, b: Entity, @@ -102,6 +105,7 @@ impl Component for Collision { } #[derive(Clone, Copy, Debug)] +#[allow(dead_code)] struct Room { inner_width: f32, inner_height: f32, diff --git a/examples/track.rs b/examples/track.rs index 1e231f39d..c32b6de83 100644 --- a/examples/track.rs +++ b/examples/track.rs @@ -70,7 +70,7 @@ impl<'a> System<'a> for SysB { fn run(&mut self, (entities, mut tracked): Self::SystemData) { for (entity, mut restricted) in (&entities, &mut tracked.restrict_mut()).join() { if entity.id() % 2 == 0 { - let mut comp = restricted.get_mut_unchecked(); + let mut comp = restricted.get_mut(); comp.0 += 1; } } diff --git a/src/storage/tests.rs b/src/storage/tests.rs index 6c5b76574..c74df94f5 100644 --- a/src/storage/tests.rs +++ b/src/storage/tests.rs @@ -301,6 +301,7 @@ mod test { for i in 0..1_000 { *s.get_mut(Entity::new(i, Generation::new(1))) .unwrap() + .access_mut() .as_mut() -= 718; } @@ -330,6 +331,7 @@ mod test { for i in 0..1_000 { *s.get_mut_or_default(Entity::new(i, Generation::new(1))) .unwrap() + .access_mut() .as_mut() += i; } @@ -698,9 +700,9 @@ mod test { } for mut comps in (&mut s1.restrict_mut()).join() { - let c1 = { comps.get_unchecked().0 }; + let c1 = { comps.get().0 }; - let c2 = { comps.get_mut_unchecked().0 }; + let c2 = { comps.get_mut().0 }; assert_eq!( c1, c2, @@ -740,14 +742,12 @@ mod test { let components2 = Mutex::new(Vec::new()); let components2_mut = Mutex::new(Vec::new()); - (&mut s1.par_restrict_mut()) - .par_join() - .for_each(|mut comps| { - let (mut components2, mut components2_mut) = - (components2.lock().unwrap(), components2_mut.lock().unwrap()); - components2.push(comps.get_unchecked().0); - components2_mut.push(comps.get_mut_unchecked().0); - }); + (&mut s1.restrict_mut()).par_join().for_each(|mut comps| { + let (mut components2, mut components2_mut) = + (components2.lock().unwrap(), components2_mut.lock().unwrap()); + components2.push(comps.get().0); + components2_mut.push(comps.get_mut().0); + }); let components2 = components2.into_inner().unwrap(); assert_eq!( components2, @@ -994,7 +994,7 @@ mod test { #[test] fn entries() { - use crate::{join::Join, storage::WriteStorage, world::Entities}; + use crate::{join::LendJoin, storage::WriteStorage, world::Entities}; let mut w = World::new(); @@ -1018,10 +1018,12 @@ mod test { let mut sum = 0; w.exec(|(e, mut s): (Entities, WriteStorage)| { - sum = (&e, s.entries()).join().fold(0, |acc, (_, value)| { + let mut acc = 0; + (&e, 
s.entries()).lend_join().for_each(|(_, value)| { let v = value.or_insert(2.into()); - acc + v.0 + acc = acc + v.0; }); + sum = acc; }); assert_eq!(sum, 135); diff --git a/tests/saveload.rs b/tests/saveload.rs index 26e717486..1623f64fc 100644 --- a/tests/saveload.rs +++ b/tests/saveload.rs @@ -56,6 +56,7 @@ mod tests { struct TupleSerdeType(u32); #[derive(Clone)] + #[allow(dead_code)] struct UnserializableType { inner: u32, } @@ -67,6 +68,7 @@ mod tests { } #[derive(Serialize, Deserialize, Clone)] + #[allow(dead_code)] struct ComplexSerdeType { #[serde(skip, default)] opaque: UnserializableType, diff --git a/tests/tests.rs b/tests/tests.rs index 4cbd17ca5..27b791921 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -550,7 +550,7 @@ fn par_join_many_entities_and_systems() { } #[test] -fn getting_specific_entity_with_join() { +fn getting_specific_entity_with_lend_join() { let mut world = create_world(); world .create_entity() @@ -565,12 +565,16 @@ fn getting_specific_entity_with_join() { assert_eq!( Some((&CompInt(1), &mut CompBool(true))), - (&ints, &mut bools).join().get(entity, &world.entities()) + (&ints, &mut bools) + .lend_join() + .get(entity, &world.entities()) ); bools.remove(entity); assert_eq!( None, - (&ints, &mut bools).join().get(entity, &world.entities()) + (&ints, &mut bools) + .lend_join() + .get(entity, &world.entities()) ); entity }; @@ -584,7 +588,9 @@ fn getting_specific_entity_with_join() { let mut bools = world.write_storage::(); assert_eq!( None, - (&ints, &mut bools).join().get(entity, &world.entities()) + (&ints, &mut bools) + .lend_join() + .get(entity, &world.entities()) ); } From c33cd7ced43f1e1a6ccc93b4f0938c71be07f769 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 26 Feb 2023 00:33:13 -0500 Subject: [PATCH 23/47] Add missing documentation and fix some doc tests --- Cargo.toml | 12 +++++++----- src/storage/deref_flagged.rs | 3 ++- src/storage/entry.rs | 2 +- src/storage/flagged.rs | 4 ++-- src/storage/mod.rs | 2 ++ 5 files changed, 14 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 285f458b8..eeb4c4720 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,11 +22,13 @@ autobenches = false [dependencies] ahash = "0.7.6" crossbeam-queue = "0.3" -hibitset = { version = "0.6.3", default-features = false } +# TODO make PR +# hibitset = { version = "0.6.3", default-features = false } +hibitset = { path = "../hibitset", default-features = false } log = "0.4.8" # waiting on https://github.com/amethyst/shred/pull/223 -# shred = { version = "0.14.1", default-features = false } -shred = { path = "../shred", default-features = false } +shred = { version = "0.14.1", default-features = false } +# shred = { path = "../shred", default-features = false } shrev = "1.1.1" tuple_utils = "0.4.0" nougat = "0.2.3" @@ -56,8 +58,8 @@ ron = "0.7.1" rand = "0.8" serde_json = "1.0.48" # waiting on https://github.com/amethyst/shred/pull/223 -# shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } -shred = { path = "../shred", default-features = false, features = ["shred-derive"] } +shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } +# shred = { path = "../shred", default-features = false, features = ["shred-derive"] } specs-derive = { path = "specs-derive", version = "0.4.1" } [[example]] diff --git a/src/storage/deref_flagged.rs b/src/storage/deref_flagged.rs index f2f53046d..994c8accb 100644 --- a/src/storage/deref_flagged.rs +++ b/src/storage/deref_flagged.rs @@ -123,7 +123,8 @@ impl Tracked for 
DerefFlaggedStorage { } } -/// S-TODO document +/// Wrapper type only emits modificaition events when the component is accessed +/// via mutably dereferencing. Also see [`DerefFlaggedStorage`] documentation. pub struct FlaggedAccessMut<'a, A, C> { channel: &'a mut EventChannel, emit: bool, diff --git a/src/storage/entry.rs b/src/storage/entry.rs index c51875a8d..f039263c0 100644 --- a/src/storage/entry.rs +++ b/src/storage/entry.rs @@ -101,7 +101,7 @@ where /// # } /// # /// # world.exec(|(mut counters, marker): (WriteStorage, ReadStorage)| { - /// let mut join = (counter.entries(), &marker).lend_join(); + /// let mut join = (counters.entries(), &marker).lend_join(); /// while let Some((mut counter, _)) = join.next() { /// let counter = counter.or_insert_with(Default::default); /// counter.increase(); diff --git a/src/storage/flagged.rs b/src/storage/flagged.rs index 528594cca..51bcf65c3 100644 --- a/src/storage/flagged.rs +++ b/src/storage/flagged.rs @@ -20,7 +20,7 @@ use shrev::EventChannel; /// /// What you want to instead is to use `restrict_mut()` to first /// get the entities which contain the component and then conditionally -/// modify the component after a call to `get_mut_unchecked()` or `get_mut()`. +/// modify the component after a call to `get_mut()` or `get_other_mut()`. /// /// # Examples /// @@ -102,7 +102,7 @@ use shrev::EventChannel; /// # let condition = true; /// for (entity, mut comps) in (&entities, &mut comps.restrict_mut()).join() { /// if condition { // check whether this component should be modified. -/// let mut comp = comps.get_mut_unchecked(); +/// let mut comp = comps.get_mut(); /// // ... /// } /// } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 0fccdbedb..f6ccfcc03 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -867,6 +867,8 @@ pub trait UnprotectedStorage: TryDefault { } } +/// Used by the framework to mutably access components in contexts where +/// exclusive access to the storage is not possible. pub trait SharedGetMutStorage: UnprotectedStorage { /// Gets mutable access to the the data associated with an `Index`. 
/// From 09f1a4cb315d3c9f3b78220959488a1ed93db862 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 26 Feb 2023 00:44:17 -0500 Subject: [PATCH 24/47] Adjust for changes in shred MetaTable --- Cargo.toml | 13 ++++++------- src/storage/data.rs | 4 ++-- src/storage/mod.rs | 8 +++----- src/world/world_ext.rs | 6 +++--- 4 files changed, 14 insertions(+), 17 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index eeb4c4720..38e4c708d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,13 +22,13 @@ autobenches = false [dependencies] ahash = "0.7.6" crossbeam-queue = "0.3" -# TODO make PR +# waiting on PR and new version to be published # hibitset = { version = "0.6.3", default-features = false } hibitset = { path = "../hibitset", default-features = false } log = "0.4.8" -# waiting on https://github.com/amethyst/shred/pull/223 -shred = { version = "0.14.1", default-features = false } -# shred = { path = "../shred", default-features = false } +# waiting on PR and new version to be published +# shred = { version = "0.14.1", default-features = false } +shred = { path = "../shred", default-features = false } shrev = "1.1.1" tuple_utils = "0.4.0" nougat = "0.2.3" @@ -57,9 +57,8 @@ criterion = "0.3.1" ron = "0.7.1" rand = "0.8" serde_json = "1.0.48" -# waiting on https://github.com/amethyst/shred/pull/223 -shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } -# shred = { path = "../shred", default-features = false, features = ["shred-derive"] } +# shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } +shred = { path = "../shred", default-features = false, features = ["shred-derive"] } specs-derive = { path = "specs-derive", version = "0.4.1" } [[example]] diff --git a/src/storage/data.rs b/src/storage/data.rs index 08655ca07..3461e0109 100644 --- a/src/storage/data.rs +++ b/src/storage/data.rs @@ -124,7 +124,7 @@ where res.entry::>() .or_insert_with(|| MaskedStorage::new(::unwrap_default())); res.fetch_mut::>() - .register(&*res.fetch::>()); + .register::>(); } fn fetch(res: &'a World) -> Self { @@ -211,7 +211,7 @@ where res.entry::>() .or_insert_with(|| MaskedStorage::new(::unwrap_default())); res.fetch_mut::>() - .register(&*res.fetch::>()); + .register::>(); } fn fetch(res: &'a World) -> Self { diff --git a/src/storage/mod.rs b/src/storage/mod.rs index f6ccfcc03..eba0f8042 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -117,15 +117,13 @@ pub trait AnyStorage { fn drop(&mut self, entities: &[Entity]); } +// SAFETY: Returned pointer has a vtable valid for `T` and retains the same +// address/provenance. 
unsafe impl CastFrom for dyn AnyStorage where T: AnyStorage + 'static, { - fn cast(t: &T) -> &Self { - t - } - - fn cast_mut(t: &mut T) -> &mut Self { + fn cast(t: *mut T) -> *mut Self { t } } diff --git a/src/world/world_ext.rs b/src/world/world_ext.rs index 2d11ac73e..80cec67f8 100644 --- a/src/world/world_ext.rs +++ b/src/world/world_ext.rs @@ -320,7 +320,7 @@ impl WorldExt for World { self.entry() .or_insert_with(move || MaskedStorage::::new(storage())); self.fetch_mut::>() - .register(&*self.fetch::>()); + .register::>(); } fn add_resource(&mut self, res: T) { @@ -414,8 +414,8 @@ impl WorldExt for World { } fn delete_components(&mut self, delete: &[Entity]) { - for storage in self.fetch_mut::>().iter_mut(self) { - storage.drop(delete); + for mut storage in self.fetch_mut::>().iter_mut(self) { + (&mut *storage).drop(delete); } } } From c3a301f1e636e7cf94f0683730e9387865a9ba90 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 26 Feb 2023 11:47:50 -0500 Subject: [PATCH 25/47] Reduce loop iterations on some tests when running with Miri so that they finish in a reasonable time --- src/storage/tests.rs | 59 +++++++++++++++++++++++--------------------- tests/no_parallel.rs | 1 + tests/tests.rs | 7 ++++-- 3 files changed, 37 insertions(+), 30 deletions(-) diff --git a/src/storage/tests.rs b/src/storage/tests.rs index c74df94f5..b25a053e6 100644 --- a/src/storage/tests.rs +++ b/src/storage/tests.rs @@ -5,6 +5,9 @@ use crate::world::{Component, Entity, Generation, Index, WorldExt}; use shred::World; use std::mem::MaybeUninit; +// Make tests finish in reasonable time with miri +const ITERATIONS: u32 = if cfg!(miri) { 100 } else { 1000 }; + fn create(world: &mut World) -> WriteStorage where T::Storage: Default, @@ -32,13 +35,13 @@ mod map_test { let mut w = World::new(); let mut c = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = c.insert(ent(i), Comp(i)) { panic!("Failed to insert component into entity! {:?}", err); } } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert_eq!(c.get(ent(i)).unwrap().0, i); } } @@ -64,21 +67,21 @@ mod map_test { let mut w = World::new(); let mut c = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = c.insert(ent(i), Comp(i)) { panic!("Failed to insert component into entity! {:?}", err); } } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert_eq!(c.get(ent(i)).unwrap().0, i); } - for i in 0..1_000 { + for i in 0..ITERATIONS { c.remove(ent(i)); } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert!(c.get(ent(i)).is_none()); } } @@ -88,7 +91,7 @@ mod map_test { let mut w = World::new(); let mut c = create(&mut w); - for i in 0..1_000i32 { + for i in 0..ITERATIONS as i32 { if let Err(err) = c.insert(ent(i as u32), Comp(i)) { panic!("Failed to insert component into entity! {:?}", err); } @@ -97,7 +100,7 @@ mod map_test { } } - for i in 0..1_000i32 { + for i in 0..ITERATIONS as i32 { assert_eq!(c.get(ent(i as u32)).unwrap().0, -i); } } @@ -249,13 +252,13 @@ mod test { let mut w = World::new(); let mut s: Storage = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = s.insert(Entity::new(i, Generation::new(1)), (i + 2718).into()) { panic!("Failed to insert component into entity! 
{:?}", err); } } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert_eq!( s.get(Entity::new(i, Generation::new(1))).unwrap(), &(i + 2718).into() @@ -270,13 +273,13 @@ mod test { let mut w = World::new(); let mut s: Storage = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = s.insert(Entity::new(i, Generation::new(1)), (i + 2718).into()) { panic!("Failed to insert component into entity! {:?}", err); } } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert_eq!( s.remove(Entity::new(i, Generation::new(1))).unwrap(), (i + 2718).into() @@ -292,20 +295,20 @@ mod test { let mut w = World::new(); let mut s: Storage = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = s.insert(Entity::new(i, Generation::new(1)), (i + 2718).into()) { panic!("Failed to insert component into entity! {:?}", err); } } - for i in 0..1_000 { + for i in 0..ITERATIONS { *s.get_mut(Entity::new(i, Generation::new(1))) .unwrap() .access_mut() .as_mut() -= 718; } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert_eq!( s.get(Entity::new(i, Generation::new(1))).unwrap(), &(i + 2000).into() @@ -322,21 +325,21 @@ mod test { // Insert the first 500 components manually, leaving indices 500..1000 // unoccupied. - for i in 0..500 { + for i in 0..ITERATIONS / 2 { if let Err(err) = s.insert(Entity::new(i, Generation::new(1)), (i).into()) { panic!("Failed to insert component into entity! {:?}", err); } } - for i in 0..1_000 { + for i in 0..ITERATIONS { *s.get_mut_or_default(Entity::new(i, Generation::new(1))) .unwrap() .access_mut() .as_mut() += i; } - // The first 500 were initialized, and should be i*2. - for i in 0..500 { + // The first ITERATIONS / 2 were initialized, and should be i*2. + for i in 0..ITERATIONS / 2 { assert_eq!( s.get(Entity::new(i, Generation::new(1))).unwrap(), &(i + i).into() @@ -344,7 +347,7 @@ mod test { } // The rest were Default-initialized, and should equal i. - for i in 500..1_000 { + for i in ITERATIONS / 2..ITERATIONS { assert_eq!( s.get(Entity::new(i, Generation::new(1))).unwrap(), &(i).into() @@ -359,7 +362,7 @@ mod test { let mut w = World::new(); let mut s: Storage = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = s.insert(Entity::new(i, Generation::new(1)), (i + 2718).into()) { panic!("Failed to insert component into entity! {:?}", err); } @@ -370,7 +373,7 @@ mod test { } } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert!(s.get(Entity::new(i, Generation::new(2))).is_none()); assert_eq!( s.get(Entity::new(i, Generation::new(1))).unwrap(), @@ -386,7 +389,7 @@ mod test { let mut w = World::new(); let mut s: Storage = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if s.insert(Entity::new(i, Generation::new(2)), (i + 2718).into()) .is_ok() { @@ -394,7 +397,7 @@ mod test { } } - for i in 0..1_000 { + for i in 0..ITERATIONS { assert!(s.remove(Entity::new(i, Generation::new(1))).is_none()); } } @@ -444,14 +447,14 @@ mod test { let mut w = World::new(); let mut s: Storage = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = s.insert(Entity::new(i, Generation::new(1)), (i + 2718).into()) { panic!("Failed to insert component into entity! 
{:?}", err); } } let slice = s.as_slice(); - assert_eq!(slice.len(), 1_000); + assert_eq!(slice.len(), ITERATIONS as usize); for (i, v) in slice.iter().enumerate() { assert_eq!(v, &(i as u32 + 2718).into()); } @@ -464,14 +467,14 @@ mod test { let mut w = World::new(); let mut s: Storage = create(&mut w); - for i in 0..1_000 { + for i in 0..ITERATIONS { if let Err(err) = s.insert(Entity::new(i, Generation::new(1)), (i + 2718).into()) { panic!("Failed to insert component into entity! {:?}", err); } } let slice = s.as_slice(); - assert_eq!(slice.len(), 1_000); + assert_eq!(slice.len(), ITERATIONS as usize); for (i, v) in slice.iter().enumerate() { let v = unsafe { &*v.as_ptr() }; assert_eq!(v, &(i as u32 + 2718).into()); diff --git a/tests/no_parallel.rs b/tests/no_parallel.rs index 0504af5b6..ee8941b6e 100644 --- a/tests/no_parallel.rs +++ b/tests/no_parallel.rs @@ -1,4 +1,5 @@ #![cfg(not(feature = "parallel"))] +// TODO: ensure we run these with miri too use std::rc::Rc; diff --git a/tests/tests.rs b/tests/tests.rs index 27b791921..2c08f12dd 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -4,6 +4,9 @@ use specs::{ world::{Builder, WorldExt}, }; +// Make tests finish in reasonable time with miri +const ITERATIONS: u32 = if cfg!(miri) { 20 } else { 1000 }; + #[derive(Clone, Debug, PartialEq)] struct CompInt(i8); @@ -97,7 +100,7 @@ fn dynamic_create() { let mut world = create_world(); let mut dispatcher = DispatcherBuilder::new().with(Sys, "s", &[]).build(); - for _ in 0..1_000 { + for _ in 0..ITERATIONS { dispatcher.dispatch(&mut world); } } @@ -118,7 +121,7 @@ fn dynamic_deletion() { let mut world = create_world(); let mut dispatcher = DispatcherBuilder::new().with(Sys, "s", &[]).build(); - for _ in 0..1_000 { + for _ in 0..ITERATIONS { dispatcher.dispatch(&mut world); } } From e8f80e78e7360fb0dbff0f390430e9d60644fe3f Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 26 Feb 2023 17:13:38 -0500 Subject: [PATCH 26/47] Appease clippy --- src/changeset.rs | 4 ++++ src/join/lend_join.rs | 1 + src/storage/mod.rs | 2 ++ src/storage/storages.rs | 2 ++ src/world/entity.rs | 5 ++--- src/world/world_ext.rs | 2 +- 6 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/changeset.rs b/src/changeset.rs index 7129862ce..9babb8c0f 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -222,6 +222,10 @@ unsafe impl<'a, T> Join for &'a ChangeSet { // together in the `ChangeSet` and correspond. Iterating mask does not repeat // indices. #[nougat::gat] +// This is a trait impl so method safety documentation is on the trait +// definition, maybe nougat confuses clippy? But this is the only spot that has +// this issue. +#[allow(clippy::missing_safety_doc)] unsafe impl LendJoin for ChangeSet { type Mask = BitSet; type Type<'next> = T; diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index 4d0fac6c3..b4d482b50 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -182,6 +182,7 @@ impl JoinLendIter { /// Can be used to iterate with this pattern: /// /// `while let Some(components) = join_lending_iter.next() {` + #[allow(clippy::should_implement_trait)] // we want this to look like iterator pub fn next(&mut self) -> Option> { // SAFETY: Since `idx` is yielded from `keys` (the mask), it is // necessarily a part of it. 
`LendJoin` requires that the iterator diff --git a/src/storage/mod.rs b/src/storage/mod.rs index eba0f8042..d9ef2d97a 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -141,6 +141,8 @@ where /// This is a marker trait which requires you to uphold the following guarantee: /// +/// # Safety +/// /// > Multiple threads may call `SharedGetMutStorage::shared_get_mut()` /// with distinct indices without causing > undefined behavior. /// diff --git a/src/storage/storages.rs b/src/storage/storages.rs index 8c1eb2a96..f36a01d2a 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -461,6 +461,8 @@ impl UnprotectedStorage for VecStorage { unsafe { maybe_uninit.assume_init_mut() } } + // false positive https://github.com/rust-lang/rust-clippy/issues/10407 + #[allow(clippy::uninit_vec)] unsafe fn insert(&mut self, id: Index, v: T) { let id = if Index::BITS > usize::BITS { // Saturate the cast to usize::MAX so if this overflows usize the diff --git a/src/world/entity.rs b/src/world/entity.rs index b4a132d62..071f6203e 100644 --- a/src/world/entity.rs +++ b/src/world/entity.rs @@ -207,9 +207,8 @@ impl Allocator { } fn update_generation_length(&mut self, i: usize) { - if self.generations.len() <= i as usize { - self.generations - .resize(i as usize + 1, ZeroableGeneration(None)); + if self.generations.len() <= i { + self.generations.resize(i + 1, ZeroableGeneration(None)); } } } diff --git a/src/world/world_ext.rs b/src/world/world_ext.rs index 80cec67f8..df44fa8be 100644 --- a/src/world/world_ext.rs +++ b/src/world/world_ext.rs @@ -415,7 +415,7 @@ impl WorldExt for World { fn delete_components(&mut self, delete: &[Entity]) { for mut storage in self.fetch_mut::>().iter_mut(self) { - (&mut *storage).drop(delete); + (*storage).drop(delete); } } } From 391b310eca060f1d776a50e11a0c0678d0786cda Mon Sep 17 00:00:00 2001 From: Imbris Date: Mon, 27 Feb 2023 22:20:25 -0500 Subject: [PATCH 27/47] Small update to tutorial to reflect method name change --- docs/tutorials/src/12_tracked.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/tutorials/src/12_tracked.md b/docs/tutorials/src/12_tracked.md index a9d0a2df9..5e88b0405 100644 --- a/docs/tutorials/src/12_tracked.md +++ b/docs/tutorials/src/12_tracked.md @@ -117,8 +117,8 @@ fetch the component as mutable if/when needed. for (entity, mut comp) in (&entities, &mut comps.restrict_mut()).join() { // Check whether this component should be modified, without fetching it as // mutable. - if comp.get_unchecked().condition < 5 { - let mut comp = comp.get_mut_unchecked(); + if comp.get().condition < 5 { + let mut comp = comp.get_mut(); // ... } } @@ -137,4 +137,4 @@ simply call `storage.set_event_emission(true)`. _See [FlaggedStorage Doc](https://docs.rs/specs/latest/specs/struct.FlaggedStorage.html) -for more into._ \ No newline at end of file +for more into._ From 6b74393d86f66e5580d5ce8d6333e6fbdf1b8bd7 Mon Sep 17 00:00:00 2001 From: Imbris Date: Mon, 27 Feb 2023 23:39:05 -0500 Subject: [PATCH 28/47] Enhance LendJoin docs to hopefully explain its purpose and usage Also: * Make `lend_join` example more comprehensive to showcase the options for iterating without the Iterator trait, as well as the JoinLendIter::get method. Include comments in the example to explain different aspects. * Fix/add various links in code docs. * Publically export `SliceAccess` trait since it appears in bounds on the public `Storage::as_slice`/`Storage::as_mut_slice`. 
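For illustration, here is a minimal sketch (not part of the diff below) of the `JoinLendIter::get` usage this message refers to; the `Pos`/`Vel` components and their `VecStorage` choice are placeholders in the style of the crate's examples:

```rust
use specs::prelude::*;

struct Pos(f32);
impl Component for Pos {
    type Storage = VecStorage<Self>;
}

struct Vel(f32);
impl Component for Vel {
    type Storage = VecStorage<Self>;
}

fn main() {
    let mut world = World::new();
    world.register::<Pos>();
    world.register::<Vel>();
    let entity = world.create_entity().with(Pos(0.0)).with(Vel(2.0)).build();

    let mut pos = world.write_storage::<Pos>();
    let vel = world.read_storage::<Vel>();
    let entities = world.entities();

    // A lending join hands out one joined item at a time, and `get` looks up
    // the joined result for a specific entity instead of calling `get` on
    // each storage individually.
    let mut lending = (&mut pos, &vel).lend_join();
    if let Some((p, v)) = lending.get(entity, &entities) {
        p.0 += v.0;
    }
}
```

The exclusive borrow taken by `next`/`get` is what lets this be offered soundly, as the new module docs below explain.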
--- examples/lend_join.rs | 33 ++++++++++++++++++++++++++++++--- src/join/lend_join.rs | 34 +++++++++++++++++++++++++++++++--- src/join/maybe.rs | 2 +- src/storage/entry.rs | 10 ++++++---- src/storage/mod.rs | 5 ++--- src/storage/storages.rs | 3 +++ 6 files changed, 73 insertions(+), 14 deletions(-) diff --git a/examples/lend_join.rs b/examples/lend_join.rs index b9a25610c..603e31009 100644 --- a/examples/lend_join.rs +++ b/examples/lend_join.rs @@ -10,16 +10,43 @@ fn main() { world.register::<Pos>(); - world.create_entity().with(Pos(0.0)).build(); + let entity0 = world.create_entity().with(Pos(0.0)).build(); world.create_entity().with(Pos(1.6)).build(); world.create_entity().with(Pos(5.4)).build(); let mut pos = world.write_storage::<Pos>(); + let entities = world.entities(); + // Unlike `join`, the type returned from `lend_join` does not implement + // `Iterator`. Instead, a `next` method is provided that only allows one + // element to be accessed at once. let mut lending = (&mut pos).lend_join(); + // We copy the value out here so the borrow of `lending` is released. let a = lending.next().unwrap().0; + // Here we keep the reference from `lending.next()` alive, so `lending` + // remains exclusively borrowed for the lifetime of `b`. let b = lending.next().unwrap(); - // let d = lending.next().unwrap(); (this rightly fails to compile) - let _c = a + b.0; + // This rightly fails to compile since `b` is used below: + // let d = lending.next().unwrap(); + b.0 = a; + + // Items can be iterated with a `while let` loop: + let mut lending = (&mut pos).lend_join(); + while let Some(pos) = lending.next() { + pos.0 *= 1.5; + } + + // A `for_each` method is also available: + (&mut pos).lend_join().for_each(|pos| { + pos.0 += 1.0; + }); + + // Finally, there is one bonus feature which `.join()` can't soundly provide. + let mut lending = (&mut pos).lend_join(); + // That is, there is a method to get the joined result for a particular + // entity: + if let Some(pos) = lending.get(entity0, &entities) { + pos.0 += 5.0; + } } diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index b4d482b50..e4b30aa8e 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -3,8 +3,36 @@ use hibitset::{BitIter, BitSetLike}; use crate::world::{Entities, Entity, Index}; -/// Like the `Join` trait except this is similar to a lending iterator in that -/// only one item can be accessed at once. +/// Like the [`Join`](super::Join) trait except this is similar to a [lending +/// iterator](https://blog.rust-lang.org/2021/08/03/GATs-stabilization-push.html#so-what-are-gats) +/// in that only one item can be accessed at once. +/// +/// The type returned from [`.lend_join()`](LendJoin::lend_join), +/// [`JoinLendIter`] does not implement `Iterator` like +/// [`JoinIter`](super::JoinIter) does. Instead, it provides a +/// [`next`](JoinLendIter::next) method that exclusively borrows the +/// `JoinLendIter` for the lifetime of the returned value. +/// +/// This limitation allows freedom for more patterns to be soundly implemented. +/// Thus, `LendJoin` acts as the "lowest common denominator" of the +/// `Join`-like traits (i.e. if something can implement `Join` it can also +/// implement `LendJoin`). +/// +/// In particular, [`Entries`](crate::storage::Entries) only implements +/// `LendJoin`. As another example, +/// [`RestrictedStorage`](crate::storage::RestrictedStorage) implements both +/// `Join` and `LendJoin`.
However, for joining mutably, lend join variant +/// produces +/// [`PairedStorageWriteExclusive`](crate::storage::PairedStorageWriteExclusive) +/// values which have `get_other`/`get_other_mut` methods that aren't provided +/// by [`PairedStorageWriteShared`](crate::storage::PairedStorageWriteShared). +/// +/// Finally, these limitations allow providing the [`JoinLendIter::get`] method +/// which can be useful to get a set of components from an entity without +/// calling `get` individually on each storage (see the example in that method's +/// docs). +/// +/// Also see the `lend_join` example. /// /// # Safety /// @@ -121,7 +149,7 @@ pub unsafe trait LendJoin { /// * A call to `get` must be preceded by a check if `id` is part of /// `Self::Mask` /// * Multiple calls with the same `id` are not allowed, for a particular - /// instance of the values from [`open`](Join::open). Unless this type + /// instance of the values from [`open`](LendJoin::open). Unless this type /// implements the unsafe trait [`RepeatableLendGet`]. unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> where diff --git a/src/join/maybe.rs b/src/join/maybe.rs index cce4bb04b..819e3fd81 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -9,7 +9,7 @@ use crate::world::Index; /// contained `T` does and that yields all indices, returning `None` for all /// missing elements and `Some(T)` for found elements. /// -/// For usage see [`LendJoin::maybe()`](LendJoin::Maybe). +/// For usage see [`LendJoin::maybe()`](LendJoin::maybe). /// /// WARNING: Do not have a join of only `MaybeJoin`s. Otherwise the join will /// iterate over every single index of the bitset. If you want a join with diff --git a/src/storage/entry.rs b/src/storage/entry.rs index f039263c0..5b3bf0a92 100644 --- a/src/storage/entry.rs +++ b/src/storage/entry.rs @@ -54,8 +54,8 @@ where } } - /// Returns a `Join`-able structure that yields all indices, returning - /// `Entry` for all elements + /// Returns a [`LendJoin`]-able structure that yields all indices, returning + /// [`StorageEntry`] for all elements /// /// WARNING: Do not have a join of only `Entries`s. Otherwise the join will /// iterate over every single index of the bitset. If you want a join with @@ -129,8 +129,10 @@ where } } -/// `Join`-able structure that yields all indices, returning `Entry` for all -/// elements. +/// [`LendJoin`]-able structure that yields all indices, +/// returning [`StorageEntry`] for all elements. +/// +/// This can be constructed via [`Storage::entries`]. pub struct Entries<'a, 'b: 'a, T: 'a, D: 'a>(&'a mut Storage<'b, T, D>); // SAFETY: We return a mask containing all items, but check the original mask in diff --git a/src/storage/mod.rs b/src/storage/mod.rs index d9ef2d97a..03f70b730 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -11,13 +11,12 @@ pub use self::{ RestrictedStorage, SharedGetOnly, }, storages::{ - BTreeStorage, DefaultVecStorage, DenseVecStorage, HashMapStorage, NullStorage, VecStorage, + BTreeStorage, DefaultVecStorage, DenseVecStorage, HashMapStorage, NullStorage, SliceAccess, + VecStorage, }, track::{ComponentEvent, Tracked}, }; -use self::storages::SliceAccess; - use std::{ self, marker::PhantomData, diff --git a/src/storage/storages.rs b/src/storage/storages.rs index f36a01d2a..d293d6f8a 100644 --- a/src/storage/storages.rs +++ b/src/storage/storages.rs @@ -17,9 +17,12 @@ use crate::{ /// which wraps `T`. The associated type `Element` identifies what /// the slices will contain. 
pub trait SliceAccess { + /// The type of the underlying data elements. type Element; + /// Returns a slice of the underlying storage. fn as_slice(&self) -> &[Self::Element]; + /// Returns a mutable slice of the underlying storage. fn as_mut_slice(&mut self) -> &mut [Self::Element]; } From 5aa5abf7e13708ace84df5bfb5a3a9111a5af9b4 Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 28 Feb 2023 00:07:50 -0500 Subject: [PATCH 29/47] Add test for when Send is implemented for PairedStorageWriteShared --- src/storage/restrict.rs | 62 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index 76d9f0ca9..10917e0a2 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -451,6 +451,68 @@ where { } +/// Compile test for when `Send` is implemented. +/// ```rust,compile_fail +/// use specs::prelude::*; +/// +/// struct Pos(f32); +/// impl Component for Pos { +/// type Storage = FlaggedStorage; +/// } +/// +/// fn main() { +/// let mut world = World::new(); +/// world.register::(); +/// world.create_entity().with(Pos(0.0)).build(); +/// world.create_entity().with(Pos(1.6)).build(); +/// world.create_entity().with(Pos(5.4)).build(); +/// let mut pos = world.write_storage::(); +/// +/// let mut restricted_pos = pos.restrict_mut(); +/// let mut joined = (&mut restricted_pos).join(); +/// let mut a = joined.next().unwrap(); +/// let mut b = joined.next().unwrap(); +/// // unsound since Pos::Storage isn't a DistinctStorage +/// std::thread::scope(|s| { +/// s.spawn(move || { +/// a.get_mut(); +/// }); +/// }); +/// b.get_mut(); +/// } +/// ``` +/// Should compile since `VecStorage` is a `DistinctStorage`. +/// ```rust +/// use specs::prelude::*; +/// +/// struct Pos(f32); +/// impl Component for Pos { +/// type Storage = VecStorage; +/// } +/// +/// fn main() { +/// let mut world = World::new(); +/// world.register::(); +/// world.create_entity().with(Pos(0.0)).build(); +/// world.create_entity().with(Pos(1.6)).build(); +/// world.create_entity().with(Pos(5.4)).build(); +/// let mut pos = world.write_storage::(); +/// +/// let mut restricted_pos = pos.restrict_mut(); +/// let mut joined = (&mut restricted_pos).join(); +/// let mut a = joined.next().unwrap(); +/// let mut b = joined.next().unwrap(); +/// // sound since Pos::Storage is a DistinctStorage +/// std::thread::scope(|s| { +/// s.spawn(move || { +/// a.get_mut(); +/// }); +/// }); +/// b.get_mut(); +/// } +/// ``` +fn _dummy() {} + /// Pairs a storage with an index, meaning that the index is guaranteed to /// exist. 
/// From 2beb089b142b2926a745ce6e0c36795303aa8917 Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 28 Feb 2023 00:12:23 -0500 Subject: [PATCH 30/47] Switch to git deps from local path deps for shred and hibitset --- .config/nextest.toml | 3 +++ .github/workflows/ci.yml | 17 ++++++++++++++++- Cargo.toml | 6 +++--- miri.sh | 21 +++++++++++++++++++++ 4 files changed, 43 insertions(+), 4 deletions(-) create mode 100644 .config/nextest.toml create mode 100755 miri.sh diff --git a/.config/nextest.toml b/.config/nextest.toml new file mode 100644 index 000000000..59508d7e9 --- /dev/null +++ b/.config/nextest.toml @@ -0,0 +1,3 @@ +[profile.default-miri] +slow-timeout = { period = "30s", terminate-after = 1 } +fail-fast = false diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 115296d26..57e036861 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,7 +27,7 @@ jobs: os: [macos-latest, windows-latest, ubuntu-latest] toolchain: [stable, beta, nightly, 1.65.0] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # install the toolchain we are going to compile and test with - name: install ${{ matrix.toolchain }} toolchain @@ -77,3 +77,18 @@ jobs: # - run: mdbook test -L ./target/debug/deps docs/book # if: matrix.toolchain == 'stable' && matrix.os == 'ubuntu-latest' + + miri: + name: "Miri" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Install Miri + run: | + rustup toolchain install nightly --component miri + rustup override set nightly + cargo miri setup + - name: Install latest nextest release + uses: taiki-e/install-action@nextest + - name: Test with Miri + run: ./miri.sh diff --git a/Cargo.toml b/Cargo.toml index 38e4c708d..8afd7831e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,11 +24,11 @@ ahash = "0.7.6" crossbeam-queue = "0.3" # waiting on PR and new version to be published # hibitset = { version = "0.6.3", default-features = false } -hibitset = { path = "../hibitset", default-features = false } +hibitset = { git = "https://github.com/amethyst/hibitset", default-features = false } log = "0.4.8" # waiting on PR and new version to be published # shred = { version = "0.14.1", default-features = false } -shred = { path = "../shred", default-features = false } +shred = { git = "https://github.com/Imberflur/shred", branch = "metatable-fix", default-features = false } shrev = "1.1.1" tuple_utils = "0.4.0" nougat = "0.2.3" @@ -58,7 +58,7 @@ ron = "0.7.1" rand = "0.8" serde_json = "1.0.48" # shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } -shred = { path = "../shred", default-features = false, features = ["shred-derive"] } +shred = { git = "https://github.com/Imberflur/shred", branch = "metatable-fix", default-features = false, features = ["shred-derive"]} specs-derive = { path = "specs-derive", version = "0.4.1" } [[example]] diff --git a/miri.sh b/miri.sh new file mode 100755 index 000000000..e36b58311 --- /dev/null +++ b/miri.sh @@ -0,0 +1,21 @@ +#!/bin/bash +# +# Convenience script for running Miri, also the same one that the CI runs! 
+ +# use half the available threads since miri can be a bit memory hungry +test_threads=$((($(nproc) - 1) / 2 + 1)) +echo using $test_threads threads + +# filters out long running tests +filter='not (test(100k) | test(map_test::wrap) | test(map_test::insert_same_key) | test(=mixed_create_merge)| test(=par_join_many_entities_and_systems) | test(=stillborn_entities))' +echo "using filter: \"$filter\"" + +# Miri currently reports leaks in some tests so we disable that check +# here (might be due to ptr-int-ptr in crossbeam-epoch so might be +# resolved in future versions of that crate). +MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-ignore-leaks" \ + cargo +nightly miri nextest run \ + -E "$filter" \ + --test-threads="$test_threads" \ + # use nocapture or run miri directly to see warnings from miri + #--nocapture From cbfa283aa80670a9dc6fef52f7c9754ddf7a4a5b Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 28 Feb 2023 21:23:13 -0500 Subject: [PATCH 31/47] Order example entries by name --- Cargo.toml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 8afd7831e..ce349663d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -61,27 +61,28 @@ serde_json = "1.0.48" shred = { git = "https://github.com/Imberflur/shred", branch = "metatable-fix", default-features = false, features = ["shred-derive"]} specs-derive = { path = "specs-derive", version = "0.4.1" } +[[example]] +name = "async" [[example]] name = "basic" - [[example]] -name = "full" - +name = "bitset" [[example]] name = "cluster_bomb" - [[example]] -name = "bitset" - +name = "full" [[example]] -name = "track" - +name = "lend_join" +test = true [[example]] name = "ordered_track" - [[example]] name = "saveload" required-features = ["serde"] +[[example]] +name = "slices" +[[example]] +name = "track" [[bench]] name = "benches_main" From 819d6ca0e3c81ead427493805a00e9faee00ff1d Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 28 Feb 2023 21:38:21 -0500 Subject: [PATCH 32/47] Tweak rustfmt config --- .rustfmt.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.rustfmt.toml b/.rustfmt.toml index b299429d8..77fdc7039 100644 --- a/.rustfmt.toml +++ b/.rustfmt.toml @@ -1,9 +1,8 @@ hard_tabs = false -imports_granularity = "Crate" reorder_impl_items = true use_field_init_shorthand = true use_try_shorthand = true format_code_in_doc_comments = true wrap_comments = true -edition = "2018" +edition = "2021" version = "Two" From 2400172ffa3d32bc4b090e8325eab8581e350520 Mon Sep 17 00:00:00 2001 From: Imbris Date: Tue, 28 Feb 2023 21:38:28 -0500 Subject: [PATCH 33/47] fmt --- src/changeset.rs | 14 +++++++------- src/storage/mod.rs | 29 +++++++++++++++-------------- src/storage/restrict.rs | 19 +++++++++---------- src/world/entity.rs | 4 ++-- 4 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/changeset.rs b/src/changeset.rs index 9babb8c0f..52ce9b878 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -157,13 +157,13 @@ unsafe impl<'a, T> Join for &'a mut ChangeSet { unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { // SAFETY: - // * Since we require that the mask was checked, an element for - // `id` must have been inserted without being removed. - // * We also require that there are no subsequent calls with the same - // `id` for this instance of the values from `open`, so there are no - // extant references for the element corresponding to this `id`. 
- // * Since we have an exclusive reference to `Self::Value`, we know this - // isn't being called from multiple threads at once. + // * Since we require that the mask was checked, an element for `id` must have + // been inserted without being removed. + // * We also require that there are no subsequent calls with the same `id` for + // this instance of the values from `open`, so there are no extant references + // for the element corresponding to this `id`. + // * Since we have an exclusive reference to `Self::Value`, we know this isn't + // being called from multiple threads at once. unsafe { SharedGetMutOnly::get_mut(value, id) } } } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 03f70b730..86d7b4b49 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -676,13 +676,13 @@ where unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type { // SAFETY: - // * Since we require that the mask was checked, an element for - // `id` must have been inserted without being removed. - // * We also require that there are no subsequent calls with the same - // `id` for this instance of the values from `open`, so there are no - // extant references for the element corresponding to this `id`. - // * Since we have an exclusive reference to `Self::Value`, we know this - // isn't being called from multiple threads at once. + // * Since we require that the mask was checked, an element for `id` must have + // been inserted without being removed. + // * We also require that there are no subsequent calls with the same `id` for + // this instance of the values from `open`, so there are no extant references + // for the element corresponding to this `id`. + // * Since we have an exclusive reference to `Self::Value`, we know this isn't + // being called from multiple threads at once. unsafe { SharedGetMutOnly::get_mut(value, id) } } } @@ -713,13 +713,13 @@ where unsafe fn get(value: &Self::Value, id: Index) -> Self::Type { // SAFETY: - // * Since we require that the mask was checked, an element for - // `id` must have been inserted without being removed. + // * Since we require that the mask was checked, an element for `id` must have + // been inserted without being removed. // * We also require that the returned value is no longer alive before - // subsequent calls with the same `id`, so there are no extant - // references that were obtained with the same `id`. - // * `T::Storage` implements the unsafe trait `DistinctStorage` so it is - // safe to call this from multiple threads at once. + // subsequent calls with the same `id`, so there are no extant references that + // were obtained with the same `id`. + // * `T::Storage` implements the unsafe trait `DistinctStorage` so it is safe to + // call this from multiple threads at once. unsafe { SharedGetMutOnly::get_mut(value, id) } } } @@ -753,7 +753,8 @@ where /// Allows forcing mutable access to be explicit. Useful to implement a flagged /// storage where it is easier to discover sites where components are marked as /// mutated. Of course, individual storages can use an associated `AccessMut` -/// type that also implements `DerefMut`, but this provides the common denominator. +/// type that also implements `DerefMut`, but this provides the common +/// denominator. pub trait AccessMut: core::ops::Deref { /// This may generate a mutation event for certain flagged storages. 
fn access_mut(&mut self) -> &mut Self::Target; diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index 10917e0a2..4bd9c3136 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -505,7 +505,7 @@ where /// // sound since Pos::Storage is a DistinctStorage /// std::thread::scope(|s| { /// s.spawn(move || { -/// a.get_mut(); +/// a.get_mut(); /// }); /// }); /// b.get_mut(); @@ -572,15 +572,14 @@ where /// Gets the component related to the current entity. pub fn get_mut(&mut self) -> AccessMutReturn<'_, C> { // SAFETY: - // * This is constructed in the `get` methods of `Join`/`ParJoin` above. - // These all require that the mask has been checked. - // * We also require that either there are no subsequent calls with the - // same `id` (`Join`) or that there are not extant references from a - // call with the same `id` (`ParJoin`). Thus, `id` is unique among - // the instances of `Self` created by the join `get` methods. We then - // tie the lifetime of the returned value to the exclusive borrow of - // self which prevents this or `Self::get` from being called while the - // returned reference is still alive. + // * This is constructed in the `get` methods of `Join`/`ParJoin` above. These + // all require that the mask has been checked. + // * We also require that either there are no subsequent calls with the same + // `id` (`Join`) or that there are not extant references from a call with the + // same `id` (`ParJoin`). Thus, `id` is unique among the instances of `Self` + // created by the join `get` methods. We then tie the lifetime of the returned + // value to the exclusive borrow of self which prevents this or `Self::get` + // from being called while the returned reference is still alive. unsafe { SharedGetOnly::get_mut(&self.storage, self.index) } } } diff --git a/src/world/entity.rs b/src/world/entity.rs index 071f6203e..daa88afaf 100644 --- a/src/world/entity.rs +++ b/src/world/entity.rs @@ -347,8 +347,8 @@ unsafe impl<'a> LendJoin for &'a EntitiesRes { } } -// SAFETY: ::get does not rely on only being called once -// with a particular ID. +// SAFETY: ::get does not rely on only being called +// once with a particular ID. unsafe impl<'a> RepeatableLendGet for &'a EntitiesRes {} // SAFETY: It is safe to retrieve elements with any `id` regardless of the mask. From 6736f1510fcd8d5a39cbb3237f245938895e957f Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 22 Jul 2023 21:58:31 -0400 Subject: [PATCH 34/47] Fix and simply insert code unwinding handling since allocation is not actually guaranteed to abort on failure --- src/storage/mod.rs | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 86d7b4b49..d981b78b8 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -410,16 +410,10 @@ where // SAFETY: The mask was previously empty, so it is safe to // insert. We immediately add the value to the mask below and // unwinding from the `insert` call means that we don't need to - // include the value in the mask. `BitSet::add` won't unwind on 32-bit - // and 64-bit platforms since OOM aborts and any overflow in capacity - // calculations (which panics) won't occur for resizing to hold the bit - // at `id = u32::MAX`. We rely on `BitSet::add` not having any other - // cases where it panics. On 16-bit platforms we insert a guard to abort - // if a panic occurs (although I suspect we will run out of memory - // before that). + // include the value in the mask. 
If adding to the mask unwinds we + abort. unsafe { self.data.inner.insert(id, value) }; - const _ASSERT_INDEX_IS_U32: Index = 0u32; - if cfg!(panic = "abort") || usize::BITS >= 32 { + if cfg!(panic = "abort") { self.data.mask.add(id); } else { struct AbortOnDrop; impl Drop for AbortOnDrop { fn drop(&mut self) { std::process::abort() } } let guard = AbortOnDrop; self.data.mask.add(id); core::mem::forget(guard); } } From a31e7818c13fdb22f9914fb603aa00b96e48f14f Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 23 Jul 2023 17:00:19 -0400 Subject: [PATCH 35/47] Add nightly feature to enable shred/nightly for a more efficient MetaTable implementation. --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index ce349663d..cc673e912 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,6 +45,7 @@ uuid_entity = ["uuid", "serde"] stdweb = ["uuid/js"] storage-event-control = [] derive = ["shred-derive", "specs-derive"] +nightly = ["shred/nightly"] shred-derive = ["shred/shred-derive"] From 1fc3d1f33b22894338852d3e51d95f64aa088be2 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sun, 23 Jul 2023 22:56:10 -0400 Subject: [PATCH 36/47] Fix typo in docs --- src/world/entity.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/world/entity.rs b/src/world/entity.rs index daa88afaf..8151c345d 100644 --- a/src/world/entity.rs +++ b/src/world/entity.rs @@ -96,7 +96,7 @@ impl Allocator { Ok(()) } - /// Kills and entity atomically (will be updated when the allocator is + /// Kills an entity atomically (will be updated when the allocator is /// maintained). pub fn kill_atomic(&self, e: Entity) -> Result<(), WrongGeneration> { if !self.is_alive(e) { From 5abd46a149dabaf332fecf770a456f2764ed9c48 Mon Sep 17 00:00:00 2001 From: Imbris Date: Mon, 24 Jul 2023 22:08:47 -0400 Subject: [PATCH 37/47] Fix various compilation warnings, and change the abort on unwinding during insertion into removing the inserted component (mainly to make the should_panic test work) --- .github/workflows/deny.yml | 5 ++- examples/slices.rs | 2 +- examples/track.rs | 2 +- src/join/maybe.rs | 4 ++- src/storage/mod.rs | 18 ++++++---- src/storage/restrict.rs | 69 ++++++++++++++++++-------------------- src/storage/tests.rs | 2 +- 7 files changed, 55 insertions(+), 47 deletions(-) diff --git a/.github/workflows/deny.yml b/.github/workflows/deny.yml index 30b84e58b..09c29af00 100644 --- a/.github/workflows/deny.yml +++ b/.github/workflows/deny.yml @@ -15,7 +15,10 @@ jobs: - bans licenses sources # Prevent sudden announcement of a new advisory from failing ci: - continue-on-error: ${{ matrix.checks == 'advisories' }} + # continue-on-error: ${{ matrix.checks == 'advisories' }} + # TODO: temp to allow git deps + continue-on-error: true + steps: - uses: actions/checkout@v2 diff --git a/examples/slices.rs b/examples/slices.rs index 6f81557f3..99a023731 100644 --- a/examples/slices.rs +++ b/examples/slices.rs @@ -42,7 +42,7 @@ impl<'a> System<'a> for SysA { // Both the `Pos` and `Vel` components use `DefaultVecStorage`, which supports // `as_slice()` and `as_mut_slice()`. This lets us access components without // indirection.
- let mut pos_slice = pos.as_mut_slice(); + let pos_slice = pos.as_mut_slice(); let vel_slice = vel.as_slice(); // Note that an entity which has position but not velocity will still have diff --git a/examples/track.rs b/examples/track.rs index c32b6de83..ef9f08d35 100644 --- a/examples/track.rs +++ b/examples/track.rs @@ -70,7 +70,7 @@ impl<'a> System<'a> for SysB { fn run(&mut self, (entities, mut tracked): Self::SystemData) { for (entity, mut restricted) in (&entities, &mut tracked.restrict_mut()).join() { if entity.id() % 2 == 0 { - let mut comp = restricted.get_mut(); + let comp = restricted.get_mut(); comp.0 += 1; } } diff --git a/src/join/maybe.rs b/src/join/maybe.rs index 819e3fd81..e7730bc5f 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -1,6 +1,8 @@ #[nougat::gat(Type)] use super::LendJoin; -use super::{Join, ParJoin, RepeatableLendGet}; +#[cfg(feature = "parallel")] +use super::ParJoin; +use super::{Join, RepeatableLendGet}; use hibitset::{BitSetAll, BitSetLike}; use crate::world::Index; diff --git a/src/storage/mod.rs b/src/storage/mod.rs index d981b78b8..a3f293c46 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -411,19 +411,25 @@ where // insert. We immediately add the value to the mask below and // unwinding from the `insert` call means that we don't need to // include the value in the mask. If adding to the mask unwinds we - // abort. + // remove the value via a drop guard. + // NOTE: We rely on any panics in `Bitset::add` leaving the bitset in + // the same state as before `add` was called! unsafe { self.data.inner.insert(id, value) }; if cfg!(panic = "abort") { self.data.mask.add(id); } else { - struct AbortOnDrop; - impl Drop for AbortOnDrop { + struct RemoveOnDrop<'a, T: Component>(&'a mut MaskedStorage, Index); + impl<'a, T: Component> Drop for RemoveOnDrop<'a, T> { fn drop(&mut self) { - std::process::abort() + // SAFETY: We just inserted a value here above and failed to + // add it to the bitset. + unsafe { + self.0.inner.remove(self.1); + } } } - let guard = AbortOnDrop; - self.data.mask.add(id); + let guard = RemoveOnDrop(&mut self.data, id); + guard.0.mask.add(id); core::mem::forget(guard); } } diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index 4bd9c3136..910343193 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -356,6 +356,7 @@ where // in the wrapped `Storage`. // // Iterating the mask does not repeat indices. 
+#[cfg(feature = "parallel")] unsafe impl<'rf, 'st: 'rf, C, S> ParJoin for &'rf RestrictedStorage<'rf, 'st, C, S> where C: Component, @@ -460,26 +461,24 @@ where /// type Storage = FlaggedStorage; /// } /// -/// fn main() { -/// let mut world = World::new(); -/// world.register::(); -/// world.create_entity().with(Pos(0.0)).build(); -/// world.create_entity().with(Pos(1.6)).build(); -/// world.create_entity().with(Pos(5.4)).build(); -/// let mut pos = world.write_storage::(); +/// let mut world = World::new(); +/// world.register::(); +/// world.create_entity().with(Pos(0.0)).build(); +/// world.create_entity().with(Pos(1.6)).build(); +/// world.create_entity().with(Pos(5.4)).build(); +/// let mut pos = world.write_storage::(); /// -/// let mut restricted_pos = pos.restrict_mut(); -/// let mut joined = (&mut restricted_pos).join(); -/// let mut a = joined.next().unwrap(); -/// let mut b = joined.next().unwrap(); -/// // unsound since Pos::Storage isn't a DistinctStorage -/// std::thread::scope(|s| { -/// s.spawn(move || { -/// a.get_mut(); -/// }); +/// let mut restricted_pos = pos.restrict_mut(); +/// let mut joined = (&mut restricted_pos).join(); +/// let mut a = joined.next().unwrap(); +/// let mut b = joined.next().unwrap(); +/// // unsound since Pos::Storage isn't a DistinctStorage +/// std::thread::scope(|s| { +/// s.spawn(move || { +/// a.get_mut(); /// }); -/// b.get_mut(); -/// } +/// }); +/// b.get_mut(); /// ``` /// Should compile since `VecStorage` is a `DistinctStorage`. /// ```rust @@ -490,26 +489,24 @@ where /// type Storage = VecStorage; /// } /// -/// fn main() { -/// let mut world = World::new(); -/// world.register::(); -/// world.create_entity().with(Pos(0.0)).build(); -/// world.create_entity().with(Pos(1.6)).build(); -/// world.create_entity().with(Pos(5.4)).build(); -/// let mut pos = world.write_storage::(); +/// let mut world = World::new(); +/// world.register::(); +/// world.create_entity().with(Pos(0.0)).build(); +/// world.create_entity().with(Pos(1.6)).build(); +/// world.create_entity().with(Pos(5.4)).build(); +/// let mut pos = world.write_storage::(); /// -/// let mut restricted_pos = pos.restrict_mut(); -/// let mut joined = (&mut restricted_pos).join(); -/// let mut a = joined.next().unwrap(); -/// let mut b = joined.next().unwrap(); -/// // sound since Pos::Storage is a DistinctStorage -/// std::thread::scope(|s| { -/// s.spawn(move || { -/// a.get_mut(); -/// }); +/// let mut restricted_pos = pos.restrict_mut(); +/// let mut joined = (&mut restricted_pos).join(); +/// let mut a = joined.next().unwrap(); +/// let mut b = joined.next().unwrap(); +/// // sound since Pos::Storage is a DistinctStorage +/// std::thread::scope(|s| { +/// s.spawn(move || { +/// a.get_mut(); /// }); -/// b.get_mut(); -/// } +/// }); +/// b.get_mut(); /// ``` fn _dummy() {} diff --git a/src/storage/tests.rs b/src/storage/tests.rs index b25a053e6..44bc43097 100644 --- a/src/storage/tests.rs +++ b/src/storage/tests.rs @@ -944,7 +944,7 @@ mod test { assert!(!removed.contains(entity.id())); } - for (_, mut comp) in (&w.entities(), &mut s1).join() { + for (_, comp) in (&w.entities(), &mut s1).join() { comp.0 += 1; } From de30c85b3a46680f925581bed66898f6d3c48958 Mon Sep 17 00:00:00 2001 From: Imbris Date: Wed, 26 Jul 2023 00:50:12 -0400 Subject: [PATCH 38/47] Remove apparently unnecessary "where Self: 'next" on LendJoin::get --- src/bitset.rs | 3 +-- src/changeset.rs | 15 +++------------ src/join/lend_join.rs | 4 +--- src/join/maybe.rs | 5 +---- src/join/mod.rs | 9 +++------ 
src/storage/drain.rs | 5 +---- src/storage/entry.rs | 5 +---- src/storage/mod.rs | 16 +++------------- src/storage/restrict.rs | 10 ++-------- src/world/entity.rs | 5 +---- 10 files changed, 17 insertions(+), 60 deletions(-) diff --git a/src/bitset.rs b/src/bitset.rs index ab06bc0e9..e2c899942 100644 --- a/src/bitset.rs +++ b/src/bitset.rs @@ -31,8 +31,7 @@ macro_rules! define_bit_join { } unsafe fn get<'next>(_: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, + { id } diff --git a/src/changeset.rs b/src/changeset.rs index 52ce9b878..d4e33505e 100644 --- a/src/changeset.rs +++ b/src/changeset.rs @@ -129,10 +129,7 @@ unsafe impl<'a, T> LendJoin for &'a mut ChangeSet { (&self.mask, &mut self.inner) } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> { // SAFETY: Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. unsafe { value.get_mut(id) } @@ -183,10 +180,7 @@ unsafe impl<'a, T> LendJoin for &'a ChangeSet { (&self.mask, &self.inner) } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> { // SAFETY: Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. unsafe { value.get(id) } @@ -235,10 +229,7 @@ unsafe impl LendJoin for ChangeSet { (self.mask, self.inner) } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> { // NOTE: This impl is the main reason that `RepeatableLendGet` exists // since it moves the value out of the backing storage and thus can't // be called multiple times with the same ID! diff --git a/src/join/lend_join.rs b/src/join/lend_join.rs index e4b30aa8e..a909eb7ba 100644 --- a/src/join/lend_join.rs +++ b/src/join/lend_join.rs @@ -151,9 +151,7 @@ pub unsafe trait LendJoin { /// * Multiple calls with the same `id` are not allowed, for a particular /// instance of the values from [`open`](LendJoin::open). Unless this type /// implements the unsafe trait [`RepeatableLendGet`]. - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next; + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next>; /// If this `LendJoin` typically returns all indices in the mask, then /// iterating over only it or combined with other joins that are also diff --git a/src/join/maybe.rs b/src/join/maybe.rs index e7730bc5f..7ca6c5480 100644 --- a/src/join/maybe.rs +++ b/src/join/maybe.rs @@ -38,10 +38,7 @@ where (BitSetAll, (mask, value)) } - unsafe fn get<'next>((mask, value): &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>((mask, value): &'next mut Self::Value, id: Index) -> Self::Type<'next> { if mask.contains(id) { // SAFETY: The mask was just checked for `id`. Requirement to not // call with the same ID more than once (unless `RepeatableLendGet` diff --git a/src/join/mod.rs b/src/join/mod.rs index 7b41d6ca2..2a3ed6f75 100644 --- a/src/join/mod.rs +++ b/src/join/mod.rs @@ -218,8 +218,7 @@ macro_rules! 
define_open { #[allow(non_snake_case)] unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> Self::Type<'next> - where - Self: 'next, + { let &mut ($(ref mut $from,)*) = v; // SAFETY: `get` is safe to call as the caller must have checked @@ -389,8 +388,7 @@ macro_rules! immutable_resource_join { } unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> Self::Type<'next> - where - Self: 'next, + { // SAFETY: The mask of `Self` and `T` are identical, thus a // check to `Self`'s mask (which is required) is equal to a @@ -509,8 +507,7 @@ macro_rules! mutable_resource_join { } unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> Self::Type<'next> - where - Self: 'next, + { // SAFETY: The mask of `Self` and `T` are identical, thus a // check to `Self`'s mask (which is required) is equal to a diff --git a/src/storage/drain.rs b/src/storage/drain.rs index 294b996f5..9cb1dbaf4 100644 --- a/src/storage/drain.rs +++ b/src/storage/drain.rs @@ -36,10 +36,7 @@ where (mask, self.data) } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> T - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> T { value.remove(id).expect("Tried to access same index twice") } } diff --git a/src/storage/entry.rs b/src/storage/entry.rs index 5b3bf0a92..063619546 100644 --- a/src/storage/entry.rs +++ b/src/storage/entry.rs @@ -151,10 +151,7 @@ where (BitSetAll, self.0) } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> { value.entry_inner(id) } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index a3f293c46..0316cf76f 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -70,11 +70,7 @@ unsafe impl<'a> LendJoin for AntiStorage<'a> { (BitSetNot(self.0), ()) } - unsafe fn get<'next>(_: &'next mut (), _: Index) - where - Self: 'next, - { - } + unsafe fn get<'next>(_: &'next mut (), _: Index) {} } // SAFETY: ::get does nothing. @@ -492,10 +488,7 @@ where (&self.data.mask, &self.data.inner) } - unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> &'a T - where - Self: 'next, - { + unsafe fn get<'next>(v: &'next mut Self::Value, i: Index) -> &'a T { // SAFETY: Since we require that the mask was checked, an element for // `i` must have been inserted without being removed. unsafe { v.get(i) } @@ -579,10 +572,7 @@ where self.data.open_mut() } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> { // SAFETY: Since we require that the mask was checked, an element for // `id` must have been inserted without being removed. unsafe { value.get_mut(id) } diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index 910343193..f820540f4 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -116,10 +116,7 @@ where (bitset, (self.data.borrow(), self.entities, bitset)) } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> { // NOTE: Methods on this type rely on safety requiments of this method. 
PairedStorageRead { index: id, @@ -161,10 +158,7 @@ where (bitset, (self.data.borrow_mut(), self.entities, bitset)) } - unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> - where - Self: 'next, - { + unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> { // NOTE: Methods on this type rely on safety requiments of this method. PairedStorageWriteExclusive { index: id, diff --git a/src/world/entity.rs b/src/world/entity.rs index 8151c345d..3d1064b6f 100644 --- a/src/world/entity.rs +++ b/src/world/entity.rs @@ -334,10 +334,7 @@ unsafe impl<'a> LendJoin for &'a EntitiesRes { (BitSetOr(&self.alloc.alive, &self.alloc.raised), self) } - unsafe fn get<'next>(v: &'next mut &'a EntitiesRes, id: Index) -> Entity - where - Self: 'next, - { + unsafe fn get<'next>(v: &'next mut &'a EntitiesRes, id: Index) -> Entity { let gen = v .alloc .generation(id) From 35da87929b9147ed9ee8e8284f2799e48ea7587b Mon Sep 17 00:00:00 2001 From: Imbris Date: Wed, 26 Jul 2023 01:15:03 -0400 Subject: [PATCH 39/47] Update MSRV to 1.70 since that is apparently necessary to remove "where Self: 'next" from LendJoin::get --- .github/workflows/ci.yml | 2 +- CHANGELOG.md | 2 +- Cargo.toml | 2 +- README.md | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 57e036861..232e3fd40 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,7 +25,7 @@ jobs: fail-fast: true matrix: os: [macos-latest, windows-latest, ubuntu-latest] - toolchain: [stable, beta, nightly, 1.65.0] + toolchain: [stable, beta, nightly, 1.70.0] steps: - uses: actions/checkout@v3 diff --git a/CHANGELOG.md b/CHANGELOG.md index 762ef3186..58ab591a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Unreleased -* MSRV to 1.65.0 [#7xx]. +* MSRV to 1.70.0 [#7xx]. 
[#7xx]: https://github.com/amethyst/specs/pull/7xx diff --git a/Cargo.toml b/Cargo.toml index cc673e912..dd62b3cdf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,7 @@ license = "MIT OR Apache-2.0" authors = ["slide-rs hackers"] include = ["/src", "/examples", "/benches", "/README.md", "/LICENSE-MIT", "/LICENSE-APACHE"] edition = "2021" -rust-version = "1.65.0" +rust-version = "1.70.0" # the `storage_cmp` and `storage_sparse` benches are called from `benches_main` autobenches = false diff --git a/README.md b/README.md index a61bf18b9..a5caf0a4b 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Unlike most other ECS libraries out there, it provides other and you can use barriers to force several stages in system execution * high performance for real-world applications -Minimum Rust version: 1.65 +Minimum Rust version: 1.70 ## [Link to the book][book] From a8c96f8db2573f188330e4babf8f76b13bff0d59 Mon Sep 17 00:00:00 2001 From: Imbris Date: Wed, 26 Jul 2023 01:19:45 -0400 Subject: [PATCH 40/47] Collapse unnecessary lifetimes in restrict storages --- src/storage/restrict.rs | 56 ++++++++++++++++++++--------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs index f820540f4..8c6b51342 100644 --- a/src/storage/restrict.rs +++ b/src/storage/restrict.rs @@ -51,14 +51,14 @@ use crate::{ /// } /// } /// ``` -pub struct RestrictedStorage<'rf, 'st: 'rf, C, S> { +pub struct RestrictedStorage<'rf, C, S> { bitset: &'rf BitSet, data: S, - entities: &'rf Fetch<'st, EntitiesRes>, + entities: &'rf Fetch<'rf, EntitiesRes>, phantom: PhantomData, } -impl<'st, T, D> Storage<'st, T, D> +impl Storage<'_, T, D> where T: Component, D: Deref>, @@ -69,7 +69,7 @@ where /// This is returned as a `ParallelRestriction` version since you can only /// get immutable components with this which is safe for parallel by /// default. - pub fn restrict<'rf>(&'rf self) -> RestrictedStorage<'rf, 'st, T, &T::Storage> { + pub fn restrict<'rf>(&'rf self) -> RestrictedStorage<'rf, T, &T::Storage> { RestrictedStorage { bitset: &self.data.mask, data: &self.data.inner, @@ -79,7 +79,7 @@ where } } -impl<'st, T, D> Storage<'st, T, D> +impl Storage<'_, T, D> where T: Component, D: DerefMut>, @@ -87,7 +87,7 @@ where /// Builds a mutable `RestrictedStorage` out of a `Storage`. Allows /// restricted access to the inner components without allowing /// invalidating the bitset for iteration in `Join`. - pub fn restrict_mut<'rf>(&'rf mut self) -> RestrictedStorage<'rf, 'st, T, &mut T::Storage> { + pub fn restrict_mut<'rf>(&'rf mut self) -> RestrictedStorage<'rf, T, &mut T::Storage> { let (mask, data) = self.data.open_mut(); RestrictedStorage { bitset: mask, @@ -102,14 +102,14 @@ where // contained in the wrapped `Storage`. Iterating the mask does not repeat // indices. #[nougat::gat] -unsafe impl<'rf, 'st: 'rf, C, S> LendJoin for &'rf RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> LendJoin for &'rf RestrictedStorage<'rf, C, S> where C: Component, S: Borrow, { type Mask = &'rf BitSet; - type Type<'next> = PairedStorageRead<'rf, 'st, C>; - type Value = (&'rf C::Storage, &'rf Fetch<'st, EntitiesRes>, &'rf BitSet); + type Type<'next> = PairedStorageRead<'rf, C>; + type Value = (&'rf C::Storage, &'rf Fetch<'rf, EntitiesRes>, &'rf BitSet); unsafe fn open(self) -> (Self::Mask, Self::Value) { let bitset = self.bitset.borrow(); @@ -129,7 +129,7 @@ where // SAFETY: LendJoin::get impl for this type can safely be called multiple times // with the same ID. 
-unsafe impl<'rf, 'st: 'rf, C, S> RepeatableLendGet for &'rf RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> RepeatableLendGet for &'rf RestrictedStorage<'rf, C, S> where C: Component, S: Borrow, @@ -140,16 +140,16 @@ where // contained in the wrapped `Storage`. Iterating the mask does not repeat // indices. #[nougat::gat] -unsafe impl<'rf, 'st: 'rf, C, S> LendJoin for &'rf mut RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> LendJoin for &'rf mut RestrictedStorage<'rf, C, S> where C: Component, S: BorrowMut, { type Mask = &'rf BitSet; - type Type<'next> = PairedStorageWriteExclusive<'next, 'st, C>; + type Type<'next> = PairedStorageWriteExclusive<'next, C>; type Value = ( &'rf mut C::Storage, - &'rf Fetch<'st, EntitiesRes>, + &'rf Fetch<'rf, EntitiesRes>, &'rf BitSet, ); @@ -171,7 +171,7 @@ where // SAFETY: LendJoin::get impl for this type can safely be called multiple times // with the same ID. -unsafe impl<'rf, 'st: 'rf, C, S> RepeatableLendGet for &'rf mut RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> RepeatableLendGet for &'rf mut RestrictedStorage<'rf, C, S> where C: Component, S: BorrowMut, @@ -181,14 +181,14 @@ where // SAFETY: `open` returns references to corresponding mask and storage values // contained in the wrapped `Storage`. Iterating the mask does not repeat // indices. -unsafe impl<'rf, 'st: 'rf, C, S> Join for &'rf RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> Join for &'rf RestrictedStorage<'rf, C, S> where C: Component, S: Borrow, { type Mask = &'rf BitSet; - type Type = PairedStorageRead<'rf, 'st, C>; - type Value = (&'rf C::Storage, &'rf Fetch<'st, EntitiesRes>, &'rf BitSet); + type Type = PairedStorageRead<'rf, C>; + type Value = (&'rf C::Storage, &'rf Fetch<'rf, EntitiesRes>, &'rf BitSet); unsafe fn open(self) -> (Self::Mask, Self::Value) { let bitset = self.bitset.borrow(); @@ -317,7 +317,7 @@ pub use shared_get_only::SharedGetOnly; // SAFETY: `open` returns references to corresponding mask and storage values // contained in the wrapped `Storage`. Iterating the mask does not repeat // indices. -unsafe impl<'rf, 'st: 'rf, C, S> Join for &'rf mut RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> Join for &'rf mut RestrictedStorage<'rf, C, S> where C: Component, S: BorrowMut, @@ -351,15 +351,15 @@ where // // Iterating the mask does not repeat indices. #[cfg(feature = "parallel")] -unsafe impl<'rf, 'st: 'rf, C, S> ParJoin for &'rf RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> ParJoin for &'rf RestrictedStorage<'rf, C, S> where C: Component, S: Borrow, C::Storage: Sync, { type Mask = &'rf BitSet; - type Type = PairedStorageRead<'rf, 'st, C>; - type Value = (&'rf C::Storage, &'rf Fetch<'st, EntitiesRes>, &'rf BitSet); + type Type = PairedStorageRead<'rf, C>; + type Value = (&'rf C::Storage, &'rf Fetch<'rf, EntitiesRes>, &'rf BitSet); unsafe fn open(self) -> (Self::Mask, Self::Value) { let bitset = self.bitset.borrow(); @@ -388,7 +388,7 @@ where // // Iterating the mask does not repeat indices. #[cfg(feature = "parallel")] -unsafe impl<'rf, 'st: 'rf, C, S> ParJoin for &'rf mut RestrictedStorage<'rf, 'st, C, S> +unsafe impl<'rf, C, S> ParJoin for &'rf mut RestrictedStorage<'rf, C, S> where C: Component, S: BorrowMut, @@ -417,11 +417,11 @@ where /// as long as the `PairedStorage` exists. /// /// Yielded by `lend_join`/`join`/`par_join` on `&storage.restrict()`. 
-pub struct PairedStorageRead<'rf, 'st: 'rf, C: Component> { +pub struct PairedStorageRead<'rf, C: Component> { index: Index, storage: &'rf C::Storage, bitset: &'rf BitSet, - entities: &'rf Fetch<'st, EntitiesRes>, + entities: &'rf Fetch<'rf, EntitiesRes>, } /// Pairs a storage with an index, meaning that the index is guaranteed to @@ -508,14 +508,14 @@ fn _dummy() {} /// exist. /// /// Yielded by `lend_join` on `&mut storage.restrict_mut()`. -pub struct PairedStorageWriteExclusive<'rf, 'st: 'rf, C: Component> { +pub struct PairedStorageWriteExclusive<'rf, C: Component> { index: Index, storage: &'rf mut C::Storage, bitset: &'rf BitSet, - entities: &'rf Fetch<'st, EntitiesRes>, + entities: &'rf Fetch<'rf, EntitiesRes>, } -impl<'rf, 'st, C> PairedStorageRead<'rf, 'st, C> +impl<'rf, C> PairedStorageRead<'rf, C> where C: Component, { @@ -575,7 +575,7 @@ where } } -impl<'rf, 'st, C> PairedStorageWriteExclusive<'rf, 'st, C> +impl<'rf, C> PairedStorageWriteExclusive<'rf, C> where C: Component, { From 784c7b078e7ea86c9ed617c7394968a4dcc11056 Mon Sep 17 00:00:00 2001 From: Imbris Date: Fri, 15 Sep 2023 01:42:45 -0400 Subject: [PATCH 41/47] Address todo in deref_flagged impl --- src/storage/deref_flagged.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/storage/deref_flagged.rs b/src/storage/deref_flagged.rs index 994c8accb..f88bcac62 100644 --- a/src/storage/deref_flagged.rs +++ b/src/storage/deref_flagged.rs @@ -74,16 +74,16 @@ impl> UnprotectedStorage for DerefFlag unsafe { self.storage.get(id) } } - unsafe fn get_mut(&mut self, _id: Index) -> Self::AccessMut<'_> { - /*let emit = self.emit_event(); + unsafe fn get_mut(&mut self, id: Index) -> Self::AccessMut<'_> { + let emit = self.emit_event(); FlaggedAccessMut { channel: &mut self.channel, emit, id, - access: self.storage.get_mut(id), + // SAFETY: Requirements passed to caller. 
+ access: unsafe { self.storage.get_mut(id) }, phantom: PhantomData, - }*/ - todo!("adapt to streaming only") + } } unsafe fn insert(&mut self, id: Index, comp: C) { From 5d3bfe798ec945fadcd98ce1ffa7314cb8c10e5e Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 16 Sep 2023 02:53:11 -0400 Subject: [PATCH 42/47] Switch to published versions of hibitset and shred --- Cargo.toml | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index dd62b3cdf..e08b14922 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,13 +22,9 @@ autobenches = false [dependencies] ahash = "0.7.6" crossbeam-queue = "0.3" -# waiting on PR and new version to be published -# hibitset = { version = "0.6.3", default-features = false } -hibitset = { git = "https://github.com/amethyst/hibitset", default-features = false } +hibitset = { version = "0.6.4", default-features = false } log = "0.4.8" -# waiting on PR and new version to be published -# shred = { version = "0.14.1", default-features = false } -shred = { git = "https://github.com/Imberflur/shred", branch = "metatable-fix", default-features = false } +shred = { version = "0.15.0", default-features = false } shrev = "1.1.1" tuple_utils = "0.4.0" nougat = "0.2.3" @@ -58,8 +54,7 @@ criterion = "0.3.1" ron = "0.7.1" rand = "0.8" serde_json = "1.0.48" -# shred = { version = "0.14.1", default-features = false, features = ["shred-derive"] } -shred = { git = "https://github.com/Imberflur/shred", branch = "metatable-fix", default-features = false, features = ["shred-derive"]} +shred = { version = "0.15.0", default-features = false, features = ["shred-derive"] } specs-derive = { path = "specs-derive", version = "0.4.1" } [[example]] From a53d28ee4d0f80e0e6cf1f44e283fe90d16c50e9 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 16 Sep 2023 02:56:24 -0400 Subject: [PATCH 43/47] Remove rustfmt bug todo since things seem to be working now --- src/bitset.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/bitset.rs b/src/bitset.rs index e2c899942..f0ac9f33f 100644 --- a/src/bitset.rs +++ b/src/bitset.rs @@ -2,8 +2,7 @@ //! //! Normally used for `Join`s and filtering entities. 
-// TODO: rustfmt bug (probably fixed in next rust release) -// #![cfg_attr(rustfmt, rustfmt::skip)] +#![cfg_attr(rustfmt, rustfmt::skip)] use hibitset::{AtomicBitSet, BitSet, BitSetAnd, BitSetLike, BitSetNot, BitSetOr, BitSetXor}; From 0a4b99ad17c29df641aa3091fb882c0e0517d013 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 16 Sep 2023 16:54:19 -0400 Subject: [PATCH 44/47] Remove temporary continue-on-error setting --- .github/workflows/deny.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deny.yml b/.github/workflows/deny.yml index 09c29af00..270d4ed88 100644 --- a/.github/workflows/deny.yml +++ b/.github/workflows/deny.yml @@ -15,13 +15,11 @@ jobs: - bans licenses sources # Prevent sudden announcement of a new advisory from failing ci: - # continue-on-error: ${{ matrix.checks == 'advisories' }} - # TODO: temp to allow git deps - continue-on-error: true + continue-on-error: ${{ matrix.checks == 'advisories' }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: EmbarkStudios/cargo-deny-action@v1 with: command: check ${{ matrix.checks }} From 6d4724b9d616df6bf9868ca4b0154eb0871e0542 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 16 Sep 2023 17:19:20 -0400 Subject: [PATCH 45/47] Run no_parallel tests under miri --- miri.sh | 10 ++++++++++ tests/no_parallel.rs | 1 - 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/miri.sh b/miri.sh index e36b58311..a6bb69cca 100755 --- a/miri.sh +++ b/miri.sh @@ -2,6 +2,8 @@ # # Convenience script for running Miri, also the same one that the CI runs! +set -e + # use half the available threads since miri can be a bit memory hungry test_threads=$((($(nproc) - 1) / 2 + 1)) echo using $test_threads threads @@ -19,3 +21,11 @@ MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-ignore-leaks" \ --test-threads="$test_threads" \ # use nocapture or run miri directly to see warnings from miri #--nocapture + +# Run tests only available when parallel feature is disabled. +MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-ignore-leaks" \ + cargo +nightly miri nextest run \ + --no-default-features \ + -E "binary(no_parallel)" \ + --test-threads="$test_threads" + diff --git a/tests/no_parallel.rs b/tests/no_parallel.rs index ee8941b6e..0504af5b6 100644 --- a/tests/no_parallel.rs +++ b/tests/no_parallel.rs @@ -1,5 +1,4 @@ #![cfg(not(feature = "parallel"))] -// TODO: ensure we run these with miri too use std::rc::Rc; From eecf83a4e4c3c9b605a97bd49f5c77047647fb25 Mon Sep 17 00:00:00 2001 From: Imbris Date: Sat, 16 Sep 2023 17:34:33 -0400 Subject: [PATCH 46/47] Update changelog --- CHANGELOG.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 58ab591a1..19149b84e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,12 @@ # Unreleased -* MSRV to 1.70.0 [#7xx]. +* MSRV to 1.70.0 ([#765]) +* Significant refactor of `Join` and related traits to alleviate soundness + issues. Includes introduction of a lending/streaming join via the `LendJoin` + trait which is the new common denominator implemented by joinable types. 
+  ([#765])
 
-[#7xx]: https://github.com/amethyst/specs/pull/7xx
+[#765]: https://github.com/amethyst/specs/pull/765
 
 # 0.19.0 (2023-06-10)
 

From 276450e5515e77ba9acd2d59618b61987ce19dd0 Mon Sep 17 00:00:00 2001
From: Imbris
Date: Sat, 16 Sep 2023 17:55:48 -0400
Subject: [PATCH 47/47] Remove unnecessary borrow calls on bitset reference in
 restricted storage

---
 src/storage/restrict.rs | 26 +++++++++++++++++---------
 1 file changed, 17 insertions(+), 9 deletions(-)

diff --git a/src/storage/restrict.rs b/src/storage/restrict.rs
index 8c6b51342..a15cc8f96 100644
--- a/src/storage/restrict.rs
+++ b/src/storage/restrict.rs
@@ -112,8 +112,10 @@ where
     type Value = (&'rf C::Storage, &'rf Fetch<'rf, EntitiesRes>, &'rf BitSet);
 
     unsafe fn open(self) -> (Self::Mask, Self::Value) {
-        let bitset = self.bitset.borrow();
-        (bitset, (self.data.borrow(), self.entities, bitset))
+        (
+            self.bitset,
+            (self.data.borrow(), self.entities, self.bitset),
+        )
     }
 
     unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> {
@@ -154,8 +156,10 @@ where
     );
 
     unsafe fn open(self) -> (Self::Mask, Self::Value) {
-        let bitset = self.bitset.borrow();
-        (bitset, (self.data.borrow_mut(), self.entities, bitset))
+        (
+            self.bitset,
+            (self.data.borrow_mut(), self.entities, self.bitset),
+        )
     }
 
     unsafe fn get<'next>(value: &'next mut Self::Value, id: Index) -> Self::Type<'next> {
@@ -191,8 +195,10 @@ where
     type Value = (&'rf C::Storage, &'rf Fetch<'rf, EntitiesRes>, &'rf BitSet);
 
     unsafe fn open(self) -> (Self::Mask, Self::Value) {
-        let bitset = self.bitset.borrow();
-        (bitset, (self.data.borrow(), self.entities, bitset))
+        (
+            self.bitset,
+            (self.data.borrow(), self.entities, self.bitset),
+        )
     }
 
     unsafe fn get(value: &mut Self::Value, id: Index) -> Self::Type {
@@ -362,8 +368,10 @@ where
     type Value = (&'rf C::Storage, &'rf Fetch<'rf, EntitiesRes>, &'rf BitSet);
 
     unsafe fn open(self) -> (Self::Mask, Self::Value) {
-        let bitset = self.bitset.borrow();
-        (bitset, (self.data.borrow(), self.entities, bitset))
+        (
+            self.bitset,
+            (self.data.borrow(), self.entities, self.bitset),
+        )
     }
 
     unsafe fn get(value: &Self::Value, id: Index) -> Self::Type {
@@ -603,7 +611,7 @@ where
     ///
     /// Functions similar to the normal `Storage::get` implementation.
     pub fn get_other(&self, entity: Entity) -> Option<&C> {
-        if self.bitset.borrow().contains(entity.id()) && self.entities.is_alive(entity) {
+        if self.bitset.contains(entity.id()) && self.entities.is_alive(entity) {
             // SAFETY: We just checked the mask.
             Some(unsafe { self.storage.get(entity.id()) })
         } else {