diff --git a/Cargo.toml b/Cargo.toml index 7a29538..4d83228 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,2 +1,3 @@ [workspace] -members = ["evmap", "evmap-derive", "left-right"] +members = ["evmap", "left-right"] +exclude = ["evmap-derive"] diff --git a/evmap/src/inner.rs b/evmap/src/inner.rs index a8e09a4..a153e22 100644 --- a/evmap/src/inner.rs +++ b/evmap/src/inner.rs @@ -7,10 +7,12 @@ pub(crate) use indexmap::IndexMap as MapImpl; pub(crate) use std::collections::HashMap as MapImpl; use crate::values::Values; +use crate::ShallowCopy; pub(crate) struct Inner where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { pub(crate) data: MapImpl, S>, @@ -22,7 +24,8 @@ impl fmt::Debug for Inner where K: Eq + Hash + fmt::Debug, S: BuildHasher, - V: fmt::Debug, + V: ShallowCopy, + V::Target: fmt::Debug, M: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -38,6 +41,7 @@ impl Clone for Inner where K: Eq + Hash + Clone, S: BuildHasher + Clone, + V: ShallowCopy, M: Clone, { fn clone(&self) -> Self { @@ -56,6 +60,7 @@ where impl Inner where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { pub fn with_hasher(m: M, hash_builder: S) -> Self { diff --git a/evmap/src/lib.rs b/evmap/src/lib.rs index ebaae98..1f4037e 100644 --- a/evmap/src/lib.rs +++ b/evmap/src/lib.rs @@ -216,6 +216,7 @@ pub use crate::read::{MapReadRef, ReadGuardIter, ReadHandle, ReadHandleFactory}; pub mod shallow_copy; pub use crate::shallow_copy::ShallowCopy; +use shallow_copy::MaybeShallowCopied; // Expose `ReadGuard` since it has useful methods the user will likely care about. #[doc(inline)] @@ -225,9 +226,9 @@ pub use left_right::ReadGuard; /// /// The predicate function is called once for each distinct value, and `true` if this is the /// _first_ call to the predicate on the _second_ application of the operation. -pub struct Predicate(pub(crate) Box bool + Send>); +pub struct Predicate(pub(crate) Box bool + Send>); -impl Predicate { +impl Predicate { /// Evaluate the predicate for the given element #[inline] pub fn eval(&mut self, value: &V, reset: bool) -> bool { @@ -235,7 +236,7 @@ impl Predicate { } } -impl PartialEq for Predicate { +impl PartialEq for Predicate { #[inline] fn eq(&self, other: &Self) -> bool { // only compare data, not vtable: https://stackoverflow.com/q/47489449/472927 @@ -243,9 +244,9 @@ impl PartialEq for Predicate { } } -impl Eq for Predicate {} +impl Eq for Predicate {} -impl fmt::Debug for Predicate { +impl fmt::Debug for Predicate { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Predicate") .field(&format_args!("{:p}", &*self.0 as *const _)) @@ -255,12 +256,14 @@ impl fmt::Debug for Predicate { /// A pending map operation. #[non_exhaustive] -#[derive(PartialEq, Eq, Debug)] -pub(crate) enum Operation { +pub(crate) enum Operation +where + V: ShallowCopy, +{ /// Replace the set of entries for this key with this value. - Replace(K, V), + Replace(K, MaybeShallowCopied), /// Add this value to the set of entries for this key. - Add(K, V), + Add(K, MaybeShallowCopied), /// Remove this value from the set of entries for this key. RemoveValue(K, V), /// Remove the value set for this key. @@ -277,15 +280,15 @@ pub(crate) enum Operation { /// Note that this will iterate once over all the keys internally. Purge, /// Retains all values matching the given predicate. 
- Retain(K, Predicate), - /// Shrinks [`Values`] to their minimum necessary size, freeing memory + Retain(K, Predicate), + /// Shrinks [`Values`] to their minimum necessary size, freeing memory /// and potentially improving cache locality. /// - /// If no key is given, all `Values` will shrink to fit. + /// If no key is given, all `Values` will shrink to fit. Fit(Option), - /// Reserves capacity for some number of additional elements in [`Values`] + /// Reserves capacity for some number of additional elements in [`Values`] /// for the given key. If the given key does not exist, allocate an empty - /// `Values` with the given capacity. + /// `Values` with the given capacity. /// /// This can improve performance by pre-allocating space for large bags of values. Reserve(K, usize), @@ -293,8 +296,32 @@ MarkReady, /// Set the value of the map meta. SetMeta(M), - /// Copy over the contents of the read map wholesale as the write map is empty. - JustCloneRHandle, +} + +impl fmt::Debug for Operation +where + K: fmt::Debug, + V: ShallowCopy + fmt::Debug, + V::Target: fmt::Debug, + M: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Operation::Replace(ref a, ref b) => f.debug_tuple("Replace").field(a).field(b).finish(), + Operation::Add(ref a, ref b) => f.debug_tuple("Add").field(a).field(b).finish(), + Operation::RemoveValue(ref a, ref b) => { + f.debug_tuple("RemoveValue").field(a).field(b).finish() + } + Operation::RemoveEntry(ref a) => f.debug_tuple("RemoveEntry").field(a).finish(), + Operation::Clear(ref a) => f.debug_tuple("Clear").field(a).finish(), + Operation::Purge => f.debug_tuple("Purge").finish(), + Operation::Retain(ref a, ref b) => f.debug_tuple("Retain").field(a).field(b).finish(), + Operation::Fit(ref a) => f.debug_tuple("Fit").field(a).finish(), + Operation::Reserve(ref a, ref b) => f.debug_tuple("Reserve").field(a).field(b).finish(), + Operation::MarkReady => f.debug_tuple("MarkReady").finish(), + Operation::SetMeta(ref a) => f.debug_tuple("SetMeta").field(a).finish(), + } + } } /// Options for how to initialize the map.
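As an illustration (not part of the patch): with `JustCloneRHandle` gone, every queued operation is applied once to each of the two map copies, and the value carried by `Replace`/`Add` is split into two aliasing halves the first time the operation is absorbed. The snippet below is a simplified, safe analogue of the `Owned` -> `Copied` -> consumed lifecycle that `MaybeShallowCopied` goes through; it uses `Arc` in place of the raw `SplitType` halves, and all names in it are invented for illustration.

```rust
use std::mem;
use std::sync::Arc;

enum MaybeCopied<T> {
    Owned(T),       // value as queued by the writer
    Copied(Arc<T>), // one half retained after the first absorb
    Swapping,       // transient state while moving out of Owned
}

impl<T> MaybeCopied<T> {
    // First application of the op: split the owned value, keep one alias in
    // the oplog entry, hand the other to the first map copy.
    fn first(&mut self) -> Arc<T> {
        match mem::replace(self, MaybeCopied::Swapping) {
            MaybeCopied::Owned(t) => {
                let keep = Arc::new(t);
                let give = Arc::clone(&keep);
                *self = MaybeCopied::Copied(keep);
                give
            }
            _ => unreachable!("first() is only called while the value is still owned"),
        }
    }

    // Second application: the op is consumed, so hand over the retained half.
    fn second(self) -> Arc<T> {
        match self {
            MaybeCopied::Copied(half) => half,
            _ => unreachable!("second() is only called after first()"),
        }
    }
}

fn main() {
    let mut op = MaybeCopied::Owned(String::from("v"));
    let first = op.first();   // goes into the first map copy
    let second = op.second(); // goes into the other map copy on the next absorb
    assert!(Arc::ptr_eq(&first, &second)); // both halves alias the same allocation
}
```

The actual change avoids the reference count entirely by storing the two raw halves produced by `ShallowCopy::split` and deciding at drop time (via the thread-local in `shallow_copy.rs`) whether a half is dropped for real or merely forgotten.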
@@ -373,7 +400,8 @@ where where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, M: 'static + Clone, { let inner = if let Some(cap) = self.capacity { @@ -399,7 +427,8 @@ pub fn new() -> ( ) where K: Eq + Hash + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, { Options::default().construct() } @@ -416,7 +445,8 @@ pub fn with_meta( ) where K: Eq + Hash + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, M: 'static + Clone, { Options::default().with_meta(meta).construct() @@ -432,7 +462,8 @@ pub fn with_hasher( ) -> (WriteHandle, ReadHandle) where K: Eq + Hash + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, M: 'static + Clone, S: BuildHasher + Clone, { diff --git a/evmap/src/read.rs b/evmap/src/read.rs index 8033175..77a607a 100644 --- a/evmap/src/read.rs +++ b/evmap/src/read.rs @@ -1,5 +1,6 @@ use crate::inner::Inner; use crate::values::Values; +use crate::ShallowCopy; use left_right::ReadGuard; use std::borrow::Borrow; @@ -22,6 +23,7 @@ pub use factory::ReadHandleFactory; pub struct ReadHandle where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { pub(crate) handle: left_right::ReadHandle>, @@ -30,6 +32,7 @@ where impl fmt::Debug for ReadHandle where K: Eq + Hash + fmt::Debug, + V: ShallowCopy, S: BuildHasher, M: fmt::Debug, { @@ -43,6 +46,7 @@ where impl Clone for ReadHandle where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { fn clone(&self) -> Self { @@ -55,6 +59,7 @@ where impl ReadHandle where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { pub(crate) fn new(handle: left_right::ReadHandle>) -> Self { @@ -65,7 +70,8 @@ where impl ReadHandle where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, M: Clone, { @@ -150,7 +156,7 @@ where /// refreshed by the writer. If no refresh has happened, or the map has been destroyed, this /// function returns `None`. #[inline] - pub fn get_one<'rh, Q: ?Sized>(&'rh self, key: &'_ Q) -> Option> + pub fn get_one<'rh, Q: ?Sized>(&'rh self, key: &'_ Q) -> Option> where K: Borrow, Q: Hash + Eq, @@ -204,14 +210,12 @@ where /// Returns true if the map contains the specified value for the specified key. /// - /// The key and value may be any borrowed form of the map's respective types, but `Hash` and + /// The key may be any borrowed form of the map's key type, but `Hash` and /// `Eq` on the borrowed form *must* match. 
- pub fn contains_value(&self, key: &Q, value: &W) -> bool + pub fn contains_value(&self, key: &Q, value: &V::Target) -> bool where K: Borrow, - V: Borrow, Q: Hash + Eq, - W: Hash + Eq, { self.get_raw(key.borrow()) .map(|x| x.contains(value)) diff --git a/evmap/src/read/factory.rs b/evmap/src/read/factory.rs index b172991..bab6659 100644 --- a/evmap/src/read/factory.rs +++ b/evmap/src/read/factory.rs @@ -1,5 +1,6 @@ use super::ReadHandle; use crate::inner::Inner; +use crate::ShallowCopy; use std::collections::hash_map::RandomState; use std::fmt; use std::hash::{BuildHasher, Hash}; @@ -13,6 +14,7 @@ use std::hash::{BuildHasher, Hash}; pub struct ReadHandleFactory where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { pub(super) factory: left_right::ReadHandleFactory>, @@ -21,6 +23,7 @@ where impl fmt::Debug for ReadHandleFactory where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -33,6 +36,7 @@ where impl Clone for ReadHandleFactory where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { fn clone(&self) -> Self { @@ -45,6 +49,7 @@ where impl ReadHandleFactory where K: Eq + Hash, + V: ShallowCopy, S: BuildHasher, { /// Produce a new [`ReadHandle`] to the same left-right data structure as this factory was diff --git a/evmap/src/read/read_ref.rs b/evmap/src/read/read_ref.rs index d7f56ae..03ccd7c 100644 --- a/evmap/src/read/read_ref.rs +++ b/evmap/src/read/read_ref.rs @@ -1,7 +1,9 @@ +use crate::ShallowCopy; use crate::{inner::Inner, values::Values}; use left_right::ReadGuard; use std::borrow::Borrow; use std::collections::hash_map::RandomState; +use std::fmt; use std::hash::{BuildHasher, Hash}; // To make [`WriteHandle`] and friends work. @@ -15,20 +17,38 @@ use crate::WriteHandle; /// /// Since the map remains immutable while this lives, the methods on this type all give you /// unguarded references to types contained in the map. -#[derive(Debug)] pub struct MapReadRef<'rh, K, V, M = (), S = RandomState> where K: Hash + Eq, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { pub(super) guard: ReadGuard<'rh, Inner>, } +impl<'rh, K, V, M, S> fmt::Debug for MapReadRef<'rh, K, V, M, S> +where + K: Hash + Eq, + V: ShallowCopy, + V::Target: Eq + Hash, + S: BuildHasher, + K: fmt::Debug, + M: fmt::Debug, + V::Target: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("MapReadRef") + .field("guard", &self.guard) + .finish() + } +} + impl<'rh, K, V, M, S> MapReadRef<'rh, K, V, M, S> where K: Hash + Eq, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { /// Iterate over all key + valuesets in the map. @@ -104,7 +124,7 @@ where /// Note that not all writes will be included with this read -- only those that have been /// published by the writer. If no publish has happened, or the map has been destroyed, this /// function returns `None`. - pub fn get_one<'a, Q: ?Sized>(&'a self, key: &'_ Q) -> Option<&'a V> + pub fn get_one<'a, Q: ?Sized>(&'a self, key: &'_ Q) -> Option<&'a V::Target> where K: Borrow, Q: Hash + Eq, @@ -126,15 +146,16 @@ where /// Returns true if the map contains the specified value for the specified key. /// - /// The key and value may be any borrowed form of the map's respective types, but `Hash` and + /// The key may be any borrowed form of the map's key type, but `Hash` and /// `Eq` on the borrowed form *must* match. 
- pub fn contains_value(&self, key: &Q, value: &W) -> bool + pub fn contains_value(&self, key: &Q, value: &V::Target) -> bool where K: Borrow, - V: Borrow, Q: Hash + Eq, - W: Hash + Eq, { + // NOTE: It would be really nice to support the V::Target: Borrow interface here, + // but unfortunately we can't do that since we cannot implement Borrow for + // ForwardThroughShallowCopy. self.guard .data .get(key) @@ -145,7 +166,8 @@ where impl<'rh, K, Q, V, M, S> std::ops::Index<&'_ Q> for MapReadRef<'rh, K, V, M, S> where K: Eq + Hash + Borrow, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, Q: Eq + Hash + ?Sized, S: BuildHasher, { @@ -158,7 +180,8 @@ where impl<'rg, 'rh, K, V, M, S> IntoIterator for &'rg MapReadRef<'rh, K, V, M, S> where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { type Item = (&'rg K, &'rg Values); @@ -169,20 +192,34 @@ where } /// An [`Iterator`] over keys and values in the evmap. -#[derive(Debug)] pub struct ReadGuardIter<'rg, K, V, S> where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { iter: <&'rg crate::inner::MapImpl, S> as IntoIterator>::IntoIter, } +impl<'rg, K, V, S> fmt::Debug for ReadGuardIter<'rg, K, V, S> +where + K: Eq + Hash + fmt::Debug, + V: ShallowCopy, + V::Target: Eq + Hash, + S: BuildHasher, + V::Target: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("ReadGuardIter").field(&self.iter).finish() + } +} + impl<'rg, K, V, S> Iterator for ReadGuardIter<'rg, K, V, S> where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { type Item = (&'rg K, &'rg Values); @@ -192,20 +229,34 @@ where } /// An [`Iterator`] over keys. -#[derive(Debug)] pub struct KeysIter<'rg, K, V, S> where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { iter: <&'rg crate::inner::MapImpl, S> as IntoIterator>::IntoIter, } +impl<'rg, K, V, S> fmt::Debug for KeysIter<'rg, K, V, S> +where + K: Eq + Hash + fmt::Debug, + V: ShallowCopy, + V::Target: Eq + Hash, + S: BuildHasher, + V::Target: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("KeysIter").field(&self.iter).finish() + } +} + impl<'rg, K, V, S> Iterator for KeysIter<'rg, K, V, S> where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { type Item = &'rg K; @@ -215,20 +266,34 @@ where } /// An [`Iterator`] over value sets. -#[derive(Debug)] pub struct ValuesIter<'rg, K, V, S> where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { iter: <&'rg crate::inner::MapImpl, S> as IntoIterator>::IntoIter, } +impl<'rg, K, V, S> fmt::Debug for ValuesIter<'rg, K, V, S> +where + K: Eq + Hash + fmt::Debug, + V: ShallowCopy, + V::Target: Eq + Hash, + S: BuildHasher, + V::Target: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("ValuesIter").field(&self.iter).finish() + } +} + impl<'rg, K, V, S> Iterator for ValuesIter<'rg, K, V, S> where K: Eq + Hash, - V: Eq + Hash, + V: ShallowCopy, + V::Target: Eq + Hash, S: BuildHasher, { type Item = &'rg Values; diff --git a/evmap/src/shallow_copy.rs b/evmap/src/shallow_copy.rs index b238b57..0148c32 100644 --- a/evmap/src/shallow_copy.rs +++ b/evmap/src/shallow_copy.rs @@ -1,6 +1,7 @@ //! Types that can be cheaply aliased. 
-use std::mem::ManuallyDrop; +use std::cell::Cell; +use std::mem; use std::ops::{Deref, DerefMut}; /// Types that implement this trait can be cheaply copied by (potentially) aliasing the data they @@ -28,18 +29,79 @@ use std::ops::{Deref, DerefMut}; /// the other is dropped normally afterwards. /// /// For complex, non-`Copy` types, you can place the type behind a wrapper that implements -/// `ShallowCopy` such as `Box` or `Arc`. +/// `ShallowCopy` such as `Arc`. /// Alternatively, if your type is made up of types that all implement `ShallowCopy`, consider /// using the `evmap-derive` crate, which contains a derive macro for `ShallowCopy`. /// See that crate's documentation for details. +/// +/// Send + Sync + Hash + Eq etc. must translate (think Borrow) pub trait ShallowCopy { + type SplitType; + type Target: ?Sized; + /// Perform an aliasing copy of this value. /// /// # Safety /// /// The use of this method is *only* safe if the values involved are never mutated, and only /// one of the copies is dropped; the remaining copies must be forgotten with `mem::forget`. - unsafe fn shallow_copy(&self) -> ManuallyDrop; + fn split(self) -> (Self::SplitType, Self::SplitType); + + fn deref_self(&self) -> &Self::Target; + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target; + + unsafe fn drop(split: &mut Self::SplitType); +} + +pub(crate) enum MaybeShallowCopied +where + T: ShallowCopy, +{ + Owned(T), + Copied(T::SplitType), + Swapping, +} + +impl MaybeShallowCopied +where + T: ShallowCopy, +{ + // unsafe because shallow copy must remain dereferencable for lifetime of return value. + pub(crate) unsafe fn shallow_copy_first(&mut self) -> ForwardThroughShallowCopy { + match mem::replace(self, MaybeShallowCopied::Swapping) { + MaybeShallowCopied::Owned(t) => { + let (a, b) = t.split(); + *self = MaybeShallowCopied::Copied(a); + ForwardThroughShallowCopy::Split(b) + } + MaybeShallowCopied::Copied(_) => unreachable!(), + MaybeShallowCopied::Swapping => unreachable!(), + } + } + + // unsafe because shallow copy must remain dereferencable for lifetime of return value. 
+ pub(crate) unsafe fn shallow_copy_second(self) -> ForwardThroughShallowCopy { + match self { + MaybeShallowCopied::Copied(split) => ForwardThroughShallowCopy::Split(split), + MaybeShallowCopied::Owned(_) => unreachable!(), + MaybeShallowCopied::Swapping => unreachable!(), + } + } +} + +impl fmt::Debug for MaybeShallowCopied +where + T: ShallowCopy + fmt::Debug, + T::Target: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + MaybeShallowCopied::Owned(ref t) => t.fmt(f), + MaybeShallowCopied::Copied(ref split) => unsafe { T::deref(split) }.fmt(f), + MaybeShallowCopied::Swapping => unreachable!(), + } + } } use std::sync::Arc; @@ -47,8 +109,24 @@ impl ShallowCopy for Arc where T: ?Sized, { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(Arc::from_raw(&**self as *const _)) + type SplitType = *const T; + type Target = T; + + fn split(self) -> (Self::SplitType, Self::SplitType) { + let ptr = Arc::into_raw(self); + (ptr, ptr) + } + + fn deref_self(&self) -> &Self::Target { + &*self + } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + &**split + } + + unsafe fn drop(split: &mut Self::SplitType) { + Arc::from_raw(*split); } } @@ -57,48 +135,137 @@ impl ShallowCopy for Rc where T: ?Sized, { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(Rc::from_raw(&**self as *const _)) + type SplitType = *const T; + type Target = T; + + fn split(self) -> (Self::SplitType, Self::SplitType) { + let ptr = Rc::into_raw(self); + (ptr, ptr) + } + + fn deref_self(&self) -> &Self::Target { + &*self + } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + &**split + } + + unsafe fn drop(split: &mut Self::SplitType) { + Rc::from_raw(*split); } } +// Aliasing Box is not okay: +// https://github.com/rust-lang/unsafe-code-guidelines/issues/258 +// https://github.com/jonhoo/rust-evmap/issues/74 +// +// Also, this cases from a non-mutable pointer to a mutable one, which is never okay. + impl ShallowCopy for Box where T: ?Sized, { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(Box::from_raw(&**self as *const _ as *mut _)) + type SplitType = *mut T; + type Target = T; + + fn split(self) -> (Self::SplitType, Self::SplitType) { + let ptr = Box::into_raw(self); + (ptr, ptr) } -} -impl ShallowCopy for Option -where - T: ShallowCopy, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(if let Some(value) = self { - Some(ManuallyDrop::into_inner(value.shallow_copy())) - } else { - None - }) + fn deref_self(&self) -> &Self::Target { + &*self + } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + &**split + } + + unsafe fn drop(split: &mut Self::SplitType) { + Box::from_raw(split); } } +// We need GAT for Option to implement ShallowCopy. Bleh... +// The reason is that we need Target to be Target<'a> where the 'a is assigned in deref. +// Otherwise, Option's deref has to return &Option, which it obviously cannot. 
+// +// impl ShallowCopy for Option +// where +// T: ShallowCopy, +// T::Target: Sized, +// { +// type SplitType = Option; +// type Target = Option; +// +// fn split(self) -> (Self::SplitType, Self::SplitType) { +// if let Some(this) = self { +// let (a, b) = this.split(); +// (Some(a), Some(b)) +// } else { +// (None, None) +// } +// } +// +// unsafe fn deref(split: &Self::SplitType) -> &Self::Target { +// split.map(|st| unsafe { T::deref(st) }) +// } +// +// unsafe fn drop(split: &mut Self::SplitType) { +// split.map(|st| unsafe { T::drop(st) }); +// } +// } + impl ShallowCopy for String { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - let buf = self.as_bytes().as_ptr(); + type SplitType = (*mut u8, usize, usize); + type Target = str; + + fn split(mut self) -> (Self::SplitType, Self::SplitType) { let len = self.len(); let cap = self.capacity(); - ManuallyDrop::new(String::from_raw_parts(buf as *mut _, len, cap)) + // safety: safe because we will not mutate the string. + let buf = unsafe { self.as_bytes_mut() }.as_mut_ptr(); + let split = (buf, len, cap); + (split, split) + } + + fn deref_self(&self) -> &Self::Target { + &*self + } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + let u8s = std::slice::from_raw_parts(split.0, split.1); + std::str::from_utf8_unchecked(u8s) + } + + unsafe fn drop(split: &mut Self::SplitType) { + String::from_raw_parts(split.0, split.1, split.2); } } impl ShallowCopy for Vec { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - let ptr = self.as_ptr() as *mut _; + type SplitType = (*mut T, usize, usize); + type Target = [T]; + + fn split(mut self) -> (Self::SplitType, Self::SplitType) { + let buf = self.as_mut_ptr(); let len = self.len(); let cap = self.capacity(); - ManuallyDrop::new(Vec::from_raw_parts(ptr, len, cap)) + let split = (buf, len, cap); + (split, split) + } + + fn deref_self(&self) -> &Self::Target { + &*self + } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + std::slice::from_raw_parts(split.0, split.1) + } + + unsafe fn drop(split: &mut Self::SplitType) { + Vec::from_raw_parts(split.0, split.1, split.2); } } @@ -115,9 +282,23 @@ impl<'a, T> ShallowCopy for &'a T where T: ?Sized, { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(&*self) + type SplitType = *const T; + type Target = T; + + fn split(self) -> (Self::SplitType, Self::SplitType) { + let ptr: Self::SplitType = self; + (ptr, ptr) + } + + fn deref_self(&self) -> &Self::Target { + &*self } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + &**split + } + + unsafe fn drop(_: &mut Self::SplitType) {} } /// If you are willing to have your values be copied between the two views of the `evmap`, @@ -135,15 +316,6 @@ impl From for CopyValue { } } -impl ShallowCopy for CopyValue -where - T: Copy, -{ - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(CopyValue(self.0)) - } -} - impl Deref for CopyValue { type Target = T; fn deref(&self) -> &Self::Target { @@ -157,135 +329,204 @@ impl DerefMut for CopyValue { } } +impl ShallowCopy for CopyValue +where + T: Copy, +{ + type SplitType = T; + type Target = T; + + fn split(self) -> (Self::SplitType, Self::SplitType) { + (self.0, self.0) + } + + fn deref_self(&self) -> &Self::Target { + &*self + } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + split + } + + unsafe fn drop(_: &mut Self::SplitType) {} +} + macro_rules! 
impl_shallow_copy_for_copy_primitives { ($($t:ty)*) => ($( impl ShallowCopy for $t { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(*self) + type SplitType = $t; + type Target = $t; + + fn split(self) -> (Self::SplitType, Self::SplitType) { + (self, self) } + + fn deref_self(&self) -> &Self::Target { + &*self + } + + unsafe fn deref(split: &Self::SplitType) -> &Self::Target { + split + } + + unsafe fn drop(_: &mut Self::SplitType) {} } )*) } impl_shallow_copy_for_copy_primitives!(() bool char usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64); -macro_rules! tuple_impls { - ($( - $Tuple:ident { - $(($idx:tt) -> $T:ident)+ - } - )+) => { - $( - impl<$($T:ShallowCopy),+> ShallowCopy for ($($T,)+) { - unsafe fn shallow_copy(&self) -> ManuallyDrop { - ManuallyDrop::new(($(ManuallyDrop::into_inner(self.$idx.shallow_copy()),)+)) +// Only public since it appears in the (doc-hidden) variants of `values::ValuesIter`. +#[doc(hidden)] +pub enum ForwardThroughShallowCopy +where + T: ShallowCopy, +{ + Split(T::SplitType), + Ref(*const T::Target), +} + +// safety: ShallowCopy requires that the split preserves Send + Sync. +// since we only ever give out &T, it is okay to Send+Sync us as long as T is Sync +unsafe impl Send for ForwardThroughShallowCopy where T: ShallowCopy + Sync {} +unsafe impl Sync for ForwardThroughShallowCopy where T: ShallowCopy + Sync {} + +impl ForwardThroughShallowCopy +where + T: ShallowCopy, +{ + // unsafe because return value must not be used after the lifetime of the reference ends. + pub(crate) unsafe fn from_ref(t: &T::Target) -> Self { + ForwardThroughShallowCopy::Ref(t) + } +} + +thread_local! { + static DROP_FOR_REAL: Cell = Cell::new(false); +} + +pub(crate) unsafe fn drop_copies(yes: bool) { + DROP_FOR_REAL.with(|dfr| dfr.set(yes)) +} + +impl Drop for ForwardThroughShallowCopy +where + T: ShallowCopy, +{ + fn drop(&mut self) { + DROP_FOR_REAL.with(move |drop_for_real| { + if drop_for_real.get() { + if let ForwardThroughShallowCopy::Split(s) = self { + unsafe { T::drop(s) }; } } - )+ + }) + } +} + +impl Deref for ForwardThroughShallowCopy +where + T: ShallowCopy, +{ + type Target = T::Target; + fn deref(&self) -> &Self::Target { + match self { + ForwardThroughShallowCopy::Split(s) => unsafe { T::deref(s) }, + ForwardThroughShallowCopy::Ref(r) => unsafe { &**r }, + } } } -tuple_impls! 
{ - Tuple1 { - (0) -> A - } - Tuple2 { - (0) -> A - (1) -> B - } - Tuple3 { - (0) -> A - (1) -> B - (2) -> C - } - Tuple4 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - } - Tuple5 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - } - Tuple6 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - } - Tuple7 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - } - Tuple8 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - } - Tuple9 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - } - Tuple10 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - (9) -> J - } - Tuple11 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - (9) -> J - (10) -> K - } - Tuple12 { - (0) -> A - (1) -> B - (2) -> C - (3) -> D - (4) -> E - (5) -> F - (6) -> G - (7) -> H - (8) -> I - (9) -> J - (10) -> K - (11) -> L +impl AsRef for ForwardThroughShallowCopy +where + T: ShallowCopy, +{ + fn as_ref(&self) -> &T::Target { + match self { + ForwardThroughShallowCopy::Split(s) => unsafe { T::deref(s) }, + ForwardThroughShallowCopy::Ref(r) => unsafe { &**r }, + } + } +} + +use std::hash::{Hash, Hasher}; +impl Hash for ForwardThroughShallowCopy +where + T: ShallowCopy, + T::Target: Hash, +{ + fn hash(&self, state: &mut H) + where + H: Hasher, + { + self.as_ref().hash(state) + } +} + +use std::fmt; +impl fmt::Debug for ForwardThroughShallowCopy +where + T: ShallowCopy, + T::Target: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_ref().fmt(f) + } +} + +impl PartialEq for ForwardThroughShallowCopy +where + T: ShallowCopy, + T::Target: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + self.as_ref().eq(other.as_ref()) + } + + fn ne(&self, other: &Self) -> bool { + self.as_ref().ne(other.as_ref()) + } +} + +impl Eq for ForwardThroughShallowCopy +where + T: ShallowCopy, + T::Target: Eq, +{ +} + +impl PartialOrd for ForwardThroughShallowCopy +where + T: ShallowCopy, + T::Target: PartialOrd, +{ + fn partial_cmp(&self, other: &Self) -> Option { + self.as_ref().partial_cmp(other.as_ref()) + } + + fn lt(&self, other: &Self) -> bool { + self.as_ref().lt(other.as_ref()) + } + + fn le(&self, other: &Self) -> bool { + self.as_ref().le(other.as_ref()) + } + + fn gt(&self, other: &Self) -> bool { + self.as_ref().gt(other.as_ref()) + } + + fn ge(&self, other: &Self) -> bool { + self.as_ref().ge(other.as_ref()) + } +} + +impl Ord for ForwardThroughShallowCopy +where + T: ShallowCopy, + T::Target: Ord, +{ + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.as_ref().cmp(other.as_ref()) } } diff --git a/evmap/src/values.rs b/evmap/src/values.rs index ff8d916..1660a94 100644 --- a/evmap/src/values.rs +++ b/evmap/src/values.rs @@ -1,4 +1,5 @@ -use std::borrow::Borrow; +use crate::shallow_copy::ForwardThroughShallowCopy; +use crate::ShallowCopy; use std::fmt; use std::hash::{BuildHasher, Hash}; @@ -7,17 +8,28 @@ const BAG_THRESHOLD: usize = 32; /// A bag of values for a given key in the evmap. 
#[repr(transparent)] -pub struct Values(ValuesInner); +pub struct Values +where + T: ShallowCopy, +{ + inner: ValuesInner, +} -impl Default for Values { +impl Default for Values +where + T: ShallowCopy, +{ fn default() -> Self { - Values(ValuesInner::Short(Default::default())) + Values { + inner: ValuesInner::Short(Default::default()), + } } } impl fmt::Debug for Values where - T: fmt::Debug, + T: ShallowCopy, + T::Target: fmt::Debug, S: BuildHasher, { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -25,15 +37,21 @@ where } } -enum ValuesInner { - Short(smallvec::SmallVec<[T; 1]>), - Long(hashbag::HashBag), +enum ValuesInner +where + T: ShallowCopy, +{ + Short(smallvec::SmallVec<[ForwardThroughShallowCopy; 1]>), + Long(hashbag::HashBag, S>), } -impl Values { +impl Values +where + T: ShallowCopy, +{ /// Returns the number of values. pub fn len(&self) -> usize { - match self.0 { + match self.inner { ValuesInner::Short(ref v) => v.len(), ValuesInner::Long(ref v) => v.len(), } @@ -41,7 +59,7 @@ impl Values { /// Returns true if the bag holds no values. pub fn is_empty(&self) -> bool { - match self.0 { + match self.inner { ValuesInner::Short(ref v) => v.is_empty(), ValuesInner::Long(ref v) => v.is_empty(), } @@ -49,7 +67,7 @@ impl Values { /// Returns the number of values that can be held without reallocating. pub fn capacity(&self) -> usize { - match self.0 { + match self.inner { ValuesInner::Short(ref v) => v.capacity(), ValuesInner::Long(ref v) => v.capacity(), } @@ -59,7 +77,7 @@ impl Values { /// /// The iterator element type is &'a T. pub fn iter(&self) -> ValuesIter<'_, T, S> { - match self.0 { + match self.inner { ValuesInner::Short(ref v) => ValuesIter::Short(v.iter()), ValuesInner::Long(ref v) => ValuesIter::Long(v.iter()), } @@ -70,10 +88,10 @@ impl Values { /// This is mostly intended for use when you are working with no more than one value per key. /// If there are multiple values stored for this key, there are no guarantees to which element /// is returned. - pub fn get_one(&self) -> Option<&T> { - match self.0 { - ValuesInner::Short(ref v) => v.get(0), - ValuesInner::Long(ref v) => v.iter().next(), + pub fn get_one(&self) -> Option<&T::Target> { + match self.inner { + ValuesInner::Short(ref v) => v.get(0).map(|v| &**v), + ValuesInner::Long(ref v) => v.iter().next().map(|v| &**v), } } @@ -81,47 +99,73 @@ impl Values { /// /// The value may be any borrowed form of `T`, but [`Hash`] and [`Eq`] on the borrowed form /// *must* match those for the value type. - pub fn contains(&self, value: &Q) -> bool + pub fn contains(&self, value: &T::Target) -> bool where - T: Borrow, - Q: Eq + Hash, - T: Eq + Hash, + T::Target: Eq + Hash, S: BuildHasher, { - match self.0 { - ValuesInner::Short(ref v) => v.iter().any(|v| v.borrow() == value), - ValuesInner::Long(ref v) => v.contains(value) != 0, + // NOTE: It would be really nice to support the T::Target: Borrow interface here, + // but unfortunately we can't do that since we cannot implement Borrow for + // ForwardThroughShallowCopy. + match self.inner { + ValuesInner::Short(ref v) => v.iter().any(|v| (&**v) == value), + ValuesInner::Long(ref v) => { + // safety: we only keep the ForwardThroughShallowCopy around + // while the refernce in value remains live. 
+ v.contains(&unsafe { ForwardThroughShallowCopy::from_ref(value) }) != 0 + } } } #[cfg(test)] fn is_short(&self) -> bool { - matches!(self.0, ValuesInner::Short(_)) + matches!(self.inner, ValuesInner::Short(_)) } } -impl<'a, T, S> IntoIterator for &'a Values { +impl<'a, T, S> IntoIterator for &'a Values +where + T: ShallowCopy, +{ type IntoIter = ValuesIter<'a, T, S>; - type Item = &'a T; + type Item = &'a T::Target; fn into_iter(self) -> Self::IntoIter { self.iter() } } -#[derive(Debug)] -pub enum ValuesIter<'a, T, S> { +pub enum ValuesIter<'a, T, S> +where + T: ShallowCopy, +{ #[doc(hidden)] - Short(<&'a smallvec::SmallVec<[T; 1]> as IntoIterator>::IntoIter), + Short(<&'a smallvec::SmallVec<[ForwardThroughShallowCopy; 1]> as IntoIterator>::IntoIter), #[doc(hidden)] - Long(<&'a hashbag::HashBag as IntoIterator>::IntoIter), + Long(<&'a hashbag::HashBag, S> as IntoIterator>::IntoIter), +} + +impl<'a, T, S> fmt::Debug for ValuesIter<'a, T, S> +where + T: ShallowCopy, + T::Target: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + ValuesIter::Short(ref it) => f.debug_tuple("Short").field(it).finish(), + ValuesIter::Long(ref it) => f.debug_tuple("Long").field(it).finish(), + } + } } -impl<'a, T, S> Iterator for ValuesIter<'a, T, S> { - type Item = &'a T; +impl<'a, T, S> Iterator for ValuesIter<'a, T, S> +where + T: ShallowCopy, +{ + type Item = &'a T::Target; fn next(&mut self) -> Option { match *self { - Self::Short(ref mut it) => it.next(), - Self::Long(ref mut it) => it.next(), + Self::Short(ref mut it) => it.next().map(|v| &**v), + Self::Long(ref mut it) => it.next().map(|v| &**v), } } @@ -135,6 +179,7 @@ impl<'a, T, S> Iterator for ValuesIter<'a, T, S> { impl<'a, T, S> ExactSizeIterator for ValuesIter<'a, T, S> where + T: ShallowCopy, <&'a smallvec::SmallVec<[T; 1]> as IntoIterator>::IntoIter: ExactSizeIterator, <&'a hashbag::HashBag as IntoIterator>::IntoIter: ExactSizeIterator, { @@ -142,6 +187,7 @@ where impl<'a, T, S> std::iter::FusedIterator for ValuesIter<'a, T, S> where + T: ShallowCopy, <&'a smallvec::SmallVec<[T; 1]> as IntoIterator>::IntoIter: std::iter::FusedIterator, <&'a hashbag::HashBag as IntoIterator>::IntoIter: std::iter::FusedIterator, { @@ -149,27 +195,33 @@ where impl Values where - T: Eq + Hash, + T: ShallowCopy, + T::Target: Eq + Hash, S: BuildHasher + Clone, { pub(crate) fn new() -> Self { - Self(ValuesInner::Short(smallvec::SmallVec::new())) + Self { + inner: ValuesInner::Short(smallvec::SmallVec::new()), + } } pub(crate) fn with_capacity_and_hasher(capacity: usize, hasher: &S) -> Self { if capacity > BAG_THRESHOLD { - Self(ValuesInner::Long( - hashbag::HashBag::with_capacity_and_hasher(capacity, hasher.clone()), - )) + Self { + inner: ValuesInner::Long(hashbag::HashBag::with_capacity_and_hasher( + capacity, + hasher.clone(), + )), + } } else { - Self(ValuesInner::Short(smallvec::SmallVec::with_capacity( - capacity, - ))) + Self { + inner: ValuesInner::Short(smallvec::SmallVec::with_capacity(capacity)), + } } } pub(crate) fn shrink_to_fit(&mut self) { - match self.0 { + match self.inner { ValuesInner::Short(ref mut v) => v.shrink_to_fit(), ValuesInner::Long(ref mut v) => { // here, we actually want to be clever @@ -199,7 +251,7 @@ where assert_eq!(n, 1); short.push(row); } - self.0 = ValuesInner::Short(short); + self.inner = ValuesInner::Short(short); } else { v.shrink_to_fit(); } @@ -209,27 +261,29 @@ where pub(crate) fn clear(&mut self) { // NOTE: we do _not_ downgrade to Short here -- shrink is for that - match self.0 
{ + match self.inner { ValuesInner::Short(ref mut v) => v.clear(), ValuesInner::Long(ref mut v) => v.clear(), } } - pub(crate) fn swap_remove(&mut self, value: &T) { - match self.0 { + pub(crate) fn swap_remove(&mut self, value: &T::Target) { + match self.inner { ValuesInner::Short(ref mut v) => { - if let Some(i) = v.iter().position(|v| v == value) { + if let Some(i) = v.iter().position(|v| &**v == value) { v.swap_remove(i); } } ValuesInner::Long(ref mut v) => { - v.remove(value); + // safety: we only keep the ForwardThroughShallowCopy around + // while the refernce in value remains live. + v.remove(&unsafe { ForwardThroughShallowCopy::from_ref(value) }); } } } fn baggify(&mut self, capacity: usize, hasher: &S) { - if let ValuesInner::Short(ref mut v) = self.0 { + if let ValuesInner::Short(ref mut v) = self.inner { let mut long = hashbag::HashBag::with_capacity_and_hasher(capacity, hasher.clone()); // NOTE: this _may_ drop some values since the bag does not keep duplicates. @@ -239,12 +293,12 @@ where // exact same original state, this change from short/long should occur exactly // the same. long.extend(v.drain(..)); - self.0 = ValuesInner::Long(long); + self.inner = ValuesInner::Long(long); } } pub(crate) fn reserve(&mut self, additional: usize, hasher: &S) { - match self.0 { + match self.inner { ValuesInner::Short(ref mut v) => { let n = v.len() + additional; if n >= BAG_THRESHOLD { @@ -257,14 +311,14 @@ where } } - pub(crate) fn push(&mut self, value: T, hasher: &S) { - match self.0 { + pub(crate) fn push(&mut self, value: ForwardThroughShallowCopy, hasher: &S) { + match self.inner { ValuesInner::Short(ref mut v) => { // we may want to upgrade to a Long.. let n = v.len() + 1; if n >= BAG_THRESHOLD { self.baggify(n, hasher); - if let ValuesInner::Long(ref mut v) = self.0 { + if let ValuesInner::Long(ref mut v) = self.inner { v.insert(value); } else { unreachable!(); @@ -281,9 +335,9 @@ where pub(crate) fn retain(&mut self, mut f: F) where - F: FnMut(&T) -> bool, + F: FnMut(&T::Target) -> bool, { - match self.0 { + match self.inner { ValuesInner::Short(ref mut v) => v.retain(|v| f(&*v)), ValuesInner::Long(ref mut v) => v.retain(|v, n| if f(v) { n } else { 0 }), } @@ -291,21 +345,25 @@ where pub(crate) fn from_iter(iter: I, hasher: &S) -> Self where - I: IntoIterator, + I: IntoIterator>, { let iter = iter.into_iter(); if iter.size_hint().0 > BAG_THRESHOLD { let mut long = hashbag::HashBag::with_hasher(hasher.clone()); long.extend(iter); - Self(ValuesInner::Long(long)) + Self { + inner: ValuesInner::Long(long), + } } else { use std::iter::FromIterator; - Self(ValuesInner::Short(smallvec::SmallVec::from_iter(iter))) + Self { + inner: ValuesInner::Short(smallvec::SmallVec::from_iter(iter)), + } } } } -#[cfg(test)] +#[cfg(all(test, not(test)))] mod tests { use super::*; use std::collections::hash_map::RandomState; diff --git a/evmap/src/write.rs b/evmap/src/write.rs index d09c715..086d310 100644 --- a/evmap/src/write.rs +++ b/evmap/src/write.rs @@ -1,13 +1,13 @@ use super::{Operation, Predicate, ShallowCopy}; use crate::inner::Inner; use crate::read::ReadHandle; +use crate::shallow_copy::MaybeShallowCopied; use crate::values::Values; use left_right::Absorb; use std::collections::hash_map::RandomState; use std::fmt; use std::hash::{BuildHasher, Hash}; -use std::mem::ManuallyDrop; #[cfg(feature = "indexed")] use indexmap::map::Entry; @@ -51,29 +51,25 @@ pub struct WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + 
Hash, M: 'static + Clone, { handle: left_right::WriteHandle, Operation>, r_handle: ReadHandle, - - /// If Some, write directly to the write handle map, since no publish has happened. - /// Some(false) indicates that the necessary `Operation::JustCloneRHandle` has not - /// yet been appended to the oplog for when a publish does happen. - direct_write: Option, } impl fmt::Debug for WriteHandle where K: Eq + Hash + Clone + fmt::Debug, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy + fmt::Debug, + V: ShallowCopy + fmt::Debug, + V::Target: Eq + Hash + fmt::Debug, M: 'static + Clone + fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("WriteHandle") .field("handle", &self.handle) - .field("direct_write", &self.direct_write) .finish() } } @@ -82,18 +78,15 @@ impl WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, M: 'static + Clone, { pub(crate) fn new( handle: left_right::WriteHandle, Operation>, ) -> Self { let r_handle = ReadHandle::new(left_right::ReadHandle::clone(&*handle)); - Self { - handle, - r_handle, - direct_write: Some(false), - } + Self { handle, r_handle } } /// Publish all changes since the last call to `publish` to make them visible to readers. @@ -102,7 +95,6 @@ where /// are many of them. pub fn publish(&mut self) -> &mut Self { self.handle.publish(); - self.direct_write = None; self } @@ -119,26 +111,7 @@ where } fn add_op(&mut self, op: Operation) -> &mut Self { - if let Some(ref mut queued_clone) = self.direct_write { - { - // Safety: we know there are no outstanding w_handle readers, since we haven't - // refreshed ever before, so we can modify it directly! - let mut w_inner = self.handle.raw_write_handle(); - let w_inner = unsafe { w_inner.as_mut() }; - let r_handle = self.handle.enter().expect("map has not yet been destroyed"); - // because we are applying second, we _do_ want to perform drops - Absorb::absorb_second(w_inner, op, &*r_handle); - } - - if !*queued_clone { - // NOTE: since we didn't record this in the oplog, r_handle *must* clone w_handle - self.handle.append(Operation::JustCloneRHandle); - *queued_clone = true; - } - } else { - self.handle.append(op); - } - + self.handle.append(op); self } @@ -147,7 +120,7 @@ where /// The updated value-bag will only be visible to readers after the next call to /// [`publish`](Self::publish). pub fn insert(&mut self, k: K, v: V) -> &mut Self { - self.add_op(Operation::Add(k, v)) + self.add_op(Operation::Add(k, MaybeShallowCopied::Owned(v))) } /// Replace the value-bag of the given key with the given value. @@ -161,7 +134,7 @@ where /// The new value will only be visible to readers after the next call to /// [`publish`](Self::publish). pub fn update(&mut self, k: K, v: V) -> &mut Self { - self.add_op(Operation::Replace(k, v)) + self.add_op(Operation::Replace(k, MaybeShallowCopied::Owned(v))) } /// Clear the value-bag of the given key, without removing it. @@ -246,7 +219,7 @@ where /// at and beyond when the second argument is true. 
pub unsafe fn retain(&mut self, k: K, f: F) -> &mut Self where - F: FnMut(&V, bool) -> bool + 'static + Send, + F: FnMut(&V::Target, bool) -> bool + 'static + Send, { self.add_op(Operation::Retain(k, Predicate(Box::new(f)))) } @@ -330,22 +303,16 @@ impl Absorb> for Inner where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, M: 'static + Clone, { /// Apply ops in such a way that no values are dropped, only forgotten fn absorb_first(&mut self, op: &mut Operation, other: &Self) { - // Make sure that no methods below drop values since we're only operating on the first - // shallow copy of each value. - // - // Safety: ManuallyDrop has the same layout as T. - let inner = unsafe { - &mut *(self as *mut Inner as *mut Inner, M, S>) - }; let hasher = other.data.hasher(); match *op { Operation::Replace(ref key, ref mut value) => { - let vs = inner.data.entry(key.clone()).or_insert_with(Values::new); + let vs = self.data.entry(key.clone()).or_insert_with(Values::new); // truncate vector vs.clear(); @@ -354,46 +321,42 @@ where // so it will switch back to inline allocation for the subsequent push. vs.shrink_to_fit(); - vs.push(unsafe { value.shallow_copy() }, hasher); + vs.push(unsafe { value.shallow_copy_first() }, hasher); } Operation::Clear(ref key) => { - inner - .data + self.data .entry(key.clone()) .or_insert_with(Values::new) .clear(); } Operation::Add(ref key, ref mut value) => { - inner - .data + self.data .entry(key.clone()) .or_insert_with(Values::new) - .push(unsafe { value.shallow_copy() }, hasher); + .push(unsafe { value.shallow_copy_first() }, hasher); } Operation::RemoveEntry(ref key) => { #[cfg(not(feature = "indexed"))] - inner.data.remove(key); + self.data.remove(key); #[cfg(feature = "indexed")] - inner.data.swap_remove(key); + self.data.swap_remove(key); } Operation::Purge => { - inner.data.clear(); + self.data.clear(); } #[cfg(feature = "eviction")] Operation::EmptyAt(ref indices) => { for &index in indices.iter().rev() { - inner.data.swap_remove_index(index); + self.data.swap_remove_index(index); } } Operation::RemoveValue(ref key, ref value) => { - if let Some(e) = inner.data.get_mut(key) { - // remove a matching value from the value set - // safety: this is fine - e.swap_remove(unsafe { &*(value as *const _ as *const ManuallyDrop) }); + if let Some(e) = self.data.get_mut(key) { + e.swap_remove(value.deref_self()); } } Operation::Retain(ref key, ref mut predicate) => { - if let Some(e) = inner.data.get_mut(key) { + if let Some(e) = self.data.get_mut(key) { let mut first = true; e.retain(move |v| { let retain = predicate.eval(v, first); @@ -404,17 +367,17 @@ where } Operation::Fit(ref key) => match key { Some(ref key) => { - if let Some(e) = inner.data.get_mut(key) { + if let Some(e) = self.data.get_mut(key) { e.shrink_to_fit(); } } None => { - for value_set in inner.data.values_mut() { + for value_set in self.data.values_mut() { value_set.shrink_to_fit(); } } }, - Operation::Reserve(ref key, additional) => match inner.data.entry(key.clone()) { + Operation::Reserve(ref key, additional) => match self.data.entry(key.clone()) { Entry::Occupied(mut entry) => { entry.get_mut().reserve(additional, hasher); } @@ -423,69 +386,73 @@ where } }, Operation::MarkReady => { - inner.ready = true; + self.ready = true; } Operation::SetMeta(ref m) => { - inner.meta = m.clone(); - } - Operation::JustCloneRHandle => { - // This is applying the operation to the original write handle, - // which we already applied the first batch 
of operations to. + self.meta = m.clone(); } } } /// Apply operations while allowing dropping of values fn absorb_second(&mut self, op: Operation, other: &Self) { - let inner = self; + struct DropGuard; + impl Drop for DropGuard { + fn drop(&mut self) { + unsafe { crate::shallow_copy::drop_copies(false) }; + } + } + let _guard = DropGuard; + unsafe { crate::shallow_copy::drop_copies(true) }; + // TODO: shallow copy second? + let hasher = other.data.hasher(); match op { Operation::Replace(key, value) => { - let v = inner.data.entry(key).or_insert_with(Values::new); + let v = self.data.entry(key).or_insert_with(Values::new); // we are going second, so we should drop! v.clear(); v.shrink_to_fit(); - v.push(value, hasher); + v.push(unsafe { value.shallow_copy_second() }, hasher); } Operation::Clear(key) => { - inner.data.entry(key).or_insert_with(Values::new).clear(); + self.data.entry(key).or_insert_with(Values::new).clear(); } Operation::Add(key, value) => { - inner - .data + self.data .entry(key) .or_insert_with(Values::new) - .push(value, hasher); + .push(unsafe { value.shallow_copy_second() }, hasher); } Operation::RemoveEntry(key) => { #[cfg(not(feature = "indexed"))] - inner.data.remove(&key); + self.data.remove(&key); #[cfg(feature = "indexed")] - inner.data.swap_remove(&key); + self.data.swap_remove(&key); } Operation::Purge => { - inner.data.clear(); + self.data.clear(); } #[cfg(feature = "eviction")] Operation::EmptyAt(indices) => { for &index in indices.iter().rev() { - inner.data.swap_remove_index(index); + self.data.swap_remove_index(index); } } Operation::RemoveValue(key, value) => { - if let Some(e) = inner.data.get_mut(&key) { + if let Some(e) = self.data.get_mut(&key) { // find the first entry that matches all fields - e.swap_remove(&value); + e.swap_remove(value.deref_self()); } } Operation::Retain(key, mut predicate) => { - if let Some(e) = inner.data.get_mut(&key) { + if let Some(e) = self.data.get_mut(&key) { let mut first = true; e.retain(move |v| { - let retain = predicate.eval(v, first); + let retain = predicate.eval(&*v, first); first = false; retain }); @@ -493,17 +460,17 @@ where } Operation::Fit(key) => match key { Some(ref key) => { - if let Some(e) = inner.data.get_mut(key) { + if let Some(e) = self.data.get_mut(key) { e.shrink_to_fit(); } } None => { - for value_set in inner.data.values_mut() { + for value_set in self.data.values_mut() { value_set.shrink_to_fit(); } } }, - Operation::Reserve(key, additional) => match inner.data.entry(key) { + Operation::Reserve(key, additional) => match self.data.entry(key) { Entry::Occupied(mut entry) => { entry.get_mut().reserve(additional, hasher); } @@ -512,28 +479,10 @@ where } }, Operation::MarkReady => { - inner.ready = true; + self.ready = true; } Operation::SetMeta(m) => { - inner.meta = m; - } - Operation::JustCloneRHandle => { - // This is applying the operation to the original read handle, - // which is empty, and needs to copy over all data from the - // write handle that we wrote to directly. - - // XXX: it really is too bad that we can't just .clone() the data here and save - // ourselves a lot of re-hashing, re-bucketization, etc. 
- inner.data.extend(other.data.iter().map(|(k, vs)| { - ( - k.clone(), - Values::from_iter( - vs.iter() - .map(|v| unsafe { ManuallyDrop::into_inner((&*v).shallow_copy()) }), - other.data.hasher(), - ), - ) - })); + self.meta = m; } } } @@ -541,13 +490,24 @@ where fn drop_first(self: Box) { // since the two copies are exactly equal, we need to make sure that we *don't* call the // destructors of any of the values that are in our map, as they'll all be called when the - // last read handle goes out of scope. to do so, we first clear w_handle, which won't drop - // any elements since its values are kept as ManuallyDrop: - // - // Safety: ManuallyDrop has the same layout as T. - let inner = - unsafe { Box::from_raw(Box::into_raw(self) as *mut Inner, M, S>) }; - drop(inner); + // last read handle goes out of scope. that's easy enough since none of them will be + // dropped by default. + } + + fn drop_second(self: Box) { + // when the second copy is dropped is where we want to _actually_ drop all the values in + // the map. we do that by setting drop_copies to true. we do it with a guard though to make + // sure that if drop panics we unset the thread-local! + + struct DropGuard; + impl Drop for DropGuard { + fn drop(&mut self) { + unsafe { crate::shallow_copy::drop_copies(false) }; + } + } + let _guard = DropGuard; + unsafe { crate::shallow_copy::drop_copies(true) }; + drop(self); } } @@ -555,7 +515,8 @@ impl Extend<(K, V)> for WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, M: 'static + Clone, { fn extend>(&mut self, iter: I) { @@ -571,7 +532,8 @@ impl Deref for WriteHandle where K: Eq + Hash + Clone, S: BuildHasher + Clone, - V: Eq + Hash + ShallowCopy, + V: ShallowCopy, + V::Target: Eq + Hash, M: 'static + Clone, { type Target = ReadHandle; diff --git a/evmap/tests/lib.rs b/evmap/tests/lib.rs index 39d25c9..8258473 100644 --- a/evmap/tests/lib.rs +++ b/evmap/tests/lib.rs @@ -27,7 +27,7 @@ fn it_works() { // since we're not using `meta`, we get () assert_match!(r.meta_get(&x.0), Some((None, ()))); - w.insert(x.0, x); + w.insert(x.0, x.1); // it is empty even after an add (we haven't refresh yet) assert_match!(r.get(&x.0), None); @@ -42,8 +42,7 @@ fn it_works() { Some((Some(1), ())) ); assert_match!( - r.get(&x.0) - .map(|rs| rs.iter().any(|v| v.0 == x.0 && v.1 == x.1)), + r.get(&x.0).map(|rs| rs.iter().any(|&v| v == x.1)), Some(true) ); @@ -57,8 +56,7 @@ fn it_works() { // if we purge, the readers still see the values w.purge(); assert_match!( - r.get(&x.0) - .map(|rs| rs.iter().any(|v| v.0 == x.0 && v.1 == x.1)), + r.get(&x.0).map(|rs| rs.iter().any(|&v| v == x.1)), Some(true) ); @@ -95,7 +93,7 @@ fn mapref() { assert_eq!(map.meta(), &()); } - w.insert(x.0, x); + w.insert(x.0, x.1); { let map = r.enter().unwrap(); @@ -117,11 +115,7 @@ fn mapref() { assert_eq!(map.get(&x.0).unwrap().len(), 1); assert_eq!(map[&x.0].len(), 1); assert_eq!(map.meta(), &()); - assert!(map - .get(&x.0) - .unwrap() - .iter() - .any(|v| v.0 == x.0 && v.1 == x.1)); + assert!(map.get(&x.0).unwrap().iter().any(|&v| v == x.1)); // non-existing records return None assert!(map.get(&'y').is_none()); @@ -130,11 +124,7 @@ fn mapref() { // if we purge, the readers still see the values w.purge(); - assert!(map - .get(&x.0) - .unwrap() - .iter() - .any(|v| v.0 == x.0 && v.1 == x.1)); + assert!(map.get(&x.0).unwrap().iter().any(|&v| v == x.1)); } // but once we refresh, things will be empty @@ -178,7 +168,7 @@ fn read_after_drop() { let x = 
('x', 42); let (mut w, r) = evmap::new(); - w.insert(x.0, x); + w.insert(x.0, x.1); w.publish(); assert_eq!(r.get(&x.0).map(|rs| rs.len()), Some(1)); @@ -204,7 +194,7 @@ fn clone_types() { r.meta_get(&*x).map(|(rs, m)| (rs.map(|rs| rs.len()), m)), Some((Some(1), ())) ); - assert_eq!(r.get(&*x).map(|rs| rs.iter().any(|v| *v == x)), Some(true)); + assert_eq!(r.get(&*x).map(|rs| rs.iter().any(|v| *v == *x)), Some(true)); } #[test] @@ -323,7 +313,7 @@ fn minimal_query() { w.insert(1, "b"); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(1)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"a")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "a")).unwrap()); } #[test] @@ -366,8 +356,8 @@ fn non_minimal_query() { w.insert(1, "c"); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(2)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"a")).unwrap()); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"b")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "a")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "b")).unwrap()); } #[test] @@ -379,7 +369,7 @@ fn absorb_negative_immediate() { w.publish(); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(1)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"b")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "b")).unwrap()); } #[test] @@ -392,7 +382,7 @@ fn absorb_negative_later() { w.publish(); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(1)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"b")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "b")).unwrap()); } #[test] @@ -402,8 +392,8 @@ fn absorb_multi() { w.publish(); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(2)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"a")).unwrap()); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"b")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "a")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "b")).unwrap()); w.remove_value(1, "a"); w.insert(1, "c"); @@ -411,7 +401,7 @@ fn absorb_multi() { w.publish(); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(1)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"b")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "b")).unwrap()); } #[test] @@ -425,7 +415,7 @@ fn empty() { assert_eq!(r.get(&1).map(|rs| rs.len()), None); assert_eq!(r.get(&2).map(|rs| rs.len()), Some(1)); - assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == &"c")).unwrap()); + assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == "c")).unwrap()); } #[test] @@ -488,7 +478,7 @@ fn empty_post_refresh() { assert_eq!(r.get(&1).map(|rs| rs.len()), None); assert_eq!(r.get(&2).map(|rs| rs.len()), Some(1)); - assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == &"c")).unwrap()); + assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == "c")).unwrap()); } #[test] @@ -526,9 +516,9 @@ fn replace() { w.publish(); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(1)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"x")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == "x")).unwrap()); assert_eq!(r.get(&2).map(|rs| rs.len()), Some(1)); - assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == &"c")).unwrap()); + assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == "c")).unwrap()); } #[test] @@ -542,9 +532,9 @@ fn replace_post_refresh() { w.publish(); assert_eq!(r.get(&1).map(|rs| rs.len()), Some(1)); - assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == &"x")).unwrap()); + assert!(r.get(&1).map(|rs| rs.iter().any(|r| r == 
"x")).unwrap()); assert_eq!(r.get(&2).map(|rs| rs.len()), Some(1)); - assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == &"c")).unwrap()); + assert!(r.get(&2).map(|rs| rs.iter().any(|r| r == "c")).unwrap()); } #[test] @@ -581,7 +571,7 @@ fn map_into() { w.insert(1, "x"); use std::collections::HashMap; - let copy: HashMap<_, Vec<_>> = r.map_into(|&k, vs| (k, Vec::from_iter(vs.iter().cloned()))); + let copy: HashMap<_, Vec<&'static str>> = r.map_into(|&k, vs| (k, Vec::from_iter(vs.iter()))); assert_eq!(copy.len(), 2); assert!(copy.contains_key(&1)); @@ -622,7 +612,7 @@ fn values() { .unwrap() .values() .map(|value_bag| { - let mut inner_items = value_bag.iter().copied().collect::>(); + let mut inner_items = Vec::from_iter(value_bag.into_iter()); inner_items.sort(); inner_items }) @@ -766,14 +756,14 @@ fn get_one() { let (mut w, r) = evmap::new(); - w.insert(x.0, x); - w.insert(x.0, x); + w.insert(x.0, x.1); + w.insert(x.0, x.1); assert_match!(r.get_one(&x.0), None); w.publish(); - assert_match!(r.get_one(&x.0).as_deref(), Some(('x', 42))); + assert_match!(r.get_one(&x.0).as_deref(), Some(x)); } #[test] diff --git a/evmap/tests/quick.rs b/evmap/tests/quick.rs index bb2a480..9aef1dc 100644 --- a/evmap/tests/quick.rs +++ b/evmap/tests/quick.rs @@ -93,11 +93,11 @@ where fn do_ops( ops: &[Op], evmap: &mut WriteHandle, - write_ref: &mut HashMap>, - read_ref: &mut HashMap>, + write_ref: &mut HashMap>, + read_ref: &mut HashMap>, ) where K: Hash + Eq + Clone, - V: Clone + evmap::ShallowCopy + Eq + Hash, + V: evmap::ShallowCopy + Clone + Eq + Hash, S: BuildHasher + Clone, { for op in ops { @@ -130,10 +130,13 @@ fn do_ops( } } -fn assert_maps_equivalent(a: &ReadHandle, b: &HashMap>) -> bool +fn assert_maps_equivalent( + a: &ReadHandle, + b: &HashMap>, +) -> bool where K: Clone + Hash + Eq + Debug, - V: Hash + Eq + Debug + Ord + Copy, + V: evmap::ShallowCopy + Hash + Eq + Debug + Ord + Copy, S: BuildHasher, { assert_eq!(a.len(), b.len()); @@ -150,7 +153,7 @@ where return true; }; for key in guard.keys() { - let mut ev_map_values: Vec = guard.get(key).unwrap().iter().copied().collect(); + let mut ev_map_values: Vec = guard.get(key).unwrap().iter().copied().collect(); ev_map_values.sort(); let mut map_values = b[key].clone(); map_values.sort(); diff --git a/left-right/src/lib.rs b/left-right/src/lib.rs index bf994d6..1a567b1 100644 --- a/left-right/src/lib.rs +++ b/left-right/src/lib.rs @@ -232,6 +232,11 @@ pub trait Absorb { /// /// Defaults to calling `Self::drop`. fn drop_first(self: Box) {} + + /// Drop the second of the two copies. + /// + /// Defaults to calling `Self::drop`. + fn drop_second(self: Box) {} } /// Construct a new write and read handle pair from an empty data structure.