Auto merge of #42565 - murarth:rc-from-slice, r=aturon
Implement From<&[T]> and others for Arc/Rc (RFC 1845)

* Implements `From<&[T]>`, `From<&str>`, `From<String>`, `From<Box<T>>` (where `T: ?Sized`), and `From<Vec<T>>` for `Arc`/`Rc` (usage sketch below)
* Removes `rustc_private`-marked methods `Rc::__from_array` and `Rc::__from_str`, replacing their use with `Rc::from`

Tracking issue: #40475
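
For reference, a minimal caller-side sketch of the conversions this adds (illustrative only, not part of the diff; assumes a toolchain where the `shared_from_slice` impls are available):

```rust
// Illustrative only: the new conversions seen from the caller's side.
use std::rc::Rc;
use std::sync::Arc;

fn main() {
    // From<&str> and From<String>
    let a: Arc<str> = Arc::from("foo");
    let b: Arc<str> = Arc::from(String::from("bar"));

    // From<&[T]> clones the elements; From<Vec<T>> moves them.
    let c: Arc<[u32]> = Arc::from(&[1, 2, 3][..]);
    let d: Arc<[u32]> = Arc::from(vec![4, 5, 6]);

    // From<Box<T>> works for unsized T too (boxed slices, boxed trait objects).
    let e: Arc<[u32]> = Arc::from(vec![7, 8, 9].into_boxed_slice());

    // The same impls are added for Rc.
    let f: Rc<str> = Rc::from("baz");

    assert_eq!(&a[..], "foo");
    assert_eq!(&b[..], "bar");
    assert_eq!(&c[..], [1, 2, 3]);
    assert_eq!(&d[..], [4, 5, 6]);
    assert_eq!(&e[..], [7, 8, 9]);
    assert_eq!(&f[..], "baz");
}
```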
bors committed Aug 23, 2017
2 parents 2bb8fca + 8e0d01b commit 560b6ca
Showing 3 changed files with 587 additions and 66 deletions.
src/liballoc/arc.rs: 295 additions & 5 deletions (300 changes)
@@ -16,16 +16,13 @@
 //!
 //! [arc]: struct.Arc.html
 
-use boxed::Box;
-
 use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
 use core::intrinsics::abort;
-use core::mem;
-use core::mem::uninitialized;
+use core::mem::{self, size_of_val, uninitialized};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
 use core::ptr::{self, Shared};
@@ -34,7 +31,10 @@ use core::hash::{Hash, Hasher};
 use core::{isize, usize};
 use core::convert::From;
 
-use heap::{Heap, Alloc, Layout};
+use heap::{Heap, Alloc, Layout, box_free};
+use boxed::Box;
+use string::String;
+use vec::Vec;
 
 /// A soft limit on the amount of references that may be made to an `Arc`.
 ///
@@ -532,6 +532,141 @@ impl<T: ?Sized> Arc<T> {
     }
 }
 
+impl<T: ?Sized> Arc<T> {
+    // Allocates an `ArcInner<T>` with sufficient space for an unsized value
+    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
+        // Create a fake ArcInner to find allocation size and alignment
+        let fake_ptr = ptr as *mut ArcInner<T>;
+
+        let layout = Layout::for_value(&*fake_ptr);
+
+        let mem = Heap.alloc(layout)
+            .unwrap_or_else(|e| Heap.oom(e));
+
+        // Initialize the real ArcInner
+        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+
+        ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
+        ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
+
+        inner
+    }
+
+    fn from_box(v: Box<T>) -> Arc<T> {
+        unsafe {
+            let bptr = Box::into_raw(v);
+
+            let value_size = size_of_val(&*bptr);
+            let ptr = Self::allocate_for_ptr(bptr);
+
+            // Copy value as bytes
+            ptr::copy_nonoverlapping(
+                bptr as *const T as *const u8,
+                &mut (*ptr).data as *mut _ as *mut u8,
+                value_size);
+
+            // Free the allocation without dropping its contents
+            box_free(bptr);
+
+            Arc { ptr: Shared::new_unchecked(ptr) }
+        }
+    }
+}
+
+// Sets the data pointer of a `?Sized` raw pointer.
+//
+// For a slice/trait object, this sets the `data` field and leaves the rest
+// unchanged. For a sized raw pointer, this simply sets the pointer.
+unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
+    ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+    ptr
+}
+
+impl<T> Arc<[T]> {
+    // Copy elements from slice into newly allocated Arc<[T]>
+    //
+    // Unsafe because the caller must either take ownership or bind `T: Copy`
+    unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
+        let v_ptr = v as *const [T];
+        let ptr = Self::allocate_for_ptr(v_ptr);
+
+        ptr::copy_nonoverlapping(
+            v.as_ptr(),
+            &mut (*ptr).data as *mut [T] as *mut T,
+            v.len());
+
+        Arc { ptr: Shared::new_unchecked(ptr) }
+    }
+}
+
+// Specialization trait used for From<&[T]>
+trait ArcFromSlice<T> {
+    fn from_slice(slice: &[T]) -> Self;
+}
+
+impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
+    #[inline]
+    default fn from_slice(v: &[T]) -> Self {
+        // Panic guard while cloning T elements.
+        // In the event of a panic, elements that have been written
+        // into the new ArcInner will be dropped, then the memory freed.
+        struct Guard<T> {
+            mem: *mut u8,
+            elems: *mut T,
+            layout: Layout,
+            n_elems: usize,
+        }
+
+        impl<T> Drop for Guard<T> {
+            fn drop(&mut self) {
+                use core::slice::from_raw_parts_mut;
+
+                unsafe {
+                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
+                    ptr::drop_in_place(slice);
+
+                    Heap.dealloc(self.mem, self.layout.clone());
+                }
+            }
+        }
+
+        unsafe {
+            let v_ptr = v as *const [T];
+            let ptr = Self::allocate_for_ptr(v_ptr);
+
+            let mem = ptr as *mut _ as *mut u8;
+            let layout = Layout::for_value(&*ptr);
+
+            // Pointer to first element
+            let elems = &mut (*ptr).data as *mut [T] as *mut T;
+
+            let mut guard = Guard{
+                mem: mem,
+                elems: elems,
+                layout: layout,
+                n_elems: 0,
+            };
+
+            for (i, item) in v.iter().enumerate() {
+                ptr::write(elems.offset(i as isize), item.clone());
+                guard.n_elems += 1;
+            }
+
+            // All clear. Forget the guard so it doesn't free the new ArcInner.
+            mem::forget(guard);
+
+            Arc { ptr: Shared::new_unchecked(ptr) }
+        }
+    }
+}
+
+impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
+    #[inline]
+    fn from_slice(v: &[T]) -> Self {
+        unsafe { Arc::copy_from_slice(v) }
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> Clone for Arc<T> {
     /// Makes a clone of the `Arc` pointer.
@@ -1216,8 +1351,56 @@ impl<T> From<T> for Arc<T> {
     }
 }
 
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<'a, T: Clone> From<&'a [T]> for Arc<[T]> {
+    #[inline]
+    fn from(v: &[T]) -> Arc<[T]> {
+        <Self as ArcFromSlice<T>>::from_slice(v)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<'a> From<&'a str> for Arc<str> {
+    #[inline]
+    fn from(v: &str) -> Arc<str> {
+        unsafe { mem::transmute(<Arc<[u8]>>::from(v.as_bytes())) }
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<String> for Arc<str> {
+    #[inline]
+    fn from(v: String) -> Arc<str> {
+        Arc::from(&v[..])
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: ?Sized> From<Box<T>> for Arc<T> {
+    #[inline]
+    fn from(v: Box<T>) -> Arc<T> {
+        Arc::from_box(v)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T> From<Vec<T>> for Arc<[T]> {
+    #[inline]
+    fn from(mut v: Vec<T>) -> Arc<[T]> {
+        unsafe {
+            let arc = Arc::copy_from_slice(&v);
+
+            // Allow the Vec to free its memory, but not destroy its contents
+            v.set_len(0);
+
+            arc
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
+    use std::boxed::Box;
     use std::clone::Clone;
     use std::sync::mpsc::channel;
     use std::mem::drop;
@@ -1520,6 +1703,113 @@ mod tests {
         }
         t.join().unwrap();
     }
+
+    #[test]
+    fn test_from_str() {
+        let r: Arc<str> = Arc::from("foo");
+
+        assert_eq!(&r[..], "foo");
+    }
+
+    #[test]
+    fn test_copy_from_slice() {
+        let s: &[u32] = &[1, 2, 3];
+        let r: Arc<[u32]> = Arc::from(s);
+
+        assert_eq!(&r[..], [1, 2, 3]);
+    }
+
+    #[test]
+    fn test_clone_from_slice() {
+        #[derive(Clone, Debug, Eq, PartialEq)]
+        struct X(u32);
+
+        let s: &[X] = &[X(1), X(2), X(3)];
+        let r: Arc<[X]> = Arc::from(s);
+
+        assert_eq!(&r[..], s);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_clone_from_slice_panic() {
+        use std::string::{String, ToString};
+
+        struct Fail(u32, String);
+
+        impl Clone for Fail {
+            fn clone(&self) -> Fail {
+                if self.0 == 2 {
+                    panic!();
+                }
+                Fail(self.0, self.1.clone())
+            }
+        }
+
+        let s: &[Fail] = &[
+            Fail(0, "foo".to_string()),
+            Fail(1, "bar".to_string()),
+            Fail(2, "baz".to_string()),
+        ];
+
+        // Should panic, but not cause memory corruption
+        let _r: Arc<[Fail]> = Arc::from(s);
+    }
+
+    #[test]
+    fn test_from_box() {
+        let b: Box<u32> = box 123;
+        let r: Arc<u32> = Arc::from(b);
+
+        assert_eq!(*r, 123);
+    }
+
+    #[test]
+    fn test_from_box_str() {
+        use std::string::String;
+
+        let s = String::from("foo").into_boxed_str();
+        let r: Arc<str> = Arc::from(s);
+
+        assert_eq!(&r[..], "foo");
+    }
+
+    #[test]
+    fn test_from_box_slice() {
+        let s = vec![1, 2, 3].into_boxed_slice();
+        let r: Arc<[u32]> = Arc::from(s);
+
+        assert_eq!(&r[..], [1, 2, 3]);
+    }
+
+    #[test]
+    fn test_from_box_trait() {
+        use std::fmt::Display;
+        use std::string::ToString;
+
+        let b: Box<Display> = box 123;
+        let r: Arc<Display> = Arc::from(b);
+
+        assert_eq!(r.to_string(), "123");
+    }
+
+    #[test]
+    fn test_from_box_trait_zero_sized() {
+        use std::fmt::Debug;
+
+        let b: Box<Debug> = box ();
+        let r: Arc<Debug> = Arc::from(b);
+
+        assert_eq!(format!("{:?}", r), "()");
+    }
+
+    #[test]
+    fn test_from_vec() {
+        let v = vec![1, 2, 3];
+        let r: Arc<[u32]> = Arc::from(v);
+
+        assert_eq!(&r[..], [1, 2, 3]);
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]