Implement the array.copy Wasm GC instruction (#9389)

* Implement the `array.copy` Wasm GC instruction

This also involved fixing some `VMGcRef` initialization and GC barrier code for
globals. That in turn involved making global initialization fallible, which
meant that it needed to be pulled out of `vmctx` initialization and put into
instance initialization.

Co-Authored-By: Alex Crichton <[email protected]>

* fix a compile error caused by code being moved to a place where a trait method was no longer in scope

* use the result of `MaybeUninit::write`

---------

Co-authored-by: Alex Crichton <[email protected]>
fitzgen and alexcrichton authored Oct 7, 2024
1 parent 7ee9198 commit 818966f
Showing 15 changed files with 326 additions and 131 deletions.
23 changes: 3 additions & 20 deletions crates/cranelift/src/func_environ.rs
@@ -2020,36 +2020,19 @@ impl<'module_environment> crate::translate::FuncEnvironment
fn translate_array_copy(
&mut self,
builder: &mut FunctionBuilder,
dst_array_type_index: TypeIndex,
_dst_array_type_index: TypeIndex,
dst_array: ir::Value,
dst_index: ir::Value,
src_array_type_index: TypeIndex,
_src_array_type_index: TypeIndex,
src_array: ir::Value,
src_index: ir::Value,
len: ir::Value,
) -> WasmResult<()> {
let libcall = gc::builtins::array_copy(self, builder.func)?;
let vmctx = self.vmctx_val(&mut builder.cursor());
let dst_array_type_index = self.module.types[dst_array_type_index];
let dst_array_type_index = builder
.ins()
.iconst(I32, i64::from(dst_array_type_index.as_u32()));
let src_array_type_index = self.module.types[src_array_type_index];
let src_array_type_index = builder
.ins()
.iconst(I32, i64::from(src_array_type_index.as_u32()));
builder.ins().call(
libcall,
&[
vmctx,
dst_array_type_index,
dst_array,
dst_index,
src_array_type_index,
src_array,
src_index,
len,
],
&[vmctx, dst_array, dst_index, src_array, src_index, len],
);
Ok(())
}
2 changes: 0 additions & 2 deletions crates/environ/src/builtin.rs
@@ -148,10 +148,8 @@ macro_rules! foreach_builtin_function {
#[cfg(feature = "gc")]
array_copy(
vmctx: vmctx,
dst_array_interned_type_index: i32,
dst_array: reference,
dst_index: i32,
src_array_interned_type_index: i32,
src_array: reference,
src_index: i32,
len: i32
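Putting the two hunks above together: the Cranelift translation now hands the libcall just the two array references plus indices and length, without the interned type indices, presumably because the runtime can recover each array's element layout from the objects themselves (note `ArrayRef::layout` becoming `pub(crate)` in the next file). As a rough orientation for the rest of the diff, here is a toy model of what an `array.copy` implementation has to do; `Trap` and the `Vec<Vec<T>>` "heap" are invented stand-ins, not Wasmtime types:

```rust
// Toy model of `array.copy` semantics; `Trap` and the per-array `Vec<T>`
// storage are invented for illustration and are not Wasmtime's types.
#[derive(Debug)]
struct Trap(&'static str);

fn array_copy<T: Copy>(
    heap: &mut [Vec<T>],
    dst: usize,
    dst_index: usize,
    src: usize,
    src_index: usize,
    len: usize,
) -> Result<(), Trap> {
    // Bounds checks come first: either range running off the end of its
    // array is a trap, and the index arithmetic itself must not overflow.
    let dst_end = dst_index.checked_add(len).ok_or(Trap("length overflow"))?;
    let src_end = src_index.checked_add(len).ok_or(Trap("length overflow"))?;
    if dst_end > heap[dst].len() || src_end > heap[src].len() {
        return Err(Trap("out of bounds array access"));
    }

    if dst == src {
        // Source and destination are the same array: overlapping ranges must
        // behave like `memmove`, which `copy_within` provides.
        heap[dst].copy_within(src_index..src_end, dst_index);
    } else {
        // Distinct arrays cannot overlap. A scratch copy keeps this sketch
        // simple; the real heap avoids it by handing out two disjoint
        // mutable borrows instead (see `gc_object_data_pair` below).
        let tmp: Vec<T> = heap[src][src_index..src_end].to_vec();
        heap[dst][dst_index..dst_end].copy_from_slice(&tmp);
    }
    Ok(())
}

fn main() -> Result<(), Trap> {
    let mut heap = vec![vec![1_i32, 2, 3, 4], vec![0; 4]];
    array_copy(&mut heap, 1, 0, 0, 1, 3)?; // copy between distinct arrays
    assert_eq!(heap[1], [2, 3, 4, 0]);
    array_copy(&mut heap, 0, 1, 0, 0, 3)?; // overlapping copy within heap[0]
    assert_eq!(heap[0], [1, 1, 2, 3]);
    Ok(())
}
```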
2 changes: 1 addition & 1 deletion crates/wasmtime/src/runtime/gc/enabled/arrayref.rs
@@ -568,7 +568,7 @@ impl ArrayRef {
Ok(gc_ref.as_arrayref_unchecked())
}

fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
pub(crate) fn layout(&self, store: &AutoAssertNoGc<'_>) -> Result<GcArrayLayout> {
assert!(self.comes_from_same_store(&store));
let type_index = self.type_index(store)?;
let layout = store
8 changes: 8 additions & 0 deletions crates/wasmtime/src/runtime/gc/enabled/rooting.rs
@@ -905,6 +905,14 @@ impl<T: GcRef> Rooted<T> {
b: &impl RootedGcRef<T>,
) -> Result<bool> {
let store = store.as_context().0;
Self::_ref_eq(store, a, b)
}

pub(crate) fn _ref_eq(
store: &StoreOpaque,
a: &impl RootedGcRef<T>,
b: &impl RootedGcRef<T>,
) -> Result<bool> {
let a = a.try_gc_ref(store)?;
let b = b.try_gc_ref(store)?;
Ok(a == b)
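The split above follows a common Rust pattern: keep the public, generic entry point thin and route it through a crate-internal function that takes the concrete store type, presumably so runtime code that already holds a `&StoreOpaque` (such as the new `array.copy` libcall) can reuse the check. A minimal sketch of the pattern with invented stand-in types; the equality check itself is simplified here and is not the real GC-reference comparison:

```rust
use std::marker::PhantomData;

// Stand-ins for the real types; none of this is Wasmtime's actual API.
struct StoreOpaque;

trait AsContext {
    fn as_context(&self) -> &StoreOpaque;
}

impl AsContext for StoreOpaque {
    fn as_context(&self) -> &StoreOpaque {
        self
    }
}

struct Rooted<T> {
    index: usize,
    _marker: PhantomData<T>,
}

impl<T> Rooted<T> {
    /// Public entry point: generic over however the caller holds the store.
    fn ref_eq(store: &impl AsContext, a: &Rooted<T>, b: &Rooted<T>) -> bool {
        Self::ref_eq_inner(store.as_context(), a, b)
    }

    /// Crate-internal worker taking the concrete store type, so internal
    /// callers that already have a `&StoreOpaque` skip the generic wrapper.
    fn ref_eq_inner(_store: &StoreOpaque, a: &Rooted<T>, b: &Rooted<T>) -> bool {
        a.index == b.index
    }
}

fn main() {
    let store = StoreOpaque;
    let a = Rooted::<u32> { index: 7, _marker: PhantomData };
    let b = Rooted::<u32> { index: 7, _marker: PhantomData };
    assert!(Rooted::ref_eq(&store, &a, &b));
}
```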
8 changes: 8 additions & 0 deletions crates/wasmtime/src/runtime/store.rs
@@ -1614,6 +1614,8 @@ impl StoreOpaque {
return;
}

log::trace!("============ Begin GC ===========");

// Take the GC roots out of `self` so we can borrow it mutably but still
// call mutable methods on `self`.
let mut roots = core::mem::take(&mut self.gc_roots_list);
@@ -1624,6 +1626,8 @@
// Restore the GC roots for the next GC.
roots.clear();
self.gc_roots_list = roots;

log::trace!("============ End GC ===========");
}

#[inline]
@@ -1662,6 +1666,8 @@ impl StoreOpaque {
return;
}

log::trace!("============ Begin Async GC ===========");

// Take the GC roots out of `self` so we can borrow it mutably but still
// call mutable methods on `self`.
let mut roots = std::mem::take(&mut self.gc_roots_list);
@@ -1674,6 +1680,8 @@
// Restore the GC roots for the next GC.
roots.clear();
self.gc_roots_list = roots;

log::trace!("============ End Async GC ===========");
}

#[inline]
9 changes: 4 additions & 5 deletions crates/wasmtime/src/runtime/vm/const_expr.rs
@@ -38,11 +38,7 @@ impl<'a> ConstEvalContext<'a> {
.defined_or_imported_global_ptr(index)
.as_ref()
.unwrap();
let mut gc_store = store.unwrap_gc_store_mut();
Ok(global.to_val_raw(
&mut gc_store,
self.instance.env_module().globals[index].wasm_ty,
))
global.to_val_raw(store, self.instance.env_module().globals[index].wasm_ty)
}
}

@@ -169,6 +165,8 @@ impl ConstExprEvaluator {
context: &mut ConstEvalContext<'_>,
expr: &ConstExpr,
) -> Result<ValRaw> {
log::trace!("evaluating const expr: {:?}", expr);

self.stack.clear();

let mut store = (*context.instance.store()).store_opaque_mut();
@@ -372,6 +370,7 @@ impl ConstExprEvaluator {
}

if self.stack.len() == 1 {
log::trace!("const expr evaluated to {:?}", self.stack[0]);
Ok(self.stack[0])
} else {
bail!(
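The evaluator above is a small stack machine: it clears its stack, folds the constant instructions over it, and only succeeds if exactly one value is left. A toy version of that shape, with an invented two-opcode instruction set (Wasmtime's real `ConstExpr` covers the full Wasm constant-instruction set, including GC allocations, which is why evaluation is fallible and needs the store):

```rust
// Toy stack-based const-expression evaluator; `ConstOp` and the string
// errors are invented for illustration.
enum ConstOp {
    I32Const(i32),
    I32Add,
}

fn eval(ops: &[ConstOp]) -> Result<i32, String> {
    let mut stack: Vec<i32> = Vec::new();
    for op in ops {
        match op {
            ConstOp::I32Const(x) => stack.push(*x),
            ConstOp::I32Add => {
                let b = stack.pop().ok_or("stack underflow")?;
                let a = stack.pop().ok_or("stack underflow")?;
                stack.push(a.wrapping_add(b));
            }
        }
    }
    // Mirror the check at the end of `ConstExprEvaluator::eval`: a valid
    // const expression leaves exactly one value behind.
    if stack.len() == 1 {
        Ok(stack[0])
    } else {
        Err(format!("expected 1 result, found {}", stack.len()))
    }
}

fn main() -> Result<(), String> {
    let init = [ConstOp::I32Const(40), ConstOp::I32Const(2), ConstOp::I32Add];
    assert_eq!(eval(&init)?, 42);
    Ok(())
}
```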
33 changes: 33 additions & 0 deletions crates/wasmtime/src/runtime/vm/gc.rs
@@ -23,6 +23,7 @@ pub use i31::*;
use crate::prelude::*;
use crate::runtime::vm::GcHeapAllocationIndex;
use core::alloc::Layout;
use core::mem::MaybeUninit;
use core::ptr;
use core::{any::Any, num::NonZeroUsize};
use wasmtime_environ::{GcArrayLayout, GcStructLayout, VMGcKind, VMSharedTypeIndex};
@@ -96,6 +97,19 @@ impl GcStore {
}
}

/// Write the `source` GC reference into the uninitialized `destination`
/// slot, performing write barriers as necessary.
pub fn init_gc_ref(
&mut self,
destination: &mut MaybeUninit<Option<VMGcRef>>,
source: Option<&VMGcRef>,
) {
// Initialize the destination to `None`, at which point the regular GC
// write barrier is safe to reuse.
let destination = destination.write(None);
self.write_gc_ref(destination, source);
}

/// Write the `source` GC reference into the `destination` slot, performing
/// write barriers as necessary.
pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
@@ -205,6 +219,18 @@ impl GcStore {
self.gc_heap.gc_object_data(gc_ref)
}

/// Get the object datas for the given pair of object references.
///
/// Panics if `a` and `b` are the same reference or either is out of bounds.
pub fn gc_object_data_pair(
&mut self,
a: &VMGcRef,
b: &VMGcRef,
) -> (VMGcObjectDataMut<'_>, VMGcObjectDataMut<'_>) {
assert_ne!(a, b);
self.gc_heap.gc_object_data_pair(a, b)
}

/// Allocate an uninitialized array with the given type index.
///
/// This does NOT check that the index is currently allocated in the types
@@ -299,6 +325,13 @@ unsafe impl GcHeap for DisabledGcHeap {
fn gc_object_data(&mut self, _gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_> {
unreachable!()
}
fn gc_object_data_pair(
&mut self,
_a: &VMGcRef,
_b: &VMGcRef,
) -> (VMGcObjectDataMut<'_>, VMGcObjectDataMut<'_>) {
unreachable!()
}
fn alloc_uninit_array(
&mut self,
_ty: VMSharedTypeIndex,
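The new `GcStore::init_gc_ref` above leans on the fact that `MaybeUninit::write` both initializes the slot and returns a `&mut T` to the now-valid value, so the ordinary write barrier can run against it without ever reading uninitialized memory. A standalone sketch of that pattern, with a trivial stand-in for the barrier (the function names and `Option<u32>` payload are invented):

```rust
use core::mem::MaybeUninit;

// Stand-in for `GcStore::write_gc_ref`: inspects the old value (e.g. to
// dec-ref it) before storing the new one.
fn write_ref(destination: &mut Option<u32>, source: Option<u32>) {
    let _old = destination.take();
    *destination = source;
}

// Stand-in for `GcStore::init_gc_ref`: initialize the slot to `None` first;
// `write` returns `&mut Option<u32>`, at which point the regular barrier is
// safe to reuse because it sees `None` as the "old" value instead of garbage.
fn init_ref(destination: &mut MaybeUninit<Option<u32>>, source: Option<u32>) {
    let destination: &mut Option<u32> = destination.write(None);
    write_ref(destination, source);
}

fn main() {
    let mut slot = MaybeUninit::<Option<u32>>::uninit();
    init_ref(&mut slot, Some(42));
    // SAFETY: `init_ref` fully initialized `slot`.
    let value = unsafe { slot.assume_init() };
    assert_eq!(value, Some(42));
}
```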
22 changes: 22 additions & 0 deletions crates/wasmtime/src/runtime/vm/gc/enabled/data.rs
@@ -133,6 +133,28 @@ impl<'a> VMGcObjectDataMut<'a> {
val.write_le(into.try_into().unwrap());
}

/// Get a slice of this object's data.
///
/// Panics on out-of-bounds accesses.
#[inline]
pub fn slice(&self, offset: u32, len: u32) -> &[u8] {
let start = usize::try_from(offset).unwrap();
let len = usize::try_from(len).unwrap();
let end = start.checked_add(len).unwrap();
self.data.get(start..end).expect("out of bounds slice")
}

/// Get a mutable slice of this object's data.
///
/// Panics on out-of-bounds accesses.
#[inline]
pub fn slice_mut(&mut self, offset: u32, len: u32) -> &mut [u8] {
let start = usize::try_from(offset).unwrap();
let len = usize::try_from(len).unwrap();
let end = start.checked_add(len).unwrap();
self.data.get_mut(start..end).expect("out of bounds slice")
}

/// Copy the given slice into this object's data at the given offset.
///
/// Panics on out-of-bounds accesses.
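The bounds handling in `slice`/`slice_mut` above is worth spelling out: the `u32` offset and length are widened to `usize`, the end is computed with `checked_add` so it cannot wrap around, and a failed range lookup becomes a panic rather than an out-of-bounds read. A small standalone version that returns `None` instead of panicking (the helper name is invented):

```rust
// Overflow-checked sub-slicing; `checked_subslice` is a hypothetical helper,
// not part of `VMGcObjectDataMut`.
fn checked_subslice(data: &[u8], offset: u32, len: u32) -> Option<&[u8]> {
    let start = usize::try_from(offset).ok()?;
    let len = usize::try_from(len).ok()?;
    let end = start.checked_add(len)?; // `start + len` must not wrap
    data.get(start..end)
}

fn main() {
    let data = [1u8, 2, 3, 4];
    assert_eq!(checked_subslice(&data, 1, 2), Some(&data[1..3]));
    assert_eq!(checked_subslice(&data, 3, 2), None); // runs past the end
    assert_eq!(checked_subslice(&data, u32::MAX, u32::MAX), None); // no wraparound
}
```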
52 changes: 44 additions & 8 deletions crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs
@@ -49,7 +49,7 @@ use crate::runtime::vm::{
ExternRefHostDataId, ExternRefHostDataTable, GarbageCollection, GcHeap, GcHeapObject,
GcProgress, GcRootsIter, GcRuntime, Mmap, TypedGcRef, VMExternRef, VMGcHeader, VMGcRef,
};
use core::ops::{Deref, DerefMut};
use core::ops::{Deref, DerefMut, Range};
use core::{
alloc::Layout,
any::Any,
@@ -132,6 +132,16 @@ impl DrcHeap {
.dealloc(gc_ref.as_heap_index().unwrap(), layout);
}

fn object_range(&self, gc_ref: &VMGcRef) -> Range<usize> {
let start = gc_ref.as_heap_index().unwrap().get();
let start = usize::try_from(start).unwrap();
let size = self
.index::<VMDrcHeader>(gc_ref.as_typed_unchecked())
.object_size();
let end = start.checked_add(size).unwrap();
start..end
}

/// Index into this heap and get a shared reference to the `T` that `gc_ref`
/// points to.
///
@@ -617,16 +627,42 @@ unsafe impl GcHeap for DrcHeap {
}

fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_> {
let start = gc_ref.as_heap_index().unwrap().get();
let start = usize::try_from(start).unwrap();
let size = self
.index::<VMDrcHeader>(gc_ref.as_typed_unchecked())
.object_size();
let end = start + size;
let data = &mut self.heap_slice_mut()[start..end];
let range = self.object_range(gc_ref);
let data = &mut self.heap_slice_mut()[range];
VMGcObjectDataMut::new(data)
}

fn gc_object_data_pair(
&mut self,
a: &VMGcRef,
b: &VMGcRef,
) -> (VMGcObjectDataMut<'_>, VMGcObjectDataMut<'_>) {
assert_ne!(a, b);

let a_range = self.object_range(a);
let b_range = self.object_range(b);

// Assert that the two objects do not overlap.
assert!(a_range.start <= a_range.end);
assert!(b_range.start <= b_range.end);
assert!(a_range.end <= b_range.start || b_range.end <= a_range.start);

let (a_data, b_data) = if a_range.start < b_range.start {
let (a_half, b_half) = self.heap_slice_mut().split_at_mut(b_range.start);
let b_len = b_range.end - b_range.start;
(&mut a_half[a_range], &mut b_half[..b_len])
} else {
let (b_half, a_half) = self.heap_slice_mut().split_at_mut(a_range.start);
let a_len = a_range.end - a_range.start;
(&mut a_half[..a_len], &mut b_half[b_range])
};

(
VMGcObjectDataMut::new(a_data),
VMGcObjectDataMut::new(b_data),
)
}

fn alloc_uninit_array(
&mut self,
ty: VMSharedTypeIndex,
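The interesting part of `gc_object_data_pair` above is how it hands out two mutable views of one heap without violating Rust's aliasing rules: after asserting that the two object ranges are disjoint, it splits the heap slice at the start of the later object and carves one range out of each half. Here is that trick in isolation on a plain byte slice; the function name is invented, and the ranges play the role of `object_range` results:

```rust
use core::ops::Range;

// Hypothetical helper showing the `split_at_mut` borrow-splitting trick.
fn disjoint_pair_mut<'a>(
    heap: &'a mut [u8],
    a: Range<usize>,
    b: Range<usize>,
) -> (&'a mut [u8], &'a mut [u8]) {
    // The two ranges must not overlap (mirrors the `assert!`s in the diff).
    assert!(a.end <= b.start || b.end <= a.start);

    if a.start < b.start {
        // `a` lies entirely in the left half, `b` entirely in the right.
        let (a_half, b_half) = heap.split_at_mut(b.start);
        let b_len = b.end - b.start;
        (&mut a_half[a], &mut b_half[..b_len])
    } else {
        let (b_half, a_half) = heap.split_at_mut(a.start);
        let a_len = a.end - a.start;
        (&mut a_half[..a_len], &mut b_half[b])
    }
}

fn main() {
    let mut heap = [0u8; 16];
    let (obj_a, obj_b) = disjoint_pair_mut(&mut heap, 0..4, 8..12);
    obj_a.fill(1);
    obj_b.copy_from_slice(&[9, 9, 9, 9]);
    assert_eq!(&heap[..4], &[1, 1, 1, 1]);
    assert_eq!(&heap[8..12], &[9, 9, 9, 9]);
}
```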
16 changes: 8 additions & 8 deletions crates/wasmtime/src/runtime/vm/gc/gc_runtime.rs
@@ -285,15 +285,15 @@ pub unsafe trait GcHeap: 'static + Send + Sync {
/// Panics on out-of-bounds accesses.
fn gc_object_data(&mut self, gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_>;

/// Allocate a GC-managed array of the given type and length.
///
/// The array's elements are left uninitialized. It is the caller's
/// responsibility to initialize them before exposing the array to Wasm or
/// triggering a GC. Failure to do this is memory safe, but may result in
/// general failures such as panics or incorrect results.
///
/// Return values:
/// Get a pair of mutable borrows of the given objects' data.
///
/// Panics if `a == b` or on out-of-bounds accesses.
fn gc_object_data_pair(
&mut self,
a: &VMGcRef,
b: &VMGcRef,
) -> (VMGcObjectDataMut<'_>, VMGcObjectDataMut<'_>);

/// * `Ok(Some(_))`: The allocation was successful.
///
/// * `Ok(None)`: There is currently no available space for this
34 changes: 6 additions & 28 deletions crates/wasmtime/src/runtime/vm/instance.rs
@@ -1318,34 +1318,12 @@ impl Instance {
ptr = ptr.add(1);
}

// Initialize the defined globals
let mut const_evaluator = ConstExprEvaluator::default();
self.initialize_vmctx_globals(&mut const_evaluator, module);
}

unsafe fn initialize_vmctx_globals(
&mut self,
const_evaluator: &mut ConstExprEvaluator,
module: &Module,
) {
for (index, init) in module.global_initializers.iter() {
let mut context = ConstEvalContext::new(self);
let raw = const_evaluator
.eval(&mut context, init)
.expect("should be a valid const expr");

let to = self.global_ptr(index);
let wasm_ty = module.globals[module.global_index(index)].wasm_ty;

#[cfg(feature = "wmemcheck")]
if index.index() == 0 && wasm_ty == wasmtime_environ::WasmValType::I32 {
if let Some(wmemcheck) = &mut self.wmemcheck_state {
let size = usize::try_from(raw.get_i32()).unwrap();
wmemcheck.set_stack_size(size);
}
}

ptr::write(to, VMGlobalDefinition::from_val_raw(wasm_ty, raw));
// Zero-initialize the globals so that nothing is uninitialized memory
// after this function returns. The globals are actually initialized
// with their const expression initializers after the instance is fully
// allocated.
for (index, _init) in module.global_initializers.iter() {
ptr::write(self.global_ptr(index), VMGlobalDefinition::new());
}
}

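The diff above is the other half of the commit-message note about fallible global initialization: during vmctx setup the global slots are merely zeroed, and the real const-expression evaluation happens later, once the instance is fully allocated and an error has somewhere to go. A sketch of that two-phase shape with invented types (`GlobalDefinition` and closure initializers stand in for the real `VMGlobalDefinition` and `ConstExpr`s):

```rust
// Two-phase initialization sketch; all types here are invented stand-ins.
#[derive(Clone, Copy, Default, Debug, PartialEq)]
struct GlobalDefinition(u64);

struct Instance {
    globals: Vec<GlobalDefinition>,
}

impl Instance {
    /// Phase 1 (infallible, during raw allocation): zero every global slot
    /// so nothing is left as uninitialized memory.
    fn allocate(num_globals: usize) -> Instance {
        Instance {
            globals: vec![GlobalDefinition::default(); num_globals],
        }
    }

    /// Phase 2 (fallible, after the instance exists): evaluate each
    /// initializer and overwrite the corresponding slot.
    fn initialize_globals(
        &mut self,
        initializers: &[fn() -> Result<u64, String>],
    ) -> Result<(), String> {
        for (slot, init) in self.globals.iter_mut().zip(initializers) {
            *slot = GlobalDefinition(init()?);
        }
        Ok(())
    }
}

fn main() -> Result<(), String> {
    let mut instance = Instance::allocate(2);
    let inits: [fn() -> Result<u64, String>; 2] = [|| Ok(1), || Ok(2)];
    instance.initialize_globals(&inits)?;
    assert_eq!(instance.globals, [GlobalDefinition(1), GlobalDefinition(2)]);
    Ok(())
}
```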