Do not deallocate disabled GC heaps via the instance allocator (#9180)
* Do not deallocate disabled GC heaps in the instance allocator

When the reference types proposal is disabled, we don't allocate a GC heap
from the instance allocator; instead we create a `DisabledGcHeap`. Rather
than returning that heap to the instance allocator when we're done with it
(the allocator wouldn't know what to do with a heap it never created), we
simply rely on `DisabledGcHeap`'s `Drop` implementation, as sketched below.

* check gc types instead of reference types
fitzgen authored Aug 28, 2024
1 parent eb896ad commit ccc21b2
Showing 3 changed files with 104 additions and 101 deletions.
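
A minimal, self-contained Rust sketch of the invariant this commit restores: a
GC heap goes back to the instance allocator only if the allocator created it,
while the dummy heap used when GC types are disabled is simply dropped. The
names here (`Allocator`, `RealGcHeap`, `run_store`) are simplified stand-ins,
not the actual Wasmtime types.

trait GcHeap {}

struct RealGcHeap;
impl GcHeap for RealGcHeap {}

struct DisabledGcHeap;
impl GcHeap for DisabledGcHeap {}

struct Allocator;

impl Allocator {
    fn allocate_gc_heap(&self) -> (usize, Box<dyn GcHeap>) {
        // Hand out an allocator-managed heap plus the slot index it lives at.
        (1, Box::new(RealGcHeap))
    }

    fn deallocate_gc_heap(&self, _index: usize, _heap: Box<dyn GcHeap>) {
        // Return an allocator-managed heap to the slot it came from.
    }
}

fn run_store(gc_types_enabled: bool, allocator: &Allocator) {
    // Allocation: only consult the allocator when GC types are enabled.
    let (index, heap): (usize, Box<dyn GcHeap>) = if gc_types_enabled {
        allocator.allocate_gc_heap()
    } else {
        (0, Box::new(DisabledGcHeap))
    };

    // Teardown: only heaps the allocator created go back to the allocator;
    // the dummy heap is just dropped.
    if gc_types_enabled {
        allocator.deallocate_gc_heap(index, heap);
    } else {
        drop(heap);
    }
}

fn main() {
    let allocator = Allocator;
    run_store(true, &allocator);
    run_store(false, &allocator);
}
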
14 changes: 9 additions & 5 deletions crates/wasmtime/src/runtime/store.rs
@@ -1519,10 +1519,7 @@ impl StoreOpaque {
 
     #[cfg(feature = "gc")]
     fn allocate_gc_store(engine: &Engine) -> Result<GcStore> {
-        let (index, heap) = if engine
-            .features()
-            .contains(wasmparser::WasmFeatures::REFERENCE_TYPES)
-        {
+        let (index, heap) = if engine.features().gc_types() {
             engine
                 .allocator()
                 .allocate_gc_heap(&**engine.gc_runtime())?
@@ -2716,7 +2713,14 @@ impl Drop for StoreOpaque {
 
             #[cfg(feature = "gc")]
             if let Some(gc_store) = self.gc_store.take() {
-                allocator.deallocate_gc_heap(gc_store.allocation_index, gc_store.gc_heap);
+                if self.engine.features().gc_types() {
+                    allocator.deallocate_gc_heap(gc_store.allocation_index, gc_store.gc_heap);
+                } else {
+                    // If GC types are not enabled, we are just dealing with a
+                    // dummy GC heap.
+                    debug_assert_eq!(gc_store.allocation_index, GcHeapAllocationIndex::default());
+                    debug_assert!(gc_store.gc_heap.as_any().is::<crate::vm::DisabledGcHeap>());
+                }
             }
 
             #[cfg(feature = "component-model")]
189 changes: 93 additions & 96 deletions crates/wasmtime/src/runtime/vm/gc.rs
@@ -235,108 +235,105 @@ impl GcStore {
 /// time or dynamically due to it being turned off in the `wasmtime::Config`).
 pub fn disabled_gc_heap() -> Box<dyn GcHeap> {
     return Box::new(DisabledGcHeap);
+}
 
-    struct DisabledGcHeap;
+pub(crate) struct DisabledGcHeap;
 
-    unsafe impl GcHeap for DisabledGcHeap {
-        fn as_any(&self) -> &dyn Any {
-            self
-        }
-        fn as_any_mut(&mut self) -> &mut dyn Any {
-            self
-        }
-        fn enter_no_gc_scope(&mut self) {}
-        fn exit_no_gc_scope(&mut self) {}
-        fn header(&self, _gc_ref: &VMGcRef) -> &VMGcHeader {
-            unreachable!()
-        }
-        fn clone_gc_ref(&mut self, _gc_ref: &VMGcRef) -> VMGcRef {
-            unreachable!()
-        }
-        fn write_gc_ref(
-            &mut self,
-            _host_data_table: &mut ExternRefHostDataTable,
-            _destination: &mut Option<VMGcRef>,
-            _source: Option<&VMGcRef>,
-        ) {
-            unreachable!()
-        }
-        fn expose_gc_ref_to_wasm(&mut self, _gc_ref: VMGcRef) {
-            unreachable!()
-        }
-        fn need_gc_before_entering_wasm(&self, _num_gc_refs: NonZeroUsize) -> bool {
-            unreachable!()
-        }
-        fn alloc_externref(
-            &mut self,
-            _host_data: ExternRefHostDataId,
-        ) -> Result<Option<VMExternRef>> {
-            bail!(
-                "GC support disabled either in the `Config` or at compile time \
-                 because the `gc` cargo feature was not enabled"
-            )
-        }
-        fn externref_host_data(&self, _externref: &VMExternRef) -> ExternRefHostDataId {
-            unreachable!()
-        }
-        fn alloc_uninit_struct(
-            &mut self,
-            _ty: wasmtime_environ::VMSharedTypeIndex,
-            _layout: &GcStructLayout,
-        ) -> Result<Option<VMStructRef>> {
-            bail!(
-                "GC support disabled either in the `Config` or at compile time \
-                 because the `gc` cargo feature was not enabled"
-            )
-        }
-        fn dealloc_uninit_struct(&mut self, _structref: VMStructRef) {
-            unreachable!()
-        }
-        fn gc_object_data(&mut self, _gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_> {
-            unreachable!()
-        }
-        fn alloc_uninit_array(
-            &mut self,
-            _ty: VMSharedTypeIndex,
-            _len: u32,
-            _layout: &GcArrayLayout,
-        ) -> Result<Option<VMArrayRef>> {
-            bail!(
-                "GC support disabled either in the `Config` or at compile time \
-                 because the `gc` cargo feature was not enabled"
-            )
-        }
-        fn dealloc_uninit_array(&mut self, _structref: VMArrayRef) {
-            unreachable!()
-        }
-        fn array_len(&self, _arrayref: &VMArrayRef) -> u32 {
-            unreachable!()
-        }
-        fn gc<'a>(
-            &'a mut self,
-            _roots: GcRootsIter<'a>,
-            _host_data_table: &'a mut ExternRefHostDataTable,
-        ) -> Box<dyn GarbageCollection<'a> + 'a> {
-            return Box::new(NoGc);
-
-            struct NoGc;
-
-            impl<'a> GarbageCollection<'a> for NoGc {
-                fn collect_increment(&mut self) -> GcProgress {
-                    GcProgress::Complete
-                }
-            }
-        }
-        unsafe fn vmctx_gc_heap_base(&self) -> *mut u8 {
-            ptr::null_mut()
-        }
-        unsafe fn vmctx_gc_heap_bound(&self) -> usize {
-            0
-        }
-        unsafe fn vmctx_gc_heap_data(&self) -> *mut u8 {
-            ptr::null_mut()
-        }
-        #[cfg(feature = "pooling-allocator")]
-        fn reset(&mut self) {}
-    }
-}
+unsafe impl GcHeap for DisabledGcHeap {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+    fn as_any_mut(&mut self) -> &mut dyn Any {
+        self
+    }
+    fn enter_no_gc_scope(&mut self) {}
+    fn exit_no_gc_scope(&mut self) {}
+    fn header(&self, _gc_ref: &VMGcRef) -> &VMGcHeader {
+        unreachable!()
+    }
+    fn clone_gc_ref(&mut self, _gc_ref: &VMGcRef) -> VMGcRef {
+        unreachable!()
+    }
+    fn write_gc_ref(
+        &mut self,
+        _host_data_table: &mut ExternRefHostDataTable,
+        _destination: &mut Option<VMGcRef>,
+        _source: Option<&VMGcRef>,
+    ) {
+        unreachable!()
+    }
+    fn expose_gc_ref_to_wasm(&mut self, _gc_ref: VMGcRef) {
+        unreachable!()
+    }
+    fn need_gc_before_entering_wasm(&self, _num_gc_refs: NonZeroUsize) -> bool {
+        unreachable!()
+    }
+    fn alloc_externref(&mut self, _host_data: ExternRefHostDataId) -> Result<Option<VMExternRef>> {
+        bail!(
+            "GC support disabled either in the `Config` or at compile time \
+             because the `gc` cargo feature was not enabled"
+        )
+    }
+    fn externref_host_data(&self, _externref: &VMExternRef) -> ExternRefHostDataId {
+        unreachable!()
+    }
+    fn alloc_uninit_struct(
+        &mut self,
+        _ty: wasmtime_environ::VMSharedTypeIndex,
+        _layout: &GcStructLayout,
+    ) -> Result<Option<VMStructRef>> {
+        bail!(
+            "GC support disabled either in the `Config` or at compile time \
+             because the `gc` cargo feature was not enabled"
+        )
+    }
+    fn dealloc_uninit_struct(&mut self, _structref: VMStructRef) {
+        unreachable!()
+    }
+    fn gc_object_data(&mut self, _gc_ref: &VMGcRef) -> VMGcObjectDataMut<'_> {
+        unreachable!()
+    }
+    fn alloc_uninit_array(
+        &mut self,
+        _ty: VMSharedTypeIndex,
+        _len: u32,
+        _layout: &GcArrayLayout,
+    ) -> Result<Option<VMArrayRef>> {
+        bail!(
+            "GC support disabled either in the `Config` or at compile time \
+             because the `gc` cargo feature was not enabled"
+        )
+    }
+    fn dealloc_uninit_array(&mut self, _structref: VMArrayRef) {
+        unreachable!()
+    }
+    fn array_len(&self, _arrayref: &VMArrayRef) -> u32 {
+        unreachable!()
+    }
+    fn gc<'a>(
+        &'a mut self,
+        _roots: GcRootsIter<'a>,
+        _host_data_table: &'a mut ExternRefHostDataTable,
+    ) -> Box<dyn GarbageCollection<'a> + 'a> {
+        return Box::new(NoGc);
+
+        struct NoGc;
+
+        impl<'a> GarbageCollection<'a> for NoGc {
+            fn collect_increment(&mut self) -> GcProgress {
+                GcProgress::Complete
+            }
+        }
+    }
+    unsafe fn vmctx_gc_heap_base(&self) -> *mut u8 {
+        ptr::null_mut()
+    }
+    unsafe fn vmctx_gc_heap_bound(&self) -> usize {
+        0
+    }
+    unsafe fn vmctx_gc_heap_data(&self) -> *mut u8 {
+        ptr::null_mut()
+    }
+    #[cfg(feature = "pooling-allocator")]
+    fn reset(&mut self) {}
+}
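
Making `DisabledGcHeap` a `pub(crate)` module-level type is what lets the
`Drop` code in store.rs (above) downcast through `as_any()` and assert that the
heap it is about to discard really is the dummy heap. A tiny sketch of that
check, assuming only the trait shape visible in this diff:

use std::any::Any;

trait GcHeap {
    fn as_any(&self) -> &dyn Any;
}

struct DisabledGcHeap;

impl GcHeap for DisabledGcHeap {
    fn as_any(&self) -> &dyn Any {
        self
    }
}

fn main() {
    let heap: Box<dyn GcHeap> = Box::new(DisabledGcHeap);
    // Mirrors the `debug_assert!` added to `StoreOpaque`'s `Drop` impl above.
    assert!(heap.as_any().is::<DisabledGcHeap>());
}
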
2 changes: 2 additions & 0 deletions crates/wasmtime/src/runtime/vm/instance/allocator/pooling/gc_heap_pool.rs
@@ -59,6 +59,7 @@ impl GcHeapPool {
                 self.max_gc_heaps
             )
         })?;
+        debug_assert_ne!(allocation_index, GcHeapAllocationIndex::default());
 
         let heap = match {
             let mut heaps = self.heaps.lock().unwrap();
@@ -76,6 +77,7 @@
 
     /// Deallocate a previously-allocated GC heap.
    pub fn deallocate(&self, allocation_index: GcHeapAllocationIndex, mut heap: Box<dyn GcHeap>) {
+        debug_assert_ne!(allocation_index, GcHeapAllocationIndex::default());
         heap.reset();
 
         // NB: Replace the heap before freeing the index. If we did it in the
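
The two new `debug_assert_ne!`s encode the sentinel convention implied by the
store.rs change: `GcHeapAllocationIndex::default()` is reserved for the dummy
heap that the pool never created, so the pool should neither hand that index
out nor be asked to free it. A hypothetical illustration; the index type and
pool internals below are simplified, not the actual Wasmtime definitions.

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
struct GcHeapAllocationIndex(u32);

struct GcHeapPool {
    next: u32,
}

impl GcHeapPool {
    fn allocate(&mut self) -> GcHeapAllocationIndex {
        // Start handing out indices at 1 so the default index (0) stays
        // reserved for heaps the pool does not own.
        self.next += 1;
        let index = GcHeapAllocationIndex(self.next);
        debug_assert_ne!(index, GcHeapAllocationIndex::default());
        index
    }

    fn deallocate(&mut self, index: GcHeapAllocationIndex) {
        // Being handed the default index would mean someone returned a heap
        // this pool never created.
        debug_assert_ne!(index, GcHeapAllocationIndex::default());
    }
}

fn main() {
    let mut pool = GcHeapPool { next: 0 };
    let index = pool.allocate();
    pool.deallocate(index);
}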
