From da6b8c13ff800edfce406efb768d4d7eaa4b2660 Mon Sep 17 00:00:00 2001
From: Wodann
Date: Tue, 28 Apr 2020 17:39:38 +0200
Subject: [PATCH 1/2] refactor(memory): reduce MemoryMapper API

The MemoryMapper trait now receives a pre-calculated mapping, reducing
the implementation size for anyone implementing the trait.
---
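For reviewers: a rough sketch of the call site this refactor enables (it
mirrors the assembly.rs hunk below; the variable names are illustrative and
`old_types`/`new_types` stand for the old and new assemblies' type sets):

    // Compute the type diff and the resulting field mappings once, up front.
    let mapping = Mapping::new(&old_types, &new_types);
    // Hand the pre-calculated mapping to the memory mapper; implementors of
    // `MemoryMapper<T>` no longer diff the type sets themselves.
    let deleted_objects = allocator.map_memory(mapping);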
 crates/mun_memory/src/gc/mark_sweep.rs | 123 +++++++-------------
 crates/mun_memory/src/lib.rs           |   2 +-
 crates/mun_memory/src/mapping.rs       | 149 +++++++++++++++++++++----
 crates/mun_runtime/src/assembly.rs     |   7 +-
 4 files changed, 167 insertions(+), 114 deletions(-)

diff --git a/crates/mun_memory/src/gc/mark_sweep.rs b/crates/mun_memory/src/gc/mark_sweep.rs
index 9df89a567..bda6fefb8 100644
--- a/crates/mun_memory/src/gc/mark_sweep.rs
+++ b/crates/mun_memory/src/gc/mark_sweep.rs
@@ -1,14 +1,13 @@
 use crate::{
     cast,
-    diff::{Diff, FieldDiff},
     gc::{Event, GcPtr, GcRuntime, Observer, RawGcPtr, Stats, TypeTrace},
-    mapping::{self, field_mapping, FieldMappingDesc, MemoryMapper},
-    TypeDesc, TypeFields, TypeLayout,
+    mapping::{self, FieldMapping, MemoryMapper},
+    TypeLayout,
 };
-use once_cell::unsync::OnceCell;
+use mapping::{Conversion, Mapping};
 use parking_lot::RwLock;
 use std::{
-    collections::{HashMap, HashSet, VecDeque},
+    collections::{HashMap, VecDeque},
     hash::Hash,
     ops::Deref,
     pin::Pin,
@@ -200,29 +199,17 @@ where
 impl<T, O> MemoryMapper<T> for MarkSweep<T, O>
 where
-    T: TypeDesc + TypeLayout + TypeFields<T> + TypeTrace + Clone + Eq + Hash,
+    T: TypeLayout + TypeTrace + Clone + Eq + Hash,
     O: Observer<Event = Event>,
 {
-    fn map_memory(&self, old: &[T], new: &[T], diff: &[Diff<T>]) -> Vec<GcPtr> {
-        // Collect all deleted types.
-        let deleted: HashSet<T> = diff
-            .iter()
-            .filter_map(|diff| {
-                if let Diff::Delete { index } = diff {
-                    Some(unsafe { old.get_unchecked(*index) }.clone())
-                } else {
-                    None
-                }
-            })
-            .collect();
-
+    fn map_memory(&self, mapping: Mapping<T>) -> Vec<GcPtr> {
         let mut objects = self.objects.write();

         // Determine which types are still allocated with deleted types
         let deleted = objects
             .iter()
             .filter_map(|(ptr, object_info)| {
-                if deleted.contains(&object_info.ty) {
+                if mapping.deletions.contains(&object_info.ty) {
                     Some(*ptr)
                 } else {
                     None
@@ -230,87 +217,53 @@
                 }
             })
             .collect();

-        for diff in diff.iter() {
-            match diff {
-                Diff::Delete { .. } => (), // Already handled
-                Diff::Edit {
-                    diff,
-                    old_index,
-                    new_index,
-                } => {
-                    let old_ty = unsafe { old.get_unchecked(*old_index) };
-                    let new_ty = unsafe { new.get_unchecked(*new_index) };
-
-                    // Use `OnceCell` to lazily construct `TypeData` for `old_ty` and `new_ty`.
-                    let mut type_data = OnceCell::new();
-
-                    for object_info in objects.values_mut() {
-                        if object_info.ty == *old_ty {
-                            map_fields(object_info, old_ty, new_ty, diff, &mut type_data);
-                        }
-                    }
+        for (old_ty, conversion) in mapping.conversions {
+            for object_info in objects.values_mut() {
+                if object_info.ty == old_ty {
+                    map_fields(object_info, &conversion);
                 }
             }
-                Diff::Insert { .. } | Diff::Move { .. } => (),
-            }
         }

         return deleted;

-        struct TypeData<'a, T> {
-            old_fields: Vec<(&'a str, T)>,
-            new_fields: Vec<(&'a str, T)>,
-            old_offsets: &'a [u16],
-            new_offsets: &'a [u16],
-        }
-
-        fn map_fields<'a, T: TypeDesc + TypeLayout + TypeFields<T> + TypeTrace + Clone + Eq>(
+        fn map_fields<T: TypeLayout + Clone>(
             object_info: &mut Pin<Box<ObjectInfo<T>>>,
-            old_ty: &'a T,
-            new_ty: &'a T,
-            diff: &[FieldDiff],
-            type_data: &mut OnceCell<TypeData<'a, T>>,
+            conversion: &Conversion<T>,
         ) {
-            let TypeData {
-                old_fields,
-                new_fields,
-                old_offsets,
-                new_offsets,
-            } = type_data.get_or_init(|| TypeData {
-                old_fields: old_ty.fields(),
-                new_fields: new_ty.fields(),
-                old_offsets: old_ty.offsets(),
-                new_offsets: new_ty.offsets(),
-            });
-            let ptr = unsafe { std::alloc::alloc_zeroed(new_ty.layout()) };
-
-            let mapping = field_mapping(&old_fields, &diff);
-            for (new_index, map) in mapping.into_iter().enumerate() {
-                if let Some(FieldMappingDesc { old_index, action }) = map {
+            let ptr = unsafe { std::alloc::alloc_zeroed(conversion.new_ty.layout()) };
+
+            for map in conversion.field_mapping.iter() {
+                if let Some(FieldMapping {
+                    old_offset,
+                    new_offset,
+                    action,
+                }) = map
+                {
                     let src = {
                         let mut src = object_info.ptr as usize;
-                        src += usize::from(unsafe { *old_offsets.get_unchecked(old_index) });
+                        src += old_offset;
                         src as *mut u8
                     };
                     let dest = {
                         let mut dest = ptr as usize;
-                        dest += usize::from(unsafe { *new_offsets.get_unchecked(new_index) });
+                        dest += new_offset;
                         dest as *mut u8
                     };
-                    let old_field = unsafe { old_fields.get_unchecked(old_index) };
-                    if action == mapping::Action::Cast {
-                        let new_field = unsafe { new_fields.get_unchecked(new_index) };
-                        if !cast::try_cast_from_to(
-                            *old_field.1.guid(),
-                            *new_field.1.guid(),
-                            unsafe { NonNull::new_unchecked(src) },
-                            unsafe { NonNull::new_unchecked(dest) },
-                        ) {
-                            // Failed to cast. Use the previously zero-initialized value instead
-                        }
-                    } else {
-                        unsafe {
-                            std::ptr::copy_nonoverlapping(src, dest, old_field.1.layout().size())
+                    match action {
+                        mapping::Action::Cast { old, new } => {
+                            if !cast::try_cast_from_to(
+                                *old,
+                                *new,
+                                unsafe { NonNull::new_unchecked(src) },
+                                unsafe { NonNull::new_unchecked(dest) },
+                            ) {
+                                // Failed to cast. Use the previously zero-initialized value instead
+                            }
                         }
+                        mapping::Action::Copy { size } => unsafe {
+                            std::ptr::copy_nonoverlapping(src, dest, *size)
+                        },
                     }
                 }
             }
@@ -318,7 +271,7 @@ where
             ptr,
             roots: object_info.roots,
             color: object_info.color,
-            ty: new_ty.clone(),
+            ty: conversion.new_ty.clone(),
         });
     }
 }
diff --git a/crates/mun_memory/src/lib.rs b/crates/mun_memory/src/lib.rs
index b076a6121..1d99d82c5 100644
--- a/crates/mun_memory/src/lib.rs
+++ b/crates/mun_memory/src/lib.rs
@@ -7,7 +7,7 @@ pub mod mapping;

 pub mod prelude {
     pub use crate::diff::{diff, Diff, FieldDiff, FieldEditKind};
-    pub use crate::mapping::{Action, FieldMappingDesc};
+    pub use crate::mapping::{Action, FieldMapping};
 }

 /// A trait used to obtain a type's description.
diff --git a/crates/mun_memory/src/mapping.rs b/crates/mun_memory/src/mapping.rs
index bab73eb9f..f4573f76e 100644
--- a/crates/mun_memory/src/mapping.rs
+++ b/crates/mun_memory/src/mapping.rs
@@ -1,37 +1,89 @@
 use crate::{
-    diff::{Diff, FieldDiff, FieldEditKind},
+    diff::{diff, Diff, FieldDiff, FieldEditKind},
     gc::GcPtr,
+    TypeDesc, TypeFields, TypeLayout,
+};
+use std::{
+    collections::{HashMap, HashSet},
+    hash::Hash,
 };
-use std::collections::HashSet;

-/// A trait used to map allocated memory using type differences.
-pub trait MemoryMapper<T> {
-    /// Maps the values memory from `old` to `new` using `diff`.
- /// - /// A `Vec` is returned containing all objects of types that were deleted. The - /// corresponding types have to remain in-memory until the objects have been deallocated. - fn map_memory(&self, old: &[T], new: &[T], diff: &[Diff]) -> Vec; +pub struct Mapping { + pub deletions: HashSet, + pub conversions: HashMap>, } -/// The `Action` to take when mapping memory from A to B. -#[derive(Eq, PartialEq)] -pub enum Action { - Cast, - Copy, +pub struct Conversion { + pub field_mapping: Vec>, + pub new_ty: T, } /// Description of the mapping of a single field. When stored together with the new index, this /// provides all information necessary for a mapping function. -pub struct FieldMappingDesc { - pub old_index: usize, +pub struct FieldMapping { + pub old_offset: usize, + pub new_offset: usize, pub action: Action, } +/// The `Action` to take when mapping memory from A to B. +#[derive(Eq, PartialEq)] +pub enum Action { + Cast { old: abi::Guid, new: abi::Guid }, + Copy { size: usize }, +} + +impl Mapping +where + T: TypeDesc + TypeFields + TypeLayout + Copy + Eq + Hash, +{ + /// + pub fn new(old: &[T], new: &[T]) -> Self { + let diff = diff(old, new); + + let mut deletions = HashSet::new(); + let mut conversions = HashMap::new(); + + for diff in diff.iter() { + match diff { + Diff::Delete { index } => { + deletions.insert(unsafe { *old.get_unchecked(*index) }); + } + Diff::Edit { + diff, + old_index, + new_index, + } => { + let old_ty = unsafe { *old.get_unchecked(*old_index) }; + let new_ty = unsafe { *new.get_unchecked(*new_index) }; + conversions.insert(old_ty, unsafe { field_mapping(old_ty, new_ty, diff) }); + } + Diff::Insert { .. } | Diff::Move { .. } => (), + } + } + + Self { + deletions, + conversions, + } + } +} + /// Given a set of `old_fields` of type `T` and their corresponding `diff`, calculates the mapping /// `new_index -> Option` for each new field. /// /// The indices of the returned `Vec`'s elements should be used as indices for the new fields. -pub fn field_mapping(old_fields: &[T], diff: &[FieldDiff]) -> Vec> { +/// +/// # Safety +/// +/// Expects the `diff` to be based on `old_ty` and `new_ty`. If not, it causes undefined behavior. +pub unsafe fn field_mapping + TypeLayout>( + old_ty: T, + new_ty: T, + diff: &[FieldDiff], +) -> Conversion { + let old_fields = old_ty.fields(); + let deletions: HashSet = diff .iter() .filter_map(|diff| match diff { @@ -41,6 +93,17 @@ pub fn field_mapping(old_fields: &[T], diff: &[FieldDiff]) -> Vec> = (0..old_fields.len()) .filter_map(|idx| { @@ -49,7 +112,7 @@ pub fn field_mapping(old_fields: &[T], diff: &[FieldDiff]) -> Vec(old_fields: &[T], diff: &[FieldDiff]) -> Vec(old_fields: &[T], diff: &[FieldDiff]) -> Vec { + /// Maps its allocated memory using the provided `mapping`. + /// + /// A `Vec` is returned containing all objects of types that were deleted. The + /// corresponding types have to remain in-memory until the objects have been deallocated. 
+    fn map_memory(&self, mapping: Mapping<T>) -> Vec<GcPtr>;
 }
diff --git a/crates/mun_runtime/src/assembly.rs b/crates/mun_runtime/src/assembly.rs
index 41b0c1bad..f478c92fa 100644
--- a/crates/mun_runtime/src/assembly.rs
+++ b/crates/mun_runtime/src/assembly.rs
@@ -9,7 +9,7 @@ mod temp_library;
 use self::temp_library::TempLibrary;
 use crate::garbage_collector::{GarbageCollector, RawTypeInfo};
-use memory::{diff::diff, mapping::MemoryMapper};
+use memory::mapping::{Mapping, MemoryMapper};
 use std::{collections::HashSet, sync::Arc};

 /// An assembly is a hot reloadable compilation unit, consisting of one or more Mun modules.
 pub struct Assembly {
@@ -161,9 +161,8 @@ impl Assembly {
             .map(|ty| (*ty as *const abi::TypeInfo).into())
             .collect();

-        let deleted_objects =
-            self.allocator
-                .map_memory(&old_types, &new_types, &diff(&old_types, &new_types));
+        let mapping = Mapping::new(&old_types, &new_types);
+        let deleted_objects = self.allocator.map_memory(mapping);

         // Remove the old assembly's functions
         for function in self.info.symbols.functions() {

From 5be9d5c708c771967d5d5d746dd2cc817337d770 Mon Sep 17 00:00:00 2001
From: Wodann
Date: Tue, 28 Apr 2020 17:42:23 +0200
Subject: [PATCH 2/2] improvement(runtime): clarify usage of unsafe TypeInfo

Garbage collection uses TypeInfo pointers that cannot directly be linked
to the lifetime of their assemblies. As a result, any usage of these
pointers is unsafe. To clarify this, the RawTypeInfo type has been renamed
to UnsafeTypeInfo and every use of it now requires an explicit unsafe
block, similar to Rust's UnsafeCell type.
---
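For reviewers: a rough sketch of the new convention (it mirrors the
assembly.rs and garbage_collector.rs hunks below; `type_info_ptr` is an
illustrative stand-in for any `*const abi::TypeInfo` whose lifetime the
caller must guarantee):

    // Wrapping asserts non-nullness; the caller vouches for the pointer's lifetime.
    let type_info = UnsafeTypeInfo::new(unsafe {
        NonNull::new_unchecked(type_info_ptr as *mut abi::TypeInfo)
    });
    // Every read back out of the wrapper is explicitly unsafe, akin to UnsafeCell.
    let name = unsafe { type_info.into_inner().as_ref().name() };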
 crates/mun_runtime/src/assembly.rs          | 22 +++--
 crates/mun_runtime/src/garbage_collector.rs | 99 ++++++++++-----------
 crates/mun_runtime/src/lib.rs               | 10 ++-
 crates/mun_runtime/src/reflection.rs        |  6 +-
 crates/mun_runtime/src/struct.rs            | 63 +++++++++----
 5 files changed, 124 insertions(+), 76 deletions(-)

diff --git a/crates/mun_runtime/src/assembly.rs b/crates/mun_runtime/src/assembly.rs
index f478c92fa..c507a6eeb 100644
--- a/crates/mun_runtime/src/assembly.rs
+++ b/crates/mun_runtime/src/assembly.rs
@@ -8,9 +8,9 @@ use libloading::Symbol;
 mod temp_library;

 use self::temp_library::TempLibrary;
-use crate::garbage_collector::{GarbageCollector, RawTypeInfo};
+use crate::garbage_collector::{GarbageCollector, UnsafeTypeInfo};
 use memory::mapping::{Mapping, MemoryMapper};
-use std::{collections::HashSet, sync::Arc};
+use std::{collections::HashSet, ptr::NonNull, sync::Arc};

 /// An assembly is a hot reloadable compilation unit, consisting of one or more Mun modules.
 pub struct Assembly {
@@ -145,20 +145,30 @@ impl Assembly {
         let mut new_assembly =
             Assembly::load(library_path, self.allocator.clone(), runtime_dispatch_table)?;

-        let old_types: Vec<RawTypeInfo> = self
+        let old_types: Vec<UnsafeTypeInfo> = self
             .info
             .symbols
             .types()
             .iter()
-            .map(|ty| (*ty as *const abi::TypeInfo).into())
+            .map(|ty| {
+                // Safety: `ty` is a shared reference, so is guaranteed to not be `ptr::null()`.
+                UnsafeTypeInfo::new(unsafe {
+                    NonNull::new_unchecked(*ty as *const abi::TypeInfo as *mut _)
+                })
+            })
             .collect();

-        let new_types: Vec<RawTypeInfo> = new_assembly
+        let new_types: Vec<UnsafeTypeInfo> = new_assembly
             .info
             .symbols
             .types()
             .iter()
-            .map(|ty| (*ty as *const abi::TypeInfo).into())
+            .map(|ty| {
+                // Safety: `ty` is a shared reference, so is guaranteed to not be `ptr::null()`.
+                UnsafeTypeInfo::new(unsafe {
+                    NonNull::new_unchecked(*ty as *const abi::TypeInfo as *mut _)
+                })
+            })
             .collect();

         let mapping = Mapping::new(&old_types, &new_types);
diff --git a/crates/mun_runtime/src/garbage_collector.rs b/crates/mun_runtime/src/garbage_collector.rs
index 0ceda1709..d439c96b3 100644
--- a/crates/mun_runtime/src/garbage_collector.rs
+++ b/crates/mun_runtime/src/garbage_collector.rs
@@ -1,64 +1,70 @@
 use memory::gc::{self, HasIndirectionPtr};
-use std::{alloc::Layout, hash::Hash};
-
+use std::{alloc::Layout, hash::Hash, ptr::NonNull};
+
+/// `UnsafeTypeInfo` is a type that wraps a `NonNull<abi::TypeInfo>` and indicates unsafe interior
+/// operations on the wrapped `TypeInfo`. The unsafety originates from uncertainty about the
+/// lifetime of the wrapped `TypeInfo`.
+///
+/// Rust lifetime rules do not allow separate lifetimes for struct fields, but we can make `unsafe`
+/// guarantees about their lifetimes. Thus the `UnsafeTypeInfo` type is the only legal way to obtain
+/// shared references to the wrapped `TypeInfo`.
 #[derive(Clone, Copy, Debug)]
 #[repr(transparent)]
-pub struct RawTypeInfo(*const abi::TypeInfo);
+pub struct UnsafeTypeInfo(NonNull<abi::TypeInfo>);

-impl RawTypeInfo {
-    /// Returns the inner `TypeInfo` pointer.
-    ///
-    /// # Safety
-    ///
-    /// This method is unsafe because there are no guarantees about the lifetime of the inner
+impl UnsafeTypeInfo {
+    /// Constructs a new instance of `UnsafeTypeInfo`, which will wrap the specified `type_info`
     /// pointer.
-    pub unsafe fn inner(self) -> *const abi::TypeInfo {
+    ///
+    /// All access to the inner value through methods is `unsafe`.
+    pub fn new(type_info: NonNull<abi::TypeInfo>) -> Self {
+        Self(type_info)
+    }
+
+    /// Unwraps the value.
+    pub fn into_inner(self) -> NonNull<abi::TypeInfo> {
         self.0
     }
 }

-impl PartialEq for RawTypeInfo {
+impl PartialEq for UnsafeTypeInfo {
     fn eq(&self, other: &Self) -> bool {
-        let this = unsafe { &*self.0 };
-        let other = unsafe { &*other.0 };
-        *this == *other
+        unsafe { *self.0.as_ref() == *other.0.as_ref() }
     }
 }

-impl Eq for RawTypeInfo {}
+impl Eq for UnsafeTypeInfo {}

-impl Hash for RawTypeInfo {
+impl Hash for UnsafeTypeInfo {
     fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        let this = unsafe { &*self.0 };
-        this.hash(state);
+        unsafe { self.0.as_ref().hash(state) };
     }
 }

-impl memory::TypeDesc for RawTypeInfo {
+impl memory::TypeDesc for UnsafeTypeInfo {
     fn name(&self) -> &str {
-        let this = unsafe { &*self.0 };
-        this.name()
+        unsafe { self.0.as_ref().name() }
     }
+
     fn guid(&self) -> &abi::Guid {
-        let this = unsafe { &*self.0 };
-        &this.guid
+        unsafe { &self.0.as_ref().guid }
     }
+
     fn group(&self) -> abi::TypeGroup {
-        let this = unsafe { &*self.0 };
-        this.group
+        unsafe { self.0.as_ref().group }
     }
 }

-impl memory::TypeFields<RawTypeInfo> for RawTypeInfo {
+impl memory::TypeFields<UnsafeTypeInfo> for UnsafeTypeInfo {
     fn fields(&self) -> Vec<(&str, Self)> {
-        let this = unsafe { &*self.0 };
-        if let Some(s) = this.as_struct() {
+        if let Some(s) = unsafe { self.0.as_ref().as_struct() } {
             s.field_names()
-                .zip(
-                    s.field_types()
-                        .iter()
-                        .map(|ty| (*ty as *const abi::TypeInfo).into()),
-                )
+                .zip(s.field_types().iter().map(|ty| {
+                    // Safety: `ty` is a shared reference, so is guaranteed to not be `ptr::null()`.
+ UnsafeTypeInfo::new(unsafe { + NonNull::new_unchecked(*ty as *const abi::TypeInfo as *mut _) + }) + })) .collect() } else { Vec::new() @@ -66,8 +72,7 @@ impl memory::TypeFields for RawTypeInfo { } fn offsets(&self) -> &[u16] { - let this = unsafe { &*self.0 }; - if let Some(s) = this.as_struct() { + if let Some(s) = unsafe { self.0.as_ref().as_struct() } { s.field_offsets() } else { &[] @@ -75,18 +80,12 @@ impl memory::TypeFields for RawTypeInfo { } } -impl Into for *const abi::TypeInfo { - fn into(self) -> RawTypeInfo { - RawTypeInfo(self) - } -} - -unsafe impl Send for RawTypeInfo {} -unsafe impl Sync for RawTypeInfo {} +unsafe impl Send for UnsafeTypeInfo {} +unsafe impl Sync for UnsafeTypeInfo {} pub struct Trace { obj: GcPtr, - ty: RawTypeInfo, + ty: UnsafeTypeInfo, index: usize, } @@ -94,7 +93,7 @@ impl Iterator for Trace { type Item = GcPtr; fn next(&mut self) -> Option { - let struct_ty = unsafe { self.ty.0.as_ref() }.unwrap().as_struct()?; + let struct_ty = unsafe { self.ty.0.as_ref() }.as_struct()?; let field_count = struct_ty.field_types().len(); while self.index < field_count { let index = self.index; @@ -114,15 +113,15 @@ impl Iterator for Trace { } } -impl memory::TypeLayout for RawTypeInfo { +impl memory::TypeLayout for UnsafeTypeInfo { fn layout(&self) -> Layout { - let ty = unsafe { &*self.0 }; + let ty = unsafe { self.0.as_ref() }; Layout::from_size_align(ty.size_in_bytes(), ty.alignment()) .unwrap_or_else(|_| panic!("invalid layout from Mun Type: {:?}", ty)) } } -impl gc::TypeTrace for RawTypeInfo { +impl gc::TypeTrace for UnsafeTypeInfo { type Trace = Trace; fn trace(&self, obj: GcPtr) -> Self::Trace { @@ -135,7 +134,7 @@ impl gc::TypeTrace for RawTypeInfo { } /// Defines the garbage collector used by the `Runtime`. -pub type GarbageCollector = gc::MarkSweep>; +pub type GarbageCollector = gc::MarkSweep>; pub use gc::GcPtr; -pub type GcRootPtr = gc::GcRootPtr; +pub type GcRootPtr = gc::GcRootPtr; diff --git a/crates/mun_runtime/src/lib.rs b/crates/mun_runtime/src/lib.rs index f2e063616..68522aef1 100644 --- a/crates/mun_runtime/src/lib.rs +++ b/crates/mun_runtime/src/lib.rs @@ -14,7 +14,7 @@ mod reflection; mod r#struct; use failure::Error; -use garbage_collector::GarbageCollector; +use garbage_collector::{GarbageCollector, UnsafeTypeInfo}; use memory::gc::{self, GcRuntime}; use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher}; use rustc_hash::FxHashMap; @@ -22,6 +22,7 @@ use std::{ collections::HashMap, ffi, io, mem, path::{Path, PathBuf}, + ptr::NonNull, string::ToString, sync::{ mpsc::{channel, Receiver}, @@ -182,8 +183,13 @@ extern "C" fn new( type_info: *const abi::TypeInfo, alloc_handle: *mut ffi::c_void, ) -> *const *mut ffi::c_void { + // Safety: `new` is only called from within Mun assemblies' core logic, so we are guaranteed + // that the `Runtime` and its `GarbageCollector` still exist if this function is called, and + // will continue to do so for the duration of this function. let allocator = unsafe { get_allocator(alloc_handle) }; - let handle = allocator.alloc(type_info.into()); + // Safety: the Mun Compiler guarantees that `new` is never called with `ptr::null()`. 
+ let type_info = UnsafeTypeInfo::new(unsafe { NonNull::new_unchecked(type_info as *mut _) }); + let handle = allocator.alloc(type_info); // Prevent destruction of the allocator mem::forget(allocator); diff --git a/crates/mun_runtime/src/reflection.rs b/crates/mun_runtime/src/reflection.rs index 6f6db7212..b64eaa018 100644 --- a/crates/mun_runtime/src/reflection.rs +++ b/crates/mun_runtime/src/reflection.rs @@ -2,8 +2,8 @@ use crate::{marshal::Marshal, Runtime, StructRef}; use abi::HasStaticTypeInfo; /// Returns whether the specified argument type matches the `type_info`. -pub fn equals_argument_type<'r, 'e, 'f, T: ArgumentReflection>( - runtime: &'r Runtime, +pub fn equals_argument_type<'e, 'f, T: ArgumentReflection>( + runtime: &'f Runtime, type_info: &'e abi::TypeInfo, arg: &'f T, ) -> Result<(), (&'e str, &'f str)> { @@ -58,7 +58,7 @@ pub trait ArgumentReflection: Sized { fn type_guid(&self, runtime: &Runtime) -> abi::Guid; /// Retrieves the name of the value's type. - fn type_name(&self, runtime: &Runtime) -> &str; + fn type_name<'r>(&'r self, runtime: &'r Runtime) -> &'r str; /// Marshals the value. fn marshal(self) -> Self::Marshalled; diff --git a/crates/mun_runtime/src/struct.rs b/crates/mun_runtime/src/struct.rs index 7549dbe25..d140746b4 100644 --- a/crates/mun_runtime/src/struct.rs +++ b/crates/mun_runtime/src/struct.rs @@ -1,4 +1,4 @@ -use crate::garbage_collector::{GcPtr, GcRootPtr}; +use crate::garbage_collector::{GcPtr, GcRootPtr, UnsafeTypeInfo}; use crate::{ marshal::Marshal, reflection::{ @@ -37,9 +37,17 @@ impl StructRef { fn new(runtime: Rc>, raw: RawStruct) -> Self { let handle = { let runtime_ref = runtime.borrow(); - assert!(unsafe { &*runtime_ref.gc().ptr_type(raw.0).inner() } - .group - .is_struct()); + // Safety: The type returned from `ptr_type` is guaranteed to live at least as long as + // `Runtime` does not change. As we hold a shared reference to `Runtime`, this is safe. + assert!(unsafe { + runtime_ref + .gc() + .ptr_type(raw.0) + .into_inner() + .as_ref() + .group + .is_struct() + }); GcRootPtr::new(&runtime_ref.gc, raw.0) }; @@ -54,9 +62,16 @@ impl StructRef { /// Returns the type information of the struct. pub fn type_info<'r>(struct_ref: &Self, runtime_ref: &'r Runtime) -> &'r abi::TypeInfo { - // Safety: The lifetime of `TypeInfo` is tied to the lifetime of `Runtime` and thus the - // underlying pointer cannot change during the lifetime of the shared reference. - unsafe { &*runtime_ref.gc.ptr_type(struct_ref.handle.handle()).inner() } + // Safety: The type returned from `ptr_type` is guaranteed to live at least as long as + // `Runtime` does not change. As the lifetime of `TypeInfo` is tied to the lifetime of + // `Runtime`, this is safe. + unsafe { + &*runtime_ref + .gc + .ptr_type(struct_ref.handle.handle()) + .into_inner() + .as_ptr() + } } /// @@ -176,13 +191,29 @@ impl ArgumentReflection for StructRef { type Marshalled = RawStruct; fn type_guid(&self, runtime: &Runtime) -> abi::Guid { - let type_info = unsafe { &*runtime.gc().ptr_type(self.handle.handle()).inner() }; - type_info.guid + // Safety: The type returned from `ptr_type` is guaranteed to live at least as long as + // `Runtime` does not change. As we hold a shared reference to `Runtime`, this is safe. 
+ unsafe { + runtime + .gc() + .ptr_type(self.handle.handle()) + .into_inner() + .as_ref() + .guid + } } fn type_name(&self, runtime: &Runtime) -> &str { - let type_info = unsafe { &*runtime.gc().ptr_type(self.handle.handle()).inner() }; - type_info.name() + // Safety: The type returned from `ptr_type` is guaranteed to live at least as long as + // `Runtime` does not change. As we hold a shared reference to `Runtime`, this is safe. + unsafe { + (&*runtime + .gc() + .ptr_type(self.handle.handle()) + .into_inner() + .as_ptr()) + .name() + } } fn marshal(self) -> Self::Marshalled { @@ -212,9 +243,6 @@ impl Marshal for RawStruct { let type_info = type_info.unwrap(); let struct_info = type_info.as_struct().unwrap(); - // HACK: This is very hacky since we know nothing about the lifetime of abi::TypeInfo. - let type_info_ptr = (type_info as *const abi::TypeInfo).into(); - // Copy the contents of the struct based on what kind of pointer we are dealing with let gc_handle = if struct_info.memory_kind == abi::StructMemoryKind::Value { // For a value struct, `ptr` points to a struct value. @@ -222,7 +250,12 @@ impl Marshal for RawStruct { // Create a new object using the runtime's intrinsic let mut gc_handle = { let runtime_ref = runtime.borrow(); - runtime_ref.gc().alloc(type_info_ptr) + runtime_ref.gc().alloc( + // Safety: `ty` is a shared reference, so is guaranteed to not be `ptr::null()`. + UnsafeTypeInfo::new(unsafe { + NonNull::new_unchecked(type_info as *const abi::TypeInfo as *mut _) + }), + ) }; // Construct