pub use crate::backing::{ImportBacking, LocalBacking, INTERNALS_SIZE};
use crate::{
memory::{Memory, MemoryType},
module::{ModuleInfo, ModuleInner},
structures::TypedIndex,
types::{LocalOrImport, MemoryIndex},
vmcalls,
};
use std::{
cell::UnsafeCell,
ffi::c_void,
mem, ptr,
sync::atomic::{AtomicUsize, Ordering},
sync::Once,
};
use std::collections::HashMap;
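/// The context of a running WebAssembly instance.
///
/// A pointer to this struct is implicitly passed to every WebAssembly
/// function, giving generated code and host imports access to the
/// instance's memories, tables, globals, and attached host data.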
#[derive(Debug)]
#[repr(C)]
pub struct Ctx {
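    // `internal` must be the first field so that it sits at offset 0;
    // `vm_offset_tests::vmctx` below checks this.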
pub internal: InternalCtx,
pub(crate) local_functions: *const *const Func,
pub local_backing: *mut LocalBacking,
pub import_backing: *mut ImportBacking,
pub module: *const ModuleInner,
pub data: *mut c_void,
pub data_finalizer: Option<fn(data: *mut c_void)>,
}
impl Drop for Ctx {
fn drop(&mut self) {
if let Some(ref finalizer) = self.data_finalizer {
finalizer(self.data);
}
}
}
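/// The part of `Ctx` that generated code reads directly through the
/// `Ctx::offset_*` constants; `#[repr(C)]` keeps its field offsets stable.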
#[doc(hidden)]
#[derive(Debug)]
#[repr(C)]
pub struct InternalCtx {
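    // Pointers to arrays of this instance's locally-defined memories, tables,
    // and globals, each indexed by its local index type.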
pub memories: *mut *mut LocalMemory,
pub tables: *mut *mut LocalTable,
pub globals: *mut *mut LocalGlobal,
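    // Pointers to arrays of this instance's imported memories, tables,
    // globals, and functions.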
pub imported_memories: *mut *mut LocalMemory,
pub imported_tables: *mut *mut LocalTable,
pub imported_globals: *mut *mut LocalGlobal,
pub imported_funcs: *mut ImportedFunc,
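    /// Pointer to an array of signature ids, mapping this module's local
    /// signature indices to runtime-global ids used to check
    /// `call_indirect` targets.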
pub dynamic_sigindices: *const SigId,
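    /// Memory intrinsics (grow/size) matching this module's kind of memory;
    /// null when the module neither defines nor imports a memory.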
pub intrinsics: *const Intrinsics,
pub stack_lower_bound: *mut u8,
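    /// Cached base pointer of memory 0 (null when the module has no memory);
    /// `memory_bound` below holds its current size in bytes.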
pub memory_base: *mut u8,
pub memory_bound: usize,
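    /// Pointer to the fixed-size per-instance array backing `InternalField`
    /// slots (see `get_internal`/`set_internal`).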
pub internals: *mut [u64; INTERNALS_SIZE],
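    /// Pointer to the memory region used to signal interruption of running
    /// WebAssembly code (see `get_interrupt_signal_mem`).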
pub interrupt_signal_mem: *mut u8,
}
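/// Number of `InternalField` slots allocated so far, across the whole process.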
static INTERNAL_FIELDS: AtomicUsize = AtomicUsize::new(0);
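/// A process-global handle to one slot of an instance's `internals` array;
/// the slot index is assigned lazily on first use by `InternalField::index`.
///
/// A minimal usage sketch (the `COUNTER` field and `bump` function are
/// hypothetical, not part of this module):
///
/// ```ignore
/// static COUNTER: InternalField = InternalField::allocate();
///
/// fn bump(ctx: &mut Ctx) {
///     let v = ctx.get_internal(&COUNTER);
///     ctx.set_internal(&COUNTER, v + 1);
/// }
/// ```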
pub struct InternalField {
init: Once,
inner: UnsafeCell<usize>,
}
unsafe impl Send for InternalField {}
unsafe impl Sync for InternalField {}
impl InternalField {
pub const fn allocate() -> InternalField {
InternalField {
init: Once::new(),
inner: UnsafeCell::new(::std::usize::MAX),
}
}
pub fn index(&self) -> usize {
let inner: *mut usize = self.inner.get();
self.init.call_once(|| {
let idx = INTERNAL_FIELDS.fetch_add(1, Ordering::SeqCst);
if idx >= INTERNALS_SIZE {
INTERNAL_FIELDS.fetch_sub(1, Ordering::SeqCst);
panic!("at most {} internal fields are supported", INTERNALS_SIZE);
} else {
unsafe {
*inner = idx;
}
}
});
unsafe { *inner }
}
}
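/// Function pointers for the memory `grow`/`size` operations that generated
/// code should call for this module's kind of memory (see the `INTRINSICS_*`
/// statics below).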
#[repr(C)]
pub struct Intrinsics {
pub memory_grow: *const Func,
pub memory_size: *const Func,
}
unsafe impl Send for Intrinsics {}
unsafe impl Sync for Intrinsics {}
impl Intrinsics {
#[allow(clippy::erasing_op)]
pub fn offset_memory_grow() -> u8 {
(0 * ::std::mem::size_of::<usize>()) as u8
}
pub fn offset_memory_size() -> u8 {
(1 * ::std::mem::size_of::<usize>()) as u8
}
}
pub static INTRINSICS_LOCAL_STATIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::local_static_memory_grow as _,
memory_size: vmcalls::local_static_memory_size as _,
};
pub static INTRINSICS_LOCAL_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::local_dynamic_memory_grow as _,
memory_size: vmcalls::local_dynamic_memory_size as _,
};
pub static INTRINSICS_IMPORTED_STATIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::imported_static_memory_grow as _,
memory_size: vmcalls::imported_static_memory_size as _,
};
pub static INTRINSICS_IMPORTED_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::imported_dynamic_memory_grow as _,
memory_size: vmcalls::imported_dynamic_memory_size as _,
};
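/// Picks the intrinsics table matching the type of memory 0, or returns a
/// null pointer when the module neither defines nor imports a memory.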
fn get_intrinsics_for_module(m: &ModuleInfo) -> *const Intrinsics {
    if m.memories.is_empty() && m.imported_memories.is_empty() {
::std::ptr::null()
} else {
match MemoryIndex::new(0).local_or_import(m) {
LocalOrImport::Local(local_mem_index) => {
let mem_desc = &m.memories[local_mem_index];
match mem_desc.memory_type() {
MemoryType::Dynamic => &INTRINSICS_LOCAL_DYNAMIC_MEMORY,
MemoryType::Static => &INTRINSICS_LOCAL_STATIC_MEMORY,
MemoryType::SharedStatic => &INTRINSICS_LOCAL_STATIC_MEMORY,
}
}
LocalOrImport::Import(import_mem_index) => {
let mem_desc = &m.imported_memories[import_mem_index].1;
match mem_desc.memory_type() {
MemoryType::Dynamic => &INTRINSICS_IMPORTED_DYNAMIC_MEMORY,
MemoryType::Static => &INTRINSICS_IMPORTED_STATIC_MEMORY,
MemoryType::SharedStatic => &INTRINSICS_IMPORTED_STATIC_MEMORY,
}
}
}
}
}
#[cfg(all(unix, target_arch = "x86_64"))]
fn get_interrupt_signal_mem() -> *mut u8 {
unsafe { crate::fault::get_wasm_interrupt_signal_mem() }
}
#[cfg(not(all(unix, target_arch = "x86_64")))]
fn get_interrupt_signal_mem() -> *mut u8 {
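    // No signal-based interruption on this platform; hand out a dummy word so
    // that `interrupt_signal_mem` still points at valid memory.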
static mut REGION: u64 = 0;
unsafe { &mut REGION as *mut u64 as *mut u8 }
}
impl Ctx {
#[doc(hidden)]
pub unsafe fn new(
local_backing: &mut LocalBacking,
import_backing: &mut ImportBacking,
module: &ModuleInner,
) -> Self {
let (mem_base, mem_bound): (*mut u8, usize) =
            if module.info.memories.is_empty() && module.info.imported_memories.is_empty() {
(::std::ptr::null_mut(), 0)
} else {
let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
LocalOrImport::Local(index) => local_backing.vm_memories[index],
LocalOrImport::Import(index) => import_backing.vm_memories[index],
};
((*mem).base, (*mem).bound)
};
Self {
internal: InternalCtx {
memories: local_backing.vm_memories.as_mut_ptr(),
tables: local_backing.vm_tables.as_mut_ptr(),
globals: local_backing.vm_globals.as_mut_ptr(),
imported_memories: import_backing.vm_memories.as_mut_ptr(),
imported_tables: import_backing.vm_tables.as_mut_ptr(),
imported_globals: import_backing.vm_globals.as_mut_ptr(),
imported_funcs: import_backing.vm_functions.as_mut_ptr(),
dynamic_sigindices: local_backing.dynamic_sigindices.as_ptr(),
intrinsics: get_intrinsics_for_module(&module.info),
stack_lower_bound: ::std::ptr::null_mut(),
memory_base: mem_base,
memory_bound: mem_bound,
internals: &mut local_backing.internals.0,
interrupt_signal_mem: get_interrupt_signal_mem(),
},
local_functions: local_backing.local_functions.as_ptr(),
local_backing,
import_backing,
module,
data: ptr::null_mut(),
data_finalizer: None,
}
}
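    /// Like `Ctx::new`, but also attaches host `data` along with a
    /// `data_finalizer` that is invoked when the `Ctx` is dropped.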
#[doc(hidden)]
pub unsafe fn new_with_data(
local_backing: &mut LocalBacking,
import_backing: &mut ImportBacking,
module: &ModuleInner,
data: *mut c_void,
data_finalizer: fn(*mut c_void),
) -> Self {
let (mem_base, mem_bound): (*mut u8, usize) =
            if module.info.memories.is_empty() && module.info.imported_memories.is_empty() {
(::std::ptr::null_mut(), 0)
} else {
let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
LocalOrImport::Local(index) => local_backing.vm_memories[index],
LocalOrImport::Import(index) => import_backing.vm_memories[index],
};
((*mem).base, (*mem).bound)
};
Self {
internal: InternalCtx {
memories: local_backing.vm_memories.as_mut_ptr(),
tables: local_backing.vm_tables.as_mut_ptr(),
globals: local_backing.vm_globals.as_mut_ptr(),
imported_memories: import_backing.vm_memories.as_mut_ptr(),
imported_tables: import_backing.vm_tables.as_mut_ptr(),
imported_globals: import_backing.vm_globals.as_mut_ptr(),
imported_funcs: import_backing.vm_functions.as_mut_ptr(),
dynamic_sigindices: local_backing.dynamic_sigindices.as_ptr(),
intrinsics: get_intrinsics_for_module(&module.info),
stack_lower_bound: ::std::ptr::null_mut(),
memory_base: mem_base,
memory_bound: mem_bound,
internals: &mut local_backing.internals.0,
interrupt_signal_mem: get_interrupt_signal_mem(),
},
local_functions: local_backing.local_functions.as_ptr(),
local_backing,
import_backing,
module,
data,
data_finalizer: Some(data_finalizer),
}
}
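    /// Returns a reference to the memory at `mem_index`, whether it is
    /// locally defined or imported by this instance.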
pub fn memory(&self, mem_index: u32) -> &Memory {
let module = unsafe { &*self.module };
let mem_index = MemoryIndex::new(mem_index as usize);
match mem_index.local_or_import(&module.info) {
LocalOrImport::Local(local_mem_index) => unsafe {
let local_backing = &*self.local_backing;
&local_backing.memories[local_mem_index]
},
LocalOrImport::Import(import_mem_index) => unsafe {
let import_backing = &*self.import_backing;
&import_backing.memories[import_mem_index]
},
}
}
pub unsafe fn borrow_symbol_map(&self) -> &Option<HashMap<u32, String>> {
&(*self.module).info.em_symbol_map
}
pub fn dynamic_sigindice_count(&self) -> usize {
unsafe { (*self.local_backing).dynamic_sigindices.len() }
}
pub fn get_internal(&self, field: &InternalField) -> u64 {
unsafe { (*self.internal.internals)[field.index()] }
}
pub fn set_internal(&mut self, field: &InternalField, value: u64) {
unsafe {
(*self.internal.internals)[field.index()] = value;
}
}
}
#[doc(hidden)]
impl Ctx {
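    // Byte offsets of the `InternalCtx`/`Ctx` fields, as handed to the code
    // generators. They assume every preceding field is pointer-sized, which
    // `vm_offset_tests` below verifies.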
#[allow(clippy::erasing_op)]
pub fn offset_memories() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_tables() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
pub fn offset_globals() -> u8 {
2 * (mem::size_of::<usize>() as u8)
}
pub fn offset_imported_memories() -> u8 {
3 * (mem::size_of::<usize>() as u8)
}
pub fn offset_imported_tables() -> u8 {
4 * (mem::size_of::<usize>() as u8)
}
pub fn offset_imported_globals() -> u8 {
5 * (mem::size_of::<usize>() as u8)
}
pub fn offset_imported_funcs() -> u8 {
6 * (mem::size_of::<usize>() as u8)
}
pub fn offset_signatures() -> u8 {
7 * (mem::size_of::<usize>() as u8)
}
pub fn offset_intrinsics() -> u8 {
8 * (mem::size_of::<usize>() as u8)
}
pub fn offset_stack_lower_bound() -> u8 {
9 * (mem::size_of::<usize>() as u8)
}
pub fn offset_memory_base() -> u8 {
10 * (mem::size_of::<usize>() as u8)
}
pub fn offset_memory_bound() -> u8 {
11 * (mem::size_of::<usize>() as u8)
}
pub fn offset_internals() -> u8 {
12 * (mem::size_of::<usize>() as u8)
}
pub fn offset_interrupt_signal_mem() -> u8 {
13 * (mem::size_of::<usize>() as u8)
}
pub fn offset_local_functions() -> u8 {
14 * (mem::size_of::<usize>() as u8)
}
}
enum InnerFunc {}
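/// Opaque stand-in for a compiled or imported function; it only ever appears
/// behind raw pointers and is never instantiated (its inner type is
/// uninhabited).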
#[repr(C)]
pub struct Func(InnerFunc);
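/// An imported function paired with the `Ctx` it must be called with.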
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedFunc {
pub func: *const Func,
pub vmctx: *mut Ctx,
}
impl ImportedFunc {
#[allow(clippy::erasing_op)]
pub fn offset_func() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_vmctx() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
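/// Runtime representation of a table: a pointer to its elements, the current
/// element count, and a type-erased pointer back to the owning table object.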
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalTable {
pub base: *mut u8,
pub count: usize,
pub table: *mut (),
}
impl LocalTable {
#[allow(clippy::erasing_op)]
pub fn offset_base() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_count() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
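/// Runtime representation of a linear memory: its base address, its current
/// size in bytes, and a type-erased pointer back to the owning memory object.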
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalMemory {
pub base: *mut u8,
pub bound: usize,
pub memory: *mut (),
}
impl LocalMemory {
#[allow(clippy::erasing_op)]
pub fn offset_base() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_bound() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
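/// Runtime representation of a global, stored as raw 128-bit data.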
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalGlobal {
pub data: u128,
}
impl LocalGlobal {
#[allow(clippy::erasing_op)]
pub fn offset_data() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn null() -> Self {
Self { data: 0 }
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
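/// A runtime-global signature id, compared when dispatching `call_indirect`.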
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct SigId(pub u32);
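/// An element of a function table: the function pointer, the `Ctx` to call it
/// with, and its signature id (a null entry uses `SigId(u32::max_value())`).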
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Anyfunc {
pub func: *const Func,
pub ctx: *mut Ctx,
pub sig_id: SigId,
}
impl Anyfunc {
pub fn null() -> Self {
Self {
func: ptr::null(),
ctx: ptr::null_mut(),
sig_id: SigId(u32::max_value()),
}
}
#[allow(clippy::erasing_op)]
pub fn offset_func() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_vmctx() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
pub fn offset_sig_id() -> u8 {
2 * (mem::size_of::<usize>() as u8)
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
#[cfg(test)]
mod vm_offset_tests {
use super::{Anyfunc, Ctx, ImportedFunc, InternalCtx, LocalGlobal, LocalMemory, LocalTable};
#[test]
fn vmctx() {
        assert_eq!(0usize, offset_of!(Ctx => internal).get_byte_offset());
assert_eq!(
Ctx::offset_memories() as usize,
offset_of!(InternalCtx => memories).get_byte_offset(),
);
assert_eq!(
Ctx::offset_tables() as usize,
offset_of!(InternalCtx => tables).get_byte_offset(),
);
assert_eq!(
Ctx::offset_globals() as usize,
offset_of!(InternalCtx => globals).get_byte_offset(),
);
assert_eq!(
Ctx::offset_imported_memories() as usize,
offset_of!(InternalCtx => imported_memories).get_byte_offset(),
);
assert_eq!(
Ctx::offset_imported_tables() as usize,
offset_of!(InternalCtx => imported_tables).get_byte_offset(),
);
assert_eq!(
Ctx::offset_imported_globals() as usize,
offset_of!(InternalCtx => imported_globals).get_byte_offset(),
);
assert_eq!(
Ctx::offset_imported_funcs() as usize,
offset_of!(InternalCtx => imported_funcs).get_byte_offset(),
);
assert_eq!(
Ctx::offset_intrinsics() as usize,
offset_of!(InternalCtx => intrinsics).get_byte_offset(),
);
assert_eq!(
Ctx::offset_stack_lower_bound() as usize,
offset_of!(InternalCtx => stack_lower_bound).get_byte_offset(),
);
assert_eq!(
Ctx::offset_memory_base() as usize,
offset_of!(InternalCtx => memory_base).get_byte_offset(),
);
assert_eq!(
Ctx::offset_memory_bound() as usize,
offset_of!(InternalCtx => memory_bound).get_byte_offset(),
);
assert_eq!(
Ctx::offset_internals() as usize,
offset_of!(InternalCtx => internals).get_byte_offset(),
);
assert_eq!(
Ctx::offset_interrupt_signal_mem() as usize,
offset_of!(InternalCtx => interrupt_signal_mem).get_byte_offset(),
);
assert_eq!(
Ctx::offset_local_functions() as usize,
offset_of!(Ctx => local_functions).get_byte_offset(),
);
}
#[test]
fn imported_func() {
assert_eq!(
ImportedFunc::offset_func() as usize,
offset_of!(ImportedFunc => func).get_byte_offset(),
);
assert_eq!(
ImportedFunc::offset_vmctx() as usize,
offset_of!(ImportedFunc => vmctx).get_byte_offset(),
);
}
#[test]
fn local_table() {
assert_eq!(
LocalTable::offset_base() as usize,
offset_of!(LocalTable => base).get_byte_offset(),
);
assert_eq!(
LocalTable::offset_count() as usize,
offset_of!(LocalTable => count).get_byte_offset(),
);
}
#[test]
fn local_memory() {
assert_eq!(
LocalMemory::offset_base() as usize,
offset_of!(LocalMemory => base).get_byte_offset(),
);
assert_eq!(
LocalMemory::offset_bound() as usize,
offset_of!(LocalMemory => bound).get_byte_offset(),
);
}
#[test]
fn local_global() {
assert_eq!(
LocalGlobal::offset_data() as usize,
offset_of!(LocalGlobal => data).get_byte_offset(),
);
}
#[test]
fn cc_anyfunc() {
assert_eq!(
Anyfunc::offset_func() as usize,
offset_of!(Anyfunc => func).get_byte_offset(),
);
assert_eq!(
Anyfunc::offset_vmctx() as usize,
offset_of!(Anyfunc => ctx).get_byte_offset(),
);
assert_eq!(
Anyfunc::offset_sig_id() as usize,
offset_of!(Anyfunc => sig_id).get_byte_offset(),
);
}
}
#[cfg(test)]
mod vm_ctx_tests {
use super::{Ctx, ImportBacking, LocalBacking};
use crate::module::{ModuleInfo, ModuleInner, StringTable};
use crate::structures::Map;
use std::ffi::c_void;
struct TestData {
x: u32,
y: bool,
str: String,
finalizer: Box<dyn FnMut()>,
}
impl Drop for TestData {
fn drop(&mut self) {
(*self.finalizer)();
}
}
    fn test_data_finalizer(data: *mut c_void) {
        let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };
        assert_eq!(10, test_data.x);
        assert!(test_data.y);
        assert_eq!("Test".to_string(), test_data.str);
        println!("hello from finalizer");
        // The finalizer does not free `data`: the `TestData` is owned by the
        // test's stack frame, and dropping the `&mut` reference here would be
        // a no-op anyway.
    }
#[test]
fn test_callback_on_drop() {
let mut data = TestData {
x: 10,
y: true,
str: "Test".to_string(),
finalizer: Box::new(move || {}),
};
let mut local_backing = LocalBacking {
memories: Map::new().into_boxed_map(),
tables: Map::new().into_boxed_map(),
globals: Map::new().into_boxed_map(),
vm_memories: Map::new().into_boxed_map(),
vm_tables: Map::new().into_boxed_map(),
vm_globals: Map::new().into_boxed_map(),
dynamic_sigindices: Map::new().into_boxed_map(),
local_functions: Map::new().into_boxed_map(),
internals: crate::backing::Internals([0; crate::backing::INTERNALS_SIZE]),
};
let mut import_backing = ImportBacking {
memories: Map::new().into_boxed_map(),
tables: Map::new().into_boxed_map(),
globals: Map::new().into_boxed_map(),
vm_functions: Map::new().into_boxed_map(),
vm_memories: Map::new().into_boxed_map(),
vm_tables: Map::new().into_boxed_map(),
vm_globals: Map::new().into_boxed_map(),
};
let module = generate_module();
let data_ptr = &mut data as *mut _ as *mut c_void;
let ctx = unsafe {
Ctx::new_with_data(
&mut local_backing,
&mut import_backing,
&module,
data_ptr,
test_data_finalizer,
)
};
let ctx_test_data = cast_test_data(ctx.data);
assert_eq!(10, ctx_test_data.x);
        assert!(ctx_test_data.y);
assert_eq!("Test".to_string(), ctx_test_data.str);
drop(ctx);
}
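    // Test-only helper: the `'static` lifetime is a shortcut; the `TestData`
    // actually lives on `test_callback_on_drop`'s stack frame.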
fn cast_test_data(data: *mut c_void) -> &'static mut TestData {
let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };
test_data
}
fn generate_module() -> ModuleInner {
use super::Func;
use crate::backend::{sys::Memory, Backend, CacheGen, RunnableModule};
use crate::cache::Error as CacheError;
use crate::typed_func::Wasm;
use crate::types::{LocalFuncIndex, SigIndex};
use indexmap::IndexMap;
use std::any::Any;
use std::collections::HashMap;
use std::ptr::NonNull;
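        // Stub backend: its methods are never exercised here; it exists only
        // so that a `ModuleInner` can be constructed for the test.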
struct Placeholder;
impl RunnableModule for Placeholder {
fn get_func(
&self,
_module: &ModuleInfo,
_local_func_index: LocalFuncIndex,
) -> Option<NonNull<Func>> {
None
}
fn get_trampoline(&self, _module: &ModuleInfo, _sig_index: SigIndex) -> Option<Wasm> {
unimplemented!()
}
unsafe fn do_early_trap(&self, _: Box<dyn Any>) -> ! {
unimplemented!()
}
}
impl CacheGen for Placeholder {
fn generate_cache(&self) -> Result<(Box<[u8]>, Memory), CacheError> {
unimplemented!()
}
}
ModuleInner {
runnable_module: Box::new(Placeholder),
cache_gen: Box::new(Placeholder),
info: ModuleInfo {
memories: Map::new(),
globals: Map::new(),
tables: Map::new(),
imported_functions: Map::new(),
imported_memories: Map::new(),
imported_tables: Map::new(),
imported_globals: Map::new(),
exports: IndexMap::new(),
data_initializers: Vec::new(),
elem_initializers: Vec::new(),
start_func: None,
func_assoc: Map::new(),
signatures: Map::new(),
backend: Backend::Cranelift,
namespace_table: StringTable::new(),
name_table: StringTable::new(),
em_symbol_map: None,
custom_sections: HashMap::new(),
},
}
}
}