Library restructure (#104)

* Move wasmer-runtime to wasmer-runtime-core

* Add the runtime library

* Fix issue with macros using wasmer_runtime, fmt

* Make default compiler dependency optional

* Add instantiate and validate functions
This commit is contained in:
Brandon Fish
2019-01-22 13:02:06 -06:00
committed by Lachlan Sneff
parent 62b8e7cc2d
commit 74875ed554
129 changed files with 218 additions and 130 deletions

View File

@ -1,74 +0,0 @@
use crate::{
backing::ImportBacking,
error::CompileResult,
error::RuntimeResult,
module::ModuleInner,
types::{FuncIndex, LocalFuncIndex, Value},
vm,
};
use std::ptr::NonNull;
pub mod sys {
pub use crate::sys::*;
}
pub use crate::sig_registry::SigRegistry;
/// Opaque capability token proving that a call originated inside the
/// runtime crate.
///
/// The private zero-sized field makes it impossible to construct a `Token`
/// from outside the crate, so any API that demands a `Token` argument can
/// only be invoked by runtime-internal code.
pub struct Token {
    _private: (),
}

impl Token {
    /// Mints a new token; `pub(crate)` restricts creation to this crate.
    pub(crate) fn generate() -> Self {
        Token { _private: () }
    }
}
/// Interface implemented by each compiler backend (e.g. cranelift, LLVM)
/// that turns a wasm binary into a runnable `ModuleInner`.
pub trait Compiler {
    /// Compiles a `Module` from WebAssembly binary format.
    /// The `Token` parameter ensures that this can only
    /// be called from inside the runtime.
    fn compile(&self, wasm: &[u8], _: Token) -> CompileResult<ModuleInner>;
}
/// The functionality exposed by this trait is expected to be used
/// for calling functions exported by a webassembly module from
/// host code only.
pub trait ProtectedCaller {
    /// This calls the exported function designated by `local_func_index`.
    /// Important to note, this supports calling imported functions that are
    /// then exported.
    ///
    /// It's invalid to attempt to call a local function that isn't exported and
    /// the implementation is expected to check for that. The implementation
    /// is also expected to check for correct parameter types and correct
    /// parameter number.
    ///
    /// The `returns` parameter is filled with dummy values when passed in and upon function
    /// return, will be filled with the return values of the wasm function, as long as the
    /// call completed successfully.
    ///
    /// The existence of the Token parameter ensures that this can only be called from
    /// within the runtime crate.
    fn call(
        &self,
        module: &ModuleInner,
        func_index: FuncIndex,
        params: &[Value],
        returns: &mut [Value],
        import_backing: &ImportBacking,
        vmctx: *mut vm::Ctx,
        _: Token,
    ) -> RuntimeResult<()>;
}
/// Maps a local function index to the compiled machine code produced by
/// the backend for that function.
pub trait FuncResolver {
    /// This returns a pointer to the function designated by the `local_func_index`
    /// parameter. Returns `None` when the index has no compiled code.
    fn get(
        &self,
        module: &ModuleInner,
        local_func_index: LocalFuncIndex,
    ) -> Option<NonNull<vm::Func>>;
}

View File

@ -1,611 +0,0 @@
use crate::{
error::{LinkError, LinkResult},
export::{Context, Export},
import::ImportObject,
memory::LinearMemory,
module::{ImportName, ModuleInner},
structures::{BoxedMap, Map, SliceMap, TypedIndex},
table::{TableBacking, TableElements},
types::{
ElementType, ImportedFuncIndex, ImportedGlobalIndex, ImportedMemoryIndex,
ImportedTableIndex, Initializer, LocalGlobalIndex, LocalMemoryIndex, LocalOrImport,
LocalTableIndex, Type, Value,
},
vm,
};
use std::{mem, slice};
/// Per-instance storage for everything the module defines locally (i.e. not
/// imported): linear memories, tables, and globals, plus the `vm::*` views
/// of them that generated code reads through `vm::Ctx`.
#[derive(Debug)]
pub struct LocalBacking {
    /// Backing storage for each locally-defined linear memory.
    pub(crate) memories: BoxedMap<LocalMemoryIndex, LinearMemory>,
    /// Backing storage for each locally-defined table.
    pub(crate) tables: BoxedMap<LocalTableIndex, TableBacking>,
    /// `vm`-facing views of `memories`, consumed by compiled code.
    pub(crate) vm_memories: BoxedMap<LocalMemoryIndex, vm::LocalMemory>,
    /// `vm`-facing views of `tables`, consumed by compiled code.
    pub(crate) vm_tables: BoxedMap<LocalTableIndex, vm::LocalTable>,
    /// Storage (and `vm`-facing view) of each locally-defined global.
    pub(crate) vm_globals: BoxedMap<LocalGlobalIndex, vm::LocalGlobal>,
}
impl LocalBacking {
    /// Mutable access to a locally-defined memory. Panics if the index is
    /// out of range.
    pub fn memory(&mut self, local_memory_index: LocalMemoryIndex) -> &mut LinearMemory {
        &mut self.memories[local_memory_index]
    }

    /// Mutable access to a locally-defined table. Panics if the index is
    /// out of range.
    pub fn table(&mut self, local_table_index: LocalTableIndex) -> &mut TableBacking {
        &mut self.tables[local_table_index]
    }
}
impl LocalBacking {
    /// Builds the complete local backing for `module`.
    ///
    /// Allocates local memories, tables, and globals, applies the module's
    /// data/element/global initializers (which may read imported globals via
    /// `imports`), and produces the `vm::*` views used by generated code.
    ///
    /// `vmctx` may point at a not-yet-initialized context: it is only stored
    /// into anyfunc entries here, never dereferenced.
    pub(crate) fn new(module: &ModuleInner, imports: &ImportBacking, vmctx: *mut vm::Ctx) -> Self {
        let mut memories = Self::generate_memories(module);
        let mut tables = Self::generate_tables(module);
        let globals = Self::generate_globals(module);

        let vm_memories = Self::finalize_memories(module, imports, &mut memories);
        let vm_tables = Self::finalize_tables(module, imports, &mut tables, vmctx);
        let vm_globals = Self::finalize_globals(module, imports, globals);

        Self {
            memories,
            tables,
            vm_memories,
            vm_tables,
            vm_globals,
        }
    }

    /// Allocates one `LinearMemory` per locally-defined memory.
    fn generate_memories(module: &ModuleInner) -> BoxedMap<LocalMemoryIndex, LinearMemory> {
        let mut memories = Map::with_capacity(module.memories.len());

        for (_, mem) in &module.memories {
            debug!("Instance - init memory ({}, {:?})", mem.min, mem.max);
            let memory = LinearMemory::new(mem);
            memories.push(memory);
        }

        memories.into_boxed_map()
    }

    /// Copies the module's data segments into the target (local or imported)
    /// memories, then produces the `vm::LocalMemory` views of the local ones.
    fn finalize_memories(
        module: &ModuleInner,
        imports: &ImportBacking,
        memories: &mut SliceMap<LocalMemoryIndex, LinearMemory>,
    ) -> BoxedMap<LocalMemoryIndex, vm::LocalMemory> {
        // For each init that has some data...
        for init in module
            .data_initializers
            .iter()
            .filter(|init| !init.data.is_empty())
        {
            // The segment's base offset is either an i32 constant or the
            // current value of an imported i32 global.
            let init_base = match init.base {
                Initializer::Const(Value::I32(offset)) => offset as u32,
                Initializer::Const(_) => panic!("a const initializer must be the i32 type"),
                Initializer::GetGlobal(imported_global_index) => {
                    if module.imported_globals[imported_global_index].1.ty == Type::I32 {
                        unsafe { (*imports.globals[imported_global_index].global).data as u32 }
                    } else {
                        panic!("unsupported global type for initializer")
                    }
                }
            } as usize;

            match init.memory_index.local_or_import(module) {
                LocalOrImport::Local(local_memory_index) => {
                    let memory_desc = &module.memories[local_memory_index];
                    let data_top = init_base + init.data.len();
                    // The segment must fit inside the memory's minimum size.
                    assert!((memory_desc.min * LinearMemory::PAGE_SIZE) as usize >= data_top);
                    let mem: &mut LinearMemory = &mut memories[local_memory_index];

                    let mem_init_view = &mut mem[init_base..init_base + init.data.len()];
                    mem_init_view.copy_from_slice(&init.data);
                }
                LocalOrImport::Import(imported_memory_index) => {
                    let vm_imported_memory = imports.imported_memory(imported_memory_index);
                    unsafe {
                        // SAFETY: relies on the import providing a live,
                        // correctly-sized `vm::LocalMemory`; the slice bounds
                        // come from its own `base`/`size` fields.
                        let local_memory = &(*vm_imported_memory.memory);
                        let memory_slice =
                            slice::from_raw_parts_mut(local_memory.base, local_memory.size);

                        let mem_init_view =
                            &mut memory_slice[init_base..init_base + init.data.len()];
                        mem_init_view.copy_from_slice(&init.data);
                    }
                }
            }
        }

        memories
            .iter_mut()
            .map(|(index, mem)| mem.into_vm_memory(index))
            .collect::<Map<_, _>>()
            .into_boxed_map()
    }

    /// Allocates one `TableBacking` per locally-defined table.
    fn generate_tables(module: &ModuleInner) -> BoxedMap<LocalTableIndex, TableBacking> {
        let mut tables = Map::with_capacity(module.tables.len());

        for (_, table) in &module.tables {
            let table_backing = TableBacking::new(table);
            tables.push(table_backing);
        }

        tables.into_boxed_map()
    }

    /// Writes the module's element segments into the target (local or
    /// imported) tables, then produces the `vm::LocalTable` views of the
    /// local ones.
    #[allow(clippy::cast_ptr_alignment)]
    fn finalize_tables(
        module: &ModuleInner,
        imports: &ImportBacking,
        tables: &mut SliceMap<LocalTableIndex, TableBacking>,
        vmctx: *mut vm::Ctx,
    ) -> BoxedMap<LocalTableIndex, vm::LocalTable> {
        for init in &module.elem_initializers {
            // Same base-offset resolution as data segments: i32 constant or
            // imported i32 global.
            let init_base = match init.base {
                Initializer::Const(Value::I32(offset)) => offset as u32,
                Initializer::Const(_) => panic!("a const initializer must be the i32 type"),
                Initializer::GetGlobal(imported_global_index) => {
                    if module.imported_globals[imported_global_index].1.ty == Type::I32 {
                        unsafe { (*imports.globals[imported_global_index].global).data as u32 }
                    } else {
                        panic!("unsupported global type for initializer")
                    }
                }
            } as usize;

            match init.table_index.local_or_import(module) {
                LocalOrImport::Local(local_table_index) => {
                    let table = &mut tables[local_table_index];

                    match table.elements {
                        TableElements::Anyfunc(ref mut elements) => {
                            if elements.len() < init_base + init.elements.len() {
                                // Grow the table if it's too small.
                                elements
                                    .resize(init_base + init.elements.len(), vm::Anyfunc::null());
                            }

                            for (i, &func_index) in init.elements.iter().enumerate() {
                                let sig_index = module.func_assoc[func_index];
                                let sig_id = vm::SigId(sig_index.index() as u32);

                                // Resolve each element either to locally
                                // compiled code or to the already-resolved
                                // imported function.
                                let func_data = match func_index.local_or_import(module) {
                                    LocalOrImport::Local(local_func_index) => vm::ImportedFunc {
                                        func: module
                                            .func_resolver
                                            .get(module, local_func_index)
                                            .unwrap()
                                            .as_ptr(),
                                        vmctx,
                                    },
                                    LocalOrImport::Import(imported_func_index) => {
                                        imports.functions[imported_func_index].clone()
                                    }
                                };

                                elements[init_base + i] = vm::Anyfunc { func_data, sig_id };
                            }
                        }
                    }
                }
                LocalOrImport::Import(imported_table_index) => {
                    let (_, table_description) = module.imported_tables[imported_table_index];
                    match table_description.ty {
                        ElementType::Anyfunc => {
                            let imported_table = &imports.tables[imported_table_index];
                            let imported_local_table = (*imported_table).table;

                            // Temporarily view the imported table's storage as
                            // a Vec so we can resize/index it. Ownership stays
                            // with the import; see `mem::forget` below.
                            let mut elements = unsafe {
                                Vec::from_raw_parts(
                                    (*imported_local_table).base as *mut vm::Anyfunc,
                                    (*imported_local_table).current_elements,
                                    (*imported_local_table).capacity,
                                )
                            };

                            if elements.len() < init_base + init.elements.len() {
                                // Grow the table if it's too small.
                                elements
                                    .resize(init_base + init.elements.len(), vm::Anyfunc::null());
                                // Since the vector may have changed location after reallocating,
                                // we must fix the base, current_elements, and capacity fields.
                                unsafe {
                                    (*imported_local_table).base = elements.as_mut_ptr() as *mut u8;
                                    (*imported_local_table).current_elements = elements.len();
                                    (*imported_local_table).capacity = elements.capacity();
                                }
                            }

                            for (i, &func_index) in init.elements.iter().enumerate() {
                                let sig_index = module.func_assoc[func_index];
                                let sig_id = vm::SigId(sig_index.index() as u32);

                                let func_data = match func_index.local_or_import(module) {
                                    LocalOrImport::Local(local_func_index) => vm::ImportedFunc {
                                        func: module
                                            .func_resolver
                                            .get(module, local_func_index)
                                            .unwrap()
                                            .as_ptr(),
                                        vmctx,
                                    },
                                    LocalOrImport::Import(imported_func_index) => {
                                        imports.functions[imported_func_index].clone()
                                    }
                                };

                                elements[init_base + i] = vm::Anyfunc { func_data, sig_id };
                            }

                            // THIS IS EXTREMELY IMPORTANT.
                            // The Vec was built from storage owned by the
                            // imported table; dropping it here would free
                            // memory we do not own.
                            mem::forget(elements);
                        }
                    }
                }
            }
        }

        tables
            .iter_mut()
            .map(|(_, table)| table.into_vm_table())
            .collect::<Map<_, _>>()
            .into_boxed_map()
    }

    /// Allocates zeroed storage for every locally-defined global.
    fn generate_globals(module: &ModuleInner) -> BoxedMap<LocalGlobalIndex, vm::LocalGlobal> {
        let mut globals = Map::with_capacity(module.globals.len());
        globals.resize(module.globals.len(), vm::LocalGlobal::null());

        globals.into_boxed_map()
    }

    /// Evaluates each local global's initializer (a constant, or a read of
    /// an imported global) and stores the raw 64-bit value.
    fn finalize_globals(
        module: &ModuleInner,
        imports: &ImportBacking,
        mut globals: BoxedMap<LocalGlobalIndex, vm::LocalGlobal>,
    ) -> BoxedMap<LocalGlobalIndex, vm::LocalGlobal> {
        for ((_, to), (_, from)) in globals.iter_mut().zip(module.globals.iter()) {
            to.data = match from.init {
                Initializer::Const(ref value) => match value {
                    Value::I32(x) => *x as u64,
                    Value::I64(x) => *x as u64,
                    // Floats are stored by bit pattern, not by value.
                    Value::F32(x) => x.to_bits() as u64,
                    Value::F64(x) => x.to_bits(),
                },
                Initializer::GetGlobal(imported_global_index) => unsafe {
                    (*imports.globals[imported_global_index].global).data
                },
            };
        }

        globals
    }
}
/// The resolved imports of an instance: one `vm::Imported*` record per
/// import declared by the module, produced at link time by `ImportBacking::new`.
#[derive(Debug)]
pub struct ImportBacking {
    /// Resolved imported functions, in declaration order.
    pub(crate) functions: BoxedMap<ImportedFuncIndex, vm::ImportedFunc>,
    /// Resolved imported memories, in declaration order.
    pub(crate) memories: BoxedMap<ImportedMemoryIndex, vm::ImportedMemory>,
    /// Resolved imported tables, in declaration order.
    pub(crate) tables: BoxedMap<ImportedTableIndex, vm::ImportedTable>,
    /// Resolved imported globals, in declaration order.
    pub(crate) globals: BoxedMap<ImportedGlobalIndex, vm::ImportedGlobal>,
}
impl ImportBacking {
    /// Resolves all four categories of imports declared by `module` against
    /// `imports`.
    ///
    /// Every category is attempted even after one fails, so the returned
    /// `Err` aggregates all link errors found rather than stopping at the
    /// first.
    pub fn new(
        module: &ModuleInner,
        imports: &mut ImportObject,
        vmctx: *mut vm::Ctx,
    ) -> LinkResult<Self> {
        let mut failed = false;
        let mut link_errors = vec![];

        // Each resolver either yields its map, or records its errors and
        // falls back to an empty map so the remaining categories still run.
        let functions = import_functions(module, imports, vmctx).unwrap_or_else(|le| {
            failed = true;
            link_errors.extend(le);
            Map::new().into_boxed_map()
        });

        let memories = import_memories(module, imports, vmctx).unwrap_or_else(|le| {
            failed = true;
            link_errors.extend(le);
            Map::new().into_boxed_map()
        });

        let tables = import_tables(module, imports, vmctx).unwrap_or_else(|le| {
            failed = true;
            link_errors.extend(le);
            Map::new().into_boxed_map()
        });

        let globals = import_globals(module, imports).unwrap_or_else(|le| {
            failed = true;
            link_errors.extend(le);
            Map::new().into_boxed_map()
        });

        if failed {
            Err(link_errors)
        } else {
            Ok(ImportBacking {
                functions,
                memories,
                tables,
                globals,
            })
        }
    }

    /// Returns (a clone of) the resolved function import at `func_index`.
    pub fn imported_func(&self, func_index: ImportedFuncIndex) -> vm::ImportedFunc {
        self.functions[func_index].clone()
    }

    /// Returns (a clone of) the resolved memory import at `memory_index`.
    pub fn imported_memory(&self, memory_index: ImportedMemoryIndex) -> vm::ImportedMemory {
        self.memories[memory_index].clone()
    }
}
/// Resolves every function import declared by `module` from `imports`.
///
/// On success, returns one `vm::ImportedFunc` per imported function in
/// declaration order. On failure, returns every link error found (missing
/// import, wrong export kind, or signature mismatch) rather than stopping
/// at the first.
fn import_functions(
    module: &ModuleInner,
    imports: &mut ImportObject,
    vmctx: *mut vm::Ctx,
) -> LinkResult<BoxedMap<ImportedFuncIndex, vm::ImportedFunc>> {
    let mut link_errors = vec![];
    let mut functions = Map::with_capacity(module.imported_functions.len());
    for (index, ImportName { namespace, name }) in &module.imported_functions {
        let sig_index = module.func_assoc[index.convert_up(module)];
        let expected_sig = module.sig_registry.lookup_func_sig(sig_index);
        let import = imports
            .get_namespace(namespace)
            .and_then(|namespace| namespace.get_export(name));
        match import {
            Some(Export::Function {
                func,
                ctx,
                signature,
            }) => {
                if expected_sig == &signature {
                    functions.push(vm::ImportedFunc {
                        func: func.inner(),
                        // `Internal` means "run against this instance's own
                        // vmctx"; external imports carry their own context.
                        vmctx: match ctx {
                            Context::External(ctx) => ctx,
                            Context::Internal => vmctx,
                        },
                    });
                } else {
                    link_errors.push(LinkError::IncorrectImportSignature {
                        namespace: namespace.clone(),
                        name: name.clone(),
                        expected: expected_sig.clone(),
                        found: signature.clone(),
                    });
                }
            }
            Some(export_type) => {
                let export_type_name = match export_type {
                    Export::Function { .. } => "function",
                    Export::Memory { .. } => "memory",
                    Export::Table { .. } => "table",
                    Export::Global { .. } => "global",
                }
                .to_string();
                link_errors.push(LinkError::IncorrectImportType {
                    namespace: namespace.clone(),
                    name: name.clone(),
                    expected: "function".to_string(),
                    found: export_type_name,
                });
            }
            None => {
                link_errors.push(LinkError::ImportNotFound {
                    namespace: namespace.clone(),
                    name: name.clone(),
                });
            }
        }
    }

    if link_errors.is_empty() {
        Ok(functions.into_boxed_map())
    } else {
        Err(link_errors)
    }
}
/// Resolves every memory import declared by `module` from `imports`.
///
/// On success, returns one `vm::ImportedMemory` per imported memory in
/// declaration order. On failure, returns every link error found (missing
/// import, wrong export kind, or incompatible memory description).
fn import_memories(
    module: &ModuleInner,
    imports: &mut ImportObject,
    vmctx: *mut vm::Ctx,
) -> LinkResult<BoxedMap<ImportedMemoryIndex, vm::ImportedMemory>> {
    let mut link_errors = vec![];
    let mut memories = Map::with_capacity(module.imported_memories.len());
    for (_index, (ImportName { namespace, name }, expected_memory_desc)) in
        &module.imported_memories
    {
        let memory_import = imports
            .get_namespace(namespace)
            .and_then(|namespace| namespace.get_export(name));
        match memory_import {
            Some(Export::Memory {
                local,
                ctx,
                memory: memory_desc,
            }) => {
                // The provided memory must satisfy (fit in) the declared
                // min/max limits.
                if expected_memory_desc.fits_in_imported(&memory_desc) {
                    memories.push(vm::ImportedMemory {
                        memory: local.inner(),
                        vmctx: match ctx {
                            Context::External(ctx) => ctx,
                            Context::Internal => vmctx,
                        },
                    });
                } else {
                    link_errors.push(LinkError::IncorrectMemoryDescription {
                        namespace: namespace.clone(),
                        name: name.clone(),
                        expected: expected_memory_desc.clone(),
                        found: memory_desc.clone(),
                    });
                }
            }
            Some(export_type) => {
                let export_type_name = match export_type {
                    Export::Function { .. } => "function",
                    Export::Memory { .. } => "memory",
                    Export::Table { .. } => "table",
                    Export::Global { .. } => "global",
                }
                .to_string();
                link_errors.push(LinkError::IncorrectImportType {
                    namespace: namespace.clone(),
                    name: name.clone(),
                    expected: "memory".to_string(),
                    found: export_type_name,
                });
            }
            None => {
                link_errors.push(LinkError::ImportNotFound {
                    namespace: namespace.clone(),
                    name: name.clone(),
                });
            }
        }
    }

    if link_errors.is_empty() {
        Ok(memories.into_boxed_map())
    } else {
        Err(link_errors)
    }
}
/// Resolves every table import declared by `module` from `imports`.
///
/// On success, returns one `vm::ImportedTable` per imported table in
/// declaration order. On failure, returns every link error found (missing
/// import, wrong export kind, or incompatible table description).
fn import_tables(
    module: &ModuleInner,
    imports: &mut ImportObject,
    vmctx: *mut vm::Ctx,
) -> LinkResult<BoxedMap<ImportedTableIndex, vm::ImportedTable>> {
    let mut link_errors = vec![];
    let mut tables = Map::with_capacity(module.imported_tables.len());
    for (_index, (ImportName { namespace, name }, expected_table_desc)) in &module.imported_tables {
        let table_import = imports
            .get_namespace(namespace)
            .and_then(|namespace| namespace.get_export(name));
        match table_import {
            Some(Export::Table {
                local,
                ctx,
                table: table_desc,
            }) => {
                // The provided table must satisfy (fit in) the declared
                // element type and limits.
                if expected_table_desc.fits_in_imported(&table_desc) {
                    tables.push(vm::ImportedTable {
                        table: local.inner(),
                        vmctx: match ctx {
                            Context::External(ctx) => ctx,
                            Context::Internal => vmctx,
                        },
                    });
                } else {
                    link_errors.push(LinkError::IncorrectTableDescription {
                        namespace: namespace.clone(),
                        name: name.clone(),
                        expected: expected_table_desc.clone(),
                        found: table_desc.clone(),
                    });
                }
            }
            Some(export_type) => {
                let export_type_name = match export_type {
                    Export::Function { .. } => "function",
                    Export::Memory { .. } => "memory",
                    Export::Table { .. } => "table",
                    Export::Global { .. } => "global",
                }
                .to_string();
                link_errors.push(LinkError::IncorrectImportType {
                    namespace: namespace.clone(),
                    name: name.clone(),
                    expected: "table".to_string(),
                    found: export_type_name,
                });
            }
            None => {
                link_errors.push(LinkError::ImportNotFound {
                    namespace: namespace.clone(),
                    name: name.clone(),
                });
            }
        }
    }

    if link_errors.is_empty() {
        Ok(tables.into_boxed_map())
    } else {
        Err(link_errors)
    }
}
/// Resolves every global import declared by `module` from `imports`.
///
/// On success, returns one `vm::ImportedGlobal` per imported global in
/// declaration order. On failure, returns every link error found (missing
/// import, wrong export kind, or mismatched global description).
fn import_globals(
    module: &ModuleInner,
    imports: &mut ImportObject,
) -> LinkResult<BoxedMap<ImportedGlobalIndex, vm::ImportedGlobal>> {
    let mut link_errors = vec![];
    let mut globals = Map::with_capacity(module.imported_globals.len());
    for (_, (ImportName { namespace, name }, imported_global_desc)) in &module.imported_globals {
        let import = imports
            .get_namespace(namespace)
            .and_then(|namespace| namespace.get_export(name));
        match import {
            Some(Export::Global { local, global }) => {
                // Unlike memories/tables, global descriptions must match
                // exactly (type and mutability).
                if global == *imported_global_desc {
                    globals.push(vm::ImportedGlobal {
                        global: local.inner(),
                    });
                } else {
                    link_errors.push(LinkError::IncorrectGlobalDescription {
                        namespace: namespace.clone(),
                        name: name.clone(),
                        expected: imported_global_desc.clone(),
                        found: global.clone(),
                    });
                }
            }
            Some(export_type) => {
                let export_type_name = match export_type {
                    Export::Function { .. } => "function",
                    Export::Memory { .. } => "memory",
                    Export::Table { .. } => "table",
                    Export::Global { .. } => "global",
                }
                .to_string();
                link_errors.push(LinkError::IncorrectImportType {
                    namespace: namespace.clone(),
                    name: name.clone(),
                    expected: "global".to_string(),
                    found: export_type_name,
                });
            }
            None => {
                link_errors.push(LinkError::ImportNotFound {
                    namespace: namespace.clone(),
                    name: name.clone(),
                });
            }
        }
    }

    if link_errors.is_empty() {
        Ok(globals.into_boxed_map())
    } else {
        Err(link_errors)
    }
}

View File

@ -1,164 +0,0 @@
use crate::types::{FuncSig, GlobalDesc, Memory, MemoryIndex, Table, TableIndex, Type};
/// Result alias over the catch-all [`Error`] type.
pub type Result<T> = std::result::Result<T, Box<Error>>;
/// Result of compiling a wasm binary.
pub type CompileResult<T> = std::result::Result<T, Box<CompileError>>;
/// Result of linking a module's imports; carries all errors found at once.
pub type LinkResult<T> = std::result::Result<T, Vec<LinkError>>;
/// Result of executing wasm code.
pub type RuntimeResult<T> = std::result::Result<T, Box<RuntimeError>>;
/// Result of calling an exported wasm function from host code.
pub type CallResult<T> = std::result::Result<T, Box<CallError>>;
/// This is returned when the chosen compiler is unable to
/// successfully compile the provided webassembly module into
/// a `Module`.
///
/// Comparing two `CompileError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum CompileError {
    /// The wasm binary failed validation.
    ValidationError { msg: String },
    /// The compiler itself failed for a non-validation reason.
    InternalError { msg: String },
}

impl PartialEq for CompileError {
    // Deliberately never equal: errors carry free-form messages and this
    // crate defines equality of any two errors as `false` (see type docs).
    fn eq(&self, _other: &CompileError) -> bool {
        false
    }
}
/// This is returned when the runtime is unable to
/// correctly link the module with the provided imports.
///
/// Comparing two `LinkError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum LinkError {
    /// The import exists but is a different kind of export
    /// (e.g. a memory where a function was expected).
    IncorrectImportType {
        namespace: String,
        name: String,
        expected: String,
        found: String,
    },
    /// An imported function's signature does not match the declared one.
    IncorrectImportSignature {
        namespace: String,
        name: String,
        expected: FuncSig,
        found: FuncSig,
    },
    /// No export with this namespace/name was provided.
    ImportNotFound {
        namespace: String,
        name: String,
    },
    /// The provided memory does not satisfy the declared limits.
    IncorrectMemoryDescription {
        namespace: String,
        name: String,
        expected: Memory,
        found: Memory,
    },
    /// The provided table does not satisfy the declared description.
    IncorrectTableDescription {
        namespace: String,
        name: String,
        expected: Table,
        found: Table,
    },
    /// The provided global's description does not match the declared one.
    IncorrectGlobalDescription {
        namespace: String,
        name: String,
        expected: GlobalDesc,
        found: GlobalDesc,
    },
}

impl PartialEq for LinkError {
    // Deliberately never equal, per this crate's error-equality convention
    // (see type docs).
    fn eq(&self, _other: &LinkError) -> bool {
        false
    }
}
/// This is the error type returned when calling
/// a webassembly function.
///
/// The main way to do this is `Instance.call`.
///
/// Comparing two `RuntimeError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum RuntimeError {
    /// A load or store fell outside the memory's bounds.
    OutOfBoundsAccess { memory: MemoryIndex, addr: u32 },
    /// An indirect call indexed past the end of the table.
    TableOutOfBounds { table: TableIndex },
    /// An indirect call's signature did not match the table entry's.
    IndirectCallSignature { table: TableIndex },
    /// An indirect call hit a null table entry.
    IndirectCallToNull { table: TableIndex },
    /// E.g. division by zero or overflowing integer conversion.
    IllegalArithmeticOperation,
    /// Any trap not covered by the variants above.
    Unknown { msg: String },
}

impl PartialEq for RuntimeError {
    // Deliberately never equal, per this crate's error-equality convention
    // (see type docs).
    fn eq(&self, _other: &RuntimeError) -> bool {
        false
    }
}
/// This error type is produced by calling a wasm function
/// exported from a module.
///
/// If the module traps in some way while running, this will
/// be the `CallError::Runtime(RuntimeError)` variant.
///
/// Comparing two `CallError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum CallError {
    /// The arguments supplied do not match the function's signature.
    Signature { expected: FuncSig, found: Vec<Type> },
    /// No export with the given name exists.
    NoSuchExport { name: String },
    /// The named export exists but is not a function.
    ExportNotFunc { name: String },
    /// The function trapped while executing.
    Runtime(RuntimeError),
}

impl PartialEq for CallError {
    // Deliberately never equal, per this crate's error-equality convention
    // (see type docs).
    fn eq(&self, _other: &CallError) -> bool {
        false
    }
}
/// The amalgamation of all errors that can occur
/// during the compilation, instantiation, or execution
/// of a webassembly module.
///
/// Comparing two `Error`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum Error {
    /// Compilation of the wasm binary failed.
    CompileError(CompileError),
    /// Linking the module's imports failed; all errors are collected.
    LinkError(Vec<LinkError>),
    /// Execution trapped.
    RuntimeError(RuntimeError),
    /// Calling an exported function failed.
    CallError(CallError),
}

impl PartialEq for Error {
    // Deliberately never equal, per this crate's error-equality convention
    // (see type docs).
    fn eq(&self, _other: &Error) -> bool {
        false
    }
}
// Conversions that let `?` lift each specific boxed error into the
// catch-all `Box<Error>` (and a runtime trap into a call error).

impl From<Box<CompileError>> for Box<Error> {
    fn from(compile_err: Box<CompileError>) -> Self {
        Box::new(Error::CompileError(*compile_err))
    }
}

impl From<Vec<LinkError>> for Box<Error> {
    fn from(link_err: Vec<LinkError>) -> Self {
        Box::new(Error::LinkError(link_err))
    }
}

impl From<Box<RuntimeError>> for Box<Error> {
    fn from(runtime_err: Box<RuntimeError>) -> Self {
        Box::new(Error::RuntimeError(*runtime_err))
    }
}

impl From<Box<CallError>> for Box<Error> {
    fn from(call_err: Box<CallError>) -> Self {
        Box::new(Error::CallError(*call_err))
    }
}

impl From<Box<RuntimeError>> for Box<CallError> {
    fn from(runtime_err: Box<RuntimeError>) -> Self {
        Box::new(CallError::Runtime(*runtime_err))
    }
}

View File

@ -1,128 +0,0 @@
use crate::{
instance::InstanceInner,
module::ExportIndex,
module::ModuleInner,
types::{FuncSig, GlobalDesc, Memory, Table},
vm,
};
use hashbrown::hash_map;
/// Which `vm::Ctx` an export must run against.
#[derive(Debug, Copy, Clone)]
pub enum Context {
    /// The export belongs to another instance; use its context.
    External(*mut vm::Ctx),
    /// The export belongs to the current instance; substitute its own
    /// context at link time.
    Internal,
}
/// A single export of a module or host namespace: the raw pointer to the
/// underlying entity plus the description needed to type-check it when it
/// is imported elsewhere.
#[derive(Debug, Clone)]
pub enum Export {
    /// An exported function with its signature.
    Function {
        func: FuncPointer,
        ctx: Context,
        signature: FuncSig,
    },
    /// An exported linear memory with its limits description.
    Memory {
        local: MemoryPointer,
        ctx: Context,
        memory: Memory,
    },
    /// An exported table with its description.
    Table {
        local: TablePointer,
        ctx: Context,
        table: Table,
    },
    /// An exported global with its description.
    Global {
        local: GlobalPointer,
        global: GlobalDesc,
    },
}
/// Wrapper around a raw pointer to a callable wasm/host function.
#[derive(Debug, Clone)]
pub struct FuncPointer(*const vm::Func);

impl FuncPointer {
    /// This needs to be unsafe because there is
    /// no way to check whether the passed function
    /// is valid and has the right signature.
    pub unsafe fn new(f: *const vm::Func) -> Self {
        FuncPointer(f)
    }

    /// Returns the raw function pointer.
    pub(crate) fn inner(&self) -> *const vm::Func {
        self.0
    }
}
/// Wrapper around a raw pointer to a linear memory's `vm::LocalMemory`.
#[derive(Debug, Clone)]
pub struct MemoryPointer(*mut vm::LocalMemory);

impl MemoryPointer {
    /// This needs to be unsafe because there is
    /// no way to check whether the passed pointer
    /// refers to a valid, live `vm::LocalMemory`.
    pub unsafe fn new(f: *mut vm::LocalMemory) -> Self {
        MemoryPointer(f)
    }

    /// Returns the raw memory pointer.
    pub(crate) fn inner(&self) -> *mut vm::LocalMemory {
        self.0
    }
}
/// Wrapper around a raw pointer to a table's `vm::LocalTable`.
#[derive(Debug, Clone)]
pub struct TablePointer(*mut vm::LocalTable);

impl TablePointer {
    /// This needs to be unsafe because there is
    /// no way to check whether the passed pointer
    /// refers to a valid, live `vm::LocalTable`.
    pub unsafe fn new(f: *mut vm::LocalTable) -> Self {
        TablePointer(f)
    }

    /// Returns the raw table pointer.
    pub(crate) fn inner(&self) -> *mut vm::LocalTable {
        self.0
    }
}
/// Wrapper around a raw pointer to a global's `vm::LocalGlobal`.
#[derive(Debug, Clone)]
pub struct GlobalPointer(*mut vm::LocalGlobal);

impl GlobalPointer {
    /// This needs to be unsafe because there is
    /// no way to check whether the passed pointer
    /// refers to a valid, live `vm::LocalGlobal`.
    pub unsafe fn new(f: *mut vm::LocalGlobal) -> Self {
        GlobalPointer(f)
    }

    /// Returns the raw global pointer.
    pub(crate) fn inner(&self) -> *mut vm::LocalGlobal {
        self.0
    }
}
/// Iterator over an instance's exports, yielding `(name, Export)` pairs.
///
/// Holds the instance mutably because resolving each export may require
/// handing out pointers into the instance's backing state.
pub struct ExportIter<'a> {
    // Instance state used to materialize each `Export`.
    inner: &'a mut InstanceInner,
    // Iterates the module's name -> export-index map.
    iter: hash_map::Iter<'a, String, ExportIndex>,
    module: &'a ModuleInner,
}

impl<'a> ExportIter<'a> {
    /// Creates an iterator over all exports declared by `module`.
    pub(crate) fn new(module: &'a ModuleInner, inner: &'a mut InstanceInner) -> Self {
        Self {
            inner,
            iter: module.exports.iter(),
            module,
        }
    }
}

impl<'a> Iterator for ExportIter<'a> {
    type Item = (String, Export);
    fn next(&mut self) -> Option<(String, Export)> {
        let (name, export_index) = self.iter.next()?;
        Some((
            name.clone(),
            self.inner.get_export_from_index(&self.module, export_index),
        ))
    }
}

View File

@ -1,60 +0,0 @@
use crate::export::Export;
use hashbrown::{hash_map::Entry, HashMap};
/// Anything that can act as an import namespace: a named lookup of exports.
/// Takes `&mut self` so implementors may create exports lazily.
pub trait LikeNamespace {
    fn get_export(&mut self, name: &str) -> Option<Export>;
}
/// A collection of namespaces, keyed by name, used to resolve a module's
/// imports at instantiation time.
pub struct ImportObject {
    map: HashMap<String, Box<dyn LikeNamespace>>,
}

impl ImportObject {
    /// Creates an empty `ImportObject`.
    pub fn new() -> Self {
        Self {
            map: HashMap::new(),
        }
    }

    /// Registers `namespace` under `name`.
    ///
    /// Returns the namespace previously registered under `name`, if any.
    pub fn register<S, N>(&mut self, name: S, namespace: N) -> Option<Box<dyn LikeNamespace>>
    where
        S: Into<String>,
        N: LikeNamespace + 'static,
    {
        match self.map.entry(name.into()) {
            Entry::Vacant(empty) => {
                empty.insert(Box::new(namespace));
                None
            }
            Entry::Occupied(mut occupied) => Some(occupied.insert(Box::new(namespace))),
        }
    }

    /// Looks up a registered namespace by name.
    pub fn get_namespace(&mut self, namespace: &str) -> Option<&mut (dyn LikeNamespace + 'static)> {
        self.map
            .get_mut(namespace)
            .map(|namespace| &mut **namespace)
    }
}

// `Default` mirrors `new` (clippy: new_without_default).
impl Default for ImportObject {
    fn default() -> Self {
        Self::new()
    }
}
/// A simple, eager `LikeNamespace` backed by a name -> `Export` map.
pub struct Namespace {
    map: HashMap<String, Export>,
}

impl Namespace {
    /// Creates an empty namespace.
    pub fn new() -> Self {
        Self {
            map: HashMap::new(),
        }
    }

    /// Inserts an export under `name`, returning the export it replaced,
    /// if any.
    pub fn insert(&mut self, name: impl Into<String>, export: Export) -> Option<Export> {
        self.map.insert(name.into(), export)
    }
}

// `Default` mirrors `new` (clippy: new_without_default).
impl Default for Namespace {
    fn default() -> Self {
        Self::new()
    }
}

impl LikeNamespace for Namespace {
    fn get_export(&mut self, name: &str) -> Option<Export> {
        self.map.get(name).cloned()
    }
}

View File

@ -1,341 +0,0 @@
use crate::{
backend::Token,
backing::{ImportBacking, LocalBacking},
error::{CallError, CallResult, Result},
export::{
Context, Export, ExportIter, FuncPointer, GlobalPointer, MemoryPointer, TablePointer,
},
import::{ImportObject, LikeNamespace},
module::{ExportIndex, Module, ModuleInner},
types::{
FuncIndex, FuncSig, GlobalDesc, GlobalIndex, LocalOrImport, Memory, MemoryIndex, Table,
TableIndex, Value,
},
vm,
};
use std::mem;
use std::rc::Rc;
/// The mutable interior of an `Instance`, boxed so that the pointers held
/// by `vmctx` into `backing`/`import_backing` stay stable.
pub(crate) struct InstanceInner {
    /// Locally-defined memories, tables, and globals.
    #[allow(dead_code)]
    pub(crate) backing: LocalBacking,
    /// Resolved imports.
    import_backing: ImportBacking,
    /// The context handed to compiled code; initialized in-place after the
    /// backings are boxed (see `Instance::new`).
    vmctx: Box<vm::Ctx>,
}
/// A WebAssembly instance: a module linked with its imports, with its own
/// memories, tables, and globals, ready to have exports called.
pub struct Instance {
    pub module: Rc<ModuleInner>,
    inner: Box<InstanceInner>,
    // Kept alive because `import_backing` holds pointers into it.
    #[allow(dead_code)]
    imports: Box<ImportObject>,
}
impl Instance {
    /// Instantiates `module` against `imports`: resolves imports, allocates
    /// local backing state, wires up the `vm::Ctx`, and runs the module's
    /// start function if it declares one.
    pub(crate) fn new(module: Rc<ModuleInner>, mut imports: Box<ImportObject>) -> Result<Instance> {
        // We need the backing and import_backing to create a vm::Ctx, but we need
        // a vm::Ctx to create a backing and an import_backing. The solution is to create an
        // uninitialized vm::Ctx and then initialize it in-place.
        // NOTE(review): `mem::uninitialized` is deprecated and UB-prone;
        // consider migrating to `MaybeUninit` — confirm toolchain support.
        let mut vmctx = unsafe { Box::new(mem::uninitialized()) };

        let import_backing = ImportBacking::new(&module, &mut imports, &mut *vmctx)?;
        let backing = LocalBacking::new(&module, &import_backing, &mut *vmctx);

        // When Pin is stablized, this will use `Box::pinned` instead of `Box::new`.
        let mut inner = Box::new(InstanceInner {
            backing,
            import_backing,
            vmctx,
        });

        // Initialize the vm::Ctx in-place after the backing
        // has been boxed.
        *inner.vmctx =
            unsafe { vm::Ctx::new(&mut inner.backing, &mut inner.import_backing, &module) };

        let mut instance = Instance {
            module,
            inner,
            imports,
        };

        // Per the wasm spec, the start function runs once at instantiation.
        if let Some(start_index) = instance.module.start_func {
            instance.call_with_index(start_index, &[])?;
        }

        Ok(instance)
    }

    /// Call an exported webassembly function given the export name.
    /// Pass arguments by wrapping each one in the `Value` enum.
    /// The returned values are also each wrapped in a `Value`.
    ///
    /// This returns `CallResult<Vec<Value>>` in order to support
    /// the future multi-value returns webassembly feature.
    pub fn call(&mut self, name: &str, args: &[Value]) -> CallResult<Vec<Value>> {
        let export_index =
            self.module
                .exports
                .get(name)
                .ok_or_else(|| CallError::NoSuchExport {
                    name: name.to_string(),
                })?;

        let func_index = if let ExportIndex::Func(func_index) = export_index {
            *func_index
        } else {
            return Err(CallError::ExportNotFunc {
                name: name.to_string(),
            }
            .into());
        };

        self.call_with_index(func_index, args)
    }

    /// Returns an iterator over all of this instance's exports.
    pub fn exports(&mut self) -> ExportIter {
        ExportIter::new(&self.module, &mut self.inner)
    }

    /// Returns a handle to the module this instance was created from.
    pub fn module(&self) -> Module {
        Module::new(Rc::clone(&self.module))
    }
}
impl Instance {
    /// Shared call path for `call` and the start function: type-checks the
    /// arguments, picks the right `vm::Ctx`, and dispatches through the
    /// backend's `ProtectedCaller`.
    fn call_with_index(&mut self, func_index: FuncIndex, args: &[Value]) -> CallResult<Vec<Value>> {
        let sig_index = *self
            .module
            .func_assoc
            .get(func_index)
            .expect("broken invariant, incorrect func index");
        let signature = self.module.sig_registry.lookup_func_sig(sig_index);

        // Reject calls whose argument types/count don't match the signature.
        if !signature.check_sig(args) {
            Err(CallError::Signature {
                expected: signature.clone(),
                found: args.iter().map(|val| val.ty()).collect(),
            })?
        }

        // Create an output vector that's full of dummy values.
        let mut returns = vec![Value::I32(0); signature.returns.len()];

        // Imported functions run against the context of the instance that
        // defined them; local functions use ours.
        let vmctx = match func_index.local_or_import(&self.module) {
            LocalOrImport::Local(_) => &mut *self.inner.vmctx,
            LocalOrImport::Import(imported_func_index) => {
                self.inner.import_backing.functions[imported_func_index].vmctx
            }
        };

        // Only the runtime crate can mint a Token, proving this call
        // originates from inside the runtime.
        let token = Token::generate();

        self.module.protected_caller.call(
            &self.module,
            func_index,
            args,
            &mut returns,
            &self.inner.import_backing,
            vmctx,
            token,
        )?;

        Ok(returns)
    }
}
impl InstanceInner {
pub(crate) fn get_export_from_index(
&mut self,
module: &ModuleInner,
export_index: &ExportIndex,
) -> Export {
match export_index {
ExportIndex::Func(func_index) => {
let (func, ctx, signature) = self.get_func_from_index(module, *func_index);
Export::Function {
func,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
ctx @ Context::External(_) => ctx,
},
signature,
}
}
ExportIndex::Memory(memory_index) => {
let (local, ctx, memory) = self.get_memory_from_index(module, *memory_index);
Export::Memory {
local,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
ctx @ Context::External(_) => ctx,
},
memory,
}
}
ExportIndex::Global(global_index) => {
let (local, global) = self.get_global_from_index(module, *global_index);
Export::Global { local, global }
}
ExportIndex::Table(table_index) => {
let (local, ctx, table) = self.get_table_from_index(module, *table_index);
Export::Table {
local,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
ctx @ Context::External(_) => ctx,
},
table,
}
}
}
}
fn get_func_from_index(
&mut self,
module: &ModuleInner,
func_index: FuncIndex,
) -> (FuncPointer, Context, FuncSig) {
let sig_index = *module
.func_assoc
.get(func_index)
.expect("broken invariant, incorrect func index");
let (func_ptr, ctx) = match func_index.local_or_import(module) {
LocalOrImport::Local(local_func_index) => (
module
.func_resolver
.get(&module, local_func_index)
.expect("broken invariant, func resolver not synced with module.exports")
.cast()
.as_ptr() as *const _,
Context::Internal,
),
LocalOrImport::Import(imported_func_index) => {
let imported_func = &self.import_backing.functions[imported_func_index];
(
imported_func.func as *const _,
Context::External(imported_func.vmctx),
)
}
};
let signature = module.sig_registry.lookup_func_sig(sig_index).clone();
(unsafe { FuncPointer::new(func_ptr) }, ctx, signature)
}
/// Resolves an exported memory: returns a pointer to its vm descriptor, the
/// owning context, and the declared `Memory` limits.
fn get_memory_from_index(
    &mut self,
    module: &ModuleInner,
    mem_index: MemoryIndex,
) -> (MemoryPointer, Context, Memory) {
    match mem_index.local_or_import(module) {
        // Locally-defined memory: point into this instance's backing store.
        LocalOrImport::Local(local_mem_index) => {
            let vm_mem = &mut self.backing.vm_memories[local_mem_index];
            (
                unsafe { MemoryPointer::new(vm_mem) },
                Context::Internal,
                *module
                    .memories
                    .get(local_mem_index)
                    .expect("broken invariant, memories"),
            )
        }
        // Imported memory being re-exported: forward the descriptor and the
        // foreign vmctx captured at instantiation time.
        LocalOrImport::Import(imported_mem_index) => {
            let &(_, mem) = &module
                .imported_memories
                .get(imported_mem_index)
                .expect("missing imported memory index");
            let vm::ImportedMemory { memory, vmctx } =
                &self.import_backing.memories[imported_mem_index];
            (
                unsafe { MemoryPointer::new(*memory) },
                Context::External(*vmctx),
                *mem,
            )
        }
    }
}
/// Resolves an exported global: returns a pointer to its storage and its
/// descriptor (type + mutability). Globals carry no context.
fn get_global_from_index(
    &mut self,
    module: &ModuleInner,
    global_index: GlobalIndex,
) -> (GlobalPointer, GlobalDesc) {
    match global_index.local_or_import(module) {
        // Locally-defined global: point into this instance's backing store.
        LocalOrImport::Local(local_global_index) => {
            let vm_global = &mut self.backing.vm_globals[local_global_index];
            (
                unsafe { GlobalPointer::new(vm_global) },
                module
                    .globals
                    .get(local_global_index)
                    .expect("broken invariant, globals")
                    .desc,
            )
        }
        // Imported global being re-exported: forward the pointer captured at
        // instantiation time along with the imported descriptor.
        LocalOrImport::Import(imported_global_index) => {
            let &(_, imported_global_desc) = &module
                .imported_globals
                .get(imported_global_index)
                .expect("missing imported global index");
            let vm::ImportedGlobal { global } =
                &self.import_backing.globals[imported_global_index];
            (
                unsafe { GlobalPointer::new(*global) },
                *imported_global_desc,
            )
        }
    }
}
/// Resolves an exported table: returns a pointer to its vm descriptor, the
/// owning context, and the declared `Table` limits.
fn get_table_from_index(
    &mut self,
    module: &ModuleInner,
    table_index: TableIndex,
) -> (TablePointer, Context, Table) {
    match table_index.local_or_import(module) {
        // Locally-defined table: point into this instance's backing store.
        LocalOrImport::Local(local_table_index) => {
            let vm_table = &mut self.backing.vm_tables[local_table_index];
            (
                unsafe { TablePointer::new(vm_table) },
                Context::Internal,
                *module
                    .tables
                    .get(local_table_index)
                    .expect("broken invariant, tables"),
            )
        }
        // Imported table being re-exported: forward the descriptor and the
        // foreign vmctx captured at instantiation time.
        LocalOrImport::Import(imported_table_index) => {
            let &(_, tab) = &module
                .imported_tables
                .get(imported_table_index)
                .expect("missing imported table index");
            let vm::ImportedTable { table, vmctx } =
                &self.import_backing.tables[imported_table_index];
            (
                unsafe { TablePointer::new(*table) },
                Context::External(*vmctx),
                *tab,
            )
        }
    }
}
}
impl LikeNamespace for Instance {
    /// Looks up an export by name and materializes it via
    /// `get_export_from_index`; `None` if the module exports no such name.
    fn get_export(&mut self, name: &str) -> Option<Export> {
        let export_index = self.module.exports.get(name)?;
        Some(self.inner.get_export_from_index(&self.module, export_index))
    }
}
// TODO Remove this later, only needed for compilation till emscripten is updated
impl Instance {
    /// Compatibility stub kept so emscripten-era callers still compile;
    /// always panics via `unimplemented!`.
    pub fn memory_offset_addr(&self, _index: usize, _offset: usize) -> *const u8 {
        unimplemented!()
    }
}

View File

@ -1,51 +1,47 @@
#[cfg(test)]
#[macro_use]
extern crate field_offset;
#[macro_use]
mod macros;
#[doc(hidden)]
pub mod backend;
mod backing;
pub mod error;
pub mod export;
pub mod import;
pub mod instance;
pub mod memory;
pub mod module;
mod sig_registry;
pub mod structures;
mod sys;
pub mod table;
pub mod types;
pub mod vm;
#[doc(hidden)]
pub mod vmcalls;
use self::error::CompileResult;
#[doc(inline)]
pub use self::error::Result;
#[doc(inline)]
pub use self::instance::Instance;
#[doc(inline)]
pub use self::module::Module;
use std::rc::Rc;
pub use wasmer_runtime_core::*;
pub mod prelude {
pub use crate::import::{ImportObject, Namespace};
pub use crate::types::{
FuncIndex, GlobalIndex, ImportedFuncIndex, ImportedGlobalIndex, ImportedMemoryIndex,
ImportedTableIndex, LocalFuncIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex,
MemoryIndex, TableIndex, Type, Value,
};
pub use crate::vm;
pub use crate::{export_func, imports};
pub use wasmer_runtime_core::instance::Instance;
pub use wasmer_runtime_core::module::Module;
pub use wasmer_runtime_core::validate;
/// The `compile(...)` function compiles a `Module`
/// from WebAssembly binary code. This function is useful if it
/// is necessary to compile a module before it can be instantiated
/// (otherwise, the webassembly::instantiate() function should be used).
///
/// Params:
/// * `wasm`: A `&[u8]` containing the
///   binary code of the wasm module you want to compile.
/// Errors:
/// If the operation fails, the function returns `Err(error::CompileError::...)`.
#[cfg(feature = "wasmer-clif-backend")]
pub fn compile(wasm: &[u8]) -> error::CompileResult<module::Module> {
    // Only available with the default Cranelift backend feature enabled.
    use wasmer_clif_backend::CraneliftCompiler;
    wasmer_runtime_core::compile_with(&wasm[..], &CraneliftCompiler::new())
}
/// Compile a webassembly module using the provided compiler.
pub fn compile(wasm: &[u8], compiler: &dyn backend::Compiler) -> CompileResult<module::Module> {
let token = backend::Token::generate();
compiler
.compile(wasm, token)
.map(|inner| module::Module::new(Rc::new(inner)))
/// The `instantiate(...)` function allows you to compile and
/// instantiate WebAssembly code in one go.
///
/// Params:
/// * `wasm`: A `&[u8]` containing the
///   binary code of the wasm module you want to compile.
/// * `import_object`: An object containing the values to be imported
///   into the newly-created Instance, such as functions or
///   webassembly::Memory objects. There must be one matching property
///   for each declared import of the compiled module or else a
///   webassembly::LinkError is thrown.
/// Errors:
/// If the operation fails, the function returns a
/// `error::CompileError`, `error::LinkError`, or
/// `error::RuntimeError` (all combined into an `error::Error`),
/// depending on the cause of the failure.
#[cfg(feature = "wasmer-clif-backend")]
pub fn instantiate(
    wasm: &[u8],
    import_object: import::ImportObject,
) -> error::Result<instance::Instance> {
    // Compile with the default backend, then link the provided imports.
    let module = compile(wasm)?;
    module.instantiate(import_object)
}

View File

@ -1,89 +0,0 @@
/// Prints a `wasmer-runtime(:<line>)::`-prefixed message, but only in debug
/// builds or when the `debug` feature is enabled.
///
/// Fix: the variadic arm previously appended an extra `"\n"` to the format
/// string, producing a trailing blank line the no-argument arm did not emit;
/// both arms now rely on `println!`'s own newline.
macro_rules! debug {
    ($fmt:expr) => (if cfg!(any(debug_assertions, feature="debug")) { println!(concat!("wasmer-runtime(:{})::", $fmt), line!()) });
    ($fmt:expr, $($arg:tt)*) => (if cfg!(any(debug_assertions, feature="debug")) { println!(concat!("wasmer-runtime(:{})::", $fmt), line!(), $($arg)*) });
}
/// Builds an `Export::Function` from a host `extern fn` whose final parameter
/// is `&mut vm::Ctx`, e.g. `export_func!(print, [i32] -> [])`.
///
/// NOTE(review): this expands `use wasmer_runtime::{...}`, so callers must
/// have the crate in scope under exactly that name — confirm whether
/// `$crate` paths were intended here.
#[macro_export]
macro_rules! export_func {
    ($func:ident, [ $( $params:ident ),* ] -> [ $( $returns:ident ),* ]) => {{
        use wasmer_runtime::{
            export::{Context, Export, FuncPointer},
            types::{FuncSig, Type},
            vm,
        };
        // The host function receives its wasm params followed by the vm context.
        let func: extern fn( $( $params, )* &mut vm::Ctx) -> ($( $returns )*) = $func;
        Export::Function {
            func: unsafe { FuncPointer::new(func as _) },
            ctx: Context::Internal,
            signature: FuncSig {
                params: vec![$($crate::__export_func_convert_type!($params),)*],
                returns: vec![$($crate::__export_func_convert_type!($returns),)*],
            },
        }
    }};
}
/// Maps a Rust scalar type token to the corresponding wasm `Type`.
///
/// Fix: the `u64` arm previously expanded to `Type::I32`; `u64` is a 64-bit
/// integer and must map to `Type::I64`, mirroring how `u32` maps to
/// `Type::I32` and matching the `i64` arm.
#[macro_export]
#[doc(hidden)]
macro_rules! __export_func_convert_type {
    (i32) => {
        Type::I32
    };
    (u32) => {
        Type::I32
    };
    (i64) => {
        Type::I64
    };
    (u64) => {
        Type::I64
    };
    (f32) => {
        Type::F32
    };
    (f64) => {
        Type::F64
    };
    ($x:ty) => {
        compile_error!("Only `i32`, `u32`, `i64`, `u64`, `f32`, and `f64` are supported for argument and return types")
    };
}
/// Builds an `ImportObject` from namespace blocks, e.g.
/// `imports! { "env" => { "print" => print<[i32] -> []>, }, }`.
#[macro_export]
macro_rules! imports {
    ( $( $ns_name:expr => $ns:tt, )* ) => {{
        use wasmer_runtime::{
            import::{ImportObject, Namespace},
        };
        let mut import_object = ImportObject::new();
        $({
            // Each `$ns` is either a `{ ... }` block of functions or an
            // identifier naming an existing namespace value.
            let ns = $crate::__imports_internal!($ns);
            import_object.register($ns_name, ns);
        })*
        import_object
    }};
}
/// Helper for `imports!`: turns a `{ name => func<[..] -> [..]>, }` block
/// into a populated `Namespace`, or passes an existing namespace value
/// through unchanged.
///
/// NOTE(review): the recursive call forwards `[ $( $returns )* ]` without
/// comma separators while `export_func!` matches `$( $returns ),*` — fine for
/// zero or one return value; worth confirming for a multi-return case.
#[macro_export]
#[doc(hidden)]
macro_rules! __imports_internal {
    ( { $( $imp_name:expr => $func:ident < [ $( $params:ident ),* ] -> [ $( $returns:ident ),* ] >, )* } ) => {{
        let mut ns = Namespace::new();
        $(
            ns.insert($imp_name, $crate::export_func!(
                $func,
                [ $( $params ),* ] -> [ $( $returns )* ]
            ));
        )*
        ns
    }};
    ($ns:ident) => {
        $ns
    };
}

View File

@ -1,225 +0,0 @@
use std::ops::{Deref, DerefMut};
use crate::{
sys,
types::{LocalMemoryIndex, Memory},
vm,
};
/// A linear memory instance.
#[derive(Debug)]
pub struct LinearMemory {
    /// The actual memory allocation.
    memory: sys::Memory,
    /// The current number of wasm pages.
    current: u32,
    // The maximum size the WebAssembly Memory is allowed to grow
    // to, in units of WebAssembly pages. When present, the maximum
    // parameter acts as a hint to the engine to reserve memory up
    // front. However, the engine may ignore or clamp this reservation
    // request. In general, most WebAssembly modules shouldn't need
    // to set a maximum.
    max: Option<u32>,
    // The size of the extra guard pages after the end.
    // Is used to optimize loads and stores with constant offsets.
    offset_guard_size: usize,
    /// Requires exception catching to handle out-of-bounds accesses.
    requires_signal_catch: bool,
}
/// It holds the raw bytes of memory accessed by a WebAssembly Instance
impl LinearMemory {
    /// Bytes per wasm page.
    pub(crate) const PAGE_SIZE: u32 = 65_536;
    /// Maximum number of pages a wasm memory may have (4 GiB addressable).
    pub(crate) const MAX_PAGES: u32 = 65_536;
    #[doc(hidden)]
    pub const DEFAULT_HEAP_SIZE: usize = 1 << 32; // 4 GiB
    #[doc(hidden)]
    pub const DEFAULT_GUARD_SIZE: usize = 1 << 31; // 2 GiB
    pub(crate) const DEFAULT_SIZE: usize = Self::DEFAULT_HEAP_SIZE + Self::DEFAULT_GUARD_SIZE; // 6 GiB

    /// Create a new linear memory instance with specified initial and maximum number of pages.
    ///
    /// `maximum` cannot be set to more than `65536` pages.
    pub(crate) fn new(mem: &Memory) -> Self {
        assert!(mem.min <= Self::MAX_PAGES);
        assert!(mem.max.is_none() || mem.max.unwrap() <= Self::MAX_PAGES);
        debug!("Instantiate LinearMemory(mem: {:?})", mem);

        // NOTE(review): the static/dynamic split is hard-wired to the static
        // path (`true` in place of `mem.is_static_heap()`).
        let (mmap_size, initial_pages, offset_guard_size, requires_signal_catch) = if
        /*mem.is_static_heap()*/
        true {
            (Self::DEFAULT_SIZE, mem.min, Self::DEFAULT_GUARD_SIZE, true)
        // This is a static heap
        } else {
            // this is a dynamic heap
            assert!(!mem.shared, "shared memories must have a maximum size.");
            (
                mem.min as usize * Self::PAGE_SIZE as usize,
                mem.min,
                0,
                false,
            )
        };

        let mut memory = sys::Memory::with_size(mmap_size).unwrap();

        // map initial pages as readwrite since the inital mmap is mapped as not accessible.
        if initial_pages != 0 {
            unsafe {
                memory
                    .protect(
                        0..(initial_pages as usize * Self::PAGE_SIZE as usize),
                        sys::Protect::ReadWrite,
                    )
                    .expect("unable to make memory accessible");
            }
        }

        Self {
            memory,
            current: initial_pages,
            max: mem.max,
            offset_guard_size,
            requires_signal_catch,
        }
    }

    /// Returns the base address of this linear memory.
    fn base(&mut self) -> *mut u8 {
        self.memory.as_ptr()
    }

    /// Returns the currently-accessible size in bytes.
    pub(crate) fn size(&self) -> usize {
        self.current as usize * Self::PAGE_SIZE as usize
    }

    /// Returns the current size in wasm pages.
    pub fn pages(&self) -> u32 {
        self.current
    }

    /// Returns the maximum number of wasm pages allowed.
    pub fn max(&self) -> u32 {
        self.max.unwrap_or(Self::MAX_PAGES)
    }

    /// Builds the raw memory descriptor handed to generated code.
    pub(crate) fn into_vm_memory(&mut self, index: LocalMemoryIndex) -> vm::LocalMemory {
        vm::LocalMemory {
            base: self.base(),
            size: self.size(),
            index,
        }
    }

    /// Grow memory by the specified amount of pages, reallocating and copying
    /// when the current mapping is too small.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages; otherwise the previous page count.
    pub(crate) fn grow_dynamic(&mut self, add_pages: u32) -> Option<i32> {
        debug!("grow_memory_dynamic called!");
        // NOTE(review): this assert makes the `Some(val)` branch below dead
        // code for dynamic memories.
        assert!(self.max.is_none());
        if add_pages == 0 {
            return Some(self.current as _);
        }

        let prev_pages = self.current;

        let new_pages = match self.current.checked_add(add_pages) {
            Some(new_pages) => new_pages,
            None => return None,
        };

        if let Some(val) = self.max {
            if new_pages > val {
                return None;
            }
        // Wasm linear memories are never allowed to grow beyond what is
        // indexable. If the memory has no maximum, enforce the greatest
        // limit here.
        } else if new_pages >= Self::MAX_PAGES {
            return None;
        }

        // Fix: compute the byte count in `usize`. The previous
        // `(new_pages * Self::PAGE_SIZE) as usize` multiplied in `u32` and
        // overflows for page counts approaching the 65 536-page limit.
        let new_bytes = new_pages as usize * Self::PAGE_SIZE as usize;

        if new_bytes > self.memory.size() - self.offset_guard_size {
            let memory_size = new_bytes.checked_add(self.offset_guard_size)?;
            let mut new_memory = sys::Memory::with_size(memory_size).ok()?;

            unsafe {
                new_memory
                    .protect(0..new_bytes, sys::Protect::ReadWrite)
                    .ok()?;
            }

            // Copy the old accessible region into the new mapping.
            let copy_size = self.memory.size() - self.offset_guard_size;
            unsafe {
                new_memory.as_slice_mut()[..copy_size]
                    .copy_from_slice(&self.memory.as_slice()[..copy_size]);
            }

            self.memory = new_memory;
        }

        self.current = new_pages;

        Some(prev_pages as i32)
    }

    /// Grow an already fully-reserved (static) memory by committing pages in
    /// place. Returns the previous page count, or `None` on failure.
    pub(crate) fn grow_static(&mut self, add_pages: u32) -> Option<i32> {
        if add_pages == 0 {
            return Some(self.current as _);
        }

        let prev_pages = self.current;

        let new_pages = match self.current.checked_add(add_pages) {
            Some(new_pages) => new_pages,
            None => return None,
        };

        if let Some(val) = self.max {
            if new_pages > val {
                return None;
            }
        // Wasm linear memories are never allowed to grow beyond what is
        // indexable. If the memory has no maximum, enforce the greatest
        // limit here.
        } else if new_pages >= Self::MAX_PAGES {
            return None;
        }

        // Fix: byte counts computed in `usize` — the previous `u32`
        // multiplications (`prev_pages * Self::PAGE_SIZE`, etc.) overflow at
        // the 65 536-page maximum (65536 * 65536 == 2^32).
        let prev_bytes = prev_pages as usize * Self::PAGE_SIZE as usize;
        let new_bytes = new_pages as usize * Self::PAGE_SIZE as usize;

        unsafe {
            self.memory
                .protect(prev_bytes..new_bytes, sys::Protect::ReadWrite)
                .ok()?;
        }

        self.current = new_pages;

        Some(prev_pages as i32)
    }
}
// Byte-slice views over the whole mapping (including guard pages).
// SAFETY(review): assumes the pages being dereferenced are accessible;
// touching guard pages through these slices would fault.
impl Deref for LinearMemory {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
        unsafe { self.memory.as_slice() }
    }
}
impl DerefMut for LinearMemory {
    fn deref_mut(&mut self) -> &mut [u8] {
        unsafe { self.memory.as_slice_mut() }
    }
}

View File

@ -1,104 +0,0 @@
use crate::{
backend::{FuncResolver, ProtectedCaller},
error::Result,
import::ImportObject,
sig_registry::SigRegistry,
structures::Map,
types::{
FuncIndex, Global, GlobalDesc, GlobalIndex, ImportedFuncIndex, ImportedGlobalIndex,
ImportedMemoryIndex, ImportedTableIndex, Initializer, LocalGlobalIndex, LocalMemoryIndex,
LocalTableIndex, Memory, MemoryIndex, SigIndex, Table, TableIndex,
},
Instance,
};
use hashbrown::HashMap;
use std::rc::Rc;
/// This is used to instantiate a new WebAssembly module.
#[doc(hidden)]
pub struct ModuleInner {
    pub func_resolver: Box<dyn FuncResolver>,
    pub protected_caller: Box<dyn ProtectedCaller>,
    // These are strictly local and the type system ensures that.
    pub memories: Map<LocalMemoryIndex, Memory>,
    pub globals: Map<LocalGlobalIndex, Global>,
    pub tables: Map<LocalTableIndex, Table>,
    // These are strictly imported and the typesystem ensures that.
    pub imported_functions: Map<ImportedFuncIndex, ImportName>,
    pub imported_memories: Map<ImportedMemoryIndex, (ImportName, Memory)>,
    pub imported_tables: Map<ImportedTableIndex, (ImportName, Table)>,
    pub imported_globals: Map<ImportedGlobalIndex, (ImportName, GlobalDesc)>,
    // Export name -> the index space entry it refers to.
    pub exports: HashMap<String, ExportIndex>,
    // Data/element segments applied at instantiation time.
    pub data_initializers: Vec<DataInitializer>,
    pub elem_initializers: Vec<TableInitializer>,
    // Optional start function run after instantiation.
    pub start_func: Option<FuncIndex>,
    pub func_assoc: Map<FuncIndex, SigIndex>,
    pub sig_registry: SigRegistry,
}
/// A compiled WebAssembly module, cheaply clonable via `Rc`.
pub struct Module(pub Rc<ModuleInner>);
impl Module {
    pub(crate) fn new(inner: Rc<ModuleInner>) -> Self {
        Module(inner)
    }
    /// Instantiate a WebAssembly module with the provided imports.
    pub fn instantiate(&self, imports: ImportObject) -> Result<Instance> {
        Instance::new(Rc::clone(&self.0), Box::new(imports))
    }
}
impl ModuleInner {}
/// A fully-qualified import name: the pair `namespace` + `name`.
#[doc(hidden)]
#[derive(Debug, Clone)]
pub struct ImportName {
    pub namespace: String,
    pub name: String,
}

impl From<(String, String)> for ImportName {
    /// Builds an `ImportName` from a `(namespace, name)` pair.
    fn from((namespace, name): (String, String)) -> Self {
        Self { namespace, name }
    }
}
/// What an export name points at: an entry in one of the four index spaces.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExportIndex {
    Func(FuncIndex),
    Memory(MemoryIndex),
    Global(GlobalIndex),
    Table(TableIndex),
}
/// A data initializer for linear memory.
#[derive(Debug, Clone)]
pub struct DataInitializer {
    /// The index of the memory to initialize.
    pub memory_index: MemoryIndex,
    /// Either a constant offset or a `get_global`
    pub base: Initializer,
    /// The initialization data.
    pub data: Vec<u8>,
}
/// A WebAssembly table initializer.
#[derive(Debug, Clone)]
pub struct TableInitializer {
    /// The index of a table to initialize.
    pub table_index: TableIndex,
    /// Either a constant offset or a `get_global`
    pub base: Initializer,
    /// The values to write into the table elements.
    pub elements: Vec<FuncIndex>,
}

View File

@ -1,41 +0,0 @@
use crate::{
structures::Map,
types::{FuncSig, SigIndex},
};
use hashbrown::HashMap;
/// Deduplicating registry of function signatures.
#[derive(Debug)]
pub struct SigRegistry {
    // Signature -> its canonical (first-seen) index.
    func_table: HashMap<FuncSig, SigIndex>,
    // Canonical index -> signature.
    sig_assoc: Map<SigIndex, FuncSig>,
    // Every registered index -> its canonical deduplicated index.
    duplicated_sig_assoc: Map<SigIndex, SigIndex>,
}
impl SigRegistry {
    /// Creates an empty registry.
    pub fn new() -> Self {
        Self {
            func_table: HashMap::new(),
            sig_assoc: Map::new(),
            duplicated_sig_assoc: Map::new(),
        }
    }
    /// Registers `func_sig`, returning the canonical index shared by all
    /// structurally-equal signatures.
    pub fn register(&mut self, func_sig: FuncSig) -> SigIndex {
        // Split borrows so the closure can push into `sig_assoc` while the
        // entry API holds `func_table`.
        let func_table = &mut self.func_table;
        let sig_assoc = &mut self.sig_assoc;
        let sig_index = *func_table
            .entry(func_sig.clone())
            .or_insert_with(|| sig_assoc.push(func_sig));
        self.duplicated_sig_assoc.push(sig_index);
        sig_index
    }
    /// Maps any registered index to its canonical deduplicated index.
    pub fn lookup_deduplicated_sigindex(&self, sig_index: SigIndex) -> SigIndex {
        self.duplicated_sig_assoc[sig_index]
    }
    /// Returns the signature stored at a canonical index.
    pub fn lookup_func_sig(&self, sig_index: SigIndex) -> &FuncSig {
        &self.sig_assoc[sig_index]
    }
}

View File

@ -1,46 +0,0 @@
use super::{SliceMap, TypedIndex};
use std::{
marker::PhantomData,
mem,
ops::{Deref, DerefMut},
};
/// A fixed-length, typed-index map backed by a boxed slice; produced by
/// `Map::into_boxed_map`.
#[derive(Debug, Clone)]
pub struct BoxedMap<K, V>
where
    K: TypedIndex,
{
    elems: Box<[V]>,
    // Zero-sized tag pinning the key type.
    _marker: PhantomData<K>,
}
impl<K, V> BoxedMap<K, V>
where
    K: TypedIndex,
{
    pub(in crate::structures) fn new(elems: Box<[V]>) -> Self {
        Self {
            elems,
            _marker: PhantomData,
        }
    }
}
impl<K, V> Deref for BoxedMap<K, V>
where
    K: TypedIndex,
{
    type Target = SliceMap<K, V>;
    fn deref(&self) -> &SliceMap<K, V> {
        // SAFETY(review): `SliceMap<K, V>` is a DST wrapping `[V]` plus a
        // zero-sized `PhantomData`; this transmute assumes identical layout,
        // which holds in practice but is not guaranteed without `#[repr(..)]`.
        unsafe { mem::transmute::<&[V], _>(&*self.elems) }
    }
}
impl<K, V> DerefMut for BoxedMap<K, V>
where
    K: TypedIndex,
{
    fn deref_mut(&mut self) -> &mut SliceMap<K, V> {
        // SAFETY(review): see `deref`.
        unsafe { mem::transmute::<&mut [V], _>(&mut *self.elems) }
    }
}

View File

@ -1,221 +0,0 @@
use super::{BoxedMap, SliceMap, TypedIndex};
use std::{
iter::{self, Extend, FromIterator},
marker::PhantomData,
mem,
ops::{Deref, DerefMut},
slice, vec,
};
/// Dense item map
///
/// A growable `Vec` indexed only by its dedicated `TypedIndex` key type.
#[derive(Debug, Clone)]
pub struct Map<K, V>
where
    K: TypedIndex,
{
    elems: Vec<V>,
    // Zero-sized tag pinning the key type.
    _marker: PhantomData<K>,
}
impl<K, V> Map<K, V>
where
    K: TypedIndex,
{
    /// Creates an empty map.
    pub fn new() -> Self {
        Self {
            elems: Vec::new(),
            _marker: PhantomData,
        }
    }
    /// Creates an empty map with room for `capacity` elements.
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            elems: Vec::with_capacity(capacity),
            _marker: PhantomData,
        }
    }
    /// Number of stored elements.
    pub fn len(&self) -> usize {
        self.elems.len()
    }
    /// Appends `value` and returns the typed index it was stored at.
    pub fn push(&mut self, value: V) -> K {
        let len = self.len();
        self.elems.push(value);
        K::new(len)
    }
    /// The index the next `push` would return.
    pub fn next_index(&self) -> K {
        K::new(self.len())
    }
    /// Reserves space for exactly `size` additional elements.
    pub fn reserve_exact(&mut self, size: usize) {
        self.elems.reserve_exact(size);
    }
    /// Freezes the map into a fixed-length `BoxedMap`.
    pub fn into_boxed_map(self) -> BoxedMap<K, V> {
        BoxedMap::new(self.elems.into_boxed_slice())
    }
}
impl<K, V> Map<K, V>
where
    K: TypedIndex,
    V: Clone,
{
    /// Resizes to `new_len`, filling new slots with clones of `value`.
    pub fn resize(&mut self, new_len: usize, value: V) {
        self.elems.resize(new_len, value);
    }
}
// Bulk construction/extension delegates straight to the inner `Vec`.
impl<K, V> Extend<V> for Map<K, V>
where
    K: TypedIndex,
{
    fn extend<I: IntoIterator<Item = V>>(&mut self, iter: I) {
        self.elems.extend(iter);
    }
}
impl<K, V> FromIterator<V> for Map<K, V>
where
    K: TypedIndex,
{
    fn from_iter<I: IntoIterator<Item = V>>(iter: I) -> Self {
        let elems: Vec<V> = iter.into_iter().collect();
        Self {
            elems,
            _marker: PhantomData,
        }
    }
}
impl<K, V> Deref for Map<K, V>
where
    K: TypedIndex,
{
    type Target = SliceMap<K, V>;
    fn deref(&self) -> &SliceMap<K, V> {
        // SAFETY(review): `SliceMap<K, V>` is a DST over `[V]` plus a
        // zero-sized `PhantomData`; this transmute assumes identical layout,
        // which holds in practice but is not guaranteed without `#[repr(..)]`.
        unsafe { mem::transmute::<&[V], _>(self.elems.as_slice()) }
    }
}
impl<K, V> DerefMut for Map<K, V>
where
    K: TypedIndex,
{
    fn deref_mut(&mut self) -> &mut SliceMap<K, V> {
        // SAFETY(review): see `deref`.
        unsafe { mem::transmute::<&mut [V], _>(self.elems.as_mut_slice()) }
    }
}
/// Owning iterator over a `Map`, yielding `(typed index, value)` pairs.
pub struct IntoIter<K, V>
where
    K: TypedIndex,
{
    enumerated: iter::Enumerate<vec::IntoIter<V>>,
    _marker: PhantomData<K>,
}
impl<K, V> IntoIter<K, V>
where
    K: TypedIndex,
{
    pub(in crate::structures) fn new(into_iter: vec::IntoIter<V>) -> Self {
        Self {
            enumerated: into_iter.enumerate(),
            _marker: PhantomData,
        }
    }
}
impl<K, V> Iterator for IntoIter<K, V>
where
    K: TypedIndex,
{
    type Item = (K, V);
    fn next(&mut self) -> Option<(K, V)> {
        // Wrap the raw position back into the typed index.
        self.enumerated.next().map(|(i, v)| (K::new(i), v))
    }
}
// `for (k, v) in map` / `&map` / `&mut map` support.
impl<K, V> IntoIterator for Map<K, V>
where
    K: TypedIndex,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self.elems.into_iter())
    }
}
impl<'a, K, V> IntoIterator for &'a Map<K, V>
where
    K: TypedIndex,
{
    type Item = (K, &'a V);
    type IntoIter = Iter<'a, K, V>;
    fn into_iter(self) -> Self::IntoIter {
        Iter::new(self.elems.iter())
    }
}
impl<'a, K, V> IntoIterator for &'a mut Map<K, V>
where
    K: TypedIndex,
{
    type Item = (K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;
    fn into_iter(self) -> Self::IntoIter {
        IterMut::new(self.elems.iter_mut())
    }
}
/// Borrowing iterator yielding `(typed index, &value)` pairs.
pub struct Iter<'a, K: TypedIndex, V: 'a> {
    enumerated: iter::Enumerate<slice::Iter<'a, V>>,
    _marker: PhantomData<K>,
}
impl<'a, K: TypedIndex, V: 'a> Iter<'a, K, V> {
    pub(in crate::structures) fn new(iter: slice::Iter<'a, V>) -> Self {
        Self {
            enumerated: iter.enumerate(),
            _marker: PhantomData,
        }
    }
}
impl<'a, K: TypedIndex, V: 'a> Iterator for Iter<'a, K, V> {
    type Item = (K, &'a V);
    fn next(&mut self) -> Option<Self::Item> {
        self.enumerated.next().map(|(i, v)| (K::new(i), v))
    }
}
/// Mutably-borrowing iterator yielding `(typed index, &mut value)` pairs.
pub struct IterMut<'a, K: TypedIndex, V: 'a> {
    enumerated: iter::Enumerate<slice::IterMut<'a, V>>,
    _marker: PhantomData<K>,
}
impl<'a, K: TypedIndex, V: 'a> IterMut<'a, K, V> {
    pub(in crate::structures) fn new(iter: slice::IterMut<'a, V>) -> Self {
        Self {
            enumerated: iter.enumerate(),
            _marker: PhantomData,
        }
    }
}
impl<'a, K: TypedIndex, V: 'a> Iterator for IterMut<'a, K, V> {
    type Item = (K, &'a mut V);
    fn next(&mut self) -> Option<Self::Item> {
        self.enumerated.next().map(|(i, v)| (K::new(i), v))
    }
}

View File

@ -1,14 +0,0 @@
mod boxed;
mod map;
mod slice;
pub use self::boxed::BoxedMap;
pub use self::map::{Iter, IterMut, Map};
pub use self::slice::SliceMap;
/// A typed wrapper over a raw `usize` index, implemented by the
/// `define_map_index!` newtypes so each `Map` can only be indexed by its own
/// key type.
pub trait TypedIndex {
    #[doc(hidden)]
    fn new(index: usize) -> Self;
    #[doc(hidden)]
    fn index(&self) -> usize;
}

View File

@ -1,86 +0,0 @@
// Storage for `MonoVec`: empty, a single inline item, or a heap vector.
#[derive(Debug, Clone)]
enum MonoVecInner<T> {
    None,
    Inline(T),
    Heap(Vec<T>),
}
/// A type that can hold zero items,
/// one item, or many items.
///
/// Avoids a heap allocation for the zero- and one-item cases.
#[derive(Debug, Clone)]
pub struct MonoVec<T> {
    inner: MonoVecInner<T>,
}
impl<T> MonoVec<T> {
pub fn new() -> Self {
Self {
inner: MonoVecInner::None,
}
}
pub fn new_inline(item: T) -> Self {
Self {
inner: MonoVecInner::Inline(item),
}
}
pub fn with_capacity(capacity: usize) -> Self {
match capacity {
0 | 1 => Self::new(),
_ => Self {
inner: MonoVecInner::Heap(Vec::with_capacity(capacity)),
},
}
}
pub fn push(&mut self, item: T) {
let uninit = unsafe { mem::uninitialized() };
let prev = mem::replace(&mut self.inner, uninit);
let next = match prev {
MonoVecInner::None => MonoVecInner::Inline(item),
MonoVecInner::Inline(previous_item) => MonoVecInner::Heap(vec![previous_item, item]),
MonoVecInner::Heap(mut v) => {
v.push(item);
MonoVecInner::Heap(v)
}
};
let uninit = mem::replace(&mut self.inner, next);
mem::forget(uninit);
}
pub fn pop(&mut self) -> Option<T> {
match self.inner {
MonoVecInner::None => None,
MonoVecInner::Inline(ref mut item) => {
let uninit = unsafe { mem::uninitialized() };
let item = mem::replace(item, uninit);
let uninit = mem::replace(&mut self.inner, MonoVecInner::None);
mem::forget(uninit);
Some(item)
}
MonoVecInner::Heap(ref mut v) => v.pop(),
}
}
pub fn as_slice(&self) -> &[T] {
match self.inner {
MonoVecInner::None => unsafe {
slice::from_raw_parts(mem::align_of::<T>() as *const T, 0)
},
MonoVecInner::Inline(ref item) => slice::from_ref(item),
MonoVecInner::Heap(ref v) => &v[..],
}
}
pub fn as_slice_mut(&mut self) -> &mut [T] {
match self.inner {
MonoVecInner::None => unsafe {
slice::from_raw_parts_mut(mem::align_of::<T>() as *mut T, 0)
},
MonoVecInner::Inline(ref mut item) => slice::from_mut(item),
MonoVecInner::Heap(ref mut v) => &mut v[..],
}
}
}

View File

@ -1,69 +0,0 @@
use super::{Iter, IterMut, TypedIndex};
use std::{
marker::PhantomData,
ops::{Index, IndexMut},
};
/// This is a dynamically-sized slice
/// that can only be indexed by the
/// correct index type.
///
/// NOTE(review): only ever materialized by transmuting from `[V]` (see
/// `Map`/`BoxedMap`); `PhantomData` is zero-sized, but no `#[repr(..)]` is
/// specified, so layout equivalence is assumed rather than guaranteed.
#[derive(Debug)]
pub struct SliceMap<K, V>
where
    K: TypedIndex,
{
    _marker: PhantomData<K>,
    slice: [V],
}
impl<K, V> SliceMap<K, V>
where
    K: TypedIndex,
{
    /// Checked lookup by typed index.
    pub fn get(&self, index: K) -> Option<&V> {
        self.slice.get(index.index())
    }
    /// Checked mutable lookup by typed index.
    pub fn get_mut(&mut self, index: K) -> Option<&mut V> {
        self.slice.get_mut(index.index())
    }
    /// Number of elements.
    pub fn len(&self) -> usize {
        self.slice.len()
    }
    /// Iterates `(typed index, &value)` pairs.
    pub fn iter(&self) -> Iter<K, V> {
        Iter::new(self.slice.iter())
    }
    /// Iterates `(typed index, &mut value)` pairs.
    pub fn iter_mut(&mut self) -> IterMut<K, V> {
        IterMut::new(self.slice.iter_mut())
    }
    /// Raw pointer to the first element; casting the fat DST pointer to a
    /// thin `*const V` discards the length metadata and keeps the data address.
    pub fn as_ptr(&self) -> *const V {
        self as *const SliceMap<K, V> as *const V
    }
    /// Mutable raw pointer to the first element (see `as_ptr`).
    pub fn as_mut_ptr(&mut self) -> *mut V {
        self as *mut SliceMap<K, V> as *mut V
    }
}
// Panicking indexed access, `map[typed_index]`; out-of-range panics like a
// slice index would.
impl<K, V> Index<K> for SliceMap<K, V>
where
    K: TypedIndex,
{
    type Output = V;
    fn index(&self, index: K) -> &V {
        &self.slice[index.index()]
    }
}
impl<K, V> IndexMut<K> for SliceMap<K, V>
where
    K: TypedIndex,
{
    fn index_mut(&mut self, index: K) -> &mut V {
        &mut self.slice[index.index()]
    }
}

View File

@ -1,11 +0,0 @@
#[cfg(unix)]
mod unix;
#[cfg(windows)]
mod windows;
#[cfg(unix)]
pub use self::unix::*;
#[cfg(windows)]
pub use self::windows::*;

View File

@ -1,116 +0,0 @@
use errno;
use nix::libc;
use page_size;
use std::ops::Range;
use std::{ptr, slice};
/// An mmap'd region of virtual memory (Unix implementation).
#[derive(Debug)]
pub struct Memory {
    ptr: *mut u8,  // base of the mapping; null when size == 0
    size: usize,   // page-aligned length in bytes
}
impl Memory {
    /// mmap()s `size` bytes (rounded up to the page size) of `PROT_NONE`
    /// anonymous memory; pages become accessible later via `protect`.
    pub fn with_size(size: usize) -> Result<Self, String> {
        if size == 0 {
            // A zero-sized mapping is represented by a null pointer.
            return Ok(Self {
                ptr: ptr::null_mut(),
                size: 0,
            });
        }
        let size = round_up_to_page_size(size, page_size::get());
        let ptr = unsafe {
            libc::mmap(
                ptr::null_mut(),
                size,
                libc::PROT_NONE,
                libc::MAP_PRIVATE | libc::MAP_ANON,
                -1,
                0,
            )
        };
        // mmap signals failure with MAP_FAILED (i.e. -1), not null.
        if ptr == -1 as _ {
            Err(errno::errno().to_string())
        } else {
            Ok(Self {
                ptr: ptr as *mut u8,
                size,
            })
        }
    }
    /// Changes the protection of the pages covering `range`.
    ///
    /// # Safety
    /// `range` must lie within this mapping, and no live references into the
    /// affected pages may exist while protection is being reduced.
    pub unsafe fn protect(&mut self, range: Range<usize>, protect: Protect) -> Result<(), String> {
        let protect = protect.to_protect_const();
        let page_size = page_size::get();
        // Align the start down and the length up so mprotect sees whole pages.
        let start = self
            .ptr
            .add(round_down_to_page_size(range.start, page_size));
        let size = round_up_to_page_size(range.end - range.start, page_size);
        assert!(size <= self.size);
        let success = libc::mprotect(start as _, size, protect as i32);
        if success == -1 {
            Err(errno::errno().to_string())
        } else {
            Ok(())
        }
    }
    /// Total mapped size in bytes (page-aligned).
    pub fn size(&self) -> usize {
        self.size
    }
    /// # Safety
    /// The caller must ensure the pages read through this slice are mapped
    /// readable.
    pub unsafe fn as_slice(&self) -> &[u8] {
        slice::from_raw_parts(self.ptr, self.size)
    }
    /// # Safety
    /// As `as_slice`, plus the pages written must be mapped writable.
    pub unsafe fn as_slice_mut(&mut self) -> &mut [u8] {
        slice::from_raw_parts_mut(self.ptr, self.size)
    }
    /// Base address of the mapping (null for zero-sized allocations).
    pub fn as_ptr(&self) -> *mut u8 {
        self.ptr
    }
}
impl Drop for Memory {
    fn drop(&mut self) {
        // Zero-sized instances were never mmap'd, so there is nothing to unmap.
        if !self.ptr.is_null() {
            let success = unsafe { libc::munmap(self.ptr as _, self.size) };
            assert_eq!(success, 0, "failed to unmap memory: {}", errno::errno());
        }
    }
}
/// Page protection modes passed to `Memory::protect`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[allow(dead_code)]
pub enum Protect {
    None,
    Read,
    ReadWrite,
    ReadExec,
}
impl Protect {
    /// Converts to the numeric protection bits handed to `mprotect`.
    ///
    /// NOTE(review): hard-codes PROT_READ=1, PROT_WRITE=2, PROT_EXEC=4 —
    /// true on Linux/macOS; using the `libc::PROT_*` constants would be more
    /// robust across platforms.
    fn to_protect_const(self) -> u32 {
        match self {
            Protect::None => 0,
            Protect::Read => 1,
            Protect::ReadWrite => 1 | 2,
            Protect::ReadExec => 1 | 4,
        }
    }
}
/// Round `size` up to the nearest multiple of `page_size`
/// (`page_size` must be a power of two).
fn round_up_to_page_size(size: usize, page_size: usize) -> usize {
    let mask = page_size - 1;
    (size + mask) & !mask
}

/// Round `size` down to the nearest multiple of `page_size`
/// (`page_size` must be a power of two).
fn round_down_to_page_size(size: usize, page_size: usize) -> usize {
    let mask = page_size - 1;
    size & !mask
}

View File

@ -1,3 +0,0 @@
mod memory;
pub use self::memory::{Memory, Protect};

View File

@ -1,124 +0,0 @@
use winapi::um::memoryapi::{
    VirtualAlloc, VirtualFree,
    MEM_COMMIT, MEM_RELEASE, MEM_RESERVE,
    PAGE_EXECUTE_READ, PAGE_NOACCESS, PAGE_READONLY, PAGE_READWRITE,
};
use page_size;
use std::ops::Range;
use std::{ptr, slice};
/// A reserved region of Windows virtual memory.
#[derive(Debug)]
pub struct Memory {
    ptr: *mut u8,  // base of the VirtualAlloc reservation; null when size == 0
    size: usize,   // page-aligned reservation length in bytes
}
impl Memory {
pub fn with_size(size: usize) -> Result<Self, String> {
if size == 0 {
return Ok(Self {
ptr: ptr::null_mut(),
size: 0,
});
}
let size = round_up_to_page_size(size, page_size::get());
let ptr = unsafe {
VirtualAlloc(
ptr::null_mut(),
size,
MEM_RESERVE,
PAGE_NOACCESS,
)
};
if ptr.is_null() {
Err("unable to allocate memory")
} else {
Ok(Self {
ptr: ptr as *mut u8,
size,
})
}
}
pub unsafe fn protect(&mut self, range: Range<usize>, protect: Protect) -> Result<(), String> {
let protect = protect.to_protect_const();
let page_size = page_size::get();
let start = self
.ptr
.add(round_down_to_page_size(range.start, page_size));
let size = round_up_to_page_size(range.end - range.start, page_size);
assert!(size <= self.size);
// Commit the virtual memory.
let ptr = VirtualAlloc(
start as _,
size,
MEM_COMMIT,
protect,
);
if ptr.is_null() {
Err("unable to protect memory")
} else {
Ok(())
}
}
pub fn size(&self) -> usize {
self.size
}
pub unsafe fn as_slice(&self) -> &[u8] {
slice::from_raw_parts(self.ptr, self.size)
}
pub unsafe fn as_slice_mut(&mut self) -> &mut [u8] {
slice::from_raw_parts_mut(self.ptr, self.size)
}
pub fn as_ptr(&self) -> *mut u8 {
self.ptr
}
}
impl Drop for Memory {
fn drop(&mut self) {
if !self.ptr.is_null() {
let success = unsafe { libc::munmap(self.ptr as _, self.size) };
assert_eq!(success, 0, "failed to unmap memory: {}", errno::errno());
}
}
}
/// Page protection modes passed to `Memory::protect`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[allow(dead_code)]
pub enum Protect {
    None,
    Read,
    ReadWrite,
    ReadExec,
}
impl Protect {
    /// Converts to the corresponding `PAGE_*` protection constant for
    /// `VirtualAlloc`.
    fn to_protect_const(self) -> u32 {
        match self {
            Protect::None => PAGE_NOACCESS,
            Protect::Read => PAGE_READONLY,
            Protect::ReadWrite => PAGE_READWRITE,
            Protect::ReadExec => PAGE_EXECUTE_READ,
        }
    }
}
/// Round `size` up to the nearest multiple of `page_size`
/// (`page_size` must be a power of two).
fn round_up_to_page_size(size: usize, page_size: usize) -> usize {
    let mask = page_size - 1;
    (size + mask) & !mask
}

/// Round `size` down to the nearest multiple of `page_size`
/// (`page_size` must be a power of two).
fn round_down_to_page_size(size: usize, page_size: usize) -> usize {
    let mask = page_size - 1;
    size & !mask
}

View File

@ -1 +0,0 @@
pub use self::memory::{Memory, Protect};

View File

@ -1,45 +0,0 @@
use super::vm;
use crate::types::{ElementType, Table};
/// The element storage of a table instance.
#[derive(Debug, Clone)]
pub enum TableElements {
    /// This is intended to be a caller-checked Anyfunc.
    Anyfunc(Vec<vm::Anyfunc>),
}
/// Runtime storage backing one wasm table.
#[derive(Debug)]
pub struct TableBacking {
    pub elements: TableElements,
    pub max: Option<u32>,
}
impl TableBacking {
    /// Allocates backing storage for `table`, pre-filled with null anyfuncs.
    pub fn new(table: &Table) -> Self {
        match table.ty {
            ElementType::Anyfunc => {
                // Size to `max` up front when one is declared, otherwise `min`.
                let initial_table_backing_len = match table.max {
                    Some(max) => max,
                    None => table.min,
                } as usize;
                Self {
                    elements: TableElements::Anyfunc(vec![
                        vm::Anyfunc::null();
                        initial_table_backing_len
                    ]),
                    max: table.max,
                }
            }
        }
    }
    /// Produces the raw table descriptor handed to generated code.
    pub fn into_vm_table(&mut self) -> vm::LocalTable {
        match self.elements {
            TableElements::Anyfunc(ref mut funcs) => vm::LocalTable {
                base: funcs.as_mut_ptr() as *mut u8,
                current_elements: funcs.len(),
                capacity: funcs.capacity(),
            },
        }
    }
}

View File

@ -1,276 +0,0 @@
use crate::{module::ModuleInner, structures::TypedIndex};
/// The set of wasm value types.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Type {
    /// The `i32` type.
    I32,
    /// The `i64` type.
    I64,
    /// The `f32` type.
    F32,
    /// The `f64` type.
    F64,
}
/// A runtime wasm value, tagged with its type.
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
    /// A value of the `i32` type.
    I32(i32),
    /// A value of the `i64` type.
    I64(i64),
    /// A value of the `f32` type.
    F32(f32),
    /// A value of the `f64` type.
    F64(f64),
}
impl Value {
    /// Returns the wasm type of this value.
    pub fn ty(&self) -> Type {
        match self {
            Value::I32(_) => Type::I32,
            Value::I64(_) => Type::I64,
            Value::F32(_) => Type::F32,
            Value::F64(_) => Type::F64,
        }
    }
}
// Ergonomic conversions so host scalars can be passed where a wasm `Value`
// is expected.
impl From<i32> for Value {
    fn from(i: i32) -> Self {
        Value::I32(i)
    }
}
impl From<i64> for Value {
    fn from(i: i64) -> Self {
        Value::I64(i)
    }
}
impl From<f32> for Value {
    fn from(f: f32) -> Self {
        Value::F32(f)
    }
}
impl From<f64> for Value {
    fn from(f: f64) -> Self {
        Value::F64(f)
    }
}
/// The kind of element a table may hold.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ElementType {
    /// Any wasm function.
    Anyfunc,
}
/// The declared limits and element type of a wasm table.
#[derive(Debug, Clone, Copy)]
pub struct Table {
    /// Type of data stored in this table.
    pub ty: ElementType,
    /// The minimum number of elements that must be stored in this table.
    pub min: u32,
    /// The maximum number of elements in this table.
    pub max: Option<u32>,
}
impl Table {
    /// Import-compatibility check: element types must match, the provided
    /// table's maximum must not exceed ours, and its minimum must cover ours.
    pub(crate) fn fits_in_imported(&self, imported: &Table) -> bool {
        // TODO: We should define implementation limits.
        let imported_max = imported.max.unwrap_or(u32::max_value());
        let self_max = self.max.unwrap_or(u32::max_value());
        self.ty == imported.ty && imported_max <= self_max && self.min <= imported.min
    }
}
/// A const value initializer.
/// Over time, this will be able to represent more and more
/// complex expressions.
#[derive(Debug, Clone, PartialEq)]
pub enum Initializer {
    /// Corresponds to a `const.*` instruction.
    Const(Value),
    /// Corresponds to a `get_global` instruction; the payload type
    /// restricts the source to an *imported* global.
    GetGlobal(ImportedGlobalIndex),
}
/// Describes a global's value type and mutability, without its
/// initializer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct GlobalDesc {
    /// Whether the global may be written after initialization.
    pub mutable: bool,
    /// The value type stored in the global.
    pub ty: Type,
}
/// A wasm global.
#[derive(Debug, Clone)]
pub struct Global {
    /// Type and mutability of the global.
    pub desc: GlobalDesc,
    /// Constant expression producing the initial value.
    pub init: Initializer,
}
/// A wasm linear-memory descriptor.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Memory {
    /// The minimum number of allowed pages.
    pub min: u32,
    /// The maximum number of allowed pages.
    pub max: Option<u32>,
    /// This memory can be shared between wasm threads.
    pub shared: bool,
}

impl Memory {
    /// A memory with a declared maximum can be backed by a fixed
    /// ("static") heap reservation.
    pub fn is_static_heap(&self) -> bool {
        self.max.is_some()
    }

    /// Returns true when a memory satisfying `imported`'s bounds also
    /// satisfies `self`'s declared requirements: matching sharedness,
    /// `imported.min` at least `self.min`, and `imported`'s maximum no
    /// larger than `self`'s. An absent maximum is treated as the
    /// wasm32 limit of 65 536 pages.
    pub(crate) fn fits_in_imported(&self, imported: &Memory) -> bool {
        if self.shared != imported.shared {
            return false;
        }
        let imported_max = imported.max.unwrap_or(65_536);
        let self_max = self.max.unwrap_or(65_536);
        imported_max <= self_max && self.min <= imported.min
    }
}
/// The signature of a function that is either implemented
/// in a wasm module or exposed to wasm by the host.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FuncSig {
    /// Parameter types, in order.
    pub params: Vec<Type>,
    /// Return types, in order.
    pub returns: Vec<Type>,
}

impl FuncSig {
    /// Returns true when `params` matches this signature's parameter
    /// list in both arity and value types.
    pub fn check_sig(&self, params: &[Value]) -> bool {
        if self.params.len() != params.len() {
            return false;
        }
        self.params
            .iter()
            .zip(params)
            .all(|(expected, value)| *expected == value.ty())
    }
}
/// Maps a combined index type (e.g. `FuncIndex`) to its corresponding
/// local and imported index types.
pub trait LocalImport {
    /// Index type for entities defined in the module itself.
    type Local: TypedIndex;
    /// Index type for entities imported into the module.
    type Import: TypedIndex;
}
/// Defines newtype index wrappers over `u32` and implements
/// `TypedIndex` for them. The three-list form additionally wires each
/// combined index type to its local/imported counterparts via the
/// `LocalImport` trait.
#[rustfmt::skip]
macro_rules! define_map_index {
    // Single-type form: one newtype + its `TypedIndex` impl.
    ($ty:ident) => {
        #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
        pub struct $ty (u32);
        impl TypedIndex for $ty {
            #[doc(hidden)]
            fn new(index: usize) -> Self {
                $ty (index as _)
            }
            #[doc(hidden)]
            fn index(&self) -> usize {
                self.0 as usize
            }
        }
    };
    // Triple form: combined, local, and imported types per entity kind.
    ($($normal_ty:ident,)* | local: $($local_ty:ident,)* | imported: $($imported_ty:ident,)*) => {
        $(
            define_map_index!($normal_ty);
            define_map_index!($local_ty);
            define_map_index!($imported_ty);
            impl LocalImport for $normal_ty {
                type Local = $local_ty;
                type Import = $imported_ty;
            }
        )*
    };
}
// Generate the index newtypes for functions, memories, tables, and
// globals, pairing each combined index with its local/imported forms.
#[rustfmt::skip]
define_map_index![
    FuncIndex, MemoryIndex, TableIndex, GlobalIndex,
    | local: LocalFuncIndex, LocalMemoryIndex, LocalTableIndex, LocalGlobalIndex,
    | imported: ImportedFuncIndex, ImportedMemoryIndex, ImportedTableIndex, ImportedGlobalIndex,
];
/// Implements `local_or_import` on a combined index type, plus
/// `convert_up` on its local/imported halves. Relies on the module's
/// index-space layout: imported entities occupy indices
/// `[0, $imports.len())` and locally-defined entities follow them.
#[rustfmt::skip]
macro_rules! define_local_or_import {
    ($ty:ident, $local_ty:ident, $imported_ty:ident, $imports:ident) => {
        impl $ty {
            /// Classifies this combined index as local or imported.
            pub fn local_or_import(self, module: &ModuleInner) -> LocalOrImport<$ty> {
                if self.index() < module.$imports.len() {
                    LocalOrImport::Import(<Self as LocalImport>::Import::new(self.index()))
                } else {
                    LocalOrImport::Local(<Self as LocalImport>::Local::new(self.index() - module.$imports.len()))
                }
            }
        }
        impl $local_ty {
            /// Converts a local index back into the combined index space.
            pub fn convert_up(self, module: &ModuleInner) -> $ty {
                $ty ((self.index() + module.$imports.len()) as u32)
            }
        }
        impl $imported_ty {
            /// Imported indices already sit at the base of the combined space.
            pub fn convert_up(self, _module: &ModuleInner) -> $ty {
                $ty (self.index() as u32)
            }
        }
    };
    ($(($ty:ident | ($local_ty:ident, $imported_ty:ident): $imports:ident),)*) => {
        $(
            define_local_or_import!($ty, $local_ty, $imported_ty, $imports);
        )*
    };
}
// Wire each combined index type to the `ModuleInner` field holding its
// imported entities, enabling `local_or_import`/`convert_up`.
#[rustfmt::skip]
define_local_or_import![
    (FuncIndex | (LocalFuncIndex, ImportedFuncIndex): imported_functions),
    (MemoryIndex | (LocalMemoryIndex, ImportedMemoryIndex): imported_memories),
    (TableIndex | (LocalTableIndex, ImportedTableIndex): imported_tables),
    (GlobalIndex | (LocalGlobalIndex, ImportedGlobalIndex): imported_globals),
];
/// Index referencing a function signature.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct SigIndex(u32);

impl TypedIndex for SigIndex {
    #[doc(hidden)]
    fn new(index: usize) -> Self {
        Self(index as u32)
    }

    #[doc(hidden)]
    fn index(&self) -> usize {
        self.0 as usize
    }
}
/// The result of resolving a combined index: either the entity is
/// defined in the module itself (`Local`) or brought in from outside
/// (`Import`).
pub enum LocalOrImport<T>
where
    T: LocalImport,
{
    /// Index into the module's locally-defined entities.
    Local(T::Local),
    /// Index into the module's imported entities.
    Import(T::Import),
}
impl<T> LocalOrImport<T>
where
    T: LocalImport,
{
    /// Consumes `self`, yielding the local index if this is `Local`.
    pub fn local(self) -> Option<T::Local> {
        if let LocalOrImport::Local(idx) = self {
            Some(idx)
        } else {
            None
        }
    }

    /// Consumes `self`, yielding the imported index if this is `Import`.
    pub fn import(self) -> Option<T::Import> {
        if let LocalOrImport::Import(idx) = self {
            Some(idx)
        } else {
            None
        }
    }
}

View File

@ -1,623 +0,0 @@
pub use crate::backing::{ImportBacking, LocalBacking};
use crate::{
module::ModuleInner,
structures::TypedIndex,
types::{LocalMemoryIndex, LocalOrImport, MemoryIndex},
};
use std::{ffi::c_void, mem, ptr, slice};
/// The per-instance context handed to running wasm code.
/// `#[repr(C)]` because generated code reads fields at the fixed byte
/// offsets reported by the `offset_*` methods below.
#[derive(Debug)]
#[repr(C)]
pub struct Ctx {
    /// A pointer to an array of locally-defined memories, indexed by `MemoryIndex`.
    pub(crate) memories: *mut LocalMemory,
    /// A pointer to an array of locally-defined tables, indexed by `TableIndex`.
    pub(crate) tables: *mut LocalTable,
    /// A pointer to an array of locally-defined globals, indexed by `GlobalIndex`.
    pub(crate) globals: *mut LocalGlobal,
    /// A pointer to an array of imported memories, indexed by `MemoryIndex`.
    pub(crate) imported_memories: *mut ImportedMemory,
    /// A pointer to an array of imported tables, indexed by `TableIndex`.
    pub(crate) imported_tables: *mut ImportedTable,
    /// A pointer to an array of imported globals, indexed by `GlobalIndex`.
    pub(crate) imported_globals: *mut ImportedGlobal,
    /// A pointer to an array of imported functions, indexed by `FuncIndex`.
    pub(crate) imported_funcs: *mut ImportedFunc,
    /// Backing storage for the locally-defined entities above.
    pub(crate) local_backing: *mut LocalBacking,
    /// Backing storage for the imported entities above.
    pub(crate) import_backing: *mut ImportBacking,
    /// The module this context belongs to.
    module: *const ModuleInner,
    /// Opaque host data attached by the embedder.
    pub data: *mut c_void,
    /// Expected to be called with `data` when the `Ctx` is dropped
    /// (exercised by `vm_ctx_tests::test_callback_on_drop`); the
    /// `Drop` impl itself is not visible in this chunk.
    pub data_finalizer: Option<extern "C" fn(data: *mut c_void)>,
}
impl Ctx {
    /// Shared initialization for `new`/`new_with_data`: captures raw
    /// pointers into the backing stores, with no host data attached.
    /// (Previously this field list was duplicated in both public
    /// constructors.)
    ///
    /// # Safety
    /// The returned `Ctx` stores raw pointers into `local_backing`,
    /// `import_backing`, and `module`; all three must outlive the
    /// `Ctx` and must not be moved while it is in use.
    unsafe fn new_inner(
        local_backing: &mut LocalBacking,
        import_backing: &mut ImportBacking,
        module: &ModuleInner,
    ) -> Self {
        Self {
            memories: local_backing.vm_memories.as_mut_ptr(),
            tables: local_backing.vm_tables.as_mut_ptr(),
            globals: local_backing.vm_globals.as_mut_ptr(),
            imported_memories: import_backing.memories.as_mut_ptr(),
            imported_tables: import_backing.tables.as_mut_ptr(),
            imported_globals: import_backing.globals.as_mut_ptr(),
            imported_funcs: import_backing.functions.as_mut_ptr(),
            local_backing,
            import_backing,
            module,
            data: ptr::null_mut(),
            data_finalizer: None,
        }
    }

    /// Creates a context with no attached host data.
    ///
    /// # Safety
    /// See [`Ctx::new_inner`] for pointer-lifetime requirements.
    pub unsafe fn new(
        local_backing: &mut LocalBacking,
        import_backing: &mut ImportBacking,
        module: &ModuleInner,
    ) -> Self {
        Self::new_inner(local_backing, import_backing, module)
    }

    /// Creates a context carrying embedder-supplied `data` plus a
    /// `data_finalizer` to release it.
    ///
    /// # Safety
    /// See [`Ctx::new_inner`]; additionally `data` must stay valid
    /// until the finalizer runs.
    pub unsafe fn new_with_data(
        local_backing: &mut LocalBacking,
        import_backing: &mut ImportBacking,
        module: &ModuleInner,
        data: *mut c_void,
        data_finalizer: extern "C" fn(*mut c_void),
    ) -> Self {
        Self {
            data,
            data_finalizer: Some(data_finalizer),
            ..Self::new_inner(local_backing, import_backing, module)
        }
    }

    /// Returns the full byte slice of the memory at `mem_index`,
    /// resolving the index to either local or imported backing
    /// storage.
    pub fn memory<'a>(&'a mut self, mem_index: u32) -> &'a mut [u8] {
        let module = unsafe { &*self.module };
        let mem_index = MemoryIndex::new(mem_index as usize);
        match mem_index.local_or_import(module) {
            LocalOrImport::Local(local_mem_index) => {
                let local_backing = unsafe { &mut *self.local_backing };
                &mut local_backing.memories[local_mem_index][..]
            }
            LocalOrImport::Import(import_mem_index) => {
                let import_backing = unsafe { &mut *self.import_backing };
                let vm_memory_import = import_backing.memories[import_mem_index].clone();
                // SAFETY(review): assumes the imported `LocalMemory`
                // pointer stays valid for the duration of this borrow.
                unsafe {
                    let memory = &*vm_memory_import.memory;
                    slice::from_raw_parts_mut(memory.base, memory.size)
                }
            }
        }
    }
}
/// Field byte-offsets for generated code. `Ctx` is `#[repr(C)]` and
/// its leading fields are all pointer-sized, so each occupies one
/// machine-word slot; agreement with the real layout is asserted in
/// `vm_offset_tests::vmctx`.
impl Ctx {
    /// Byte offset of the pointer-sized field in slot `n`.
    fn ptr_slot(n: u8) -> u8 {
        n * (mem::size_of::<usize>() as u8)
    }

    pub fn offset_memories() -> u8 {
        Self::ptr_slot(0)
    }

    pub fn offset_tables() -> u8 {
        Self::ptr_slot(1)
    }

    pub fn offset_globals() -> u8 {
        Self::ptr_slot(2)
    }

    pub fn offset_imported_memories() -> u8 {
        Self::ptr_slot(3)
    }

    pub fn offset_imported_tables() -> u8 {
        Self::ptr_slot(4)
    }

    pub fn offset_imported_globals() -> u8 {
        Self::ptr_slot(5)
    }

    pub fn offset_imported_funcs() -> u8 {
        Self::ptr_slot(6)
    }

    /// NOTE(review): slot 7 lines up with `local_backing` in the
    /// struct as declared in this file; no `signatures` field is
    /// visible here — confirm the intended target of this offset.
    pub fn offset_signatures() -> u8 {
        Self::ptr_slot(7)
    }
}
/// Used to provide type safety (ish) for passing around function pointers.
/// The typesystem ensures this cannot be dereferenced since an
/// empty enum cannot actually exist. A `*const Func` therefore acts
/// as an opaque, never-dereferenceable code pointer.
pub enum Func {}
/// An imported function, which contains the vmctx that owns this function.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedFunc {
    /// Opaque pointer to the function's code.
    pub func: *const Func,
    /// Context of the instance that owns the function.
    pub vmctx: *mut Ctx,
}
impl ImportedFunc {
    /// Byte offset of the pointer-sized field in slot `n` of this
    /// `#[repr(C)]` struct.
    fn ptr_slot(n: u8) -> u8 {
        n * (mem::size_of::<usize>() as u8)
    }

    /// Byte offset of `func`.
    pub fn offset_func() -> u8 {
        Self::ptr_slot(0)
    }

    /// Byte offset of `vmctx`.
    pub fn offset_vmctx() -> u8 {
        Self::ptr_slot(1)
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// Definition of a table used by the VM. (obviously)
/// `#[repr(C)]` so fields sit at the fixed offsets reported by the
/// `offset_*` methods (checked in `vm_offset_tests`).
#[derive(Debug, Clone)]
#[repr(C)]
pub struct LocalTable {
    /// pointer to the elements in the table.
    pub base: *mut u8,
    /// Number of elements in the table (NOT necessarily the size of the table in bytes!).
    pub current_elements: usize,
    /// The number of elements that can fit into the memory allocated for this table.
    pub capacity: usize,
}
impl LocalTable {
    /// Byte offset of the pointer-sized field in slot `n` of this
    /// `#[repr(C)]` struct.
    fn ptr_slot(n: u8) -> u8 {
        n * (mem::size_of::<usize>() as u8)
    }

    /// Byte offset of `base`.
    pub fn offset_base() -> u8 {
        Self::ptr_slot(0)
    }

    /// Byte offset of `current_elements`.
    pub fn offset_current_elements() -> u8 {
        Self::ptr_slot(1)
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// A table imported from another instance: the definition plus the
/// context that owns it.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedTable {
    /// A pointer to the table definition.
    pub table: *mut LocalTable,
    /// A pointer to the vmcontext that owns this table definition.
    pub vmctx: *mut Ctx,
}
impl ImportedTable {
    /// Byte offset of the pointer-sized field in slot `n` of this
    /// `#[repr(C)]` struct.
    fn ptr_slot(n: u8) -> u8 {
        n * (mem::size_of::<usize>() as u8)
    }

    /// Byte offset of `table`.
    pub fn offset_table() -> u8 {
        Self::ptr_slot(0)
    }

    /// Byte offset of `vmctx`.
    pub fn offset_vmctx() -> u8 {
        Self::ptr_slot(1)
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// Definition of a memory used by the VM.
/// `#[repr(C)]` so fields sit at the fixed offsets reported by the
/// `offset_*` methods (checked in `vm_offset_tests`).
#[derive(Debug, Clone)]
#[repr(C)]
pub struct LocalMemory {
    /// Pointer to the bottom of this linear memory.
    pub base: *mut u8,
    /// Current size of this linear memory in bytes.
    pub size: usize,
    /// The local memory index.
    pub index: LocalMemoryIndex,
}
impl LocalMemory {
    /// Byte offset of the pointer-sized field in slot `n` of this
    /// `#[repr(C)]` struct.
    fn ptr_slot(n: u8) -> u8 {
        n * (mem::size_of::<usize>() as u8)
    }

    /// Byte offset of `base`.
    pub fn offset_base() -> u8 {
        Self::ptr_slot(0)
    }

    /// Byte offset of `size`.
    pub fn offset_size() -> u8 {
        Self::ptr_slot(1)
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// A memory imported from another instance: the definition plus the
/// context that owns it.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedMemory {
    /// A pointer to the memory definition.
    pub memory: *mut LocalMemory,
    /// A pointer to the vmcontext that owns this memory definition.
    pub vmctx: *mut Ctx,
}
impl ImportedMemory {
    /// Byte offset of the pointer-sized field in slot `n` of this
    /// `#[repr(C)]` struct.
    fn ptr_slot(n: u8) -> u8 {
        n * (mem::size_of::<usize>() as u8)
    }

    /// Byte offset of `memory`.
    pub fn offset_memory() -> u8 {
        Self::ptr_slot(0)
    }

    /// Byte offset of `vmctx`.
    pub fn offset_vmctx() -> u8 {
        Self::ptr_slot(1)
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// Definition of a global used by the VM, stored as a raw 64-bit bit
/// pattern.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct LocalGlobal {
    /// Raw bits of the global's value.
    pub data: u64,
}

impl LocalGlobal {
    /// Byte offset of `data` — the first (and only) field.
    pub fn offset_data() -> u8 {
        0
    }

    /// A zero-initialized global.
    pub fn null() -> Self {
        Self { data: 0 }
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// A global imported from another instance.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedGlobal {
    /// A pointer to the global definition.
    pub global: *mut LocalGlobal,
}
impl ImportedGlobal {
    /// Byte offset of `global` — the first (and only) field.
    pub fn offset_global() -> u8 {
        0
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// Identifier for a function signature, stored in each `Anyfunc` so
/// indirect calls can be signature-checked by the caller (see
/// "Caller-checked anyfunc" below). `repr(transparent)` over the raw
/// `u32`.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct SigId(pub u32);
/// Caller-checked anyfunc: a table entry pairing the function pointer
/// (with its owning vmctx) with the `SigId` the caller checks on an
/// indirect call.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct Anyfunc {
    /// Function pointer plus owning context.
    pub func_data: ImportedFunc,
    /// Signature id used for the caller-side check.
    pub sig_id: SigId,
}
impl Anyfunc {
    /// An empty table slot: null function/vmctx pointers plus the
    /// `u32::MAX` sentinel sig id.
    pub fn null() -> Self {
        Self {
            func_data: ImportedFunc {
                func: ptr::null(),
                vmctx: ptr::null_mut(),
            },
            sig_id: SigId(u32::max_value()),
        }
    }

    /// Byte offset of the pointer-sized field in slot `n` of this
    /// `#[repr(C)]` struct.
    fn ptr_slot(n: u8) -> u8 {
        n * (mem::size_of::<usize>() as u8)
    }

    /// Byte offset of `func_data.func`.
    pub fn offset_func() -> u8 {
        Self::ptr_slot(0)
    }

    /// Byte offset of `func_data.vmctx`.
    pub fn offset_vmctx() -> u8 {
        Self::ptr_slot(1)
    }

    /// Byte offset of `sig_id`.
    pub fn offset_sig_id() -> u8 {
        Self::ptr_slot(2)
    }

    /// Total size of this struct in bytes.
    pub fn size() -> u8 {
        mem::size_of::<Self>() as u8
    }
}
/// Layout checks: every hand-maintained `offset_*` method must agree
/// with the actual `#[repr(C)]` field offset computed by `offset_of!`.
#[cfg(test)]
mod vm_offset_tests {
    use super::{
        Anyfunc, Ctx, ImportedFunc, ImportedGlobal, ImportedMemory, ImportedTable, LocalGlobal,
        LocalMemory, LocalTable,
    };

    #[test]
    fn vmctx() {
        assert_eq!(
            Ctx::offset_memories() as usize,
            offset_of!(Ctx => memories).get_byte_offset(),
        );
        assert_eq!(
            Ctx::offset_tables() as usize,
            offset_of!(Ctx => tables).get_byte_offset(),
        );
        assert_eq!(
            Ctx::offset_globals() as usize,
            offset_of!(Ctx => globals).get_byte_offset(),
        );
        assert_eq!(
            Ctx::offset_imported_memories() as usize,
            offset_of!(Ctx => imported_memories).get_byte_offset(),
        );
        assert_eq!(
            Ctx::offset_imported_tables() as usize,
            offset_of!(Ctx => imported_tables).get_byte_offset(),
        );
        assert_eq!(
            Ctx::offset_imported_globals() as usize,
            offset_of!(Ctx => imported_globals).get_byte_offset(),
        );
        assert_eq!(
            Ctx::offset_imported_funcs() as usize,
            offset_of!(Ctx => imported_funcs).get_byte_offset(),
        );
    }

    #[test]
    fn imported_func() {
        assert_eq!(
            ImportedFunc::offset_func() as usize,
            offset_of!(ImportedFunc => func).get_byte_offset(),
        );
        assert_eq!(
            ImportedFunc::offset_vmctx() as usize,
            offset_of!(ImportedFunc => vmctx).get_byte_offset(),
        );
    }

    #[test]
    fn local_table() {
        assert_eq!(
            LocalTable::offset_base() as usize,
            offset_of!(LocalTable => base).get_byte_offset(),
        );
        assert_eq!(
            LocalTable::offset_current_elements() as usize,
            offset_of!(LocalTable => current_elements).get_byte_offset(),
        );
    }

    #[test]
    fn imported_table() {
        assert_eq!(
            ImportedTable::offset_table() as usize,
            offset_of!(ImportedTable => table).get_byte_offset(),
        );
        assert_eq!(
            ImportedTable::offset_vmctx() as usize,
            offset_of!(ImportedTable => vmctx).get_byte_offset(),
        );
    }

    #[test]
    fn local_memory() {
        assert_eq!(
            LocalMemory::offset_base() as usize,
            offset_of!(LocalMemory => base).get_byte_offset(),
        );
        assert_eq!(
            LocalMemory::offset_size() as usize,
            offset_of!(LocalMemory => size).get_byte_offset(),
        );
    }

    #[test]
    fn imported_memory() {
        assert_eq!(
            ImportedMemory::offset_memory() as usize,
            offset_of!(ImportedMemory => memory).get_byte_offset(),
        );
        assert_eq!(
            ImportedMemory::offset_vmctx() as usize,
            offset_of!(ImportedMemory => vmctx).get_byte_offset(),
        );
    }

    #[test]
    fn local_global() {
        assert_eq!(
            LocalGlobal::offset_data() as usize,
            offset_of!(LocalGlobal => data).get_byte_offset(),
        );
    }

    #[test]
    fn imported_global() {
        assert_eq!(
            ImportedGlobal::offset_global() as usize,
            offset_of!(ImportedGlobal => global).get_byte_offset(),
        );
    }

    // `Anyfunc` offsets reach through the nested `func_data` field.
    #[test]
    fn cc_anyfunc() {
        assert_eq!(
            Anyfunc::offset_func() as usize,
            offset_of!(Anyfunc => func_data: ImportedFunc => func).get_byte_offset(),
        );
        assert_eq!(
            Anyfunc::offset_vmctx() as usize,
            offset_of!(Anyfunc => func_data: ImportedFunc => vmctx).get_byte_offset(),
        );
        assert_eq!(
            Anyfunc::offset_sig_id() as usize,
            offset_of!(Anyfunc => sig_id).get_byte_offset(),
        );
    }
}
/// Behavior checks for `Ctx`, in particular that host data attached
/// via `new_with_data` is observable and survives until finalization.
#[cfg(test)]
mod vm_ctx_tests {
    use super::{Ctx, ImportBacking, LocalBacking};
    use crate::module::ModuleInner;
    use crate::structures::Map;
    use std::ffi::c_void;

    // Host-side payload attached to the ctx in the test below.
    struct TestData {
        x: u32,
        y: bool,
        str: String,
    }

    // Finalizer passed to `new_with_data`; asserts the payload is
    // intact when invoked.
    // NOTE(review): `drop` of a `&mut` reference is a no-op — nothing
    // is actually freed here; the asserts are the point.
    extern "C" fn test_data_finalizer(data: *mut c_void) {
        let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };
        assert_eq!(test_data.x, 10);
        assert_eq!(test_data.y, true);
        assert_eq!(test_data.str, "Test".to_string());
        println!("hello from finalizer");
        drop(test_data);
    }

    #[test]
    fn test_callback_on_drop() {
        let mut data = TestData {
            x: 10,
            y: true,
            str: "Test".to_string(),
        };
        let mut local_backing = LocalBacking {
            memories: Map::new().into_boxed_map(),
            tables: Map::new().into_boxed_map(),
            vm_memories: Map::new().into_boxed_map(),
            vm_tables: Map::new().into_boxed_map(),
            vm_globals: Map::new().into_boxed_map(),
        };
        let mut import_backing = ImportBacking {
            functions: Map::new().into_boxed_map(),
            memories: Map::new().into_boxed_map(),
            tables: Map::new().into_boxed_map(),
            globals: Map::new().into_boxed_map(),
        };
        let module = generate_module();
        // Shadow `data` with a type-erased pointer to the stack value.
        let data = &mut data as *mut _ as *mut c_void;
        let ctx = unsafe {
            Ctx::new_with_data(
                &mut local_backing,
                &mut import_backing,
                &module,
                data,
                test_data_finalizer,
            )
        };
        let ctx_test_data = cast_test_data(ctx.data);
        assert_eq!(ctx_test_data.x, 10);
        assert_eq!(ctx_test_data.y, true);
        assert_eq!(ctx_test_data.str, "Test".to_string());
        drop(ctx);
    }

    // NOTE(review): fabricates a `'static` borrow from a raw pointer;
    // acceptable within this test's scope, not generally sound.
    fn cast_test_data(data: *mut c_void) -> &'static mut TestData {
        let test_data: &mut TestData = unsafe { &mut *(data as *mut TestData) };
        test_data
    }

    // Builds a minimal `ModuleInner` whose resolver/caller are inert
    // placeholders.
    fn generate_module() -> ModuleInner {
        use super::Func;
        use crate::backend::{FuncResolver, ProtectedCaller, SigRegistry, Token};
        use crate::error::RuntimeResult;
        use crate::types::{FuncIndex, LocalFuncIndex, Value};
        use hashbrown::HashMap;
        use std::ptr::NonNull;
        struct Placeholder;
        impl FuncResolver for Placeholder {
            fn get(
                &self,
                _module: &ModuleInner,
                _local_func_index: LocalFuncIndex,
            ) -> Option<NonNull<Func>> {
                None
            }
        }
        impl ProtectedCaller for Placeholder {
            fn call(
                &self,
                _module: &ModuleInner,
                _func_index: FuncIndex,
                _params: &[Value],
                _returns: &mut [Value],
                _import_backing: &ImportBacking,
                _vmctx: *mut Ctx,
                _: Token,
            ) -> RuntimeResult<()> {
                Ok(())
            }
        }
        ModuleInner {
            func_resolver: Box::new(Placeholder),
            protected_caller: Box::new(Placeholder),
            memories: Map::new(),
            globals: Map::new(),
            tables: Map::new(),
            // These are strictly imported and the typesystem ensures that.
            imported_functions: Map::new(),
            imported_memories: Map::new(),
            imported_tables: Map::new(),
            imported_globals: Map::new(),
            exports: HashMap::new(),
            data_initializers: Vec::new(),
            elem_initializers: Vec::new(),
            start_func: None,
            func_assoc: Map::new(),
            sig_registry: SigRegistry::new(),
        }
    }
}

View File

@ -1,118 +0,0 @@
use crate::{
memory::LinearMemory,
structures::TypedIndex,
types::{ImportedMemoryIndex, LocalMemoryIndex, LocalTableIndex},
vm,
};
// +*****************************+
// |       LOCAL MEMORIES        |
// +*****************************+
/// Grows a locally-defined, statically-allocated memory by `by_pages`
/// pages. Returns the previous page count, or -1 if the grow failed.
/// (`extern "C"` — presumably invoked from generated code; confirm.)
///
/// # Safety
/// `ctx` must point to a valid, live `vm::Ctx` whose instance owns
/// `memory_index`.
pub unsafe extern "C" fn local_static_memory_grow(
    memory_index: LocalMemoryIndex,
    by_pages: u32,
    ctx: *mut vm::Ctx,
) -> i32 {
    if let Some(old) = (*(*ctx).local_backing)
        .memory(memory_index)
        .grow_static(by_pages)
    {
        // Store the new size back into the vmctx.
        (*(*ctx).memories.add(memory_index.index())).size =
            (old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
        old
    } else {
        -1
    }
}
/// Returns the current size, in wasm pages, of a locally-defined
/// statically-allocated memory.
///
/// # Safety
/// `ctx` must point to a valid, live `vm::Ctx` whose instance owns
/// `memory_index`.
pub unsafe extern "C" fn local_static_memory_size(
    memory_index: LocalMemoryIndex,
    ctx: *mut vm::Ctx,
) -> u32 {
    (*(*ctx).local_backing).memory(memory_index).pages()
}
/// Grows a locally-defined, dynamically-allocated memory by
/// `by_pages` pages. Returns the previous page count, or -1 if the
/// grow failed.
///
/// # Safety
/// `ctx` must point to a valid, live `vm::Ctx` whose instance owns
/// `memory_index`.
pub unsafe extern "C" fn local_dynamic_memory_grow(
    memory_index: LocalMemoryIndex,
    by_pages: u32,
    ctx: *mut vm::Ctx,
) -> i32 {
    if let Some(old) = (*(*ctx).local_backing)
        .memory(memory_index)
        .grow_dynamic(by_pages)
    {
        // Store the new size back into the vmctx.
        (*(*ctx).memories.add(memory_index.index())).size =
            (old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
        old
    } else {
        -1
    }
}
// +*****************************+
// |     IMPORTED MEMORIES       |
// +*****************************+
/// Grows an imported memory by `by_pages` pages, resolving the import
/// to the owning instance's local memory. Returns the previous page
/// count, or -1 if the grow failed.
///
/// NOTE(review): despite the `static` name, this calls `grow_dynamic`,
/// whereas `local_static_memory_grow` calls `grow_static` — confirm
/// whether `grow_static` was intended here.
///
/// # Safety
/// `caller_ctx` must point to a valid, live `vm::Ctx`, and the
/// imported memory's `vmctx`/`memory` pointers must also be valid.
pub unsafe extern "C" fn imported_static_memory_grow(
    imported_mem_index: ImportedMemoryIndex,
    by_pages: u32,
    caller_ctx: *mut vm::Ctx,
) -> i32 {
    let import_backing = &*(*caller_ctx).import_backing;
    let vm_imported_mem = import_backing.imported_memory(imported_mem_index);
    // We can assume that the memory here is local to the callee ctx.
    let local_mem_index = (*vm_imported_mem.memory).index;
    if let Some(old) = (*(*vm_imported_mem.vmctx).local_backing)
        .memory(local_mem_index)
        .grow_dynamic(by_pages)
    {
        // Store the new size back into the vmctx (the *owning*
        // instance's ctx, not the caller's).
        (*(*vm_imported_mem.vmctx)
            .memories
            .add(local_mem_index.index()))
        .size = (old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
        old
    } else {
        -1
    }
}
/// Returns the current size, in wasm pages, of an imported memory,
/// resolving the import to the owning instance's local memory.
///
/// # Safety
/// `caller_ctx` must point to a valid, live `vm::Ctx`, and the
/// imported memory's `vmctx`/`memory` pointers must also be valid.
pub unsafe extern "C" fn imported_static_memory_size(
    imported_memory_index: ImportedMemoryIndex,
    caller_ctx: *mut vm::Ctx,
) -> u32 {
    let import_backing = &*(*caller_ctx).import_backing;
    let vm_imported_mem = import_backing.imported_memory(imported_memory_index);
    // We can assume that the memory here is local to the callee ctx.
    let local_mem_index = (*vm_imported_mem.memory).index;
    (*(*vm_imported_mem.vmctx).local_backing)
        .memory(local_mem_index)
        .pages()
}
// +*****************************+
// |        LOCAL TABLES         |
// +*****************************+
/// Grows a locally-defined table by `by_elems` elements. Not yet
/// implemented — always panics via `unimplemented!`.
/// NOTE(review): panicking (unwinding) across an `extern "C"`
/// boundary is undefined behavior; consider aborting once implemented
/// paths can fail.
///
/// # Safety
/// Unconditionally panics today; once implemented, `ctx` must point
/// to a valid, live `vm::Ctx`.
pub unsafe extern "C" fn local_table_grow(
    table_index: LocalTableIndex,
    by_elems: u32,
    ctx: *mut vm::Ctx,
) -> i32 {
    // Silence unused-parameter warnings until this is implemented.
    let _ = table_index;
    let _ = by_elems;
    let _ = ctx;
    unimplemented!()
}
/// Returns the current element count of a locally-defined table.
/// Not yet implemented — always panics via `unimplemented!`.
/// NOTE(review): panicking across `extern "C"` is undefined behavior.
///
/// # Safety
/// Unconditionally panics today; once implemented, `ctx` must point
/// to a valid, live `vm::Ctx`.
pub unsafe extern "C" fn local_table_size(table_index: LocalTableIndex, ctx: *mut vm::Ctx) -> u32 {
    // Silence unused-parameter warnings until this is implemented.
    let _ = table_index;
    let _ = ctx;
    unimplemented!()
}