Merge branch 'master' into fix/emscripten-translate

Brandon Fish
2019-01-29 23:07:32 -06:00
46 changed files with 2510 additions and 1514 deletions

View File

@ -35,7 +35,7 @@ pub trait Compiler {
/// The functionality exposed by this trait is expected to be used
/// for calling functions exported by a webassembly module from
/// host code only.
pub trait ProtectedCaller {
pub trait ProtectedCaller: Send + Sync {
/// This calls the exported function designated by `local_func_index`.
/// Note that this also supports calling imported functions that are
/// then exported.
@ -62,7 +62,7 @@ pub trait ProtectedCaller {
) -> RuntimeResult<Vec<Value>>;
}
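The new `Send + Sync` bounds here (and on `FuncResolver` below) are what allow compiled-module internals to be shared across threads. A minimal, self-contained sketch with a stand-in trait (not the wasmer API) showing what the bounds buy:

use std::sync::Arc;
use std::thread;

// Stand-in for a trait object that must be usable from several threads.
trait Caller: Send + Sync {
    fn call(&self) -> u64;
}

struct MyCaller;

impl Caller for MyCaller {
    fn call(&self) -> u64 {
        42
    }
}

fn main() {
    let caller: Arc<dyn Caller> = Arc::new(MyCaller);
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let c = Arc::clone(&caller);
            // `thread::spawn` requires `Send`; without the `Send + Sync`
            // supertraits this line would not compile.
            thread::spawn(move || c.call())
        })
        .collect();
    for handle in handles {
        assert_eq!(handle.join().unwrap(), 42);
    }
}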
pub trait FuncResolver {
pub trait FuncResolver: Send + Sync {
/// This returns a pointer to the function designated by the `local_func_index`
/// parameter.
fn get(

View File

@ -1,53 +1,55 @@
use crate::{
error::{LinkError, LinkResult},
export::{Context, Export},
global::Global,
import::ImportObject,
memory::LinearMemory,
memory::Memory,
module::{ImportName, ModuleInner},
structures::{BoxedMap, Map, SliceMap, TypedIndex},
table::{TableBacking, TableElements},
table::Table,
types::{
ElementType, ImportedFuncIndex, ImportedGlobalIndex, ImportedMemoryIndex,
ImportedTableIndex, Initializer, LocalGlobalIndex, LocalMemoryIndex, LocalOrImport,
LocalTableIndex, Type, Value,
ImportedFuncIndex, ImportedGlobalIndex, ImportedMemoryIndex, ImportedTableIndex,
Initializer, LocalGlobalIndex, LocalMemoryIndex, LocalOrImport, LocalTableIndex, Value,
},
vm,
};
use std::{mem, slice};
use std::slice;
#[derive(Debug)]
pub struct LocalBacking {
pub(crate) memories: BoxedMap<LocalMemoryIndex, LinearMemory>,
pub(crate) tables: BoxedMap<LocalTableIndex, TableBacking>,
pub(crate) memories: BoxedMap<LocalMemoryIndex, Memory>,
pub(crate) tables: BoxedMap<LocalTableIndex, Table>,
pub(crate) globals: BoxedMap<LocalGlobalIndex, Global>,
pub(crate) vm_memories: BoxedMap<LocalMemoryIndex, vm::LocalMemory>,
pub(crate) vm_tables: BoxedMap<LocalTableIndex, vm::LocalTable>,
pub(crate) vm_globals: BoxedMap<LocalGlobalIndex, vm::LocalGlobal>,
pub(crate) vm_memories: BoxedMap<LocalMemoryIndex, *mut vm::LocalMemory>,
pub(crate) vm_tables: BoxedMap<LocalTableIndex, *mut vm::LocalTable>,
pub(crate) vm_globals: BoxedMap<LocalGlobalIndex, *mut vm::LocalGlobal>,
}
impl LocalBacking {
pub fn memory(&mut self, local_memory_index: LocalMemoryIndex) -> &mut LinearMemory {
&mut self.memories[local_memory_index]
}
// impl LocalBacking {
// pub fn memory(&mut self, local_memory_index: LocalMemoryIndex) -> &mut Memory {
// &mut self.memories[local_memory_index]
// }
pub fn table(&mut self, local_table_index: LocalTableIndex) -> &mut TableBacking {
&mut self.tables[local_table_index]
}
}
// pub fn table(&mut self, local_table_index: LocalTableIndex) -> &mut TableBacking {
// &mut self.tables[local_table_index]
// }
// }
impl LocalBacking {
pub(crate) fn new(module: &ModuleInner, imports: &ImportBacking, vmctx: *mut vm::Ctx) -> Self {
let mut memories = Self::generate_memories(module);
let mut tables = Self::generate_tables(module);
let globals = Self::generate_globals(module);
let mut globals = Self::generate_globals(module, imports);
let vm_memories = Self::finalize_memories(module, imports, &mut memories);
let vm_tables = Self::finalize_tables(module, imports, &mut tables, vmctx);
let vm_globals = Self::finalize_globals(module, imports, globals);
let vm_globals = Self::finalize_globals(&mut globals);
Self {
memories,
tables,
globals,
vm_memories,
vm_tables,
@ -55,20 +57,20 @@ impl LocalBacking {
}
}
fn generate_memories(module: &ModuleInner) -> BoxedMap<LocalMemoryIndex, LinearMemory> {
fn generate_memories(module: &ModuleInner) -> BoxedMap<LocalMemoryIndex, Memory> {
let mut memories = Map::with_capacity(module.memories.len());
for (_, mem) in &module.memories {
for (_, &desc) in &module.memories {
// If we use emscripten, we set a fixed initial and maximum
// let memory = if options.abi == InstanceABI::Emscripten {
// // We use MAX_PAGES, so at the end the result is:
// // (initial * LinearMemory::PAGE_SIZE) == LinearMemory::DEFAULT_HEAP_SIZE
// // However, it should be: (initial * LinearMemory::PAGE_SIZE) == 16777216
// LinearMemory::new(LinearMemory::MAX_PAGES, None)
// // (initial * Memory::PAGE_SIZE) == Memory::DEFAULT_HEAP_SIZE
// // However, it should be: (initial * Memory::PAGE_SIZE) == 16777216
// Memory::new(Memory::MAX_PAGES, None)
// } else {
// LinearMemory::new(memory.minimum, memory.maximum.map(|m| m as u32))
// Memory::new(memory.minimum, memory.maximum.map(|m| m as u32))
// };
let memory = LinearMemory::new(mem);
let memory = Memory::new(desc).expect("unable to create memory");
memories.push(memory);
}
@ -78,8 +80,8 @@ impl LocalBacking {
fn finalize_memories(
module: &ModuleInner,
imports: &ImportBacking,
memories: &mut SliceMap<LocalMemoryIndex, LinearMemory>,
) -> BoxedMap<LocalMemoryIndex, vm::LocalMemory> {
memories: &mut SliceMap<LocalMemoryIndex, Memory>,
) -> BoxedMap<LocalMemoryIndex, *mut vm::LocalMemory> {
// For each init that has some data...
for init in module
.data_initializers
@ -89,9 +91,9 @@ impl LocalBacking {
let init_base = match init.base {
Initializer::Const(Value::I32(offset)) => offset as u32,
Initializer::Const(_) => panic!("a const initializer must be of type i32"),
Initializer::GetGlobal(imported_global_index) => {
if module.imported_globals[imported_global_index].1.ty == Type::I32 {
unsafe { (*imports.globals[imported_global_index].global).data as u32 }
Initializer::GetGlobal(import_global_index) => {
if let Value::I32(x) = imports.globals[import_global_index].get() {
x as u32
} else {
panic!("unsupported global type for initialzer")
}
@ -100,20 +102,20 @@ impl LocalBacking {
match init.memory_index.local_or_import(module) {
LocalOrImport::Local(local_memory_index) => {
let memory_desc = &module.memories[local_memory_index];
let memory_desc = module.memories[local_memory_index];
let data_top = init_base + init.data.len();
assert!((memory_desc.min * LinearMemory::PAGE_SIZE) as usize >= data_top);
let mem: &mut LinearMemory = &mut memories[local_memory_index];
assert!(memory_desc.minimum.bytes().0 >= data_top);
let mem_init_view = &mut mem[init_base..init_base + init.data.len()];
mem_init_view.copy_from_slice(&init.data);
let mem = &memories[local_memory_index];
mem.write_many(init_base as u32, &init.data).unwrap();
}
LocalOrImport::Import(imported_memory_index) => {
let vm_imported_memory = imports.imported_memory(imported_memory_index);
// Write the initialization data to where we believe
// the imported memory is.
unsafe {
let local_memory = &(*vm_imported_memory.memory);
let local_memory = &*imports.vm_memories[imported_memory_index];
let memory_slice =
slice::from_raw_parts_mut(local_memory.base, local_memory.size);
slice::from_raw_parts_mut(local_memory.base, local_memory.bound);
let mem_init_view =
&mut memory_slice[init_base..init_base + init.data.len()];
@ -125,17 +127,17 @@ impl LocalBacking {
memories
.iter_mut()
.map(|(index, mem)| mem.into_vm_memory(index))
.map(|(_, mem)| mem.vm_local_memory())
.collect::<Map<_, _>>()
.into_boxed_map()
}
fn generate_tables(module: &ModuleInner) -> BoxedMap<LocalTableIndex, TableBacking> {
fn generate_tables(module: &ModuleInner) -> BoxedMap<LocalTableIndex, Table> {
let mut tables = Map::with_capacity(module.tables.len());
for (_, table) in &module.tables {
let table_backing = TableBacking::new(table);
tables.push(table_backing);
for (_, &table_desc) in module.tables.iter() {
let table = Table::new(table_desc).unwrap();
tables.push(table);
}
tables.into_boxed_map()
@ -145,16 +147,16 @@ impl LocalBacking {
fn finalize_tables(
module: &ModuleInner,
imports: &ImportBacking,
tables: &mut SliceMap<LocalTableIndex, TableBacking>,
tables: &mut SliceMap<LocalTableIndex, Table>,
vmctx: *mut vm::Ctx,
) -> BoxedMap<LocalTableIndex, vm::LocalTable> {
) -> BoxedMap<LocalTableIndex, *mut vm::LocalTable> {
for init in &module.elem_initializers {
let init_base = match init.base {
Initializer::Const(Value::I32(offset)) => offset as u32,
Initializer::Const(_) => panic!("a const initializer must be of type i32"),
Initializer::GetGlobal(imported_global_index) => {
if module.imported_globals[imported_global_index].1.ty == Type::I32 {
unsafe { (*imports.globals[imported_global_index].global).data as u32 }
Initializer::GetGlobal(import_global_index) => {
if let Value::I32(x) = imports.globals[import_global_index].get() {
x as u32
} else {
panic!("unsupported global type for initialzer")
}
@ -163,141 +165,132 @@ impl LocalBacking {
match init.table_index.local_or_import(module) {
LocalOrImport::Local(local_table_index) => {
let table = &mut tables[local_table_index];
match table.elements {
TableElements::Anyfunc(ref mut elements) => {
if elements.len() < init_base + init.elements.len() {
// Grow the table if it's too small.
elements
.resize(init_base + init.elements.len(), vm::Anyfunc::null());
}
let table = &tables[local_table_index];
for (i, &func_index) in init.elements.iter().enumerate() {
let sig_index = module.func_assoc[func_index];
let sig_id = vm::SigId(sig_index.index() as u32);
let func_data = match func_index.local_or_import(module) {
LocalOrImport::Local(local_func_index) => vm::ImportedFunc {
func: module
.func_resolver
.get(module, local_func_index)
.unwrap()
.as_ptr(),
vmctx,
},
LocalOrImport::Import(imported_func_index) => {
imports.functions[imported_func_index].clone()
}
};
elements[init_base + i] = vm::Anyfunc { func_data, sig_id };
}
}
if (table.size() as usize) < init_base + init.elements.len() {
let delta = (init_base + init.elements.len()) - table.size() as usize;
// Grow the table if it's too small.
table.grow(delta as u32).expect("couldn't grow table");
}
}
LocalOrImport::Import(imported_table_index) => {
let (_, table_description) = module.imported_tables[imported_table_index];
match table_description.ty {
ElementType::Anyfunc => {
let imported_table = &imports.tables[imported_table_index];
let imported_local_table = (*imported_table).table;
let mut elements = unsafe {
Vec::from_raw_parts(
(*imported_local_table).base as *mut vm::Anyfunc,
(*imported_local_table).current_elements,
(*imported_local_table).capacity,
)
table.anyfunc_direct_access_mut(|elements| {
for (i, &func_index) in init.elements.iter().enumerate() {
let sig_index = module.func_assoc[func_index];
let sig_id = vm::SigId(sig_index.index() as u32);
let (func, ctx) = match func_index.local_or_import(module) {
LocalOrImport::Local(local_func_index) => (
module
.func_resolver
.get(module, local_func_index)
.unwrap()
.as_ptr()
as *const vm::Func,
vmctx,
),
LocalOrImport::Import(imported_func_index) => {
let vm::ImportedFunc { func, vmctx } =
imports.vm_functions[imported_func_index];
(func, vmctx)
}
};
if elements.len() < init_base + init.elements.len() {
// Grow the table if it's too small.
elements
.resize(init_base + init.elements.len(), vm::Anyfunc::null());
// Since the vector may have changed location after reallocating,
// we must fix the base, current_elements, and capacity fields.
unsafe {
(*imported_local_table).base = elements.as_mut_ptr() as *mut u8;
(*imported_local_table).current_elements = elements.len();
(*imported_local_table).capacity = elements.capacity();
}
}
for (i, &func_index) in init.elements.iter().enumerate() {
let sig_index = module.func_assoc[func_index];
let sig_id = vm::SigId(sig_index.index() as u32);
let func_data = match func_index.local_or_import(module) {
LocalOrImport::Local(local_func_index) => vm::ImportedFunc {
func: module
.func_resolver
.get(module, local_func_index)
.unwrap()
.as_ptr(),
vmctx,
},
LocalOrImport::Import(imported_func_index) => {
imports.functions[imported_func_index].clone()
}
};
elements[init_base + i] = vm::Anyfunc { func_data, sig_id };
}
// println!("imported elements: {:#?}", elements);
// THIS IS EXTREMELY IMPORTANT.
mem::forget(elements);
elements[init_base + i] = vm::Anyfunc { func, ctx, sig_id };
}
});
}
LocalOrImport::Import(import_table_index) => {
let table = &imports.tables[import_table_index];
if (table.size() as usize) < init_base + init.elements.len() {
let delta = (init_base + init.elements.len()) - table.size() as usize;
// Grow the table if it's too small.
table.grow(delta as u32).expect("couldn't grow table");
}
table.anyfunc_direct_access_mut(|elements| {
for (i, &func_index) in init.elements.iter().enumerate() {
let sig_index = module.func_assoc[func_index];
let sig_id = vm::SigId(sig_index.index() as u32);
let (func, ctx) = match func_index.local_or_import(module) {
LocalOrImport::Local(local_func_index) => (
module
.func_resolver
.get(module, local_func_index)
.unwrap()
.as_ptr()
as *const vm::Func,
vmctx,
),
LocalOrImport::Import(imported_func_index) => {
let vm::ImportedFunc { func, vmctx } =
imports.vm_functions[imported_func_index];
(func, vmctx)
}
};
elements[init_base + i] = vm::Anyfunc { func, ctx, sig_id };
}
});
}
}
}
tables
.iter_mut()
.map(|(_, table)| table.into_vm_table())
.map(|(_, table)| table.vm_local_table())
.collect::<Map<_, _>>()
.into_boxed_map()
}
fn generate_globals(module: &ModuleInner) -> BoxedMap<LocalGlobalIndex, vm::LocalGlobal> {
fn generate_globals(
module: &ModuleInner,
imports: &ImportBacking,
) -> BoxedMap<LocalGlobalIndex, Global> {
let mut globals = Map::with_capacity(module.globals.len());
globals.resize(module.globals.len(), vm::LocalGlobal::null());
for (_, global_init) in module.globals.iter() {
let value = match &global_init.init {
Initializer::Const(value) => value.clone(),
Initializer::GetGlobal(import_global_index) => {
imports.globals[*import_global_index].get()
}
};
let global = if global_init.desc.mutable {
Global::new_mutable(value)
} else {
Global::new(value)
};
globals.push(global);
}
globals.into_boxed_map()
}
fn finalize_globals(
module: &ModuleInner,
imports: &ImportBacking,
mut globals: BoxedMap<LocalGlobalIndex, vm::LocalGlobal>,
) -> BoxedMap<LocalGlobalIndex, vm::LocalGlobal> {
for ((_, to), (_, from)) in globals.iter_mut().zip(module.globals.iter()) {
to.data = match from.init {
Initializer::Const(ref value) => match value {
Value::I32(x) => *x as u64,
Value::I64(x) => *x as u64,
Value::F32(x) => x.to_bits() as u64,
Value::F64(x) => x.to_bits(),
},
Initializer::GetGlobal(imported_global_index) => unsafe {
(*imports.globals[imported_global_index].global).data
},
};
}
globals: &mut SliceMap<LocalGlobalIndex, Global>,
) -> BoxedMap<LocalGlobalIndex, *mut vm::LocalGlobal> {
globals
.iter_mut()
.map(|(_, global)| global.vm_local_global())
.collect::<Map<_, _>>()
.into_boxed_map()
}
}
#[derive(Debug)]
pub struct ImportBacking {
pub(crate) functions: BoxedMap<ImportedFuncIndex, vm::ImportedFunc>,
pub(crate) memories: BoxedMap<ImportedMemoryIndex, vm::ImportedMemory>,
pub(crate) tables: BoxedMap<ImportedTableIndex, vm::ImportedTable>,
pub(crate) globals: BoxedMap<ImportedGlobalIndex, vm::ImportedGlobal>,
pub(crate) memories: BoxedMap<ImportedMemoryIndex, Memory>,
pub(crate) tables: BoxedMap<ImportedTableIndex, Table>,
pub(crate) globals: BoxedMap<ImportedGlobalIndex, Global>,
pub(crate) vm_functions: BoxedMap<ImportedFuncIndex, vm::ImportedFunc>,
pub(crate) vm_memories: BoxedMap<ImportedMemoryIndex, *mut vm::LocalMemory>,
pub(crate) vm_tables: BoxedMap<ImportedTableIndex, *mut vm::LocalTable>,
pub(crate) vm_globals: BoxedMap<ImportedGlobalIndex, *mut vm::LocalGlobal>,
}
impl ImportBacking {
@ -309,48 +302,48 @@ impl ImportBacking {
let mut failed = false;
let mut link_errors = vec![];
let functions = import_functions(module, imports, vmctx).unwrap_or_else(|le| {
let vm_functions = import_functions(module, imports, vmctx).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
});
let memories = import_memories(module, imports, vmctx).unwrap_or_else(|le| {
let (memories, vm_memories) = import_memories(module, imports).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
(Map::new().into_boxed_map(), Map::new().into_boxed_map())
});
let tables = import_tables(module, imports, vmctx).unwrap_or_else(|le| {
let (tables, vm_tables) = import_tables(module, imports).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
(Map::new().into_boxed_map(), Map::new().into_boxed_map())
});
let globals = import_globals(module, imports).unwrap_or_else(|le| {
let (globals, vm_globals) = import_globals(module, imports).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
(Map::new().into_boxed_map(), Map::new().into_boxed_map())
});
if failed {
Err(link_errors)
} else {
Ok(ImportBacking {
functions,
memories,
tables,
globals,
vm_functions,
vm_memories,
vm_tables,
vm_globals,
})
}
}
pub fn imported_func(&self, func_index: ImportedFuncIndex) -> vm::ImportedFunc {
self.functions[func_index].clone()
}
pub fn imported_memory(&self, memory_index: ImportedMemoryIndex) -> vm::ImportedMemory {
self.memories[memory_index].clone()
pub fn imported_func(&self, index: ImportedFuncIndex) -> vm::ImportedFunc {
self.vm_functions[index].clone()
}
}
@ -363,7 +356,7 @@ fn import_functions(
let mut functions = Map::with_capacity(module.imported_functions.len());
for (index, ImportName { namespace, name }) in &module.imported_functions {
let sig_index = module.func_assoc[index.convert_up(module)];
let expected_sig = module.sig_registry.lookup_func_sig(sig_index);
let expected_sig = module.sig_registry.lookup_signature(sig_index);
let import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
@ -373,7 +366,7 @@ fn import_functions(
ctx,
signature,
}) => {
if expected_sig == &signature {
if expected_sig == signature {
functions.push(vm::ImportedFunc {
func: func.inner(),
vmctx: match ctx {
@ -424,36 +417,30 @@ fn import_functions(
fn import_memories(
module: &ModuleInner,
imports: &mut ImportObject,
vmctx: *mut vm::Ctx,
) -> LinkResult<BoxedMap<ImportedMemoryIndex, vm::ImportedMemory>> {
) -> LinkResult<(
BoxedMap<ImportedMemoryIndex, Memory>,
BoxedMap<ImportedMemoryIndex, *mut vm::LocalMemory>,
)> {
let mut link_errors = vec![];
let mut memories = Map::with_capacity(module.imported_memories.len());
let mut vm_memories = Map::with_capacity(module.imported_memories.len());
for (_index, (ImportName { namespace, name }, expected_memory_desc)) in
&module.imported_memories
{
let memory_import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
.get_namespace(&namespace)
.and_then(|namespace| namespace.get_export(&name));
match memory_import {
Some(Export::Memory {
local,
ctx,
memory: memory_desc,
}) => {
if expected_memory_desc.fits_in_imported(&memory_desc) {
memories.push(vm::ImportedMemory {
memory: local.inner(),
vmctx: match ctx {
Context::External(ctx) => ctx,
Context::Internal => vmctx,
},
});
Some(Export::Memory(mut memory)) => {
if expected_memory_desc.fits_in_imported(memory.descriptor()) {
memories.push(memory.clone());
vm_memories.push(memory.vm_local_memory());
} else {
link_errors.push(LinkError::IncorrectMemoryDescription {
link_errors.push(LinkError::IncorrectMemoryDescriptor {
namespace: namespace.clone(),
name: name.clone(),
expected: expected_memory_desc.clone(),
found: memory_desc.clone(),
expected: *expected_memory_desc,
found: memory.descriptor(),
});
}
}
@ -484,41 +471,35 @@ fn import_memories(
if link_errors.len() > 0 {
Err(link_errors)
} else {
Ok(memories.into_boxed_map())
Ok((memories.into_boxed_map(), vm_memories.into_boxed_map()))
}
}
fn import_tables(
module: &ModuleInner,
imports: &mut ImportObject,
vmctx: *mut vm::Ctx,
) -> LinkResult<BoxedMap<ImportedTableIndex, vm::ImportedTable>> {
) -> LinkResult<(
BoxedMap<ImportedTableIndex, Table>,
BoxedMap<ImportedTableIndex, *mut vm::LocalTable>,
)> {
let mut link_errors = vec![];
let mut tables = Map::with_capacity(module.imported_tables.len());
let mut vm_tables = Map::with_capacity(module.imported_tables.len());
for (_index, (ImportName { namespace, name }, expected_table_desc)) in &module.imported_tables {
let table_import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
.get_namespace(&namespace)
.and_then(|namespace| namespace.get_export(&name));
match table_import {
Some(Export::Table {
local,
ctx,
table: table_desc,
}) => {
if expected_table_desc.fits_in_imported(&table_desc) {
tables.push(vm::ImportedTable {
table: local.inner(),
vmctx: match ctx {
Context::External(ctx) => ctx,
Context::Internal => vmctx,
},
});
Some(Export::Table(mut table)) => {
if expected_table_desc.fits_in_imported(table.descriptor()) {
vm_tables.push(table.vm_local_table());
tables.push(table);
} else {
link_errors.push(LinkError::IncorrectTableDescription {
link_errors.push(LinkError::IncorrectTableDescriptor {
namespace: namespace.clone(),
name: name.clone(),
expected: expected_table_desc.clone(),
found: table_desc.clone(),
expected: *expected_table_desc,
found: table.descriptor(),
});
}
}
@ -549,32 +530,35 @@ fn import_tables(
if link_errors.len() > 0 {
Err(link_errors)
} else {
Ok(tables.into_boxed_map())
Ok((tables.into_boxed_map(), vm_tables.into_boxed_map()))
}
}
fn import_globals(
module: &ModuleInner,
imports: &mut ImportObject,
) -> LinkResult<BoxedMap<ImportedGlobalIndex, vm::ImportedGlobal>> {
) -> LinkResult<(
BoxedMap<ImportedGlobalIndex, Global>,
BoxedMap<ImportedGlobalIndex, *mut vm::LocalGlobal>,
)> {
let mut link_errors = vec![];
let mut globals = Map::with_capacity(module.imported_globals.len());
let mut vm_globals = Map::with_capacity(module.imported_globals.len());
for (_, (ImportName { namespace, name }, imported_global_desc)) in &module.imported_globals {
let import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
match import {
Some(Export::Global { local, global }) => {
if global == *imported_global_desc {
globals.push(vm::ImportedGlobal {
global: local.inner(),
});
Some(Export::Global(mut global)) => {
if global.descriptor() == *imported_global_desc {
vm_globals.push(global.vm_local_global());
globals.push(global);
} else {
link_errors.push(LinkError::IncorrectGlobalDescription {
link_errors.push(LinkError::IncorrectGlobalDescriptor {
namespace: namespace.clone(),
name: name.clone(),
expected: imported_global_desc.clone(),
found: global.clone(),
expected: *imported_global_desc,
found: global.descriptor(),
});
}
}
@ -605,6 +589,6 @@ fn import_globals(
if link_errors.len() > 0 {
Err(link_errors)
} else {
Ok(globals.into_boxed_map())
Ok((globals.into_boxed_map(), vm_globals.into_boxed_map()))
}
}

View File

@ -1,4 +1,7 @@
use crate::types::{FuncSig, GlobalDesc, Memory, MemoryIndex, Table, TableIndex, Type};
use crate::types::{
FuncSig, GlobalDescriptor, MemoryDescriptor, MemoryIndex, TableDescriptor, TableIndex, Type,
};
use std::sync::Arc;
pub type Result<T> = std::result::Result<T, Box<Error>>;
pub type CompileResult<T> = std::result::Result<T, Box<CompileError>>;
@ -39,30 +42,30 @@ pub enum LinkError {
IncorrectImportSignature {
namespace: String,
name: String,
expected: FuncSig,
found: FuncSig,
expected: Arc<FuncSig>,
found: Arc<FuncSig>,
},
ImportNotFound {
namespace: String,
name: String,
},
IncorrectMemoryDescription {
IncorrectMemoryDescriptor {
namespace: String,
name: String,
expected: Memory,
found: Memory,
expected: MemoryDescriptor,
found: MemoryDescriptor,
},
IncorrectTableDescription {
IncorrectTableDescriptor {
namespace: String,
name: String,
expected: Table,
found: Table,
expected: TableDescriptor,
found: TableDescriptor,
},
IncorrectGlobalDescription {
IncorrectGlobalDescriptor {
namespace: String,
name: String,
expected: GlobalDesc,
found: GlobalDesc,
expected: GlobalDescriptor,
found: GlobalDescriptor,
},
}
@ -100,9 +103,16 @@ impl PartialEq for RuntimeError {
/// Comparing two `ResolveError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum ResolveError {
Signature { expected: FuncSig, found: Vec<Type> },
ExportNotFound { name: String },
ExportWrongType { name: String },
Signature {
expected: Arc<FuncSig>,
found: Vec<Type>,
},
ExportNotFound {
name: String,
},
ExportWrongType {
name: String,
},
}
impl PartialEq for ResolveError {
@ -130,6 +140,20 @@ impl PartialEq for CallError {
}
}
/// This error type is produced when creating something,
/// like a `Memory` or a `Table`.
#[derive(Debug, Clone)]
pub enum CreationError {
UnableToCreateMemory,
UnableToCreateTable,
}
impl PartialEq for CreationError {
fn eq(&self, _other: &CreationError) -> bool {
false
}
}
/// The amalgamation of all errors that can occur
/// during the compilation, instantiation, or execution
/// of a webassembly module.
@ -142,6 +166,7 @@ pub enum Error {
RuntimeError(RuntimeError),
ResolveError(ResolveError),
CallError(CallError),
CreationError(CreationError),
}
impl PartialEq for Error {
@ -210,6 +235,12 @@ impl From<CallError> for Box<Error> {
}
}
impl From<CreationError> for Box<Error> {
fn from(creation_err: CreationError) -> Self {
Box::new(Error::CreationError(creation_err))
}
}
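A self-contained analogue of the conversion this enables (stand-in types, not the real crate): a fallible constructor can propagate `CreationError` with `?` while returning the crate-wide boxed `Result`:

#[derive(Debug)]
enum CreationError {
    UnableToCreateMemory,
}

#[derive(Debug)]
enum Error {
    CreationError(CreationError),
}

impl From<CreationError> for Box<Error> {
    fn from(creation_err: CreationError) -> Self {
        Box::new(Error::CreationError(creation_err))
    }
}

type Result<T> = std::result::Result<T, Box<Error>>;

fn try_create(succeed: bool) -> Result<u32> {
    let raw: std::result::Result<u32, CreationError> = if succeed {
        Ok(7)
    } else {
        Err(CreationError::UnableToCreateMemory)
    };
    // `?` converts the `CreationError` into `Box<Error>` via `From`.
    Ok(raw?)
}

fn main() {
    assert_eq!(try_create(true).unwrap(), 7);
    assert!(try_create(false).is_err());
}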
impl From<RuntimeError> for Box<CallError> {
fn from(runtime_err: RuntimeError) -> Self {
Box::new(CallError::Runtime(runtime_err))

View File

@ -1,11 +1,9 @@
use crate::{
instance::InstanceInner,
module::ExportIndex,
module::ModuleInner,
types::{FuncSig, GlobalDesc, Memory, Table},
vm,
global::Global, instance::InstanceInner, memory::Memory, module::ExportIndex,
module::ModuleInner, table::Table, types::FuncSig, vm,
};
use hashbrown::hash_map;
use std::sync::Arc;
#[derive(Debug, Copy, Clone)]
pub enum Context {
@ -18,22 +16,11 @@ pub enum Export {
Function {
func: FuncPointer,
ctx: Context,
signature: FuncSig,
},
Memory {
local: MemoryPointer,
ctx: Context,
memory: Memory,
},
Table {
local: TablePointer,
ctx: Context,
table: Table,
},
Global {
local: GlobalPointer,
global: GlobalDesc,
signature: Arc<FuncSig>,
},
Memory(Memory),
Table(Table),
Global(Global),
}
#[derive(Debug, Clone)]
@ -52,54 +39,6 @@ impl FuncPointer {
}
}
#[derive(Debug, Clone)]
pub struct MemoryPointer(*mut vm::LocalMemory);
impl MemoryPointer {
/// This needs to be unsafe because there is
/// no way to check whether the passed pointer
/// is valid and points to the right kind of data.
pub unsafe fn new(f: *mut vm::LocalMemory) -> Self {
MemoryPointer(f)
}
pub(crate) fn inner(&self) -> *mut vm::LocalMemory {
self.0
}
}
#[derive(Debug, Clone)]
pub struct TablePointer(*mut vm::LocalTable);
impl TablePointer {
/// This needs to be unsafe because there is
/// no way to check whether the passed pointer
/// is valid and points to the right kind of data.
pub unsafe fn new(f: *mut vm::LocalTable) -> Self {
TablePointer(f)
}
pub(crate) fn inner(&self) -> *mut vm::LocalTable {
self.0
}
}
#[derive(Debug, Clone)]
pub struct GlobalPointer(*mut vm::LocalGlobal);
impl GlobalPointer {
/// This needs to be unsafe because there is
/// no way to check whether the passed pointer
/// is valid and points to the right kind of data.
pub unsafe fn new(f: *mut vm::LocalGlobal) -> Self {
GlobalPointer(f)
}
pub(crate) fn inner(&self) -> *mut vm::LocalGlobal {
self.0
}
}
pub struct ExportIter<'a> {
inner: &'a mut InstanceInner,
iter: hash_map::Iter<'a, String, ExportIndex>,

View File

@ -0,0 +1,132 @@
use crate::{
export::Export,
import::IsExport,
types::{GlobalDescriptor, Type, Value},
vm,
};
use std::{cell::RefCell, fmt, rc::Rc};
pub struct Global {
desc: GlobalDescriptor,
storage: Rc<RefCell<vm::LocalGlobal>>,
}
impl Global {
/// Create a new `Global` value.
///
/// Usage:
///
/// ```
/// # use wasmer_runtime_core::global::Global;
/// # use wasmer_runtime_core::types::Value;
/// let global = Global::new(Value::I32(42));
/// ```
pub fn new(value: Value) -> Self {
Self::new_internal(value, false)
}
/// Create a new, mutable `Global` value.
///
/// Usage:
///
/// ```
/// # use wasmer_runtime_core::global::Global;
/// # use wasmer_runtime_core::types::Value;
/// let global = Global::new_mutable(Value::I32(42));
/// ```
pub fn new_mutable(value: Value) -> Self {
Self::new_internal(value, true)
}
fn new_internal(value: Value, mutable: bool) -> Self {
let desc = GlobalDescriptor {
mutable,
ty: value.ty(),
};
let local_global = vm::LocalGlobal {
data: match value {
Value::I32(x) => x as u64,
Value::I64(x) => x as u64,
Value::F32(x) => x.to_bits() as u64,
Value::F64(x) => x.to_bits(),
},
};
Self {
desc,
storage: Rc::new(RefCell::new(local_global)),
}
}
/// Get the [`GlobalDescriptor`] generated for this global.
///
/// [`GlobalDescriptor`]: struct.GlobalDescriptor.html
pub fn descriptor(&self) -> GlobalDescriptor {
self.desc
}
/// Set the value held by this global.
///
/// This method will panic if the value is
/// the wrong type.
pub fn set(&self, value: Value) {
if self.desc.mutable {
if self.desc.ty == value.ty() {
let local_global = vm::LocalGlobal {
data: match value {
Value::I32(x) => x as u64,
Value::I64(x) => x as u64,
Value::F32(x) => x.to_bits() as u64,
Value::F64(x) => x.to_bits(),
},
};
*self.storage.borrow_mut() = local_global;
} else {
panic!("Wrong type for setting this global")
}
} else {
panic!("Cannot modify global immutable by default")
}
}
/// Get the value held by this global.
pub fn get(&self) -> Value {
let data = self.storage.borrow().data;
match self.desc.ty {
Type::I32 => Value::I32(data as i32),
Type::I64 => Value::I64(data as i64),
Type::F32 => Value::F32(f32::from_bits(data as u32)),
Type::F64 => Value::F64(f64::from_bits(data)),
}
}
pub(crate) fn vm_local_global(&mut self) -> *mut vm::LocalGlobal {
&mut *self.storage.borrow_mut()
}
}
impl IsExport for Global {
fn to_export(&mut self) -> Export {
Export::Global(self.clone())
}
}
impl Clone for Global {
fn clone(&self) -> Self {
Self {
desc: self.desc,
storage: Rc::clone(&self.storage),
}
}
}
impl fmt::Debug for Global {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Global")
.field("desc", &self.desc)
.field("value", &self.get())
.finish()
}
}
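A hedged usage sketch of the `Global` API above, assuming the crate layout in this diff; `set` panics on a type mismatch or an immutable global, as documented:

use wasmer_runtime_core::global::Global;
use wasmer_runtime_core::types::Value;

fn main() {
    let g = Global::new_mutable(Value::I32(7));
    g.set(Value::I32(8)); // ok: mutable, and the type matches
    match g.get() {
        Value::I32(x) => assert_eq!(x, 8),
        _ => unreachable!(),
    }
    // g.set(Value::I64(8));                          // would panic: wrong type
    // Global::new(Value::I32(0)).set(Value::I32(1)); // would panic: immutable
}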

View File

@ -5,6 +5,16 @@ pub trait LikeNamespace {
fn get_export(&mut self, name: &str) -> Option<Export>;
}
pub trait IsExport {
fn to_export(&mut self) -> Export;
}
impl IsExport for Export {
fn to_export(&mut self) -> Export {
self.clone()
}
}
/// All of the import data used when instantiating.
///
/// It's suggested that you use the [`imports!`] macro
@ -14,11 +24,11 @@ pub trait LikeNamespace {
///
/// # Usage:
/// ```
/// # use wasmer_runtime_core::imports;
/// # use wasmer_runtime_core::{imports, func};
/// # use wasmer_runtime_core::vm::Ctx;
/// let import_object = imports! {
/// "env" => {
/// "foo" => foo<[i32] -> [i32]>,
/// "foo" => func!(foo, [i32] -> [i32]),
/// },
/// };
///
@ -74,7 +84,7 @@ impl ImportObject {
}
pub struct Namespace {
map: HashMap<String, Export>,
map: HashMap<String, Box<dyn IsExport>>,
}
impl Namespace {
@ -84,13 +94,19 @@ impl Namespace {
}
}
pub fn insert(&mut self, name: impl Into<String>, export: Export) -> Option<Export> {
self.map.insert(name.into(), export)
pub fn insert<S, E>(&mut self, name: S, export: E) -> Option<Box<dyn IsExport>>
where
S: Into<String>,
E: IsExport + 'static,
{
self.map.insert(name.into(), Box::new(export))
}
}
impl LikeNamespace for Namespace {
fn get_export(&mut self, name: &str) -> Option<Export> {
self.map.get(name).cloned()
self.map
.get_mut(name)
.map(|is_export| is_export.to_export())
}
}
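With the generalized `insert`, any `IsExport` implementor — a `Global`, a `Memory`, or a prebuilt `Export` — can go straight into a namespace. A hedged sketch assuming the crate layout in this diff:

use wasmer_runtime_core::global::Global;
use wasmer_runtime_core::import::Namespace;
use wasmer_runtime_core::types::Value;

fn main() {
    let mut ns = Namespace::new();
    // `Global` implements `IsExport`, so no manual `Export` wrapping is needed.
    ns.insert("the_answer", Global::new(Value::I32(42)));
}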

View File

@ -2,24 +2,29 @@ use crate::{
backend::Token,
backing::{ImportBacking, LocalBacking},
error::{CallError, CallResult, ResolveError, ResolveResult, Result},
export::{
Context, Export, ExportIter, FuncPointer, GlobalPointer, MemoryPointer, TablePointer,
},
export::{Context, Export, ExportIter, FuncPointer},
global::Global,
import::{ImportObject, LikeNamespace},
memory::Memory,
module::{ExportIndex, Module, ModuleInner},
types::{
FuncIndex, FuncSig, GlobalDesc, GlobalIndex, LocalOrImport, Memory, MemoryIndex, Table,
TableIndex, Value,
},
table::Table,
types::{FuncIndex, FuncSig, GlobalIndex, LocalOrImport, MemoryIndex, TableIndex, Value},
vm,
};
use std::{mem, rc::Rc};
use std::{mem, sync::Arc};
pub(crate) struct InstanceInner {
#[allow(dead_code)]
pub(crate) backing: LocalBacking,
import_backing: ImportBacking,
vmctx: Box<vm::Ctx>,
pub(crate) vmctx: *mut vm::Ctx,
}
impl Drop for InstanceInner {
fn drop(&mut self) {
// Drop the vmctx.
unsafe { Box::from_raw(self.vmctx) };
}
}
/// An instantiated WebAssembly module.
@ -30,14 +35,17 @@ pub(crate) struct InstanceInner {
///
/// [`ImportObject`]: struct.ImportObject.html
pub struct Instance {
module: Rc<ModuleInner>,
module: Arc<ModuleInner>,
inner: Box<InstanceInner>,
#[allow(dead_code)]
imports: Box<ImportObject>,
}
impl Instance {
pub(crate) fn new(module: Rc<ModuleInner>, mut imports: Box<ImportObject>) -> Result<Instance> {
pub(crate) fn new(
module: Arc<ModuleInner>,
mut imports: Box<ImportObject>,
) -> Result<Instance> {
// We need the backing and import_backing to create a vm::Ctx, but we need
// a vm::Ctx to create a backing and an import_backing. The solution is to create an
// uninitialized vm::Ctx and then initialize it in-place.
@ -50,15 +58,16 @@ impl Instance {
let mut inner = Box::new(InstanceInner {
backing,
import_backing,
vmctx,
vmctx: Box::leak(vmctx),
});
// Initialize the vm::Ctx in-place after the backing
// has been boxed.
*inner.vmctx =
unsafe { vm::Ctx::new(&mut inner.backing, &mut inner.import_backing, &module) };
unsafe {
*inner.vmctx = vm::Ctx::new(&mut inner.backing, &mut inner.import_backing, &module)
};
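A self-contained sketch of the leak-then-initialize pattern used here, with a toy `Ctx` standing in for `vm::Ctx`: leak the box so its address is stable while the backings are wired to it, write the real value in place, and reconstitute the box on drop (as `InstanceInner::drop` does above):

struct Ctx {
    magic: u32,
}

fn main() {
    // 1. Allocate with placeholder contents and leak, so the pointer
    //    has a stable address that outlives this scope.
    let ctx: *mut Ctx = Box::leak(Box::new(Ctx { magic: 0 }));

    // 2. Hand `ctx` out to everything that needs the stable address...

    // 3. ...then initialize the real value in place.
    unsafe { *ctx = Ctx { magic: 0xC0FFEE } };
    assert_eq!(unsafe { (*ctx).magic }, 0xC0FFEE);

    // 4. Reconstitute the box to free the allocation (the `Drop` impl's job).
    unsafe { drop(Box::from_raw(ctx)) };
}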
let mut instance = Instance {
let instance = Instance {
module,
inner,
imports,
@ -85,7 +94,7 @@ impl Instance {
/// # Ok(())
/// # }
/// ```
pub fn func(&mut self, name: &str) -> ResolveResult<Function> {
pub fn func(&self, name: &str) -> ResolveResult<Function> {
let export_index =
self.module
.exports
@ -100,12 +109,12 @@ impl Instance {
.func_assoc
.get(*func_index)
.expect("broken invariant, incorrect func index");
let signature = self.module.sig_registry.lookup_func_sig(sig_index);
let signature = self.module.sig_registry.lookup_signature(sig_index);
Ok(Function {
signature,
module: &self.module,
instance_inner: &mut self.inner,
instance_inner: &self.inner,
func_index: *func_index,
})
} else {
@ -138,7 +147,7 @@ impl Instance {
/// # Ok(())
/// # }
/// ```
pub fn call(&mut self, name: &str, args: &[Value]) -> CallResult<Vec<Value>> {
pub fn call(&self, name: &str, args: &[Value]) -> CallResult<Vec<Value>> {
let export_index =
self.module
.exports
@ -164,7 +173,7 @@ impl Instance {
///
/// [`Ctx`]: struct.Ctx.html
pub fn context(&self) -> &vm::Ctx {
&self.inner.vmctx
unsafe { &*self.inner.vmctx }
}
/// Returns a mutable reference to the
@ -172,7 +181,7 @@ impl Instance {
///
/// [`Ctx`]: struct.Ctx.html
pub fn context_mut(&mut self) -> &mut vm::Ctx {
&mut self.inner.vmctx
unsafe { &mut *self.inner.vmctx }
}
/// Returns an iterator over all of the items
@ -183,7 +192,7 @@ impl Instance {
/// The module used to instantiate this Instance.
pub fn module(&self) -> Module {
Module::new(Rc::clone(&self.module))
Module::new(Arc::clone(&self.module))
}
pub fn ctx(&mut self) -> &mut vm::Ctx {
@ -192,15 +201,15 @@ impl Instance {
}
impl Instance {
fn call_with_index(&mut self, func_index: FuncIndex, args: &[Value]) -> CallResult<Vec<Value>> {
fn call_with_index(&self, func_index: FuncIndex, args: &[Value]) -> CallResult<Vec<Value>> {
let sig_index = *self
.module
.func_assoc
.get(func_index)
.expect("broken invariant, incorrect func index");
let signature = self.module.sig_registry.lookup_func_sig(sig_index);
let signature = self.module.sig_registry.lookup_signature(sig_index);
if !signature.check_sig(args) {
if !signature.check_param_value_types(args) {
Err(ResolveError::Signature {
expected: signature.clone(),
found: args.iter().map(|val| val.ty()).collect(),
@ -208,9 +217,9 @@ impl Instance {
}
let vmctx = match func_index.local_or_import(&self.module) {
LocalOrImport::Local(_) => &mut *self.inner.vmctx,
LocalOrImport::Local(_) => self.inner.vmctx,
LocalOrImport::Import(imported_func_index) => {
self.inner.import_backing.functions[imported_func_index].vmctx
self.inner.import_backing.vm_functions[imported_func_index].vmctx
}
};
@ -231,7 +240,7 @@ impl Instance {
impl InstanceInner {
pub(crate) fn get_export_from_index(
&mut self,
&self,
module: &ModuleInner,
export_index: &ExportIndex,
) -> Export {
@ -242,46 +251,32 @@ impl InstanceInner {
Export::Function {
func,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
Context::Internal => Context::External(self.vmctx),
ctx @ Context::External(_) => ctx,
},
signature,
}
}
ExportIndex::Memory(memory_index) => {
let (local, ctx, memory) = self.get_memory_from_index(module, *memory_index);
Export::Memory {
local,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
ctx @ Context::External(_) => ctx,
},
memory,
}
let memory = self.get_memory_from_index(module, *memory_index);
Export::Memory(memory)
}
ExportIndex::Global(global_index) => {
let (local, global) = self.get_global_from_index(module, *global_index);
Export::Global { local, global }
let global = self.get_global_from_index(module, *global_index);
Export::Global(global)
}
ExportIndex::Table(table_index) => {
let (local, ctx, table) = self.get_table_from_index(module, *table_index);
Export::Table {
local,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
ctx @ Context::External(_) => ctx,
},
table,
}
let table = self.get_table_from_index(module, *table_index);
Export::Table(table)
}
}
}
fn get_func_from_index(
&mut self,
&self,
module: &ModuleInner,
func_index: FuncIndex,
) -> (FuncPointer, Context, FuncSig) {
) -> (FuncPointer, Context, Arc<FuncSig>) {
let sig_index = *module
.func_assoc
.get(func_index)
@ -298,7 +293,7 @@ impl InstanceInner {
Context::Internal,
),
LocalOrImport::Import(imported_func_index) => {
let imported_func = &self.import_backing.functions[imported_func_index];
let imported_func = &self.import_backing.vm_functions[imported_func_index];
(
imported_func.func as *const _,
Context::External(imported_func.vmctx),
@ -306,105 +301,38 @@ impl InstanceInner {
}
};
let signature = module.sig_registry.lookup_func_sig(sig_index).clone();
let signature = module.sig_registry.lookup_signature(sig_index);
(unsafe { FuncPointer::new(func_ptr) }, ctx, signature)
}
fn get_memory_from_index(
&mut self,
module: &ModuleInner,
mem_index: MemoryIndex,
) -> (MemoryPointer, Context, Memory) {
fn get_memory_from_index(&self, module: &ModuleInner, mem_index: MemoryIndex) -> Memory {
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => {
let vm_mem = &mut self.backing.vm_memories[local_mem_index];
(
unsafe { MemoryPointer::new(vm_mem) },
Context::Internal,
*module
.memories
.get(local_mem_index)
.expect("broken invariant, memories"),
)
}
LocalOrImport::Local(local_mem_index) => self.backing.memories[local_mem_index].clone(),
LocalOrImport::Import(imported_mem_index) => {
let &(_, mem) = &module
.imported_memories
.get(imported_mem_index)
.expect("missing imported memory index");
let vm::ImportedMemory { memory, vmctx } =
&self.import_backing.memories[imported_mem_index];
(
unsafe { MemoryPointer::new(*memory) },
Context::External(*vmctx),
*mem,
)
self.import_backing.memories[imported_mem_index].clone()
}
}
}
fn get_global_from_index(
&mut self,
module: &ModuleInner,
global_index: GlobalIndex,
) -> (GlobalPointer, GlobalDesc) {
fn get_global_from_index(&self, module: &ModuleInner, global_index: GlobalIndex) -> Global {
match global_index.local_or_import(module) {
LocalOrImport::Local(local_global_index) => {
let vm_global = &mut self.backing.vm_globals[local_global_index];
(
unsafe { GlobalPointer::new(vm_global) },
module
.globals
.get(local_global_index)
.expect("broken invariant, globals")
.desc,
)
self.backing.globals[local_global_index].clone()
}
LocalOrImport::Import(imported_global_index) => {
let &(_, imported_global_desc) = &module
.imported_globals
.get(imported_global_index)
.expect("missing imported global index");
let vm::ImportedGlobal { global } =
&self.import_backing.globals[imported_global_index];
(
unsafe { GlobalPointer::new(*global) },
*imported_global_desc,
)
LocalOrImport::Import(import_global_index) => {
self.import_backing.globals[import_global_index].clone()
}
}
}
fn get_table_from_index(
&mut self,
module: &ModuleInner,
table_index: TableIndex,
) -> (TablePointer, Context, Table) {
fn get_table_from_index(&self, module: &ModuleInner, table_index: TableIndex) -> Table {
match table_index.local_or_import(module) {
LocalOrImport::Local(local_table_index) => {
let vm_table = &mut self.backing.vm_tables[local_table_index];
(
unsafe { TablePointer::new(vm_table) },
Context::Internal,
*module
.tables
.get(local_table_index)
.expect("broken invariant, tables"),
)
self.backing.tables[local_table_index].clone()
}
LocalOrImport::Import(imported_table_index) => {
let &(_, tab) = &module
.imported_tables
.get(imported_table_index)
.expect("missing imported table index");
let vm::ImportedTable { table, vmctx } =
&self.import_backing.tables[imported_table_index];
(
unsafe { TablePointer::new(*table) },
Context::External(*vmctx),
*tab,
)
self.import_backing.tables[imported_table_index].clone()
}
}
}
@ -420,9 +348,9 @@ impl LikeNamespace for Instance {
/// A representation of an exported WebAssembly function.
pub struct Function<'a> {
signature: &'a FuncSig,
pub(crate) signature: Arc<FuncSig>,
module: &'a ModuleInner,
instance_inner: &'a mut InstanceInner,
pub(crate) instance_inner: &'a InstanceInner,
func_index: FuncIndex,
}
@ -450,7 +378,7 @@ impl<'a> Function<'a> {
/// # }
/// ```
pub fn call(&mut self, params: &[Value]) -> CallResult<Vec<Value>> {
if !self.signature.check_sig(params) {
if !self.signature.check_param_value_types(params) {
Err(ResolveError::Signature {
expected: self.signature.clone(),
found: params.iter().map(|val| val.ty()).collect(),
@ -458,9 +386,9 @@ impl<'a> Function<'a> {
}
let vmctx = match self.func_index.local_or_import(self.module) {
LocalOrImport::Local(_) => &mut *self.instance_inner.vmctx,
LocalOrImport::Local(_) => self.instance_inner.vmctx,
LocalOrImport::Import(imported_func_index) => {
self.instance_inner.import_backing.functions[imported_func_index].vmctx
self.instance_inner.import_backing.vm_functions[imported_func_index].vmctx
}
};
@ -479,7 +407,7 @@ impl<'a> Function<'a> {
}
pub fn signature(&self) -> &FuncSig {
self.signature
&*self.signature
}
pub fn raw(&self) -> *const vm::Func {
@ -491,7 +419,7 @@ impl<'a> Function<'a> {
.unwrap()
.as_ptr(),
LocalOrImport::Import(import_func_index) => {
self.instance_inner.import_backing.functions[import_func_index].func
self.instance_inner.import_backing.vm_functions[import_func_index].func
}
}
}

View File

@ -9,6 +9,7 @@ pub mod backend;
mod backing;
pub mod error;
pub mod export;
pub mod global;
pub mod import;
pub mod instance;
pub mod memory;
@ -18,6 +19,7 @@ pub mod structures;
mod sys;
pub mod table;
pub mod types;
pub mod units;
pub mod vm;
#[doc(hidden)]
pub mod vmcalls;
@ -29,7 +31,7 @@ pub use self::error::Result;
pub use self::instance::Instance;
#[doc(inline)]
pub use self::module::Module;
use std::rc::Rc;
use std::sync::Arc;
pub mod prelude {
pub use crate::import::{ImportObject, Namespace};
@ -39,7 +41,7 @@ pub mod prelude {
MemoryIndex, TableIndex, Type, Value,
};
pub use crate::vm;
pub use crate::{export_func, imports};
pub use crate::{func, imports};
}
/// Compile a [`Module`] using the provided compiler from
@ -55,7 +57,7 @@ pub fn compile_with(
let token = backend::Token::generate();
compiler
.compile(wasm, token)
.map(|inner| module::Module::new(Rc::new(inner)))
.map(|inner| module::Module::new(Arc::new(inner)))
}
/// Perform validation as defined by the

View File

@ -1,11 +1,20 @@
#[macro_export]
#[cfg(feature = "debug")]
macro_rules! debug {
($fmt:expr) => (if cfg!(any(debug_assertions, feature="debug")) { println!(concat!("wasmer-runtime(:{})::", $fmt), line!()) });
($fmt:expr, $($arg:tt)*) => (if cfg!(any(debug_assertions, feature="debug")) { println!(concat!("wasmer-runtime(:{})::", $fmt, "\n"), line!(), $($arg)*) });
($fmt:expr) => (println!(concat!("wasmer-runtime(:{})::", $fmt), line!()));
($fmt:expr, $($arg:tt)*) => (println!(concat!("wasmer-runtime(:{})::", $fmt, "\n"), line!(), $($arg)*));
}
#[macro_export]
macro_rules! export_func {
($func:ident, [ $( $params:ident ),* ] -> [ $( $returns:ident ),* ]) => {{
#[cfg(not(feature = "debug"))]
macro_rules! debug {
($fmt:expr) => {};
($fmt:expr, $($arg:tt)*) => {};
}
#[macro_export]
macro_rules! func {
($func:ident, [ $( $params:ident ),* ] -> [ $( $returns:ident ),* ] ) => {{
use $crate::{
export::{Context, Export, FuncPointer},
types::{FuncSig, Type},
@ -17,10 +26,10 @@ macro_rules! export_func {
Export::Function {
func: unsafe { FuncPointer::new(func as _) },
ctx: Context::Internal,
signature: FuncSig {
params: vec![$($crate::__export_func_convert_type!($params),)*],
returns: vec![$($crate::__export_func_convert_type!($returns),)*],
},
signature: FuncSig::new(
&[$($crate::__export_func_convert_type!($params),)*] as &[Type],
&[$($crate::__export_func_convert_type!($returns),)*] as &[Type],
).into(),
}
}};
}
@ -62,11 +71,11 @@ macro_rules! __export_func_convert_type {
///
/// # Usage:
/// ```
/// # use wasmer_runtime_core::imports;
/// # use wasmer_runtime_core::{imports, func};
/// # use wasmer_runtime_core::vm::Ctx;
/// let import_object = imports! {
/// "env" => {
/// "foo" => foo<[i32] -> [i32]>,
/// "foo" => func!(foo, [i32] -> [i32]),
/// },
/// };
///
@ -96,13 +105,10 @@ macro_rules! imports {
#[macro_export]
#[doc(hidden)]
macro_rules! __imports_internal {
( { $( $imp_name:expr => $func:ident < [ $( $params:ident ),* ] -> [ $( $returns:ident ),* ] >, )* } ) => {{
( { $( $imp_name:expr => $import_item:expr, )* } ) => {{
let mut ns = Namespace::new();
$(
ns.insert($imp_name, $crate::export_func!(
$func,
[ $( $params ),* ] -> [ $( $returns )* ]
));
ns.insert($imp_name, $import_item);
)*
ns
}};

View File

@ -1,224 +0,0 @@
use std::ops::{Deref, DerefMut};
use crate::{
sys,
types::{LocalMemoryIndex, Memory},
vm,
};
/// A linear memory instance.
#[derive(Debug)]
pub struct LinearMemory {
/// The actual memory allocation.
memory: sys::Memory,
/// The current number of wasm pages.
current: u32,
// The maximum size the WebAssembly Memory is allowed to grow
// to, in units of WebAssembly pages. When present, the maximum
// parameter acts as a hint to the engine to reserve memory up
// front. However, the engine may ignore or clamp this reservation
// request. In general, most WebAssembly modules shouldn't need
// to set a maximum.
max: Option<u32>,
// The size of the extra guard pages after the end.
// Is used to optimize loads and stores with constant offsets.
offset_guard_size: usize,
/// Requires exception catching to handle out-of-bounds accesses.
requires_signal_catch: bool,
}
/// It holds the raw bytes of memory accessed by a WebAssembly Instance
impl LinearMemory {
pub(crate) const PAGE_SIZE: u32 = 65_536;
pub(crate) const MAX_PAGES: u32 = 65_536;
#[doc(hidden)]
pub const DEFAULT_HEAP_SIZE: usize = 1 << 32; // 4 GiB
#[doc(hidden)]
pub const DEFAULT_GUARD_SIZE: usize = 1 << 31; // 2 GiB
pub(crate) const DEFAULT_SIZE: usize = Self::DEFAULT_HEAP_SIZE + Self::DEFAULT_GUARD_SIZE; // 6 GiB
/// Create a new linear memory instance with specified initial and maximum number of pages.
///
/// `maximum` cannot be set to more than `65536` pages.
pub fn new(mem: &Memory) -> Self {
assert!(mem.min <= Self::MAX_PAGES);
assert!(mem.max.is_none() || mem.max.unwrap() <= Self::MAX_PAGES);
let (mmap_size, initial_pages, offset_guard_size, requires_signal_catch) = if
/*mem.is_static_heap()*/
true {
(Self::DEFAULT_SIZE, mem.min, Self::DEFAULT_GUARD_SIZE, true)
// This is a static heap
} else {
// this is a dynamic heap
assert!(!mem.shared, "shared memories must have a maximum size.");
(
mem.min as usize * Self::PAGE_SIZE as usize,
mem.min,
0,
false,
)
};
let mut memory = sys::Memory::with_size(mmap_size).unwrap();
// Map the initial pages as read-write, since the initial mmap is not accessible.
if initial_pages != 0 {
unsafe {
memory
.protect(
0..(initial_pages as usize * Self::PAGE_SIZE as usize),
sys::Protect::ReadWrite,
)
.expect("unable to make memory accessible");
}
}
Self {
memory,
current: initial_pages,
max: mem.max,
offset_guard_size,
requires_signal_catch,
}
}
/// Returns the base address of this linear memory.
fn base(&mut self) -> *mut u8 {
self.memory.as_ptr()
}
/// Returns the size in bytes
pub(crate) fn size(&self) -> usize {
self.current as usize * Self::PAGE_SIZE as usize
}
pub fn pages(&self) -> u32 {
self.current
}
/// Returns the maximum number of wasm pages allowed.
pub fn max(&self) -> u32 {
self.max.unwrap_or(Self::MAX_PAGES)
}
pub fn into_vm_memory(&mut self, index: LocalMemoryIndex) -> vm::LocalMemory {
vm::LocalMemory {
base: self.base(),
size: self.size(),
index,
}
}
/// Grow memory by the specified amount of pages.
///
/// Returns `None` if memory can't be grown by the specified amount
/// of pages.
pub(crate) fn grow_dynamic(&mut self, add_pages: u32) -> Option<i32> {
debug!("grow_memory_dynamic called!");
assert!(self.max.is_none());
if add_pages == 0 {
return Some(self.current as _);
}
let prev_pages = self.current;
let new_pages = match self.current.checked_add(add_pages) {
Some(new_pages) => new_pages,
None => return None,
};
if let Some(val) = self.max {
if new_pages > val {
return None;
}
// Wasm linear memories are never allowed to grow beyond what is
// indexable. If the memory has no maximum, enforce the greatest
// limit here.
} else if new_pages >= Self::MAX_PAGES {
return None;
}
let new_bytes = (new_pages * Self::PAGE_SIZE) as usize;
if new_bytes > self.memory.size() - self.offset_guard_size {
let memory_size = new_bytes.checked_add(self.offset_guard_size)?;
let mut new_memory = sys::Memory::with_size(memory_size).ok()?;
unsafe {
new_memory
.protect(0..new_bytes, sys::Protect::ReadWrite)
.ok()?;
}
let copy_size = self.memory.size() - self.offset_guard_size;
unsafe {
new_memory.as_slice_mut()[..copy_size]
.copy_from_slice(&self.memory.as_slice()[..copy_size]);
}
self.memory = new_memory;
}
self.current = new_pages;
Some(prev_pages as i32)
}
pub(crate) fn grow_static(&mut self, add_pages: u32) -> Option<i32> {
// debug!("grow_memory_static called!");
// assert!(self.max.is_some());
if add_pages == 0 {
return Some(self.current as _);
}
let prev_pages = self.current;
let new_pages = match self.current.checked_add(add_pages) {
Some(new_pages) => new_pages,
None => return None,
};
if let Some(val) = self.max {
if new_pages > val {
return None;
}
// Wasm linear memories are never allowed to grow beyond what is
// indexable. If the memory has no maximum, enforce the greatest
// limit here.
} else if new_pages >= Self::MAX_PAGES {
return None;
}
let prev_bytes = (prev_pages * Self::PAGE_SIZE) as usize;
let new_bytes = (new_pages * Self::PAGE_SIZE) as usize;
unsafe {
self.memory
.protect(prev_bytes..new_bytes, sys::Protect::ReadWrite)
.ok()?;
}
self.current = new_pages;
Some(prev_pages as i32)
}
}
impl Deref for LinearMemory {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe { self.memory.as_slice() }
}
}
impl DerefMut for LinearMemory {
fn deref_mut(&mut self) -> &mut [u8] {
unsafe { self.memory.as_slice_mut() }
}
}

View File

@ -0,0 +1,110 @@
use crate::{
error::CreationError,
sys,
types::MemoryDescriptor,
units::{Bytes, Pages},
vm,
};
pub const DYNAMIC_GUARD_SIZE: usize = 4096;
/// This is an internal-only API.
///
/// A dynamic memory allocates only the minimum amount of memory
/// when first created. Over time, as it grows, it may reallocate to
/// a different location and size.
///
/// Dynamic memories are significantly faster to create than static
/// memories and use much less virtual memory; however, they require
/// the webassembly module to bounds-check memory accesses.
///
/// While a dynamic memory could use a vector of some sort as its
/// backing memory, we use mmap (or the platform equivalent) so that
/// we can add a guard page at the end to help elide some bounds checks.
pub struct DynamicMemory {
memory: sys::Memory,
current: Pages,
max: Option<Pages>,
}
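A self-contained analogue of the growth strategy described above, `Vec`-backed instead of mmap-backed: allocate only the minimum up front, reallocate (possibly moving) on grow, and return the old size in pages as `memory.grow` does:

const PAGE_SIZE: usize = 65_536;

struct ToyDynamicMemory {
    bytes: Vec<u8>,
    current_pages: usize,
    max_pages: Option<usize>,
}

impl ToyDynamicMemory {
    fn new(min_pages: usize, max_pages: Option<usize>) -> Self {
        ToyDynamicMemory {
            bytes: vec![0; min_pages * PAGE_SIZE],
            current_pages: min_pages,
            max_pages,
        }
    }

    // Returns the previous size in pages, or None if growth fails.
    fn grow(&mut self, delta_pages: usize) -> Option<usize> {
        let new_pages = self.current_pages.checked_add(delta_pages)?;
        if let Some(max) = self.max_pages {
            if new_pages > max {
                return None;
            }
        }
        // May move the allocation; the real code must then re-patch
        // `local.base` and `local.bound`, as `DynamicMemory::grow` does below.
        self.bytes.resize(new_pages * PAGE_SIZE, 0);
        let old_pages = self.current_pages;
        self.current_pages = new_pages;
        Some(old_pages)
    }
}

fn main() {
    let mut mem = ToyDynamicMemory::new(1, Some(4));
    assert_eq!(mem.grow(2), Some(1));
    assert_eq!(mem.grow(5), None); // would exceed the maximum
}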
impl DynamicMemory {
pub(super) fn new(
desc: MemoryDescriptor,
local: &mut vm::LocalMemory,
) -> Result<Box<Self>, CreationError> {
let min_bytes: Bytes = desc.minimum.into();
let memory = {
let mut memory = sys::Memory::with_size(min_bytes.0 + DYNAMIC_GUARD_SIZE)
.map_err(|_| CreationError::UnableToCreateMemory)?;
if desc.minimum != Pages(0) {
unsafe {
memory
.protect(0..min_bytes.0, sys::Protect::ReadWrite)
.map_err(|_| CreationError::UnableToCreateMemory)?;
}
}
memory
};
let mut storage = Box::new(DynamicMemory {
memory,
current: desc.minimum,
max: desc.maximum,
});
let storage_ptr: *mut DynamicMemory = &mut *storage;
local.base = storage.memory.as_ptr();
local.bound = min_bytes.0;
local.memory = storage_ptr as *mut ();
Ok(storage)
}
pub fn size(&self) -> Pages {
self.current
}
pub fn grow(&mut self, delta: Pages, local: &mut vm::LocalMemory) -> Option<Pages> {
if delta == Pages(0) {
return Some(self.current);
}
let new_pages = self.current.checked_add(delta)?;
if let Some(max) = self.max {
if new_pages > max {
return None;
}
}
let mut new_memory =
sys::Memory::with_size(new_pages.bytes().0 + DYNAMIC_GUARD_SIZE).ok()?;
unsafe {
new_memory
.protect(0..new_pages.bytes().0, sys::Protect::ReadWrite)
.ok()?;
new_memory.as_slice_mut()[..self.current.bytes().0]
.copy_from_slice(&self.memory.as_slice()[..self.current.bytes().0]);
}
self.memory = new_memory; // The old memory gets dropped.
local.base = self.memory.as_ptr();
local.bound = new_pages.bytes().0;
let old_pages = self.current;
self.current = new_pages;
Some(old_pages)
}
pub fn as_slice(&self) -> &[u8] {
unsafe { &self.memory.as_slice()[0..self.current.bytes().0] }
}
pub fn as_slice_mut(&mut self) -> &mut [u8] {
unsafe { &mut self.memory.as_slice_mut()[0..self.current.bytes().0] }
}
}

View File

@ -0,0 +1,303 @@
use crate::{
error::CreationError,
export::Export,
import::IsExport,
memory::dynamic::DYNAMIC_GUARD_SIZE,
memory::static_::{SAFE_STATIC_GUARD_SIZE, SAFE_STATIC_HEAP_SIZE},
types::{MemoryDescriptor, ValueType},
units::Pages,
vm,
};
use std::{cell::RefCell, fmt, mem, ptr, rc::Rc, slice};
pub use self::dynamic::DynamicMemory;
pub use self::static_::{SharedStaticMemory, StaticMemory};
mod dynamic;
mod static_;
pub struct Memory {
desc: MemoryDescriptor,
storage: Rc<RefCell<(MemoryStorage, Box<vm::LocalMemory>)>>,
}
impl Memory {
/// Create a new `Memory` from a [`MemoryDescriptor`]
///
/// [`MemoryDescriptor`]: struct.MemoryDescriptor.html
///
/// Usage:
///
/// ```
/// # use wasmer_runtime_core::types::MemoryDescriptor;
/// # use wasmer_runtime_core::memory::Memory;
/// # use wasmer_runtime_core::error::Result;
/// # use wasmer_runtime_core::units::Pages;
/// # fn create_memory() -> Result<()> {
/// let descriptor = MemoryDescriptor {
/// minimum: Pages(10),
/// maximum: None,
/// shared: false,
/// };
///
/// let memory = Memory::new(descriptor)?;
/// # Ok(())
/// # }
/// ```
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
let mut vm_local_memory = Box::new(vm::LocalMemory {
base: ptr::null_mut(),
bound: 0,
memory: ptr::null_mut(),
});
let memory_storage = match desc.memory_type() {
MemoryType::Dynamic => {
MemoryStorage::Dynamic(DynamicMemory::new(desc, &mut vm_local_memory)?)
}
MemoryType::Static => {
MemoryStorage::Static(StaticMemory::new(desc, &mut vm_local_memory)?)
}
MemoryType::SharedStatic => unimplemented!("shared memories are not yet implemented"),
};
Ok(Memory {
desc,
storage: Rc::new(RefCell::new((memory_storage, vm_local_memory))),
})
}
/// Return the [`MemoryDescriptor`] that this memory
/// was created with.
///
/// [`MemoryDescriptor`]: struct.MemoryDescriptor.html
pub fn descriptor(&self) -> MemoryDescriptor {
self.desc
}
/// Grow this memory by the specified number of pages.
pub fn grow(&mut self, delta: Pages) -> Option<Pages> {
match &mut *self.storage.borrow_mut() {
(MemoryStorage::Dynamic(ref mut dynamic_memory), ref mut local) => {
dynamic_memory.grow(delta, local)
}
(MemoryStorage::Static(ref mut static_memory), ref mut local) => {
static_memory.grow(delta, local)
}
(MemoryStorage::SharedStatic(_), _) => unimplemented!(),
}
}
/// The size, in wasm pages, of this memory.
pub fn size(&self) -> Pages {
match &*self.storage.borrow() {
(MemoryStorage::Dynamic(ref dynamic_memory), _) => dynamic_memory.size(),
(MemoryStorage::Static(ref static_memory), _) => static_memory.size(),
(MemoryStorage::SharedStatic(_), _) => unimplemented!(),
}
}
pub fn read<T: ValueType>(&self, offset: u32) -> Result<T, ()> {
let offset = offset as usize;
let borrow_ref = self.storage.borrow();
let memory_storage = &borrow_ref.0;
let mem_slice = match memory_storage {
MemoryStorage::Dynamic(ref dynamic_memory) => dynamic_memory.as_slice(),
MemoryStorage::Static(ref static_memory) => static_memory.as_slice(),
MemoryStorage::SharedStatic(_) => panic!("cannot slice a shared memory"),
};
if offset + mem::size_of::<T>() <= mem_slice.len() {
T::from_le(&mem_slice[offset..]).map_err(|_| ())
} else {
Err(())
}
}
pub fn write<T: ValueType>(&self, offset: u32, value: T) -> Result<(), ()> {
let offset = offset as usize;
let mut borrow_ref = self.storage.borrow_mut();
let memory_storage = &mut borrow_ref.0;
let mem_slice = match memory_storage {
MemoryStorage::Dynamic(ref mut dynamic_memory) => dynamic_memory.as_slice_mut(),
MemoryStorage::Static(ref mut static_memory) => static_memory.as_slice_mut(),
MemoryStorage::SharedStatic(_) => panic!("cannot slice a shared memory"),
};
if offset + mem::size_of::<T>() <= mem_slice.len() {
value.into_le(&mut mem_slice[offset..]);
Ok(())
} else {
Err(())
}
}
pub fn read_many<T: ValueType>(&self, offset: u32, count: usize) -> Result<Vec<T>, ()> {
let offset = offset as usize;
let borrow_ref = self.storage.borrow();
let memory_storage = &borrow_ref.0;
let mem_slice = match memory_storage {
MemoryStorage::Dynamic(ref dynamic_memory) => dynamic_memory.as_slice(),
MemoryStorage::Static(ref static_memory) => static_memory.as_slice(),
MemoryStorage::SharedStatic(_) => panic!("cannot slice a shared memory"),
};
let bytes_size = count * mem::size_of::<T>();
if offset + bytes_size <= mem_slice.len() {
let buffer = &mem_slice[offset..offset + bytes_size];
let value_type_buffer = unsafe {
slice::from_raw_parts(
buffer.as_ptr() as *const T,
buffer.len() / mem::size_of::<T>(),
)
};
Ok(value_type_buffer.to_vec())
} else {
Err(())
}
}
pub fn write_many<T: ValueType>(&self, offset: u32, values: &[T]) -> Result<(), ()> {
let offset = offset as usize;
let mut borrow_ref = self.storage.borrow_mut();
let memory_storage = &mut borrow_ref.0;
let mem_slice = match memory_storage {
MemoryStorage::Dynamic(ref mut dynamic_memory) => dynamic_memory.as_slice_mut(),
MemoryStorage::Static(ref mut static_memory) => static_memory.as_slice_mut(),
MemoryStorage::SharedStatic(_) => panic!("cannot slice a shared memory"),
};
let bytes_size = values.len() * mem::size_of::<T>();
if offset + bytes_size <= mem_slice.len() {
let u8_buffer =
unsafe { slice::from_raw_parts(values.as_ptr() as *const u8, bytes_size) };
mem_slice[offset..offset + bytes_size].copy_from_slice(u8_buffer);
Ok(())
} else {
Err(())
}
}
pub fn direct_access<T: ValueType, F, R>(&self, f: F) -> R
where
F: FnOnce(&[T]) -> R,
{
let borrow_ref = self.storage.borrow();
let memory_storage = &borrow_ref.0;
let mem_slice = match memory_storage {
MemoryStorage::Dynamic(ref dynamic_memory) => dynamic_memory.as_slice(),
MemoryStorage::Static(ref static_memory) => static_memory.as_slice(),
MemoryStorage::SharedStatic(_) => panic!("cannot slice a shared memory"),
};
let t_buffer = unsafe {
slice::from_raw_parts(
mem_slice.as_ptr() as *const T,
mem_slice.len() / mem::size_of::<T>(),
)
};
f(t_buffer)
}
pub fn direct_access_mut<T: ValueType, F, R>(&self, f: F) -> R
where
F: FnOnce(&mut [T]) -> R,
{
let mut borrow_ref = self.storage.borrow_mut();
let memory_storage = &mut borrow_ref.0;
let mem_slice = match memory_storage {
MemoryStorage::Dynamic(ref mut dynamic_memory) => dynamic_memory.as_slice_mut(),
MemoryStorage::Static(ref mut static_memory) => static_memory.as_slice_mut(),
MemoryStorage::SharedStatic(_) => panic!("cannot slice a shared memory"),
};
let t_buffer = unsafe {
slice::from_raw_parts_mut(
mem_slice.as_mut_ptr() as *mut T,
mem_slice.len() / mem::size_of::<T>(),
)
};
f(t_buffer)
}
pub(crate) fn vm_local_memory(&mut self) -> *mut vm::LocalMemory {
&mut *self.storage.borrow_mut().1
}
}
impl IsExport for Memory {
fn to_export(&mut self) -> Export {
Export::Memory(self.clone())
}
}
impl Clone for Memory {
fn clone(&self) -> Self {
Self {
desc: self.desc,
storage: Rc::clone(&self.storage),
}
}
}
pub enum MemoryStorage {
Dynamic(Box<DynamicMemory>),
Static(Box<StaticMemory>),
SharedStatic(Box<SharedStaticMemory>),
}
impl MemoryStorage {
pub fn to_type(&self) -> MemoryType {
match self {
MemoryStorage::Dynamic(_) => MemoryType::Dynamic,
MemoryStorage::Static(_) => MemoryType::Static,
MemoryStorage::SharedStatic(_) => MemoryType::SharedStatic,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MemoryType {
Dynamic,
Static,
SharedStatic,
}
impl MemoryType {
#[doc(hidden)]
pub fn guard_size(self) -> u64 {
match self {
MemoryType::Dynamic => DYNAMIC_GUARD_SIZE as u64,
MemoryType::Static => SAFE_STATIC_GUARD_SIZE as u64,
MemoryType::SharedStatic => SAFE_STATIC_GUARD_SIZE as u64,
}
}
#[doc(hidden)]
pub fn bounds(self) -> Option<u64> {
match self {
MemoryType::Dynamic => None,
MemoryType::Static => Some(SAFE_STATIC_HEAP_SIZE as u64),
MemoryType::SharedStatic => Some(SAFE_STATIC_HEAP_SIZE as u64),
}
}
}
impl fmt::Debug for Memory {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Memory")
.field("desc", &self.desc)
.field("size", &self.size())
.finish()
}
}
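
Putting the accessors above together: `read`/`write` are typed, little-endian, and bounds-checked against the current size, while `direct_access` lends out the whole region as a typed slice without copying. A minimal sketch, using the crate paths from the doc example near the top of this file:

```
use wasmer_runtime_core::{memory::Memory, types::MemoryDescriptor, units::Pages};

fn main() -> Result<(), ()> {
    let desc = MemoryDescriptor {
        minimum: Pages(1),
        maximum: None,
        shared: false,
    };
    let memory = Memory::new(desc).map_err(|_| ())?;

    // Typed little-endian write and read, checked against the current size.
    memory.write::<u32>(0, 0xdead_beef)?;
    assert_eq!(memory.read::<u32>(0)?, 0xdead_beef);

    // Bulk access without copying, via a borrowed typed slice.
    memory.direct_access::<u8, _, _>(|bytes| assert_eq!(bytes[0], 0xef));
    Ok(())
}
```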

View File

@ -0,0 +1,10 @@
#[doc(hidden)]
pub const SAFE_STATIC_HEAP_SIZE: usize = 1 << 32; // 4 GiB
#[doc(hidden)]
pub const SAFE_STATIC_GUARD_SIZE: usize = 1 << 31; // 2 GiB
mod shared;
mod unshared;
pub use self::shared::SharedStaticMemory;
pub use self::unshared::StaticMemory;
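
These constants are where the "6GB of *virtual* memory" figure in the static-memory docs below comes from: a 4 GiB heap mapping covers every possible 32-bit wasm address, and the 2 GiB guard turns accesses with large constant offsets into traps instead of silent corruption, so no explicit bounds check is needed. A sanity-check sketch (assumes a 64-bit target, where `1 << 32` fits in `usize`):

```
const SAFE_STATIC_HEAP_SIZE: usize = 1 << 32; // 4 GiB
const SAFE_STATIC_GUARD_SIZE: usize = 1 << 31; // 2 GiB

fn main() {
    // 4 GiB heap + 2 GiB guard = the 6 GiB virtual reservation.
    assert_eq!(SAFE_STATIC_HEAP_SIZE + SAFE_STATIC_GUARD_SIZE, 6 << 30);
    // Every 32-bit wasm address lands inside the heap mapping.
    assert!(u32::max_value() as usize <= SAFE_STATIC_HEAP_SIZE - 1);
}
```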

View File

@ -0,0 +1,11 @@
use crate::sys;
use parking_lot::Mutex;
use std::sync::atomic::AtomicUsize;
// Remove this attribute once this is used.
#[allow(dead_code)]
pub struct SharedStaticMemory {
memory: sys::Memory,
current: AtomicUsize,
lock: Mutex<()>,
}

View File

@ -0,0 +1,102 @@
use crate::{
error::CreationError,
memory::static_::{SAFE_STATIC_GUARD_SIZE, SAFE_STATIC_HEAP_SIZE},
sys,
types::MemoryDescriptor,
units::Pages,
vm,
};
/// This is an internal-only API.
///
/// A static memory allocates 6GB of *virtual* memory when created
/// so that the WebAssembly module can run without bounds checks.
///
/// Additionally, a static memory stays at a single virtual address, so there is no need
/// to reload its base address on each use.
///
/// Static memories take a relatively long time to create, so if memories are short-lived,
/// using a dynamic memory is recommended. There is currently, however, no user-facing API
/// for selecting which type of memory is used.
pub struct StaticMemory {
memory: sys::Memory,
current: Pages,
max: Option<Pages>,
}
impl StaticMemory {
pub(in crate::memory) fn new(
desc: MemoryDescriptor,
local: &mut vm::LocalMemory,
) -> Result<Box<Self>, CreationError> {
let memory = {
let mut memory = sys::Memory::with_size(SAFE_STATIC_HEAP_SIZE + SAFE_STATIC_GUARD_SIZE)
.map_err(|_| CreationError::UnableToCreateMemory)?;
if desc.minimum != Pages(0) {
unsafe {
memory
.protect(0..desc.minimum.bytes().0, sys::Protect::ReadWrite)
.map_err(|_| CreationError::UnableToCreateMemory)?;
}
}
memory
};
let mut storage = Box::new(StaticMemory {
memory,
current: desc.minimum,
max: desc.maximum,
});
let storage_ptr: *mut StaticMemory = &mut *storage;
local.base = storage.memory.as_ptr();
local.bound = desc.minimum.bytes().0;
local.memory = storage_ptr as *mut ();
Ok(storage)
}
pub fn size(&self) -> Pages {
self.current
}
pub fn grow(&mut self, delta: Pages, local: &mut vm::LocalMemory) -> Option<Pages> {
if delta == Pages(0) {
return Some(self.current);
}
let new_pages = self.current.checked_add(delta)?;
if let Some(max) = self.max {
if new_pages > max {
return None;
}
}
unsafe {
self.memory
.protect(
self.current.bytes().0..new_pages.bytes().0,
sys::Protect::ReadWrite,
)
.ok()?;
}
local.bound = new_pages.bytes().0;
let old_pages = self.current;
self.current = new_pages;
Some(old_pages)
}
pub fn as_slice(&self) -> &[u8] {
unsafe { &self.memory.as_slice()[0..self.current.bytes().0] }
}
pub fn as_slice_mut(&mut self) -> &mut [u8] {
unsafe { &mut self.memory.as_slice_mut()[0..self.current.bytes().0] }
}
}

View File

@ -5,14 +5,15 @@ use crate::{
sig_registry::SigRegistry,
structures::Map,
types::{
FuncIndex, Global, GlobalDesc, GlobalIndex, ImportedFuncIndex, ImportedGlobalIndex,
ImportedMemoryIndex, ImportedTableIndex, Initializer, LocalGlobalIndex, LocalMemoryIndex,
LocalTableIndex, Memory, MemoryIndex, SigIndex, Table, TableIndex,
FuncIndex, GlobalDescriptor, GlobalIndex, GlobalInit, ImportedFuncIndex,
ImportedGlobalIndex, ImportedMemoryIndex, ImportedTableIndex, Initializer,
LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryDescriptor, MemoryIndex,
SigIndex, TableDescriptor, TableIndex,
},
Instance,
};
use hashbrown::HashMap;
use std::rc::Rc;
use std::sync::Arc;
/// This is used to instantiate a new WebAssembly module.
#[doc(hidden)]
@ -21,15 +22,15 @@ pub struct ModuleInner {
pub protected_caller: Box<dyn ProtectedCaller>,
// These are strictly local and the type system ensures that.
pub memories: Map<LocalMemoryIndex, Memory>,
pub globals: Map<LocalGlobalIndex, Global>,
pub tables: Map<LocalTableIndex, Table>,
pub memories: Map<LocalMemoryIndex, MemoryDescriptor>,
pub globals: Map<LocalGlobalIndex, GlobalInit>,
pub tables: Map<LocalTableIndex, TableDescriptor>,
// These are strictly imported and the type system ensures that.
pub imported_functions: Map<ImportedFuncIndex, ImportName>,
pub imported_memories: Map<ImportedMemoryIndex, (ImportName, Memory)>,
pub imported_tables: Map<ImportedTableIndex, (ImportName, Table)>,
pub imported_globals: Map<ImportedGlobalIndex, (ImportName, GlobalDesc)>,
pub imported_memories: Map<ImportedMemoryIndex, (ImportName, MemoryDescriptor)>,
pub imported_tables: Map<ImportedTableIndex, (ImportName, TableDescriptor)>,
pub imported_globals: Map<ImportedGlobalIndex, (ImportName, GlobalDescriptor)>,
pub exports: HashMap<String, ExportIndex>,
@ -49,10 +50,10 @@ pub struct ModuleInner {
///
/// [`compile`]: fn.compile.html
/// [`compile_with`]: fn.compile_with.html
pub struct Module(#[doc(hidden)] pub Rc<ModuleInner>);
pub struct Module(#[doc(hidden)] pub Arc<ModuleInner>);
impl Module {
pub(crate) fn new(inner: Rc<ModuleInner>) -> Self {
pub(crate) fn new(inner: Arc<ModuleInner>) -> Self {
Module(inner)
}
@ -79,7 +80,7 @@ impl Module {
/// # }
/// ```
pub fn instantiate(&self, import_object: ImportObject) -> Result<Instance> {
Instance::new(Rc::clone(&self.0), Box::new(import_object))
Instance::new(Arc::clone(&self.0), Box::new(import_object))
}
}
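
The Rc → Arc switch here, together with the new `Send + Sync` bounds on `ProtectedCaller` and `FuncResolver`, is what allows a compiled `Module` to be shared across threads. A stand-alone sketch of the sharing pattern (the `ModuleInner` below is a stand-in, and this assumes the real `ModuleInner` is in fact `Send + Sync` after this change):

```
use std::sync::Arc;
use std::thread;

// Stand-in for the real ModuleInner; only the Arc sharing pattern matters here.
struct ModuleInner {
    name: String,
}
struct Module(Arc<ModuleInner>);

fn main() {
    let module = Module(Arc::new(ModuleInner { name: "demo".into() }));
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let inner = Arc::clone(&module.0);
            // Each thread could instantiate its own Instance from the shared module.
            thread::spawn(move || println!("instantiating {}", inner.name))
        })
        .collect();
    for h in handles {
        h.join().unwrap();
    }
}
```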

View File

@ -3,39 +3,50 @@ use crate::{
types::{FuncSig, SigIndex},
};
use hashbrown::HashMap;
use lazy_static::lazy_static;
use parking_lot::RwLock;
use std::sync::Arc;
#[derive(Debug)]
pub struct SigRegistry {
func_table: HashMap<FuncSig, SigIndex>,
sig_assoc: Map<SigIndex, FuncSig>,
duplicated_sig_assoc: Map<SigIndex, SigIndex>,
}
impl SigRegistry {
pub fn new() -> Self {
Self {
lazy_static! {
static ref GLOBAL_SIG_REGISTRY: RwLock<GlobalSigRegistry> = {
let registry = GlobalSigRegistry {
func_table: HashMap::new(),
sig_assoc: Map::new(),
duplicated_sig_assoc: Map::new(),
}
}
};
RwLock::new(registry)
};
}
struct GlobalSigRegistry {
func_table: HashMap<Arc<FuncSig>, SigIndex>,
sig_assoc: Map<SigIndex, Arc<FuncSig>>,
}
#[derive(Debug)]
pub struct SigRegistry;
impl SigRegistry {
pub fn lookup_sig_index<Sig>(&self, func_sig: Sig) -> SigIndex
where
Sig: Into<Arc<FuncSig>>,
{
let func_sig = func_sig.into();
let mut global = (*GLOBAL_SIG_REGISTRY).write();
let global = &mut *global;
let func_table = &mut global.func_table;
let sig_assoc = &mut global.sig_assoc;
pub fn register(&mut self, func_sig: FuncSig) -> SigIndex {
// self.sig_assoc.push(func_sig)
let func_table = &mut self.func_table;
let sig_assoc = &mut self.sig_assoc;
let sig_index = *func_table
.entry(func_sig.clone())
.entry(Arc::clone(&func_sig))
.or_insert_with(|| sig_assoc.push(func_sig));
self.duplicated_sig_assoc.push(sig_index);
sig_index
}
pub fn lookup_deduplicated_sigindex(&self, sig_index: SigIndex) -> SigIndex {
self.duplicated_sig_assoc[sig_index]
}
pub fn lookup_func_sig(&self, sig_index: SigIndex) -> &FuncSig {
&self.sig_assoc[sig_index]
pub fn lookup_signature(&self, sig_index: SigIndex) -> Arc<FuncSig> {
let global = (*GLOBAL_SIG_REGISTRY).read();
Arc::clone(&global.sig_assoc[sig_index])
}
}
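
`SigRegistry` is now a zero-sized handle over one global, lock-protected, deduplicating table: structurally equal `FuncSig`s always resolve to the same `SigIndex`. A minimal sketch, assuming the `sig_registry` module path is public and that `SigIndex` supports equality comparison:

```
use wasmer_runtime_core::{
    sig_registry::SigRegistry,
    types::{FuncSig, Type},
};

fn main() {
    let a = SigRegistry.lookup_sig_index(FuncSig::new(vec![Type::I32], vec![Type::I32]));
    let b = SigRegistry.lookup_sig_index(FuncSig::new(vec![Type::I32], vec![Type::I32]));
    // Equal signatures deduplicate to the same global index.
    assert_eq!(a, b);
    // The registry hands back a shared Arc to the canonical signature.
    let sig = SigRegistry.lookup_signature(a);
    assert_eq!(sig.params(), &[Type::I32][..]);
}
```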

View File

@ -4,6 +4,9 @@ use page_size;
use std::ops::{Bound, RangeBounds};
use std::{ptr, slice};
unsafe impl Send for Memory {}
unsafe impl Sync for Memory {}
#[derive(Debug)]
pub struct Memory {
ptr: *mut u8,

View File

@ -7,6 +7,9 @@ use page_size;
use std::ops::{Bound, RangeBounds};
use std::{ptr, slice};
unsafe impl Send for Memory {}
unsafe impl Sync for Memory {}
#[derive(Debug)]
pub struct Memory {
ptr: *mut u8,

View File

@ -1,45 +0,0 @@
use super::vm;
use crate::types::{ElementType, Table};
#[derive(Debug, Clone)]
pub enum TableElements {
/// This is intended to be a caller-checked Anyfunc.
Anyfunc(Vec<vm::Anyfunc>),
}
#[derive(Debug)]
pub struct TableBacking {
pub elements: TableElements,
pub max: Option<u32>,
}
impl TableBacking {
pub fn new(table: &Table) -> Self {
match table.ty {
ElementType::Anyfunc => {
let initial_table_backing_len = match table.max {
Some(max) => max,
None => table.min,
} as usize;
Self {
elements: TableElements::Anyfunc(vec![
vm::Anyfunc::null();
initial_table_backing_len
]),
max: table.max,
}
}
}
}
pub fn into_vm_table(&mut self) -> vm::LocalTable {
match self.elements {
TableElements::Anyfunc(ref mut funcs) => vm::LocalTable {
base: funcs.as_mut_ptr() as *mut u8,
current_elements: funcs.len(),
capacity: funcs.capacity(),
},
}
}
}

View File

@ -0,0 +1,134 @@
use crate::{
error::CreationError,
instance::Function,
sig_registry::SigRegistry,
structures::TypedIndex,
types::{FuncSig, TableDescriptor},
vm,
};
use std::{ptr, sync::Arc};
enum AnyfuncInner<'a> {
Host {
ptr: *const vm::Func,
signature: Arc<FuncSig>,
},
Managed(Function<'a>),
}
pub struct Anyfunc<'a> {
inner: AnyfuncInner<'a>,
}
impl<'a> Anyfunc<'a> {
pub unsafe fn new<Sig>(func: *const vm::Func, signature: Sig) -> Self
where
Sig: Into<Arc<FuncSig>>,
{
Self {
inner: AnyfuncInner::Host {
ptr: func as _,
signature: signature.into(),
},
}
}
}
impl<'a> From<Function<'a>> for Anyfunc<'a> {
fn from(function: Function<'a>) -> Self {
Anyfunc {
inner: AnyfuncInner::Managed(function),
}
}
}
pub struct AnyfuncTable {
backing: Vec<vm::Anyfunc>,
max: Option<u32>,
}
impl AnyfuncTable {
pub fn new(
desc: TableDescriptor,
local: &mut vm::LocalTable,
) -> Result<Box<Self>, CreationError> {
let initial_table_backing_len = match desc.maximum {
Some(max) => max,
None => desc.minimum,
} as usize;
let mut storage = Box::new(AnyfuncTable {
backing: vec![vm::Anyfunc::null(); initial_table_backing_len],
max: desc.maximum,
});
let storage_ptr: *mut AnyfuncTable = &mut *storage;
local.base = storage.backing.as_mut_ptr() as *mut u8;
local.count = storage.backing.len();
local.table = storage_ptr as *mut ();
Ok(storage)
}
pub fn current_size(&self) -> u32 {
self.backing.len() as u32
}
pub fn internal_buffer(&mut self) -> &mut [vm::Anyfunc] {
&mut self.backing
}
pub fn grow(&mut self, delta: u32, local: &mut vm::LocalTable) -> Option<u32> {
let starting_len = self.backing.len() as u32;
let new_len = starting_len.checked_add(delta)?;
if let Some(max) = self.max {
if new_len > max {
return None;
}
}
self.backing.resize(new_len as usize, vm::Anyfunc::null());
local.base = self.backing.as_mut_ptr() as *mut u8;
local.count = self.backing.len();
Some(starting_len)
}
pub fn set(&mut self, index: u32, element: Anyfunc) -> Result<(), ()> {
if let Some(slot) = self.backing.get_mut(index as usize) {
let anyfunc = match element.inner {
AnyfuncInner::Host { ptr, signature } => {
let sig_index = SigRegistry.lookup_sig_index(signature);
let sig_id = vm::SigId(sig_index.index() as u32);
vm::Anyfunc {
func: ptr,
ctx: ptr::null_mut(),
sig_id,
}
}
AnyfuncInner::Managed(ref func) => {
let sig_index = SigRegistry.lookup_sig_index(Arc::clone(&func.signature));
let sig_id = vm::SigId(sig_index.index() as u32);
vm::Anyfunc {
func: func.raw(),
ctx: func.instance_inner.vmctx,
sig_id,
}
}
};
*slot = anyfunc;
Ok(())
} else {
Err(())
}
}
}
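
The `sig_id` written into each slot is what makes these "caller-checked" anyfuncs: both the call site and the table entry carry the deduplicated global signature index, so validating a `call_indirect` is a single integer compare before jumping through the stored pointer. An illustrative stand-alone sketch (none of these names are the crate's):

```
#[derive(Clone, Copy, PartialEq, Debug)]
struct SigId(u32);

struct Anyfunc {
    func: Option<fn(i32) -> i32>,
    sig_id: SigId,
}

// Caller-checked dispatch: verify the signature id, then call through.
fn call_indirect(
    table: &[Anyfunc],
    index: usize,
    expected: SigId,
    arg: i32,
) -> Result<i32, &'static str> {
    let slot = table.get(index).ok_or("undefined table element")?;
    if slot.sig_id != expected {
        return Err("indirect call type mismatch");
    }
    let f = slot.func.ok_or("uninitialized table element")?;
    Ok(f(arg))
}

fn main() {
    let add_one = (|x: i32| x + 1) as fn(i32) -> i32;
    let table = vec![Anyfunc { func: Some(add_one), sig_id: SigId(0) }];
    assert_eq!(call_indirect(&table, 0, SigId(0), 41), Ok(42));
    assert!(call_indirect(&table, 0, SigId(1), 41).is_err());
}
```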

View File

@ -0,0 +1,142 @@
use crate::{
error::CreationError,
export::Export,
import::IsExport,
types::{ElementType, TableDescriptor},
vm,
};
use std::{cell::RefCell, fmt, ptr, rc::Rc};
mod anyfunc;
pub use self::anyfunc::Anyfunc;
use self::anyfunc::AnyfuncTable;
pub enum Element<'a> {
Anyfunc(Anyfunc<'a>),
}
// #[derive(Debug)]
pub enum TableStorage {
/// This is intended to be a caller-checked Anyfunc.
Anyfunc(Box<AnyfuncTable>),
}
pub struct Table {
desc: TableDescriptor,
storage: Rc<RefCell<(TableStorage, vm::LocalTable)>>,
}
impl Table {
/// Create a new `Table` from a [`TableDescriptor`]
///
/// [`TableDescriptor`]: struct.TableDescriptor.html
///
/// Usage:
///
/// ```
/// # use wasmer_runtime_core::types::{TableDescriptor, ElementType};
/// # use wasmer_runtime_core::table::Table;
/// # use wasmer_runtime_core::error::Result;
/// # fn create_table() -> Result<()> {
/// let descriptor = TableDescriptor {
/// element: ElementType::Anyfunc,
/// minimum: 10,
/// maximum: None,
/// };
///
/// let table = Table::new(descriptor)?;
/// # Ok(())
/// # }
/// ```
pub fn new(desc: TableDescriptor) -> Result<Self, CreationError> {
let mut local = vm::LocalTable {
base: ptr::null_mut(),
count: 0,
table: ptr::null_mut(),
};
let storage = match desc.element {
ElementType::Anyfunc => TableStorage::Anyfunc(AnyfuncTable::new(desc, &mut local)?),
};
Ok(Self {
desc,
storage: Rc::new(RefCell::new((storage, local))),
})
}
/// Get the `TableDescriptor` used to create this `Table`.
pub fn descriptor(&self) -> TableDescriptor {
self.desc
}
/// Set the element at index.
pub fn set(&self, index: u32, element: Element) -> Result<(), ()> {
match &mut *self.storage.borrow_mut() {
(TableStorage::Anyfunc(ref mut anyfunc_table), _) => {
match element {
Element::Anyfunc(anyfunc) => anyfunc_table.set(index, anyfunc),
// _ => panic!("wrong element type for anyfunc table"),
}
}
}
}
pub(crate) fn anyfunc_direct_access_mut<F, R>(&self, f: F) -> R
where
F: FnOnce(&mut [vm::Anyfunc]) -> R,
{
match &mut *self.storage.borrow_mut() {
(TableStorage::Anyfunc(ref mut anyfunc_table), _) => f(anyfunc_table.internal_buffer()),
}
}
/// The current size of this table.
pub fn size(&self) -> u32 {
match &*self.storage.borrow() {
(TableStorage::Anyfunc(ref anyfunc_table), _) => anyfunc_table.current_size(),
}
}
/// Grow this table by `delta`.
pub fn grow(&self, delta: u32) -> Option<u32> {
if delta == 0 {
return Some(self.size());
}
match &mut *self.storage.borrow_mut() {
(TableStorage::Anyfunc(ref mut anyfunc_table), ref mut local) => {
anyfunc_table.grow(delta, local)
}
}
}
pub(crate) fn vm_local_table(&mut self) -> *mut vm::LocalTable {
&mut self.storage.borrow_mut().1
}
}
impl IsExport for Table {
fn to_export(&mut self) -> Export {
Export::Table(self.clone())
}
}
impl Clone for Table {
fn clone(&self) -> Self {
Self {
desc: self.desc,
storage: Rc::clone(&self.storage),
}
}
}
impl fmt::Debug for Table {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Table")
.field("desc", &self.desc)
.field("size", &self.size())
.finish()
}
}
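
Building on the doc example above: growth is checked against the descriptor's `maximum` and, as with memories, returns the previous size. A short sketch:

```
use wasmer_runtime_core::{
    table::Table,
    types::{ElementType, TableDescriptor},
};

fn main() -> Result<(), ()> {
    let table = Table::new(TableDescriptor {
        element: ElementType::Anyfunc,
        minimum: 2,
        maximum: Some(4),
    })
    .map_err(|_| ())?;

    assert_eq!(table.size(), 2);
    assert_eq!(table.grow(2), Some(2)); // returns the old element count
    assert_eq!(table.grow(1), None); // would exceed `maximum`
    assert_eq!(table.size(), 4);
    Ok(())
}
```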

View File

@ -1,4 +1,5 @@
use crate::{module::ModuleInner, structures::TypedIndex};
use crate::{memory::MemoryType, module::ModuleInner, structures::TypedIndex, units::Pages};
use std::{borrow::Cow, mem};
/// Represents a WebAssembly type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -64,6 +65,62 @@ impl From<f64> for Value {
}
}
pub enum ValueError {
BufferTooSmall,
}
pub trait ValueType: Copy + Clone
where
Self: Sized,
{
fn into_le(self, buffer: &mut [u8]);
fn from_le(buffer: &[u8]) -> Result<Self, ValueError>;
}
macro_rules! convert_value_impl {
($t:ty) => {
impl ValueType for $t {
fn into_le(self, buffer: &mut [u8]) {
buffer[..mem::size_of::<Self>()].copy_from_slice(&self.to_le_bytes());
}
fn from_le(buffer: &[u8]) -> Result<Self, ValueError> {
if buffer.len() >= mem::size_of::<Self>() {
let mut array = [0u8; mem::size_of::<Self>()];
array.copy_from_slice(&buffer[..mem::size_of::<Self>()]);
Ok(Self::from_le_bytes(array))
} else {
Err(ValueError::BufferTooSmall)
}
}
}
};
( $($t:ty),* ) => {
$(
convert_value_impl!($t);
)*
};
}
convert_value_impl!(u8, i8, u16, i16, u32, i32, u64, i64);
impl ValueType for f32 {
fn into_le(self, buffer: &mut [u8]) {
self.to_bits().into_le(buffer);
}
fn from_le(buffer: &[u8]) -> Result<Self, ValueError> {
Ok(f32::from_bits(<u32 as ValueType>::from_le(buffer)?))
}
}
impl ValueType for f64 {
fn into_le(self, buffer: &mut [u8]) {
self.to_bits().into_le(buffer);
}
fn from_le(buffer: &[u8]) -> Result<Self, ValueError> {
Ok(f64::from_bits(<u64 as ValueType>::from_le(buffer)?))
}
}
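
`ValueType` is plain, safe little-endian (de)serialization over byte buffers; floats route through their bit patterns, so round trips are bit-exact. A small sketch (assuming `ValueType` is exported from `types`, as the memory module's imports above suggest):

```
use wasmer_runtime_core::types::ValueType;

fn main() {
    let mut buf = [0u8; 8];
    1234.5f64.into_le(&mut buf);
    // from_le fails with ValueError::BufferTooSmall on short buffers;
    // a full 8-byte buffer round-trips exactly.
    let back = <f64 as ValueType>::from_le(&buf).ok().unwrap();
    assert_eq!(back, 1234.5);
}
```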
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ElementType {
/// Any wasm function.
@ -71,21 +128,23 @@ pub enum ElementType {
}
#[derive(Debug, Clone, Copy)]
pub struct Table {
pub struct TableDescriptor {
/// Type of data stored in this table.
pub ty: ElementType,
pub element: ElementType,
/// The minimum number of elements that must be stored in this table.
pub min: u32,
pub minimum: u32,
/// The maximum number of elements in this table.
pub max: Option<u32>,
pub maximum: Option<u32>,
}
impl Table {
pub(crate) fn fits_in_imported(&self, imported: &Table) -> bool {
impl TableDescriptor {
pub(crate) fn fits_in_imported(&self, imported: TableDescriptor) -> bool {
// TODO: We should define implementation limits.
let imported_max = imported.max.unwrap_or(u32::max_value());
let self_max = self.max.unwrap_or(u32::max_value());
self.ty == imported.ty && imported_max <= self_max && self.min <= imported.min
let imported_max = imported.maximum.unwrap_or(u32::max_value());
let self_max = self.maximum.unwrap_or(u32::max_value());
self.element == imported.element
&& imported_max <= self_max
&& self.minimum <= imported.minimum
}
}
@ -101,39 +160,46 @@ pub enum Initializer {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct GlobalDesc {
pub struct GlobalDescriptor {
pub mutable: bool,
pub ty: Type,
}
/// A wasm global.
#[derive(Debug, Clone)]
pub struct Global {
pub desc: GlobalDesc,
pub struct GlobalInit {
pub desc: GlobalDescriptor,
pub init: Initializer,
}
/// A wasm memory.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Memory {
pub struct MemoryDescriptor {
/// The minimum number of allowed pages.
pub min: u32,
pub minimum: Pages,
/// The maximum number of allowed pages.
pub max: Option<u32>,
pub maximum: Option<Pages>,
/// This memory can be shared between wasm threads.
pub shared: bool,
}
impl Memory {
pub fn is_static_heap(&self) -> bool {
self.max.is_some()
impl MemoryDescriptor {
pub fn memory_type(self) -> MemoryType {
match (self.maximum.is_some(), self.shared) {
(true, true) => MemoryType::SharedStatic,
(true, false) => MemoryType::Static,
(false, false) => MemoryType::Dynamic,
(false, true) => panic!("shared memory without a max is not allowed"),
}
}
pub(crate) fn fits_in_imported(&self, imported: &Memory) -> bool {
let imported_max = imported.max.unwrap_or(65_536);
let self_max = self.max.unwrap_or(65_536);
pub(crate) fn fits_in_imported(&self, imported: MemoryDescriptor) -> bool {
let imported_max = imported.maximum.unwrap_or(Pages(65_536));
let self_max = self.maximum.unwrap_or(Pages(65_536));
self.shared == imported.shared && imported_max <= self_max && self.min <= imported.min
self.shared == imported.shared
&& imported_max <= self_max
&& self.minimum <= imported.minimum
}
}
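
The `(maximum, shared)` pair alone picks the memory style; a quick sketch of the mapping (crate paths assumed as in the memory module):

```
use wasmer_runtime_core::{memory::MemoryType, types::MemoryDescriptor, units::Pages};

fn main() {
    let bounded = MemoryDescriptor { minimum: Pages(1), maximum: Some(Pages(16)), shared: false };
    let unbounded = MemoryDescriptor { minimum: Pages(1), maximum: None, shared: false };
    assert_eq!(bounded.memory_type(), MemoryType::Static); // bounded => static heap
    assert_eq!(unbounded.memory_type(), MemoryType::Dynamic); // unbounded => dynamic heap
    // `shared: true` without a maximum panics, per the match above.
}
```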
@ -141,12 +207,31 @@ impl Memory {
/// in a wasm module or exposed to wasm by the host.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FuncSig {
pub params: Vec<Type>,
pub returns: Vec<Type>,
params: Cow<'static, [Type]>,
returns: Cow<'static, [Type]>,
}
impl FuncSig {
pub fn check_sig(&self, params: &[Value]) -> bool {
pub fn new<Params, Returns>(params: Params, returns: Returns) -> Self
where
Params: Into<Cow<'static, [Type]>>,
Returns: Into<Cow<'static, [Type]>>,
{
Self {
params: params.into(),
returns: returns.into(),
}
}
pub fn params(&self) -> &[Type] {
&self.params
}
pub fn returns(&self) -> &[Type] {
&self.returns
}
pub fn check_param_value_types(&self, params: &[Value]) -> bool {
self.params.len() == params.len()
&& self
.params

View File

@ -0,0 +1,94 @@
use std::{
fmt,
ops::{Add, Sub},
};
const WASM_PAGE_SIZE: usize = 65_536;
const WASM_MAX_PAGES: usize = 65_536;
/// Units of WebAssembly pages (as specified to be 65,536 bytes).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Pages(pub u32);
impl Pages {
pub fn checked_add(self, rhs: Pages) -> Option<Pages> {
let added = (self.0 as usize) + (rhs.0 as usize);
if added <= WASM_MAX_PAGES {
Some(Pages(added as u32))
} else {
None
}
}
pub fn bytes(self) -> Bytes {
self.into()
}
}
impl fmt::Debug for Pages {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} pages", self.0)
}
}
/// Units of WebAssembly memory in terms of 8-bit bytes.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Bytes(pub usize);
impl fmt::Debug for Bytes {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} bytes", self.0)
}
}
impl From<Pages> for Bytes {
fn from(pages: Pages) -> Bytes {
Bytes((pages.0 as usize) * WASM_PAGE_SIZE)
}
}
impl<T> Sub<T> for Pages
where
T: Into<Pages>,
{
type Output = Pages;
fn sub(self, rhs: T) -> Pages {
Pages(self.0 - rhs.into().0)
}
}
impl<T> Add<T> for Pages
where
T: Into<Pages>,
{
type Output = Pages;
fn add(self, rhs: T) -> Pages {
Pages(self.0 + rhs.into().0)
}
}
impl From<Bytes> for Pages {
fn from(bytes: Bytes) -> Pages {
Pages((bytes.0 / WASM_PAGE_SIZE) as u32)
}
}
impl<T> Sub<T> for Bytes
where
T: Into<Bytes>,
{
type Output = Bytes;
fn sub(self, rhs: T) -> Bytes {
Bytes(self.0 - rhs.into().0)
}
}
impl<T> Add<T> for Bytes
where
T: Into<Bytes>,
{
type Output = Bytes;
fn add(self, rhs: T) -> Bytes {
Bytes(self.0 + rhs.into().0)
}
}
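
`Pages` and `Bytes` convert in both directions at the 64 KiB wasm page size, and `checked_add` caps totals at `WASM_MAX_PAGES`. A few sanity checks (path assumed as `wasmer_runtime_core::units`):

```
use wasmer_runtime_core::units::{Bytes, Pages};

fn main() {
    assert_eq!(Pages(10).bytes(), Bytes(655_360)); // 10 * 64 KiB
    assert_eq!(Pages::from(Bytes(655_360 + 123)), Pages(10)); // Bytes -> Pages truncates
    assert_eq!(Pages(65_535).checked_add(Pages(2)), None); // capped at WASM_MAX_PAGES
}
```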

View File

@ -1,10 +1,11 @@
pub use crate::backing::{ImportBacking, LocalBacking};
use crate::{
memory::Memory,
module::ModuleInner,
structures::TypedIndex,
types::{LocalMemoryIndex, LocalOrImport, MemoryIndex},
types::{LocalOrImport, MemoryIndex},
};
use std::{ffi::c_void, mem, ptr, slice};
use std::{ffi::c_void, mem, ptr};
/// The context of the currently running WebAssembly instance.
///
@ -13,28 +14,28 @@ use std::{ffi::c_void, mem, ptr, slice};
#[repr(C)]
pub struct Ctx {
/// A pointer to an array of locally-defined memories, indexed by `MemoryIndex`.
pub(crate) memories: *mut LocalMemory,
pub(crate) memories: *mut *mut LocalMemory,
/// A pointer to an array of locally-defined tables, indexed by `TableIndex`.
pub(crate) tables: *mut LocalTable,
pub(crate) tables: *mut *mut LocalTable,
/// A pointer to an array of locally-defined globals, indexed by `GlobalIndex`.
pub(crate) globals: *mut LocalGlobal,
pub(crate) globals: *mut *mut LocalGlobal,
/// A pointer to an array of imported memories, indexed by `MemoryIndex`,
pub(crate) imported_memories: *mut ImportedMemory,
pub(crate) imported_memories: *mut *mut LocalMemory,
/// A pointer to an array of imported tables, indexed by `TableIndex`.
pub(crate) imported_tables: *mut ImportedTable,
pub(crate) imported_tables: *mut *mut LocalTable,
/// A pointer to an array of imported globals, indexed by `GlobalIndex`.
pub(crate) imported_globals: *mut ImportedGlobal,
pub(crate) imported_globals: *mut *mut LocalGlobal,
/// A pointer to an array of imported functions, indexed by `FuncIndex`.
pub(crate) imported_funcs: *mut ImportedFunc,
pub(crate) local_backing: *mut LocalBacking,
pub(crate) import_backing: *mut ImportBacking,
local_backing: *mut LocalBacking,
import_backing: *mut ImportBacking,
module: *const ModuleInner,
pub data: *mut c_void,
@ -53,10 +54,10 @@ impl Ctx {
tables: local_backing.vm_tables.as_mut_ptr(),
globals: local_backing.vm_globals.as_mut_ptr(),
imported_memories: import_backing.memories.as_mut_ptr(),
imported_tables: import_backing.tables.as_mut_ptr(),
imported_globals: import_backing.globals.as_mut_ptr(),
imported_funcs: import_backing.functions.as_mut_ptr(),
imported_memories: import_backing.vm_memories.as_mut_ptr(),
imported_tables: import_backing.vm_tables.as_mut_ptr(),
imported_globals: import_backing.vm_globals.as_mut_ptr(),
imported_funcs: import_backing.vm_functions.as_mut_ptr(),
local_backing,
import_backing,
@ -80,10 +81,10 @@ impl Ctx {
tables: local_backing.vm_tables.as_mut_ptr(),
globals: local_backing.vm_globals.as_mut_ptr(),
imported_memories: import_backing.memories.as_mut_ptr(),
imported_tables: import_backing.tables.as_mut_ptr(),
imported_globals: import_backing.globals.as_mut_ptr(),
imported_funcs: import_backing.functions.as_mut_ptr(),
imported_memories: import_backing.vm_memories.as_mut_ptr(),
imported_tables: import_backing.vm_tables.as_mut_ptr(),
imported_globals: import_backing.vm_globals.as_mut_ptr(),
imported_funcs: import_backing.vm_functions.as_mut_ptr(),
local_backing,
import_backing,
@ -109,65 +110,21 @@ impl Ctx {
/// fn read_memory(ctx: &Ctx) -> u8 {
/// let first_memory = ctx.memory(0);
/// // Read the first byte of that linear memory.
/// first_memory[0]
/// first_memory.read(0).unwrap()
/// }
/// ```
pub fn memory<'a>(&'a self, mem_index: u32) -> &'a [u8] {
pub fn memory<'a>(&'a self, mem_index: u32) -> &'a Memory {
let module = unsafe { &*self.module };
let mem_index = MemoryIndex::new(mem_index as usize);
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => {
let local_backing = unsafe { &*self.local_backing };
&local_backing.memories[local_mem_index][..]
}
LocalOrImport::Import(import_mem_index) => {
let import_backing = unsafe { &mut *self.import_backing };
let vm_memory_import = import_backing.memories[import_mem_index].clone();
unsafe {
let memory = &*vm_memory_import.memory;
slice::from_raw_parts(memory.base, memory.size)
}
}
}
}
/// This exposes the specified memory of the WebAssembly instance
/// as a mutable slice.
///
/// WebAssembly will soon support multiple linear memories, so this
/// forces the user to specify.
///
/// # Usage:
///
/// ```
/// # use wasmer_runtime_core::{
/// # vm::Ctx,
/// # error::Result,
/// # };
/// extern fn host_func(ctx: &mut Ctx) {
/// let first_memory = ctx.memory_mut(0);
/// // Set the first byte of that linear memory.
/// first_memory[0] = 42;
/// }
/// ```
pub fn memory_mut<'a>(&'a mut self, mem_index: u32) -> &'a mut [u8] {
let module = unsafe { &*self.module };
let mem_index = MemoryIndex::new(mem_index as usize);
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => {
let local_backing = unsafe { &mut *self.local_backing };
&mut local_backing.memories[local_mem_index][..]
}
LocalOrImport::Import(import_mem_index) => {
let import_backing = unsafe { &mut *self.import_backing };
let vm_memory_import = import_backing.memories[import_mem_index].clone();
unsafe {
let memory = &*vm_memory_import.memory;
slice::from_raw_parts_mut(memory.base, memory.size)
}
}
LocalOrImport::Local(local_mem_index) => unsafe {
let local_backing = &*self.local_backing;
&local_backing.memories[local_mem_index]
},
LocalOrImport::Import(import_mem_index) => unsafe {
let import_backing = &*self.import_backing;
&import_backing.memories[import_mem_index]
},
}
}
}
@ -244,9 +201,9 @@ pub struct LocalTable {
/// pointer to the elements in the table.
pub base: *mut u8,
/// Number of elements in the table (NOT necessarily the size of the table in bytes!).
pub current_elements: usize,
/// The number of elements that can fit into the memory allocated for this table.
pub capacity: usize,
pub count: usize,
/// The table that this represents. At the moment, this can only be `*mut AnyfuncTable`.
pub table: *mut (),
}
impl LocalTable {
@ -255,31 +212,7 @@ impl LocalTable {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_current_elements() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedTable {
/// A pointer to the table definition.
pub table: *mut LocalTable,
/// A pointer to the vmcontext that owns this table definition.
pub vmctx: *mut Ctx,
}
impl ImportedTable {
#[allow(clippy::erasing_op)] // TODO
pub fn offset_table() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_vmctx() -> u8 {
pub fn offset_count() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
@ -295,9 +228,11 @@ pub struct LocalMemory {
/// Pointer to the bottom of this linear memory.
pub base: *mut u8,
/// Current size of this linear memory in bytes.
pub size: usize,
/// The local memory index.
pub index: LocalMemoryIndex,
pub bound: usize,
/// The actual memory that this represents.
/// This is either `*mut DynamicMemory`, `*mut StaticMemory`,
/// or `*mut SharedStaticMemory`.
pub memory: *mut (),
}
impl LocalMemory {
@ -306,30 +241,7 @@ impl LocalMemory {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_size() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedMemory {
/// A pointer to the memory definition.
pub memory: *mut LocalMemory,
pub vmctx: *mut Ctx,
}
impl ImportedMemory {
#[allow(clippy::erasing_op)] // TODO
pub fn offset_memory() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_vmctx() -> u8 {
pub fn offset_bound() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
@ -360,23 +272,6 @@ impl LocalGlobal {
}
}
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedGlobal {
pub global: *mut LocalGlobal,
}
impl ImportedGlobal {
#[allow(clippy::erasing_op)] // TODO
pub fn offset_global() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct SigId(pub u32);
@ -385,17 +280,16 @@ pub struct SigId(pub u32);
#[derive(Debug, Clone)]
#[repr(C)]
pub struct Anyfunc {
pub func_data: ImportedFunc,
pub func: *const Func,
pub ctx: *mut Ctx,
pub sig_id: SigId,
}
impl Anyfunc {
pub fn null() -> Self {
Self {
func_data: ImportedFunc {
func: ptr::null(),
vmctx: ptr::null_mut(),
},
func: ptr::null(),
ctx: ptr::null_mut(),
sig_id: SigId(u32::max_value()),
}
}
@ -420,10 +314,7 @@ impl Anyfunc {
#[cfg(test)]
mod vm_offset_tests {
use super::{
Anyfunc, Ctx, ImportedFunc, ImportedGlobal, ImportedMemory, ImportedTable, LocalGlobal,
LocalMemory, LocalTable,
};
use super::{Anyfunc, Ctx, ImportedFunc, LocalGlobal, LocalMemory, LocalTable};
#[test]
fn vmctx() {
@ -484,21 +375,8 @@ mod vm_offset_tests {
);
assert_eq!(
LocalTable::offset_current_elements() as usize,
offset_of!(LocalTable => current_elements).get_byte_offset(),
);
}
#[test]
fn imported_table() {
assert_eq!(
ImportedTable::offset_table() as usize,
offset_of!(ImportedTable => table).get_byte_offset(),
);
assert_eq!(
ImportedTable::offset_vmctx() as usize,
offset_of!(ImportedTable => vmctx).get_byte_offset(),
LocalTable::offset_count() as usize,
offset_of!(LocalTable => count).get_byte_offset(),
);
}
@ -510,21 +388,8 @@ mod vm_offset_tests {
);
assert_eq!(
LocalMemory::offset_size() as usize,
offset_of!(LocalMemory => size).get_byte_offset(),
);
}
#[test]
fn imported_memory() {
assert_eq!(
ImportedMemory::offset_memory() as usize,
offset_of!(ImportedMemory => memory).get_byte_offset(),
);
assert_eq!(
ImportedMemory::offset_vmctx() as usize,
offset_of!(ImportedMemory => vmctx).get_byte_offset(),
LocalMemory::offset_bound() as usize,
offset_of!(LocalMemory => bound).get_byte_offset(),
);
}
@ -536,24 +401,16 @@ mod vm_offset_tests {
);
}
#[test]
fn imported_global() {
assert_eq!(
ImportedGlobal::offset_global() as usize,
offset_of!(ImportedGlobal => global).get_byte_offset(),
);
}
#[test]
fn cc_anyfunc() {
assert_eq!(
Anyfunc::offset_func() as usize,
offset_of!(Anyfunc => func_data: ImportedFunc => func).get_byte_offset(),
offset_of!(Anyfunc => func).get_byte_offset(),
);
assert_eq!(
Anyfunc::offset_vmctx() as usize,
offset_of!(Anyfunc => func_data: ImportedFunc => vmctx).get_byte_offset(),
offset_of!(Anyfunc => ctx).get_byte_offset(),
);
assert_eq!(
@ -595,15 +452,21 @@ mod vm_ctx_tests {
let mut local_backing = LocalBacking {
memories: Map::new().into_boxed_map(),
tables: Map::new().into_boxed_map(),
globals: Map::new().into_boxed_map(),
vm_memories: Map::new().into_boxed_map(),
vm_tables: Map::new().into_boxed_map(),
vm_globals: Map::new().into_boxed_map(),
};
let mut import_backing = ImportBacking {
functions: Map::new().into_boxed_map(),
memories: Map::new().into_boxed_map(),
tables: Map::new().into_boxed_map(),
globals: Map::new().into_boxed_map(),
vm_functions: Map::new().into_boxed_map(),
vm_memories: Map::new().into_boxed_map(),
vm_tables: Map::new().into_boxed_map(),
vm_globals: Map::new().into_boxed_map(),
};
let module = generate_module();
let data = &mut data as *mut _ as *mut c_void;
@ -680,7 +543,7 @@ mod vm_ctx_tests {
start_func: None,
func_assoc: Map::new(),
sig_registry: SigRegistry::new(),
sig_registry: SigRegistry,
}
}
}
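
The structural theme of this file's change: `Ctx` now stores `*mut *mut LocalMemory` (and likewise for tables and globals) instead of arrays of inline definitions, so a `Memory` that lives behind `Rc<RefCell<…>>` can rewrite its single heap-allocated `vm::LocalMemory` on grow, and every context pointing at it sees the new base and bound. A stand-alone sketch of why the extra indirection matters (illustrative types only):

```
struct LocalMemory {
    base: *mut u8,
    bound: usize,
}

fn main() {
    let mut storage = vec![0u8; 65_536];
    let mut lm = Box::new(LocalMemory {
        base: storage.as_mut_ptr(),
        bound: storage.len(),
    });
    let lm_ptr: *mut LocalMemory = &mut *lm;
    // Contexts store the *address of* the LocalMemory, not a copy of it.
    let ctx_memories: Vec<*mut LocalMemory> = vec![lm_ptr];

    // A grow reallocates the buffer and rewrites the one LocalMemory in place...
    storage.resize(2 * 65_536, 0);
    unsafe {
        (*lm_ptr).base = storage.as_mut_ptr();
        (*lm_ptr).bound = storage.len();
        // ...and every context observes the new bound through the indirection.
        assert_eq!((*ctx_memories[0]).bound, 2 * 65_536);
    }
}
```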

View File

@ -1,7 +1,10 @@
#![allow(clippy::cast_ptr_alignment)]
use crate::{
memory::LinearMemory,
memory::{DynamicMemory, StaticMemory},
structures::TypedIndex,
types::{ImportedMemoryIndex, LocalMemoryIndex, LocalTableIndex},
units::Pages,
vm,
};
@ -11,17 +14,14 @@ use crate::{
pub unsafe extern "C" fn local_static_memory_grow(
memory_index: LocalMemoryIndex,
by_pages: u32,
ctx: *mut vm::Ctx,
delta: Pages,
ctx: &mut vm::Ctx,
) -> i32 {
if let Some(old) = (*(*ctx).local_backing)
.memory(memory_index)
.grow_static(by_pages)
{
// Store the new size back into the vmctx.
(*(*ctx).memories.add(memory_index.index())).size =
(old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
old
let local_memory = *ctx.memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
old.0 as i32
} else {
-1
}
@ -29,71 +29,91 @@ pub unsafe extern "C" fn local_static_memory_grow(
pub unsafe extern "C" fn local_static_memory_size(
memory_index: LocalMemoryIndex,
ctx: *mut vm::Ctx,
) -> u32 {
(*(*ctx).local_backing).memory(memory_index).pages()
ctx: &vm::Ctx,
) -> Pages {
let local_memory = *ctx.memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
(*memory).size()
}
pub unsafe extern "C" fn local_dynamic_memory_grow(
memory_index: LocalMemoryIndex,
by_pages: u32,
ctx: *mut vm::Ctx,
delta: Pages,
ctx: &mut vm::Ctx,
) -> i32 {
if let Some(old) = (*(*ctx).local_backing)
.memory(memory_index)
.grow_dynamic(by_pages)
{
// Store the new size back into the vmctx.
(*(*ctx).memories.add(memory_index.index())).size =
(old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
old
let local_memory = *ctx.memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut DynamicMemory;
if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
old.0 as i32
} else {
-1
}
}
pub unsafe extern "C" fn local_dynamic_memory_size(
memory_index: LocalMemoryIndex,
ctx: &vm::Ctx,
) -> Pages {
let local_memory = *ctx.memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut DynamicMemory;
(*memory).size()
}
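
All of these intrinsics share one shape: fetch the `*mut LocalMemory` out of `ctx`, cast its `memory` field back to the concrete `*mut StaticMemory` or `*mut DynamicMemory`, and have grow return the old size in pages, or -1 on failure, matching wasm's `memory.grow`. A sketch of folding that `i32` convention back into the `Option`-style API used elsewhere in this change (hypothetical helper, not from the crate):

```
// Hypothetical helper: decode the intrinsic's i32 result.
fn grow_result(raw: i32) -> Option<u32> {
    if raw < 0 {
        None // -1: allocation failed or the maximum was exceeded
    } else {
        Some(raw as u32) // old size in pages, per wasm `memory.grow`
    }
}

fn main() {
    assert_eq!(grow_result(-1), None);
    assert_eq!(grow_result(3), Some(3));
}
```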
// +*****************************+
// |      IMPORTED MEMORIES      |
// +*****************************+
pub unsafe extern "C" fn imported_static_memory_grow(
imported_mem_index: ImportedMemoryIndex,
by_pages: u32,
caller_ctx: *mut vm::Ctx,
import_memory_index: ImportedMemoryIndex,
delta: Pages,
ctx: &mut vm::Ctx,
) -> i32 {
let import_backing = &*(*caller_ctx).import_backing;
let vm_imported_mem = import_backing.imported_memory(imported_mem_index);
let local_memory = *ctx.imported_memories.add(import_memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
// We can assume that the memory here is local to the callee ctx.
let local_mem_index = (*vm_imported_mem.memory).index;
if let Some(old) = (*(*vm_imported_mem.vmctx).local_backing)
.memory(local_mem_index)
.grow_dynamic(by_pages)
{
// Store the new size back into the vmctx.
(*(*vm_imported_mem.vmctx)
.memories
.add(local_mem_index.index()))
.size = (old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
old
if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
old.0 as i32
} else {
-1
}
}
pub unsafe extern "C" fn imported_static_memory_size(
imported_memory_index: ImportedMemoryIndex,
caller_ctx: *mut vm::Ctx,
) -> u32 {
let import_backing = &*(*caller_ctx).import_backing;
let vm_imported_mem = import_backing.imported_memory(imported_memory_index);
import_memory_index: ImportedMemoryIndex,
ctx: &vm::Ctx,
) -> Pages {
let local_memory = *ctx.imported_memories.add(import_memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
// We can assume that the memory here is local to the callee ctx.
let local_mem_index = (*vm_imported_mem.memory).index;
(*(*vm_imported_mem.vmctx).local_backing)
.memory(local_mem_index)
.pages()
(*memory).size()
}
pub unsafe extern "C" fn imported_dynamic_memory_grow(
memory_index: ImportedMemoryIndex,
delta: Pages,
ctx: &mut vm::Ctx,
) -> i32 {
let local_memory = *ctx.imported_memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut DynamicMemory;
if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
old.0 as i32
} else {
-1
}
}
pub unsafe extern "C" fn imported_dynamic_memory_size(
memory_index: ImportedMemoryIndex,
ctx: &vm::Ctx,
) -> Pages {
let local_memory = *ctx.imported_memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut DynamicMemory;
(*memory).size()
}
// +*****************************+
@ -102,16 +122,16 @@ pub unsafe extern "C" fn imported_static_memory_size(
pub unsafe extern "C" fn local_table_grow(
table_index: LocalTableIndex,
by_elems: u32,
ctx: *mut vm::Ctx,
delta: u32,
ctx: &mut vm::Ctx,
) -> i32 {
let _ = table_index;
let _ = by_elems;
let _ = delta;
let _ = ctx;
unimplemented!()
}
pub unsafe extern "C" fn local_table_size(table_index: LocalTableIndex, ctx: *mut vm::Ctx) -> u32 {
pub unsafe extern "C" fn local_table_size(table_index: LocalTableIndex, ctx: &vm::Ctx) -> u32 {
let _ = table_index;
let _ = ctx;
unimplemented!()