Add importable memories and dynamic memories

This commit is contained in:
Lachlan Sneff
2019-01-25 15:28:54 -08:00
parent a20627964c
commit e4686e67c4
26 changed files with 992 additions and 694 deletions

View File

@ -2,7 +2,7 @@ use crate::{
error::{LinkError, LinkResult},
export::{Context, Export},
import::ImportObject,
memory::LinearMemory,
memory::{Memory, WASM_PAGE_SIZE},
module::{ImportName, ModuleInner},
structures::{BoxedMap, Map, SliceMap, TypedIndex},
table::{TableBacking, TableElements},
@ -17,23 +17,23 @@ use std::{mem, slice};
#[derive(Debug)]
pub struct LocalBacking {
pub(crate) memories: BoxedMap<LocalMemoryIndex, LinearMemory>,
pub(crate) memories: BoxedMap<LocalMemoryIndex, Memory>,
pub(crate) tables: BoxedMap<LocalTableIndex, TableBacking>,
pub(crate) vm_memories: BoxedMap<LocalMemoryIndex, vm::LocalMemory>,
pub(crate) vm_memories: BoxedMap<LocalMemoryIndex, *mut vm::LocalMemory>,
pub(crate) vm_tables: BoxedMap<LocalTableIndex, vm::LocalTable>,
pub(crate) vm_globals: BoxedMap<LocalGlobalIndex, vm::LocalGlobal>,
}
impl LocalBacking {
pub fn memory(&mut self, local_memory_index: LocalMemoryIndex) -> &mut LinearMemory {
&mut self.memories[local_memory_index]
}
// impl LocalBacking {
// pub fn memory(&mut self, local_memory_index: LocalMemoryIndex) -> &mut Memory {
// &mut self.memories[local_memory_index]
// }
pub fn table(&mut self, local_table_index: LocalTableIndex) -> &mut TableBacking {
&mut self.tables[local_table_index]
}
}
// pub fn table(&mut self, local_table_index: LocalTableIndex) -> &mut TableBacking {
// &mut self.tables[local_table_index]
// }
// }
impl LocalBacking {
pub(crate) fn new(module: &ModuleInner, imports: &ImportBacking, vmctx: *mut vm::Ctx) -> Self {
@ -55,20 +55,20 @@ impl LocalBacking {
}
}
fn generate_memories(module: &ModuleInner) -> BoxedMap<LocalMemoryIndex, LinearMemory> {
fn generate_memories(module: &ModuleInner) -> BoxedMap<LocalMemoryIndex, Memory> {
let mut memories = Map::with_capacity(module.memories.len());
for (_, mem) in &module.memories {
for (_, &desc) in &module.memories {
// If we use emscripten, we set a fixed initial and maximum
// let memory = if options.abi == InstanceABI::Emscripten {
// // We use MAX_PAGES, so at the end the result is:
// // (initial * LinearMemory::PAGE_SIZE) == LinearMemory::DEFAULT_HEAP_SIZE
// // However, it should be: (initial * LinearMemory::PAGE_SIZE) == 16777216
// LinearMemory::new(LinearMemory::MAX_PAGES, None)
// // (initial * Memory::PAGE_SIZE) == Memory::DEFAULT_HEAP_SIZE
// // However, it should be: (initial * Memory::PAGE_SIZE) == 16777216
// Memory::new(Memory::MAX_PAGES, None)
// } else {
// LinearMemory::new(memory.minimum, memory.maximum.map(|m| m as u32))
// Memory::new(memory.minimum, memory.maximum.map(|m| m as u32))
// };
let memory = LinearMemory::new(mem);
let memory = Memory::new(desc).expect("unable to create memory");
memories.push(memory);
}
@ -78,8 +78,8 @@ impl LocalBacking {
fn finalize_memories(
module: &ModuleInner,
imports: &ImportBacking,
memories: &mut SliceMap<LocalMemoryIndex, LinearMemory>,
) -> BoxedMap<LocalMemoryIndex, vm::LocalMemory> {
memories: &mut SliceMap<LocalMemoryIndex, Memory>,
) -> BoxedMap<LocalMemoryIndex, *mut vm::LocalMemory> {
// For each init that has some data...
for init in module
.data_initializers
@ -91,7 +91,7 @@ impl LocalBacking {
Initializer::Const(_) => panic!("a const initializer must be the i32 type"),
Initializer::GetGlobal(imported_global_index) => {
if module.imported_globals[imported_global_index].1.ty == Type::I32 {
unsafe { (*imports.globals[imported_global_index].global).data as u32 }
unsafe { (*imports.vm_globals[imported_global_index].global).data as u32 }
} else {
panic!("unsupported global type for initialzer")
}
@ -100,20 +100,23 @@ impl LocalBacking {
match init.memory_index.local_or_import(module) {
LocalOrImport::Local(local_memory_index) => {
let memory_desc = &module.memories[local_memory_index];
let memory_desc = module.memories[local_memory_index];
let data_top = init_base + init.data.len();
assert!((memory_desc.min * LinearMemory::PAGE_SIZE) as usize >= data_top);
let mem: &mut LinearMemory = &mut memories[local_memory_index];
assert!(memory_desc.min as usize * WASM_PAGE_SIZE >= data_top);
let mem: &mut Memory = &mut memories[local_memory_index];
let mem_init_view =
&mut mem.as_slice_mut()[init_base..init_base + init.data.len()];
let mem_init_view = &mut mem[init_base..init_base + init.data.len()];
mem_init_view.copy_from_slice(&init.data);
}
LocalOrImport::Import(imported_memory_index) => {
let vm_imported_memory = imports.imported_memory(imported_memory_index);
// Write the initialization data to the memory that
// we think the imported memory is.
unsafe {
let local_memory = &(*vm_imported_memory.memory);
let local_memory = &*imports.vm_memories[imported_memory_index];
let memory_slice =
slice::from_raw_parts_mut(local_memory.base, local_memory.size);
slice::from_raw_parts_mut(local_memory.base, local_memory.bound);
let mem_init_view =
&mut memory_slice[init_base..init_base + init.data.len()];
@ -125,7 +128,7 @@ impl LocalBacking {
memories
.iter_mut()
.map(|(index, mem)| mem.into_vm_memory(index))
.map(|(_, mem)| mem.vm_local_memory())
.collect::<Map<_, _>>()
.into_boxed_map()
}
@ -133,7 +136,7 @@ impl LocalBacking {
fn generate_tables(module: &ModuleInner) -> BoxedMap<LocalTableIndex, TableBacking> {
let mut tables = Map::with_capacity(module.tables.len());
for (_, table) in &module.tables {
for (_, &table) in &module.tables {
let table_backing = TableBacking::new(table);
tables.push(table_backing);
}
@ -154,7 +157,7 @@ impl LocalBacking {
Initializer::Const(_) => panic!("a const initializer must be the i32 type"),
Initializer::GetGlobal(imported_global_index) => {
if module.imported_globals[imported_global_index].1.ty == Type::I32 {
unsafe { (*imports.globals[imported_global_index].global).data as u32 }
unsafe { (*imports.vm_globals[imported_global_index].global).data as u32 }
} else {
panic!("unsupported global type for initialzer")
}
@ -186,7 +189,7 @@ impl LocalBacking {
vmctx,
},
LocalOrImport::Import(imported_func_index) => {
imports.functions[imported_func_index].clone()
imports.vm_functions[imported_func_index].clone()
}
};
@ -199,7 +202,7 @@ impl LocalBacking {
let (_, table_description) = module.imported_tables[imported_table_index];
match table_description.ty {
ElementType::Anyfunc => {
let imported_table = &imports.tables[imported_table_index];
let imported_table = &imports.vm_tables[imported_table_index];
let imported_local_table = (*imported_table).table;
let mut elements = unsafe {
@ -237,7 +240,7 @@ impl LocalBacking {
vmctx,
},
LocalOrImport::Import(imported_func_index) => {
imports.functions[imported_func_index].clone()
imports.vm_functions[imported_func_index].clone()
}
};
@ -283,7 +286,7 @@ impl LocalBacking {
Value::F64(x) => x.to_bits(),
},
Initializer::GetGlobal(imported_global_index) => unsafe {
(*imports.globals[imported_global_index].global).data
(*imports.vm_globals[imported_global_index].global).data
},
};
}
@ -294,10 +297,12 @@ impl LocalBacking {
#[derive(Debug)]
pub struct ImportBacking {
pub(crate) functions: BoxedMap<ImportedFuncIndex, vm::ImportedFunc>,
pub(crate) memories: BoxedMap<ImportedMemoryIndex, vm::ImportedMemory>,
pub(crate) tables: BoxedMap<ImportedTableIndex, vm::ImportedTable>,
pub(crate) globals: BoxedMap<ImportedGlobalIndex, vm::ImportedGlobal>,
pub(crate) memories: BoxedMap<ImportedMemoryIndex, Memory>,
pub(crate) vm_functions: BoxedMap<ImportedFuncIndex, vm::ImportedFunc>,
pub(crate) vm_memories: BoxedMap<ImportedMemoryIndex, *mut vm::LocalMemory>,
pub(crate) vm_tables: BoxedMap<ImportedTableIndex, vm::ImportedTable>,
pub(crate) vm_globals: BoxedMap<ImportedGlobalIndex, vm::ImportedGlobal>,
}
impl ImportBacking {
@ -309,25 +314,25 @@ impl ImportBacking {
let mut failed = false;
let mut link_errors = vec![];
let functions = import_functions(module, imports, vmctx).unwrap_or_else(|le| {
let vm_functions = import_functions(module, imports, vmctx).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
});
let memories = import_memories(module, imports, vmctx).unwrap_or_else(|le| {
let (vm_memories, memories) = import_memories(module, imports).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
(Map::new().into_boxed_map(), Map::new().into_boxed_map())
});
let vm_tables = import_tables(module, imports, vmctx).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
});
let tables = import_tables(module, imports, vmctx).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
});
let globals = import_globals(module, imports).unwrap_or_else(|le| {
let vm_globals = import_globals(module, imports).unwrap_or_else(|le| {
failed = true;
link_errors.extend(le);
Map::new().into_boxed_map()
@ -337,20 +342,18 @@ impl ImportBacking {
Err(link_errors)
} else {
Ok(ImportBacking {
functions,
memories,
tables,
globals,
vm_functions,
vm_memories,
vm_tables,
vm_globals,
})
}
}
pub fn imported_func(&self, func_index: ImportedFuncIndex) -> vm::ImportedFunc {
self.functions[func_index].clone()
}
pub fn imported_memory(&self, memory_index: ImportedMemoryIndex) -> vm::ImportedMemory {
self.memories[memory_index].clone()
pub fn imported_func(&self, index: ImportedFuncIndex) -> vm::ImportedFunc {
self.vm_functions[index].clone()
}
}
@ -424,36 +427,30 @@ fn import_functions(
fn import_memories(
module: &ModuleInner,
imports: &mut ImportObject,
vmctx: *mut vm::Ctx,
) -> LinkResult<BoxedMap<ImportedMemoryIndex, vm::ImportedMemory>> {
) -> LinkResult<(
BoxedMap<ImportedMemoryIndex, *mut vm::LocalMemory>,
BoxedMap<ImportedMemoryIndex, Memory>,
)> {
let mut link_errors = vec![];
let mut memories = Map::with_capacity(module.imported_memories.len());
let mut vm_memories = Map::with_capacity(module.imported_memories.len());
for (_index, (ImportName { namespace, name }, expected_memory_desc)) in
&module.imported_memories
{
let memory_import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
.get_namespace(&namespace)
.and_then(|namespace| namespace.get_export(&name));
match memory_import {
Some(Export::Memory {
local,
ctx,
memory: memory_desc,
}) => {
if expected_memory_desc.fits_in_imported(&memory_desc) {
memories.push(vm::ImportedMemory {
memory: local.inner(),
vmctx: match ctx {
Context::External(ctx) => ctx,
Context::Internal => vmctx,
},
});
Some(Export::Memory(mut memory)) => {
if expected_memory_desc.fits_in_imported(memory.description()) {
memories.push(memory.clone());
vm_memories.push(memory.vm_local_memory());
} else {
link_errors.push(LinkError::IncorrectMemoryDescription {
namespace: namespace.clone(),
name: name.clone(),
expected: expected_memory_desc.clone(),
found: memory_desc.clone(),
expected: *expected_memory_desc,
found: memory.description(),
});
}
}
@ -484,7 +481,7 @@ fn import_memories(
if link_errors.len() > 0 {
Err(link_errors)
} else {
Ok(memories.into_boxed_map())
Ok((vm_memories.into_boxed_map(), memories.into_boxed_map()))
}
}
@ -497,13 +494,13 @@ fn import_tables(
let mut tables = Map::with_capacity(module.imported_tables.len());
for (_index, (ImportName { namespace, name }, expected_table_desc)) in &module.imported_tables {
let table_import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
.get_namespace(&namespace)
.and_then(|namespace| namespace.get_export(&name));
match table_import {
Some(Export::Table {
local,
ctx,
table: table_desc,
desc: table_desc,
}) => {
if expected_table_desc.fits_in_imported(&table_desc) {
tables.push(vm::ImportedTable {
@ -564,8 +561,8 @@ fn import_globals(
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
match import {
Some(Export::Global { local, global }) => {
if global == *imported_global_desc {
Some(Export::Global { local, desc }) => {
if desc == *imported_global_desc {
globals.push(vm::ImportedGlobal {
global: local.inner(),
});
@ -574,7 +571,7 @@ fn import_globals(
namespace: namespace.clone(),
name: name.clone(),
expected: imported_global_desc.clone(),
found: global.clone(),
found: desc.clone(),
});
}
}

View File

@ -1,4 +1,4 @@
use crate::types::{FuncSig, GlobalDesc, Memory, MemoryIndex, Table, TableIndex, Type};
use crate::types::{FuncSig, GlobalDesc, MemoryDesc, MemoryIndex, TableDesc, TableIndex, Type};
pub type Result<T> = std::result::Result<T, Box<Error>>;
pub type CompileResult<T> = std::result::Result<T, Box<CompileError>>;
@ -49,14 +49,14 @@ pub enum LinkError {
IncorrectMemoryDescription {
namespace: String,
name: String,
expected: Memory,
found: Memory,
expected: MemoryDesc,
found: MemoryDesc,
},
IncorrectTableDescription {
namespace: String,
name: String,
expected: Table,
found: Table,
expected: TableDesc,
found: TableDesc,
},
IncorrectGlobalDescription {
namespace: String,

View File

@ -1,8 +1,9 @@
use crate::{
instance::InstanceInner,
memory::Memory,
module::ExportIndex,
module::ModuleInner,
types::{FuncSig, GlobalDesc, Memory, Table},
types::{FuncSig, GlobalDesc, TableDesc},
vm,
};
use hashbrown::hash_map;
@ -20,19 +21,15 @@ pub enum Export {
ctx: Context,
signature: FuncSig,
},
Memory {
local: MemoryPointer,
ctx: Context,
memory: Memory,
},
Memory(Memory),
Table {
local: TablePointer,
ctx: Context,
table: Table,
desc: TableDesc,
},
Global {
local: GlobalPointer,
global: GlobalDesc,
desc: GlobalDesc,
},
}
@ -52,22 +49,6 @@ impl FuncPointer {
}
}
#[derive(Debug, Clone)]
pub struct MemoryPointer(*mut vm::LocalMemory);
impl MemoryPointer {
/// This needs to be unsafe because there is
/// no way to check whether the passed function
/// is valid and has the right signature.
pub unsafe fn new(f: *mut vm::LocalMemory) -> Self {
MemoryPointer(f)
}
pub(crate) fn inner(&self) -> *mut vm::LocalMemory {
self.0
}
}
#[derive(Debug, Clone)]
pub struct TablePointer(*mut vm::LocalTable);

View File

@ -5,6 +5,16 @@ pub trait LikeNamespace {
fn get_export(&mut self, name: &str) -> Option<Export>;
}
pub trait IsExport {
fn to_export(&mut self) -> Export;
}
impl IsExport for Export {
fn to_export(&mut self) -> Export {
self.clone()
}
}
/// All of the import data used when instantiating.
///
/// It's suggested that you use the [`imports!`] macro
@ -74,7 +84,7 @@ impl ImportObject {
}
pub struct Namespace {
map: HashMap<String, Export>,
map: HashMap<String, Box<dyn IsExport>>,
}
impl Namespace {
@ -84,13 +94,19 @@ impl Namespace {
}
}
pub fn insert(&mut self, name: impl Into<String>, export: Export) -> Option<Export> {
self.map.insert(name.into(), export)
pub fn insert<S, E>(&mut self, name: S, export: E) -> Option<Box<dyn IsExport>>
where
S: Into<String>,
E: IsExport + 'static,
{
self.map.insert(name.into(), Box::new(export))
}
}
impl LikeNamespace for Namespace {
fn get_export(&mut self, name: &str) -> Option<Export> {
self.map.get(name).cloned()
self.map
.get_mut(name)
.map(|is_export| is_export.to_export())
}
}

View File

@ -2,13 +2,12 @@ use crate::{
backend::Token,
backing::{ImportBacking, LocalBacking},
error::{CallError, CallResult, ResolveError, ResolveResult, Result},
export::{
Context, Export, ExportIter, FuncPointer, GlobalPointer, MemoryPointer, TablePointer,
},
export::{Context, Export, ExportIter, FuncPointer, GlobalPointer, TablePointer},
import::{ImportObject, LikeNamespace},
memory::Memory,
module::{ExportIndex, Module, ModuleInner},
types::{
FuncIndex, FuncSig, GlobalDesc, GlobalIndex, LocalOrImport, Memory, MemoryIndex, Table,
FuncIndex, FuncSig, GlobalDesc, GlobalIndex, LocalOrImport, MemoryIndex, TableDesc,
TableIndex, Value,
},
vm,
@ -206,7 +205,7 @@ impl Instance {
let vmctx = match func_index.local_or_import(&self.module) {
LocalOrImport::Local(_) => &mut *self.inner.vmctx,
LocalOrImport::Import(imported_func_index) => {
self.inner.import_backing.functions[imported_func_index].vmctx
self.inner.import_backing.vm_functions[imported_func_index].vmctx
}
};
@ -245,29 +244,22 @@ impl InstanceInner {
}
}
ExportIndex::Memory(memory_index) => {
let (local, ctx, memory) = self.get_memory_from_index(module, *memory_index);
Export::Memory {
local,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
ctx @ Context::External(_) => ctx,
},
memory,
}
let memory = self.get_memory_from_index(module, *memory_index);
Export::Memory(memory)
}
ExportIndex::Global(global_index) => {
let (local, global) = self.get_global_from_index(module, *global_index);
Export::Global { local, global }
let (local, desc) = self.get_global_from_index(module, *global_index);
Export::Global { local, desc }
}
ExportIndex::Table(table_index) => {
let (local, ctx, table) = self.get_table_from_index(module, *table_index);
let (local, ctx, desc) = self.get_table_from_index(module, *table_index);
Export::Table {
local,
ctx: match ctx {
Context::Internal => Context::External(&mut *self.vmctx),
ctx @ Context::External(_) => ctx,
},
table,
desc,
}
}
}
@ -294,7 +286,7 @@ impl InstanceInner {
Context::Internal,
),
LocalOrImport::Import(imported_func_index) => {
let imported_func = &self.import_backing.functions[imported_func_index];
let imported_func = &self.import_backing.vm_functions[imported_func_index];
(
imported_func.func as *const _,
Context::External(imported_func.vmctx),
@ -307,35 +299,11 @@ impl InstanceInner {
(unsafe { FuncPointer::new(func_ptr) }, ctx, signature)
}
fn get_memory_from_index(
&mut self,
module: &ModuleInner,
mem_index: MemoryIndex,
) -> (MemoryPointer, Context, Memory) {
fn get_memory_from_index(&mut self, module: &ModuleInner, mem_index: MemoryIndex) -> Memory {
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => {
let vm_mem = &mut self.backing.vm_memories[local_mem_index];
(
unsafe { MemoryPointer::new(vm_mem) },
Context::Internal,
*module
.memories
.get(local_mem_index)
.expect("broken invariant, memories"),
)
}
LocalOrImport::Local(local_mem_index) => self.backing.memories[local_mem_index].clone(),
LocalOrImport::Import(imported_mem_index) => {
let &(_, mem) = &module
.imported_memories
.get(imported_mem_index)
.expect("missing imported memory index");
let vm::ImportedMemory { memory, vmctx } =
&self.import_backing.memories[imported_mem_index];
(
unsafe { MemoryPointer::new(*memory) },
Context::External(*vmctx),
*mem,
)
self.import_backing.memories[imported_mem_index].clone()
}
}
}
@ -363,7 +331,7 @@ impl InstanceInner {
.get(imported_global_index)
.expect("missing imported global index");
let vm::ImportedGlobal { global } =
&self.import_backing.globals[imported_global_index];
&self.import_backing.vm_globals[imported_global_index];
(
unsafe { GlobalPointer::new(*global) },
*imported_global_desc,
@ -376,7 +344,7 @@ impl InstanceInner {
&mut self,
module: &ModuleInner,
table_index: TableIndex,
) -> (TablePointer, Context, Table) {
) -> (TablePointer, Context, TableDesc) {
match table_index.local_or_import(module) {
LocalOrImport::Local(local_table_index) => {
let vm_table = &mut self.backing.vm_tables[local_table_index];
@ -390,16 +358,16 @@ impl InstanceInner {
)
}
LocalOrImport::Import(imported_table_index) => {
let &(_, tab) = &module
let &(_, desc) = &module
.imported_tables
.get(imported_table_index)
.expect("missing imported table index");
let vm::ImportedTable { table, vmctx } =
&self.import_backing.tables[imported_table_index];
&self.import_backing.vm_tables[imported_table_index];
(
unsafe { TablePointer::new(*table) },
Context::External(*vmctx),
*tab,
*desc,
)
}
}
@ -456,7 +424,7 @@ impl<'a> Function<'a> {
let vmctx = match self.func_index.local_or_import(self.module) {
LocalOrImport::Local(_) => &mut *self.instance_inner.vmctx,
LocalOrImport::Import(imported_func_index) => {
self.instance_inner.import_backing.functions[imported_func_index].vmctx
self.instance_inner.import_backing.vm_functions[imported_func_index].vmctx
}
};
@ -487,7 +455,7 @@ impl<'a> Function<'a> {
.unwrap()
.as_ptr(),
LocalOrImport::Import(import_func_index) => {
self.instance_inner.import_backing.functions[import_func_index].func
self.instance_inner.import_backing.vm_functions[import_func_index].func
}
}
}

View File

@ -1,3 +1,5 @@
#![allow(unused)]
macro_rules! debug {
($fmt:expr) => (if cfg!(any(debug_assertions, feature="debug")) { println!(concat!("wasmer-runtime(:{})::", $fmt), line!()) });
($fmt:expr, $($arg:tt)*) => (if cfg!(any(debug_assertions, feature="debug")) { println!(concat!("wasmer-runtime(:{})::", $fmt, "\n"), line!(), $($arg)*) });

View File

@ -1,224 +0,0 @@
use std::ops::{Deref, DerefMut};
use crate::{
sys,
types::{LocalMemoryIndex, Memory},
vm,
};
/// A linear memory instance.
#[derive(Debug)]
pub struct LinearMemory {
/// The actual memory allocation.
memory: sys::Memory,
/// The current number of wasm pages.
current: u32,
// The maximum size the WebAssembly Memory is allowed to grow
// to, in units of WebAssembly pages. When present, the maximum
// parameter acts as a hint to the engine to reserve memory up
// front. However, the engine may ignore or clamp this reservation
// request. In general, most WebAssembly modules shouldn't need
// to set a maximum.
max: Option<u32>,
// The size of the extra guard pages after the end.
// Is used to optimize loads and stores with constant offsets.
offset_guard_size: usize,
/// Requires exception catching to handle out-of-bounds accesses.
requires_signal_catch: bool,
}
/// It holds the raw bytes of memory accessed by a WebAssembly Instance
impl LinearMemory {
pub(crate) const PAGE_SIZE: u32 = 65_536;
pub(crate) const MAX_PAGES: u32 = 65_536;
#[doc(hidden)]
pub const DEFAULT_HEAP_SIZE: usize = 1 << 32; // 4 GiB
#[doc(hidden)]
pub const DEFAULT_GUARD_SIZE: usize = 1 << 31; // 2 GiB
pub(crate) const DEFAULT_SIZE: usize = Self::DEFAULT_HEAP_SIZE + Self::DEFAULT_GUARD_SIZE; // 6 GiB
/// Create a new linear memory instance with specified initial and maximum number of pages.
///
/// `maximum` cannot be set to more than `65536` pages.
pub(crate) fn new(mem: &Memory) -> Self {
assert!(mem.min <= Self::MAX_PAGES);
assert!(mem.max.is_none() || mem.max.unwrap() <= Self::MAX_PAGES);
let (mmap_size, initial_pages, offset_guard_size, requires_signal_catch) = if
/*mem.is_static_heap()*/
true {
(Self::DEFAULT_SIZE, mem.min, Self::DEFAULT_GUARD_SIZE, true)
// This is a static heap
} else {
// this is a dynamic heap
assert!(!mem.shared, "shared memories must have a maximum size.");
(
mem.min as usize * Self::PAGE_SIZE as usize,
mem.min,
0,
false,
)
};
let mut memory = sys::Memory::with_size(mmap_size).unwrap();
// map initial pages as readwrite since the inital mmap is mapped as not accessible.
if initial_pages != 0 {
unsafe {
memory
.protect(
0..(initial_pages as usize * Self::PAGE_SIZE as usize),
sys::Protect::ReadWrite,
)
.expect("unable to make memory accessible");
}
}
Self {
memory,
current: initial_pages,
max: mem.max,
offset_guard_size,
requires_signal_catch,
}
}
/// Returns an base address of this linear memory.
fn base(&mut self) -> *mut u8 {
self.memory.as_ptr()
}
/// Returns the size in bytes
pub(crate) fn size(&self) -> usize {
self.current as usize * Self::PAGE_SIZE as usize
}
pub fn pages(&self) -> u32 {
self.current
}
/// Returns the maximum number of wasm pages allowed.
pub fn max(&self) -> u32 {
self.max.unwrap_or(Self::MAX_PAGES)
}
pub(crate) fn into_vm_memory(&mut self, index: LocalMemoryIndex) -> vm::LocalMemory {
vm::LocalMemory {
base: self.base(),
size: self.size(),
index,
}
}
/// Grow memory by the specified amount of pages.
///
/// Returns `None` if memory can't be grown by the specified amount
/// of pages.
pub(crate) fn grow_dynamic(&mut self, add_pages: u32) -> Option<i32> {
debug!("grow_memory_dynamic called!");
assert!(self.max.is_none());
if add_pages == 0 {
return Some(self.current as _);
}
let prev_pages = self.current;
let new_pages = match self.current.checked_add(add_pages) {
Some(new_pages) => new_pages,
None => return None,
};
if let Some(val) = self.max {
if new_pages > val {
return None;
}
// Wasm linear memories are never allowed to grow beyond what is
// indexable. If the memory has no maximum, enforce the greatest
// limit here.
} else if new_pages >= Self::MAX_PAGES {
return None;
}
let new_bytes = (new_pages * Self::PAGE_SIZE) as usize;
if new_bytes > self.memory.size() - self.offset_guard_size {
let memory_size = new_bytes.checked_add(self.offset_guard_size)?;
let mut new_memory = sys::Memory::with_size(memory_size).ok()?;
unsafe {
new_memory
.protect(0..new_bytes, sys::Protect::ReadWrite)
.ok()?;
}
let copy_size = self.memory.size() - self.offset_guard_size;
unsafe {
new_memory.as_slice_mut()[..copy_size]
.copy_from_slice(&self.memory.as_slice()[..copy_size]);
}
self.memory = new_memory;
}
self.current = new_pages;
Some(prev_pages as i32)
}
pub(crate) fn grow_static(&mut self, add_pages: u32) -> Option<i32> {
// debug!("grow_memory_static called!");
// assert!(self.max.is_some());
if add_pages == 0 {
return Some(self.current as _);
}
let prev_pages = self.current;
let new_pages = match self.current.checked_add(add_pages) {
Some(new_pages) => new_pages,
None => return None,
};
if let Some(val) = self.max {
if new_pages > val {
return None;
}
// Wasm linear memories are never allowed to grow beyond what is
// indexable. If the memory has no maximum, enforce the greatest
// limit here.
} else if new_pages >= Self::MAX_PAGES {
return None;
}
let prev_bytes = (prev_pages * Self::PAGE_SIZE) as usize;
let new_bytes = (new_pages * Self::PAGE_SIZE) as usize;
unsafe {
self.memory
.protect(prev_bytes..new_bytes, sys::Protect::ReadWrite)
.ok()?;
}
self.current = new_pages;
Some(prev_pages as i32)
}
}
impl Deref for LinearMemory {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe { self.memory.as_slice() }
}
}
impl DerefMut for LinearMemory {
fn deref_mut(&mut self) -> &mut [u8] {
unsafe { self.memory.as_slice_mut() }
}
}

View File

@ -0,0 +1,105 @@
use crate::{
memory::{WASM_MAX_PAGES, WASM_PAGE_SIZE},
sys,
types::MemoryDesc,
vm,
};
pub const DYNAMIC_GUARD_SIZE: usize = 4096;
/// A dynamically-sized linear memory backing.
///
/// The allocation is sized to exactly the current number of pages (plus a
/// small guard region) and is re-allocated and copied on every `grow`, so
/// the base pointer may change across grows — the caller-visible
/// `vm::LocalMemory` is updated with the new base/bound on each grow.
pub struct DynamicMemory {
    /// The actual backing allocation.
    memory: sys::Memory,
    /// Current size, in units of wasm pages.
    current: u32,
    /// Optional maximum size, in wasm pages, from the memory description.
    max: Option<u32>,
}

impl DynamicMemory {
    /// Allocates a new dynamic memory of `desc.min` pages and points the
    /// caller-provided `vm::LocalMemory` header at it.
    ///
    /// Returns `None` if the underlying allocation or page-protection call
    /// fails.
    pub(super) fn new(desc: MemoryDesc, local: &mut vm::LocalMemory) -> Option<Box<Self>> {
        let memory = {
            // Allocate min pages plus the guard region. The fresh mapping is
            // inaccessible, so the initial pages are marked read/write below.
            let mut memory =
                sys::Memory::with_size((desc.min as usize * WASM_PAGE_SIZE) + DYNAMIC_GUARD_SIZE)
                    .ok()?;

            if desc.min != 0 {
                unsafe {
                    memory
                        .protect(
                            0..(desc.min as usize * WASM_PAGE_SIZE),
                            sys::Protect::ReadWrite,
                        )
                        .ok()?;
                }
            }

            memory
        };

        let mut storage = Box::new(DynamicMemory {
            memory,
            current: desc.min,
            max: desc.max,
        });
        let storage_ptr: *mut DynamicMemory = &mut *storage;

        // Publish base/bound plus a type-erased pointer back to this storage
        // so the runtime can find it again from the vm header.
        local.base = storage.memory.as_ptr();
        local.bound = desc.min as usize * WASM_PAGE_SIZE;
        local.memory = storage_ptr as *mut ();

        Some(storage)
    }

    /// Returns the current size of this memory, in wasm pages.
    pub fn current(&self) -> u32 {
        self.current
    }

    /// Grows this memory by `delta` pages.
    ///
    /// On success returns the previous page count. Returns `None` on page
    /// arithmetic overflow, when the new size would exceed the declared
    /// maximum or the wasm limit, or when allocation fails. The backing
    /// allocation is replaced and the old contents copied over, and `local`
    /// is updated with the new base/bound.
    pub fn grow(&mut self, delta: u32, local: &mut vm::LocalMemory) -> Option<u32> {
        if delta == 0 {
            return Some(self.current);
        }

        let new_pages = self.current.checked_add(delta)?;

        if let Some(max) = self.max {
            if new_pages > max {
                return None;
            }
        }

        if new_pages as usize > WASM_MAX_PAGES {
            return None;
        }

        let mut new_memory =
            sys::Memory::with_size((new_pages as usize * WASM_PAGE_SIZE) + DYNAMIC_GUARD_SIZE)
                .ok()?;

        unsafe {
            new_memory
                .protect(
                    0..(new_pages as usize * WASM_PAGE_SIZE),
                    sys::Protect::ReadWrite,
                )
                .ok()?;

            new_memory.as_slice_mut()[..self.current as usize * WASM_PAGE_SIZE]
                .copy_from_slice(&self.memory.as_slice()[..self.current as usize * WASM_PAGE_SIZE]);
        }

        self.memory = new_memory; // The old memory gets dropped.

        local.base = self.memory.as_ptr();
        local.bound = new_pages as usize * WASM_PAGE_SIZE;

        let old_pages = self.current;
        self.current = new_pages;
        Some(old_pages)
    }

    /// Views the currently-accessible pages as an immutable byte slice.
    pub fn as_slice(&self) -> &[u8] {
        unsafe { &self.memory.as_slice()[0..self.current as usize * WASM_PAGE_SIZE] }
    }

    /// Views the currently-accessible pages as a mutable byte slice.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        unsafe { &mut self.memory.as_slice_mut()[0..self.current as usize * WASM_PAGE_SIZE] }
    }
}

View File

@ -0,0 +1,156 @@
use crate::{
export::Export,
import::IsExport,
memory::dynamic::DYNAMIC_GUARD_SIZE,
memory::static_::{SAFE_STATIC_GUARD_SIZE, SAFE_STATIC_HEAP_SIZE},
types::MemoryDesc,
vm,
};
use std::{cell::UnsafeCell, fmt, ptr, rc::Rc};
pub use self::dynamic::DynamicMemory;
pub use self::static_::{SharedStaticMemory, StaticMemory};
mod dynamic;
mod static_;
pub const WASM_PAGE_SIZE: usize = 65_536;
pub const WASM_MAX_PAGES: usize = 65_536;
/// A runtime linear memory, usable both as an instance-local memory and as
/// an importable/exportable one.
///
/// Cloning a `Memory` is shallow: all clones share the same backing storage
/// through an `Rc`, so growth performed through one handle is visible to
/// every other handle.
pub struct Memory {
    /// The limits/flags this memory was created from.
    desc: MemoryDesc,
    /// Shared, interiorly-mutable backing storage paired with the
    /// `vm::LocalMemory` header (base/bound/backpointer) that generated code
    /// reads. The header is boxed so its address stays stable across clones.
    storage: Rc<UnsafeCell<(MemoryStorage, Box<vm::LocalMemory>)>>,
}

impl Memory {
    /// Creates a new memory matching `desc`, selecting a dynamic or static
    /// backing strategy via `desc.memory_type()`.
    ///
    /// Returns `None` if the backing allocation fails. Shared (threaded)
    /// memories are not implemented yet and hit `unimplemented!`.
    pub fn new(desc: MemoryDesc) -> Option<Self> {
        // Start with a zeroed header; the chosen backing fills in
        // base/bound/memory as part of its constructor.
        let mut vm_local_memory = Box::new(vm::LocalMemory {
            base: ptr::null_mut(),
            bound: 0,
            memory: ptr::null_mut(),
        });

        let memory_storage = match desc.memory_type() {
            MemoryType::Dynamic => {
                MemoryStorage::Dynamic(DynamicMemory::new(desc, &mut vm_local_memory)?)
            }
            MemoryType::Static => {
                MemoryStorage::Static(StaticMemory::new(desc, &mut vm_local_memory)?)
            }
            MemoryType::SharedStatic => unimplemented!(),
        };

        Some(Memory {
            desc,
            storage: Rc::new(UnsafeCell::new((memory_storage, vm_local_memory))),
        })
    }

    /// Returns the description (limits/flags) this memory was created with.
    pub fn description(&self) -> MemoryDesc {
        self.desc
    }

    /// Grows the memory by `delta` pages, returning the previous page count
    /// on success, or `None` if limits would be exceeded or allocation fails.
    pub fn grow(&mut self, delta: u32) -> Option<u32> {
        match unsafe { &mut *self.storage.get() } {
            (MemoryStorage::Dynamic(dynamic_memory), local) => dynamic_memory.grow(delta, local),
            (MemoryStorage::Static(static_memory), local) => static_memory.grow(delta, local),
            (MemoryStorage::SharedStatic(_), _) => unimplemented!(),
        }
    }

    /// This returns the number of pages in the memory.
    pub fn current_pages(&self) -> u32 {
        match unsafe { &*self.storage.get() } {
            (MemoryStorage::Dynamic(dynamic_memory), _) => dynamic_memory.current(),
            (MemoryStorage::Static(static_memory), _) => static_memory.current(),
            (MemoryStorage::SharedStatic(_), _) => unimplemented!(),
        }
    }

    /// Views the accessible pages as an immutable byte slice.
    ///
    /// NOTE(review): this hands out `&[u8]` while other clones of the same
    /// `Memory` can mutate/grow through the shared `UnsafeCell` — soundness
    /// appears to rely on single-threaded, non-overlapping use; worth
    /// confirming at the call sites.
    pub fn as_slice(&self) -> &[u8] {
        match unsafe { &*self.storage.get() } {
            (MemoryStorage::Dynamic(dynamic_memory), _) => dynamic_memory.as_slice(),
            (MemoryStorage::Static(static_memory), _) => static_memory.as_slice(),
            (MemoryStorage::SharedStatic(_), _) => panic!("cannot slice a shared memory"),
        }
    }

    /// Views the accessible pages as a mutable byte slice.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        match unsafe { &mut *self.storage.get() } {
            (MemoryStorage::Dynamic(dynamic_memory), _) => dynamic_memory.as_slice_mut(),
            (MemoryStorage::Static(static_memory), _) => static_memory.as_slice_mut(),
            (MemoryStorage::SharedStatic(_), _) => panic!("cannot slice a shared memory"),
        }
    }

    /// Returns a raw pointer to the shared `vm::LocalMemory` header; stable
    /// across clones because the header lives behind the shared `Rc`.
    pub(crate) fn vm_local_memory(&mut self) -> *mut vm::LocalMemory {
        &mut *unsafe { &mut *self.storage.get() }.1
    }
}
impl IsExport for Memory {
    /// Exposes this memory as an export by handing out a shallow clone,
    /// which shares the same backing storage as `self`.
    fn to_export(&mut self) -> Export {
        let handle = self.clone();
        Export::Memory(handle)
    }
}
impl Clone for Memory {
fn clone(&self) -> Self {
Self {
desc: self.desc,
storage: Rc::clone(&self.storage),
}
}
}
/// The concrete backing strategy behind a `Memory`.
pub enum MemoryStorage {
    Dynamic(Box<DynamicMemory>),
    Static(Box<StaticMemory>),
    SharedStatic(Box<SharedStaticMemory>),
}

impl MemoryStorage {
    /// Reports which backing strategy this storage uses.
    pub fn to_type(&self) -> MemoryType {
        match *self {
            MemoryStorage::Dynamic(_) => MemoryType::Dynamic,
            MemoryStorage::Static(_) => MemoryType::Static,
            MemoryStorage::SharedStatic(_) => MemoryType::SharedStatic,
        }
    }
}
/// The growth/allocation strategy chosen for a memory.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MemoryType {
    Dynamic,
    Static,
    SharedStatic,
}

impl MemoryType {
    /// Size, in bytes, of the guard region placed after the accessible pages
    /// for this strategy.
    #[doc(hidden)]
    pub fn guard_size(self) -> u64 {
        if let MemoryType::Dynamic = self {
            DYNAMIC_GUARD_SIZE as u64
        } else {
            // Both static flavors use the safe static guard region.
            SAFE_STATIC_GUARD_SIZE as u64
        }
    }

    /// Upper bound, in bytes, of the addressable heap for this strategy;
    /// `None` means the heap is unbounded (dynamic memories).
    #[doc(hidden)]
    pub fn bounds(self) -> Option<u64> {
        match self {
            MemoryType::Dynamic => None,
            MemoryType::Static | MemoryType::SharedStatic => {
                Some(SAFE_STATIC_HEAP_SIZE as u64)
            }
        }
    }
}
impl fmt::Debug for Memory {
    /// Formats the memory as its description plus the current size in bytes.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let size_in_bytes = self.current_pages() as usize * WASM_PAGE_SIZE;
        f.debug_struct("Memory")
            .field("desc", &self.desc)
            .field("size", &size_in_bytes)
            .finish()
    }
}

View File

@ -0,0 +1,10 @@
// Sizing for "static" memories: reserve the entire 4 GiB wasm32 address
// space up front, followed by a 2 GiB guard so that (base + offset)
// accesses with any 32-bit offset fault instead of reading other data.
// NOTE(review): `1 << 32` overflows `usize` on 32-bit hosts and would fail
// to compile there — this presumably assumes a 64-bit target; confirm.
#[doc(hidden)]
pub const SAFE_STATIC_HEAP_SIZE: usize = 1 << 32; // 4 GiB
#[doc(hidden)]
pub const SAFE_STATIC_GUARD_SIZE: usize = 1 << 31; // 2 GiB
mod shared;
mod unshared;
pub use self::shared::SharedStaticMemory;
pub use self::unshared::StaticMemory;

View File

@ -0,0 +1,11 @@
use crate::sys;
use parking_lot::Mutex;
use std::sync::atomic::AtomicUsize;
/// Backing storage for a wasm memory meant to be shared between threads.
/// Declared but not yet used anywhere in this commit.
// Remove this attribute once this is used.
#[allow(dead_code)]
pub struct SharedStaticMemory {
    // The underlying allocation holding the memory's bytes.
    memory: sys::Memory,
    // Current size — atomic, presumably because shared memories may be
    // grown from multiple threads. TODO confirm units (pages vs bytes).
    current: AtomicUsize,
    // Guards grow operations (exact protocol not yet defined here).
    lock: Mutex<()>,
}

View File

@ -0,0 +1,99 @@
use crate::{
memory::{
static_::{SAFE_STATIC_GUARD_SIZE, SAFE_STATIC_HEAP_SIZE},
WASM_MAX_PAGES, WASM_PAGE_SIZE,
},
sys,
types::MemoryDesc,
vm,
};
/// A wasm linear memory backed by a single up-front virtual reservation of
/// `SAFE_STATIC_HEAP_SIZE + SAFE_STATIC_GUARD_SIZE` bytes. Growing only
/// changes page protections; the base address never moves.
pub struct StaticMemory {
    // The full reserved region (accessible pages + guard).
    memory: sys::Memory,
    // Current size of the memory, in wasm pages.
    current: u32,
    // Optional maximum size, in wasm pages, from the memory descriptor.
    max: Option<u32>,
}
impl StaticMemory {
    /// Creates a new static memory for `desc`, wiring the VM-visible
    /// `local` record (base pointer, byte bound, and a type-erased back
    /// pointer to this storage) to the new allocation.
    ///
    /// Returns `None` if the reservation or the initial `mprotect`-style
    /// permission change fails.
    pub(in crate::memory) fn new(
        desc: MemoryDesc,
        local: &mut vm::LocalMemory,
    ) -> Option<Box<Self>> {
        let memory = {
            // Reserve heap + guard in one mapping; pages start inaccessible.
            let mut memory =
                sys::Memory::with_size(SAFE_STATIC_HEAP_SIZE + SAFE_STATIC_GUARD_SIZE).ok()?;
            if desc.min != 0 {
                // Make only the initial `min` pages readable/writable.
                unsafe {
                    memory
                        .protect(
                            0..(desc.min as usize * WASM_PAGE_SIZE),
                            sys::Protect::ReadWrite,
                        )
                        .ok()?;
                }
            }
            memory
        };
        let mut storage = Box::new(StaticMemory {
            memory,
            current: desc.min,
            max: desc.max,
        });
        // Box the storage first so the pointer handed to `local.memory`
        // stays stable; `local` holds raw pointers into this box.
        let storage_ptr: *mut StaticMemory = &mut *storage;
        local.base = storage.memory.as_ptr();
        local.bound = desc.min as usize * WASM_PAGE_SIZE;
        local.memory = storage_ptr as *mut ();
        Some(storage)
    }

    /// Current size of this memory, in wasm pages.
    pub fn current(&self) -> u32 {
        self.current
    }

    /// Grows the memory by `delta` pages, updating `local.bound` so the VM
    /// sees the new accessible size.
    ///
    /// Returns the previous page count on success (wasm `memory.grow`
    /// semantics), or `None` on overflow, exceeding `max`/`WASM_MAX_PAGES`,
    /// or a failed protection change.
    pub fn grow(&mut self, delta: u32, local: &mut vm::LocalMemory) -> Option<u32> {
        if delta == 0 {
            return Some(self.current);
        }
        let new_pages = self.current.checked_add(delta)?;
        if let Some(max) = self.max {
            if new_pages > max {
                return None;
            }
        }
        if new_pages as usize > WASM_MAX_PAGES {
            return None;
        }
        // Flip protections on just the newly-exposed page range; the
        // reservation itself never changes, so `local.base` stays valid.
        unsafe {
            self.memory
                .protect(
                    self.current as usize * WASM_PAGE_SIZE..new_pages as usize * WASM_PAGE_SIZE,
                    sys::Protect::ReadWrite,
                )
                .ok()?;
        }
        local.bound = new_pages as usize * WASM_PAGE_SIZE;
        let old_pages = self.current;
        self.current = new_pages;
        Some(old_pages)
    }

    /// Read-only view of the accessible (first `current` pages) bytes.
    pub fn as_slice(&self) -> &[u8] {
        unsafe { &self.memory.as_slice()[0..self.current as usize * WASM_PAGE_SIZE] }
    }

    /// Mutable view of the accessible (first `current` pages) bytes.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        unsafe { &mut self.memory.as_slice_mut()[0..self.current as usize * WASM_PAGE_SIZE] }
    }
}

View File

@ -7,7 +7,7 @@ use crate::{
types::{
FuncIndex, Global, GlobalDesc, GlobalIndex, ImportedFuncIndex, ImportedGlobalIndex,
ImportedMemoryIndex, ImportedTableIndex, Initializer, LocalGlobalIndex, LocalMemoryIndex,
LocalTableIndex, Memory, MemoryIndex, SigIndex, Table, TableIndex,
LocalTableIndex, MemoryDesc, MemoryIndex, SigIndex, TableDesc, TableIndex,
},
Instance,
};
@ -21,14 +21,14 @@ pub struct ModuleInner {
pub protected_caller: Box<dyn ProtectedCaller>,
// This are strictly local and the typsystem ensures that.
pub memories: Map<LocalMemoryIndex, Memory>,
pub memories: Map<LocalMemoryIndex, MemoryDesc>,
pub globals: Map<LocalGlobalIndex, Global>,
pub tables: Map<LocalTableIndex, Table>,
pub tables: Map<LocalTableIndex, TableDesc>,
// These are strictly imported and the typesystem ensures that.
pub imported_functions: Map<ImportedFuncIndex, ImportName>,
pub imported_memories: Map<ImportedMemoryIndex, (ImportName, Memory)>,
pub imported_tables: Map<ImportedTableIndex, (ImportName, Table)>,
pub imported_memories: Map<ImportedMemoryIndex, (ImportName, MemoryDesc)>,
pub imported_tables: Map<ImportedTableIndex, (ImportName, TableDesc)>,
pub imported_globals: Map<ImportedGlobalIndex, (ImportName, GlobalDesc)>,
pub exports: HashMap<String, ExportIndex>,

View File

@ -1,5 +1,5 @@
use super::vm;
use crate::types::{ElementType, Table};
use crate::types::{ElementType, TableDesc};
#[derive(Debug, Clone)]
pub enum TableElements {
@ -14,7 +14,7 @@ pub struct TableBacking {
}
impl TableBacking {
pub fn new(table: &Table) -> Self {
pub fn new(table: TableDesc) -> Self {
match table.ty {
ElementType::Anyfunc => {
let initial_table_backing_len = match table.max {

View File

@ -1,4 +1,4 @@
use crate::{module::ModuleInner, structures::TypedIndex};
use crate::{memory::MemoryType, module::ModuleInner, structures::TypedIndex};
/// Represents a WebAssembly type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -71,7 +71,7 @@ pub enum ElementType {
}
#[derive(Debug, Clone, Copy)]
pub struct Table {
pub struct TableDesc {
/// Type of data stored in this table.
pub ty: ElementType,
/// The minimum number of elements that must be stored in this table.
@ -80,8 +80,8 @@ pub struct Table {
pub max: Option<u32>,
}
impl Table {
pub(crate) fn fits_in_imported(&self, imported: &Table) -> bool {
impl TableDesc {
pub(crate) fn fits_in_imported(&self, imported: &TableDesc) -> bool {
// TODO: We should define implementation limits.
let imported_max = imported.max.unwrap_or(u32::max_value());
let self_max = self.max.unwrap_or(u32::max_value());
@ -115,7 +115,7 @@ pub struct Global {
/// A wasm memory.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Memory {
pub struct MemoryDesc {
/// The minimum number of allowed pages.
pub min: u32,
/// The maximum number of allowed pages.
@ -124,12 +124,17 @@ pub struct Memory {
pub shared: bool,
}
impl Memory {
pub fn is_static_heap(&self) -> bool {
self.max.is_some()
impl MemoryDesc {
pub fn memory_type(self) -> MemoryType {
match (self.max.is_some(), self.shared) {
(true, true) => MemoryType::SharedStatic,
(true, false) => MemoryType::Static,
(false, false) => MemoryType::Dynamic,
(false, true) => panic!("shared memory without a max is not allowed"),
}
}
pub(crate) fn fits_in_imported(&self, imported: &Memory) -> bool {
pub(crate) fn fits_in_imported(&self, imported: MemoryDesc) -> bool {
let imported_max = imported.max.unwrap_or(65_536);
let self_max = self.max.unwrap_or(65_536);

View File

@ -2,7 +2,7 @@ pub use crate::backing::{ImportBacking, LocalBacking};
use crate::{
module::ModuleInner,
structures::TypedIndex,
types::{LocalMemoryIndex, LocalOrImport, MemoryIndex},
types::{LocalOrImport, MemoryIndex},
};
use std::{ffi::c_void, mem, ptr, slice};
@ -13,7 +13,7 @@ use std::{ffi::c_void, mem, ptr, slice};
#[repr(C)]
pub struct Ctx {
/// A pointer to an array of locally-defined memories, indexed by `MemoryIndex`.
pub(crate) memories: *mut LocalMemory,
pub(crate) memories: *mut *mut LocalMemory,
/// A pointer to an array of locally-defined tables, indexed by `TableIndex`.
pub(crate) tables: *mut LocalTable,
@ -22,7 +22,7 @@ pub struct Ctx {
pub(crate) globals: *mut LocalGlobal,
/// A pointer to an array of imported memories, indexed by `MemoryIndex,
pub(crate) imported_memories: *mut ImportedMemory,
pub(crate) imported_memories: *mut *mut LocalMemory,
/// A pointer to an array of imported tables, indexed by `TableIndex`.
pub(crate) imported_tables: *mut ImportedTable,
@ -33,8 +33,8 @@ pub struct Ctx {
/// A pointer to an array of imported functions, indexed by `FuncIndex`.
pub(crate) imported_funcs: *mut ImportedFunc,
pub(crate) local_backing: *mut LocalBacking,
pub(crate) import_backing: *mut ImportBacking,
// pub(crate) local_backing: *mut LocalBacking,
// pub(crate) import_backing: *mut ImportBacking,
module: *const ModuleInner,
pub data: *mut c_void,
@ -53,13 +53,13 @@ impl Ctx {
tables: local_backing.vm_tables.as_mut_ptr(),
globals: local_backing.vm_globals.as_mut_ptr(),
imported_memories: import_backing.memories.as_mut_ptr(),
imported_tables: import_backing.tables.as_mut_ptr(),
imported_globals: import_backing.globals.as_mut_ptr(),
imported_funcs: import_backing.functions.as_mut_ptr(),
imported_memories: import_backing.vm_memories.as_mut_ptr(),
imported_tables: import_backing.vm_tables.as_mut_ptr(),
imported_globals: import_backing.vm_globals.as_mut_ptr(),
imported_funcs: import_backing.vm_functions.as_mut_ptr(),
local_backing,
import_backing,
// local_backing,
// import_backing,
module,
data: ptr::null_mut(),
@ -80,13 +80,13 @@ impl Ctx {
tables: local_backing.vm_tables.as_mut_ptr(),
globals: local_backing.vm_globals.as_mut_ptr(),
imported_memories: import_backing.memories.as_mut_ptr(),
imported_tables: import_backing.tables.as_mut_ptr(),
imported_globals: import_backing.globals.as_mut_ptr(),
imported_funcs: import_backing.functions.as_mut_ptr(),
imported_memories: import_backing.vm_memories.as_mut_ptr(),
imported_tables: import_backing.vm_tables.as_mut_ptr(),
imported_globals: import_backing.vm_globals.as_mut_ptr(),
imported_funcs: import_backing.vm_functions.as_mut_ptr(),
local_backing,
import_backing,
// local_backing,
// import_backing,
module,
data,
@ -116,19 +116,14 @@ impl Ctx {
let module = unsafe { &*self.module };
let mem_index = MemoryIndex::new(mem_index as usize);
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => {
let local_backing = unsafe { &*self.local_backing };
&local_backing.memories[local_mem_index][..]
}
LocalOrImport::Import(import_mem_index) => {
let import_backing = unsafe { &mut *self.import_backing };
let vm_memory_import = import_backing.memories[import_mem_index].clone();
unsafe {
let memory = &*vm_memory_import.memory;
slice::from_raw_parts(memory.base, memory.size)
}
}
LocalOrImport::Local(local_mem_index) => unsafe {
let local_memory = &**self.memories.add(local_mem_index.index());
slice::from_raw_parts(local_memory.base, local_memory.bound)
},
LocalOrImport::Import(import_mem_index) => unsafe {
let local_memory = &**self.imported_memories.add(import_mem_index.index());
slice::from_raw_parts(local_memory.base, local_memory.bound)
},
}
}
@ -155,19 +150,14 @@ impl Ctx {
let module = unsafe { &*self.module };
let mem_index = MemoryIndex::new(mem_index as usize);
match mem_index.local_or_import(module) {
LocalOrImport::Local(local_mem_index) => {
let local_backing = unsafe { &mut *self.local_backing };
&mut local_backing.memories[local_mem_index][..]
}
LocalOrImport::Import(import_mem_index) => {
let import_backing = unsafe { &mut *self.import_backing };
let vm_memory_import = import_backing.memories[import_mem_index].clone();
unsafe {
let memory = &*vm_memory_import.memory;
slice::from_raw_parts_mut(memory.base, memory.size)
}
}
LocalOrImport::Local(local_mem_index) => unsafe {
let local_memory = &**self.memories.add(local_mem_index.index());
slice::from_raw_parts_mut(local_memory.base, local_memory.bound)
},
LocalOrImport::Import(import_mem_index) => unsafe {
let local_memory = &**self.imported_memories.add(import_mem_index.index());
slice::from_raw_parts_mut(local_memory.base, local_memory.bound)
},
}
}
}
@ -295,9 +285,11 @@ pub struct LocalMemory {
/// Pointer to the bottom of this linear memory.
pub base: *mut u8,
/// Current size of this linear memory in bytes.
pub size: usize,
/// The local memory index.
pub index: LocalMemoryIndex,
pub bound: usize,
/// The actual memory that this represents.
/// This is either `*mut DynamicMemory`, `*mut StaticMemory`,
/// or `*mut SharedStaticMemory`.
pub memory: *mut (),
}
impl LocalMemory {
@ -306,7 +298,7 @@ impl LocalMemory {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_size() -> u8 {
pub fn offset_bound() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
@ -315,28 +307,28 @@ impl LocalMemory {
}
}
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedMemory {
/// A pointer to the memory definition.
pub memory: *mut LocalMemory,
pub vmctx: *mut Ctx,
}
// #[derive(Debug, Clone)]
// #[repr(C)]
// pub struct ImportedMemory {
// /// A pointer to the memory definition.
// pub memory: *mut LocalMemory,
// pub vmctx: *mut Ctx,
// }
impl ImportedMemory {
#[allow(clippy::erasing_op)] // TODO
pub fn offset_memory() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
// impl ImportedMemory {
// #[allow(clippy::erasing_op)] // TODO
// pub fn offset_memory() -> u8 {
// 0 * (mem::size_of::<usize>() as u8)
// }
pub fn offset_vmctx() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
// pub fn offset_vmctx() -> u8 {
// 1 * (mem::size_of::<usize>() as u8)
// }
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
// pub fn size() -> u8 {
// mem::size_of::<Self>() as u8
// }
// }
/// Definition of a global used by the VM.
#[derive(Debug, Clone)]
@ -421,8 +413,8 @@ impl Anyfunc {
#[cfg(test)]
mod vm_offset_tests {
use super::{
Anyfunc, Ctx, ImportedFunc, ImportedGlobal, ImportedMemory, ImportedTable, LocalGlobal,
LocalMemory, LocalTable,
Anyfunc, Ctx, ImportedFunc, ImportedGlobal, ImportedTable, LocalGlobal, LocalMemory,
LocalTable,
};
#[test]
@ -510,21 +502,8 @@ mod vm_offset_tests {
);
assert_eq!(
LocalMemory::offset_size() as usize,
offset_of!(LocalMemory => size).get_byte_offset(),
);
}
#[test]
fn imported_memory() {
assert_eq!(
ImportedMemory::offset_memory() as usize,
offset_of!(ImportedMemory => memory).get_byte_offset(),
);
assert_eq!(
ImportedMemory::offset_vmctx() as usize,
offset_of!(ImportedMemory => vmctx).get_byte_offset(),
LocalMemory::offset_bound() as usize,
offset_of!(LocalMemory => bound).get_byte_offset(),
);
}
@ -600,10 +579,12 @@ mod vm_ctx_tests {
vm_globals: Map::new().into_boxed_map(),
};
let mut import_backing = ImportBacking {
functions: Map::new().into_boxed_map(),
memories: Map::new().into_boxed_map(),
tables: Map::new().into_boxed_map(),
globals: Map::new().into_boxed_map(),
vm_functions: Map::new().into_boxed_map(),
vm_memories: Map::new().into_boxed_map(),
vm_tables: Map::new().into_boxed_map(),
vm_globals: Map::new().into_boxed_map(),
};
let module = generate_module();
let data = &mut data as *mut _ as *mut c_void;

View File

@ -1,5 +1,5 @@
use crate::{
memory::LinearMemory,
memory::{DynamicMemory, StaticMemory},
structures::TypedIndex,
types::{ImportedMemoryIndex, LocalMemoryIndex, LocalTableIndex},
vm,
@ -11,17 +11,14 @@ use crate::{
pub unsafe extern "C" fn local_static_memory_grow(
memory_index: LocalMemoryIndex,
by_pages: u32,
ctx: *mut vm::Ctx,
delta: u32,
ctx: &mut vm::Ctx,
) -> i32 {
if let Some(old) = (*(*ctx).local_backing)
.memory(memory_index)
.grow_static(by_pages)
{
// Store the new size back into the vmctx.
(*(*ctx).memories.add(memory_index.index())).size =
(old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
old
let local_memory = *ctx.memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
old as i32
} else {
-1
}
@ -29,71 +26,91 @@ pub unsafe extern "C" fn local_static_memory_grow(
pub unsafe extern "C" fn local_static_memory_size(
memory_index: LocalMemoryIndex,
ctx: *mut vm::Ctx,
ctx: &vm::Ctx,
) -> u32 {
(*(*ctx).local_backing).memory(memory_index).pages()
let local_memory = *ctx.memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
(*memory).current()
}
pub unsafe extern "C" fn local_dynamic_memory_grow(
memory_index: LocalMemoryIndex,
by_pages: u32,
ctx: *mut vm::Ctx,
delta: u32,
ctx: &mut vm::Ctx,
) -> i32 {
if let Some(old) = (*(*ctx).local_backing)
.memory(memory_index)
.grow_dynamic(by_pages)
{
// Store the new size back into the vmctx.
(*(*ctx).memories.add(memory_index.index())).size =
(old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
old
let local_memory = *ctx.memories.add(memory_index.index());
let memory = (*local_memory).memory as *mut DynamicMemory;
if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
old as i32
} else {
-1
}
}
/// Returns the current size, in wasm pages, of the local dynamic memory at
/// `memory_index`.
///
/// # Safety
/// Called from generated code. `ctx.memories` must be a valid array with an
/// entry at `memory_index`, and that entry's `memory` field must actually
/// point at a `DynamicMemory` — the cast below is unchecked.
pub unsafe extern "C" fn local_dynamic_memory_size(
    memory_index: LocalMemoryIndex,
    ctx: &vm::Ctx,
) -> u32 {
    // `memories` is an array of `*mut LocalMemory`; index it, then follow
    // the type-erased back pointer to the concrete backing implementation.
    let local_memory = *ctx.memories.add(memory_index.index());
    let memory = (*local_memory).memory as *mut DynamicMemory;
    (*memory).current()
}
// +*****************************+
// | IMPORTED MEMORIES |
// +****************************+
pub unsafe extern "C" fn imported_static_memory_grow(
imported_mem_index: ImportedMemoryIndex,
by_pages: u32,
caller_ctx: *mut vm::Ctx,
import_memory_index: ImportedMemoryIndex,
delta: u32,
ctx: &mut vm::Ctx,
) -> i32 {
let import_backing = &*(*caller_ctx).import_backing;
let vm_imported_mem = import_backing.imported_memory(imported_mem_index);
let local_memory = *ctx.imported_memories.add(import_memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
// We can assume that the memory here is local to the callee ctx.
let local_mem_index = (*vm_imported_mem.memory).index;
if let Some(old) = (*(*vm_imported_mem.vmctx).local_backing)
.memory(local_mem_index)
.grow_dynamic(by_pages)
{
// Store the new size back into the vmctx.
(*(*vm_imported_mem.vmctx)
.memories
.add(local_mem_index.index()))
.size = (old as usize + by_pages as usize) * LinearMemory::PAGE_SIZE as usize;
old
if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
old as i32
} else {
-1
}
}
pub unsafe extern "C" fn imported_static_memory_size(
imported_memory_index: ImportedMemoryIndex,
caller_ctx: *mut vm::Ctx,
import_memory_index: ImportedMemoryIndex,
ctx: &vm::Ctx,
) -> u32 {
let import_backing = &*(*caller_ctx).import_backing;
let vm_imported_mem = import_backing.imported_memory(imported_memory_index);
let local_memory = *ctx.imported_memories.add(import_memory_index.index());
let memory = (*local_memory).memory as *mut StaticMemory;
// We can assume that the memory here is local to the callee ctx.
let local_mem_index = (*vm_imported_mem.memory).index;
(*(*vm_imported_mem.vmctx).local_backing)
.memory(local_mem_index)
.pages()
(*memory).current()
}
/// Grows the imported dynamic memory at `memory_index` by `delta` pages.
///
/// Returns the previous size in pages on success, or `-1` on failure
/// (wasm `memory.grow` semantics).
///
/// # Safety
/// Called from generated code. `ctx.imported_memories` must be a valid
/// array with an entry at `memory_index`, and that entry's `memory` field
/// must actually point at a `DynamicMemory` — the cast below is unchecked.
pub unsafe extern "C" fn imported_dynamic_memory_grow(
    memory_index: ImportedMemoryIndex,
    delta: u32,
    ctx: &mut vm::Ctx,
) -> i32 {
    let local_memory = *ctx.imported_memories.add(memory_index.index());
    let memory = (*local_memory).memory as *mut DynamicMemory;
    // `grow` also receives the `LocalMemory` so it can refresh base/bound.
    if let Some(old) = (*memory).grow(delta, &mut *local_memory) {
        old as i32
    } else {
        -1
    }
}
/// Returns the current size, in wasm pages, of the imported dynamic memory
/// at `memory_index`.
///
/// # Safety
/// Called from generated code. `ctx.imported_memories` must be a valid
/// array with an entry at `memory_index`, and that entry's `memory` field
/// must actually point at a `DynamicMemory` — the cast below is unchecked.
pub unsafe extern "C" fn imported_dynamic_memory_size(
    memory_index: ImportedMemoryIndex,
    ctx: &vm::Ctx,
) -> u32 {
    let local_memory = *ctx.imported_memories.add(memory_index.index());
    let memory = (*local_memory).memory as *mut DynamicMemory;
    (*memory).current()
}
// +*****************************+
@ -102,16 +119,16 @@ pub unsafe extern "C" fn imported_static_memory_size(
pub unsafe extern "C" fn local_table_grow(
table_index: LocalTableIndex,
by_elems: u32,
ctx: *mut vm::Ctx,
delta: u32,
ctx: &mut vm::Ctx,
) -> i32 {
let _ = table_index;
let _ = by_elems;
let _ = delta;
let _ = ctx;
unimplemented!()
}
pub unsafe extern "C" fn local_table_size(table_index: LocalTableIndex, ctx: *mut vm::Ctx) -> u32 {
pub unsafe extern "C" fn local_table_size(table_index: LocalTableIndex, ctx: &vm::Ctx) -> u32 {
let _ = table_index;
let _ = ctx;
unimplemented!()