wasmer/src/webassembly/instance.rs

636 lines
24 KiB
Rust
Raw Normal View History

//! A webassembly::Instance object is a stateful, executable instance of a
//! webassembly::Module. Instance objects contain all the Exported
//! WebAssembly functions that allow calling into WebAssembly code.
//! The webassembly::Instance() constructor function can be called to
//! synchronously instantiate a given webassembly::Module object. However, the
//! primary way to get an Instance is through the asynchronous
//! webassembly::instantiate_streaming() function.
use cranelift_codegen::ir::LibCall;
use cranelift_codegen::{binemit, isa, Context};
use cranelift_entity::EntityRef;
use cranelift_wasm::{FuncIndex, GlobalInit};
use region;

use std::iter::Iterator;
use std::marker::PhantomData;
use std::ptr::write_unaligned;
use std::slice;
use std::sync::Arc;

use super::super::common::slice::{BoundedSlice, UncheckedSlice};
use super::errors::ErrorKind;
use super::import_object::ImportObject;
use super::memory::LinearMemory;
use super::module::{Export, Module};
use super::relocation::{Reloc, RelocSink, RelocationType};
2018-10-15 17:10:49 +02:00
pub fn protect_codebuf(code_buf: &Vec<u8>) -> Result<(), String> {
match unsafe {
region::protect(
code_buf.as_ptr(),
code_buf.len(),
region::Protection::ReadWriteExecute,
)
} {
Err(err) => {
return Err(format!(
"failed to give executable permission to code: {}",
err
))
2018-10-16 03:21:49 +02:00
}
2018-10-15 17:10:49 +02:00
Ok(()) => Ok(()),
}
}
2018-10-17 11:22:45 +02:00
fn get_function_addr(
2018-10-14 23:48:59 +02:00
func_index: &FuncIndex,
2018-10-17 11:22:45 +02:00
import_functions: &Vec<*const u8>,
functions: &Vec<Vec<u8>>,
) -> *const u8 {
2018-10-17 11:22:45 +02:00
let index = func_index.index();
let len = import_functions.len();
let func_pointer = if index < len {
import_functions[index]
} else {
(&functions[func_index.index() - len]).as_ptr()
};
func_pointer
2018-10-14 13:59:11 +02:00
}
2018-10-12 02:45:09 +02:00
2018-11-06 13:18:16 +01:00
// TODO: To be removed.
2018-11-07 11:18:55 +01:00
#[derive(Debug)]
#[repr(C, packed)]
2018-10-22 21:03:43 +02:00
pub struct VmCtx<'phantom> {
2018-10-14 13:59:11 +02:00
pub user_data: UserData,
2018-10-23 11:40:17 +02:00
globals: UncheckedSlice<u8>,
memories: UncheckedSlice<UncheckedSlice<u8>>,
tables: UncheckedSlice<BoundedSlice<usize>>,
2018-10-15 11:46:04 +02:00
phantom: PhantomData<&'phantom ()>,
2018-10-13 15:31:56 +02:00
}
2018-11-06 13:18:16 +01:00
// TODO: To be removed.
2018-11-07 11:18:55 +01:00
#[derive(Debug)]
#[repr(C, packed)]
2018-10-14 13:59:11 +02:00
pub struct UserData {
// pub process: Dispatch<Process>,
pub instance: Instance,
}
2018-10-14 13:59:11 +02:00
/// An Instance of a WebAssembly module
#[derive(Debug)]
pub struct Instance {
/// WebAssembly table data
2018-10-15 17:10:49 +02:00
// pub tables: Arc<Vec<RwLock<Vec<usize>>>>,
pub tables: Arc<Vec<Vec<usize>>>,
2018-10-14 13:59:11 +02:00
/// WebAssembly linear memory data
pub memories: Arc<Vec<LinearMemory>>,
2018-10-14 13:59:11 +02:00
/// WebAssembly global variable data
pub globals: Vec<u8>,
2018-10-15 15:58:06 +02:00
/// Webassembly functions
2018-10-15 17:10:49 +02:00
// functions: Vec<usize>,
functions: Vec<Vec<u8>>,
2018-10-17 11:22:45 +02:00
/// Imported functions
import_functions: Vec<*const u8>,
/// The module start function
2018-10-17 16:45:24 +02:00
pub start_func: Option<FuncIndex>,
// Region start memory location
// code_base: *const (),
2018-11-07 11:18:55 +01:00
// C-like pointers to data (heaps, globals, tables)
pub data_pointers: DataPointers,
// Default memory bound
pub default_memory_bound: i32
}
/// Contains pointers to data (heaps, globals, tables) needed
/// by Cranelift.
#[derive(Debug)]
pub struct DataPointers {
    /// Pointer to tables
    pub tables: UncheckedSlice<BoundedSlice<usize>>,
    /// Pointer to memories
    pub memories: UncheckedSlice<UncheckedSlice<u8>>,
    /// Pointer to globals
    pub globals: UncheckedSlice<u8>,
}
2018-10-14 13:59:11 +02:00
impl Instance {
/// Create a new `Instance`.
2018-10-17 11:22:45 +02:00
pub fn new(
module: &Module,
import_object: &ImportObject<&str, &str>,
2018-10-17 11:22:45 +02:00
) -> Result<Instance, ErrorKind> {
2018-10-14 13:59:11 +02:00
let mut tables: Vec<Vec<usize>> = Vec::new();
let mut memories: Vec<LinearMemory> = Vec::new();
let mut globals: Vec<u8> = Vec::new();
2018-10-15 17:10:49 +02:00
let mut functions: Vec<Vec<u8>> = Vec::new();
2018-10-17 11:22:45 +02:00
let mut import_functions: Vec<*const u8> = Vec::new();
// let mut code_base: *const () = ptr::null();
2018-10-14 13:59:11 +02:00
debug!("Instance - Instantiating functions");
2018-10-15 11:46:04 +02:00
// Instantiate functions
2018-10-15 02:48:59 +02:00
{
2018-10-17 11:22:45 +02:00
functions.reserve_exact(module.info.functions.len());
2018-10-15 02:48:59 +02:00
let isa = isa::lookup(module.info.triple.clone())
.unwrap()
.finish(module.info.flags.clone());
2018-10-17 11:22:45 +02:00
let mut relocations = Vec::new();
2018-10-15 02:48:59 +02:00
2018-11-01 17:58:05 +01:00
// We walk through the imported functions and set the relocations
// for each of this functions to be an empty vector (as is defined outside of wasm)
2018-10-17 11:22:45 +02:00
for (module, field) in module.info.imported_funcs.iter() {
2018-11-06 15:51:01 +01:00
let function = import_object
.get(&module.as_str(), &field.as_str())
.ok_or_else(|| {
ErrorKind::LinkError(format!(
"Imported function {}.{} was not provided in the import_functions",
module, field
))
})?;
2018-10-17 11:22:45 +02:00
// println!("GET FUNC {:?}", function);
import_functions.push(function);
relocations.push(vec![]);
}
2018-11-07 11:18:55 +01:00
debug!("Instance - Compiling functions");
2018-10-15 11:46:04 +02:00
// Compile the functions (from cranelift IR to machine code)
2018-10-15 02:48:59 +02:00
for function_body in module.info.function_bodies.values() {
2018-10-15 03:03:00 +02:00
let mut func_context = Context::for_function(function_body.to_owned());
2018-10-15 17:10:49 +02:00
// func_context
// .verify(&*isa)
// .map_err(|e| ErrorKind::CompileError(e.to_string()))?;
// func_context
// .verify_locations(&*isa)
// .map_err(|e| ErrorKind::CompileError(e.to_string()))?;
// let code_size_offset = func_context
// .compile(&*isa)
2018-11-07 11:18:55 +01:00
// .map_err(|e| ErrorKind::CompileError(e.to_string()))?;
2018-10-15 17:10:49 +02:00
// as usize;
let mut code_buf: Vec<u8> = Vec::new();
let mut reloc_sink = RelocSink::new();
let mut trap_sink = binemit::NullTrapSink {};
2018-11-01 17:58:05 +01:00
// This will compile a cranelift ir::Func into a code buffer (stored in memory)
// and will push any inner function calls to the reloc sync.
// In case traps need to be triggered, they will go to trap_sink
2018-10-16 03:21:49 +02:00
func_context
.compile_and_emit(&*isa, &mut code_buf, &mut reloc_sink, &mut trap_sink)
2018-11-07 11:18:55 +01:00
.map_err(|e| {
println!("CompileError: {}", e.to_string());
ErrorKind::CompileError(e.to_string())
})?;
2018-11-01 17:58:05 +01:00
// We set this code_buf to be readable & executable
2018-11-06 15:51:01 +01:00
protect_codebuf(&code_buf).unwrap();
2018-10-15 17:10:49 +02:00
let func_offset = code_buf;
functions.push(func_offset);
2018-10-17 11:22:45 +02:00
// context_and_offsets.push(func_context);
2018-10-16 00:04:05 +02:00
relocations.push(reloc_sink.func_relocs);
// println!("FUNCTION RELOCATIONS {:?}", reloc_sink.func_relocs)
2018-10-15 17:10:49 +02:00
// total_size += code_size_offset;
2018-10-15 02:48:59 +02:00
}
debug!("Instance - Relocating functions");
2018-10-16 00:04:05 +02:00
// For each of the functions used, we see what are the calls inside this functions
// and relocate each call to the proper memory address.
// The relocations are relative to the relocation's address plus four bytes
// TODO: Support architectures other than x64, and other reloc kinds.
for (i, function_relocs) in relocations.iter().enumerate() {
2018-10-24 11:39:00 +02:00
for ref reloc in function_relocs {
let target_func_address: isize = match reloc.target {
2018-10-16 00:04:05 +02:00
RelocationType::Normal(func_index) => {
2018-10-24 11:39:00 +02:00
get_function_addr(&FuncIndex::new(func_index as usize), &import_functions, &functions) as isize
2018-10-16 00:04:05 +02:00
},
RelocationType::CurrentMemory => {
current_memory as isize
},
RelocationType::GrowMemory => {
grow_memory as isize
2018-10-24 01:15:20 +02:00
},
RelocationType::LibCall(LibCall::CeilF32) => {
_ceilf32 as isize
},
RelocationType::LibCall(LibCall::FloorF32) => {
_floorf32 as isize
},
RelocationType::LibCall(LibCall::TruncF32) => {
_truncf32 as isize
},
RelocationType::LibCall(LibCall::NearestF32) => {
_nearbyintf32 as isize
},
2018-10-24 01:22:16 +02:00
RelocationType::LibCall(LibCall::CeilF64) => {
_ceilf64 as isize
},
RelocationType::LibCall(LibCall::FloorF64) => {
_floorf64 as isize
},
RelocationType::LibCall(LibCall::TruncF64) => {
_truncf64 as isize
},
RelocationType::LibCall(LibCall::NearestF64) => {
_nearbyintf64 as isize
},
2018-10-16 00:04:05 +02:00
_ => unimplemented!()
// RelocationType::Intrinsic(name) => {
// get_abi_intrinsic(name)?
// },
};
2018-10-24 11:39:00 +02:00
2018-10-17 11:22:45 +02:00
let func_addr =
get_function_addr(&FuncIndex::new(i), &import_functions, &functions);
2018-10-16 00:04:05 +02:00
match reloc.reloc {
Reloc::Abs8 => unsafe {
2018-10-17 11:22:45 +02:00
let reloc_address = func_addr.offset(reloc.offset as isize) as i64;
2018-10-16 00:04:05 +02:00
let reloc_addend = reloc.addend;
let reloc_abs = target_func_address as i64 + reloc_addend;
write_unaligned(reloc_address as *mut i64, reloc_abs);
},
Reloc::X86PCRel4 => unsafe {
2018-10-17 11:22:45 +02:00
let reloc_address = func_addr.offset(reloc.offset as isize) as isize;
2018-10-16 00:04:05 +02:00
let reloc_addend = reloc.addend as isize;
// TODO: Handle overflow.
let reloc_delta_i32 =
(target_func_address - reloc_address + reloc_addend) as i32;
write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
},
_ => panic!("unsupported reloc kind"),
}
}
}
// We only want to allocate in memory if there is more than
2018-10-15 15:58:06 +02:00
// 0 functions. Otherwise reserving a 0-sized memory region
// cause a panic error
2018-10-15 17:10:49 +02:00
// if total_size > 0 {
// // Allocate the total memory for this functions
// // let map = MmapMut::map_anon(total_size).unwrap();
// // let region_start = map.as_ptr() as usize;
// // code_base = map.as_ptr() as *const ();
// // // Emit this functions to memory
// for (ref func_context, func_offset) in context_and_offsets.iter() {
// let mut trap_sink = TrapSink::new(*func_offset);
// let mut reloc_sink = RelocSink::new();
// let mut code_buf: Vec<u8> = Vec::new();
// // let mut func_pointer = as *mut u8;
// unsafe {
// func_context.emit_to_memory(
// &*isa,
// &mut code_buf,
// &mut reloc_sink,
// &mut trap_sink,
// );
// };
// let func_offset = code_buf.as_ptr() as usize;
// functions.push(*func_offset);
// }
// // Set protection of this memory region to Read + Execute
// // so we are able to execute the functions emitted to memory
// // unsafe {
// // region::protect(region_start as *mut u8, total_size, region::Protection::ReadExecute)
// // .expect("unable to make memory readable+executable");
// // }
// }
2018-10-15 02:48:59 +02:00
}
2018-10-15 03:03:00 +02:00
debug!("Instance - Instantiating tables");
2018-10-15 11:46:04 +02:00
// Instantiate tables
2018-10-14 13:59:11 +02:00
{
// Reserve table space
2018-10-14 13:59:11 +02:00
tables.reserve_exact(module.info.tables.len());
for table in &module.info.tables {
let len = table.entity.size;
let mut v = Vec::with_capacity(len);
v.resize(len, 0);
tables.push(v);
}
// instantiate tables
for table_element in &module.info.table_elements {
2018-10-14 23:48:59 +02:00
assert!(
table_element.base.is_none(),
"globalvalue base not supported yet."
);
2018-10-14 13:59:11 +02:00
let base = 0;
2018-10-14 13:59:11 +02:00
let table = &mut tables[table_element.table_index];
for (i, func_index) in table_element.elements.iter().enumerate() {
// since the table just contains functions in the MVP
// we get the address of the specified function indexes
// to populate the table.
2018-10-13 15:31:56 +02:00
2018-10-14 13:59:11 +02:00
// let func_index = *elem_index - module.info.imported_funcs.len() as u32;
2018-10-17 11:22:45 +02:00
// let func_addr = functions[func_index.index()].as_ptr();
let func_addr = get_function_addr(&func_index, &import_functions, &functions);
2018-10-14 13:59:11 +02:00
table[base + table_element.offset + i] = func_addr as _;
}
}
2018-10-15 03:03:00 +02:00
}
2018-10-13 15:31:56 +02:00
debug!("Instance - Instantiating memories");
2018-10-15 11:46:04 +02:00
// Instantiate memories
2018-10-14 13:59:11 +02:00
{
// Allocate the underlying memory and initialize it to all zeros.
2018-10-19 01:28:16 +02:00
let total_memories = module.info.memories.len();
if total_memories > 0 {
memories.reserve_exact(total_memories);
for memory in &module.info.memories {
let memory = memory.entity;
let v = LinearMemory::new(
memory.pages_count as u32,
memory.maximum.map(|m| m as u32),
);
memories.push(v);
}
} else {
memories.reserve_exact(1);
memories.push(LinearMemory::new(0, None));
2018-10-14 13:59:11 +02:00
}
for init in &module.info.data_initializers {
2018-10-14 13:59:11 +02:00
debug_assert!(init.base.is_none(), "globalvar base not supported yet");
2018-11-06 15:51:01 +01:00
let offset = init.offset;
2018-10-14 13:59:11 +02:00
let mem_mut = memories[init.memory_index].as_mut();
let to_init = &mut mem_mut[offset..offset + init.data.len()];
2018-10-14 13:59:11 +02:00
to_init.copy_from_slice(&init.data);
}
2018-10-15 03:03:00 +02:00
}
2018-10-13 15:31:56 +02:00
debug!("Instance - Instantiating globals");
2018-10-15 11:46:04 +02:00
// Instantiate Globals
2018-10-14 13:59:11 +02:00
{
let globals_count = module.info.globals.len();
// Allocate the underlying memory and initialize it to zeros
let globals_data_size = globals_count * 8;
globals.resize(globals_data_size, 0);
// cast the globals slice to a slice of i64.
2018-10-14 23:48:59 +02:00
let globals_data = unsafe {
slice::from_raw_parts_mut(globals.as_mut_ptr() as *mut i64, globals_count)
};
2018-10-14 13:59:11 +02:00
for (i, global) in module.info.globals.iter().enumerate() {
let value: i64 = match global.entity.initializer {
GlobalInit::I32Const(n) => n as _,
GlobalInit::I64Const(n) => n,
GlobalInit::F32Const(f) => f as _, // unsafe { mem::transmute(f as f64) },
GlobalInit::F64Const(f) => f as _, // unsafe { mem::transmute(f) },
2018-11-06 15:51:01 +01:00
GlobalInit::GlobalRef(_global_index) => {
unimplemented!("GlobalInit::GlobalRef is not yet supported")
}
GlobalInit::Import() => {
// Right now (because there is no module/field fields on the Import
// https://github.com/CraneStation/cranelift/blob/5cabce9b58ff960534d4017fad11f2e78c72ceab/lib/wasm/src/sections_translator.rs#L90-L99 )
// It's impossible to know where to take the global from.
// This should be fixed in Cranelift itself.
unimplemented!("GlobalInit::Import is not yet supported")
}
2018-10-14 13:59:11 +02:00
};
globals_data[i] = value;
}
2018-10-15 03:03:00 +02:00
}
2018-10-13 15:31:56 +02:00
2018-10-16 03:21:49 +02:00
let start_func: Option<FuncIndex> =
module
.info
.start_func
.or_else(|| match module.info.exports.get("main") {
Some(Export::Function(index)) => Some(index.to_owned()),
_ => None,
});
2018-11-07 11:18:55 +01:00
// TODO: Refactor repetitive code
let tables_pointer: Vec<BoundedSlice<usize>> =
tables.iter().map(|table| table[..].into()).collect();
let memories_pointer: Vec<UncheckedSlice<u8>> =
memories.iter().map(|mem| mem[..].into()).collect();
let globals_pointer: UncheckedSlice<u8> = globals[..].into();
let data_pointers = DataPointers {
memories: memories_pointer[..].into(),
globals: globals_pointer,
tables: tables_pointer[..].into(),
};
let default_memory_bound = LinearMemory::WASM_PAGE_SIZE as i32;
2018-10-15 02:48:59 +02:00
Ok(Instance {
2018-10-15 17:10:49 +02:00
tables: Arc::new(tables.into_iter().collect()), // tables.into_iter().map(|table| RwLock::new(table)).collect()),
2018-10-14 13:59:11 +02:00
memories: Arc::new(memories.into_iter().collect()),
2018-11-07 11:18:55 +01:00
globals,
functions,
import_functions,
start_func,
data_pointers,
default_memory_bound,
// code_base: code_base,
2018-10-15 02:48:59 +02:00
})
2018-10-14 13:59:11 +02:00
}
2018-10-13 15:31:56 +02:00
2018-10-24 02:32:06 +02:00
pub fn memory_mut(&mut self, memory_index: usize) -> &mut LinearMemory {
let memories = Arc::get_mut(&mut self.memories).unwrap_or_else(|| {
panic!("Can't get memories as a mutable pointer (there might exist more mutable pointers to the memories)")
});
2018-10-24 02:32:06 +02:00
memories
.get_mut(memory_index)
.unwrap_or_else(|| panic!("no memory for index {}", memory_index))
}
2018-10-14 13:59:11 +02:00
pub fn memories(&self) -> Arc<Vec<LinearMemory>> {
self.memories.clone()
2018-10-13 15:31:56 +02:00
}
2018-11-06 13:18:16 +01:00
pub fn get_function_pointer(&self, func_index: FuncIndex) -> *const u8 {
get_function_addr(&func_index, &self.import_functions, &self.functions)
}
2018-11-06 13:18:16 +01:00
pub fn start(&self) {
if let Some(func_index) = self.start_func {
2018-11-06 13:18:16 +01:00
let func: fn(&Instance) = get_instance_function!(&self, func_index);
func(self)
}
}
2018-10-15 15:58:06 +02:00
2018-11-06 13:18:16 +01:00
// TODO: To be removed.
pub fn generate_context(&self) -> VmCtx {
2018-10-23 11:43:43 +02:00
let memories: Vec<UncheckedSlice<u8>> =
self.memories.iter().map(|mem| mem[..].into()).collect();
let tables: Vec<BoundedSlice<usize>> =
self.tables.iter().map(|table| table[..].into()).collect();
let globals: UncheckedSlice<u8> = self.globals[..].into();
// println!("GENERATING CONTEXT {:?}", self.globals);
2018-10-23 11:40:17 +02:00
2018-10-22 21:03:43 +02:00
// assert!(memories.len() >= 1, "modules must have at least one memory");
// the first memory has a space of `mem::size_of::<VmCtxData>()` rounded
// up to the 4KiB before it. We write the VmCtxData into that.
let instance = self.clone();
2018-10-22 21:03:43 +02:00
let data = VmCtx {
globals: globals,
2018-10-22 21:03:43 +02:00
memories: memories[..].into(),
tables: tables[..].into(),
user_data: UserData {
// process,
2018-10-22 21:03:43 +02:00
instance: instance,
},
phantom: PhantomData,
};
2018-10-22 21:03:43 +02:00
data
// let main_heap_ptr = memories[0].as_mut_ptr() as *mut VmCtxData;
// unsafe {
// main_heap_ptr.sub(1).write(data);
// &*(main_heap_ptr as *const VmCtx)
// }
}
/// Returns a slice of the contents of allocated linear memory.
pub fn inspect_memory(&self, memory_index: usize, address: usize, len: usize) -> &[u8] {
&self
.memories
.get(memory_index)
.unwrap_or_else(|| panic!("no memory for index {}", memory_index))
.as_ref()[address..address + len]
}
// Shows the value of a global variable.
// pub fn inspect_global(&self, global_index: GlobalIndex, ty: ir::Type) -> &[u8] {
// let offset = global_index * 8;
// let len = ty.bytes() as usize;
// &self.globals[offset..offset + len]
2018-10-15 15:58:06 +02:00
// }
2018-10-14 20:37:42 +02:00
// pub fn start_func(&self) -> extern fn(&VmCtx) {
// self.start_func
// }
2018-10-14 13:59:11 +02:00
}
2018-10-13 15:31:56 +02:00
2018-10-14 13:59:11 +02:00
impl Clone for Instance {
fn clone(&self) -> Instance {
2018-11-07 11:18:55 +01:00
// TODO: Refactor repetitive code
let tables_pointer: Vec<BoundedSlice<usize>> =
self.tables.iter().map(|table| table[..].into()).collect();
let memories_pointer: Vec<UncheckedSlice<u8>> =
self.memories.iter().map(|mem| mem[..].into()).collect();
let globals_pointer: UncheckedSlice<u8> = self.globals[..].into();
let data_pointers = DataPointers {
memories: memories_pointer[..].into(),
globals: globals_pointer,
tables: tables_pointer[..].into(),
};
let default_memory_bound =
self.memories.get(0).unwrap().current as i32;
2018-10-14 13:59:11 +02:00
Instance {
tables: Arc::clone(&self.tables),
memories: Arc::clone(&self.memories),
globals: self.globals.clone(),
2018-10-15 15:58:06 +02:00
functions: self.functions.clone(),
start_func: self.start_func.clone(),
2018-10-17 11:22:45 +02:00
import_functions: self.import_functions.clone(),
2018-11-07 11:18:55 +01:00
data_pointers,
default_memory_bound,
// code_base: self.code_base,
2018-10-14 13:59:11 +02:00
}
}
}
2018-10-16 00:04:05 +02:00
2018-11-07 11:18:55 +01:00
extern "C" fn grow_memory(size: u32, memory_index: u32, instance: &mut Instance) -> i32 {
2018-11-06 18:19:20 +01:00
// For now only the first mem can be accessed
// BTW, the memory_index coming in is random!
2018-11-06 13:18:16 +01:00
let memory_index: u32 = 0;
2018-11-07 11:18:55 +01:00
let old_mem_size = instance
2018-10-24 11:39:00 +02:00
.memory_mut(memory_index as usize)
.grow(size)
2018-11-07 11:18:55 +01:00
.unwrap_or(i32::max_value()); // Should be -1 ?
instance.default_memory_bound =
(instance.memories.get(0).unwrap().current as usize * LinearMemory::WASM_PAGE_SIZE) as i32;
// PROBLEM: The memories changed, so I have to do the whole slice thing all over again.
let tables_pointer: Vec<BoundedSlice<usize>> =
instance.tables.iter().map(|table| table[..].into()).collect();
let memories_pointer: Vec<UncheckedSlice<u8>> =
instance.memories.iter().map(|mem| mem[..].into()).collect();
let globals_pointer: UncheckedSlice<u8> =
instance.globals[..].into();
let data_pointers = DataPointers {
memories: memories_pointer[..].into(),
globals: globals_pointer,
tables: tables_pointer[..].into(),
};
// Update data_pointers
instance.data_pointers = data_pointers;
println!(
"
new mem loc = {:p}
instance.default_memory_bound = {:?}
",
&instance.memories.get(0).unwrap().mmap.get(0),
instance.default_memory_bound
);
return old_mem_size;
2018-10-16 00:04:05 +02:00
}
2018-11-06 13:18:16 +01:00
extern "C" fn current_memory(memory_index: u32, instance: &mut Instance) -> u32 {
2018-10-22 21:03:43 +02:00
let memory = &instance.memories[memory_index as usize];
memory.current_size() as u32
2018-10-16 00:04:05 +02:00
}
2018-10-24 01:15:20 +02:00
// Because of this bug https://github.com/rust-lang/rust/issues/34123
// We create internal functions for it
2018-10-24 02:01:46 +02:00
use std::intrinsics::{
ceilf32, ceilf64, floorf32, floorf64, nearbyintf32, nearbyintf64, truncf32, truncf64,
};
2018-10-24 01:15:20 +02:00
2018-10-24 01:22:16 +02:00
// F32
/// `extern "C"` wrapper for f32 ceiling, used as a `LibCall` relocation
/// target. Uses the stable `f32::ceil` method instead of the unstable
/// `std::intrinsics::ceilf32` — they compute the same result.
unsafe extern "C" fn _ceilf32(x: f32) -> f32 {
    x.ceil()
}
/// `extern "C"` wrapper for f32 floor, used as a `LibCall` relocation
/// target. Uses stable `f32::floor` instead of the unstable intrinsic.
unsafe extern "C" fn _floorf32(x: f32) -> f32 {
    x.floor()
}
/// `extern "C"` wrapper for f32 truncation (round toward zero), used as
/// a `LibCall` relocation target. Uses stable `f32::trunc` instead of
/// the unstable intrinsic.
unsafe extern "C" fn _truncf32(x: f32) -> f32 {
    x.trunc()
}
2018-10-24 02:01:46 +02:00
unsafe extern "C" fn _nearbyintf32(x: f32) -> f32 {
2018-10-24 01:15:20 +02:00
nearbyintf32(x)
}
// F64
/// `extern "C"` wrapper for f64 ceiling, used as a `LibCall` relocation
/// target. Uses stable `f64::ceil` instead of the unstable intrinsic.
unsafe extern "C" fn _ceilf64(x: f64) -> f64 {
    x.ceil()
}
/// `extern "C"` wrapper for f64 floor, used as a `LibCall` relocation
/// target. Uses stable `f64::floor` instead of the unstable intrinsic.
unsafe extern "C" fn _floorf64(x: f64) -> f64 {
    x.floor()
}
/// `extern "C"` wrapper for f64 truncation (round toward zero), used as
/// a `LibCall` relocation target. Uses stable `f64::trunc` instead of
/// the unstable intrinsic.
unsafe extern "C" fn _truncf64(x: f64) -> f64 {
    x.trunc()
}
2018-10-24 02:01:46 +02:00
unsafe extern "C" fn _nearbyintf64(x: f64) -> f64 {
2018-10-24 01:22:16 +02:00
nearbyintf64(x)
}