//! A webassembly::Instance object is a stateful, executable instance of a
//! webassembly::Module. Instance objects contain all the exported
//! WebAssembly functions that allow calling into WebAssembly code.
//!
//! The webassembly::Instance() constructor function can be called to
//! synchronously instantiate a given webassembly::Module object. However, the
//! primary way to get an Instance is through the asynchronous
//! webassembly::instantiateStreaming() function.
2018-10-16 03:21:49 +02:00
|
|
|
use cranelift_codegen::{binemit, isa, Context};
|
2018-10-15 03:03:00 +02:00
|
|
|
use cranelift_entity::EntityRef;
|
|
|
|
use cranelift_wasm::{FuncIndex, GlobalInit};
|
|
|
|
use memmap::MmapMut;
|
|
|
|
use region;
|
|
|
|
use spin::RwLock;
|
2018-10-17 11:22:45 +02:00
|
|
|
use std::collections::HashMap;
|
2018-10-16 03:21:49 +02:00
|
|
|
use std::iter::Iterator;
|
2018-10-14 13:59:11 +02:00
|
|
|
use std::marker::PhantomData;
|
2018-10-16 00:04:05 +02:00
|
|
|
use std::ptr::{self, write_unaligned};
|
2018-10-14 13:59:11 +02:00
|
|
|
use std::sync::Arc;
|
2018-10-14 23:48:59 +02:00
|
|
|
use std::{mem, slice};
|
2018-10-11 21:29:36 +02:00
|
|
|
|
2018-10-15 03:03:00 +02:00
|
|
|
use super::super::common::slice::{BoundedSlice, UncheckedSlice};
|
2018-10-15 02:48:59 +02:00
|
|
|
use super::errors::ErrorKind;
|
2018-10-17 16:08:31 +02:00
|
|
|
use super::import_object::ImportObject;
|
2018-10-15 02:48:59 +02:00
|
|
|
use super::memory::LinearMemory;
|
|
|
|
use super::module::Module;
|
2018-10-16 03:21:49 +02:00
|
|
|
use super::module::{DataInitializer, Export, Exportable};
|
|
|
|
use super::relocation::{Reloc, RelocSink, RelocationType, TrapSink};
|
2018-10-15 17:10:49 +02:00
|
|
|
|
|
|
|
pub fn protect_codebuf(code_buf: &Vec<u8>) -> Result<(), String> {
|
|
|
|
match unsafe {
|
|
|
|
region::protect(
|
|
|
|
code_buf.as_ptr(),
|
|
|
|
code_buf.len(),
|
|
|
|
region::Protection::ReadWriteExecute,
|
|
|
|
)
|
|
|
|
} {
|
|
|
|
Err(err) => {
|
|
|
|
return Err(format!(
|
|
|
|
"failed to give executable permission to code: {}",
|
|
|
|
err
|
|
|
|
))
|
2018-10-16 03:21:49 +02:00
|
|
|
}
|
2018-10-15 17:10:49 +02:00
|
|
|
Ok(()) => Ok(()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-10-17 11:22:45 +02:00
|
|
|
fn get_function_addr(
|
2018-10-14 23:48:59 +02:00
|
|
|
func_index: &FuncIndex,
|
2018-10-17 11:22:45 +02:00
|
|
|
import_functions: &Vec<*const u8>,
|
|
|
|
functions: &Vec<Vec<u8>>,
|
|
|
|
) -> *const u8 {
|
|
|
|
let index = func_index.index();
|
|
|
|
let len = import_functions.len();
|
|
|
|
let func_pointer = if index < len {
|
|
|
|
import_functions[index]
|
|
|
|
} else {
|
|
|
|
(&functions[func_index.index() - len]).as_ptr()
|
|
|
|
};
|
|
|
|
func_pointer
|
2018-10-14 13:59:11 +02:00
|
|
|
}
|
2018-10-12 02:45:09 +02:00
|
|
|
|
2018-10-17 11:22:45 +02:00
|
|
|
// pub fn get_function_addr(
|
|
|
|
// functions: &[usize],
|
|
|
|
// func_index: &FuncIndex,
|
|
|
|
// ) -> *const () {
|
|
|
|
// let offset = functions[func_index.index()];
|
|
|
|
// (base as usize + offset) as _
|
|
|
|
// }
|
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
/// Zero-sized, non-instantiable type.
///
/// Only ever used behind a reference: `VmCtx::data` treats the referenced
/// address as a heap start and reads the `VmCtxData` stored immediately
/// before it.
pub enum VmCtx {}
|
2018-10-11 21:29:36 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
impl VmCtx {
    /// Returns the context data stored immediately before the address
    /// `self` points at.
    ///
    /// NOTE(review): this assumes a `VmCtxData` was actually written just
    /// below the heap base (see the commented-out `generate_context`);
    /// calling it on any other pointer is undefined behavior.
    pub fn data(&self) -> &VmCtxData {
        let heap_ptr = self as *const _ as *const VmCtxData;
        // Step one whole `VmCtxData` back from the heap pointer.
        unsafe { &*heap_ptr.sub(1) }
    }

    /// Computes a typed pointer `offset` bytes past `self`.
    ///
    /// This is safe because the offset is 32 bits and thus
    /// cannot extend out of the guarded wasm memory.
    pub fn fastpath_offset_ptr<T>(&self, offset: u32) -> *const T {
        let heap_ptr = self as *const _ as *const u8;
        unsafe { heap_ptr.add(offset as usize) as *const T }
    }
}
|
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
/// Per-instance context data read back through `VmCtx::data`.
///
/// `#[repr(C)]` so the layout is stable and matches what the runtime writes.
#[repr(C)]
pub struct VmCtxData<'phantom> {
    // Embeds the owning `Instance` (see `UserData`).
    pub user_data: UserData,
    // Raw, unchecked views over the instance's globals, memories and tables.
    globals: UncheckedSlice<u8>,
    memories: UncheckedSlice<UncheckedSlice<u8>>,
    tables: UncheckedSlice<BoundedSlice<usize>>,
    // Ties this struct to the lifetime of the data the slices point into.
    phantom: PhantomData<&'phantom ()>,
}
|
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
/// User-level payload embedded in `VmCtxData`; gives code that recovers the
/// context access to the owning `Instance`.
#[repr(C)]
pub struct UserData {
    // pub process: Dispatch<Process>,
    pub instance: Instance,
}
|
2018-10-11 21:29:36 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
/// An Instance of a WebAssembly module
#[derive(Debug)]
pub struct Instance {
    /// WebAssembly table data: each table holds function addresses
    /// (stored as `usize`) filled in from the module's element segments.
    // pub tables: Arc<Vec<RwLock<Vec<usize>>>>,
    pub tables: Arc<Vec<Vec<usize>>>,

    /// WebAssembly linear memory data
    pub memories: Arc<Vec<LinearMemory>>,

    /// WebAssembly global variable data, 8 bytes per global
    /// (see the globals section of `Instance::new`).
    pub globals: Vec<u8>,

    /// Webassembly functions: one emitted machine-code buffer per
    /// locally compiled function.
    // functions: Vec<usize>,
    functions: Vec<Vec<u8>>,

    /// Imported functions: raw code pointers resolved from the
    /// `ImportObject`, in import order.
    import_functions: Vec<*const u8>,

    /// The module start function, falling back to the exported "main"
    /// function when the module declares no start section.
    pub start_func: Option<FuncIndex>,
    // Region start memory location
    // code_base: *const (),
}
|
2018-10-11 21:29:36 +02:00
|
|
|
|
2018-10-15 17:10:49 +02:00
|
|
|
// pub fn make_vmctx(instance: &mut Instance, mem_base_addrs: &mut [*mut u8]) -> Vec<*mut u8> {
|
|
|
|
// debug_assert!(
|
|
|
|
// instance.tables.len() <= 1,
|
|
|
|
// "non-default tables is not supported"
|
|
|
|
// );
|
|
|
|
|
|
|
|
// let (default_table_ptr, default_table_len) = instance
|
|
|
|
// .tables
|
|
|
|
// .get_mut(0)
|
|
|
|
// .map(|table| (table.as_mut_ptr() as *mut u8, table.len()))
|
|
|
|
// .unwrap_or((ptr::null_mut(), 0));
|
|
|
|
|
|
|
|
// let mut vmctx = Vec::new();
|
|
|
|
// vmctx.push(instance.globals.as_mut_ptr());
|
|
|
|
// vmctx.push(mem_base_addrs.as_mut_ptr() as *mut u8);
|
|
|
|
// vmctx.push(default_table_ptr);
|
|
|
|
// vmctx.push(default_table_len as *mut u8);
|
|
|
|
// vmctx.push(instance as *mut Instance as *mut u8);
|
|
|
|
|
|
|
|
// vmctx
|
|
|
|
// }
|
2018-10-17 11:22:45 +02:00
|
|
|
/// Development stand-in for an imported function: simply doubles its input.
fn fake_fun(x: i32) -> i32 {
    x * 2
}
|
2018-10-15 17:10:49 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
impl Instance {
|
|
|
|
/// Create a new `Instance`.
|
2018-10-17 11:22:45 +02:00
|
|
|
pub fn new(
|
|
|
|
module: &Module,
|
2018-10-17 16:08:31 +02:00
|
|
|
import_object: &ImportObject<&str, &str>,
|
2018-10-17 11:22:45 +02:00
|
|
|
) -> Result<Instance, ErrorKind> {
|
2018-10-14 13:59:11 +02:00
|
|
|
let mut tables: Vec<Vec<usize>> = Vec::new();
|
|
|
|
let mut memories: Vec<LinearMemory> = Vec::new();
|
|
|
|
let mut globals: Vec<u8> = Vec::new();
|
2018-10-15 17:10:49 +02:00
|
|
|
let mut functions: Vec<Vec<u8>> = Vec::new();
|
2018-10-17 11:22:45 +02:00
|
|
|
let mut import_functions: Vec<*const u8> = Vec::new();
|
2018-10-15 20:45:16 +02:00
|
|
|
// let mut code_base: *const () = ptr::null();
|
2018-10-14 13:59:11 +02:00
|
|
|
|
2018-10-15 11:46:04 +02:00
|
|
|
// Instantiate functions
|
2018-10-15 02:48:59 +02:00
|
|
|
{
|
2018-10-17 11:22:45 +02:00
|
|
|
functions.reserve_exact(module.info.functions.len());
|
2018-10-15 02:48:59 +02:00
|
|
|
let isa = isa::lookup(module.info.triple.clone())
|
|
|
|
.unwrap()
|
|
|
|
.finish(module.info.flags.clone());
|
2018-10-17 11:22:45 +02:00
|
|
|
let mut relocations = Vec::new();
|
2018-10-15 02:48:59 +02:00
|
|
|
|
2018-10-15 20:45:16 +02:00
|
|
|
// let mut total_size: usize = 0;
|
2018-10-17 11:22:45 +02:00
|
|
|
// let mut context_and_offsets = Vec::with_capacity(module.info.function_bodies.len());
|
|
|
|
for (module, field) in module.info.imported_funcs.iter() {
|
|
|
|
// let function = &import_object.map(|i| i.get(module).map(|m| m.get(field)));
|
2018-10-17 16:08:31 +02:00
|
|
|
// let mut function = fake_fun as *const u8;
|
|
|
|
let mut function = import_object
|
|
|
|
.get(&module.as_str(), &field.as_str())
|
|
|
|
.ok_or_else(|| {
|
|
|
|
ErrorKind::LinkError(format!(
|
|
|
|
"Imported function {}.{} was not provided in the import_functions",
|
|
|
|
module, field
|
|
|
|
))
|
|
|
|
})?;
|
2018-10-17 11:22:45 +02:00
|
|
|
// println!("GET FUNC {:?}", function);
|
|
|
|
import_functions.push(function);
|
|
|
|
relocations.push(vec![]);
|
|
|
|
}
|
2018-10-15 11:46:04 +02:00
|
|
|
// Compile the functions (from cranelift IR to machine code)
|
2018-10-15 02:48:59 +02:00
|
|
|
for function_body in module.info.function_bodies.values() {
|
2018-10-15 03:03:00 +02:00
|
|
|
let mut func_context = Context::for_function(function_body.to_owned());
|
2018-10-15 17:10:49 +02:00
|
|
|
// func_context
|
|
|
|
// .verify(&*isa)
|
|
|
|
// .map_err(|e| ErrorKind::CompileError(e.to_string()))?;
|
|
|
|
// func_context
|
|
|
|
// .verify_locations(&*isa)
|
|
|
|
// .map_err(|e| ErrorKind::CompileError(e.to_string()))?;
|
|
|
|
// let code_size_offset = func_context
|
|
|
|
// .compile(&*isa)
|
|
|
|
// .map_err(|e| ErrorKind::CompileError(e.to_string()))?
|
|
|
|
// as usize;
|
|
|
|
|
|
|
|
let mut code_buf: Vec<u8> = Vec::new();
|
|
|
|
let mut reloc_sink = RelocSink::new();
|
|
|
|
let mut trap_sink = binemit::NullTrapSink {};
|
|
|
|
|
2018-10-16 03:21:49 +02:00
|
|
|
func_context
|
|
|
|
.compile_and_emit(&*isa, &mut code_buf, &mut reloc_sink, &mut trap_sink)
|
|
|
|
.map_err(|e| ErrorKind::CompileError(e.to_string()))?;
|
2018-10-15 17:10:49 +02:00
|
|
|
protect_codebuf(&code_buf);
|
|
|
|
|
|
|
|
let func_offset = code_buf;
|
|
|
|
functions.push(func_offset);
|
|
|
|
|
2018-10-17 11:22:45 +02:00
|
|
|
// context_and_offsets.push(func_context);
|
2018-10-16 00:04:05 +02:00
|
|
|
relocations.push(reloc_sink.func_relocs);
|
|
|
|
// println!("FUNCTION RELOCATIONS {:?}", reloc_sink.func_relocs)
|
2018-10-15 17:10:49 +02:00
|
|
|
// total_size += code_size_offset;
|
2018-10-15 02:48:59 +02:00
|
|
|
}
|
|
|
|
|
2018-10-16 00:04:05 +02:00
|
|
|
// For each of the functions used, we see what are the calls inside this functions
|
|
|
|
// and relocate each call to the proper memory address.
|
|
|
|
// The relocations are relative to the relocation's address plus four bytes
|
|
|
|
// TODO: Support architectures other than x64, and other reloc kinds.
|
|
|
|
for (i, function_relocs) in relocations.iter().enumerate() {
|
|
|
|
// for r in function_relocs {
|
|
|
|
for (ref reloc, ref reloc_type) in function_relocs {
|
|
|
|
let target_func_address: isize = match reloc_type {
|
|
|
|
RelocationType::Normal(func_index) => {
|
2018-10-17 11:22:45 +02:00
|
|
|
get_function_addr(&FuncIndex::new(*func_index as usize), &import_functions, &functions) as isize
|
2018-10-16 00:04:05 +02:00
|
|
|
},
|
2018-10-18 00:09:04 +02:00
|
|
|
RelocationType::CurrentMemory => {
|
|
|
|
current_memory as isize
|
|
|
|
},
|
|
|
|
RelocationType::GrowMemory => {
|
|
|
|
grow_memory as isize
|
|
|
|
},
|
2018-10-16 00:04:05 +02:00
|
|
|
_ => unimplemented!()
|
|
|
|
// RelocationType::Intrinsic(name) => {
|
|
|
|
// get_abi_intrinsic(name)?
|
|
|
|
// },
|
|
|
|
// RelocationTarget::UserFunc(index) => {
|
|
|
|
// functions[module.defined_func_index(index).expect(
|
|
|
|
// "relocation to imported function not supported yet",
|
|
|
|
// )].as_ptr() as isize
|
|
|
|
// }
|
|
|
|
// RelocationTarget::GrowMemory => grow_memory as isize,
|
|
|
|
// RelocationTarget::CurrentMemory => current_memory as isize,
|
|
|
|
};
|
|
|
|
// print!("FUNCTION {:?}", target_func_address);
|
2018-10-17 11:22:45 +02:00
|
|
|
let func_addr =
|
|
|
|
get_function_addr(&FuncIndex::new(i), &import_functions, &functions);
|
2018-10-16 00:04:05 +02:00
|
|
|
match reloc.reloc {
|
|
|
|
Reloc::Abs8 => unsafe {
|
2018-10-17 11:22:45 +02:00
|
|
|
let reloc_address = func_addr.offset(reloc.offset as isize) as i64;
|
2018-10-16 00:04:05 +02:00
|
|
|
let reloc_addend = reloc.addend;
|
|
|
|
let reloc_abs = target_func_address as i64 + reloc_addend;
|
|
|
|
write_unaligned(reloc_address as *mut i64, reloc_abs);
|
|
|
|
},
|
|
|
|
Reloc::X86PCRel4 => unsafe {
|
2018-10-17 11:22:45 +02:00
|
|
|
let reloc_address = func_addr.offset(reloc.offset as isize) as isize;
|
2018-10-16 00:04:05 +02:00
|
|
|
let reloc_addend = reloc.addend as isize;
|
|
|
|
// TODO: Handle overflow.
|
|
|
|
let reloc_delta_i32 =
|
|
|
|
(target_func_address - reloc_address + reloc_addend) as i32;
|
|
|
|
write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
|
|
|
|
},
|
|
|
|
_ => panic!("unsupported reloc kind"),
|
|
|
|
}
|
|
|
|
// let reloc_address = unsafe {
|
|
|
|
// (target_func_address.to_owned().as_mut_ptr() as *const u8).offset(reloc.offset as isize)
|
|
|
|
// };
|
|
|
|
|
|
|
|
// match reloc.reloc {
|
|
|
|
// Reloc::Abs8 => {
|
|
|
|
// unsafe {
|
|
|
|
// // (reloc_address as *mut usize).write(target_func_address.to_owned().as_ptr() as usize);
|
|
|
|
// }
|
|
|
|
// }
|
|
|
|
// _ => unimplemented!()
|
|
|
|
// }
|
|
|
|
|
|
|
|
// let target_func_address: isize = match r.reloc_target {
|
|
|
|
// RelocationTarget::UserFunc(index) => {
|
|
|
|
// functions[module.defined_func_index(index).expect(
|
|
|
|
// "relocation to imported function not supported yet",
|
|
|
|
// )].as_ptr() as isize
|
|
|
|
// }
|
|
|
|
// RelocationTarget::GrowMemory => grow_memory as isize,
|
|
|
|
// RelocationTarget::CurrentMemory => current_memory as isize,
|
|
|
|
// };
|
|
|
|
|
|
|
|
// let body = &mut functions[i];
|
|
|
|
// match r.reloc {
|
|
|
|
// Reloc::Abs8 => unsafe {
|
|
|
|
// let reloc_address = body.as_mut_ptr().offset(r.offset as isize) as i64;
|
|
|
|
// let reloc_addend = r.addend;
|
|
|
|
// let reloc_abs = target_func_address as i64 + reloc_addend;
|
|
|
|
// write_unaligned(reloc_address as *mut i64, reloc_abs);
|
|
|
|
// },
|
|
|
|
// Reloc::X86PCRel4 => unsafe {
|
|
|
|
// let reloc_address = body.as_mut_ptr().offset(r.offset as isize) as isize;
|
|
|
|
// let reloc_addend = r.addend as isize;
|
|
|
|
// // TODO: Handle overflow.
|
|
|
|
// let reloc_delta_i32 =
|
|
|
|
// (target_func_address - reloc_address + reloc_addend) as i32;
|
|
|
|
// write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
|
|
|
|
// },
|
|
|
|
// _ => panic!("unsupported reloc kind"),
|
|
|
|
// }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-10-15 11:51:38 +02:00
|
|
|
// We only want to allocate in memory if there is more than
|
2018-10-15 15:58:06 +02:00
|
|
|
// 0 functions. Otherwise reserving a 0-sized memory region
|
2018-10-15 11:51:38 +02:00
|
|
|
// cause a panic error
|
2018-10-15 17:10:49 +02:00
|
|
|
// if total_size > 0 {
|
|
|
|
// // Allocate the total memory for this functions
|
|
|
|
// // let map = MmapMut::map_anon(total_size).unwrap();
|
|
|
|
// // let region_start = map.as_ptr() as usize;
|
|
|
|
// // code_base = map.as_ptr() as *const ();
|
|
|
|
|
|
|
|
// // // Emit this functions to memory
|
|
|
|
// for (ref func_context, func_offset) in context_and_offsets.iter() {
|
|
|
|
// let mut trap_sink = TrapSink::new(*func_offset);
|
|
|
|
// let mut reloc_sink = RelocSink::new();
|
|
|
|
// let mut code_buf: Vec<u8> = Vec::new();
|
|
|
|
|
|
|
|
// // let mut func_pointer = as *mut u8;
|
|
|
|
// unsafe {
|
|
|
|
// func_context.emit_to_memory(
|
|
|
|
// &*isa,
|
|
|
|
// &mut code_buf,
|
|
|
|
// &mut reloc_sink,
|
|
|
|
// &mut trap_sink,
|
|
|
|
// );
|
|
|
|
// };
|
|
|
|
// let func_offset = code_buf.as_ptr() as usize;
|
|
|
|
// functions.push(*func_offset);
|
|
|
|
// }
|
|
|
|
|
|
|
|
// // Set protection of this memory region to Read + Execute
|
|
|
|
// // so we are able to execute the functions emitted to memory
|
|
|
|
// // unsafe {
|
|
|
|
// // region::protect(region_start as *mut u8, total_size, region::Protection::ReadExecute)
|
|
|
|
// // .expect("unable to make memory readable+executable");
|
|
|
|
// // }
|
|
|
|
// }
|
2018-10-15 02:48:59 +02:00
|
|
|
}
|
2018-10-15 03:03:00 +02:00
|
|
|
|
2018-10-15 11:46:04 +02:00
|
|
|
// Instantiate tables
|
2018-10-14 13:59:11 +02:00
|
|
|
{
|
2018-10-14 22:10:53 +02:00
|
|
|
// Reserve table space
|
2018-10-14 13:59:11 +02:00
|
|
|
tables.reserve_exact(module.info.tables.len());
|
|
|
|
for table in &module.info.tables {
|
|
|
|
let len = table.entity.size;
|
|
|
|
let mut v = Vec::with_capacity(len);
|
|
|
|
v.resize(len, 0);
|
|
|
|
tables.push(v);
|
|
|
|
}
|
|
|
|
// instantiate tables
|
|
|
|
for table_element in &module.info.table_elements {
|
2018-10-14 23:48:59 +02:00
|
|
|
assert!(
|
|
|
|
table_element.base.is_none(),
|
|
|
|
"globalvalue base not supported yet."
|
|
|
|
);
|
2018-10-14 13:59:11 +02:00
|
|
|
let base = 0;
|
2018-10-11 21:29:36 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
let table = &mut tables[table_element.table_index];
|
|
|
|
for (i, func_index) in table_element.elements.iter().enumerate() {
|
|
|
|
// since the table just contains functions in the MVP
|
|
|
|
// we get the address of the specified function indexes
|
|
|
|
// to populate the table.
|
2018-10-13 15:31:56 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
// let func_index = *elem_index - module.info.imported_funcs.len() as u32;
|
2018-10-17 11:22:45 +02:00
|
|
|
// let func_addr = functions[func_index.index()].as_ptr();
|
|
|
|
let func_addr = get_function_addr(&func_index, &import_functions, &functions);
|
2018-10-14 13:59:11 +02:00
|
|
|
table[base + table_element.offset + i] = func_addr as _;
|
|
|
|
}
|
|
|
|
}
|
2018-10-15 03:03:00 +02:00
|
|
|
}
|
2018-10-13 15:31:56 +02:00
|
|
|
|
2018-10-15 11:46:04 +02:00
|
|
|
// Instantiate memories
|
2018-10-14 13:59:11 +02:00
|
|
|
{
|
|
|
|
// Allocate the underlying memory and initialize it to all zeros.
|
|
|
|
memories.reserve_exact(module.info.memories.len());
|
|
|
|
for memory in &module.info.memories {
|
|
|
|
let memory = memory.entity;
|
2018-10-14 23:48:59 +02:00
|
|
|
let v =
|
|
|
|
LinearMemory::new(memory.pages_count as u32, memory.maximum.map(|m| m as u32));
|
2018-10-14 13:59:11 +02:00
|
|
|
memories.push(v);
|
|
|
|
}
|
2018-10-14 22:23:48 +02:00
|
|
|
for init in &module.info.data_initializers {
|
2018-10-14 13:59:11 +02:00
|
|
|
debug_assert!(init.base.is_none(), "globalvar base not supported yet");
|
|
|
|
let mem_mut = memories[init.memory_index].as_mut();
|
|
|
|
let to_init = &mut mem_mut[init.offset..init.offset + init.data.len()];
|
|
|
|
to_init.copy_from_slice(&init.data);
|
|
|
|
}
|
2018-10-15 03:03:00 +02:00
|
|
|
}
|
2018-10-13 15:31:56 +02:00
|
|
|
|
2018-10-15 11:46:04 +02:00
|
|
|
// Instantiate Globals
|
2018-10-14 13:59:11 +02:00
|
|
|
{
|
|
|
|
let globals_count = module.info.globals.len();
|
|
|
|
// Allocate the underlying memory and initialize it to zeros
|
|
|
|
let globals_data_size = globals_count * 8;
|
|
|
|
globals.resize(globals_data_size, 0);
|
|
|
|
|
|
|
|
// cast the globals slice to a slice of i64.
|
2018-10-14 23:48:59 +02:00
|
|
|
let globals_data = unsafe {
|
|
|
|
slice::from_raw_parts_mut(globals.as_mut_ptr() as *mut i64, globals_count)
|
|
|
|
};
|
2018-10-14 13:59:11 +02:00
|
|
|
for (i, global) in module.info.globals.iter().enumerate() {
|
|
|
|
let value: i64 = match global.entity.initializer {
|
|
|
|
GlobalInit::I32Const(n) => n as _,
|
|
|
|
GlobalInit::I64Const(n) => n,
|
|
|
|
GlobalInit::F32Const(f) => unsafe { mem::transmute(f as f64) },
|
|
|
|
GlobalInit::F64Const(f) => unsafe { mem::transmute(f) },
|
|
|
|
_ => unimplemented!(),
|
|
|
|
};
|
2018-10-14 23:48:59 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
globals_data[i] = value;
|
|
|
|
}
|
2018-10-15 03:03:00 +02:00
|
|
|
}
|
2018-10-13 15:31:56 +02:00
|
|
|
|
2018-10-16 03:21:49 +02:00
|
|
|
let start_func: Option<FuncIndex> =
|
|
|
|
module
|
|
|
|
.info
|
|
|
|
.start_func
|
|
|
|
.or_else(|| match module.info.exports.get("main") {
|
|
|
|
Some(Export::Function(index)) => Some(index.to_owned()),
|
|
|
|
_ => None,
|
|
|
|
});
|
2018-10-15 20:45:16 +02:00
|
|
|
|
2018-10-15 02:48:59 +02:00
|
|
|
Ok(Instance {
|
2018-10-15 17:10:49 +02:00
|
|
|
tables: Arc::new(tables.into_iter().collect()), // tables.into_iter().map(|table| RwLock::new(table)).collect()),
|
2018-10-14 13:59:11 +02:00
|
|
|
memories: Arc::new(memories.into_iter().collect()),
|
|
|
|
globals: globals,
|
2018-10-15 15:58:06 +02:00
|
|
|
functions: functions,
|
2018-10-17 11:22:45 +02:00
|
|
|
import_functions: import_functions,
|
|
|
|
start_func: start_func,
|
2018-10-15 20:45:16 +02:00
|
|
|
// code_base: code_base,
|
2018-10-15 02:48:59 +02:00
|
|
|
})
|
2018-10-14 13:59:11 +02:00
|
|
|
}
|
2018-10-13 15:31:56 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
/// Returns a new shared handle to this instance's linear memories
/// (a cheap reference-count bump, not a deep copy).
pub fn memories(&self) -> Arc<Vec<LinearMemory>> {
    Arc::clone(&self.memories)
}
|
2018-10-16 13:27:26 +02:00
|
|
|
pub fn get_function_pointer(&self, func_index: FuncIndex) -> *const u8 {
|
2018-10-17 16:08:31 +02:00
|
|
|
get_function_addr(&func_index, &self.import_functions, &self.functions)
|
2018-10-16 13:27:26 +02:00
|
|
|
}
|
2018-10-15 13:45:44 +02:00
|
|
|
|
2018-10-17 11:22:45 +02:00
|
|
|
// pub fn is_imported_function(&self, func_index: FuncIndex) -> bool {
|
|
|
|
// func_index.index() < self.import_functions.len()
|
|
|
|
// }
|
|
|
|
|
2018-10-15 11:46:04 +02:00
|
|
|
/// Invoke a WebAssembly function given a FuncIndex and the
/// arguments that the function should be called with
///
/// NOTE(review): despite the name, nothing is invoked here — this returns
/// a zero-argument `fn` pointer transmuted from the compiled code. The
/// caller is responsible for `fn() -> T` actually matching the wasm
/// function's signature; calling a mismatched pointer is undefined
/// behavior.
pub fn get_function<T>(&self, func_index: FuncIndex) -> (fn() -> T) {
    // Rather than writing inline assembly to jump to the code region, we use the fact that
    // the Rust ABI for calling a function with no arguments and no return matches the one of
    // the generated code. Thanks to this, we can transmute the code region into a first-class
    // Rust function and call it.
    let func_pointer = get_function_addr(&func_index, &self.import_functions, &self.functions);
    unsafe {
        // SAFETY (claimed by the comment above): the emitted code follows
        // the same calling convention as a Rust `fn()` — TODO confirm this
        // holds for return values of every `T` used by callers.
        let func = mem::transmute::<_, fn() -> T>(func_pointer);
        func
    }
}
|
|
|
|
|
2018-10-16 00:04:05 +02:00
|
|
|
/// Calls the function at `func_index` and returns its `i32` result.
/// `_args` is accepted for future use but currently ignored.
pub fn invoke(&self, func_index: FuncIndex, _args: Vec<u8>) -> i32 {
    let callee: fn() -> i32 = self.get_function(func_index);
    let ret = callee();
    println!("RESULT {:?}", ret);
    ret
}
|
|
|
|
|
|
|
|
pub fn start(&self) {
|
|
|
|
if let Some(func_index) = self.start_func {
|
|
|
|
// let vmctx: &VmCtx = ptr::null();
|
|
|
|
let func: fn() = self.get_function(func_index);
|
|
|
|
func()
|
|
|
|
}
|
|
|
|
}
|
2018-10-15 15:58:06 +02:00
|
|
|
|
|
|
|
// pub fn generate_context(&mut self) -> &VmCtx {
|
|
|
|
// let memories: Vec<UncheckedSlice<u8>> = self.memories.iter()
|
|
|
|
// .map(|mem| mem.into())
|
|
|
|
// .collect();
|
|
|
|
|
|
|
|
// let tables: Vec<BoundedSlice<usize>> = self.tables.iter()
|
|
|
|
// .map(|table| table.write()[..].into())
|
|
|
|
// .collect();
|
2018-10-16 03:21:49 +02:00
|
|
|
|
2018-10-15 15:58:06 +02:00
|
|
|
// let globals: UncheckedSlice<u8> = self.globals[..].into();
|
|
|
|
|
|
|
|
// assert!(memories.len() >= 1, "modules must have at least one memory");
|
|
|
|
// // the first memory has a space of `mem::size_of::<VmCtxData>()` rounded
|
|
|
|
// // up to the 4KiB before it. We write the VmCtxData into that.
|
|
|
|
// let data = VmCtxData {
|
|
|
|
// globals: globals,
|
|
|
|
// memories: memories[1..].into(),
|
|
|
|
// tables: tables[..].into(),
|
|
|
|
// user_data: UserData {
|
|
|
|
// // process,
|
|
|
|
// instance,
|
|
|
|
// },
|
|
|
|
// phantom: PhantomData,
|
|
|
|
// };
|
|
|
|
|
|
|
|
// let main_heap_ptr = memories[0].as_mut_ptr() as *mut VmCtxData;
|
|
|
|
// unsafe {
|
|
|
|
// main_heap_ptr
|
|
|
|
// .sub(1)
|
|
|
|
// .write(data);
|
|
|
|
// &*(main_heap_ptr as *const VmCtx)
|
|
|
|
// }
|
|
|
|
// }
|
|
|
|
|
2018-10-14 20:37:42 +02:00
|
|
|
// pub fn start_func(&self) -> extern fn(&VmCtx) {
|
|
|
|
// self.start_func
|
|
|
|
// }
|
2018-10-14 13:59:11 +02:00
|
|
|
}
|
2018-10-13 15:31:56 +02:00
|
|
|
|
2018-10-14 13:59:11 +02:00
|
|
|
impl Clone for Instance {
|
|
|
|
fn clone(&self) -> Instance {
|
|
|
|
Instance {
|
|
|
|
tables: Arc::clone(&self.tables),
|
|
|
|
memories: Arc::clone(&self.memories),
|
|
|
|
globals: self.globals.clone(),
|
2018-10-15 15:58:06 +02:00
|
|
|
functions: self.functions.clone(),
|
2018-10-15 20:45:16 +02:00
|
|
|
start_func: self.start_func.clone(),
|
2018-10-17 11:22:45 +02:00
|
|
|
import_functions: self.import_functions.clone(),
|
2018-10-15 20:45:16 +02:00
|
|
|
// code_base: self.code_base,
|
2018-10-14 13:59:11 +02:00
|
|
|
}
|
|
|
|
}
|
2018-10-11 21:29:36 +02:00
|
|
|
}
|
2018-10-16 00:04:05 +02:00
|
|
|
|
|
|
|
/// Relocation target for the wasm `grow_memory` intrinsic.
///
/// Currently a stub: it always returns 0 and does not grow anything.
extern "C" fn grow_memory(size: u32, memory_index: u32, vmctx: *mut *mut u8) -> u32 {
    // TODO: recover the owning `Instance` from `vmctx` and grow the
    // linear memory at `memory_index` by `size` pages.
    let _ = (size, memory_index, vmctx);
    0
}
|
|
|
|
|
|
|
|
/// Relocation target for the wasm `current_memory` intrinsic.
///
/// Currently a stub: it always reports 0 pages.
extern "C" fn current_memory(memory_index: u32, vmctx: *mut *mut u8) -> u32 {
    // TODO: recover the owning `Instance` from `vmctx` and return the
    // current page count of the linear memory at `memory_index`.
    let _ = (memory_index, vmctx);
    0
}
|