Lachlan Sneff
2019-02-19 15:36:22 -08:00
parent 3c7dc200fa
commit 82eea00a02
20 changed files with 267 additions and 257 deletions

View File

@ -23,24 +23,16 @@ libc = "0.2.48"
# Dependencies for caching.
[dependencies.serde]
version = "1.0"
optional = true
[dependencies.serde_derive]
version = "1.0"
optional = true
[dependencies.serde_bytes]
version = "0.10"
optional = true
# [dependencies.bincode]
# version = "1.0.1"
# optional = true
[dependencies.serde-bench]
version = "0.0.7"
optional = true
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3", features = ["errhandlingapi", "minwindef", "minwinbase", "winnt"] }
wasmer-win-exception-handler = { path = "../win-exception-handler", version = "0.0.1" }
[features]
cache = ["serde", "serde_derive", "serde_bytes", "serde-bench", "wasmer-runtime-core/cache"]
debug = ["wasmer-runtime-core/debug"]

View File

@ -32,8 +32,7 @@ impl CacheGenerator {
impl CacheGen for CacheGenerator {
fn generate_cache(&self, module: &ModuleInner) -> Result<(Box<ModuleInfo>, Box<[u8]>, Arc<Memory>), Error> {
let info = Box::new(module.info.clone());
Err(Error::Unknown("".to_string()))
Ok((info, self.backend_cache.into_backend_data()?.into_boxed_slice(), Arc::clone(&self.memory)))
}
}
@ -54,18 +53,26 @@ pub struct BackendCache {
impl BackendCache {
pub fn from_cache(cache: Cache) -> Result<(ModuleInfo, Memory, Self), Error> {
let (info, backend_data, compiled_code) = cache.consume();
let (info, backend_data, compiled_code_arc) = cache.consume();
let backend_cache = deserialize(backend_data.as_slice())
// If this is the only reference to this Arc, move the memory out.
// Otherwise, clone the memory to a new location. This could take a long time,
// depending on the throughput of your memcpy implementation.
let compiled_code = match Arc::try_unwrap(compiled_code_arc) {
Ok(code) => code,
Err(arc) => (*arc).clone(),
};
let backend_cache = deserialize(&backend_data)
.map_err(|e| Error::DeserializeError(e.to_string()))?;
Ok((info, compiled_code, backend_cache))
}
pub fn into_backend_data(self) -> Result<Vec<u8>, Error> {
pub fn into_backend_data(&self) -> Result<Vec<u8>, Error> {
let mut buffer = Vec::new();
serialize(&mut buffer, &self).map_err(|e| Error::SerializeError(e.to_string()))?;
serialize(&mut buffer, self).map_err(|e| Error::SerializeError(e.to_string()))?;
Ok(buffer)
}
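
The unwrap-or-clone step added to `from_cache` above avoids copying the compiled code when the cache holds the last reference to it. A minimal standalone sketch of that pattern, using a plain `Vec<u8>` as a stand-in for the cached memory (illustrative only, not part of this commit):

    use std::sync::Arc;

    fn take_or_clone(shared: Arc<Vec<u8>>) -> Vec<u8> {
        // If this is the last strong reference, move the buffer out without copying.
        // Otherwise fall back to a clone, which costs one memcpy of the buffer.
        match Arc::try_unwrap(shared) {
            Ok(buf) => buf,
            Err(arc) => (*arc).clone(),
        }
    }

    fn main() {
        let sole_owner = Arc::new(vec![1u8, 2, 3]);
        let moved = take_or_clone(sole_owner); // moved out, no copy

        let shared = Arc::new(vec![4u8, 5, 6]);
        let _second_owner = Arc::clone(&shared);
        let copied = take_or_clone(shared); // another owner exists, so this clones
        assert_eq!((moved.len(), copied.len()), (3, 3));
    }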

View File

@ -1,4 +1,4 @@
#[cfg(feature = "cache")]
mod cache;
mod func_env;
mod libcalls;
@ -14,7 +14,7 @@ use cranelift_codegen::{
settings::{self, Configurable},
};
use target_lexicon::Triple;
#[cfg(feature = "cache")]
use wasmer_runtime_core::{
backend::sys::Memory,
cache::{Cache, Error as CacheError},
@ -25,10 +25,10 @@ use wasmer_runtime_core::{
error::{CompileError, CompileResult},
module::ModuleInner,
};
#[cfg(feature = "cache")]
#[macro_use]
extern crate serde_derive;
#[cfg(feature = "cache")]
extern crate serde;
use wasmparser::{self, WasmDecoder};
@ -57,12 +57,12 @@ impl Compiler for CraneliftCompiler {
}
/// Create a wasmer Module from an already-compiled cache.
#[cfg(feature = "cache")]
unsafe fn from_cache(&self, cache: Cache, _: Token) -> Result<ModuleInner, CacheError> {
module::Module::from_cache(cache)
}
// #[cfg(feature = "cache")]
//
// fn compile_to_backend_cache_data(
// &self,
// wasm: &[u8],
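
For context, the attributes removed above are Cargo feature gates: after this change the cache module, the serde crates, and `from_cache` are compiled unconditionally. A small self-contained illustration of how such a gate behaves (names are illustrative, not from this crate):

    // With the gate, this module only exists when the crate is built with
    // `--features cache`; once the gate is removed it is always compiled.
    #[cfg(feature = "cache")]
    mod gated {
        pub fn hello() -> &'static str {
            "cache feature enabled"
        }
    }

    #[cfg(feature = "cache")]
    fn main() {
        println!("{}", gated::hello());
    }

    #[cfg(not(feature = "cache"))]
    fn main() {
        println!("built without the cache feature");
    }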

View File

@ -1,4 +1,4 @@
#[cfg(feature = "cache")]
use crate::cache::{BackendCache, CacheGenerator};
use crate::{resolver::FuncResolverBuilder, signal::Caller, trampoline::Trampolines};
@ -8,7 +8,7 @@ use cranelift_wasm;
use hashbrown::HashMap;
use std::sync::Arc;
#[cfg(feature = "cache")]
use wasmer_runtime_core::{
backend::sys::Memory,
cache::{Cache, Error as CacheError},
@ -17,13 +17,15 @@ use wasmer_runtime_core::{
use wasmer_runtime_core::{
backend::Backend,
error::CompileResult,
module::{ModuleInfo, ModuleInner, StringTable, WasmHash},
module::{ModuleInfo, ModuleInner, StringTable},
structures::{Map, TypedIndex},
types::{
FuncIndex, FuncSig, GlobalIndex, LocalFuncIndex, MemoryIndex, SigIndex, TableIndex, Type,
},
};
use wasmer_runtime_core::module::WasmHash;
/// This contains all of the items in a `ModuleInner` except the `func_resolver`.
pub struct Module {
pub info: ModuleInfo,
@ -56,6 +58,7 @@ impl Module {
namespace_table: StringTable::new(),
name_table: StringTable::new(),
wasm_hash: WasmHash::generate(wasm),
},
}
@ -77,37 +80,19 @@ impl Module {
let protected_caller =
Caller::new(&self.info, handler_data, trampolines);
let cache_gen = Box::new(CacheGenerator::new(backend_cache, Arc::clone(&func_resolver.memory)));
Ok(ModuleInner {
func_resolver: Box::new(func_resolver),
protected_caller: Box::new(protected_caller),
cache_gen,
cache_gen,
info: self.info,
})
}
// #[cfg(feature = "cache")]
// pub fn compile_to_backend_cache(
// self,
// isa: &isa::TargetIsa,
// functions: Map<LocalFuncIndex, ir::Function>,
// ) -> CompileResult<(ModuleInfo, BackendCache, Memory)> {
// let (func_resolver_builder, handler_data) =
// FuncResolverBuilder::new(isa, functions, &self.info)?;
// let trampolines = Trampolines::new(isa, &self.info);
// let trampoline_cache = trampolines.to_trampoline_cache();
// let (backend_cache, compiled_code) =
// func_resolver_builder.to_backend_cache(trampoline_cache, handler_data);
// Ok((self.info, backend_cache, compiled_code))
// }
#[cfg(feature = "cache")]
pub fn from_cache(cache: Cache) -> Result<ModuleInner, CacheError> {
let (info, compiled_code, backend_cache) = BackendCache::from_cache(cache)?;
@ -120,12 +105,14 @@ impl Module {
.map_err(|e| CacheError::Unknown(format!("{:?}", e)))?;
let protected_caller = Caller::new(&info, handler_data, trampolines);
let cache_gen = Box::new(CacheGenerator::new(backend_cache, Arc::clone(&func_resolver.memory)));
Ok(ModuleInner {
func_resolver: Box::new(func_resolver),
protected_caller: Box::new(protected_caller),
cache_gen,
cache_gen,
info,
})
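
Both construction paths above hand the same compiled-code memory to the function resolver and the cache generator via `Arc::clone`, so either owner keeps it alive. A rough sketch of that shared-ownership shape with a stand-in `CodeMemory` type (hypothetical, not this crate's `Memory`):

    use std::sync::Arc;

    // Stand-in for the backend's compiled-code mapping.
    struct CodeMemory(Vec<u8>);

    struct Resolver {
        memory: Arc<CodeMemory>,
    }

    struct CacheGenerator {
        memory: Arc<CodeMemory>,
    }

    fn main() {
        let memory = Arc::new(CodeMemory(vec![0; 4096]));
        // Both owners share the same allocation; dropping one does not free it.
        let resolver = Resolver { memory: Arc::clone(&memory) };
        let cache_gen = CacheGenerator { memory };
        assert_eq!(resolver.memory.0.len(), cache_gen.memory.0.len());
    }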

View File

@ -22,7 +22,7 @@ pub mod call_names {
pub const DYNAMIC_MEM_SIZE: u32 = 5;
}
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Reloc {
Abs8,
@ -30,7 +30,7 @@ pub enum Reloc {
X86CallPCRel4,
}
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Copy, Clone)]
pub enum LibCall {
Probestack,
@ -44,7 +44,7 @@ pub enum LibCall {
NearestF64,
}
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Clone)]
pub struct ExternalRelocation {
/// The relocation code.
@ -66,7 +66,7 @@ pub struct LocalRelocation {
pub target: FuncIndex,
}
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Clone, Copy)]
pub enum VmCallKind {
StaticMemoryGrow,
@ -79,7 +79,7 @@ pub enum VmCallKind {
DynamicMemorySize,
}
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Clone, Copy)]
pub enum VmCall {
Local(VmCallKind),
@ -87,7 +87,7 @@ pub enum VmCall {
}
/// Specify the type of relocation
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Clone)]
pub enum RelocationType {
Intrinsic(String),
@ -218,7 +218,7 @@ impl binemit::RelocSink for RelocSink {
}
}
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Clone, Copy)]
pub enum TrapCode {
StackOverflow,
@ -244,7 +244,7 @@ impl RelocSink {
}
}
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
#[derive(Debug, Clone, Copy)]
pub struct TrapData {
pub trapcode: TrapCode,
@ -253,7 +253,7 @@ pub struct TrapData {
/// Simple implementation of a TrapSink
/// that saves the info for later.
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
#[derive(Serialize, Deserialize)]
pub struct TrapSink {
trap_datas: Vec<(usize, TrapData)>,
}
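
The repeated change in this file swaps a feature-conditional serde derive for an unconditional one, making serde a hard dependency of these relocation and trap types. A short illustration of the two forms on a throwaway type (illustrative only; assumes `serde` and `serde_derive` are dependencies, as in the Cargo.toml above):

    #[macro_use]
    extern crate serde_derive;

    // Before: serde impls are generated only when built with `--features cache`.
    #[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
    #[derive(Debug, Clone, Copy)]
    pub struct GatedPoint {
        pub x: u32,
        pub y: u32,
    }

    // After: serde impls are always generated.
    #[derive(Serialize, Deserialize)]
    #[derive(Debug, Clone, Copy)]
    pub struct AlwaysPoint {
        pub x: u32,
        pub y: u32,
    }

    fn main() {}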

View File

@ -1,4 +1,4 @@
#[cfg(feature = "cache")]
use crate::{
cache::{BackendCache, TrampolineCache},
trampoline::Trampolines,
@ -20,7 +20,7 @@ use std::{
cell::UnsafeCell,
sync::Arc,
};
#[cfg(feature = "cache")]
use wasmer_runtime_core::cache::Error as CacheError;
use wasmer_runtime_core::{
self,
@ -43,16 +43,24 @@ extern "C" {
pub fn __chkstk();
}
fn lookup_func(map: &SliceMap<LocalFuncIndex, usize>, memory: &Memory, local_func_index: LocalFuncIndex) -> Option<NonNull<vm::Func>> {
let offset = *map.get(local_func_index)?;
let ptr = unsafe { memory.as_ptr().add(offset) };
NonNull::new(ptr).map(|nonnull| nonnull.cast())
}
#[allow(dead_code)]
pub struct FuncResolverBuilder {
resolver: FuncResolver,
map: Map<LocalFuncIndex, usize>,
memory: Memory,
local_relocs: Map<LocalFuncIndex, Box<[LocalRelocation]>>,
external_relocs: Map<LocalFuncIndex, Box<[ExternalRelocation]>>,
import_len: usize,
}
impl FuncResolverBuilder {
#[cfg(feature = "cache")]
pub fn new_from_backend_cache(
backend_cache: BackendCache,
mut code: Memory,
@ -68,10 +76,8 @@ impl FuncResolverBuilder {
Ok((
Self {
resolver: FuncResolver {
map: backend_cache.offsets,
memory: Arc::new(UnsafeCell::new(code)),
},
map: backend_cache.offsets,
memory: code,
local_relocs: Map::new(),
external_relocs: backend_cache.external_relocs,
import_len: info.imported_functions.len(),
@ -155,7 +161,8 @@ impl FuncResolverBuilder {
let handler_data = HandlerData::new(Arc::new(trap_sink), memory.as_ptr() as _, memory.size());
let mut func_resolver_builder = Self {
resolver: FuncResolver { map, memory: Arc::new(UnsafeCell::new(memory)) },
map,
memory,
local_relocs,
external_relocs,
import_len: info.imported_functions.len(),
@ -171,11 +178,11 @@ impl FuncResolverBuilder {
for ref reloc in relocs.iter() {
let local_func_index = LocalFuncIndex::new(reloc.target.index() - self.import_len);
let target_func_address =
self.resolver.lookup(local_func_index).unwrap().as_ptr() as usize;
lookup_func(&self.map, &self.memory, local_func_index).unwrap().as_ptr() as usize;
// We need the address of the current function
// because these calls are relative.
let func_addr = self.resolver.lookup(index).unwrap().as_ptr() as usize;
let func_addr = lookup_func(&self.map, &self.memory, index).unwrap().as_ptr() as usize;
unsafe {
let reloc_address = func_addr + reloc.offset as usize;
@ -190,127 +197,126 @@ impl FuncResolverBuilder {
}
pub fn finalize(
self,
mut self,
signatures: &SliceMap<SigIndex, Arc<FuncSig>>,
trampolines: Arc<Trampolines>,
handler_data: HandlerData,
) -> CompileResult<(FuncResolver, BackendCache)> {
{
let mut memory = unsafe { (*self.resolver.memory.get()) };
for (index, relocs) in self.external_relocs.iter() {
for ref reloc in relocs.iter() {
let target_func_address: isize = match reloc.target {
RelocationType::LibCall(libcall) => match libcall {
LibCall::CeilF32 => libcalls::ceilf32 as isize,
LibCall::FloorF32 => libcalls::floorf32 as isize,
LibCall::TruncF32 => libcalls::truncf32 as isize,
LibCall::NearestF32 => libcalls::nearbyintf32 as isize,
LibCall::CeilF64 => libcalls::ceilf64 as isize,
LibCall::FloorF64 => libcalls::floorf64 as isize,
LibCall::TruncF64 => libcalls::truncf64 as isize,
LibCall::NearestF64 => libcalls::nearbyintf64 as isize,
#[cfg(all(target_pointer_width = "64", target_os = "windows"))]
LibCall::Probestack => __chkstk as isize,
#[cfg(not(target_os = "windows"))]
LibCall::Probestack => __rust_probestack as isize,
},
RelocationType::Intrinsic(ref name) => match name.as_str() {
"i32print" => i32_print as isize,
"i64print" => i64_print as isize,
"f32print" => f32_print as isize,
"f64print" => f64_print as isize,
"strtdbug" => start_debug as isize,
"enddbug" => end_debug as isize,
_ => Err(CompileError::InternalError {
msg: format!("unexpected intrinsic: {}", name),
})?,
},
RelocationType::VmCall(vmcall) => match vmcall {
VmCall::Local(kind) => match kind {
VmCallKind::StaticMemoryGrow => vmcalls::local_static_memory_grow as _,
VmCallKind::StaticMemorySize => vmcalls::local_static_memory_size as _,
for (index, relocs) in self.external_relocs.iter() {
for ref reloc in relocs.iter() {
let target_func_address: isize = match reloc.target {
RelocationType::LibCall(libcall) => match libcall {
LibCall::CeilF32 => libcalls::ceilf32 as isize,
LibCall::FloorF32 => libcalls::floorf32 as isize,
LibCall::TruncF32 => libcalls::truncf32 as isize,
LibCall::NearestF32 => libcalls::nearbyintf32 as isize,
LibCall::CeilF64 => libcalls::ceilf64 as isize,
LibCall::FloorF64 => libcalls::floorf64 as isize,
LibCall::TruncF64 => libcalls::truncf64 as isize,
LibCall::NearestF64 => libcalls::nearbyintf64 as isize,
#[cfg(all(target_pointer_width = "64", target_os = "windows"))]
LibCall::Probestack => __chkstk as isize,
#[cfg(not(target_os = "windows"))]
LibCall::Probestack => __rust_probestack as isize,
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::local_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::local_dynamic_memory_size as _
}
},
RelocationType::Intrinsic(ref name) => match name.as_str() {
"i32print" => i32_print as isize,
"i64print" => i64_print as isize,
"f32print" => f32_print as isize,
"f64print" => f64_print as isize,
"strtdbug" => start_debug as isize,
"enddbug" => end_debug as isize,
_ => Err(CompileError::InternalError {
msg: format!("unexpected intrinsic: {}", name),
})?,
},
RelocationType::VmCall(vmcall) => match vmcall {
VmCall::Local(kind) => match kind {
VmCallKind::StaticMemoryGrow => vmcalls::local_static_memory_grow as _,
VmCallKind::StaticMemorySize => vmcalls::local_static_memory_size as _,
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::local_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::local_dynamic_memory_size as _
}
},
VmCall::Import(kind) => match kind {
VmCallKind::StaticMemoryGrow => {
vmcalls::imported_static_memory_grow as _
}
VmCallKind::StaticMemorySize => {
vmcalls::imported_static_memory_size as _
}
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::imported_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::imported_dynamic_memory_size as _
}
},
},
RelocationType::Signature(sig_index) => {
let sig_index =
SigRegistry.lookup_sig_index(Arc::clone(&signatures[sig_index]));
sig_index.index() as _
}
};
// We need the address of the current function
// because some of these calls are relative.
let func_addr = self.resolver.lookup(index).unwrap().as_ptr();
// Determine relocation type and apply relocation.
match reloc.reloc {
Reloc::Abs8 => {
let ptr_to_write = (target_func_address as u64)
.checked_add(reloc.addend as u64)
.unwrap();
let empty_space_offset = self.resolver.map[index] + reloc.offset as usize;
let ptr_slice = unsafe {
&mut memory.as_slice_mut()
[empty_space_offset..empty_space_offset + 8]
};
LittleEndian::write_u64(ptr_slice, ptr_to_write);
}
Reloc::X86PCRel4 | Reloc::X86CallPCRel4 => unsafe {
let reloc_address = (func_addr as usize) + reloc.offset as usize;
let reloc_delta = target_func_address
.wrapping_sub(reloc_address as isize)
.wrapping_add(reloc.addend as isize);
write_unaligned(reloc_address as *mut u32, reloc_delta as u32);
VmCall::Import(kind) => match kind {
VmCallKind::StaticMemoryGrow => {
vmcalls::imported_static_memory_grow as _
}
VmCallKind::StaticMemorySize => {
vmcalls::imported_static_memory_size as _
}
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::imported_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::imported_dynamic_memory_size as _
}
},
},
RelocationType::Signature(sig_index) => {
let sig_index =
SigRegistry.lookup_sig_index(Arc::clone(&signatures[sig_index]));
sig_index.index() as _
}
};
// We need the address of the current function
// because some of these calls are relative.
let func_addr = lookup_func(&self.map, &self.memory, index).unwrap().as_ptr() as usize;
// Determine relocation type and apply relocation.
match reloc.reloc {
Reloc::Abs8 => {
let ptr_to_write = (target_func_address as u64)
.checked_add(reloc.addend as u64)
.unwrap();
let empty_space_offset = self.map[index] + reloc.offset as usize;
let ptr_slice = unsafe {
&mut self.memory.as_slice_mut()
[empty_space_offset..empty_space_offset + 8]
};
LittleEndian::write_u64(ptr_slice, ptr_to_write);
}
Reloc::X86PCRel4 | Reloc::X86CallPCRel4 => unsafe {
let reloc_address = (func_addr as usize) + reloc.offset as usize;
let reloc_delta = target_func_address
.wrapping_sub(reloc_address as isize)
.wrapping_add(reloc.addend as isize);
write_unaligned(reloc_address as *mut u32, reloc_delta as u32);
},
}
}
}
unsafe {
memory
.protect(.., Protect::ReadExec)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
}
unsafe {
self.memory
.protect(.., Protect::ReadExec)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
}
let backend_cache = BackendCache {
external_relocs: self.external_relocs.clone(),
offsets: self.resolver.map.clone(),
offsets: self.map.clone(),
trap_sink: handler_data.trap_data,
trampolines: trampolines.to_trampoline_cache(),
};
Ok((self.resolver, backend_cache))
Ok((FuncResolver {
map: self.map,
memory: Arc::new(self.memory),
}, backend_cache))
}
}
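
The `Reloc::X86PCRel4 | Reloc::X86CallPCRel4` arm above computes a 32-bit displacement relative to the patch site and writes it unaligned into the code. A minimal sketch of that arithmetic over an ordinary byte buffer (stand-in names, not this crate's types):

    use std::ptr::write_unaligned;

    // Patch a 4-byte PC-relative relocation at `offset` inside `code` so the
    // patched instruction reaches `target_addr` (plus `addend`) relative to the
    // relocation's own address.
    fn apply_pcrel4(code: &mut [u8], offset: usize, target_addr: usize, addend: isize) {
        let reloc_address = unsafe { code.as_mut_ptr().add(offset) } as usize;
        let delta = (target_addr as isize)
            .wrapping_sub(reloc_address as isize)
            .wrapping_add(addend);
        // The patch site is not necessarily 4-byte aligned, hence the unaligned write.
        unsafe { write_unaligned(reloc_address as *mut u32, delta as u32) };
    }

    fn main() {
        let mut code = vec![0u8; 16];
        apply_pcrel4(&mut code, 4, 0x1000, 0);
    }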
@ -320,16 +326,7 @@ unsafe impl Send for FuncResolver {}
/// Resolves a function index to a function address.
pub struct FuncResolver {
map: Map<LocalFuncIndex, usize>,
pub(crate) memory: Arc<UnsafeCell<Memory>>,
}
impl FuncResolver {
fn lookup(&self, local_func_index: LocalFuncIndex) -> Option<NonNull<vm::Func>> {
let offset = *self.map.get(local_func_index)?;
let ptr = unsafe { (*self.memory.get()).as_ptr().add(offset) };
NonNull::new(ptr).map(|nonnull| nonnull.cast())
}
pub(crate) memory: Arc<Memory>,
}
// Implements FuncResolver trait.
@ -339,7 +336,7 @@ impl backend::FuncResolver for FuncResolver {
_module: &wasmer_runtime_core::module::ModuleInner,
index: LocalFuncIndex,
) -> Option<NonNull<vm::Func>> {
self.lookup(index)
lookup_func(&self.map, &self.memory, index)
}
}
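
The new free function `lookup_func` resolves a local function index to a typed code pointer from an offset table plus the base of the executable mapping. A self-contained sketch of that lookup over plain slices (stand-in types; the real code uses `SliceMap` and `Memory`):

    use std::ptr::NonNull;

    // Stand-in for the backend's function type; the real code casts to `vm::Func`.
    struct Func;

    // offsets[index] is the byte offset of that function inside the mapping
    // starting at `base`; out-of-range indices resolve to `None`.
    fn lookup(offsets: &[usize], base: *mut u8, index: usize) -> Option<NonNull<Func>> {
        let offset = *offsets.get(index)?;
        // Safety requirement on the caller: base + offset stays inside the mapping.
        let ptr = unsafe { base.add(offset) };
        NonNull::new(ptr).map(|p| p.cast())
    }

    fn main() {
        let mut fake_code = vec![0u8; 64];
        let offsets = [0usize, 16, 48];
        assert!(lookup(&offsets, fake_code.as_mut_ptr(), 2).is_some());
        assert!(lookup(&offsets, fake_code.as_mut_ptr(), 3).is_none());
    }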

View File

@ -1,4 +1,4 @@
#[cfg(feature = "cache")]
use crate::cache::TrampolineCache;
use cranelift_codegen::{
binemit::{NullTrapSink, Reloc, RelocSink},
@ -33,7 +33,7 @@ pub struct Trampolines {
}
impl Trampolines {
#[cfg(feature = "cache")]
pub fn from_trampoline_cache(cache: TrampolineCache) -> Self {
// pub struct TrampolineCache {
// #[serde(with = "serde_bytes")]
@ -57,7 +57,7 @@ impl Trampolines {
}
}
#[cfg(feature = "cache")]
pub fn to_trampoline_cache(&self) -> TrampolineCache {
let mut code = vec![0; self.memory.size()];
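
The truncated `to_trampoline_cache` above begins by copying the trampoline memory into an owned byte vector so it can be serialized. A rough sketch of that snapshot step; `FakeMemory` is a stand-in assuming only that the real memory type exposes a size and a base pointer, as used elsewhere in this diff:

    // Stand-in for an executable memory mapping.
    struct FakeMemory {
        bytes: Vec<u8>,
    }

    impl FakeMemory {
        fn size(&self) -> usize {
            self.bytes.len()
        }
        fn as_ptr(&self) -> *const u8 {
            self.bytes.as_ptr()
        }
    }

    // Copy the mapped code into an owned buffer that can be serialized into a cache.
    fn snapshot(memory: &FakeMemory) -> Vec<u8> {
        let mut code = vec![0u8; memory.size()];
        unsafe {
            std::ptr::copy_nonoverlapping(memory.as_ptr(), code.as_mut_ptr(), memory.size());
        }
        code
    }

    fn main() {
        let mem = FakeMemory { bytes: vec![0xC3; 32] }; // 32 bytes of x86 `ret`
        assert_eq!(snapshot(&mem).len(), 32);
    }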