Mirror of https://github.com/fluencelabs/wasmer (synced 2025-06-24 22:21:32 +00:00)
Swap code lazily when tiering up from singlepass to LLVM.
This does not yet handle long-running functions, but should at least work.
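The mechanism, in brief: each singlepass-compiled local function is emitted with a 16-byte nop patchpoint at its entry, while a background thread recompiles the module with LLVM; once the optimized code is ready, every patchpoint is overwritten with an absolute jump into the corresponding LLVM-compiled function. A standalone sketch (not part of the diff; names are illustrative) of the 12-byte x86-64 trampoline the patch writes:

    // movabsq $target, %rax; jmpq *%rax
    fn encode_trampoline(target: u64) -> Vec<u8> {
        let mut buf = vec![0x48, 0xb8]; // movabsq imm64, %rax
        buf.extend_from_slice(&target.to_le_bytes());
        buf.extend_from_slice(&[0xff, 0xe0]); // jmpq *%rax
        buf
    }

    fn main() {
        const PATCHPOINT_SIZE: usize = 16; // nops emitted at each function entry
        let tramp = encode_trampoline(0xdead_beef);
        assert_eq!(tramp.len(), 12); // fits the 16-byte patchpoint
        assert!(tramp.len() <= PATCHPOINT_SIZE);
    }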
@@ -239,6 +239,7 @@ pub struct LLVMBackend {
     #[allow(dead_code)]
     buffer: Arc<Buffer>,
     msm: Option<ModuleStateMap>,
+    local_func_id_to_offset: Vec<usize>,
 }
 
 impl LLVMBackend {
@@ -380,6 +381,17 @@ impl LLVMBackend {
             }
         }
 
+        let code_ptr = unsafe { llvm_backend_get_code_ptr(module) } as usize;
+        let code_len = unsafe { llvm_backend_get_code_size(module) } as usize;
+
+        let local_func_id_to_offset: Vec<usize> = local_func_id_to_addr
+            .iter()
+            .map(|&x| {
+                assert!(x >= code_ptr && x < code_ptr + code_len);
+                x - code_ptr
+            })
+            .collect();
+
         //println!("MSM: {:?}", msm);
 
         (
@@ -387,6 +399,7 @@ impl LLVMBackend {
                 module,
                 buffer: Arc::clone(&buffer),
                 msm: Some(msm),
+                local_func_id_to_offset,
             },
             LLVMCache { buffer },
         )
@@ -397,6 +410,7 @@ impl LLVMBackend {
                 module,
                 buffer: Arc::clone(&buffer),
                 msm: None,
+                local_func_id_to_offset: vec![],
             },
             LLVMCache { buffer },
         )
@@ -428,6 +442,7 @@ impl LLVMBackend {
                 module,
                 buffer: Arc::clone(&buffer),
                 msm: None,
+                local_func_id_to_offset: vec![],
             },
             LLVMCache { buffer },
         ))
@@ -491,6 +506,10 @@ impl RunnableModule for LLVMBackend {
         })
     }
 
+    fn get_local_function_offsets(&self) -> Option<Vec<usize>> {
+        Some(self.local_func_id_to_offset.clone())
+    }
+
     fn get_module_state_map(&self) -> Option<ModuleStateMap> {
         self.msm.clone()
     }
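The LLVM backend reports function entry points as offsets from the code base rather than as absolute pointers, so the values stay meaningful regardless of where the buffer is mapped; a consumer adds the base pointer back, as the CLI patch loop later in this diff does. A minimal sketch (the helper name is made up):

    fn entry_addresses(code_base: usize, offsets: &[usize]) -> Vec<usize> {
        offsets.iter().map(|&off| code_base + off).collect()
    }

    fn main() {
        let base = 0x1000_0000usize; // wherever the code buffer got mapped
        assert_eq!(entry_addresses(base, &[0x0, 0x40])[1], base + 0x40);
    }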
@@ -19,7 +19,7 @@ use wasmer_runtime_core::{
     module::{ModuleInfo, ModuleInner},
     structures::{Map, TypedIndex},
     types::{
-        FuncIndex, FuncSig, GlobalIndex, LocalOrImport, MemoryIndex, SigIndex, TableIndex, Type, ImportedFuncIndex,
+        FuncIndex, FuncSig, GlobalIndex, LocalOrImport, MemoryIndex, SigIndex, TableIndex, Type,
     },
 };
 use wasmparser::{BinaryReaderError, MemoryImmediate, Operator, Type as WpType};
@@ -303,7 +303,7 @@ fn resolve_memory_ptr(
 }
 
 fn emit_stack_map(
-    module_info: &ModuleInfo,
+    _module_info: &ModuleInfo,
     intrinsics: &Intrinsics,
     builder: &Builder,
     local_function_id: usize,
@@ -311,7 +311,7 @@ fn emit_stack_map(
     kind: StackmapEntryKind,
     locals: &[PointerValue],
     state: &State,
-    ctx: &mut CtxType,
+    _ctx: &mut CtxType,
     opcode_offset: usize,
 ) {
     let stackmap_id = target.entries.len();
@@ -11,8 +11,8 @@ use wasmer_runtime_core::state::{
 use wasmer_runtime_core::vm::Ctx;
 use wasmer_runtime_core::{
     module::ModuleInfo,
-    types::{GlobalIndex, TableIndex, LocalOrImport},
     structures::TypedIndex,
+    types::{GlobalIndex, LocalOrImport, TableIndex},
     vm,
 };
 
@@ -154,30 +154,48 @@ impl StackmapEntry {
                     MachineValue::WasmStack(x)
                 }
                 ValueSemantic::Ctx => MachineValue::Vmctx,
-                ValueSemantic::SignalMem => MachineValue::VmctxDeref(vec![Ctx::offset_interrupt_signal_mem() as usize, 0]),
-                ValueSemantic::PointerToMemoryBase => MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize]),
-                ValueSemantic::PointerToMemoryBound => MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize]),
-                ValueSemantic::MemoryBase => MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize, 0]),
-                ValueSemantic::MemoryBound => MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize, 0]),
-                ValueSemantic::PointerToGlobal(idx) => MachineValue::VmctxDeref(deref_global(module_info, idx, false)),
-                ValueSemantic::Global(idx) => MachineValue::VmctxDeref(deref_global(module_info, idx, true)),
-                ValueSemantic::PointerToTableBase => MachineValue::VmctxDeref(deref_table_base(module_info, 0, false)),
-                ValueSemantic::PointerToTableBound => MachineValue::VmctxDeref(deref_table_bound(module_info, 0, false)),
+                ValueSemantic::SignalMem => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_interrupt_signal_mem() as usize, 0])
+                }
+                ValueSemantic::PointerToMemoryBase => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize])
+                }
+                ValueSemantic::PointerToMemoryBound => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize])
+                }
+                ValueSemantic::MemoryBase => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize, 0])
+                }
+                ValueSemantic::MemoryBound => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize, 0])
+                }
+                ValueSemantic::PointerToGlobal(idx) => {
+                    MachineValue::VmctxDeref(deref_global(module_info, idx, false))
+                }
+                ValueSemantic::Global(idx) => {
+                    MachineValue::VmctxDeref(deref_global(module_info, idx, true))
+                }
+                ValueSemantic::PointerToTableBase => {
+                    MachineValue::VmctxDeref(deref_table_base(module_info, 0, false))
+                }
+                ValueSemantic::PointerToTableBound => {
+                    MachineValue::VmctxDeref(deref_table_bound(module_info, 0, false))
+                }
                 ValueSemantic::ImportedFuncPointer(idx) => MachineValue::VmctxDeref(vec![
                     Ctx::offset_imported_funcs() as usize,
-                    vm::ImportedFunc::size() as usize * idx + vm::ImportedFunc::offset_func() as usize,
+                    vm::ImportedFunc::size() as usize * idx
+                        + vm::ImportedFunc::offset_func() as usize,
                     0,
                 ]),
                 ValueSemantic::ImportedFuncCtx(idx) => MachineValue::VmctxDeref(vec![
                     Ctx::offset_imported_funcs() as usize,
-                    vm::ImportedFunc::size() as usize * idx + vm::ImportedFunc::offset_vmctx() as usize,
+                    vm::ImportedFunc::size() as usize * idx
+                        + vm::ImportedFunc::offset_vmctx() as usize,
                     0,
                 ]),
-                ValueSemantic::DynamicSigindice(idx) => MachineValue::VmctxDeref(vec![
-                    Ctx::offset_signatures() as usize,
-                    idx * 4,
-                    0,
-                ]),
+                ValueSemantic::DynamicSigindice(idx) => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_signatures() as usize, idx * 4, 0])
+                }
             };
             match loc.ty {
                 LocationType::Register => {
@@ -538,16 +556,10 @@ impl StackMap {
 
 fn deref_global(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
     let mut x: Vec<usize> = match GlobalIndex::new(idx).local_or_import(info) {
-        LocalOrImport::Local(idx) => vec![
-            Ctx::offset_globals() as usize,
-            idx.index() * 8,
-            0,
-        ],
-        LocalOrImport::Import(idx) => vec![
-            Ctx::offset_imported_globals() as usize,
-            idx.index() * 8,
-            0,
-        ],
+        LocalOrImport::Local(idx) => vec![Ctx::offset_globals() as usize, idx.index() * 8, 0],
+        LocalOrImport::Import(idx) => {
+            vec![Ctx::offset_imported_globals() as usize, idx.index() * 8, 0]
+        }
     };
     if deref_into_value {
         x.push(0);
@@ -557,16 +569,10 @@ fn deref_global(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize>
 
 fn deref_table_base(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
     let mut x: Vec<usize> = match TableIndex::new(idx).local_or_import(info) {
-        LocalOrImport::Local(idx) => vec![
-            Ctx::offset_tables() as usize,
-            idx.index() * 8,
-            0,
-        ],
-        LocalOrImport::Import(idx) => vec![
-            Ctx::offset_imported_tables() as usize,
-            idx.index() * 8,
-            0,
-        ],
+        LocalOrImport::Local(idx) => vec![Ctx::offset_tables() as usize, idx.index() * 8, 0],
+        LocalOrImport::Import(idx) => {
+            vec![Ctx::offset_imported_tables() as usize, idx.index() * 8, 0]
+        }
     };
     if deref_into_value {
         x.push(0);
@@ -576,16 +582,10 @@ fn deref_table_base(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize>
 
 fn deref_table_bound(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
     let mut x: Vec<usize> = match TableIndex::new(idx).local_or_import(info) {
-        LocalOrImport::Local(idx) => vec![
-            Ctx::offset_tables() as usize,
-            idx.index() * 8,
-            8,
-        ],
-        LocalOrImport::Import(idx) => vec![
-            Ctx::offset_imported_tables() as usize,
-            idx.index() * 8,
-            8,
-        ],
+        LocalOrImport::Local(idx) => vec![Ctx::offset_tables() as usize, idx.index() * 8, 8],
+        LocalOrImport::Import(idx) => {
+            vec![Ctx::offset_imported_tables() as usize, idx.index() * 8, 8]
+        }
     };
     if deref_into_value {
         x.push(0);
@@ -151,6 +151,10 @@ pub trait RunnableModule: Send + Sync {
         None
     }
 
+    unsafe fn patch_local_function(&self, _idx: usize, _target_address: usize) -> bool {
+        false
+    }
+
     /// A wasm trampoline contains the necessary data to dynamically call an exported wasm function.
     /// Given a particular signature index, we are returned a trampoline that is matched with that
     /// signature and an invoke function that can call the trampoline.
@@ -167,6 +171,11 @@ pub trait RunnableModule: Send + Sync {
     fn get_offsets(&self) -> Option<Vec<usize>> {
         None
     }
+
+    /// Returns the beginning offsets of all local functions.
+    fn get_local_function_offsets(&self) -> Option<Vec<usize>> {
+        None
+    }
 }
 
 pub trait CacheGen: Send + Sync {
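Both hooks ship conservative defaults, so backends that cannot hot-patch need no changes. A reduced sketch of the opt-in pattern (trait and type names here are invented, not the real RunnableModule):

    trait Patchable {
        unsafe fn patch_local_function(&self, _idx: usize, _target_address: usize) -> bool {
            false // default: hot patching unsupported
        }
    }

    struct PlainBackend;
    impl Patchable for PlainBackend {} // compiles with no extra work

    fn main() {
        assert!(!unsafe { PlainBackend.patch_local_function(0, 0) });
    }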
@@ -17,7 +17,7 @@ use nix::sys::signal::{
     SIGSEGV, SIGTRAP,
 };
 use std::any::Any;
-use std::cell::UnsafeCell;
+use std::cell::{Cell, UnsafeCell};
 use std::ffi::c_void;
 use std::process;
 use std::sync::atomic::{AtomicBool, Ordering};
@@ -41,6 +41,7 @@ struct UnwindInfo {
 thread_local! {
     static UNWIND: UnsafeCell<Option<UnwindInfo>> = UnsafeCell::new(None);
     static CURRENT_CTX: UnsafeCell<*mut vm::Ctx> = UnsafeCell::new(::std::ptr::null_mut());
+    static WAS_SIGINT_TRIGGERED: Cell<bool> = Cell::new(false);
 }
 
 struct InterruptSignalMem(*mut u8);
@@ -69,6 +70,10 @@ lazy_static! {
 }
 static INTERRUPT_SIGNAL_DELIVERED: AtomicBool = AtomicBool::new(false);
 
+pub fn was_sigint_triggered_fault() -> bool {
+    WAS_SIGINT_TRIGGERED.with(|x| x.get())
+}
+
 pub unsafe fn with_ctx<R, F: FnOnce() -> R>(ctx: *mut vm::Ctx, cb: F) -> R {
     let addr = CURRENT_CTX.with(|x| x.get());
     let old = *addr;
@@ -82,6 +87,17 @@ pub unsafe fn get_wasm_interrupt_signal_mem() -> *mut u8 {
     INTERRUPT_SIGNAL_MEM.0
 }
 
+pub unsafe fn set_wasm_interrupt_on_ctx(ctx: *mut vm::Ctx) {
+    if mprotect(
+        (&*ctx).internal.interrupt_signal_mem as _,
+        INTERRUPT_SIGNAL_MEM_SIZE,
+        PROT_NONE,
+    ) < 0
+    {
+        panic!("cannot set PROT_NONE on signal mem");
+    }
+}
+
 pub unsafe fn set_wasm_interrupt() {
     let mem: *mut u8 = INTERRUPT_SIGNAL_MEM.0;
     if mprotect(mem as _, INTERRUPT_SIGNAL_MEM_SIZE, PROT_NONE) < 0 {
@@ -188,7 +204,7 @@ extern "C" fn signal_trap_handler(
     let should_unwind = allocate_and_run(TRAP_STACK_SIZE, || {
         let mut is_suspend_signal = false;
 
-        println!("SIGNAL: {:?} {:?}", Signal::from_c_int(signum), fault.faulting_addr);
+        WAS_SIGINT_TRIGGERED.with(|x| x.set(false));
 
         match Signal::from_c_int(signum) {
             Ok(SIGTRAP) => {
@@ -215,7 +231,9 @@ extern "C" fn signal_trap_handler(
                 if fault.faulting_addr as usize == get_wasm_interrupt_signal_mem() as usize {
                     is_suspend_signal = true;
                     clear_wasm_interrupt();
-                    INTERRUPT_SIGNAL_DELIVERED.store(false, Ordering::SeqCst);
+                    if INTERRUPT_SIGNAL_DELIVERED.swap(false, Ordering::SeqCst) {
+                        WAS_SIGINT_TRIGGERED.with(|x| x.set(true));
+                    }
                 }
             }
             _ => {}
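The change from store to swap matters: swap(false) reads and clears the flag in one atomic step, letting the handler decide whether the faulting access was SIGINT-driven (the delivery path set the flag) or a tier-switch interrupt (which does not). A standalone sketch of the consume-once semantics:

    use std::sync::atomic::{AtomicBool, Ordering};

    static DELIVERED: AtomicBool = AtomicBool::new(false);

    fn main() {
        DELIVERED.store(true, Ordering::SeqCst); // the SIGINT delivery path sets it
        assert!(DELIVERED.swap(false, Ordering::SeqCst)); // consumed: SIGINT-driven
        assert!(!DELIVERED.swap(false, Ordering::SeqCst)); // cannot be consumed twice
    }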
@@ -102,13 +102,20 @@ pub struct CodeMemory {
     size: usize,
 }
 
+unsafe impl Send for CodeMemory {}
+unsafe impl Sync for CodeMemory {}
+
 #[cfg(not(unix))]
 impl CodeMemory {
     pub fn new(_size: usize) -> CodeMemory {
         unimplemented!();
     }
 
-    pub fn make_executable(&mut self) {
+    pub fn make_executable(&self) {
         unimplemented!();
     }
+
+    pub fn make_writable(&self) {
+        unimplemented!();
+    }
 }
@@ -139,11 +146,17 @@ impl CodeMemory {
         }
     }
 
-    pub fn make_executable(&mut self) {
+    pub fn make_executable(&self) {
         if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_EXEC) } != 0 {
             panic!("cannot set code memory to executable");
         }
     }
+
+    pub fn make_writable(&self) {
+        if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_WRITE) } != 0 {
+            panic!("cannot set code memory to writable");
+        }
+    }
 }
 
 #[cfg(unix)]
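make_writable/make_executable keep the code region under a W^X discipline: writable while being patched, executable otherwise, never both at once. A sketch of the same mprotect dance on an anonymous mapping (assumes a Unix target and the libc crate; CodeMemory wraps exactly this pair of calls):

    use std::ptr;
    use libc::{mmap, mprotect, MAP_ANON, MAP_FAILED, MAP_PRIVATE, PROT_EXEC, PROT_READ, PROT_WRITE};

    fn main() {
        let size = 4096;
        unsafe {
            let p = mmap(ptr::null_mut(), size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON, -1, 0);
            assert_ne!(p, MAP_FAILED);
            // ... copy generated machine code in while writable ...
            assert_eq!(mprotect(p, size, PROT_READ | PROT_EXEC), 0); // make_executable
            assert_eq!(mprotect(p, size, PROT_READ | PROT_WRITE), 0); // make_writable, to patch
            assert_eq!(mprotect(p, size, PROT_READ | PROT_EXEC), 0); // and back before running
        }
    }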
@@ -493,8 +493,7 @@ pub mod x64 {
                                     stack[stack_offset] |= v;
                                 }
                             },
-                            MachineValue::WasmLocal(x) => {
-                                match fsm.locals[x] {
+                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                 WasmAbstractValue::Const(x) => {
                                     assert!(x <= ::std::u32::MAX as u64);
                                     stack[stack_offset] |= x;
@@ -504,10 +503,11 @@ pub mod x64 {
                                     assert!(v <= ::std::u32::MAX as u64);
                                     stack[stack_offset] |= v;
                                 }
-                                }
-                            }
+                            },
                             MachineValue::VmctxDeref(ref seq) => {
-                                stack[stack_offset] |= compute_vmctx_deref(vmctx as *const Ctx, seq) & (::std::u32::MAX as u64);
+                                stack[stack_offset] |=
+                                    compute_vmctx_deref(vmctx as *const Ctx, seq)
+                                        & (::std::u32::MAX as u64);
                             }
                             MachineValue::Undefined => {}
                             _ => unimplemented!("TwoHalves.0"),
@@ -524,8 +524,7 @@ pub mod x64 {
                                     stack[stack_offset] |= v << 32;
                                 }
                             },
-                            MachineValue::WasmLocal(x) => {
-                                match fsm.locals[x] {
+                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                 WasmAbstractValue::Const(x) => {
                                     assert!(x <= ::std::u32::MAX as u64);
                                     stack[stack_offset] |= x << 32;
@@ -535,10 +534,12 @@ pub mod x64 {
                                     assert!(v <= ::std::u32::MAX as u64);
                                     stack[stack_offset] |= v << 32;
                                 }
-                                }
-                            }
+                            },
                             MachineValue::VmctxDeref(ref seq) => {
-                                stack[stack_offset] |= (compute_vmctx_deref(vmctx as *const Ctx, seq) & (::std::u32::MAX as u64)) << 32;
+                                stack[stack_offset] |=
+                                    (compute_vmctx_deref(vmctx as *const Ctx, seq)
+                                        & (::std::u32::MAX as u64))
+                                        << 32;
                             }
                             MachineValue::Undefined => {}
                             _ => unimplemented!("TwoHalves.1"),
@@ -583,7 +584,6 @@ pub mod x64 {
 
             stack_offset -= 1;
             stack[stack_offset] = (code_base + activate_offset) as u64; // return address
-            println!("activating at {:?}", (code_base + activate_offset) as *const u8);
         }
 
         stack_offset -= 1;
@@ -694,7 +694,6 @@ pub mod x64 {
 
         catch_unsafe_unwind(
             || {
-                ::std::intrinsics::breakpoint();
                 run_on_alternative_stack(
                     stack.as_mut_ptr().offset(stack.len() as isize),
                     stack.as_mut_ptr().offset(stack_offset as isize),
@@ -20,6 +20,7 @@ use wasmer_runtime_core::{
     },
     cache::{Artifact, Error as CacheError},
     codegen::*,
+    loader::CodeMemory,
     memory::MemoryType,
     module::{ModuleInfo, ModuleInner},
     state::{
@@ -172,7 +173,7 @@ unsafe impl Sync for FuncPtr {}
 
 pub struct X64ExecutionContext {
     #[allow(dead_code)]
-    code: ExecutableBuffer,
+    code: CodeMemory,
     #[allow(dead_code)]
     functions: Vec<X64FunctionCode>,
     function_pointers: Vec<FuncPtr>,
@@ -220,6 +221,30 @@ impl RunnableModule for X64ExecutionContext {
         Some(self.breakpoints.clone())
     }
 
+    unsafe fn patch_local_function(&self, idx: usize, target_address: usize) -> bool {
+        // movabsq ?, %rax;
+        // jmpq *%rax;
+        #[repr(packed)]
+        struct Trampoline {
+            movabsq: [u8; 2],
+            addr: u64,
+            jmpq: [u8; 2],
+        }
+
+        self.code.make_writable();
+
+        let trampoline = &mut *(self.function_pointers[self.func_import_count + idx].0
+            as *const Trampoline as *mut Trampoline);
+        trampoline.movabsq[0] = 0x48;
+        trampoline.movabsq[1] = 0xb8;
+        trampoline.addr = target_address as u64;
+        trampoline.jmpq[0] = 0xff;
+        trampoline.jmpq[1] = 0xe0;
+
+        self.code.make_executable();
+        true
+    }
+
     fn get_trampoline(&self, _: &ModuleInfo, sig_index: SigIndex) -> Option<Wasm> {
         use std::ffi::c_void;
         use wasmer_runtime_core::typed_func::WasmTrapInfo;
@@ -306,6 +331,15 @@ impl RunnableModule for X64ExecutionContext {
     fn get_offsets(&self) -> Option<Vec<usize>> {
         Some(self.function_offsets.iter().map(|x| x.0).collect())
     }
+
+    fn get_local_function_offsets(&self) -> Option<Vec<usize>> {
+        Some(
+            self.function_offsets[self.func_import_count..]
+                .iter()
+                .map(|x| x.0)
+                .collect(),
+        )
+    }
 }
 
 #[derive(Debug)]
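Both methods rely on the same layout invariant: function_pointers holds imported functions first, then local functions, so local function i lives at index func_import_count + i. (Note that the CLI-side patch loop later in this diff is still gated off with `&& false`.) An illustrative sketch of the indexing, with made-up values:

    fn local_pointer(function_pointers: &[usize], func_import_count: usize, idx: usize) -> usize {
        function_pointers[func_import_count + idx]
    }

    fn main() {
        // two imported functions followed by two local functions
        let ptrs = [0xaaaa, 0xbbbb, 0x1000, 0x2000];
        assert_eq!(local_pointer(&ptrs, 2, 0), 0x1000);
        assert_eq!(local_pointer(&ptrs, 2, 1), 0x2000);
    }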
@@ -413,7 +447,10 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
             }
         };
         let total_size = assembler.get_offset().0;
-        let output = assembler.finalize().unwrap();
+        let _output = assembler.finalize().unwrap();
+        let mut output = CodeMemory::new(_output.len());
+        output[0.._output.len()].copy_from_slice(&_output);
+        output.make_executable();
 
         let function_labels = if let Some(x) = self.functions.last() {
             x.function_labels.as_ref().unwrap()
@@ -440,14 +477,21 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
                     });
                 }
             };
-            out_labels.push(FuncPtr(output.ptr(*offset) as _));
+            out_labels.push(FuncPtr(
+                unsafe { output.as_ptr().offset(offset.0 as isize) } as _,
+            ));
             out_offsets.push(*offset);
         }
 
         let breakpoints: Arc<HashMap<_, _>> = Arc::new(
             breakpoints
                 .into_iter()
-                .map(|(offset, f)| (output.ptr(offset) as usize, f))
+                .map(|(offset, f)| {
+                    (
+                        unsafe { output.as_ptr().offset(offset.0 as isize) } as usize,
+                        f,
+                    )
+                })
                .collect(),
         );
 
@@ -1634,6 +1678,12 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
 
     fn begin_body(&mut self, _module_info: &ModuleInfo) -> Result<(), CodegenError> {
         let a = self.assembler.as_mut().unwrap();
+        let start_label = a.get_label();
+        // patchpoint of 16 bytes
+        for _ in 0..16 {
+            a.emit_nop();
+        }
+        a.emit_label(start_label);
         a.emit_push(Size::S64, Location::GPR(GPR::RBP));
         a.emit_mov(Size::S64, Location::GPR(GPR::RSP), Location::GPR(GPR::RBP));
 
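The sixteen nops sit ahead of start_label, so an unpatched function simply falls through them into its prologue; after tier-up, patch_local_function overwrites the first 12 bytes with the trampoline and the prologue is never reached through the entry. A byte-level sketch of the emitted layout:

    fn main() {
        let mut code = vec![0x90u8; 16]; // the patchpoint: sixteen single-byte nops
        code.push(0x55); // push %rbp, the first prologue byte
        assert!(code[..16].iter().all(|&b| b == 0x90)); // nothing patched yet
    }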
@@ -60,6 +60,8 @@ pub trait Emitter {
 
     fn emit_label(&mut self, label: Self::Label);
 
+    fn emit_nop(&mut self);
+
     fn emit_mov(&mut self, sz: Size, src: Location, dst: Location);
     fn emit_lea(&mut self, sz: Size, src: Location, dst: Location);
     fn emit_lea_label(&mut self, label: Self::Label, dst: Location);
@@ -467,6 +469,10 @@ impl Emitter for Assembler {
         dynasm!(self ; => label);
     }
 
+    fn emit_nop(&mut self) {
+        dynasm!(self ; nop);
+    }
+
     fn emit_mov(&mut self, sz: Size, src: Location, dst: Location) {
         binop_all_nofp!(mov, self, sz, src, dst, {
             binop_imm64_gpr!(mov, self, sz, src, dst, {
@@ -9,6 +9,7 @@ use std::io::Read;
 use std::path::PathBuf;
 use std::process::exit;
 use std::str::FromStr;
+use std::sync::{Arc, Mutex};
 
 use hashbrown::HashMap;
 use structopt::StructOpt;
@@ -25,7 +26,9 @@ use wasmer_runtime_core::{
     self,
     backend::{Backend, Compiler, CompilerConfig, MemoryBoundCheckMode},
     debug,
+    fault::{set_wasm_interrupt_on_ctx, was_sigint_triggered_fault},
     loader::{Instance as LoadedInstance, LocalLoader},
+    Instance, Module,
 };
 #[cfg(feature = "backend-singlepass")]
 use wasmer_singlepass_backend::SinglePassCompiler;
@@ -113,7 +116,7 @@ struct Run {
     loader: Option<LoaderName>,
 
     /// Path to previously saved instance image to resume.
-    #[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+    #[cfg(feature = "managed")]
     #[structopt(long = "resume")]
     resume: Option<String>,
 
@@ -187,6 +190,55 @@ struct Validate {
     path: PathBuf,
 }
 
+struct OptimizationState {
+    outcome: Mutex<Option<OptimizationOutcome>>,
+}
+
+struct OptimizationOutcome {
+    module: Module,
+}
+
+struct Defer<F: FnOnce()>(Option<F>);
+impl<F: FnOnce()> Drop for Defer<F> {
+    fn drop(&mut self) {
+        if let Some(f) = self.0.take() {
+            f();
+        }
+    }
+}
+
+#[repr(transparent)]
+struct CtxWrapper(*mut wasmer_runtime_core::vm::Ctx);
+unsafe impl Send for CtxWrapper {}
+unsafe impl Sync for CtxWrapper {}
+
+#[cfg(feature = "managed")]
+unsafe fn begin_optimize(
+    binary: Vec<u8>,
+    compiler: Box<dyn Compiler>,
+    ctx: Arc<Mutex<CtxWrapper>>,
+    state: Arc<OptimizationState>,
+) {
+    let module = match webassembly::compile_with_config_with(
+        &binary[..],
+        CompilerConfig {
+            symbol_map: None,
+            track_state: true,
+            ..Default::default()
+        },
+        &*compiler,
+    ) {
+        Ok(x) => x,
+        Err(_) => return,
+    };
+
+    let ctx_inner = ctx.lock().unwrap();
+    if !ctx_inner.0.is_null() {
+        *state.outcome.lock().unwrap() = Some(OptimizationOutcome { module });
+        set_wasm_interrupt_on_ctx(ctx_inner.0);
+    }
+}
+
 /// Read the contents of a file
 fn read_file_contents(path: &PathBuf) -> Result<Vec<u8>, io::Error> {
     let mut buffer: Vec<u8> = Vec::new();
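CtxWrapper plus Defer acts as a lifetime guard: the background compile thread only ever sees an Arc'd pointer slot, and the deferred closure nulls that slot when the owning scope exits, so begin_optimize never touches a dangling ctx. A reduced sketch of the pattern (a usize stands in for the raw pointer):

    use std::sync::{Arc, Mutex};

    struct Defer<F: FnOnce()>(Option<F>);
    impl<F: FnOnce()> Drop for Defer<F> {
        fn drop(&mut self) {
            if let Some(f) = self.0.take() {
                f();
            }
        }
    }

    fn main() {
        let slot = Arc::new(Mutex::new(1234usize)); // stands in for *mut Ctx
        {
            let slot2 = slot.clone();
            let _cleanup = Defer(Some(move || *slot2.lock().unwrap() = 0));
            assert_ne!(*slot.lock().unwrap(), 0); // still "alive" inside the scope
        } // _cleanup drops here and nulls the slot
        assert_eq!(*slot.lock().unwrap(), 0); // an observer thread would now skip it
    }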
@@ -505,9 +557,9 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
 
     let start: Func<(), ()> = instance.func("_start").map_err(|e| format!("{:?}", e))?;
 
-    #[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+    #[cfg(feature = "managed")]
     unsafe {
-        if options.backend == Backend::Singlepass || options.backend == Backend::LLVM {
+        if options.backend == Backend::Singlepass {
             use wasmer_runtime_core::fault::{
                 catch_unsafe_unwind, ensure_sighandler, with_ctx,
             };
@@ -530,7 +582,86 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
                 None
             };
 
+            let ctx_box =
+                Arc::new(Mutex::new(CtxWrapper(instance.context_mut() as *mut _)));
+            // Ensure that the ctx pointer's lifetime is not longer than Instance's.
+            let _deferred_ctx_box_cleanup: Defer<_> = {
+                let ctx_box = ctx_box.clone();
+                Defer(Some(move || {
+                    ctx_box.lock().unwrap().0 = ::std::ptr::null_mut();
+                }))
+            };
+            let opt_state = Arc::new(OptimizationState {
+                outcome: Mutex::new(None),
+            });
+
+            {
+                let wasm_binary = wasm_binary.to_vec();
+                let ctx_box = ctx_box.clone();
+                let opt_state = opt_state.clone();
+                ::std::thread::spawn(move || {
+                    // TODO: CLI option for optimized backend
+                    begin_optimize(
+                        wasm_binary,
+                        get_compiler_by_backend(Backend::LLVM).unwrap(),
+                        ctx_box,
+                        opt_state,
+                    );
+                });
+            }
+
+            let mut patched = false;
+            let mut optimized_instance: Option<Instance> = None;
+
+            loop {
+                let optimized: Option<&mut Instance> =
+                    if let Some(ref mut x) = optimized_instance {
+                        Some(x)
+                    } else {
+                        let mut outcome = opt_state.outcome.lock().unwrap();
+                        if let Some(x) = outcome.take() {
+                            let instance =
+                                x.module.instantiate(&import_object).map_err(|e| {
+                                    format!("Can't instantiate module: {:?}", e)
+                                })?;
+                            optimized_instance = Some(instance);
+                            optimized_instance.as_mut()
+                        } else {
+                            None
+                        }
+                    };
+                if !patched && false {
+                    if let Some(optimized) = optimized {
+                        let base = module.info().imported_functions.len();
+                        let code_ptr = optimized
+                            .module
+                            .runnable_module
+                            .get_code()
+                            .unwrap()
+                            .as_ptr()
+                            as usize;
+                        let target_addresses: Vec<usize> = optimized
+                            .module
+                            .runnable_module
+                            .get_local_function_offsets()
+                            .unwrap()
+                            .into_iter()
+                            .map(|x| code_ptr + x)
+                            .collect();
+                        assert_eq!(
+                            target_addresses.len(),
+                            module.info().func_assoc.len() - base
+                        );
+                        for i in base..module.info().func_assoc.len() {
+                            instance
+                                .module
+                                .runnable_module
+                                .patch_local_function(i - base, target_addresses[i - base]);
+                        }
+                        patched = true;
+                        eprintln!("Patched");
+                    }
+                }
                 let breakpoints = instance.module.runnable_module.get_breakpoints();
                 let ctx = instance.context_mut() as *mut _;
                 let ret = with_ctx(ctx, || {
@@ -560,6 +691,14 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
                 });
                 if let Err(e) = ret {
                     if let Some(new_image) = e.downcast_ref::<InstanceImage>() {
+                        // Tier switch event
+                        if !was_sigint_triggered_fault()
+                            && optimized_instance.is_none()
+                            && opt_state.outcome.lock().unwrap().is_some()
+                        {
+                            image = Some(new_image.clone());
+                            continue;
+                        }
                         let op = interactive_shell(InteractiveShellContext {
                             image: Some(new_image.clone()),
                         });
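Only a fault that was not user-initiated, in the still-unoptimized instance, with the LLVM module ready, is treated as a tier switch: the image is captured and the loop re-enters execution from it. A sketch of that three-way decision:

    fn should_tier_switch(was_sigint: bool, already_optimized: bool, outcome_ready: bool) -> bool {
        !was_sigint && !already_optimized && outcome_ready
    }

    fn main() {
        assert!(should_tier_switch(false, false, true)); // capture image and continue
        assert!(!should_tier_switch(true, false, true)); // a real SIGINT goes to the shell
    }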
@@ -644,18 +783,18 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
     Ok(())
 }
 
-#[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+#[cfg(feature = "managed")]
 struct InteractiveShellContext {
     image: Option<wasmer_runtime_core::state::InstanceImage>,
 }
 
-#[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+#[cfg(feature = "managed")]
 #[derive(Debug)]
 enum ShellExitOperation {
     ContinueWith(wasmer_runtime_core::state::InstanceImage, Option<Backend>),
 }
 
-#[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+#[cfg(feature = "managed")]
 fn interactive_shell(mut ctx: InteractiveShellContext) -> ShellExitOperation {
     use std::io::Write;
 
@@ -710,6 +849,8 @@ fn interactive_shell(mut ctx: InteractiveShellContext) -> ShellExitOperation {
                 println!("Program state not available, cannot continue execution");
             }
         }
+        // Disabled due to unsafety.
+        /*
         "switch_backend" => {
             let backend_name = parts.next();
             if backend_name.is_none() {
@@ -731,6 +872,7 @@ fn interactive_shell(mut ctx: InteractiveShellContext) -> ShellExitOperation {
                 println!("Program state not available, cannot continue execution");
             }
         }
+        */
         "backtrace" | "bt" => {
             if let Some(ref image) = ctx.image {
                 println!("{}", image.execution_state.colored_output());