Mirror of https://github.com/fluencelabs/wasmer, synced 2025-06-24 22:21:32 +00:00
Swap code lazily when tiering up from singlepass to LLVM.
Long-running functions are not handled yet, but everything else should work.
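In outline: the singlepass backend emits a 16-byte nop patchpoint at the entry of each local function, a background thread recompiles the module with the LLVM backend, and once the optimized code is ready each patchpoint is overwritten with an absolute jump into the corresponding LLVM function. A minimal sketch of the byte-level patch (illustrative only; the byte buffer stands in for the writable code page, and write_jump_trampoline is not a name from this commit):

    /// x86-64 trampoline: movabsq $target, %rax; jmpq *%rax.
    /// 2 + 8 + 2 = 12 bytes, which fits inside the 16-byte nop patchpoint.
    fn write_jump_trampoline(patchpoint: &mut [u8], target: u64) {
        patchpoint[0..2].copy_from_slice(&[0x48, 0xb8]); // movabsq imm64, %rax
        patchpoint[2..10].copy_from_slice(&target.to_le_bytes()); // imm64, little-endian
        patchpoint[10..12].copy_from_slice(&[0xff, 0xe0]); // jmpq *%rax
    }

    fn main() {
        let mut patchpoint = [0x90u8; 16]; // stand-in for the emitted nops
        write_jump_trampoline(&mut patchpoint, 0x1122_3344_5566_7788);
        println!("{:02x?}", patchpoint);
    }

In the diff below the same write goes through a #[repr(packed)] Trampoline struct between CodeMemory::make_writable and make_executable; because the swap happens at function entry, activations already on the stack keep running singlepass code, hence the caveat about long-running functions.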
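The jump targets come from the LLVM backend, which reports each local function as an offset from the start of its code region (local_func_id_to_offset); the patcher rebases those offsets onto the optimized module's code pointer. A sketch of that translation, with hypothetical addresses:

    /// Absolute entry addresses -> offsets from the code region start.
    fn addrs_to_offsets(code_ptr: usize, code_len: usize, addrs: &[usize]) -> Vec<usize> {
        addrs
            .iter()
            .map(|&a| {
                // Every local function entry must lie inside the code region.
                assert!(a >= code_ptr && a < code_ptr + code_len);
                a - code_ptr
            })
            .collect()
    }

    fn main() {
        let (code_ptr, code_len) = (0x7f00_0000_0000usize, 0x1000); // hypothetical region
        let offsets = addrs_to_offsets(code_ptr, code_len, &[code_ptr + 0x40, code_ptr + 0x200]);
        assert_eq!(offsets, vec![0x40, 0x200]);
        let targets: Vec<usize> = offsets.iter().map(|&o| code_ptr + o).collect(); // rebase
        assert_eq!(targets, vec![code_ptr + 0x40, code_ptr + 0x200]);
    }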
@@ -239,6 +239,7 @@ pub struct LLVMBackend {
     #[allow(dead_code)]
     buffer: Arc<Buffer>,
     msm: Option<ModuleStateMap>,
+    local_func_id_to_offset: Vec<usize>,
 }
 
 impl LLVMBackend {
@@ -380,6 +381,17 @@ impl LLVMBackend {
             }
         }
 
+        let code_ptr = unsafe { llvm_backend_get_code_ptr(module) } as usize;
+        let code_len = unsafe { llvm_backend_get_code_size(module) } as usize;
+
+        let local_func_id_to_offset: Vec<usize> = local_func_id_to_addr
+            .iter()
+            .map(|&x| {
+                assert!(x >= code_ptr && x < code_ptr + code_len);
+                x - code_ptr
+            })
+            .collect();
+
         //println!("MSM: {:?}", msm);
 
         (
@@ -387,6 +399,7 @@ impl LLVMBackend {
                 module,
                 buffer: Arc::clone(&buffer),
                 msm: Some(msm),
+                local_func_id_to_offset,
             },
             LLVMCache { buffer },
         )
@@ -397,6 +410,7 @@ impl LLVMBackend {
                 module,
                 buffer: Arc::clone(&buffer),
                 msm: None,
+                local_func_id_to_offset: vec![],
             },
             LLVMCache { buffer },
         )
@@ -428,6 +442,7 @@ impl LLVMBackend {
                 module,
                 buffer: Arc::clone(&buffer),
                 msm: None,
+                local_func_id_to_offset: vec![],
             },
             LLVMCache { buffer },
         ))
@@ -491,6 +506,10 @@ impl RunnableModule for LLVMBackend {
         })
     }
 
+    fn get_local_function_offsets(&self) -> Option<Vec<usize>> {
+        Some(self.local_func_id_to_offset.clone())
+    }
+
     fn get_module_state_map(&self) -> Option<ModuleStateMap> {
         self.msm.clone()
     }
@@ -19,7 +19,7 @@ use wasmer_runtime_core::{
     module::{ModuleInfo, ModuleInner},
     structures::{Map, TypedIndex},
     types::{
-        FuncIndex, FuncSig, GlobalIndex, LocalOrImport, MemoryIndex, SigIndex, TableIndex, Type, ImportedFuncIndex,
+        FuncIndex, FuncSig, GlobalIndex, LocalOrImport, MemoryIndex, SigIndex, TableIndex, Type,
     },
 };
 use wasmparser::{BinaryReaderError, MemoryImmediate, Operator, Type as WpType};
@@ -303,7 +303,7 @@ fn resolve_memory_ptr(
 }
 
 fn emit_stack_map(
-    module_info: &ModuleInfo,
+    _module_info: &ModuleInfo,
     intrinsics: &Intrinsics,
     builder: &Builder,
     local_function_id: usize,
@@ -311,7 +311,7 @@ fn emit_stack_map(
     kind: StackmapEntryKind,
     locals: &[PointerValue],
     state: &State,
-    ctx: &mut CtxType,
+    _ctx: &mut CtxType,
     opcode_offset: usize,
 ) {
     let stackmap_id = target.entries.len();
@@ -11,8 +11,8 @@ use wasmer_runtime_core::state::{
 use wasmer_runtime_core::vm::Ctx;
 use wasmer_runtime_core::{
     module::ModuleInfo,
-    types::{GlobalIndex, TableIndex, LocalOrImport},
     structures::TypedIndex,
+    types::{GlobalIndex, LocalOrImport, TableIndex},
     vm,
 };
 
@@ -154,30 +154,48 @@ impl StackmapEntry {
                     MachineValue::WasmStack(x)
                 }
                 ValueSemantic::Ctx => MachineValue::Vmctx,
-                ValueSemantic::SignalMem => MachineValue::VmctxDeref(vec![Ctx::offset_interrupt_signal_mem() as usize, 0]),
-                ValueSemantic::PointerToMemoryBase => MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize]),
-                ValueSemantic::PointerToMemoryBound => MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize]),
-                ValueSemantic::MemoryBase => MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize, 0]),
-                ValueSemantic::MemoryBound => MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize, 0]),
-                ValueSemantic::PointerToGlobal(idx) => MachineValue::VmctxDeref(deref_global(module_info, idx, false)),
-                ValueSemantic::Global(idx) => MachineValue::VmctxDeref(deref_global(module_info, idx, true)),
-                ValueSemantic::PointerToTableBase => MachineValue::VmctxDeref(deref_table_base(module_info, 0, false)),
-                ValueSemantic::PointerToTableBound => MachineValue::VmctxDeref(deref_table_bound(module_info, 0, false)),
+                ValueSemantic::SignalMem => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_interrupt_signal_mem() as usize, 0])
+                }
+                ValueSemantic::PointerToMemoryBase => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize])
+                }
+                ValueSemantic::PointerToMemoryBound => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize])
+                }
+                ValueSemantic::MemoryBase => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize, 0])
+                }
+                ValueSemantic::MemoryBound => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize, 0])
+                }
+                ValueSemantic::PointerToGlobal(idx) => {
+                    MachineValue::VmctxDeref(deref_global(module_info, idx, false))
+                }
+                ValueSemantic::Global(idx) => {
+                    MachineValue::VmctxDeref(deref_global(module_info, idx, true))
+                }
+                ValueSemantic::PointerToTableBase => {
+                    MachineValue::VmctxDeref(deref_table_base(module_info, 0, false))
+                }
+                ValueSemantic::PointerToTableBound => {
+                    MachineValue::VmctxDeref(deref_table_bound(module_info, 0, false))
+                }
                 ValueSemantic::ImportedFuncPointer(idx) => MachineValue::VmctxDeref(vec![
                     Ctx::offset_imported_funcs() as usize,
-                    vm::ImportedFunc::size() as usize * idx + vm::ImportedFunc::offset_func() as usize,
+                    vm::ImportedFunc::size() as usize * idx
+                        + vm::ImportedFunc::offset_func() as usize,
                     0,
                 ]),
                 ValueSemantic::ImportedFuncCtx(idx) => MachineValue::VmctxDeref(vec![
                     Ctx::offset_imported_funcs() as usize,
-                    vm::ImportedFunc::size() as usize * idx + vm::ImportedFunc::offset_vmctx() as usize,
-                    0,
-                ]),
-                ValueSemantic::DynamicSigindice(idx) => MachineValue::VmctxDeref(vec![
-                    Ctx::offset_signatures() as usize,
-                    idx * 4,
+                    vm::ImportedFunc::size() as usize * idx
+                        + vm::ImportedFunc::offset_vmctx() as usize,
                     0,
                 ]),
+                ValueSemantic::DynamicSigindice(idx) => {
+                    MachineValue::VmctxDeref(vec![Ctx::offset_signatures() as usize, idx * 4, 0])
+                }
             };
             match loc.ty {
                 LocationType::Register => {
@@ -538,16 +556,10 @@ impl StackMap {
 
 fn deref_global(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
     let mut x: Vec<usize> = match GlobalIndex::new(idx).local_or_import(info) {
-        LocalOrImport::Local(idx) => vec![
-            Ctx::offset_globals() as usize,
-            idx.index() * 8,
-            0,
-        ],
-        LocalOrImport::Import(idx) => vec![
-            Ctx::offset_imported_globals() as usize,
-            idx.index() * 8,
-            0,
-        ],
+        LocalOrImport::Local(idx) => vec![Ctx::offset_globals() as usize, idx.index() * 8, 0],
+        LocalOrImport::Import(idx) => {
+            vec![Ctx::offset_imported_globals() as usize, idx.index() * 8, 0]
+        }
     };
     if deref_into_value {
         x.push(0);
@@ -557,16 +569,10 @@ fn deref_global(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<us
 
 fn deref_table_base(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
     let mut x: Vec<usize> = match TableIndex::new(idx).local_or_import(info) {
-        LocalOrImport::Local(idx) => vec![
-            Ctx::offset_tables() as usize,
-            idx.index() * 8,
-            0,
-        ],
-        LocalOrImport::Import(idx) => vec![
-            Ctx::offset_imported_tables() as usize,
-            idx.index() * 8,
-            0,
-        ],
+        LocalOrImport::Local(idx) => vec![Ctx::offset_tables() as usize, idx.index() * 8, 0],
+        LocalOrImport::Import(idx) => {
+            vec![Ctx::offset_imported_tables() as usize, idx.index() * 8, 0]
+        }
     };
     if deref_into_value {
         x.push(0);
@@ -576,19 +582,13 @@ fn deref_table_base(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Ve
 
 fn deref_table_bound(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {
     let mut x: Vec<usize> = match TableIndex::new(idx).local_or_import(info) {
-        LocalOrImport::Local(idx) => vec![
-            Ctx::offset_tables() as usize,
-            idx.index() * 8,
-            8,
-        ],
-        LocalOrImport::Import(idx) => vec![
-            Ctx::offset_imported_tables() as usize,
-            idx.index() * 8,
-            8,
-        ],
+        LocalOrImport::Local(idx) => vec![Ctx::offset_tables() as usize, idx.index() * 8, 8],
+        LocalOrImport::Import(idx) => {
+            vec![Ctx::offset_imported_tables() as usize, idx.index() * 8, 8]
+        }
     };
     if deref_into_value {
         x.push(0);
     }
     x
 }
@@ -151,6 +151,10 @@ pub trait RunnableModule: Send + Sync {
         None
     }
 
+    unsafe fn patch_local_function(&self, _idx: usize, _target_address: usize) -> bool {
+        false
+    }
+
     /// A wasm trampoline contains the necessary data to dynamically call an exported wasm function.
     /// Given a particular signature index, we are returned a trampoline that is matched with that
     /// signature and an invoke function that can call the trampoline.
@@ -167,6 +171,11 @@ pub trait RunnableModule: Send + Sync {
     fn get_offsets(&self) -> Option<Vec<usize>> {
         None
     }
+
+    /// Returns the beginning offsets of all local functions.
+    fn get_local_function_offsets(&self) -> Option<Vec<usize>> {
+        None
+    }
 }
 
 pub trait CacheGen: Send + Sync {
@@ -17,7 +17,7 @@ use nix::sys::signal::{
     SIGSEGV, SIGTRAP,
 };
 use std::any::Any;
-use std::cell::UnsafeCell;
+use std::cell::{Cell, UnsafeCell};
 use std::ffi::c_void;
 use std::process;
 use std::sync::atomic::{AtomicBool, Ordering};
@@ -41,6 +41,7 @@ struct UnwindInfo {
 thread_local! {
     static UNWIND: UnsafeCell<Option<UnwindInfo>> = UnsafeCell::new(None);
     static CURRENT_CTX: UnsafeCell<*mut vm::Ctx> = UnsafeCell::new(::std::ptr::null_mut());
+    static WAS_SIGINT_TRIGGERED: Cell<bool> = Cell::new(false);
 }
 
 struct InterruptSignalMem(*mut u8);
@@ -69,6 +70,10 @@ lazy_static! {
 }
 static INTERRUPT_SIGNAL_DELIVERED: AtomicBool = AtomicBool::new(false);
 
+pub fn was_sigint_triggered_fault() -> bool {
+    WAS_SIGINT_TRIGGERED.with(|x| x.get())
+}
+
 pub unsafe fn with_ctx<R, F: FnOnce() -> R>(ctx: *mut vm::Ctx, cb: F) -> R {
     let addr = CURRENT_CTX.with(|x| x.get());
     let old = *addr;
@@ -82,6 +87,17 @@ pub unsafe fn get_wasm_interrupt_signal_mem() -> *mut u8 {
     INTERRUPT_SIGNAL_MEM.0
 }
 
+pub unsafe fn set_wasm_interrupt_on_ctx(ctx: *mut vm::Ctx) {
+    if mprotect(
+        (&*ctx).internal.interrupt_signal_mem as _,
+        INTERRUPT_SIGNAL_MEM_SIZE,
+        PROT_NONE,
+    ) < 0
+    {
+        panic!("cannot set PROT_NONE on signal mem");
+    }
+}
+
 pub unsafe fn set_wasm_interrupt() {
     let mem: *mut u8 = INTERRUPT_SIGNAL_MEM.0;
     if mprotect(mem as _, INTERRUPT_SIGNAL_MEM_SIZE, PROT_NONE) < 0 {
@@ -188,7 +204,7 @@ extern "C" fn signal_trap_handler(
     let should_unwind = allocate_and_run(TRAP_STACK_SIZE, || {
         let mut is_suspend_signal = false;
 
-        println!("SIGNAL: {:?} {:?}", Signal::from_c_int(signum), fault.faulting_addr);
+        WAS_SIGINT_TRIGGERED.with(|x| x.set(false));
 
         match Signal::from_c_int(signum) {
             Ok(SIGTRAP) => {
@@ -215,7 +231,9 @@ extern "C" fn signal_trap_handler(
                 if fault.faulting_addr as usize == get_wasm_interrupt_signal_mem() as usize {
                     is_suspend_signal = true;
                     clear_wasm_interrupt();
-                    INTERRUPT_SIGNAL_DELIVERED.store(false, Ordering::SeqCst);
+                    if INTERRUPT_SIGNAL_DELIVERED.swap(false, Ordering::SeqCst) {
+                        WAS_SIGINT_TRIGGERED.with(|x| x.set(true));
+                    }
                 }
             }
             _ => {}
@@ -102,13 +102,20 @@ pub struct CodeMemory {
     size: usize,
 }
 
+unsafe impl Send for CodeMemory {}
+unsafe impl Sync for CodeMemory {}
+
 #[cfg(not(unix))]
 impl CodeMemory {
     pub fn new(_size: usize) -> CodeMemory {
         unimplemented!();
     }
 
-    pub fn make_executable(&mut self) {
+    pub fn make_executable(&self) {
+        unimplemented!();
+    }
+
+    pub fn make_writable(&self) {
         unimplemented!();
     }
 }
@@ -139,11 +146,17 @@ impl CodeMemory {
         }
     }
 
-    pub fn make_executable(&mut self) {
+    pub fn make_executable(&self) {
         if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_EXEC) } != 0 {
             panic!("cannot set code memory to executable");
         }
     }
+
+    pub fn make_writable(&self) {
+        if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_WRITE) } != 0 {
+            panic!("cannot set code memory to writable");
+        }
+    }
 }
 
 #[cfg(unix)]
|
@ -493,21 +493,21 @@ pub mod x64 {
|
|||||||
stack[stack_offset] |= v;
|
stack[stack_offset] |= v;
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
MachineValue::WasmLocal(x) => {
|
MachineValue::WasmLocal(x) => match fsm.locals[x] {
|
||||||
match fsm.locals[x] {
|
WasmAbstractValue::Const(x) => {
|
||||||
WasmAbstractValue::Const(x) => {
|
assert!(x <= ::std::u32::MAX as u64);
|
||||||
assert!(x <= ::std::u32::MAX as u64);
|
stack[stack_offset] |= x;
|
||||||
stack[stack_offset] |= x;
|
|
||||||
}
|
|
||||||
WasmAbstractValue::Runtime => {
|
|
||||||
let v = f.locals[x].unwrap();
|
|
||||||
assert!(v <= ::std::u32::MAX as u64);
|
|
||||||
stack[stack_offset] |= v;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
WasmAbstractValue::Runtime => {
|
||||||
|
let v = f.locals[x].unwrap();
|
||||||
|
assert!(v <= ::std::u32::MAX as u64);
|
||||||
|
stack[stack_offset] |= v;
|
||||||
|
}
|
||||||
|
},
|
||||||
MachineValue::VmctxDeref(ref seq) => {
|
MachineValue::VmctxDeref(ref seq) => {
|
||||||
stack[stack_offset] |= compute_vmctx_deref(vmctx as *const Ctx, seq) & (::std::u32::MAX as u64);
|
stack[stack_offset] |=
|
||||||
|
compute_vmctx_deref(vmctx as *const Ctx, seq)
|
||||||
|
& (::std::u32::MAX as u64);
|
||||||
}
|
}
|
||||||
MachineValue::Undefined => {}
|
MachineValue::Undefined => {}
|
||||||
_ => unimplemented!("TwoHalves.0"),
|
_ => unimplemented!("TwoHalves.0"),
|
||||||
@@ -524,21 +524,22 @@ pub mod x64 {
                        stack[stack_offset] |= v << 32;
                    }
                },
-               MachineValue::WasmLocal(x) => {
-                   match fsm.locals[x] {
-                       WasmAbstractValue::Const(x) => {
-                           assert!(x <= ::std::u32::MAX as u64);
-                           stack[stack_offset] |= x << 32;
-                       }
-                       WasmAbstractValue::Runtime => {
-                           let v = f.locals[x].unwrap();
-                           assert!(v <= ::std::u32::MAX as u64);
-                           stack[stack_offset] |= v << 32;
-                       }
+               MachineValue::WasmLocal(x) => match fsm.locals[x] {
+                   WasmAbstractValue::Const(x) => {
+                       assert!(x <= ::std::u32::MAX as u64);
+                       stack[stack_offset] |= x << 32;
                    }
-               }
+                   WasmAbstractValue::Runtime => {
+                       let v = f.locals[x].unwrap();
+                       assert!(v <= ::std::u32::MAX as u64);
+                       stack[stack_offset] |= v << 32;
+                   }
+               },
                MachineValue::VmctxDeref(ref seq) => {
-                   stack[stack_offset] |= (compute_vmctx_deref(vmctx as *const Ctx, seq) & (::std::u32::MAX as u64)) << 32;
+                   stack[stack_offset] |=
+                       (compute_vmctx_deref(vmctx as *const Ctx, seq)
+                           & (::std::u32::MAX as u64))
+                           << 32;
                }
                MachineValue::Undefined => {}
                _ => unimplemented!("TwoHalves.1"),
@@ -583,7 +584,6 @@ pub mod x64 {
 
         stack_offset -= 1;
         stack[stack_offset] = (code_base + activate_offset) as u64; // return address
-        println!("activating at {:?}", (code_base + activate_offset) as *const u8);
     }
 
     stack_offset -= 1;
@@ -694,7 +694,6 @@ pub mod x64 {
 
     catch_unsafe_unwind(
         || {
-            ::std::intrinsics::breakpoint();
             run_on_alternative_stack(
                 stack.as_mut_ptr().offset(stack.len() as isize),
                 stack.as_mut_ptr().offset(stack_offset as isize),
@@ -20,6 +20,7 @@ use wasmer_runtime_core::{
     },
     cache::{Artifact, Error as CacheError},
     codegen::*,
+    loader::CodeMemory,
     memory::MemoryType,
     module::{ModuleInfo, ModuleInner},
     state::{
@@ -172,7 +173,7 @@ unsafe impl Sync for FuncPtr {}
 
 pub struct X64ExecutionContext {
     #[allow(dead_code)]
-    code: ExecutableBuffer,
+    code: CodeMemory,
     #[allow(dead_code)]
     functions: Vec<X64FunctionCode>,
     function_pointers: Vec<FuncPtr>,
@@ -220,6 +221,30 @@ impl RunnableModule for X64ExecutionContext {
         Some(self.breakpoints.clone())
     }
 
+    unsafe fn patch_local_function(&self, idx: usize, target_address: usize) -> bool {
+        // movabsq ?, %rax;
+        // jmpq *%rax;
+        #[repr(packed)]
+        struct Trampoline {
+            movabsq: [u8; 2],
+            addr: u64,
+            jmpq: [u8; 2],
+        }
+
+        self.code.make_writable();
+
+        let trampoline = &mut *(self.function_pointers[self.func_import_count + idx].0
+            as *const Trampoline as *mut Trampoline);
+        trampoline.movabsq[0] = 0x48;
+        trampoline.movabsq[1] = 0xb8;
+        trampoline.addr = target_address as u64;
+        trampoline.jmpq[0] = 0xff;
+        trampoline.jmpq[1] = 0xe0;
+
+        self.code.make_executable();
+        true
+    }
+
     fn get_trampoline(&self, _: &ModuleInfo, sig_index: SigIndex) -> Option<Wasm> {
         use std::ffi::c_void;
         use wasmer_runtime_core::typed_func::WasmTrapInfo;
@@ -306,6 +331,15 @@ impl RunnableModule for X64ExecutionContext {
     fn get_offsets(&self) -> Option<Vec<usize>> {
         Some(self.function_offsets.iter().map(|x| x.0).collect())
     }
+
+    fn get_local_function_offsets(&self) -> Option<Vec<usize>> {
+        Some(
+            self.function_offsets[self.func_import_count..]
+                .iter()
+                .map(|x| x.0)
+                .collect(),
+        )
+    }
 }
 
 #[derive(Debug)]
@@ -413,7 +447,10 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
             }
         };
         let total_size = assembler.get_offset().0;
-        let output = assembler.finalize().unwrap();
+        let _output = assembler.finalize().unwrap();
+        let mut output = CodeMemory::new(_output.len());
+        output[0.._output.len()].copy_from_slice(&_output);
+        output.make_executable();
 
         let function_labels = if let Some(x) = self.functions.last() {
             x.function_labels.as_ref().unwrap()
@@ -440,14 +477,21 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
                     });
                 }
             };
-            out_labels.push(FuncPtr(output.ptr(*offset) as _));
+            out_labels.push(FuncPtr(
+                unsafe { output.as_ptr().offset(offset.0 as isize) } as _,
+            ));
             out_offsets.push(*offset);
         }
 
         let breakpoints: Arc<HashMap<_, _>> = Arc::new(
            breakpoints
                 .into_iter()
-                .map(|(offset, f)| (output.ptr(offset) as usize, f))
+                .map(|(offset, f)| {
+                    (
+                        unsafe { output.as_ptr().offset(offset.0 as isize) } as usize,
+                        f,
+                    )
+                })
                 .collect(),
         );
 
@@ -1634,6 +1678,12 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
 
     fn begin_body(&mut self, _module_info: &ModuleInfo) -> Result<(), CodegenError> {
         let a = self.assembler.as_mut().unwrap();
+        let start_label = a.get_label();
+        // patchpoint of 16 bytes
+        for _ in 0..16 {
+            a.emit_nop();
+        }
+        a.emit_label(start_label);
         a.emit_push(Size::S64, Location::GPR(GPR::RBP));
         a.emit_mov(Size::S64, Location::GPR(GPR::RSP), Location::GPR(GPR::RBP));
 
@@ -60,6 +60,8 @@ pub trait Emitter {
 
     fn emit_label(&mut self, label: Self::Label);
 
+    fn emit_nop(&mut self);
+
     fn emit_mov(&mut self, sz: Size, src: Location, dst: Location);
     fn emit_lea(&mut self, sz: Size, src: Location, dst: Location);
     fn emit_lea_label(&mut self, label: Self::Label, dst: Location);
@@ -467,6 +469,10 @@ impl Emitter for Assembler {
         dynasm!(self ; => label);
     }
 
+    fn emit_nop(&mut self) {
+        dynasm!(self ; nop);
+    }
+
     fn emit_mov(&mut self, sz: Size, src: Location, dst: Location) {
         binop_all_nofp!(mov, self, sz, src, dst, {
             binop_imm64_gpr!(mov, self, sz, src, dst, {
@@ -9,6 +9,7 @@ use std::io::Read;
 use std::path::PathBuf;
 use std::process::exit;
 use std::str::FromStr;
+use std::sync::{Arc, Mutex};
 
 use hashbrown::HashMap;
 use structopt::StructOpt;
@@ -25,7 +26,9 @@ use wasmer_runtime_core::{
     self,
     backend::{Backend, Compiler, CompilerConfig, MemoryBoundCheckMode},
     debug,
+    fault::{set_wasm_interrupt_on_ctx, was_sigint_triggered_fault},
     loader::{Instance as LoadedInstance, LocalLoader},
+    Instance, Module,
 };
 #[cfg(feature = "backend-singlepass")]
 use wasmer_singlepass_backend::SinglePassCompiler;
@@ -113,7 +116,7 @@ struct Run {
     loader: Option<LoaderName>,
 
     /// Path to previously saved instance image to resume.
-    #[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+    #[cfg(feature = "managed")]
     #[structopt(long = "resume")]
     resume: Option<String>,
 
@@ -187,6 +190,55 @@ struct Validate {
     path: PathBuf,
 }
 
+struct OptimizationState {
+    outcome: Mutex<Option<OptimizationOutcome>>,
+}
+
+struct OptimizationOutcome {
+    module: Module,
+}
+
+struct Defer<F: FnOnce()>(Option<F>);
+impl<F: FnOnce()> Drop for Defer<F> {
+    fn drop(&mut self) {
+        if let Some(f) = self.0.take() {
+            f();
+        }
+    }
+}
+
+#[repr(transparent)]
+struct CtxWrapper(*mut wasmer_runtime_core::vm::Ctx);
+unsafe impl Send for CtxWrapper {}
+unsafe impl Sync for CtxWrapper {}
+
+#[cfg(feature = "managed")]
+unsafe fn begin_optimize(
+    binary: Vec<u8>,
+    compiler: Box<dyn Compiler>,
+    ctx: Arc<Mutex<CtxWrapper>>,
+    state: Arc<OptimizationState>,
+) {
+    let module = match webassembly::compile_with_config_with(
+        &binary[..],
+        CompilerConfig {
+            symbol_map: None,
+            track_state: true,
+            ..Default::default()
+        },
+        &*compiler,
+    ) {
+        Ok(x) => x,
+        Err(_) => return,
+    };
+
+    let ctx_inner = ctx.lock().unwrap();
+    if !ctx_inner.0.is_null() {
+        *state.outcome.lock().unwrap() = Some(OptimizationOutcome { module });
+        set_wasm_interrupt_on_ctx(ctx_inner.0);
+    }
+}
+
 /// Read the contents of a file
 fn read_file_contents(path: &PathBuf) -> Result<Vec<u8>, io::Error> {
     let mut buffer: Vec<u8> = Vec::new();
@@ -505,9 +557,9 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
 
     let start: Func<(), ()> = instance.func("_start").map_err(|e| format!("{:?}", e))?;
 
-    #[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+    #[cfg(feature = "managed")]
     unsafe {
-        if options.backend == Backend::Singlepass || options.backend == Backend::LLVM {
+        if options.backend == Backend::Singlepass {
             use wasmer_runtime_core::fault::{
                 catch_unsafe_unwind, ensure_sighandler, with_ctx,
             };
@@ -530,7 +582,86 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
             None
         };
 
+        let ctx_box =
+            Arc::new(Mutex::new(CtxWrapper(instance.context_mut() as *mut _)));
+        // Ensure that the ctx pointer's lifetime is not longer than Instance's.
+        let _deferred_ctx_box_cleanup: Defer<_> = {
+            let ctx_box = ctx_box.clone();
+            Defer(Some(move || {
+                ctx_box.lock().unwrap().0 = ::std::ptr::null_mut();
+            }))
+        };
+        let opt_state = Arc::new(OptimizationState {
+            outcome: Mutex::new(None),
+        });
+
+        {
+            let wasm_binary = wasm_binary.to_vec();
+            let ctx_box = ctx_box.clone();
+            let opt_state = opt_state.clone();
+            ::std::thread::spawn(move || {
+                // TODO: CLI option for optimized backend
+                begin_optimize(
+                    wasm_binary,
+                    get_compiler_by_backend(Backend::LLVM).unwrap(),
+                    ctx_box,
+                    opt_state,
+                );
+            });
+        }
+
+        let mut patched = false;
+        let mut optimized_instance: Option<Instance> = None;
+
         loop {
+            let optimized: Option<&mut Instance> =
+                if let Some(ref mut x) = optimized_instance {
+                    Some(x)
+                } else {
+                    let mut outcome = opt_state.outcome.lock().unwrap();
+                    if let Some(x) = outcome.take() {
+                        let instance =
+                            x.module.instantiate(&import_object).map_err(|e| {
+                                format!("Can't instantiate module: {:?}", e)
+                            })?;
+                        optimized_instance = Some(instance);
+                        optimized_instance.as_mut()
+                    } else {
+                        None
+                    }
+                };
+
+            if !patched && false {
+                if let Some(optimized) = optimized {
+                    let base = module.info().imported_functions.len();
+                    let code_ptr = optimized
+                        .module
+                        .runnable_module
+                        .get_code()
+                        .unwrap()
+                        .as_ptr()
+                        as usize;
+                    let target_addresses: Vec<usize> = optimized
+                        .module
+                        .runnable_module
+                        .get_local_function_offsets()
+                        .unwrap()
+                        .into_iter()
+                        .map(|x| code_ptr + x)
+                        .collect();
+                    assert_eq!(
+                        target_addresses.len(),
+                        module.info().func_assoc.len() - base
+                    );
+                    for i in base..module.info().func_assoc.len() {
+                        instance
+                            .module
+                            .runnable_module
+                            .patch_local_function(i - base, target_addresses[i - base]);
+                    }
+                    patched = true;
+                    eprintln!("Patched");
+                }
+            }
             let breakpoints = instance.module.runnable_module.get_breakpoints();
             let ctx = instance.context_mut() as *mut _;
             let ret = with_ctx(ctx, || {
@@ -560,6 +691,14 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
             });
             if let Err(e) = ret {
                 if let Some(new_image) = e.downcast_ref::<InstanceImage>() {
+                    // Tier switch event
+                    if !was_sigint_triggered_fault()
+                        && optimized_instance.is_none()
+                        && opt_state.outcome.lock().unwrap().is_some()
+                    {
+                        image = Some(new_image.clone());
+                        continue;
+                    }
                     let op = interactive_shell(InteractiveShellContext {
                         image: Some(new_image.clone()),
                     });
@@ -644,18 +783,18 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
     Ok(())
 }
 
-#[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+#[cfg(feature = "managed")]
 struct InteractiveShellContext {
     image: Option<wasmer_runtime_core::state::InstanceImage>,
 }
 
-#[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+#[cfg(feature = "managed")]
 #[derive(Debug)]
 enum ShellExitOperation {
     ContinueWith(wasmer_runtime_core::state::InstanceImage, Option<Backend>),
 }
 
-#[cfg(any(feature = "backend-singlepass", feature = "backend-llvm"))]
+#[cfg(feature = "managed")]
 fn interactive_shell(mut ctx: InteractiveShellContext) -> ShellExitOperation {
     use std::io::Write;
 
@@ -710,6 +849,8 @@ fn interactive_shell(mut ctx: InteractiveShellContext) -> ShellExitOperation {
                 println!("Program state not available, cannot continue execution");
             }
         }
+        // Disabled due to unsafety.
+        /*
         "switch_backend" => {
             let backend_name = parts.next();
             if backend_name.is_none() {
@@ -731,6 +872,7 @@ fn interactive_shell(mut ctx: InteractiveShellContext) -> ShellExitOperation {
                 println!("Program state not available, cannot continue execution");
             }
         }
+        */
         "backtrace" | "bt" => {
             if let Some(ref image) = ctx.image {
                 println!("{}", image.execution_state.colored_output());