Full preemptive snapshot/resume.

This commit is contained in:
losfair
2019-06-27 15:49:43 +08:00
parent 7d0b70bddf
commit 967027003d
15 changed files with 523 additions and 223 deletions

View File

@ -164,6 +164,7 @@ impl Intrinsics {
        let memory_base_ty = i8_ty;
        let memory_bound_ty = void_ty;
        let internals_ty = i64_ty;
+       let interrupt_signal_mem_ty = i8_ty;
        let local_function_ty = i8_ptr_ty;
        let anyfunc_ty = context.struct_type(
@ -222,6 +223,9 @@ impl Intrinsics {
                internals_ty
                    .ptr_type(AddressSpace::Generic)
                    .as_basic_type_enum(),
+               interrupt_signal_mem_ty
+                   .ptr_type(AddressSpace::Generic)
+                   .as_basic_type_enum(),
                local_function_ty
                    .ptr_type(AddressSpace::Generic)
                    .as_basic_type_enum(),

View File

@ -17,6 +17,7 @@ impl FunctionMiddleware for CallTrace {
            Event::Internal(InternalEvent::FunctionBegin(id)) => sink.push(Event::Internal(
                InternalEvent::Breakpoint(Box::new(move |_| {
                    eprintln!("func ({})", id);
+                   Ok(())
                })),
            )),
            _ => {}

View File

@ -94,11 +94,9 @@ impl FunctionMiddleware for Metering {
                sink.push(Event::WasmOwned(Operator::If {
                    ty: WpType::EmptyBlockType,
                }));
-               sink.push(Event::Internal(InternalEvent::Breakpoint(Box::new(
-                   move |ctx| unsafe {
-                       (ctx.throw)(Box::new(ExecutionLimitExceededError));
-                   },
-               ))));
+               sink.push(Event::Internal(InternalEvent::Breakpoint(Box::new(|_| {
+                   Err(Box::new(ExecutionLimitExceededError))
+               }))));
                sink.push(Event::WasmOwned(Operator::End));
            }
            _ => {}
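The handler contract changed here: instead of receiving a context with a `throw` function pointer and unwinding on the spot, a breakpoint callback now returns `Result<(), Box<dyn Any>>` and the signal handler decides whether to unwind. A minimal sketch of the new control flow (types simplified; `BkptHandler` below is a local stand-in, not the runtime's real alias):

```rust
use std::any::Any;

// Simplified stand-in for the runtime's boxed breakpoint handler type.
type BkptHandler = Box<dyn Fn() -> Result<(), Box<dyn Any>> + Send + Sync>;

struct ExecutionLimitExceededError;

fn main() {
    // The metering middleware's handler just reports the error as a payload.
    let handler: BkptHandler = Box::new(|| Err(Box::new(ExecutionLimitExceededError)));
    match handler() {
        Ok(()) => println!("resume wasm execution"),
        // The real runtime would unwind with this payload.
        Err(payload) => assert!(payload.is::<ExecutionLimitExceededError>()),
    }
}
```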

View File

@ -16,8 +16,42 @@ leaq run_on_alternative_stack.returning(%rip), %rax
 movq %rax, -24(%rdi)
 movq %rsi, %rsp
+movq (%rsp), %xmm0
+add $8, %rsp
+movq (%rsp), %xmm1
+add $8, %rsp
+movq (%rsp), %xmm2
+add $8, %rsp
+movq (%rsp), %xmm3
+add $8, %rsp
+movq (%rsp), %xmm4
+add $8, %rsp
+movq (%rsp), %xmm5
+add $8, %rsp
+movq (%rsp), %xmm6
+add $8, %rsp
+movq (%rsp), %xmm7
+add $8, %rsp
 popq %rbp
+popq %rax
 popq %rbx
+popq %rcx
+popq %rdx
+popq %rdi
+popq %rsi
+popq %r8
+popq %r9
+popq %r10
+popq %r11
 popq %r12
 popq %r13
 popq %r14

View File

@ -16,8 +16,42 @@ leaq _run_on_alternative_stack.returning(%rip), %rax
 movq %rax, -24(%rdi)
 movq %rsi, %rsp
+movq (%rsp), %xmm0
+add $8, %rsp
+movq (%rsp), %xmm1
+add $8, %rsp
+movq (%rsp), %xmm2
+add $8, %rsp
+movq (%rsp), %xmm3
+add $8, %rsp
+movq (%rsp), %xmm4
+add $8, %rsp
+movq (%rsp), %xmm5
+add $8, %rsp
+movq (%rsp), %xmm6
+add $8, %rsp
+movq (%rsp), %xmm7
+add $8, %rsp
 popq %rbp
+popq %rax
 popq %rbx
+popq %rcx
+popq %rdx
+popq %rdi
+popq %rsi
+popq %r8
+popq %r9
+popq %r10
+popq %r11
 popq %r12
 popq %r13
 popq %r14
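Both `image-loading-*.s` variants now restore eight XMM slots followed by the general-purpose registers from the prepared stack. As a quick sanity check, and assuming `%r15` is popped just past the context shown, the slot count lines up with the `NUM_SAVED_REGISTERS = 23` constant introduced in `alternative_stack.rs`:

```rust
// Not part of the commit: slot-count check for the restore sequence above,
// assuming the sequence continues with `popq %r15` after the shown context.
const XMM_SLOTS: usize = 8; // xmm0..xmm7, one 64-bit slot each
const GPR_SLOTS: usize = 15; // rbp, rax, rbx, rcx, rdx, rdi, rsi, r8..r15

fn main() {
    assert_eq!(XMM_SLOTS + GPR_SLOTS, 23); // NUM_SAVED_REGISTERS
}
```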

View File

@ -2,49 +2,103 @@ mod raw {
     use std::ffi::c_void;
     extern "C" {
-        pub fn run_on_alternative_stack(
-            stack_end: *mut u64,
-            stack_begin: *mut u64,
-            userdata_arg2: *mut u8,
-        ) -> u64;
+        pub fn run_on_alternative_stack(stack_end: *mut u64, stack_begin: *mut u64) -> u64;
         pub fn setjmp(env: *mut c_void) -> i32;
         pub fn longjmp(env: *mut c_void, val: i32) -> !;
     }
 }
-use crate::state::x64::{read_stack, X64Register, GPR};
-use crate::suspend;
+use crate::codegen::{BkptInfo, BkptMap};
+use crate::state::x64::{build_instance_image, read_stack, X64Register, GPR, XMM};
 use crate::vm;
-use libc::siginfo_t;
+use libc::{mmap, mprotect, siginfo_t, MAP_ANON, MAP_PRIVATE, PROT_NONE, PROT_READ, PROT_WRITE};
 use nix::sys::signal::{
-    sigaction, SaFlags, SigAction, SigHandler, SigSet, SIGBUS, SIGFPE, SIGILL, SIGINT, SIGSEGV,
-    SIGTRAP,
+    sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGBUS, SIGFPE, SIGILL, SIGINT,
+    SIGSEGV, SIGTRAP,
 };
 use std::any::Any;
 use std::cell::UnsafeCell;
 use std::ffi::c_void;
 use std::process;
+use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::Once;
 pub(crate) unsafe fn run_on_alternative_stack(stack_end: *mut u64, stack_begin: *mut u64) -> u64 {
-    raw::run_on_alternative_stack(stack_end, stack_begin, ::std::ptr::null_mut())
+    raw::run_on_alternative_stack(stack_end, stack_begin)
 }
 const SETJMP_BUFFER_LEN: usize = 27;
 type SetJmpBuffer = [i32; SETJMP_BUFFER_LEN];
-thread_local! {
-    static UNWIND: UnsafeCell<Option<(SetJmpBuffer, Option<Box<Any>>)>> = UnsafeCell::new(None);
-}
-pub unsafe fn catch_unsafe_unwind<R, F: FnOnce() -> R>(f: F) -> Result<R, Box<Any>> {
+struct UnwindInfo {
+    jmpbuf: SetJmpBuffer, // in
+    breakpoints: Option<BkptMap>,
+    payload: Option<Box<Any>>, // out
+}
+thread_local! {
+    static UNWIND: UnsafeCell<Option<UnwindInfo>> = UnsafeCell::new(None);
+}
+struct InterruptSignalMem(*mut u8);
+unsafe impl Send for InterruptSignalMem {}
+unsafe impl Sync for InterruptSignalMem {}
+const INTERRUPT_SIGNAL_MEM_SIZE: usize = 4096;
+lazy_static! {
+    static ref INTERRUPT_SIGNAL_MEM: InterruptSignalMem = {
+        let ptr = unsafe {
+            mmap(
+                ::std::ptr::null_mut(),
+                INTERRUPT_SIGNAL_MEM_SIZE,
+                PROT_READ | PROT_WRITE,
+                MAP_PRIVATE | MAP_ANON,
+                -1,
+                0,
+            )
+        };
+        if ptr as isize == -1 {
+            panic!("cannot allocate code memory");
+        }
+        InterruptSignalMem(ptr as _)
+    };
+}
+static INTERRUPT_SIGNAL_DELIVERED: AtomicBool = AtomicBool::new(false);
+pub unsafe fn get_wasm_interrupt_signal_mem() -> *mut u8 {
+    INTERRUPT_SIGNAL_MEM.0
+}
+pub unsafe fn set_wasm_interrupt() {
+    let mem: *mut u8 = INTERRUPT_SIGNAL_MEM.0;
+    if mprotect(mem as _, INTERRUPT_SIGNAL_MEM_SIZE, PROT_NONE) < 0 {
+        panic!("cannot set PROT_NONE on signal mem");
+    }
+}
+pub unsafe fn clear_wasm_interrupt() {
+    let mem: *mut u8 = INTERRUPT_SIGNAL_MEM.0;
+    if mprotect(mem as _, INTERRUPT_SIGNAL_MEM_SIZE, PROT_READ | PROT_WRITE) < 0 {
+        panic!("cannot set PROT_READ | PROT_WRITE on signal mem");
+    }
+}
+pub unsafe fn catch_unsafe_unwind<R, F: FnOnce() -> R>(
+    f: F,
+    breakpoints: Option<BkptMap>,
+) -> Result<R, Box<Any>> {
     let unwind = UNWIND.with(|x| x.get());
     let old = (*unwind).take();
-    *unwind = Some(([0; SETJMP_BUFFER_LEN], None));
+    *unwind = Some(UnwindInfo {
+        jmpbuf: [0; SETJMP_BUFFER_LEN],
+        breakpoints: breakpoints,
+        payload: None,
+    });
-    if raw::setjmp(&mut (*unwind).as_mut().unwrap().0 as *mut SetJmpBuffer as *mut _) != 0 {
+    if raw::setjmp(&mut (*unwind).as_mut().unwrap().jmpbuf as *mut SetJmpBuffer as *mut _) != 0 {
         // error
-        let ret = (*unwind).as_mut().unwrap().1.take().unwrap();
+        let ret = (*unwind).as_mut().unwrap().payload.take().unwrap();
         *unwind = old;
         Err(ret)
     } else {
@ -60,8 +114,16 @@ pub unsafe fn begin_unsafe_unwind(e: Box<Any>) -> ! {
     let inner = (*unwind)
         .as_mut()
         .expect("not within a catch_unsafe_unwind scope");
-    inner.1 = Some(e);
-    raw::longjmp(&mut inner.0 as *mut SetJmpBuffer as *mut _, 0xffff);
+    inner.payload = Some(e);
+    raw::longjmp(&mut inner.jmpbuf as *mut SetJmpBuffer as *mut _, 0xffff);
+}
+unsafe fn with_breakpoint_map<R, F: FnOnce(Option<&BkptMap>) -> R>(f: F) -> R {
+    let unwind = UNWIND.with(|x| x.get());
+    let inner = (*unwind)
+        .as_mut()
+        .expect("not within a catch_unsafe_unwind scope");
+    f(inner.breakpoints.as_ref())
 }
 pub fn allocate_and_run<R, F: FnOnce() -> R>(size: usize, f: F) -> R {
@ -70,7 +132,7 @@ pub fn allocate_and_run<R, F: FnOnce() -> R>(size: usize, f: F) -> R {
         ret: Option<R>,
     }
-    extern "C" fn invoke<F: FnOnce() -> R, R>(_: u64, _: u64, ctx: &mut Context<F, R>) {
+    extern "C" fn invoke<F: FnOnce() -> R, R>(ctx: &mut Context<F, R>) {
         let f = ctx.f.take().unwrap();
         ctx.ret = Some(f());
     }
@ -89,29 +151,65 @@ pub fn allocate_and_run<R, F: FnOnce() -> R>(size: usize, f: F) -> R {
         stack[end_offset - 4] = invoke::<F, R> as usize as u64;
         // NOTE: Keep this consistent with `image-loading-*.s`.
-        let stack_begin = stack.as_mut_ptr().offset((end_offset - 4 - 6) as isize);
+        stack[end_offset - 4 - 10] = &mut ctx as *mut Context<F, R> as usize as u64; // rdi
+        const NUM_SAVED_REGISTERS: usize = 23;
+        let stack_begin = stack
+            .as_mut_ptr()
+            .offset((end_offset - 4 - NUM_SAVED_REGISTERS) as isize);
         let stack_end = stack.as_mut_ptr().offset(end_offset as isize);
-        raw::run_on_alternative_stack(
-            stack_end,
-            stack_begin,
-            &mut ctx as *mut Context<F, R> as *mut u8,
-        );
+        raw::run_on_alternative_stack(stack_end, stack_begin);
         ctx.ret.take().unwrap()
     }
 }
 extern "C" fn signal_trap_handler(
-    _signum: ::nix::libc::c_int,
+    signum: ::nix::libc::c_int,
     siginfo: *mut siginfo_t,
     ucontext: *mut c_void,
 ) {
     unsafe {
         let fault = get_fault_info(siginfo as _, ucontext);
-        allocate_and_run(65536, || {
+        let mut unwind_result: Box<dyn Any> = Box::new(());
+        let should_unwind = allocate_and_run(1048576, || {
+            let mut is_suspend_signal = false;
+            match Signal::from_c_int(signum) {
+                Ok(SIGTRAP) => {
+                    // breakpoint
+                    let out: Option<Result<(), Box<dyn Any>>> = with_breakpoint_map(|bkpt_map| {
+                        bkpt_map.and_then(|x| x.get(&(fault.ip as usize))).map(|x| {
+                            x(BkptInfo {
+                                fault: Some(&fault),
+                            })
+                        })
+                    });
+                    match out {
+                        Some(Ok(())) => {
+                            return false;
+                        }
+                        Some(Err(e)) => {
+                            unwind_result = e;
+                            return true;
+                        }
+                        None => {}
+                    }
+                }
+                Ok(SIGSEGV) | Ok(SIGBUS) => {
+                    println!("SIGSEGV/SIGBUS on addr {:?}", fault.faulting_addr);
+                    if fault.faulting_addr as usize == get_wasm_interrupt_signal_mem() as usize {
+                        is_suspend_signal = true;
+                        clear_wasm_interrupt();
+                        INTERRUPT_SIGNAL_DELIVERED.store(false, Ordering::SeqCst);
+                    }
+                }
+                _ => {}
+            }
             // TODO: make this safer
-            let ctx = &*(fault.known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap()
+            let ctx = &mut *(fault.known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap()
                 as *mut vm::Ctx);
             let rsp = fault.known_registers[X64Register::GPR(GPR::RSP).to_index().0].unwrap();
@ -120,7 +218,7 @@ extern "C" fn signal_trap_handler(
                 .get_module_state_map()
                 .unwrap();
             let code_base = (*ctx.module).runnable_module.get_code().unwrap().as_ptr() as usize;
-            let image = read_stack(
+            let es_image = read_stack(
                 &msm,
                 code_base,
                 rsp as usize as *const u64,
@ -128,17 +226,26 @@ extern "C" fn signal_trap_handler(
                 Some(fault.ip as usize as u64),
             );
-            use colored::*;
-            eprintln!(
-                "\n{}",
-                "Wasmer encountered an error while running your WebAssembly program."
-                    .bold()
-                    .red()
-            );
-            image.print_backtrace_if_needed();
+            if is_suspend_signal {
+                let image = build_instance_image(ctx, es_image);
+                unwind_result = Box::new(image);
+            } else {
+                use colored::*;
+                eprintln!(
+                    "\n{}",
+                    "Wasmer encountered an error while running your WebAssembly program."
+                        .bold()
+                        .red()
+                );
+                es_image.print_backtrace_if_needed();
+            }
+            true
         });
-        begin_unsafe_unwind(Box::new(()));
+        if should_unwind {
+            begin_unsafe_unwind(unwind_result);
+        }
     }
 }
@ -147,14 +254,13 @@ extern "C" fn sigint_handler(
     _siginfo: *mut siginfo_t,
     _ucontext: *mut c_void,
 ) {
-    if suspend::get_interrupted() {
-        eprintln!(
-            "Got another SIGINT before interrupt is handled by WebAssembly program, aborting"
-        );
+    if INTERRUPT_SIGNAL_DELIVERED.swap(true, Ordering::SeqCst) {
+        eprintln!("Got another SIGINT before trap is triggered on WebAssembly side, aborting");
         process::abort();
     }
-    suspend::set_interrupted(true);
-    eprintln!("Notified WebAssembly program to exit");
+    unsafe {
+        set_wasm_interrupt();
+    }
 }
 pub fn ensure_sighandler() {
@ -285,12 +391,17 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
         fs: u64,
         gs: u64,
     }
+    #[repr(C)]
+    struct fpstate {
+        _unused: [u8; 168],
+        xmm: [[u64; 2]; 8],
+    }
     #[allow(dead_code)]
     #[repr(C)]
     struct mcontext_t {
         es: exception_state,
         ss: regs,
-        // ...
+        fs: fpstate,
     }
     let siginfo = siginfo as *const siginfo_t;
@ -298,6 +409,7 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
     let ucontext = ucontext as *const ucontext_t;
     let ss = &(*(*ucontext).uc_mcontext).ss;
+    let fs = &(*(*ucontext).uc_mcontext).fs;
     let mut known_registers: [Option<u64>; 24] = [None; 24];
@ -319,7 +431,14 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
     known_registers[X64Register::GPR(GPR::RBP).to_index().0] = Some(ss.rbp);
     known_registers[X64Register::GPR(GPR::RSP).to_index().0] = Some(ss.rsp);
-    // TODO: XMM registers
+    known_registers[X64Register::XMM(XMM::XMM0).to_index().0] = Some(fs.xmm[0][0]);
+    known_registers[X64Register::XMM(XMM::XMM1).to_index().0] = Some(fs.xmm[1][0]);
+    known_registers[X64Register::XMM(XMM::XMM2).to_index().0] = Some(fs.xmm[2][0]);
+    known_registers[X64Register::XMM(XMM::XMM3).to_index().0] = Some(fs.xmm[3][0]);
+    known_registers[X64Register::XMM(XMM::XMM4).to_index().0] = Some(fs.xmm[4][0]);
+    known_registers[X64Register::XMM(XMM::XMM5).to_index().0] = Some(fs.xmm[5][0]);
+    known_registers[X64Register::XMM(XMM::XMM6).to_index().0] = Some(fs.xmm[6][0]);
+    known_registers[X64Register::XMM(XMM::XMM7).to_index().0] = Some(fs.xmm[7][0]);
     FaultInfo {
         faulting_addr: si_addr,
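The interrupt mechanism above is worth spelling out: the runtime keeps one always-mapped page that generated code reads at loop headers and function entry. `set_wasm_interrupt()` flips that page to `PROT_NONE`, so the next read faults, and the `SIGSEGV`/`SIGBUS` branch of `signal_trap_handler` recognizes the faulting address and turns the fault into a snapshot-and-unwind instead of an error. A standalone sketch of just the memory-protection trick (not the commit's code):

```rust
// Sketch: "interrupting" a thread by revoking access to a page it reads.
use libc::{mmap, mprotect, MAP_ANON, MAP_PRIVATE, PROT_NONE, PROT_READ, PROT_WRITE};

fn main() {
    unsafe {
        let page = mmap(
            std::ptr::null_mut(),
            4096,
            PROT_READ | PROT_WRITE,
            MAP_PRIVATE | MAP_ANON,
            -1,
            0,
        ) as *mut u8;
        assert_ne!(page as isize, -1, "mmap failed");

        // Hot path: a plain dependent load, no compare or branch needed.
        let _ = std::ptr::read_volatile(page);

        // Interrupt path (done from the SIGINT handler in the real code):
        // after this, the next read above would raise SIGSEGV, which the
        // runtime's signal handler maps to a suspend request.
        assert_eq!(mprotect(page as _, 4096, PROT_NONE), 0);
    }
}
```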

View File

@ -9,6 +9,7 @@ use crate::{
 use crate::{
     cache::{Artifact, Error as CacheError},
+    codegen::BkptMap,
     module::ModuleInfo,
     sys::Memory,
 };
@ -89,6 +90,10 @@ pub trait RunnableModule: Send + Sync {
         None
     }
+    fn get_breakpoints(&self) -> Option<BkptMap> {
+        None
+    }
     /// A wasm trampoline contains the necessary data to dynamically call an exported wasm function.
     /// Given a particular signature index, we are returned a trampoline that is matched with that
     /// signature and an invoke function that can call the trampoline.
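`get_breakpoints` follows the same pattern as the other optional capabilities on `RunnableModule`: a defaulted method that backends may override. A compressed sketch of that design (handler type elided):

```rust
use std::collections::HashMap;
use std::sync::Arc;

type BkptMap = Arc<HashMap<usize, ()>>; // handler type elided for brevity

trait RunnableModule {
    fn get_breakpoints(&self) -> Option<BkptMap> {
        None // backends without breakpoints inherit this default
    }
}

struct NoBreakpoints;
impl RunnableModule for NoBreakpoints {}

struct WithBreakpoints(BkptMap);
impl RunnableModule for WithBreakpoints {
    fn get_breakpoints(&self) -> Option<BkptMap> {
        Some(self.0.clone())
    }
}

fn main() {
    assert!(NoBreakpoints.get_breakpoints().is_none());
    assert!(WithBreakpoints(Arc::new(HashMap::new())).get_breakpoints().is_some());
}
```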

View File

@ -9,6 +9,7 @@ use crate::{
 };
 use smallvec::SmallVec;
 use std::any::Any;
+use std::collections::HashMap;
 use std::fmt;
 use std::fmt::Debug;
 use std::marker::PhantomData;
@ -16,6 +17,9 @@ use std::sync::{Arc, RwLock};
 use wasmparser::{self, WasmDecoder};
 use wasmparser::{Operator, Type as WpType};
+pub type BkptHandler = Box<Fn(BkptInfo) -> Result<(), Box<dyn Any>> + Send + Sync + 'static>;
+pub type BkptMap = Arc<HashMap<usize, BkptHandler>>;
 #[derive(Debug)]
 pub enum Event<'a, 'b> {
     Internal(InternalEvent),
@ -26,7 +30,7 @@ pub enum Event<'a, 'b> {
 pub enum InternalEvent {
     FunctionBegin(u32),
     FunctionEnd,
-    Breakpoint(Box<Fn(BkptInfo) + Send + Sync + 'static>),
+    Breakpoint(BkptHandler),
     SetInternal(u32),
     GetInternal(u32),
 }
@ -43,8 +47,8 @@ impl fmt::Debug for InternalEvent {
     }
 }
-pub struct BkptInfo {
-    pub throw: unsafe fn(Box<dyn Any>) -> !,
+pub struct BkptInfo<'a> {
+    pub fault: Option<&'a dyn Any>,
 }
 pub trait ModuleCodeGenerator<FCG: FunctionCodeGenerator<E>, RM: RunnableModule, E: Debug> {
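A `BkptMap` is keyed by code offset, so the `SIGTRAP` branch of the signal handler can go from a faulting instruction pointer straight to the registered handler. A minimal dispatch sketch mirroring `with_breakpoint_map` (the `0x1000` address and the `dispatch` helper are invented):

```rust
use std::any::Any;
use std::collections::HashMap;
use std::sync::Arc;

pub struct BkptInfo<'a> {
    pub fault: Option<&'a dyn Any>,
}
pub type BkptHandler = Box<dyn Fn(BkptInfo) -> Result<(), Box<dyn Any>> + Send + Sync + 'static>;
pub type BkptMap = Arc<HashMap<usize, BkptHandler>>;

// Look up the handler registered for the faulting instruction pointer.
fn dispatch(map: &BkptMap, ip: usize) -> Option<Result<(), Box<dyn Any>>> {
    map.get(&ip).map(|h| h(BkptInfo { fault: None }))
}

fn main() {
    let mut m: HashMap<usize, BkptHandler> = HashMap::new();
    m.insert(0x1000, Box::new(|_| Ok(())));
    let map: BkptMap = Arc::new(m);
    assert!(matches!(dispatch(&map, 0x1000), Some(Ok(())))); // handled, resume
    assert!(dispatch(&map, 0x2000).is_none()); // not a breakpoint
}
```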

View File

@ -46,8 +46,6 @@ pub use trampoline_x64 as trampoline;
 #[cfg(all(unix, target_arch = "x86_64"))]
 pub mod alternative_stack;
 pub mod state;
-#[cfg(all(unix, target_arch = "x86_64"))]
-pub mod suspend;
 use self::error::CompileResult;
 #[doc(inline)]

View File

@ -53,10 +53,17 @@ pub struct FunctionStateMap {
     pub locals: Vec<WasmAbstractValue>,
     pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
     pub diffs: Vec<MachineStateDiff>,
+    pub wasm_function_header_target_offset: Option<usize>,
     pub wasm_offset_to_target_offset: Vec<usize>,
-    pub loop_offsets: BTreeMap<usize, usize>,      /* offset -> diff_id */
-    pub call_offsets: BTreeMap<usize, usize>,      /* offset -> diff_id */
-    pub trappable_offsets: BTreeMap<usize, usize>, /* offset -> diff_id */
+    pub loop_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
+    pub call_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
+    pub trappable_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
+}
+#[derive(Clone, Debug)]
+pub struct OffsetInfo {
+    pub diff_id: usize,
+    pub activate_offset: usize,
 }
 #[derive(Clone, Debug)]
@ -98,7 +105,7 @@ impl ModuleStateMap {
             .unwrap();
         match fsm.call_offsets.get(&(ip - base)) {
-            Some(x) => Some((fsm, fsm.diffs[*x].build_state(fsm))),
+            Some(x) => Some((fsm, fsm.diffs[x.diff_id].build_state(fsm))),
             None => None,
         }
     }
@ -120,7 +127,25 @@ impl ModuleStateMap {
             .unwrap();
         match fsm.trappable_offsets.get(&(ip - base)) {
-            Some(x) => Some((fsm, fsm.diffs[*x].build_state(fsm))),
+            Some(x) => Some((fsm, fsm.diffs[x.diff_id].build_state(fsm))),
             None => None,
         }
     }
+    fn lookup_loop_ip(&self, ip: usize, base: usize) -> Option<(&FunctionStateMap, MachineState)> {
+        if ip < base || ip - base >= self.total_size {
+            None
+        } else {
+            //println!("lookup ip: {} in {:?}", ip - base, self.local_functions);
+            let (_, fsm) = self
+                .local_functions
+                .range((Unbounded, Included(&(ip - base))))
+                .last()
+                .unwrap();
+            match fsm.loop_offsets.get(&(ip - base)) {
+                Some(x) => Some((fsm, fsm.diffs[x.diff_id].build_state(fsm))),
+                None => None,
+            }
+        }
+    }
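All three `lookup_*_ip` functions share the same two-step pattern: first find the function whose start offset is the greatest one not exceeding `ip` (a `BTreeMap` range query), then index into that function's per-offset map. The range idiom in isolation (offsets are made up):

```rust
use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};

fn main() {
    // Local functions keyed by their start offset within the code segment.
    let mut local_functions: BTreeMap<usize, &str> = BTreeMap::new();
    local_functions.insert(0x00, "f0");
    local_functions.insert(0x40, "f1");
    local_functions.insert(0x90, "f2");

    // The function containing ip is the last entry with key <= ip.
    let ip = 0x57;
    let (start, name) = local_functions
        .range((Unbounded, Included(&ip)))
        .last()
        .unwrap();
    assert_eq!((*start, *name), (0x40, "f1"));
}
```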
@ -140,6 +165,7 @@ impl FunctionStateMap {
             shadow_size,
             locals,
             diffs: vec![],
+            wasm_function_header_target_offset: None,
             wasm_offset_to_target_offset: Vec::new(),
             loop_offsets: BTreeMap::new(),
             call_offsets: BTreeMap::new(),
@ -330,6 +356,7 @@ impl InstanceImage {
 pub mod x64 {
     use super::*;
     use crate::alternative_stack::{catch_unsafe_unwind, run_on_alternative_stack};
+    use crate::codegen::BkptMap;
     use crate::structures::TypedIndex;
     use crate::types::LocalGlobalIndex;
     use crate::vm::Ctx;
@ -352,6 +379,7 @@ pub mod x64 {
         code_base: usize,
         image: InstanceImage,
         vmctx: &mut Ctx,
+        breakpoints: Option<BkptMap>,
     ) -> Result<u64, Box<dyn Any>> {
         let mut stack: Vec<u64> = vec![0; 1048576 * 8 / 8]; // 8MB stack
         let mut stack_offset: usize = stack.len();
@ -368,15 +396,31 @@ pub mod x64 {
         // Bottom to top
         for f in image.execution_state.frames.iter().rev() {
             let fsm = local_functions_vec[f.local_function_id];
-            let call_begin_offset = fsm.wasm_offset_to_target_offset[f.wasm_inst_offset];
-            // Left bound must be Excluded because it's possible that the previous instruction's (after-)call offset == call_begin_offset.
-            let (after_call_inst, diff_id) = fsm
-                .call_offsets
-                .range((Excluded(&call_begin_offset), Unbounded))
-                .next()
-                .map(|(k, v)| (*k, *v))
-                .expect("instruction offset not found in call offsets");
+            let begin_offset = if f.wasm_inst_offset == ::std::usize::MAX {
+                fsm.wasm_function_header_target_offset.unwrap()
+            } else {
+                fsm.wasm_offset_to_target_offset[f.wasm_inst_offset]
+            };
+            let (target_inst_offset, diff_id) = fsm
+                .loop_offsets
+                .get(&begin_offset)
+                .map(|v| (v.activate_offset, v.diff_id))
+                .or_else(|| {
+                    fsm.trappable_offsets
+                        .get(&begin_offset)
+                        .map(|v| (v.activate_offset, v.diff_id))
+                })
+                .or_else(|| {
+                    // Left bound must be Excluded because it's possible that the previous instruction's (after-)call offset == call_begin_offset.
+                    // This might not be the correct offset if begin_offset itself does not correspond to a call(_indirect) instruction,
+                    // but anyway safety isn't broken because diff_id always corresponds to target_inst_offset.
+                    fsm.call_offsets
+                        .range((Excluded(&begin_offset), Unbounded))
+                        .next()
+                        .map(|(_, v)| (v.activate_offset, v.diff_id))
+                })
+                .expect("instruction offset not found in any offset type");
             let diff = &fsm.diffs[diff_id];
             let state = diff.build_state(fsm);
@ -434,7 +478,10 @@ pub mod x64 {
                     }
                 }
             }
-            assert!(got_explicit_shadow);
+            if !got_explicit_shadow {
+                assert!(fsm.shadow_size % 8 == 0);
+                stack_offset -= fsm.shadow_size / 8;
+            }
             for (i, v) in state.register_values.iter().enumerate() {
                 match *v {
                     MachineValue::Undefined => {}
@ -460,9 +507,11 @@ pub mod x64 {
                     _ => unreachable!(),
                 }
             }
-            assert!((stack.len() - stack_offset) % 2 == 0); // 16-byte alignment
+            // no need to check 16-byte alignment here because it's possible that we're not at a call entry.
             stack_offset -= 1;
-            stack[stack_offset] = (code_base + after_call_inst) as u64; // return address
+            stack[stack_offset] = (code_base + target_inst_offset) as u64; // return address
         }
         stack_offset -= 1;
@ -477,12 +526,71 @@ pub mod x64 {
         stack_offset -= 1;
         stack[stack_offset] = known_registers[X64Register::GPR(GPR::R12).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R11).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R10).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R9).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R8).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RSI).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDI).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDX).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RCX).to_index().0].unwrap_or(0);
         stack_offset -= 1;
         stack[stack_offset] = known_registers[X64Register::GPR(GPR::RBX).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RAX).to_index().0].unwrap_or(0);
         stack_offset -= 1;
         stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // rbp
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM7).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM6).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM5).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM4).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM3).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM2).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM1).to_index().0].unwrap_or(0);
+        stack_offset -= 1;
+        stack[stack_offset] =
+            known_registers[X64Register::XMM(XMM::XMM0).to_index().0].unwrap_or(0);
         if let Some(ref memory) = image.memory {
             assert!(vmctx.internal.memory_bound <= memory.len());
@ -512,12 +620,15 @@ pub mod x64 {
         drop(image); // free up host memory
-        catch_unsafe_unwind(|| {
-            run_on_alternative_stack(
-                stack.as_mut_ptr().offset(stack.len() as isize),
-                stack.as_mut_ptr().offset(stack_offset as isize),
-            )
-        })
+        catch_unsafe_unwind(
+            || {
+                run_on_alternative_stack(
+                    stack.as_mut_ptr().offset(stack.len() as isize),
+                    stack.as_mut_ptr().offset(stack_offset as isize),
+                )
+            },
+            breakpoints,
+        )
     }
     pub fn build_instance_image(
@ -575,6 +686,7 @@ pub mod x64 {
         let (fsm, state) = match msm
             .lookup_call_ip(ret_addr as usize, code_base)
             .or_else(|| msm.lookup_trappable_ip(ret_addr as usize, code_base))
+            .or_else(|| msm.lookup_loop_ip(ret_addr as usize, code_base))
         {
             Some(x) => x,
             _ => return ExecutionStateImage { frames: results },
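The frame-rebuilding loop above fills an ordinary `Vec<u64>` from the top down, so that `run_on_alternative_stack` can point `%rsp` at `stack_begin` and let the `popq`/`movq` sequence in `image-loading-*.s` consume it upward. A toy model of that convention (not the real frame layout):

```rust
fn main() {
    let mut stack: Vec<u64> = vec![0; 16];
    let mut stack_offset = stack.len();

    // Decreasing stack_offset "pushes", exactly like the code above.
    for &v in [0xAA_u64, 0xBB, 0xCC].iter() {
        stack_offset -= 1;
        stack[stack_offset] = v;
    }

    // The assembly later pops from stack_begin upward, i.e. in reverse:
    assert_eq!(&stack[stack_offset..stack_offset + 3], &[0xCC, 0xBB, 0xAA]);
}
```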

View File

@ -1,104 +0,0 @@
-use crate::alternative_stack::begin_unsafe_unwind;
-use crate::import::{ImportObject, Namespace};
-use crate::trampoline::{CallContext, TrampolineBufferBuilder};
-use crate::vm::Ctx;
-use std::sync::atomic::{AtomicBool, Ordering};
-static INTERRUPTED: AtomicBool = AtomicBool::new(false);
-pub fn set_interrupted(x: bool) {
-    INTERRUPTED.store(x, Ordering::SeqCst);
-}
-pub fn get_interrupted() -> bool {
-    INTERRUPTED.load(Ordering::SeqCst)
-}
-pub fn get_and_reset_interrupted() -> bool {
-    INTERRUPTED.swap(false, Ordering::SeqCst)
-}
-pub fn patch_import_object(x: &mut ImportObject) {
-    struct Intrinsics {
-        suspend: fn(&mut Ctx),
-        check_interrupt: fn(&mut Ctx),
-    }
-    lazy_static! {
-        static ref INTRINSICS: Intrinsics = {
-            let mut builder = TrampolineBufferBuilder::new();
-            let idx_suspend =
-                builder.add_context_rsp_state_preserving_trampoline(suspend, ::std::ptr::null());
-            let idx_check_interrupt = builder
-                .add_context_rsp_state_preserving_trampoline(check_interrupt, ::std::ptr::null());
-            let trampolines = builder.build();
-            let ret = Intrinsics {
-                suspend: unsafe { ::std::mem::transmute(trampolines.get_trampoline(idx_suspend)) },
-                check_interrupt: unsafe {
-                    ::std::mem::transmute(trampolines.get_trampoline(idx_check_interrupt))
-                },
-            };
-            ::std::mem::forget(trampolines);
-            ret
-        };
-    }
-    let mut ns = Namespace::new();
-    let suspend_fn = INTRINSICS.suspend;
-    let check_interrupt_fn = INTRINSICS.check_interrupt;
-    ns.insert("suspend", func!(suspend_fn));
-    ns.insert("check_interrupt", func!(check_interrupt_fn));
-    x.register("wasmer_suspend", ns);
-}
-#[allow(clippy::cast_ptr_alignment)]
-unsafe extern "C" fn check_interrupt(ctx: &mut Ctx, _: *const CallContext, stack: *const u64) {
-    if get_and_reset_interrupted() {
-        do_suspend(ctx, stack);
-    }
-}
-#[allow(clippy::cast_ptr_alignment)]
-unsafe extern "C" fn suspend(ctx: &mut Ctx, _: *const CallContext, stack: *const u64) {
-    do_suspend(ctx, stack);
-}
-unsafe fn do_suspend(ctx: &mut Ctx, mut stack: *const u64) -> ! {
-    use crate::state::x64::{build_instance_image, read_stack, X64Register, GPR};
-    let image = {
-        let msm = (*ctx.module)
-            .runnable_module
-            .get_module_state_map()
-            .unwrap();
-        let code_base = (*ctx.module).runnable_module.get_code().unwrap().as_ptr() as usize;
-        let mut known_registers: [Option<u64>; 24] = [None; 24];
-        let r15 = *stack;
-        let r14 = *stack.offset(1);
-        let r13 = *stack.offset(2);
-        let r12 = *stack.offset(3);
-        let rbx = *stack.offset(4);
-        stack = stack.offset(5);
-        known_registers[X64Register::GPR(GPR::R15).to_index().0] = Some(r15);
-        known_registers[X64Register::GPR(GPR::R14).to_index().0] = Some(r14);
-        known_registers[X64Register::GPR(GPR::R13).to_index().0] = Some(r13);
-        known_registers[X64Register::GPR(GPR::R12).to_index().0] = Some(r12);
-        known_registers[X64Register::GPR(GPR::RBX).to_index().0] = Some(rbx);
-        let es_image = read_stack(&msm, code_base, stack, known_registers, None);
-        {
-            use colored::*;
-            eprintln!("{}", "Suspending instance.".green().bold());
-        }
-        build_instance_image(ctx, es_image)
-    };
-    begin_unsafe_unwind(Box::new(image));
-}

View File

@ -100,6 +100,8 @@ pub struct InternalCtx {
     pub memory_bound: usize,
     pub internals: *mut [u64; INTERNALS_SIZE], // TODO: Make this dynamic?
+    pub interrupt_signal_mem: *mut u8,
 }
 static INTERNAL_FIELDS: AtomicUsize = AtomicUsize::new(0);
@ -207,6 +209,17 @@ fn get_intrinsics_for_module(m: &ModuleInfo) -> *const Intrinsics {
     }
 }
+#[cfg(all(unix, target_arch = "x86_64"))]
+fn get_interrupt_signal_mem() -> *mut u8 {
+    unsafe { crate::alternative_stack::get_wasm_interrupt_signal_mem() }
+}
+#[cfg(not(all(unix, target_arch = "x86_64")))]
+fn get_interrupt_signal_mem() -> *mut u8 {
+    static mut REGION: u64 = 0;
+    unsafe { &mut REGION as *mut u64 as *mut u8 }
+}
 impl Ctx {
     #[doc(hidden)]
     pub unsafe fn new(
@ -245,6 +258,8 @@ impl Ctx {
                 memory_bound: mem_bound,
                 internals: &mut local_backing.internals.0,
+                interrupt_signal_mem: get_interrupt_signal_mem(),
             },
             local_functions: local_backing.local_functions.as_ptr(),
@ -296,6 +311,8 @@ impl Ctx {
                 memory_bound: mem_bound,
                 internals: &mut local_backing.internals.0,
+                interrupt_signal_mem: get_interrupt_signal_mem(),
             },
             local_functions: local_backing.local_functions.as_ptr(),
@ -419,9 +436,13 @@ impl Ctx {
         12 * (mem::size_of::<usize>() as u8)
     }
+    pub fn offset_interrupt_signal_mem() -> u8 {
+        13 * (mem::size_of::<usize>() as u8)
+    }
     pub fn offset_local_functions() -> u8 {
-        13 * (mem::size_of::<usize>() as u8)
+        14 * (mem::size_of::<usize>() as u8)
     }
 }
 enum InnerFunc {}
@ -640,6 +661,11 @@ mod vm_offset_tests {
         offset_of!(InternalCtx => internals).get_byte_offset(),
     );
+    assert_eq!(
+        Ctx::offset_interrupt_signal_mem() as usize,
+        offset_of!(InternalCtx => interrupt_signal_mem).get_byte_offset(),
+    );
     assert_eq!(
         Ctx::offset_local_functions() as usize,
         offset_of!(Ctx => local_functions).get_byte_offset(),
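The `offset_*` accessors encode a simple invariant: `InternalCtx` is a `#[repr(C)]` struct of pointer-sized fields, so the k-th field lives at `k * size_of::<usize>()`, which is what `vm_offset_tests` re-checks against `offset_of!`. Illustrated on a made-up struct:

```rust
use std::mem;

#[repr(C)]
struct Demo {
    a: *mut u8, // offset 0
    b: *mut u8, // offset 1 * size_of::<usize>()
    c: *mut u8, // offset 2 * size_of::<usize>()
}

fn main() {
    let d = Demo {
        a: std::ptr::null_mut(),
        b: std::ptr::null_mut(),
        c: std::ptr::null_mut(),
    };
    let base = &d as *const Demo as usize;
    // A field's offset equals its index times the pointer size:
    assert_eq!(&d.c as *const _ as usize - base, 2 * mem::size_of::<usize>());
}
```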

View File

@ -24,7 +24,7 @@ use wasmer_runtime_core::{
     module::{ModuleInfo, ModuleInner},
     state::{
         x64::new_machine_state, x64::X64Register, FunctionStateMap, MachineState, MachineValue,
-        ModuleStateMap, WasmAbstractValue,
+        ModuleStateMap, OffsetInfo, WasmAbstractValue,
     },
     structures::{Map, TypedIndex},
     typed_func::Wasm,
@ -151,7 +151,12 @@ pub struct X64FunctionCode {
     assembler: Option<Assembler>,
     function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
-    breakpoints: Option<HashMap<AssemblyOffset, Box<Fn(BkptInfo) + Send + Sync + 'static>>>,
+    breakpoints: Option<
+        HashMap<
+            AssemblyOffset,
+            Box<Fn(BkptInfo) -> Result<(), Box<dyn Any>> + Send + Sync + 'static>,
+        >,
+    >,
     returns: SmallVec<[WpType; 1]>,
     locals: Vec<Location>,
     num_params: usize,
@ -179,7 +184,7 @@ pub struct X64ExecutionContext {
     function_pointers: Vec<FuncPtr>,
     function_offsets: Vec<AssemblyOffset>,
     signatures: Arc<Map<SigIndex, FuncSig>>,
-    breakpoints: Arc<HashMap<usize, Box<Fn(BkptInfo) + Send + Sync + 'static>>>,
+    breakpoints: BkptMap,
     func_import_count: usize,
     msm: ModuleStateMap,
 }
@ -217,6 +222,10 @@ impl RunnableModule for X64ExecutionContext {
         Some(self.msm.clone())
     }
+    fn get_breakpoints(&self) -> Option<BkptMap> {
+        Some(self.breakpoints.clone())
+    }
     fn get_trampoline(&self, _: &ModuleInfo, sig_index: SigIndex) -> Option<Wasm> {
         use std::ffi::c_void;
         use wasmer_runtime_core::typed_func::WasmTrapInfo;
@ -245,16 +254,17 @@ impl RunnableModule for X64ExecutionContext {
                 num_params_plus_one.unwrap().as_ptr() as usize - 1,
             );
             let args_reverse: SmallVec<[u64; 8]> = args.iter().cloned().rev().collect();
-            protect_unix::BKPT_MAP
-                .with(|x| x.borrow_mut().push(execution_context.breakpoints.clone()));
-            let ret = match protect_unix::call_protected(|| {
-                CONSTRUCT_STACK_AND_CALL_WASM(
-                    args_reverse.as_ptr(),
-                    args_reverse.as_ptr().offset(args_reverse.len() as isize),
-                    ctx,
-                    func.as_ptr(),
-                )
-            }) {
+            let ret = match protect_unix::call_protected(
+                || {
+                    CONSTRUCT_STACK_AND_CALL_WASM(
+                        args_reverse.as_ptr(),
+                        args_reverse.as_ptr().offset(args_reverse.len() as isize),
+                        ctx,
+                        func.as_ptr(),
+                    )
+                },
+                Some(execution_context.breakpoints.clone()),
+            ) {
                 Ok(x) => {
                     if !rets.is_null() {
                         *rets = x;
@ -269,7 +279,6 @@ impl RunnableModule for X64ExecutionContext {
                     false
                 }
             };
-            protect_unix::BKPT_MAP.with(|x| x.borrow_mut().pop().unwrap());
             ret
         }
@ -548,7 +557,13 @@ impl X64FunctionCode {
     ) {
         let state_diff_id = Self::get_state_diff(m, fsm, control_stack);
         let offset = a.get_offset().0;
-        fsm.trappable_offsets.insert(offset, state_diff_id);
+        fsm.trappable_offsets.insert(
+            offset,
+            OffsetInfo {
+                activate_offset: offset,
+                diff_id: state_diff_id,
+            },
+        );
     }
     /// Moves `loc` to a valid location for `div`/`idiv`.
@ -1204,7 +1219,9 @@ impl X64FunctionCode {
         }
         // Align stack to 16 bytes.
-        if (m.get_stack_offset() + used_gprs.len() * 8 + stack_offset) % 16 != 0 {
+        if (m.get_stack_offset() + used_gprs.len() * 8 + used_xmms.len() * 8 + stack_offset) % 16
+            != 0
+        {
             a.emit_sub(Size::S64, Location::Imm32(8), Location::GPR(GPR::RSP));
             stack_offset += 8;
             m.state.stack_values.push(MachineValue::Undefined);
@ -1299,13 +1316,21 @@ impl X64FunctionCode {
             Machine::get_param_location(0),
         ); // vmctx
+        assert!(m.state.stack_values.len() % 2 == 1); // explicit shadow takes one slot
         cb(a);
         // Offset needs to be after the 'call' instruction.
         if let Some((fsm, control_stack)) = state_context {
             let state_diff_id = Self::get_state_diff(m, fsm, control_stack);
             let offset = a.get_offset().0;
-            fsm.call_offsets.insert(offset, state_diff_id);
+            fsm.call_offsets.insert(
+                offset,
+                OffsetInfo {
+                    activate_offset: offset,
+                    diff_id: state_diff_id,
+                },
+            );
         }
         // Restore stack.
@ -1642,6 +1667,31 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
             state_diff_id,
         });
+        // Check interrupt signal without branching
+        let activate_offset = a.get_offset().0;
+        a.emit_mov(
+            Size::S64,
+            Location::Memory(
+                Machine::get_vmctx_reg(),
+                vm::Ctx::offset_interrupt_signal_mem() as i32,
+            ),
+            Location::GPR(GPR::RAX),
+        );
+        self.fsm.loop_offsets.insert(
+            a.get_offset().0,
+            OffsetInfo {
+                activate_offset,
+                diff_id: state_diff_id,
+            },
+        );
+        self.fsm.wasm_function_header_target_offset = Some(a.get_offset().0);
+        a.emit_mov(
+            Size::S64,
+            Location::Memory(GPR::RAX, 0),
+            Location::GPR(GPR::RAX),
+        );
         assert_eq!(self.machine.state.wasm_inst_offset, ::std::usize::MAX);
         Ok(())
@ -3863,6 +3913,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                 let label = a.get_label();
                 let state_diff_id =
                     Self::get_state_diff(&self.machine, &mut self.fsm, &mut self.control_stack);
+                let activate_offset = a.get_offset().0;
                 self.control_stack.push(ControlFrame {
                     label: label,
                     loop_like: true,
@ -3875,10 +3927,29 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
                     state: self.machine.state.clone(),
                     state_diff_id,
                 });
-                self.fsm
-                    .loop_offsets
-                    .insert(a.get_offset().0, state_diff_id);
                 a.emit_label(label);
+                // Check interrupt signal without branching
+                a.emit_mov(
+                    Size::S64,
+                    Location::Memory(
+                        Machine::get_vmctx_reg(),
+                        vm::Ctx::offset_interrupt_signal_mem() as i32,
+                    ),
+                    Location::GPR(GPR::RAX),
+                );
+                self.fsm.loop_offsets.insert(
+                    a.get_offset().0,
+                    OffsetInfo {
+                        activate_offset,
+                        diff_id: state_diff_id,
+                    },
+                );
+                a.emit_mov(
+                    Size::S64,
+                    Location::Memory(GPR::RAX, 0),
+                    Location::GPR(GPR::RAX),
+                );
             }
             Operator::Nop => {}
             Operator::MemorySize { reserved } => {
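The emitted check is two dependent `mov`s and zero branches: load the signal-page pointer from `vmctx`, then load through it. If the page has been `mprotect`-ed, the second load faults and control reaches `signal_trap_handler`; otherwise the cost is one cached read per loop iteration. In Rust terms (layout simplified; this is a model, not the emitted code):

```rust
struct Ctx {
    interrupt_signal_mem: *mut u8,
}

#[inline(always)]
unsafe fn check_interrupt(ctx: *const Ctx) {
    // movq offset_interrupt_signal_mem(%vmctx), %rax
    let mem = (*ctx).interrupt_signal_mem;
    // movq (%rax), %rax  -- faults iff the page is currently PROT_NONE
    let _ = std::ptr::read_volatile(mem);
}

fn main() {
    let mut page = [0u8; 8];
    let ctx = Ctx {
        interrupt_signal_mem: page.as_mut_ptr(),
    };
    unsafe { check_interrupt(&ctx) }; // page readable: falls straight through
}
```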

View File

@ -10,18 +10,15 @@
 //! unless you have memory unsafety elsewhere in your code.
 //!
 use std::any::Any;
-use std::cell::{Cell, RefCell};
-use std::collections::HashMap;
-use std::sync::Arc;
+use std::cell::Cell;
 use wasmer_runtime_core::alternative_stack::{
     begin_unsafe_unwind, catch_unsafe_unwind, ensure_sighandler,
 };
-use wasmer_runtime_core::codegen::BkptInfo;
+use wasmer_runtime_core::codegen::BkptMap;
 use wasmer_runtime_core::typed_func::WasmTrapInfo;
 thread_local! {
     pub static TRAP_EARLY_DATA: Cell<Option<Box<dyn Any>>> = Cell::new(None);
-    pub static BKPT_MAP: RefCell<Vec<Arc<HashMap<usize, Box<Fn(BkptInfo) + Send + Sync + 'static>>>>> = RefCell::new(Vec::new());
 }
 pub unsafe fn trigger_trap() -> ! {
@ -33,17 +30,20 @@ pub enum CallProtError {
     Error(Box<dyn Any>),
 }
-pub fn call_protected<T>(f: impl FnOnce() -> T) -> Result<T, CallProtError> {
+pub fn call_protected<T>(
+    f: impl FnOnce() -> T,
+    breakpoints: Option<BkptMap>,
+) -> Result<T, CallProtError> {
     ensure_sighandler();
     unsafe {
-        let ret = catch_unsafe_unwind(|| f());
+        let ret = catch_unsafe_unwind(|| f(), breakpoints);
         match ret {
             Ok(x) => Ok(x),
-            Err(_) => {
+            Err(e) => {
                 if let Some(data) = TRAP_EARLY_DATA.with(|cell| cell.replace(None)) {
                     Err(CallProtError::Error(data))
                 } else {
-                    Err(CallProtError::Trap(WasmTrapInfo::Unknown))
+                    Err(CallProtError::Error(e))
                 }
             }
         }
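`call_protected` no longer flattens unknown unwind payloads into `WasmTrapInfo::Unknown`; arbitrary payloads, such as a suspended instance image, now survive to the caller. Sketch of the new propagation with stand-in types:

```rust
use std::any::Any;

#[allow(dead_code)]
enum CallProtError {
    Trap(&'static str), // stand-in for WasmTrapInfo
    Error(Box<dyn Any>),
}

fn classify(unwind_payload: Box<dyn Any>, early: Option<Box<dyn Any>>) -> CallProtError {
    match early {
        Some(data) => CallProtError::Error(data),
        // Previously this arm returned Trap(WasmTrapInfo::Unknown),
        // losing payloads like the suspend image.
        None => CallProtError::Error(unwind_payload),
    }
}

fn main() {
    match classify(Box::new("suspend image"), None) {
        CallProtError::Error(p) => assert!(p.is::<&str>()),
        CallProtError::Trap(_) => unreachable!(),
    }
}
```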

View File

@ -505,14 +505,6 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
         mapped_dirs,
     );
-    #[cfg(feature = "backend:singlepass")]
-    {
-        if options.backend == Backend::Singlepass {
-            use wasmer_runtime_core::suspend::patch_import_object;
-            patch_import_object(&mut import_object);
-        }
-    }
     let mut instance = module
         .instantiate(&import_object)
         .map_err(|e| format!("Can't instantiate module: {:?}", e))?;
@ -543,6 +535,8 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
     } else {
         None
     };
+    let breakpoints = instance.module.runnable_module.get_breakpoints();
     loop {
         let ret = if let Some(image) = image.take() {
             let msm = instance
@ -558,10 +552,14 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
                 code_base,
                 image,
                 instance.context_mut(),
+                breakpoints.clone(),
             )
             .map(|_| ())
         } else {
-            catch_unsafe_unwind(|| start_raw(instance.context_mut()))
+            catch_unsafe_unwind(
+                || start_raw(instance.context_mut()),
+                breakpoints.clone(),
+            )
         };
         if let Err(e) = ret {
             if let Some(new_image) = e.downcast_ref::<InstanceImage>() {
@ -709,7 +707,7 @@ fn run(options: Run) {
     match execute_wasm(&options) {
         Ok(()) => {}
         Err(message) => {
-            eprintln!("{:?}", message);
+            eprintln!("execute_wasm: {:?}", message);
             exit(1);
         }
     }
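Tying the CLI together: `execute_wasm` now runs the module in a loop where a suspend surfaces as an `InstanceImage` unwind payload, which becomes the input of the next iteration's resume. Condensed model (`run` and `resume` are hypothetical stand-ins for the start call and the image-resume call shown above):

```rust
use std::any::Any;

struct InstanceImage; // stand-in for the runtime's snapshot type

fn run() -> Result<(), Box<dyn Any>> {
    Err(Box::new(InstanceImage)) // pretend we got suspended once
}
fn resume(_img: InstanceImage) -> Result<(), Box<dyn Any>> {
    Ok(())
}

fn main() {
    let mut image: Option<InstanceImage> = None;
    loop {
        let ret = match image.take() {
            Some(img) => resume(img),
            None => run(),
        };
        match ret {
            Ok(()) => break, // finished normally
            Err(e) => match e.downcast::<InstanceImage>() {
                Ok(img) => image = Some(*img), // suspended: resume next round
                Err(_) => {
                    eprintln!("module trapped with a non-suspend payload");
                    break;
                }
            },
        }
    }
}
```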