Fix a few issues from PR comments.

losfair
2019-08-21 14:53:33 -07:00
parent bf471fbc24
commit bf9d915635
6 changed files with 159 additions and 171 deletions

View File

@@ -70,6 +70,8 @@ extern "C" {
     ) -> bool;
 }
 
+static SIGNAL_HANDLER_INSTALLED: Once = Once::new();
+
 fn get_callbacks() -> Callbacks {
     extern "C" fn alloc_memory(
         size: usize,
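
The hoisted static relies on `std::sync::Once`, which runs its closure at most once even under concurrent calls. A minimal standalone sketch of the pattern, with a `println!` standing in for the real `install_signal_handler` call:

use std::sync::Once;

static SIGNAL_HANDLER_INSTALLED: Once = Once::new();

fn ensure_handler() {
    // The closure runs at most once; concurrent callers block until
    // the first call completes, so installation is race-free.
    SIGNAL_HANDLER_INSTALLED.call_once(|| {
        println!("installing signal handler"); // stand-in for the real install
    });
}

fn main() {
    ensure_handler();
    ensure_handler(); // no-op: the handler is already installed
}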
@@ -218,16 +220,6 @@ impl LLVMBackend {
             )
         };
 
-        // Uncomment this to make spectests pass.
-        // TODO: fix this
-        /*
-        static SIGNAL_HANDLER_INSTALLED: Once = Once::new();
-
-        SIGNAL_HANDLER_INSTALLED.call_once(|| unsafe {
-            crate::platform::install_signal_handler();
-        });*/
-
         if res != LLVMResult::OK {
             panic!("failed to load object")
         }
@@ -235,7 +227,7 @@ impl LLVMBackend {
         let buffer = Arc::new(Buffer::LlvmMemory(memory_buffer));
 
         let raw_stackmap = unsafe {
-            ::std::slice::from_raw_parts(
+            std::slice::from_raw_parts(
                 llvm_backend_get_stack_map_ptr(module),
                 llvm_backend_get_stack_map_size(module),
             )
@@ -281,8 +273,8 @@ impl LLVMBackend {
                 let mut map_records: BTreeMap<usize, &StkMapRecord> = BTreeMap::new();
 
-                for r in &map.stk_map_records {
-                    map_records.insert(r.patchpoint_id as usize, r);
+                for record in &map.stk_map_records {
+                    map_records.insert(record.patchpoint_id as usize, record);
                 }
 
                 for ((start_id, start_entry), (end_id, end_entry)) in stackmaps
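
For illustration, a `BTreeMap` keyed by patchpoint id gives ordered iteration over the records, which is what the loop above sets up. A standalone sketch, with a toy `Record` type standing in for the LLVM `StkMapRecord`:

use std::collections::BTreeMap;

// Toy record type; only the patchpoint id matters for the index.
struct Record {
    patchpoint_id: u64,
}

fn main() {
    let records = vec![Record { patchpoint_id: 3 }, Record { patchpoint_id: 1 }];
    let mut map: BTreeMap<usize, &Record> = BTreeMap::new();
    for record in &records {
        map.insert(record.patchpoint_id as usize, record);
    }
    // BTreeMap iterates keys in sorted order: 1, then 3.
    assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![1, 3]);
}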
@@ -314,7 +306,7 @@ impl LLVMBackend {
                             &mut msm,
                         );
                     } else {
-                        // TODO: optimized out?
+                        // The record is optimized out.
                     }
                 }
@@ -329,8 +321,6 @@ impl LLVMBackend {
                     })
                     .collect();
 
-                //println!("MSM: {:?}", msm);
-
                 (
                     Self {
                         module,
@@ -341,7 +331,7 @@ impl LLVMBackend {
                     LLVMCache { buffer },
                 )
             } else {
-                eprintln!("WARNING: No stack map");
+                // This module contains no functions so no stackmaps.
                 (
                     Self {
                         module,
@@ -366,8 +356,6 @@ impl LLVMBackend {
             return Err("failed to load object".to_string());
         }
 
-        static SIGNAL_HANDLER_INSTALLED: Once = Once::new();
-
         SIGNAL_HANDLER_INSTALLED.call_once(|| {
             crate::platform::install_signal_handler();
         });
@@ -431,12 +419,16 @@ impl RunnableModule for LLVMBackend {
             mem::transmute(symbol)
         };
 
+        SIGNAL_HANDLER_INSTALLED.call_once(|| unsafe {
+            crate::platform::install_signal_handler();
+        });
+
         Some(unsafe { Wasm::from_raw_parts(trampoline, invoke_trampoline, None) })
     }
 
     fn get_code(&self) -> Option<&[u8]> {
         Some(unsafe {
-            ::std::slice::from_raw_parts(
+            std::slice::from_raw_parts(
                 llvm_backend_get_code_ptr(self.module),
                 llvm_backend_get_code_size(self.module),
             )
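
Most of the mechanical changes in this commit drop the leading `::` from `std` paths. Under the 2018 edition both forms resolve identically; the leading-colon form is only needed when a local module named `std` would otherwise shadow the standard library. A standalone snippet showing the equivalence:

fn main() {
    let data = [1u8, 2, 3];
    // Equivalent in Rust 2018; the commit drops the redundant `::` prefix.
    let a = unsafe { std::slice::from_raw_parts(data.as_ptr(), data.len()) };
    let b = unsafe { ::std::slice::from_raw_parts(data.as_ptr(), data.len()) };
    assert_eq!(a, b);
}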

View File

@@ -185,12 +185,12 @@ impl Drop for CodeMemory {
 impl Deref for CodeMemory {
     type Target = [u8];
     fn deref(&self) -> &[u8] {
-        unsafe { ::std::slice::from_raw_parts(self.ptr, self.size) }
+        unsafe { std::slice::from_raw_parts(self.ptr, self.size) }
     }
 }
 
 impl DerefMut for CodeMemory {
     fn deref_mut(&mut self) -> &mut [u8] {
-        unsafe { ::std::slice::from_raw_parts_mut(self.ptr, self.size) }
+        unsafe { std::slice::from_raw_parts_mut(self.ptr, self.size) }
     }
 }
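
The `Deref`/`DerefMut` impls let `CodeMemory` be used anywhere a byte slice is expected. A safe standalone analogue of the pattern, with a `Vec`-backed toy in place of mmap'd executable memory:

use std::ops::{Deref, DerefMut};

// A toy owned byte region; the real CodeMemory wraps mapped pages.
struct Region {
    buf: Vec<u8>,
}

impl Deref for Region {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
        &self.buf
    }
}

impl DerefMut for Region {
    fn deref_mut(&mut self) -> &mut [u8] {
        &mut self.buf
    }
}

fn main() {
    let mut r = Region { buf: vec![0; 4] };
    r[0] = 0x90; // DerefMut gives slice indexing for free
    assert_eq!(r.len(), 4); // Deref exposes all &[u8] methods
}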

View File

@@ -154,7 +154,6 @@ impl ModuleStateMap {
         self.lookup_ip(ip, base, |fsm| &fsm.call_offsets)
     }
 
-    #[warn(dead_code)]
     pub fn lookup_trappable_ip(
         &self,
         ip: usize,
@@ -163,7 +162,6 @@ impl ModuleStateMap {
         self.lookup_ip(ip, base, |fsm| &fsm.trappable_offsets)
     }
 
-    #[warn(dead_code)]
     pub fn lookup_loop_ip(
         &self,
         ip: usize,
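
Dropping `#[warn(dead_code)]` is correct because that attribute only sets the lint level to warn, which is already the default, so it never silenced anything. For comparison, the form that actually suppresses the lint:

// #[warn(dead_code)] is a no-op here: "warn" is the default level.
// To actually suppress the lint on an unused item, use allow:
#[allow(dead_code)]
fn unused_helper() {}

fn main() {}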
@@ -535,30 +533,30 @@ pub mod x64 {
                     match inner.0 {
                         MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                             WasmAbstractValue::Const(x) => {
-                                assert!(x <= ::std::u32::MAX as u64);
+                                assert!(x <= std::u32::MAX as u64);
                                 stack[stack_offset] |= x;
                             }
                             WasmAbstractValue::Runtime => {
                                 let v = f.stack[x].unwrap();
-                                assert!(v <= ::std::u32::MAX as u64);
+                                assert!(v <= std::u32::MAX as u64);
                                 stack[stack_offset] |= v;
                             }
                         },
                         MachineValue::WasmLocal(x) => match fsm.locals[x] {
                             WasmAbstractValue::Const(x) => {
-                                assert!(x <= ::std::u32::MAX as u64);
+                                assert!(x <= std::u32::MAX as u64);
                                 stack[stack_offset] |= x;
                             }
                             WasmAbstractValue::Runtime => {
                                 let v = f.locals[x].unwrap();
-                                assert!(v <= ::std::u32::MAX as u64);
+                                assert!(v <= std::u32::MAX as u64);
                                 stack[stack_offset] |= v;
                             }
                         },
                         MachineValue::VmctxDeref(ref seq) => {
                             stack[stack_offset] |=
                                 compute_vmctx_deref(vmctx as *const Ctx, seq)
-                                    & (::std::u32::MAX as u64);
+                                    & (std::u32::MAX as u64);
                         }
                         MachineValue::Undefined => {}
                         _ => unimplemented!("TwoHalves.0"),
@@ -566,30 +564,30 @@ pub mod x64 {
                     match inner.1 {
                         MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                             WasmAbstractValue::Const(x) => {
-                                assert!(x <= ::std::u32::MAX as u64);
+                                assert!(x <= std::u32::MAX as u64);
                                 stack[stack_offset] |= x << 32;
                             }
                             WasmAbstractValue::Runtime => {
                                 let v = f.stack[x].unwrap();
-                                assert!(v <= ::std::u32::MAX as u64);
+                                assert!(v <= std::u32::MAX as u64);
                                 stack[stack_offset] |= v << 32;
                             }
                         },
                         MachineValue::WasmLocal(x) => match fsm.locals[x] {
                             WasmAbstractValue::Const(x) => {
-                                assert!(x <= ::std::u32::MAX as u64);
+                                assert!(x <= std::u32::MAX as u64);
                                 stack[stack_offset] |= x << 32;
                             }
                             WasmAbstractValue::Runtime => {
                                 let v = f.locals[x].unwrap();
-                                assert!(v <= ::std::u32::MAX as u64);
+                                assert!(v <= std::u32::MAX as u64);
                                 stack[stack_offset] |= v << 32;
                             }
                         },
                         MachineValue::VmctxDeref(ref seq) => {
                             stack[stack_offset] |=
                                 (compute_vmctx_deref(vmctx as *const Ctx, seq)
-                                    & (::std::u32::MAX as u64))
+                                    & (std::u32::MAX as u64))
                                     << 32;
                         }
                         MachineValue::Undefined => {}
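
These two `match` arms pack a pair of 32-bit values into one 64-bit stack slot: `inner.0` fills the low half and `inner.1` is shifted into the high half, with the asserts guaranteeing each value fits. A standalone sketch of the packing:

// Two 32-bit values share one 64-bit stack slot: the first occupies
// the low half, the second is shifted into the high half.
fn pack(lo: u64, hi: u64) -> u64 {
    assert!(lo <= std::u32::MAX as u64);
    assert!(hi <= std::u32::MAX as u64);
    let mut slot = 0u64;
    slot |= lo;
    slot |= hi << 32;
    slot
}

fn main() {
    let slot = pack(0xAAAA_AAAA, 0x5555_5555);
    assert_eq!(slot & 0xFFFF_FFFF, 0xAAAA_AAAA);
    assert_eq!(slot >> 32, 0x5555_5555);
}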
@@ -728,7 +726,7 @@ pub mod x64 {
                 assert_eq!(vmctx.internal.memory_bound, memory.len());
             }
 
-            ::std::slice::from_raw_parts_mut(
+            std::slice::from_raw_parts_mut(
                 vmctx.internal.memory_base,
                 vmctx.internal.memory_bound,
            )
@@ -763,7 +761,7 @@ pub mod x64 {
                 None
             } else {
                 Some(
-                    ::std::slice::from_raw_parts(
+                    std::slice::from_raw_parts(
                         vmctx.internal.memory_base,
                         vmctx.internal.memory_bound,
                     )
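
Both call sites build a slice over guest memory from `memory_base`/`memory_bound`; since `from_raw_parts` requires a valid, non-null pointer, the `None` branch guards the empty case. A standalone sketch of that guard (`memory_view` is a hypothetical helper, not wasmer API):

// Guarding a raw-parts view: from_raw_parts requires a non-null,
// well-aligned pointer, so a null base or zero bound maps to None.
unsafe fn memory_view<'a>(base: *mut u8, bound: usize) -> Option<&'a [u8]> {
    if base.is_null() || bound == 0 {
        None
    } else {
        Some(std::slice::from_raw_parts(base, bound))
    }
}

fn main() {
    let mut backing = vec![0u8; 16];
    let view = unsafe { memory_view(backing.as_mut_ptr(), backing.len()) };
    assert_eq!(view.map(|v| v.len()), Some(16));
    assert!(unsafe { memory_view(std::ptr::null_mut(), 0) }.is_none());
}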

View File

@@ -70,7 +70,7 @@ unsafe fn do_optimize(
     }
 }
 
-pub fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
+pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
     module_info: &ModuleInfo,
     wasm_binary: &[u8],
     mut resume_image: Option<InstanceImage>,
@@ -80,157 +80,155 @@ pub fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
     optimized_backends: Vec<Box<dyn Fn() -> Box<dyn Compiler> + Send>>,
     interactive_shell: F,
 ) -> Result<(), String> {
-    unsafe {
     ensure_sighandler();
 
     let ctx_box = Arc::new(Mutex::new(CtxWrapper(baseline.context_mut() as *mut _)));
     // Ensure that the ctx pointer's lifetime is not longer than Instance's.
     let _deferred_ctx_box_cleanup: Defer<_> = {
         let ctx_box = ctx_box.clone();
         Defer(Some(move || {
             ctx_box.lock().unwrap().0 = ::std::ptr::null_mut();
         }))
     };
     let opt_state = Arc::new(OptimizationState {
         outcome: Mutex::new(None),
     });
 
     {
         let wasm_binary = wasm_binary.to_vec();
         let ctx_box = ctx_box.clone();
         let opt_state = opt_state.clone();
         ::std::thread::spawn(move || {
             for backend in optimized_backends {
                 if !ctx_box.lock().unwrap().0.is_null() {
                     do_optimize(&wasm_binary, backend(), &ctx_box, &opt_state);
                 }
             }
         });
     }
 
     let mut optimized_instances: Vec<Instance> = vec![];
 
     push_code_version(CodeVersion {
         baseline: true,
         msm: baseline
             .module
             .runnable_module
             .get_module_state_map()
             .unwrap(),
         base: baseline.module.runnable_module.get_code().unwrap().as_ptr() as usize,
     });
     let n_versions: Cell<usize> = Cell::new(1);
 
     let _deferred_pop_versions = Defer(Some(|| {
         for _ in 0..n_versions.get() {
             pop_code_version().unwrap();
         }
     }));
 
     loop {
         let new_optimized: Option<&mut Instance> = {
             let mut outcome = opt_state.outcome.lock().unwrap();
             if let Some(x) = outcome.take() {
                 let instance = x
                     .module
                     .instantiate(&import_object)
                     .map_err(|e| format!("Can't instantiate module: {:?}", e))?;
                 // Keep the optimized code alive.
                 optimized_instances.push(instance);
                 optimized_instances.last_mut()
             } else {
                 None
             }
         };
         if let Some(optimized) = new_optimized {
             let base = module_info.imported_functions.len();
             let code_ptr = optimized
                 .module
                 .runnable_module
                 .get_code()
                 .unwrap()
                 .as_ptr() as usize;
             let target_addresses: Vec<usize> = optimized
                 .module
                 .runnable_module
                 .get_local_function_offsets()
                 .unwrap()
                 .into_iter()
                 .map(|x| code_ptr + x)
                 .collect();
             assert_eq!(target_addresses.len(), module_info.func_assoc.len() - base);
             for i in base..module_info.func_assoc.len() {
                 baseline
                     .module
                     .runnable_module
                     .patch_local_function(i - base, target_addresses[i - base]);
             }
 
             push_code_version(CodeVersion {
                 baseline: false,
                 msm: optimized
                     .module
                     .runnable_module
                     .get_module_state_map()
                     .unwrap(),
                 base: optimized
                     .module
                     .runnable_module
                     .get_code()
                     .unwrap()
                     .as_ptr() as usize,
             });
             n_versions.set(n_versions.get() + 1);
 
             baseline.context_mut().local_functions = optimized.context_mut().local_functions;
         }
 
         // Assuming we do not want to do breakpoint-based debugging on optimized backends.
         let breakpoints = baseline.module.runnable_module.get_breakpoints();
         let ctx = baseline.context_mut() as *mut _;
         let ret = with_ctx(ctx, || {
             if let Some(image) = resume_image.take() {
                 let msm = baseline
                     .module
                     .runnable_module
                     .get_module_state_map()
                     .unwrap();
                 let code_base =
                     baseline.module.runnable_module.get_code().unwrap().as_ptr() as usize;
                 invoke_call_return_on_stack(
                     &msm,
                     code_base,
                     image,
                     baseline.context_mut(),
                     breakpoints.clone(),
                 )
                 .map(|_| ())
             } else {
                 catch_unsafe_unwind(|| start_raw(baseline.context_mut()), breakpoints.clone())
             }
         });
 
         if let Err(e) = ret {
             if let Ok(new_image) = e.downcast::<InstanceImage>() {
                 // Tier switch event
                 if !was_sigint_triggered_fault() && opt_state.outcome.lock().unwrap().is_some()
                 {
                     resume_image = Some(*new_image);
                     continue;
                 }
                 let op = interactive_shell(InteractiveShellContext {
                     image: Some(*new_image),
                     patched: n_versions.get() > 1,
                 });
                 match op {
                     ShellExitOperation::ContinueWith(new_image) => {
                         resume_image = Some(new_image);
                     }
                 }
             } else {
                 return Err("Error while executing WebAssembly".into());
             }
         } else {
             return Ok(());
         }
     }
-    }
 }
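
`_deferred_ctx_box_cleanup` and `_deferred_pop_versions` use a `Defer` guard so cleanup runs on every exit path, including `?` early returns. A minimal standalone version of such a guard (the real `Defer` lives elsewhere in this crate):

// Run a closure on scope exit, like a `defer` statement.
struct Defer<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> Drop for Defer<F> {
    fn drop(&mut self) {
        if let Some(f) = self.0.take() {
            f();
        }
    }
}

fn main() {
    let _cleanup = Defer(Some(|| println!("popped code versions")));
    println!("running");
    // `_cleanup` fires here, even on early return or unwind.
}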

View File

@@ -277,7 +277,7 @@ impl RunnableModule for X64ExecutionContext {
             let execution_context =
                 ::std::mem::transmute_copy::<&dyn RunnableModule, &X64ExecutionContext>(&&**rm);
 
-            let args = ::std::slice::from_raw_parts(
+            let args = std::slice::from_raw_parts(
                 args,
                 num_params_plus_one.unwrap().as_ptr() as usize - 1,
             );
@@ -1690,7 +1690,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
         let start_label = a.get_label();
         // skip the patchpoint during normal execution
         a.emit_jmp(Condition::None, start_label);
-        // patchpoint of 32 bytes
+        // patchpoint of 32 1-byte nops
         for _ in 0..32 {
             a.emit_nop();
         }
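
The updated comment clarifies the patchpoint layout: 32 single-byte `nop`s behind an unconditional jump, leaving room to splice in a jump to the optimized function later. A standalone byte-level sketch of the idea (illustrative only, not the assembler's actual output):

// Sketch of a patchpoint: reserve 32 single-byte nops (0x90 on
// x86-64) behind a short jump; hot-patching later overwrites them
// with a trampoline to the optimized code.
fn main() {
    let mut code: Vec<u8> = Vec::new();
    code.extend_from_slice(&[0xEB, 32]); // jmp +32: skip the patchpoint
    code.extend(std::iter::repeat(0x90).take(32)); // 32 x nop
    assert_eq!(code.len(), 34);
    // Patching (conceptually): overwrite code[2..34] with a jump to
    // the new function, then retarget the jmp at code[0..2] into it.
}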

View File

@@ -587,7 +587,7 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
         let start_raw: extern "C" fn(&mut wasmer_runtime_core::vm::Ctx) =
             unsafe { ::std::mem::transmute(start.get_vm_func()) };
 
-        run_tiering(
+        unsafe { run_tiering(
             module.info(),
             &wasm_binary,
             if let Some(ref path) = options.resume {
@@ -612,7 +612,7 @@ fn execute_wasm(options: &Run) -> Result<(), String> {
             })
             .collect(),
             interactive_shell,
-        )?;
+        )? };
     }
 
     #[cfg(not(feature = "managed"))]
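
Marking `run_tiering` as `unsafe fn` shifts the safety obligation to callers, which is why this call site gains an `unsafe { ... }` wrapper while the function body loses its inner one (an `unsafe fn` body is itself an unsafe context in this edition). A standalone illustration:

// The `unsafe fn` needs no inner unsafe block in its body, but every
// call site must now appear inside an unsafe block.
unsafe fn run() -> Result<(), String> {
    Ok(())
}

fn main() -> Result<(), String> {
    unsafe { run() }
}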