Generate trampolines to call exported functions instead of using libffi (#108)

* Remove codegen.rs

* Add export trampolines and remove libffi

* Remove unused extern crates
Lachlan Sneff
2019-01-22 15:00:27 -08:00
committed by GitHub
parent ee67bf95db
commit 38b0fbf3c5
13 changed files with 300 additions and 1832 deletions
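In place of libffi's dynamic call descriptions, the backend now pre-compiles one trampoline per exported signature, all sharing the fixed ABI `unsafe extern "C" fn(*mut vm::Ctx, *const vm::Func, *const u64, *mut u64)`. A minimal sketch of what one generated trampoline is equivalent to, written as plain Rust for an assumed `(i32, i32) -> i32` export; the `Ctx`/`Func` stand-ins and the function name are hypothetical, not part of this commit:

use std::mem;

// Opaque stand-ins for wasmer_runtime_core::vm::{Ctx, Func}.
#[repr(C)]
pub struct Ctx {
    _private: [u8; 0],
}

#[repr(C)]
pub struct Func {
    _private: [u8; 0],
}

/// Rough Rust equivalent of the machine code Cranelift emits for the
/// signature (i32, i32) -> i32: read each argument out of the packed
/// u64 array, call the export with vmctx appended as the final
/// parameter, and write the result back through `returns`.
pub unsafe extern "C" fn trampoline_i32_i32_to_i32(
    vmctx: *mut Ctx,
    func: *const Func,
    args: *const u64,
    returns: *mut u64,
) {
    let callee: extern "C" fn(i32, i32, *mut Ctx) -> i32 = mem::transmute(func);
    let a = *args.add(0) as i32;
    let b = *args.add(1) as i32;
    *returns.add(0) = callee(a, b, vmctx) as u64;
}

Because the runtime only ever calls through this one C-ABI entry point, supporting multi-value returns later becomes a matter of writing more slots into `returns`, rather than building new libffi call descriptions at runtime.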

View File

@@ -1,28 +1,28 @@
 mod recovery;
 mod sighandler;

-pub use self::recovery::HandlerData;
+pub use self::recovery::{call_protected, HandlerData};

+use crate::trampoline::Trampolines;
-use crate::call::recovery::call_protected;
 use hashbrown::HashSet;
-use libffi::high::{arg as libffi_arg, call as libffi_call, CodePtr};
-use std::iter;
 use wasmer_runtime_core::{
     backend::{ProtectedCaller, Token},
     error::RuntimeResult,
     export::Context,
     module::{ExportIndex, ModuleInner},
-    types::{FuncIndex, FuncSig, LocalOrImport, Type, Value},
+    types::{FuncIndex, FuncSig, LocalOrImport, SigIndex, Type, Value},
     vm::{self, ImportBacking},
 };

 pub struct Caller {
     func_export_set: HashSet<FuncIndex>,
     handler_data: HandlerData,
+    trampolines: Trampolines,
 }

 impl Caller {
-    pub fn new(module: &ModuleInner, handler_data: HandlerData) -> Self {
+    pub fn new(module: &ModuleInner, handler_data: HandlerData, trampolines: Trampolines) -> Self {
         let mut func_export_set = HashSet::new();
         for export_index in module.exports.values() {
             if let ExportIndex::Func(func_index) = export_index {
@@ -36,6 +36,7 @@ impl Caller {
         Self {
             func_export_set,
             handler_data,
+            trampolines,
         }
     }
 }
@@ -46,12 +47,12 @@ impl ProtectedCaller for Caller {
         module: &ModuleInner,
         func_index: FuncIndex,
         params: &[Value],
-        returns: &mut [Value],
         import_backing: &ImportBacking,
         vmctx: *mut vm::Ctx,
         _: Token,
-    ) -> RuntimeResult<()> {
-        let (func_ptr, ctx, signature) = get_func_from_index(&module, import_backing, func_index);
+    ) -> RuntimeResult<Vec<Value>> {
+        let (func_ptr, ctx, signature, sig_index) =
+            get_func_from_index(&module, import_backing, func_index);

         let vmctx_ptr = match ctx {
             Context::External(external_vmctx) => external_vmctx,
@@ -61,45 +62,49 @@ impl ProtectedCaller for Caller {
         assert!(self.func_export_set.contains(&func_index));

         assert!(
-            returns.len() == signature.returns.len() && signature.returns.len() <= 1,
+            signature.returns.len() <= 1,
             "multi-value returns not yet supported"
         );

         assert!(signature.check_sig(params), "incorrect signature");

-        let libffi_args: Vec<_> = params
+        let param_vec: Vec<u64> = params
             .iter()
             .map(|val| match val {
-                Value::I32(ref x) => libffi_arg(x),
-                Value::I64(ref x) => libffi_arg(x),
-                Value::F32(ref x) => libffi_arg(x),
-                Value::F64(ref x) => libffi_arg(x),
+                Value::I32(x) => *x as u64,
+                Value::I64(x) => *x as u64,
+                Value::F32(x) => x.to_bits() as u64,
+                Value::F64(x) => x.to_bits(),
             })
-            .chain(iter::once(libffi_arg(&vmctx_ptr)))
             .collect();

-        let code_ptr = CodePtr::from_ptr(func_ptr as _);
+        let mut return_vec = vec![0; signature.returns.len()];

-        call_protected(&self.handler_data, || {
-            // Only supports zero or one return values for now.
-            // To support multiple returns, we will have to
-            // generate trampolines instead of using libffi.
-            match signature.returns.first() {
-                Some(ty) => {
-                    let val = match ty {
-                        Type::I32 => Value::I32(unsafe { libffi_call(code_ptr, &libffi_args) }),
-                        Type::I64 => Value::I64(unsafe { libffi_call(code_ptr, &libffi_args) }),
-                        Type::F32 => Value::F32(unsafe { libffi_call(code_ptr, &libffi_args) }),
-                        Type::F64 => Value::F64(unsafe { libffi_call(code_ptr, &libffi_args) }),
-                    };
-                    returns[0] = val;
-                }
-                // call with no returns
-                None => unsafe {
-                    libffi_call::<()>(code_ptr, &libffi_args);
-                },
-            }
-        })
+        let trampoline = self
+            .trampolines
+            .lookup(sig_index)
+            .expect("that trampoline doesn't exist");
+
+        call_protected(&self.handler_data, || unsafe {
+            // Leap of faith.
+            trampoline(
+                vmctx_ptr,
+                func_ptr,
+                param_vec.as_ptr(),
+                return_vec.as_mut_ptr(),
+            );
+        })?;
+
+        Ok(return_vec
+            .iter()
+            .zip(signature.returns.iter())
+            .map(|(&x, ty)| match ty {
+                Type::I32 => Value::I32(x as i32),
+                Type::I64 => Value::I64(x as i64),
+                Type::F32 => Value::F32(f32::from_bits(x as u32)),
+                Type::F64 => Value::F64(f64::from_bits(x as u64)),
+            })
+            .collect())
     }
 }
@@ -107,7 +112,7 @@ fn get_func_from_index<'a>(
     module: &'a ModuleInner,
     import_backing: &ImportBacking,
     func_index: FuncIndex,
-) -> (*const vm::Func, Context, &'a FuncSig) {
+) -> (*const vm::Func, Context, &'a FuncSig, SigIndex) {
     let sig_index = *module
         .func_assoc
         .get(func_index)
@@ -134,5 +139,5 @@ fn get_func_from_index<'a>(
     let signature = module.sig_registry.lookup_func_sig(sig_index);

-    (func_ptr, ctx, signature)
+    (func_ptr, ctx, signature, sig_index)
 }
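Since the trampoline moves every value through a plain `u64` slot, the new `call` implementation above is just a pack/unpack pair around one indirect call; floats travel by bit pattern (`to_bits`/`from_bits`), so even NaN payloads survive the round trip. A self-contained sketch of that scheme, with `Value` and `Type` mirroring the runtime enums and the helper names being hypothetical:

/// Local mirrors of wasmer_runtime_core::types::{Value, Type}.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Value {
    I32(i32),
    I64(i64),
    F32(f32),
    F64(f64),
}

#[derive(Debug, Clone, Copy)]
enum Type {
    I32,
    I64,
    F32,
    F64,
}

// Pack a wasm value into the u64 slot the trampoline reads from.
fn pack(val: Value) -> u64 {
    match val {
        Value::I32(x) => x as u64, // sign-extends, matching the diff above
        Value::I64(x) => x as u64,
        Value::F32(x) => x.to_bits() as u64, // bit pattern, not a float cast
        Value::F64(x) => x.to_bits(),
    }
}

// Recover a typed value from the slot the trampoline wrote into.
fn unpack(bits: u64, ty: Type) -> Value {
    match ty {
        Type::I32 => Value::I32(bits as i32),
        Type::I64 => Value::I64(bits as i64),
        Type::F32 => Value::F32(f32::from_bits(bits as u32)),
        Type::F64 => Value::F64(f64::from_bits(bits)),
    }
}

fn main() {
    for v in [Value::I32(-1), Value::F32(-1.5), Value::F64(f64::MIN)] {
        let ty = match v {
            Value::I32(_) => Type::I32,
            Value::I64(_) => Type::I64,
            Value::F32(_) => Type::F32,
            Value::F64(_) => Type::F64,
        };
        assert_eq!(unpack(pack(v), ty), v);
    }
}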

File diff suppressed because it is too large

View File

@@ -1,4 +1,3 @@
-// pub mod codegen;
 mod call;
 mod func_env;
 mod libcalls;
@@ -6,6 +5,7 @@ mod module;
 mod module_env;
 mod relocation;
 mod resolver;
+mod trampoline;

 use cranelift_codegen::{
     isa,

View File

@@ -1,4 +1,4 @@
-use crate::{call::Caller, resolver::FuncResolverBuilder};
+use crate::{call::Caller, resolver::FuncResolverBuilder, trampoline::Trampolines};
 use cranelift_codegen::{ir, isa};
 use cranelift_entity::EntityRef;
 use cranelift_wasm;
@@ -38,12 +38,11 @@ impl ProtectedCaller for Placeholder {
         _module: &ModuleInner,
         _func_index: FuncIndex,
         _params: &[Value],
-        _returns: &mut [Value],
         _import_backing: &ImportBacking,
         _vmctx: *mut vm::Ctx,
         _: Token,
-    ) -> RuntimeResult<()> {
-        Ok(())
+    ) -> RuntimeResult<Vec<Value>> {
+        Ok(vec![])
     }
 }
@@ -97,7 +96,10 @@ impl Module {
         let (func_resolver_builder, handler_data) = FuncResolverBuilder::new(isa, functions)?;
         self.module.func_resolver = Box::new(func_resolver_builder.finalize()?);

-        self.module.protected_caller = Box::new(Caller::new(&self.module, handler_data));
+        let trampolines = Trampolines::new(isa, &self.module);
+
+        self.module.protected_caller =
+            Box::new(Caller::new(&self.module, handler_data, trampolines));

         Ok(self.module)
     }

View File

@@ -180,7 +180,7 @@ impl FuncResolverBuilder {
             self.resolver
                 .memory
                 .protect(0..self.resolver.memory.size(), Protect::ReadExec)
-                .map_err(|e| CompileError::InternalError { msg: e.to_string() })?;;
+                .map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
         }

         Ok(self.resolver)

View File

@@ -0,0 +1,228 @@
+use cranelift_codegen::{
+    binemit::{NullTrapSink, Reloc, RelocSink},
+    cursor::{Cursor, FuncCursor},
+    ir::{self, InstBuilder},
+    isa, Context,
+};
+use hashbrown::HashMap;
+use std::{iter, mem};
+use wasmer_runtime_core::{
+    backend::sys::{Memory, Protect},
+    module::{ExportIndex, ModuleInner},
+    types::{FuncSig, SigIndex, Type},
+    vm,
+};
+
+struct NullRelocSink {}
+
+impl RelocSink for NullRelocSink {
+    fn reloc_ebb(&mut self, _: u32, _: Reloc, _: u32) {}
+    fn reloc_external(&mut self, _: u32, _: Reloc, _: &ir::ExternalName, _: i64) {}
+    fn reloc_jt(&mut self, _: u32, _: Reloc, _: ir::JumpTable) {}
+}
+
+pub struct Trampolines {
+    memory: Memory,
+    offsets: HashMap<SigIndex, usize>,
+}
+
+impl Trampolines {
+    pub fn new(isa: &isa::TargetIsa, module: &ModuleInner) -> Self {
+        let func_index_iter = module
+            .exports
+            .values()
+            .filter_map(|export| match export {
+                ExportIndex::Func(func_index) => Some(func_index),
+                _ => None,
+            })
+            .chain(module.start_func.iter());
+
+        let mut compiled_functions = Vec::new();
+        let mut ctx = Context::new();
+        let mut total_size = 0;
+
+        for exported_func_index in func_index_iter {
+            let sig_index = module.func_assoc[*exported_func_index];
+            let func_sig = module.sig_registry.lookup_func_sig(sig_index);
+
+            let trampoline_func = generate_func(func_sig);
+
+            ctx.func = trampoline_func;
+
+            let mut code_buf = Vec::new();
+
+            ctx.compile_and_emit(
+                isa,
+                &mut code_buf,
+                &mut NullRelocSink {},
+                &mut NullTrapSink {},
+            )
+            .expect("unable to compile trampolines");
+
+            ctx.clear();
+
+            total_size += round_up(code_buf.len(), mem::size_of::<usize>());
+            compiled_functions.push((sig_index, code_buf));
+        }
+
+        let mut memory = Memory::with_size(total_size).unwrap();
+        unsafe {
+            memory
+                .protect(0..memory.size(), Protect::ReadWrite)
+                .unwrap();
+        }
+
+        // "\xCC" disassembles to "int3", which will immediately cause
+        // an interrupt.
+        for i in unsafe { memory.as_slice_mut() } {
+            *i = 0xCC;
+        }
+
+        let mut previous_end = 0;
+        let mut trampolines = HashMap::with_capacity(compiled_functions.len());
+
+        for (sig_index, compiled) in compiled_functions.iter() {
+            let new_end = previous_end + round_up(compiled.len(), mem::size_of::<usize>());
+            unsafe {
+                memory.as_slice_mut()[previous_end..previous_end + compiled.len()]
+                    .copy_from_slice(&compiled[..]);
+            }
+            trampolines.insert(*sig_index, previous_end);
+            previous_end = new_end;
+        }
+
+        unsafe {
+            memory.protect(0..memory.size(), Protect::ReadExec).unwrap();
+        }
+
+        Self {
+            memory,
+            offsets: trampolines,
+        }
+    }
+
+    pub fn lookup(
+        &self,
+        sig_index: SigIndex,
+    ) -> Option<unsafe extern "C" fn(*mut vm::Ctx, *const vm::Func, *const u64, *mut u64)> {
+        let offset = *self.offsets.get(&sig_index)?;
+        let ptr = unsafe { self.memory.as_ptr().add(offset) };
+
+        unsafe { Some(mem::transmute(ptr)) }
+    }
+}
+
+/// This function generates a trampoline for the specific signature
+/// passed into it.
+fn generate_func(func_sig: &FuncSig) -> ir::Function {
+    let trampoline_sig = generate_trampoline_signature();
+
+    let mut func =
+        ir::Function::with_name_signature(ir::ExternalName::testcase("trampln"), trampoline_sig);
+
+    let export_sig_ref = func.import_signature(generate_export_signature(func_sig));
+
+    let entry_ebb = func.dfg.make_ebb();
+    let vmctx_ptr = func.dfg.append_ebb_param(entry_ebb, ir::types::I64);
+    let func_ptr = func.dfg.append_ebb_param(entry_ebb, ir::types::I64);
+    let args_ptr = func.dfg.append_ebb_param(entry_ebb, ir::types::I64);
+    let returns_ptr = func.dfg.append_ebb_param(entry_ebb, ir::types::I64);
+    func.layout.append_ebb(entry_ebb);
+
+    let mut pos = FuncCursor::new(&mut func).at_first_insertion_point(entry_ebb);
+
+    let mut args_vec = Vec::with_capacity(func_sig.params.len() + 1);
+    for (index, wasm_ty) in func_sig.params.iter().enumerate() {
+        let mem_flags = ir::MemFlags::trusted();
+
+        let val = pos.ins().load(
+            wasm_ty_to_clif(*wasm_ty),
+            mem_flags,
+            args_ptr,
+            (index * mem::size_of::<u64>()) as i32,
+        );
+        args_vec.push(val);
+    }
+    args_vec.push(vmctx_ptr);
+
+    let call_inst = pos.ins().call_indirect(export_sig_ref, func_ptr, &args_vec);
+
+    let return_values = pos.func.dfg.inst_results(call_inst).to_vec();
+
+    for (index, return_val) in return_values.iter().enumerate() {
+        let mem_flags = ir::MemFlags::trusted();
+
+        pos.ins().store(
+            mem_flags,
+            *return_val,
+            returns_ptr,
+            (index * mem::size_of::<u64>()) as i32,
+        );
+    }
+
+    pos.ins().return_(&[]);
+
+    func
+}
+
+fn wasm_ty_to_clif(ty: Type) -> ir::types::Type {
+    match ty {
+        Type::I32 => ir::types::I32,
+        Type::I64 => ir::types::I64,
+        Type::F32 => ir::types::F32,
+        Type::F64 => ir::types::F64,
+    }
+}
+
+fn generate_trampoline_signature() -> ir::Signature {
+    let mut sig = ir::Signature::new(isa::CallConv::SystemV);
+
+    let ptr_param = ir::AbiParam {
+        value_type: ir::types::I64,
+        purpose: ir::ArgumentPurpose::Normal,
+        extension: ir::ArgumentExtension::None,
+        location: ir::ArgumentLoc::Unassigned,
+    };
+
+    sig.params = vec![ptr_param, ptr_param, ptr_param, ptr_param];
+
+    sig
+}
+
+fn generate_export_signature(func_sig: &FuncSig) -> ir::Signature {
+    let mut export_clif_sig = ir::Signature::new(isa::CallConv::SystemV);
+
+    export_clif_sig.params = func_sig
+        .params
+        .iter()
+        .map(|wasm_ty| ir::AbiParam {
+            value_type: wasm_ty_to_clif(*wasm_ty),
+            purpose: ir::ArgumentPurpose::Normal,
+            extension: ir::ArgumentExtension::None,
+            location: ir::ArgumentLoc::Unassigned,
+        })
+        .chain(iter::once(ir::AbiParam {
+            value_type: ir::types::I64,
+            purpose: ir::ArgumentPurpose::VMContext,
+            extension: ir::ArgumentExtension::None,
+            location: ir::ArgumentLoc::Unassigned,
+        }))
+        .collect();
+
+    export_clif_sig.returns = func_sig
+        .returns
+        .iter()
+        .map(|wasm_ty| ir::AbiParam {
+            value_type: wasm_ty_to_clif(*wasm_ty),
+            purpose: ir::ArgumentPurpose::Normal,
+            extension: ir::ArgumentExtension::None,
+            location: ir::ArgumentLoc::Unassigned,
+        })
+        .collect();
+
+    export_clif_sig
+}
+
+#[inline]
+fn round_up(n: usize, multiple: usize) -> usize {
+    (n + multiple - 1) & !(multiple - 1)
+}