Almost finished indirect calls.

This commit is contained in:
losfair
2019-03-09 02:57:23 +08:00
parent 3efccbe0f7
commit 25034ece07
2 changed files with 194 additions and 11 deletions

View File

@ -6,6 +6,8 @@ use byteorder::{ByteOrder, LittleEndian};
use dynasmrt::{ use dynasmrt::{
x64::Assembler, AssemblyOffset, DynamicLabel, DynasmApi, DynasmLabelApi, ExecutableBuffer, x64::Assembler, AssemblyOffset, DynamicLabel, DynasmApi, DynasmLabelApi, ExecutableBuffer,
}; };
use std::cell::RefCell;
use std::ptr::NonNull;
use std::sync::Mutex; use std::sync::Mutex;
use std::{collections::HashMap, sync::Arc}; use std::{collections::HashMap, sync::Arc};
use wasmer_runtime_core::{ use wasmer_runtime_core::{
@ -17,10 +19,14 @@ use wasmer_runtime_core::{
FuncIndex, FuncSig, GlobalIndex, LocalFuncIndex, LocalGlobalIndex, MemoryIndex, SigIndex, FuncIndex, FuncSig, GlobalIndex, LocalFuncIndex, LocalGlobalIndex, MemoryIndex, SigIndex,
TableIndex, Type, Value, TableIndex, Type, Value,
}, },
vm::{self, ImportBacking, LocalGlobal}, vm::{self, ImportBacking, LocalGlobal, LocalTable},
}; };
use wasmparser::{Operator, Type as WpType}; use wasmparser::{Operator, Type as WpType};
thread_local! {
// Per-thread stack of the execution contexts currently running a WASM call.
// Native trampolines (e.g. `call_indirect`) use the top entry to reach back
// into the context that initiated the call. NOTE(review): these are raw
// pointers — they are only valid while the corresponding
// `ProtectedCaller::call` frame (which pushes/pops them) is live.
static CURRENT_EXECUTION_CONTEXT: RefCell<Vec<*const X64ExecutionContext>> = RefCell::new(Vec::new());
}
lazy_static! { lazy_static! {
static ref CALL_WASM: unsafe extern "C" fn(params: *const u8, params_len: usize, target: *const u8, memory_base: *mut u8, vmctx: *mut vm::Ctx) -> i64 = { static ref CALL_WASM: unsafe extern "C" fn(params: *const u8, params_len: usize, target: *const u8, memory_base: *mut u8, vmctx: *mut vm::Ctx) -> i64 = {
let mut assembler = Assembler::new().unwrap(); let mut assembler = Assembler::new().unwrap();
@ -207,7 +213,7 @@ pub struct X64ModuleCodeGenerator {
functions: Vec<X64FunctionCode>, functions: Vec<X64FunctionCode>,
signatures: Option<Arc<Map<SigIndex, Arc<FuncSig>>>>, signatures: Option<Arc<Map<SigIndex, Arc<FuncSig>>>>,
function_signatures: Option<Arc<Map<FuncIndex, SigIndex>>>, function_signatures: Option<Arc<Map<FuncIndex, SigIndex>>>,
function_labels: Option<HashMap<usize, DynamicLabel>>, function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
assembler: Option<Assembler>, assembler: Option<Assembler>,
native_trampolines: Arc<NativeTrampolines>, native_trampolines: Arc<NativeTrampolines>,
func_import_count: usize, func_import_count: usize,
@ -222,7 +228,7 @@ pub struct X64FunctionCode {
begin_label: DynamicLabel, begin_label: DynamicLabel,
begin_offset: AssemblyOffset, begin_offset: AssemblyOffset,
assembler: Option<Assembler>, assembler: Option<Assembler>,
function_labels: Option<HashMap<usize, DynamicLabel>>, function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
br_table_data: Option<Vec<Vec<usize>>>, br_table_data: Option<Vec<Vec<usize>>>,
returns: Vec<WpType>, returns: Vec<WpType>,
locals: Vec<Local>, locals: Vec<Local>,
@ -233,13 +239,35 @@ pub struct X64FunctionCode {
unreachable_depth: usize, unreachable_depth: usize,
} }
// Uninhabited marker type: a `*const FuncPtrInner` can never be dereferenced
// as data, which documents that `FuncPtr` points at machine code, not values.
enum FuncPtrInner {}
// Thin newtype over a raw pointer into the executable code buffer.
// `repr(transparent)` guarantees it has exactly the layout of the pointer.
#[repr(transparent)]
struct FuncPtr(*const FuncPtrInner);
// SAFETY: the pointer refers to immutable, executable machine code kept alive
// by the owning `X64ExecutionContext`'s `ExecutableBuffer`; it is never used
// for data access and carries no thread-affine state, so it is safe to send
// and share across threads. TODO confirm the buffer always outlives uses.
unsafe impl Send for FuncPtr {}
unsafe impl Sync for FuncPtr {}
pub struct X64ExecutionContext { pub struct X64ExecutionContext {
code: ExecutableBuffer, code: ExecutableBuffer,
functions: Vec<X64FunctionCode>, functions: Vec<X64FunctionCode>,
signatures: Arc<Map<SigIndex, Arc<FuncSig>>>,
function_signatures: Arc<Map<FuncIndex, SigIndex>>,
function_pointers: Vec<FuncPtr>,
br_table_data: Vec<Vec<usize>>, br_table_data: Vec<Vec<usize>>,
func_import_count: usize, func_import_count: usize,
} }
impl FuncResolver for X64ExecutionContext {
    /// Resolves a local function index to a pointer into the generated code.
    ///
    /// `function_pointers` stores imported functions first, then local ones,
    /// so the local index is offset by `func_import_count`.
    ///
    /// Returns `None` when the index is out of range (or overflows) or the
    /// stored pointer is null, instead of panicking on a bad index — the
    /// `Option` return type already signals lookup failure to the caller.
    fn get(
        &self,
        _module: &ModuleInner,
        _local_func_index: LocalFuncIndex,
    ) -> Option<NonNull<vm::Func>> {
        let idx = (_local_func_index.index() as usize).checked_add(self.func_import_count)?;
        let ptr = self.function_pointers.get(idx)?.0;
        NonNull::new(ptr as *mut vm::Func)
    }
}
impl ProtectedCaller for X64ExecutionContext { impl ProtectedCaller for X64ExecutionContext {
fn call( fn call(
&self, &self,
@ -308,6 +336,8 @@ impl ProtectedCaller for X64ExecutionContext {
}; };
//println!("MEMORY = {:?}", memory_base); //println!("MEMORY = {:?}", memory_base);
CURRENT_EXECUTION_CONTEXT.with(|x| x.borrow_mut().push(self));
let ret = unsafe { let ret = unsafe {
CALL_WASM( CALL_WASM(
param_buf.as_ptr(), param_buf.as_ptr(),
@ -317,6 +347,9 @@ impl ProtectedCaller for X64ExecutionContext {
_vmctx, _vmctx,
) )
}; };
CURRENT_EXECUTION_CONTEXT.with(|x| x.borrow_mut().pop().unwrap());
Ok(if let Some(ty) = return_ty { Ok(if let Some(ty) = return_ty {
vec![Value::I64(ret)] vec![Value::I64(ret)]
} else { } else {
@ -381,10 +414,14 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext> for X64ModuleCode
vec![], vec![],
), ),
}; };
let begin_label = *function_labels
.entry(self.functions.len() + self.func_import_count)
.or_insert_with(|| assembler.new_dynamic_label());
let begin_offset = assembler.offset(); let begin_offset = assembler.offset();
let begin_label_info = function_labels
.entry(self.functions.len() + self.func_import_count)
.or_insert_with(|| (assembler.new_dynamic_label(), None));
begin_label_info.1 = Some(begin_offset);
let begin_label = begin_label_info.0;
dynasm!( dynasm!(
assembler assembler
; => begin_label ; => begin_label
@ -429,11 +466,56 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext> for X64ModuleCode
*entry = output.ptr(AssemblyOffset(*entry)) as usize; *entry = output.ptr(AssemblyOffset(*entry)) as usize;
} }
} }
let function_labels = if let Some(x) = self.functions.last() {
x.function_labels.as_ref().unwrap()
} else {
self.function_labels.as_ref().unwrap()
};
let mut out_labels: Vec<FuncPtr> = vec![];
for i in 0..function_labels.len() {
let (_, offset) = match function_labels.get(&i) {
Some(x) => x,
None => {
return Err(CodegenError {
message: "label not found",
})
}
};
let offset = match offset {
Some(x) => x,
None => {
return Err(CodegenError {
message: "offset is none",
})
}
};
out_labels.push(FuncPtr(output.ptr(*offset) as _));
}
Ok(X64ExecutionContext { Ok(X64ExecutionContext {
code: output, code: output,
functions: self.functions, functions: self.functions,
br_table_data: br_table_data, br_table_data: br_table_data,
func_import_count: self.func_import_count, func_import_count: self.func_import_count,
signatures: match self.signatures {
Some(x) => x,
None => {
return Err(CodegenError {
message: "no signatures",
})
}
},
function_pointers: out_labels,
function_signatures: match self.function_signatures {
Some(x) => x,
None => {
return Err(CodegenError {
message: "no function signatures",
})
}
},
}) })
} }
@ -464,13 +546,15 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext> for X64ModuleCode
}; };
let id = labels.len(); let id = labels.len();
let offset = self.assembler.as_mut().unwrap().offset();
let label = X64FunctionCode::emit_native_call_trampoline( let label = X64FunctionCode::emit_native_call_trampoline(
self.assembler.as_mut().unwrap(), self.assembler.as_mut().unwrap(),
invoke_import, invoke_import,
0, 0,
id, id,
); );
labels.insert(id, label); labels.insert(id, (label, Some(offset)));
self.func_import_count += 1; self.func_import_count += 1;
@ -1092,6 +1176,7 @@ impl X64FunctionCode {
stack_top: *mut u8, stack_top: *mut u8,
stack_base: *mut u8, stack_base: *mut u8,
vmctx: *mut vm::Ctx, vmctx: *mut vm::Ctx,
memory_base: *mut u8,
) -> u64, ) -> u64,
ctx1: A, ctx1: A,
ctx2: B, ctx2: B,
@ -1114,6 +1199,7 @@ impl X64FunctionCode {
; mov rdx, rsp ; mov rdx, rsp
; mov rcx, rbp ; mov rcx, rbp
; mov r8, r14 // vmctx ; mov r8, r14 // vmctx
; mov r9, r15 // memory_base
; mov rax, QWORD (0xfffffffffffffff0u64 as i64) ; mov rax, QWORD (0xfffffffffffffff0u64 as i64)
; and rsp, rax ; and rsp, rax
; mov rax, QWORD (target as i64) ; mov rax, QWORD (target as i64)
@ -2417,12 +2503,13 @@ impl FunctionCodeGenerator for X64FunctionCode {
} }
Operator::Call { function_index } => { Operator::Call { function_index } => {
let function_index = function_index as usize; let function_index = function_index as usize;
let label = *self let label = self
.function_labels .function_labels
.as_mut() .as_mut()
.unwrap() .unwrap()
.entry(function_index) .entry(function_index)
.or_insert_with(|| assembler.new_dynamic_label()); .or_insert_with(|| (assembler.new_dynamic_label(), None))
.0;
let sig_index = match self.function_signatures.get(FuncIndex::new(function_index)) { let sig_index = match self.function_signatures.get(FuncIndex::new(function_index)) {
Some(x) => *x, Some(x) => *x,
None => { None => {
@ -2451,6 +2538,57 @@ impl FunctionCodeGenerator for X64FunctionCode {
&return_types, &return_types,
)?; )?;
} }
Operator::CallIndirect { index, table_index } => {
if table_index != 0 {
return Err(CodegenError {
message: "only one table is supported",
});
}
if module_info.tables.len() != 1 {
return Err(CodegenError {
message: "no tables",
});
}
let sig_index = SigIndex::new(index as usize);
let sig = match self.signatures.get(sig_index) {
Some(x) => x,
None => {
return Err(CodegenError {
message: "signature does not exist",
})
}
};
let mut param_types: Vec<WpType> =
sig.params().iter().cloned().map(type_to_wp_type).collect();
let return_types: Vec<WpType> =
sig.returns().iter().cloned().map(type_to_wp_type).collect();
param_types.push(WpType::I32); // element index
dynasm!(
assembler
; jmp >after_trampoline
);
let trampoline_label = Self::emit_native_call_trampoline(
assembler,
call_indirect,
0usize,
index as usize,
);
dynasm!(
assembler
; after_trampoline:
);
Self::emit_call_raw(
assembler,
&mut self.value_stack,
trampoline_label,
&param_types,
&return_types,
)?;
}
Operator::End => { Operator::End => {
if self.control_stack.as_ref().unwrap().frames.len() == 1 { if self.control_stack.as_ref().unwrap().frames.len() == 1 {
let frame = self.control_stack.as_mut().unwrap().frames.pop().unwrap(); let frame = self.control_stack.as_mut().unwrap().frames.pop().unwrap();
@ -2968,6 +3106,7 @@ unsafe extern "C" fn do_trap(
stack_top: *mut u8, stack_top: *mut u8,
stack_base: *mut u8, stack_base: *mut u8,
vmctx: *mut vm::Ctx, vmctx: *mut vm::Ctx,
memory_base: *mut u8,
) -> u64 { ) -> u64 {
panic!("TRAP CODE: {:?}", ctx2); panic!("TRAP CODE: {:?}", ctx2);
} }
@ -2978,9 +3117,54 @@ unsafe extern "C" fn invoke_import(
stack_top: *mut u8, stack_top: *mut u8,
stack_base: *mut u8, stack_base: *mut u8,
vmctx: *mut vm::Ctx, vmctx: *mut vm::Ctx,
memory_base: *mut u8,
) -> u64 { ) -> u64 {
let vmctx: &mut vm::Ctx = &mut *vmctx; let vmctx: &mut vm::Ctx = &mut *vmctx;
let import = (*vmctx.imported_funcs.offset(import_id as isize)).func; let import = (*vmctx.imported_funcs.offset(import_id as isize)).func;
CONSTRUCT_STACK_AND_CALL_NATIVE(stack_top, stack_base, vmctx, import) CONSTRUCT_STACK_AND_CALL_NATIVE(stack_top, stack_base, vmctx, import)
} }
/// Native trampoline target for the WASM `call_indirect` operator.
///
/// The generated code pushes the element index last, so it sits in the
/// topmost stack slot. This pops it, resolves the callee through the
/// (single) table, performs the dynamic signature check required by the
/// WASM spec, and re-enters generated code via `CALL_WASM` with the
/// remaining stack slots as parameters.
unsafe extern "C" fn call_indirect(
    _unused: usize,
    sig_index: usize,
    mut stack_top: *mut u8,
    stack_base: *mut u8,
    vmctx: *mut vm::Ctx,
    memory_base: *mut u8,
) -> u64 {
    // Element index occupies the top 8-byte stack slot (low 32 bits used).
    let elem_index = *(stack_top as *mut u32) as usize;
    stack_top = stack_top.offset(8);
    assert!(stack_top as usize <= stack_base as usize);

    let table: &LocalTable = &*(*(*vmctx).tables);
    if elem_index >= table.count as usize {
        panic!("element index out of bounds");
    }
    // NOTE(review): this reads the table as a flat array of u32 function
    // indices — confirm against the runtime's actual table element layout.
    let func_index = *(table.base as *mut u32).offset(elem_index as isize) as usize;

    // The context that initiated this WASM call; pushed by
    // `ProtectedCaller::call`, so it is live for the duration of this call.
    let ctx: &X64ExecutionContext =
        &*CURRENT_EXECUTION_CONTEXT.with(|x| *x.borrow().last().unwrap());

    // Dynamic type check: the signature expected at the call site must match
    // the actual signature of the callee stored in the table.
    if ctx.signatures[SigIndex::new(sig_index)]
        != ctx.signatures[ctx.function_signatures[FuncIndex::new(func_index)]]
    {
        panic!("signature mismatch");
    }

    let func = ctx.function_pointers[func_index].0;
    CALL_WASM(
        stack_top,
        stack_base as usize - stack_top as usize,
        func as _,
        memory_base,
        vmctx,
    ) as u64
}

View File

@ -39,8 +39,7 @@ impl FuncResolver for Placeholder {
_module: &ModuleInner, _module: &ModuleInner,
_local_func_index: LocalFuncIndex, _local_func_index: LocalFuncIndex,
) -> Option<NonNull<vm::Func>> { ) -> Option<NonNull<vm::Func>> {
panic!(); NonNull::new(0x3f3f3f3f3f3f3f3fusize as *mut vm::Func)
None
} }
} }