use super::codegen::*;
use super::stack::{
    ControlFrame, ControlStack, IfElseState, ScratchRegister, ValueInfo, ValueLocation, ValueStack,
};
use crate::protect_unix;
use byteorder::{ByteOrder, LittleEndian};
use dynasmrt::{
    x64::Assembler, AssemblyOffset, DynamicLabel, DynasmApi, DynasmLabelApi, ExecutableBuffer,
};
use std::cell::RefCell;
use std::ptr::NonNull;
use std::{any::Any, collections::HashMap, sync::Arc};
use wasmer_runtime_core::{
    backend::{FuncResolver, ProtectedCaller, Token, UserTrapper},
    error::{RuntimeError, RuntimeResult},
    memory::MemoryType,
    module::{ModuleInfo, ModuleInner},
    structures::{Map, TypedIndex},
    types::{
        FuncIndex, FuncSig, ImportedMemoryIndex, LocalFuncIndex, LocalGlobalIndex,
        LocalMemoryIndex, LocalOrImport, MemoryIndex, SigIndex, Type, Value,
    },
    units::Pages,
    vm::{self, ImportBacking, LocalGlobal, LocalMemory, LocalTable},
};
use wasmparser::{Operator, Type as WpType};

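// Stack of `X64ExecutionContext`s currently executing on this thread. It is pushed/popped
// around every protected call below; presumably the trap handling path in `protect_unix`
// consults it to map a fault back to the running context (assumption, not shown here).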
thread_local! {
    static CURRENT_EXECUTION_CONTEXT: RefCell<Vec<*const X64ExecutionContext>> = RefCell::new(Vec::new());
}

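// CALL_WASM is a hand-written trampoline from native code into generated wasm code: it saves
// the callee-saved registers, stashes `memory_base` in r15 and `vmctx` in r14, copies
// `params_len` bytes of arguments onto the machine stack, pushes a return address
// (`after_call`) plus rbp, and then jumps to `target`.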
lazy_static! {
    static ref CALL_WASM: unsafe extern "C" fn(params: *const u8, params_len: usize, target: *const u8, memory_base: *mut u8, vmctx: *mut vm::Ctx) -> i64 = {
        let mut assembler = Assembler::new().unwrap();
        let offset = assembler.offset();
        dynasm!(
            assembler
            ; push rbx
            ; push r12
            ; push r13
            ; push r14
            ; push r15
            ; mov r15, rcx // memory_base
            ; mov r14, r8 // vmctx
            ; lea rax, [>after_call]
            ; push rax
            ; push rbp
            ; mov rbp, rsp
            ; sub rsp, rsi // params_len
            ; mov rcx, 0
            ; mov r8, rsp
            ; _loop:
            ; cmp rsi, 0
            ; je >_loop_end
            ; mov rax, [rdi]
            ; mov [r8], rax
            ; add r8, 8
            ; add rdi, 8
            ; sub rsi, 8
            ; jmp <_loop
            ; _loop_end:
            ; jmp rdx
            ; after_call:
            ; pop r15
            ; pop r14
            ; pop r13
            ; pop r12
            ; pop rbx
            ; ret
        );
        let buf = assembler.finalize().unwrap();
        let ret = unsafe { ::std::mem::transmute(buf.ptr(offset)) };
        ::std::mem::forget(buf);
        ret
    };

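    // CONSTRUCT_STACK_AND_CALL_NATIVE calls back from generated code into a native function:
    // it walks the emulated stack from `stack_base` down towards `stack_top`, loading up to
    // five System V argument registers (rsi, rdx, rcx, r8, r9) with `ctx` in rdi, and then
    // calls `target`. More than five arguments is not implemented yet (the `ud2` below).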
    static ref CONSTRUCT_STACK_AND_CALL_NATIVE: unsafe extern "C" fn (stack_top: *mut u8, stack_base: *mut u8, ctx: *mut vm::Ctx, target: *const vm::Func) -> u64 = {
        let mut assembler = Assembler::new().unwrap();
        let offset = assembler.offset();
        dynasm!(
            assembler
            ; push r15
            ; push r14
            ; push r13
            ; push r12
            ; sub rsp, 8 // align to 16 bytes

            ; mov r15, rdi
            ; mov r14, rsi
            ; mov r13, rdx
            ; mov r12, rcx

            ; mov rdi, r13 // ctx

            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready
            ; mov rsi, [r14]

            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready
            ; mov rdx, [r14]

            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready
            ; mov rcx, [r14]

            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready
            ; mov r8, [r14]

            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready
            ; mov r9, [r14]

            ; ud2 // FIXME

            ; stack_ready:
            ; call r12

            ; add rsp, 8
            ; pop r12
            ; pop r13
            ; pop r14
            ; pop r15
            ; ret
        );
        let buf = assembler.finalize().unwrap();
        let ret = unsafe { ::std::mem::transmute(buf.ptr(offset)) };
        ::std::mem::forget(buf);
        ret
    };
}

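/// General-purpose x86-64 registers. The discriminants follow the hardware register
/// encoding, so a `Register` can be passed directly to dynasm's `Rq`/`Rd` operands.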
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[allow(dead_code)]
pub enum Register {
    RAX,
    RCX,
    RDX,
    RBX,
    RSP,
    RBP,
    RSI,
    RDI,
    R8,
    R9,
    R10,
    R11,
    R12,
    R13,
    R14,
    R15,
}

impl Register {
    pub fn from_scratch_reg(sr: ScratchRegister) -> Register {
        use self::Register::*;
        match sr.raw_id() {
            0 => RDI,
            1 => RSI,
            2 => RDX,
            3 => RCX,
            4 => R8,
            5 => R9,
            6 => R10,
            7 => R11,
            8 => RBX,
            9 => R12,
            10 => R13,
            // 11 => R14, // R14 is reserved for vmctx.
            // 12 => R15, // R15 is reserved for memory base pointer.
            _ => unreachable!(),
        }
    }

    pub fn is_used(&self, stack: &ValueStack) -> bool {
        for val in &stack.values {
            match val.location {
                ValueLocation::Register(x) => {
                    if Register::from_scratch_reg(x) == *self {
                        return true;
                    }
                }
                ValueLocation::Stack => break,
            }
        }

        false
    }
}

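/// Dynamic labels of the pre-generated native trampolines for the `memory.size` and
/// `memory.grow` intrinsics, one per memory kind (dynamic/static/shared) and locality
/// (local/imported).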
#[allow(dead_code)]
pub struct NativeTrampolines {
    memory_size_dynamic_local: DynamicLabel,
    memory_size_static_local: DynamicLabel,
    memory_size_shared_local: DynamicLabel,
    memory_size_dynamic_import: DynamicLabel,
    memory_size_static_import: DynamicLabel,
    memory_size_shared_import: DynamicLabel,
    memory_grow_dynamic_local: DynamicLabel,
    memory_grow_static_local: DynamicLabel,
    memory_grow_shared_local: DynamicLabel,
    memory_grow_dynamic_import: DynamicLabel,
    memory_grow_static_import: DynamicLabel,
    memory_grow_shared_import: DynamicLabel,
}

pub struct X64ModuleCodeGenerator {
    functions: Vec<X64FunctionCode>,
    signatures: Option<Arc<Map<SigIndex, FuncSig>>>,
    function_signatures: Option<Arc<Map<FuncIndex, SigIndex>>>,
    function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
    assembler: Option<Assembler>,
    native_trampolines: Arc<NativeTrampolines>,
    func_import_count: usize,
}

pub struct X64FunctionCode {
    signatures: Arc<Map<SigIndex, FuncSig>>,
    function_signatures: Arc<Map<FuncIndex, SigIndex>>,
    native_trampolines: Arc<NativeTrampolines>,

    begin_offset: AssemblyOffset,
    assembler: Option<Assembler>,
    function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
    br_table_data: Option<Vec<Vec<usize>>>,
    returns: Vec<WpType>,
    locals: Vec<Local>,
    num_params: usize,
    current_stack_offset: usize,
    value_stack: ValueStack,
    control_stack: Option<ControlStack>,
    unreachable_depth: usize,
}

enum FuncPtrInner {}
#[repr(transparent)]
#[derive(Copy, Clone, Debug)]
struct FuncPtr(*const FuncPtrInner);
unsafe impl Send for FuncPtr {}
unsafe impl Sync for FuncPtr {}

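/// The finalized machine code for a module, together with the metadata needed to call into
/// it at runtime.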
pub struct X64ExecutionContext {
    code: ExecutableBuffer,
    functions: Vec<X64FunctionCode>,
    signatures: Arc<Map<SigIndex, FuncSig>>,
    function_signatures: Arc<Map<FuncIndex, SigIndex>>,
    function_pointers: Vec<FuncPtr>,
    _br_table_data: Vec<Vec<usize>>,
    func_import_count: usize,
}

pub struct X64RuntimeResolver {
    _code: ExecutableBuffer,
    local_pointers: Vec<FuncPtr>,
}

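/// Builds the `X64RuntimeResolver` by emitting one managed-call trampoline per local
/// function, so that local functions can also be entered through the native ABI.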
impl X64ExecutionContext {
    fn get_runtime_resolver(
        &self,
        module_info: &ModuleInfo,
    ) -> Result<X64RuntimeResolver, CodegenError> {
        let mut assembler = Assembler::new().unwrap();
        let mut offsets: Vec<AssemblyOffset> = vec![];

        for i in self.func_import_count..self.function_pointers.len() {
            offsets.push(assembler.offset());
            X64FunctionCode::emit_managed_call_trampoline(
                &mut assembler,
                module_info,
                self.function_pointers[i],
                self.signatures[self.function_signatures[FuncIndex::new(i)]]
                    .params()
                    .len(),
            )?;
        }

        let code = assembler.finalize().unwrap();
        let local_pointers: Vec<FuncPtr> =
            offsets.iter().map(|x| FuncPtr(code.ptr(*x) as _)).collect();

        Ok(X64RuntimeResolver {
            _code: code,
            local_pointers: local_pointers,
        })
    }
}

impl FuncResolver for X64RuntimeResolver {
    fn get(
        &self,
        _module: &ModuleInner,
        _local_func_index: LocalFuncIndex,
    ) -> Option<NonNull<vm::Func>> {
        NonNull::new(self.local_pointers[_local_func_index.index() as usize].0 as *mut vm::Func)
    }
}

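/// Entry point used by the runtime to call a wasm function: marshals the `Value` arguments
/// into a raw buffer, resolves the linear memory base, and then runs the generated code
/// through `CALL_WASM` under `protect_unix::call_protected`.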
impl ProtectedCaller for X64ExecutionContext {
    fn call(
        &self,
        _module: &ModuleInner,
        _func_index: FuncIndex,
        _params: &[Value],
        _import_backing: &ImportBacking,
        _vmctx: *mut vm::Ctx,
        _: Token,
    ) -> RuntimeResult<Vec<Value>> {
        let index = _func_index.index() - self.func_import_count;
        let ptr = self.code.ptr(self.functions[index].begin_offset);
        let return_ty = self.functions[index].returns.last().cloned();

        if self.functions[index].num_params != _params.len() {
            return Err(RuntimeError::Trap {
                msg: "param count mismatch".into(),
            });
        }

        let f = &self.functions[index];
        let total_size = f.num_params * 8;

        if f.num_params > 0 && f.locals[f.num_params - 1].stack_offset != total_size {
            panic!("internal error: inconsistent stack layout");
        }

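        // Serialize the arguments into a buffer that mirrors the callee's parameter layout
        // (each value is written at `total_size - stack_offset`); CALL_WASM later copies
        // this buffer verbatim onto the machine stack.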
        let mut param_buf: Vec<u8> = vec![0; total_size];
        for i in 0..f.num_params {
            let local = &f.locals[i];
            let buf = &mut param_buf[total_size - local.stack_offset..];
            let size = get_size_of_type(&local.ty).unwrap();

            if is_dword(size) {
                match _params[i] {
                    Value::I32(x) => LittleEndian::write_u32(buf, x as u32),
                    Value::F32(x) => LittleEndian::write_u32(buf, f32::to_bits(x)),
                    _ => {
                        return Err(RuntimeError::Trap {
                            msg: "signature mismatch".into(),
                        })
                    }
                }
            } else {
                match _params[i] {
                    Value::I64(x) => LittleEndian::write_u64(buf, x as u64),
                    Value::F64(x) => LittleEndian::write_u64(buf, f64::to_bits(x)),
                    _ => {
                        return Err(RuntimeError::Trap {
                            msg: "signature mismatch".into(),
                        })
                    }
                }
            }
        }

        let memory_base: *mut u8 = if _module.info.memories.len() > 0 {
            if _module.info.memories.len() != 1 || _module.info.imported_memories.len() != 0 {
                return Err(RuntimeError::Trap {
                    msg: "only one linear memory is supported".into(),
                });
            }
            unsafe { (**(*_vmctx).memories).base }
        } else if _module.info.imported_memories.len() > 0 {
            if _module.info.memories.len() != 0 || _module.info.imported_memories.len() != 1 {
                return Err(RuntimeError::Trap {
                    msg: "only one linear memory is supported".into(),
                });
            }
            unsafe { (**(*_vmctx).imported_memories).base }
        } else {
            ::std::ptr::null_mut()
        };
        //println!("MEMORY = {:?}", memory_base);

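        // Publish this context on the thread-local stack for the duration of the call
        // (presumably so a trap handler can find it); `call_protected` converts hardware
        // traps raised by the generated code into a `RuntimeError` instead of aborting.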
        CURRENT_EXECUTION_CONTEXT.with(|x| x.borrow_mut().push(self));

        let ret = unsafe {
            protect_unix::call_protected(|| {
                CALL_WASM(
                    param_buf.as_ptr(),
                    param_buf.len(),
                    ptr,
                    memory_base,
                    _vmctx,
                )
            })
        };

        CURRENT_EXECUTION_CONTEXT.with(|x| x.borrow_mut().pop().unwrap());

        let ret = ret?;

        Ok(if let Some(ty) = return_ty {
            vec![match ty {
                WpType::I32 => Value::I32(ret as i32),
                WpType::I64 => Value::I64(ret),
                WpType::F32 => Value::F32(f32::from_bits(ret as i32 as u32)),
                WpType::F64 => Value::F64(f64::from_bits(ret as u64)),
                _ => unreachable!(),
            }]
        } else {
            vec![]
        })
    }

    fn get_early_trapper(&self) -> Box<dyn UserTrapper> {
        pub struct Trapper;

        impl UserTrapper for Trapper {
            unsafe fn do_early_trap(&self, _data: Box<Any>) -> ! {
                panic!("do_early_trap");
            }
        }

        Box::new(Trapper)
    }
}

#[derive(Copy, Clone, Debug)]
struct Local {
    ty: WpType,
    stack_offset: usize,
}

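/// `new` eagerly emits the native-call trampolines for the memory intrinsics into the
/// shared assembler, so that generated function bodies can call them by label later on.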
impl X64ModuleCodeGenerator {
    pub fn new() -> X64ModuleCodeGenerator {
        let mut assembler = Assembler::new().unwrap();
        let nt = NativeTrampolines {
            memory_size_dynamic_local: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_size,
                MemoryKind::DynamicLocal,
                0usize,
            ),
            memory_size_static_local: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_size,
                MemoryKind::StaticLocal,
                0usize,
            ),
            memory_size_shared_local: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_size,
                MemoryKind::SharedLocal,
                0usize,
            ),
            memory_size_dynamic_import: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_size,
                MemoryKind::DynamicImport,
                0usize,
            ),
            memory_size_static_import: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_size,
                MemoryKind::StaticImport,
                0usize,
            ),
            memory_size_shared_import: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_size,
                MemoryKind::SharedImport,
                0usize,
            ),
            memory_grow_dynamic_local: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_grow,
                MemoryKind::DynamicLocal,
                0usize,
            ),
            memory_grow_static_local: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_grow,
                MemoryKind::StaticLocal,
                0usize,
            ),
            memory_grow_shared_local: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_grow,
                MemoryKind::SharedLocal,
                0usize,
            ),
            memory_grow_dynamic_import: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_grow,
                MemoryKind::DynamicImport,
                0usize,
            ),
            memory_grow_static_import: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_grow,
                MemoryKind::StaticImport,
                0usize,
            ),
            memory_grow_shared_import: X64FunctionCode::emit_native_call_trampoline(
                &mut assembler,
                _memory_grow,
                MemoryKind::SharedImport,
                0usize,
            ),
        };

        X64ModuleCodeGenerator {
            functions: vec![],
            signatures: None,
            function_signatures: None,
            function_labels: Some(HashMap::new()),
            assembler: Some(assembler),
            native_trampolines: Arc::new(nt),
            func_import_count: 0,
        }
    }
}

impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, X64RuntimeResolver>
    for X64ModuleCodeGenerator
{
    fn check_precondition(&mut self, module_info: &ModuleInfo) -> Result<(), CodegenError> {
        for mem in module_info
            .memories
            .iter()
            .map(|(_, v)| v)
            .chain(module_info.imported_memories.iter().map(|(_, v)| &v.1))
        {
            match mem.memory_type() {
                MemoryType::Dynamic => {
                    return Err(CodegenError {
                        message: "dynamic memory isn't supported yet",
                    });
                }
                _ => {}
            }
        }

        Ok(())
    }

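    /// Starts code generation for the next function: the single module-wide assembler and
    /// label map are taken over from the previous `X64FunctionCode` (or from the module
    /// generator for the first function), and a dynamic label is bound at the function's
    /// entry offset.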
    fn next_function(&mut self) -> Result<&mut X64FunctionCode, CodegenError> {
        let (mut assembler, mut function_labels, br_table_data) = match self.functions.last_mut() {
            Some(x) => (
                x.assembler.take().unwrap(),
                x.function_labels.take().unwrap(),
                x.br_table_data.take().unwrap(),
            ),
            None => (
                self.assembler.take().unwrap(),
                self.function_labels.take().unwrap(),
                vec![],
            ),
        };
        let begin_offset = assembler.offset();
        let begin_label_info = function_labels
            .entry(self.functions.len() + self.func_import_count)
            .or_insert_with(|| (assembler.new_dynamic_label(), None));

        begin_label_info.1 = Some(begin_offset);
        let begin_label = begin_label_info.0;

        dynasm!(
            assembler
            ; => begin_label
            //; int 3
        );
        let code = X64FunctionCode {
            signatures: self.signatures.as_ref().unwrap().clone(),
            function_signatures: self.function_signatures.as_ref().unwrap().clone(),
            native_trampolines: self.native_trampolines.clone(),

            begin_offset: begin_offset,
            assembler: Some(assembler),
            function_labels: Some(function_labels),
            br_table_data: Some(br_table_data),
            returns: vec![],
            locals: vec![],
            num_params: 0,
            current_stack_offset: 0,
            value_stack: ValueStack::new(4), // FIXME: Use of R8 and above registers generates incorrect assembly.
            control_stack: None,
            unreachable_depth: 0,
        };
        self.functions.push(code);
        Ok(self.functions.last_mut().unwrap())
    }

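    /// Finalizes the module: finishes the assembler, patches the br_table entries from
    /// assembly offsets into absolute code addresses, collects the function entry pointers,
    /// and derives the runtime resolver from the resulting execution context.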
    fn finalize(
        mut self,
        module_info: &ModuleInfo,
    ) -> Result<(X64ExecutionContext, X64RuntimeResolver), CodegenError> {
        let (assembler, mut br_table_data) = match self.functions.last_mut() {
            Some(x) => (x.assembler.take().unwrap(), x.br_table_data.take().unwrap()),
            None => {
                return Err(CodegenError {
                    message: "no function",
                });
            }
        };
        let output = assembler.finalize().unwrap();

        for table in &mut br_table_data {
            for entry in table {
                *entry = output.ptr(AssemblyOffset(*entry)) as usize;
            }
        }

        let function_labels = if let Some(x) = self.functions.last() {
            x.function_labels.as_ref().unwrap()
        } else {
            self.function_labels.as_ref().unwrap()
        };
        let mut out_labels: Vec<FuncPtr> = vec![];

        for i in 0..function_labels.len() {
            let (_, offset) = match function_labels.get(&i) {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "label not found",
                    });
                }
            };
            let offset = match offset {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "offset is none",
                    });
                }
            };
            out_labels.push(FuncPtr(output.ptr(*offset) as _));
        }

        let ctx = X64ExecutionContext {
            code: output,
            functions: self.functions,
            _br_table_data: br_table_data,
            func_import_count: self.func_import_count,
            signatures: match self.signatures {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "no signatures",
                    });
                }
            },
            function_pointers: out_labels,
            function_signatures: match self.function_signatures {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "no function signatures",
                    });
                }
            },
        };
        let resolver = ctx.get_runtime_resolver(module_info)?;

        Ok((ctx, resolver))
    }

    fn feed_signatures(&mut self, signatures: Map<SigIndex, FuncSig>) -> Result<(), CodegenError> {
        self.signatures = Some(Arc::new(signatures));
        Ok(())
    }

    fn feed_function_signatures(
        &mut self,
        assoc: Map<FuncIndex, SigIndex>,
    ) -> Result<(), CodegenError> {
        self.function_signatures = Some(Arc::new(assoc));
        Ok(())
    }

    fn feed_import_function(&mut self) -> Result<(), CodegenError> {
        let labels = match self.function_labels.as_mut() {
            Some(x) => x,
            None => {
                return Err(CodegenError {
                    message: "got function import after code",
                });
            }
        };
        let id = labels.len();

        let offset = self.assembler.as_mut().unwrap().offset();

        let label = X64FunctionCode::emit_native_call_trampoline(
            self.assembler.as_mut().unwrap(),
            invoke_import,
            0,
            id,
        );
        labels.insert(id, (label, Some(offset)));

        self.func_import_count += 1;

        Ok(())
    }
}

impl X64FunctionCode {
    fn gen_rt_pop(assembler: &mut Assembler, info: &ValueInfo) -> Result<(), CodegenError> {
        match info.location {
            ValueLocation::Register(_) => {}
            ValueLocation::Stack => {
                dynasm!(
                    assembler
                    ; add rsp, 8
                );
            }
        }
        Ok(())
    }

    fn emit_reinterpret(
        value_stack: &mut ValueStack,
        in_ty: WpType,
        out_ty: WpType,
    ) -> Result<(), CodegenError> {
        let val = value_stack.pop()?;
        if val.ty != in_ty {
            return Err(CodegenError {
                message: "reinterpret type mismatch",
            });
        }
        value_stack.push(out_ty);
        Ok(())
    }

    /// Emits a unary operator.
    fn emit_unop<F: FnOnce(&mut Assembler, &ValueStack, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
        in_ty: WpType,
        out_ty: WpType,
    ) -> Result<(), CodegenError> {
        let a = value_stack.pop()?;
        if a.ty != in_ty {
            return Err(CodegenError {
                message: "unop(i32) type mismatch",
            });
        }
        value_stack.push(out_ty);

        match a.location {
            ValueLocation::Register(x) => {
                let reg = Register::from_scratch_reg(x);
                f(assembler, value_stack, reg);
            }
            ValueLocation::Stack => {
                dynasm!(
                    assembler
                    ; mov rax, [rsp]
                );
                f(assembler, value_stack, Register::RAX);
                dynasm!(
                    assembler
                    ; mov [rsp], rax
                );
            }
        }

        Ok(())
    }

    fn emit_unop_i32<F: FnOnce(&mut Assembler, &ValueStack, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
    ) -> Result<(), CodegenError> {
        Self::emit_unop(assembler, value_stack, f, WpType::I32, WpType::I32)
    }

    fn emit_unop_i64<F: FnOnce(&mut Assembler, &ValueStack, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
    ) -> Result<(), CodegenError> {
        Self::emit_unop(assembler, value_stack, f, WpType::I64, WpType::I64)
    }

    /// Emits a binary operator.
    ///
    /// Guarantees that the first Register parameter to callback `f` will never be `Register::RAX`.
    fn emit_binop<F: FnOnce(&mut Assembler, &ValueStack, Register, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
        in_ty: WpType,
        out_ty: WpType,
    ) -> Result<(), CodegenError> {
        let (a, b) = value_stack.pop2()?;
        if a.ty != in_ty || b.ty != in_ty {
            return Err(CodegenError {
                message: "binop(i32) type mismatch",
            });
        }
        value_stack.push(out_ty);

        if a.location.is_register() && b.location.is_register() {
            // output is in a_reg.
            f(
                assembler,
                value_stack,
                Register::from_scratch_reg(a.location.get_register()?),
                Register::from_scratch_reg(b.location.get_register()?),
            );
        } else if a.location.is_register() {
            dynasm!(
                assembler
                ; pop rax
            );
            f(
                assembler,
                value_stack,
                Register::from_scratch_reg(a.location.get_register()?),
                Register::RAX,
            );
        } else if b.location.is_register() {
            unreachable!();
        } else {
            dynasm!(
                assembler
                ; push rcx
                ; mov rcx, [rsp + 16]
                ; mov rax, [rsp + 8]
            );
            f(assembler, value_stack, Register::RCX, Register::RAX);
            dynasm!(
                assembler
                ; mov [rsp + 16], rcx
                ; pop rcx
                ; add rsp, 8
            );
        }

        Ok(())
    }

    fn emit_binop_i32<F: FnOnce(&mut Assembler, &ValueStack, Register, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
    ) -> Result<(), CodegenError> {
        Self::emit_binop(assembler, value_stack, f, WpType::I32, WpType::I32)
    }

    fn emit_binop_i64<F: FnOnce(&mut Assembler, &ValueStack, Register, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
    ) -> Result<(), CodegenError> {
        Self::emit_binop(assembler, value_stack, f, WpType::I64, WpType::I64)
    }

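    /// Emits a shift operation: the shift amount in `right` is staged in rcx (saving rcx
    /// first if it currently holds a live value), since x86 variable shifts take their count
    /// in CL; the callback `f` then emits the actual shift on `left`.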
    fn emit_shift<F: FnOnce(&mut Assembler, Register)>(
        assembler: &mut Assembler,
        value_stack: &ValueStack,
        left: Register,
        right: Register,
        f: F,
    ) {
        let rcx_used = Register::RCX.is_used(value_stack);
        if rcx_used {
            dynasm!(
                assembler
                ; push rcx
            );
        }
        dynasm!(
            assembler
            ; mov rcx, Rq(right as u8)
        );
        f(assembler, left);
        if rcx_used {
            dynasm!(
                assembler
                ; pop rcx
            );
        }
    }

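    /// Emits a 32-bit division: the dividend is loaded into eax, rdx is saved if it holds a
    /// live value (idiv/div clobber edx:eax), the divisor is staged in r15d, and the chosen
    /// result register `out` (presumably eax for the quotient or edx for the remainder) is
    /// copied back into `left`. `emit_div_i64` below is the 64-bit counterpart.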
    fn emit_div_i32(
        assembler: &mut Assembler,
        value_stack: &ValueStack,
        left: Register,
        right: Register,
        signed: bool,
        out: Register,
    ) {
        let dx_save =
            Register::RDX.is_used(value_stack) && left != Register::RDX && right != Register::RDX;
        if dx_save {
            dynasm!(
                assembler
                ; push rdx
            );
        }

        dynasm!(
            assembler
            ; push r15
            ; mov r15d, Rd(right as u8)
            ; mov eax, Rd(left as u8)
        );
        if signed {
            dynasm!(
                assembler
                ; cdq
                ; idiv r15d
            );
        } else {
            dynasm!(
                assembler
                ; xor edx, edx
                ; div r15d
            );
        }
        dynasm!(
            assembler
            ; mov Rd(left as u8), Rd(out as u8)
            ; pop r15
        );

        if dx_save {
            dynasm!(
                assembler
                ; pop rdx
            );
        }
    }

    fn emit_div_i64(
        assembler: &mut Assembler,
        value_stack: &ValueStack,
        left: Register,
        right: Register,
        signed: bool,
        out: Register,
    ) {
        let dx_save =
            Register::RDX.is_used(value_stack) && left != Register::RDX && right != Register::RDX;
        if dx_save {
            dynasm!(
                assembler
                ; push rdx
            );
        }

        dynasm!(
            assembler
            ; push r15
            ; mov r15, Rq(right as u8)
            ; mov rax, Rq(left as u8)
        );
        if signed {
            dynasm!(
                assembler
                ; cqo
                ; idiv r15
            );
        } else {
            dynasm!(
                assembler
                ; xor rdx, rdx
                ; div r15
            );
        }
        dynasm!(
            assembler
            ; mov Rq(left as u8), Rq(out as u8)
            ; pop r15
        );

        if dx_save {
            dynasm!(
                assembler
                ; pop rdx
            );
        }
    }

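    /// Emits a comparison: after `cmp left, right` the callback `f` emits a conditional
    /// jump to `label_true`; the helper then materializes 0 or 1 into `left`.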
    fn emit_cmp_i32<F: FnOnce(&mut Assembler)>(
        assembler: &mut Assembler,
        left: Register,
        right: Register,
        f: F,
    ) {
        dynasm!(
            assembler
            ; cmp Rd(left as u8), Rd(right as u8)
        );
        f(assembler);
        dynasm!(
            assembler
            ; xor Rd(left as u8), Rd(left as u8)
            ; jmp >label_end
            ; label_true:
            ; mov Rd(left as u8), 1
            ; label_end:
        );
    }

    fn emit_cmp_i64<F: FnOnce(&mut Assembler)>(
        assembler: &mut Assembler,
        left: Register,
        right: Register,
        f: F,
    ) {
        dynasm!(
            assembler
            ; cmp Rq(left as u8), Rq(right as u8)
        );
        f(assembler);
        dynasm!(
            assembler
            ; xor Rq(left as u8), Rq(left as u8)
            ; jmp >label_end
            ; label_true:
            ; mov Rq(left as u8), 1
            ; label_end:
        );
    }

    fn emit_peek_into_ax(
        assembler: &mut Assembler,
        value_stack: &ValueStack,
    ) -> Result<WpType, CodegenError> {
        let val = match value_stack.values.last() {
            Some(x) => *x,
            None => {
                return Err(CodegenError {
                    message: "no value",
                });
            }
        };
        match val.location {
            ValueLocation::Register(x) => {
                let reg = Register::from_scratch_reg(x);
                dynasm!(
                    assembler
                    ; mov rax, Rq(reg as u8)
                );
            }
            ValueLocation::Stack => {
                dynasm!(
                    assembler
                    ; mov rax, [rsp]
                );
            }
        }

        Ok(val.ty)
    }

    fn emit_pop_into_reg(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        target: Register,
    ) -> Result<WpType, CodegenError> {
        let val = value_stack.pop()?;
        match val.location {
            ValueLocation::Register(x) => {
                let reg = Register::from_scratch_reg(x);
                dynasm!(
                    assembler
                    ; mov Rq(target as u8), Rq(reg as u8)
                );
            }
            ValueLocation::Stack => {
                dynasm!(
                    assembler
                    ; pop Rq(target as u8)
                );
            }
        }

        Ok(val.ty)
    }

    fn emit_pop_into_ax(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
    ) -> Result<WpType, CodegenError> {
        Self::emit_pop_into_reg(assembler, value_stack, Register::RAX)
    }

    fn emit_push_from_reg(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        ty: WpType,
        source: Register,
    ) -> Result<(), CodegenError> {
        let loc = value_stack.push(ty);
        match loc {
            ValueLocation::Register(x) => {
                let reg = Register::from_scratch_reg(x);
                dynasm!(
                    assembler
                    ; mov Rq(reg as u8), Rq(source as u8)
                );
            }
            ValueLocation::Stack => {
                dynasm!(
                    assembler
                    ; push Rq(source as u8)
                );
            }
        }

        Ok(())
    }

    fn emit_push_from_ax(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        ty: WpType,
    ) -> Result<(), CodegenError> {
        Self::emit_push_from_reg(assembler, value_stack, ty, Register::RAX)
    }

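    /// Emits the code that leaves a control frame: validates the frame's (at most one)
    /// return value against the value stack and moves it into rax, either peeking (used by
    /// branches, which keep the stack intact) or popping it.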
    fn emit_leave_frame(
        assembler: &mut Assembler,
        frame: &ControlFrame,
        value_stack: &mut ValueStack,
        peek: bool,
    ) -> Result<(), CodegenError> {
        let ret_ty = match frame.returns.len() {
            1 => Some(frame.returns[0]),
            0 => None,
            _ => {
                return Err(CodegenError {
                    message: "more than one block returns are not yet supported",
                });
            }
        };

        if value_stack.values.len() < frame.value_stack_depth_before + frame.returns.len() {
            return Err(CodegenError {
                message: "value stack underflow",
            });
        }

        if let Some(_) = ret_ty {
            if value_stack.values.iter().last().map(|x| x.ty) != ret_ty {
                return Err(CodegenError {
                    message: "value type != return type",
                });
            }
            if peek {
                Self::emit_peek_into_ax(assembler, value_stack)?;
            } else {
                Self::emit_pop_into_ax(assembler, value_stack)?;
            }
        }

        Ok(())
    }

    fn emit_else(
        assembler: &mut Assembler,
        control_stack: &mut ControlStack,
        value_stack: &mut ValueStack,
        was_unreachable: bool,
    ) -> Result<(), CodegenError> {
        let frame = match control_stack.frames.last_mut() {
            Some(x) => x,
            None => {
                return Err(CodegenError {
                    message: "no frame (else)",
                });
            }
        };

        if !was_unreachable {
            Self::emit_leave_frame(assembler, frame, value_stack, false)?;
            if value_stack.values.len() != frame.value_stack_depth_before {
                return Err(CodegenError {
                    message: "value_stack.values.len() != frame.value_stack_depth_before",
                });
            }
        } else {
            // No need to actually unwind the stack here.
            value_stack.reset_depth(frame.value_stack_depth_before);
        }

        match frame.if_else {
            IfElseState::If(label) => {
                dynasm!(
                    assembler
                    ; jmp =>frame.label
                    ; => label
                );
                frame.if_else = IfElseState::Else;
            }
            _ => {
                return Err(CodegenError {
                    message: "unexpected if else state",
                });
            }
        }

        Ok(())
    }

    fn emit_block_end(
        assembler: &mut Assembler,
        control_stack: &mut ControlStack,
        value_stack: &mut ValueStack,
        was_unreachable: bool,
    ) -> Result<(), CodegenError> {
        let frame = match control_stack.frames.pop() {
            Some(x) => x,
            None => {
                return Err(CodegenError {
                    message: "no frame (block end)",
                });
            }
        };

        if !was_unreachable {
            Self::emit_leave_frame(assembler, &frame, value_stack, false)?;
            if value_stack.values.len() != frame.value_stack_depth_before {
                return Err(CodegenError {
                    message: "value_stack.values.len() != frame.value_stack_depth_before",
                });
            }
        } else {
            // No need to actually unwind the stack here.
            value_stack.reset_depth(frame.value_stack_depth_before);
        }

        if !frame.loop_like {
            match frame.if_else {
                IfElseState::None | IfElseState::Else => {
                    dynasm!(
                        assembler
                        ; => frame.label
                    );
                }
                IfElseState::If(label) => {
                    dynasm!(
                        assembler
                        ; => frame.label
                        ; => label
                    );

                    if frame.returns.len() != 0 {
                        return Err(CodegenError {
                            message: "if without else, with non-empty returns",
                        });
                    }
                }
            }
        }

        if frame.returns.len() == 1 {
            let loc = value_stack.push(frame.returns[0]);
            match loc {
                ValueLocation::Register(x) => {
                    let reg = Register::from_scratch_reg(x);
                    dynasm!(
                        assembler
                        ; mov Rq(reg as u8), rax
                    );
                }
                ValueLocation::Stack => {
                    dynasm!(
                        assembler
                        ; push rax
                    );
                }
            }
        }

        Ok(())
    }

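    /// Emits a branch to an enclosing frame: the target frame is left via `emit_leave_frame`
    /// (peeking, so the value stack is preserved) unless it is a loop, any stack-spilled
    /// values above the target frame's depth are discarded by adjusting rsp, and control
    /// jumps to the frame's label.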
    fn emit_jmp(
        assembler: &mut Assembler,
        control_stack: &ControlStack,
        value_stack: &mut ValueStack,
        relative_frame_offset: usize,
    ) -> Result<(), CodegenError> {
        let frame = if relative_frame_offset >= control_stack.frames.len() {
            return Err(CodegenError {
                message: "jmp offset out of bounds",
            });
        } else {
            &control_stack.frames[control_stack.frames.len() - 1 - relative_frame_offset]
        };

        if !frame.loop_like {
            Self::emit_leave_frame(assembler, frame, value_stack, true)?;
        }

        let mut sp_diff: usize = 0;

        for i in 0..value_stack.values.len() - frame.value_stack_depth_before {
            let vi = value_stack.values[value_stack.values.len() - 1 - i];
            if vi.location == ValueLocation::Stack {
                sp_diff += 8
            } else {
                break;
            }
        }

        dynasm!(
            assembler
            ; add rsp, sp_diff as i32
            ; jmp =>frame.label
        );

        Ok(())
    }

    fn emit_return(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        returns: &Vec<WpType>,
    ) -> Result<(), CodegenError> {
        match returns.len() {
            0 => {}
            1 => {
                if value_stack.values.iter().last().map(|x| x.ty) != Some(returns[0]) {
                    return Err(CodegenError {
                        message: "self.value_stack.last().cloned() != Some(self.returns[0])",
                    });
                }
                Self::emit_pop_into_ax(assembler, value_stack)?;
            }
            _ => {
                return Err(CodegenError {
                    message: "multiple return values is not yet supported",
                });
            }
        }

        dynasm!(
            assembler
            ; mov rsp, rbp
            ; pop rbp
            ; ret
        );

        Ok(())
    }

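    /// Emits a trampoline that exposes a generated wasm function through the native System V
    /// ABI: the incoming native arguments are spilled onto the stack in the layout expected
    /// by the wasm prologue, the linear memory base is loaded from `vm::Ctx`, and the call is
    /// routed through `CALL_WASM`.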
    fn emit_managed_call_trampoline(
        assembler: &mut Assembler,
        info: &ModuleInfo,
        target: FuncPtr,
        num_params: usize,
    ) -> Result<(), CodegenError> {
        dynasm!(
            assembler
            ; push rbp
            ; mov rbp, rsp
        );

        for i in 0..num_params {
            match i {
                i if i < 5 => {
                    let reg = match i {
                        0 => Register::RSI,
                        1 => Register::RDX,
                        2 => Register::RCX,
                        3 => Register::R8,
                        4 => Register::R9,
                        _ => unreachable!(),
                    };
                    dynasm!(
                        assembler
                        ; push Rq(reg as u8)
                    );
                }
                i => {
                    let offset = (i - 5) * 8;
                    dynasm!(
                        assembler
                        ; mov rax, [rbp + (16 + offset) as i32]
                        ; push rax
                    );
                }
            }
        }

        dynasm!(
            assembler
            ; mov r8, rdi // vmctx
            ; mov rdx, QWORD target.0 as usize as i64
            ; mov rsi, QWORD (num_params * 8) as i64
            ; mov rdi, rsp
        );

        let has_memory = if info.memories.len() > 0 {
            if info.memories.len() != 1 || info.imported_memories.len() != 0 {
                return Err(CodegenError {
                    message: "only one linear memory is supported",
                });
            }
            dynasm!(
                assembler
                ; mov rcx, r8 => vm::Ctx.memories
            );
            true
        } else if info.imported_memories.len() > 0 {
            if info.memories.len() != 0 || info.imported_memories.len() != 1 {
                return Err(CodegenError {
                    message: "only one linear memory is supported",
                });
            }
            dynasm!(
                assembler
                ; mov rcx, r8 => vm::Ctx.imported_memories
            );
            true
        } else {
            false
        };

        if has_memory {
            dynasm!(
                assembler
                ; mov rcx, [rcx]
                ; mov rcx, rcx => LocalMemory.base
            );
        } else {
            dynasm!(
                assembler
                ; mov rcx, 0
            );
        }

        dynasm!(
            assembler
            ; mov rax, QWORD *CALL_WASM as usize as i64
            ; call rax
            ; mov rsp, rbp
            ; pop rbp
            ; ret
        );

        Ok(())
    }

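    /// Range/NaN check for f32-to-integer truncation: traps (via `ud2`) if the value in
    /// `reg` is NaN, lies below `lower_bound`, or lies above `upper_bound`; r15 is preserved
    /// in xmm5 across the check. `emit_f64_int_conv_check` below is the f64 counterpart.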
    fn emit_f32_int_conv_check(
        assembler: &mut Assembler,
        reg: Register,
        lower_bound: f32,
        upper_bound: f32,
    ) {
        let lower_bound = f32::to_bits(lower_bound);
        let upper_bound = f32::to_bits(upper_bound);

        dynasm!(
            assembler
            ; movq xmm5, r15

            // underflow
            ; movd xmm1, Rd(reg as u8)
            ; mov r15d, lower_bound as i32
            ; movd xmm2, r15d
            ; vcmpltss xmm0, xmm1, xmm2
            ; movd r15d, xmm0
            ; cmp r15d, 1
            ; je >trap

            // overflow
            ; mov r15d, upper_bound as i32
            ; movd xmm2, r15d
            ; vcmpgtss xmm0, xmm1, xmm2
            ; movd r15d, xmm0
            ; cmp r15d, 1
            ; je >trap

            // NaN
            ; vcmpeqss xmm0, xmm1, xmm1
            ; movd r15d, xmm0
            ; cmp r15d, 0
            ; je >trap

            ; movq r15, xmm5
            ; jmp >ok

            ; trap:
            ; ud2

            ; ok:
        );
    }

    fn emit_f64_int_conv_check(
        assembler: &mut Assembler,
        reg: Register,
        lower_bound: f64,
        upper_bound: f64,
    ) {
        let lower_bound = f64::to_bits(lower_bound);
        let upper_bound = f64::to_bits(upper_bound);

        dynasm!(
            assembler
            ; movq xmm5, r15

            // underflow
            ; movq xmm1, Rq(reg as u8)
            ; mov r15, QWORD lower_bound as i64
            ; movq xmm2, r15
            ; vcmpltsd xmm0, xmm1, xmm2
            ; movd r15d, xmm0
            ; cmp r15d, 1
            ; je >trap

            // overflow
            ; mov r15, QWORD upper_bound as i64
            ; movq xmm2, r15
            ; vcmpgtsd xmm0, xmm1, xmm2
            ; movd r15d, xmm0
            ; cmp r15d, 1
            ; je >trap

            // NaN
            ; vcmpeqsd xmm0, xmm1, xmm1
            ; movd r15d, xmm0
            ; cmp r15d, 0
            ; je >trap

            ; movq r15, xmm5
            ; jmp >ok

            ; trap:
            ; ud2

            ; ok:
        );
    }

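    /// Emits a trampoline that calls a native Rust function from generated code: the two
    /// baked-in context words, the emulated stack bounds (rsp/rbp), vmctx (r14) and the
    /// memory base (r15) are passed as arguments, and rsp is aligned down to 16 bytes
    /// before the call.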
    fn emit_native_call_trampoline<A: Copy + Sized, B: Copy + Sized>(
        assembler: &mut Assembler,
        target: unsafe extern "C" fn(
            ctx1: A,
            ctx2: B,
            stack_top: *mut u8,
            stack_base: *mut u8,
            vmctx: *mut vm::Ctx,
            memory_base: *mut u8,
        ) -> u64,
        ctx1: A,
        ctx2: B,
    ) -> DynamicLabel {
        let label = assembler.new_dynamic_label();

        dynasm!(
            assembler
            ; =>label
        );

        // FIXME: Check at compile time.
        assert_eq!(::std::mem::size_of::<A>(), ::std::mem::size_of::<i64>());
        assert_eq!(::std::mem::size_of::<B>(), ::std::mem::size_of::<i64>());

        dynasm!(
            assembler
            ; mov rdi, QWORD unsafe { ::std::mem::transmute_copy::<A, i64>(&ctx1) }
            ; mov rsi, QWORD unsafe { ::std::mem::transmute_copy::<B, i64>(&ctx2) }
            ; mov rdx, rsp
            ; mov rcx, rbp
            ; mov r8, r14 // vmctx
            ; mov r9, r15 // memory_base
            ; mov rax, QWORD 0xfffffffffffffff0u64 as i64
            ; and rsp, rax
            ; mov rax, QWORD target as i64
            ; call rax
            ; mov rsp, rbp
            ; pop rbp
            ; ret
        );

        label
    }

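    /// Emits a call from one generated function to another: live scratch registers are
    /// saved, a synthetic return address (`after_call`) and the old rbp are pushed, the
    /// arguments are copied into a freshly reserved stack area, and control jumps to the
    /// target label; the callee's epilogue (`mov rsp, rbp; pop rbp; ret`) then lands back
    /// at `after_call`.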
fn emit_call_raw(
|
|
|
|
assembler: &mut Assembler,
|
|
|
|
value_stack: &mut ValueStack,
|
|
|
|
target: DynamicLabel,
|
|
|
|
params: &[WpType],
|
|
|
|
returns: &[WpType],
|
|
|
|
) -> Result<(), CodegenError> {
|
2019-02-25 23:29:18 +08:00
|
|
|
let total_size: usize = params.len() * 8;
|
2019-02-24 00:52:32 +08:00
|
|
|
|
|
|
|
if params.len() > value_stack.values.len() {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "value stack underflow in call",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut saved_regs: Vec<Register> = Vec::new();
|
|
|
|
|
|
|
|
for v in &value_stack.values[0..value_stack.values.len() - params.len()] {
|
|
|
|
match v.location {
|
|
|
|
ValueLocation::Register(x) => {
|
|
|
|
let reg = Register::from_scratch_reg(x);
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; push Rq(reg as u8)
|
|
|
|
);
|
|
|
|
saved_regs.push(reg);
|
|
|
|
}
|
|
|
|
ValueLocation::Stack => break,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; lea rax, [>after_call] // TODO: Is this correct?
|
|
|
|
; push rax
|
|
|
|
; push rbp
|
|
|
|
);
|
|
|
|
|
|
|
|
if total_size != 0 {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; sub rsp, total_size as i32
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut offset: usize = 0;
|
|
|
|
let mut caller_stack_offset: usize = 0;
|
2019-03-14 10:30:24 +08:00
|
|
|
for ty in params.iter().rev() {
|
2019-02-24 00:52:32 +08:00
|
|
|
let val = value_stack.pop()?;
|
|
|
|
if val.ty != *ty {
|
|
|
|
return Err(CodegenError {
|
2019-03-14 10:30:24 +08:00
|
|
|
message: "value type mismatch in call",
|
2019-02-24 00:52:32 +08:00
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
match val.location {
|
|
|
|
ValueLocation::Register(x) => {
|
|
|
|
let reg = Register::from_scratch_reg(x);
|
2019-02-25 23:29:18 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [rsp + offset as i32], Rq(reg as u8)
|
|
|
|
);
|
2019-02-24 00:52:32 +08:00
|
|
|
}
|
|
|
|
ValueLocation::Stack => {
|
2019-02-25 23:29:18 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov rax, [rsp + (total_size + 16 + saved_regs.len() * 8 + caller_stack_offset) as i32]
|
|
|
|
; mov [rsp + offset as i32], rax
|
|
|
|
);
|
|
|
|
caller_stack_offset += 8;
|
2019-02-24 00:52:32 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-25 23:29:18 +08:00
|
|
|
offset += 8;
|
2019-02-24 00:52:32 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
assert_eq!(offset, total_size);
|
|
|
|
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov rbp, rsp
|
|
|
|
);
|
|
|
|
if total_size != 0 {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; add rbp, total_size as i32
|
|
|
|
);
|
|
|
|
}
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jmp =>target
|
|
|
|
; after_call:
|
|
|
|
);
|
2019-02-25 22:47:27 +08:00
|
|
|
|
|
|
|
for reg in saved_regs.iter().rev() {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; pop Rq(*reg as u8)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
2019-02-24 00:52:32 +08:00
|
|
|
if caller_stack_offset != 0 {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; add rsp, caller_stack_offset as i32
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
match returns.len() {
|
|
|
|
0 => {}
|
|
|
|
1 => {
|
|
|
|
Self::emit_push_from_ax(assembler, value_stack, returns[0])?;
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "more than 1 function returns are not supported",
|
2019-03-18 00:48:50 +08:00
|
|
|
});
|
2019-02-24 00:52:32 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
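
    // The memory helpers below rely on the register convention used throughout
    // this backend: r15 is expected to hold the base address of linear memory,
    // so a wasm address is turned into a host pointer by adding r15 to it.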
    fn emit_memory_load<F: FnOnce(&mut Assembler, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
        out_ty: WpType,
    ) -> Result<(), CodegenError> {
        let addr_info = value_stack.pop()?;
        let out_loc = value_stack.push(out_ty);

        if addr_info.ty != WpType::I32 {
            return Err(CodegenError {
                message: "memory address must be i32",
            });
        }

        assert_eq!(out_loc, addr_info.location);

        match addr_info.location {
            ValueLocation::Register(x) => {
                let reg = Register::from_scratch_reg(x);
                dynasm!(
                    assembler
                    ; add Rq(reg as u8), r15
                );
                f(assembler, reg);
            }
            ValueLocation::Stack => {
                dynasm!(
                    assembler
                    ; pop rax
                    ; add rax, r15
                );
                f(assembler, Register::RAX);
                dynasm!(
                    assembler
                    ; push rax
                )
            }
        }
        Ok(())
    }

    fn emit_memory_store<F: FnOnce(&mut Assembler, Register, Register)>(
        assembler: &mut Assembler,
        value_stack: &mut ValueStack,
        f: F,
        value_ty: WpType,
    ) -> Result<(), CodegenError> {
        let value_info = value_stack.pop()?;
        let addr_info = value_stack.pop()?;

        if addr_info.ty != WpType::I32 {
            return Err(CodegenError {
                message: "memory address must be i32",
            });
        }

        if value_info.ty != value_ty {
            return Err(CodegenError {
                message: "value type mismatch in memory store",
            });
        }

        match value_info.location {
            ValueLocation::Register(x) => {
                let value_reg = Register::from_scratch_reg(x);
                let addr_reg =
                    Register::from_scratch_reg(addr_info.location.get_register().unwrap()); // must be a register
                dynasm!(
                    assembler
                    ; add Rq(addr_reg as u8), r15
                );
                f(assembler, addr_reg, value_reg);
            }
            ValueLocation::Stack => {
                match addr_info.location {
                    ValueLocation::Register(x) => {
                        let addr_reg = Register::from_scratch_reg(x);
                        dynasm!(
                            assembler
                            ; add Rq(addr_reg as u8), r15
                            ; pop rax
                        );
                        f(assembler, addr_reg, Register::RAX);
                    }
                    ValueLocation::Stack => {
                        dynasm!(
                            assembler
                            ; mov [rsp - 8], rcx // red zone
                            ; pop rax // value
                            ; pop rcx // address
                            ; add rcx, r15
                        );
                        f(assembler, Register::RCX, Register::RAX);
                        dynasm!(
                            assembler
                            ; mov rcx, [rsp - 24]
                        );
                    }
                }
            }
        }
        Ok(())
    }
}

impl FunctionCodeGenerator for X64FunctionCode {
    fn feed_return(&mut self, ty: WpType) -> Result<(), CodegenError> {
        self.returns.push(ty);
        Ok(())
    }

    /// Stack layout of a call frame:
    /// - Return address
    /// - Old RBP
    /// - Params in reversed order, caller initialized
    /// - Locals in reversed order, callee initialized
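    ///
    /// Illustrative example (assuming 8-byte slots throughout): for a function
    /// with two params and one local, `[rbp + 8]` holds the return address,
    /// `[rbp]` the old RBP, `[rbp - 8]` param 0, `[rbp - 16]` param 1 and
    /// `[rbp - 24]` local 0. Note that i32 locals may instead be allocated in
    /// 4-byte slots (see `feed_local`).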
    fn feed_param(&mut self, ty: WpType) -> Result<(), CodegenError> {
        self.current_stack_offset += 8;
        self.locals.push(Local {
            ty: ty,
            stack_offset: self.current_stack_offset,
        });

        self.num_params += 1;

        Ok(())
    }

    fn feed_local(&mut self, ty: WpType, n: usize) -> Result<(), CodegenError> {
        let assembler = self.assembler.as_mut().unwrap();
        let size = get_size_of_type(&ty)?;

        if is_dword(size) {
            for _ in 0..n {
                // FIXME: check range of n
                self.current_stack_offset += 4;
                self.locals.push(Local {
                    ty: ty,
                    stack_offset: self.current_stack_offset,
                });
                dynasm!(
                    assembler
                    ; sub rsp, 4
                    ; mov DWORD [rsp], 0
                );
            }
            if n % 2 == 1 {
                self.current_stack_offset += 4;
                dynasm!(
                    assembler
                    ; sub rsp, 4
                );
            }
        } else {
            for _ in 0..n {
                // FIXME: check range of n
                self.current_stack_offset += 8;
                self.locals.push(Local {
                    ty: ty,
                    stack_offset: self.current_stack_offset,
                });
                dynasm!(
                    assembler
                    ; push 0
                );
            }
        }
        Ok(())
    }

    fn begin_body(&mut self) -> Result<(), CodegenError> {
        self.control_stack = Some(ControlStack::new(
            self.assembler.as_mut().unwrap().new_dynamic_label(),
            self.returns.clone(),
        ));
        Ok(())
    }

    fn feed_opcode(&mut self, op: Operator, module_info: &ModuleInfo) -> Result<(), CodegenError> {
        let was_unreachable;

        if self.unreachable_depth > 0 {
            was_unreachable = true;
            match op {
                Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } => {
                    self.unreachable_depth += 1;
                }
                Operator::End => {
                    self.unreachable_depth -= 1;
                }
                Operator::Else => {
                    // We are in a reachable true branch
                    if self.unreachable_depth == 1 {
                        if let Some(IfElseState::If(_)) = self
                            .control_stack
                            .as_ref()
                            .unwrap()
                            .frames
                            .last()
                            .map(|x| x.if_else)
                        {
                            self.unreachable_depth -= 1;
                        }
                    }
                }
                _ => {}
            }
            if self.unreachable_depth > 0 {
                return Ok(());
            }
        } else {
            was_unreachable = false;
        }

        let assembler = self.assembler.as_mut().unwrap();

        match op {
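            // `vm::Ctx.globals` and `vm::Ctx.imported_globals` are arrays of
            // pointers to `LocalGlobal`, hence the double indirection below:
            // first load the pointer at `global_index`, then read or write the
            // `data` field it points to.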
            Operator::GetGlobal { global_index } => {
                let mut global_index = global_index as usize;
                if global_index < module_info.imported_globals.len() {
                    dynasm!(
                        assembler
                        ; mov rax, r14 => vm::Ctx.imported_globals
                    );
                } else {
                    global_index -= module_info.imported_globals.len();
                    if global_index >= module_info.globals.len() {
                        return Err(CodegenError {
                            message: "global out of bounds",
                        });
                    }
                    dynasm!(
                        assembler
                        ; mov rax, r14 => vm::Ctx.globals
                    );
                }

                dynasm!(
                    assembler
                    ; mov rax, [rax + (global_index as i32) * 8]
                    ; mov rax, rax => LocalGlobal.data
                );
                Self::emit_push_from_ax(
                    assembler,
                    &mut self.value_stack,
                    type_to_wp_type(
                        module_info.globals[LocalGlobalIndex::new(global_index)]
                            .desc
                            .ty,
                    ),
                )?;
            }
            Operator::SetGlobal { global_index } => {
                let ty = Self::emit_pop_into_ax(assembler, &mut self.value_stack)?;

                let mut global_index = global_index as usize;
                if global_index < module_info.imported_globals.len() {
                    dynasm!(
                        assembler
                        ; push rbx
                        ; mov rbx, r14 => vm::Ctx.imported_globals
                    );
                } else {
                    global_index -= module_info.imported_globals.len();
                    if global_index >= module_info.globals.len() {
                        return Err(CodegenError {
                            message: "global out of bounds",
                        });
                    }
                    dynasm!(
                        assembler
                        ; push rbx
                        ; mov rbx, r14 => vm::Ctx.globals
                    );
                }

                if ty
                    != type_to_wp_type(
                        module_info.globals[LocalGlobalIndex::new(global_index)]
                            .desc
                            .ty,
                    )
                {
                    return Err(CodegenError {
                        message: "type mismatch in SetGlobal",
                    });
                }
                dynasm!(
                    assembler
                    ; mov rbx, [rbx + (global_index as i32) * 8]
                    ; mov rbx => LocalGlobal.data, rax
                    ; pop rbx
                );
            }
            Operator::GetLocal { local_index } => {
                let local_index = local_index as usize;
                if local_index >= self.locals.len() {
                    return Err(CodegenError {
                        message: "local out of bounds",
                    });
                }
                let local = self.locals[local_index];
                let location = self.value_stack.push(local.ty);
                let size = get_size_of_type(&local.ty)?;

                match location {
                    ValueLocation::Register(id) => {
                        if is_dword(size) {
                            dynasm!(
                                assembler
                                ; mov Rd(Register::from_scratch_reg(id) as u8), [rbp - (local.stack_offset as i32)]
                            );
                        } else {
                            dynasm!(
                                assembler
                                ; mov Rq(Register::from_scratch_reg(id) as u8), [rbp - (local.stack_offset as i32)]
                            );
                        }
                    }
                    ValueLocation::Stack => {
                        if is_dword(size) {
                            dynasm!(
                                assembler
                                ; mov eax, [rbp - (local.stack_offset as i32)]
                                ; push rax
                            );
                        } else {
                            dynasm!(
                                assembler
                                ; mov rax, [rbp - (local.stack_offset as i32)]
                                ; push rax
                            );
                        }
                    }
                }
            }
            Operator::SetLocal { local_index } => {
                let local_index = local_index as usize;
                if local_index >= self.locals.len() {
                    return Err(CodegenError {
                        message: "local out of bounds",
                    });
                }
                let local = self.locals[local_index];
                let ty = Self::emit_pop_into_ax(assembler, &mut self.value_stack)?;
                if ty != local.ty {
                    return Err(CodegenError {
                        message: "SetLocal type mismatch",
                    });
                }

                if is_dword(get_size_of_type(&ty)?) {
                    dynasm!(
                        assembler
                        ; mov [rbp - (local.stack_offset as i32)], eax
                    );
                } else {
                    dynasm!(
                        assembler
                        ; mov [rbp - (local.stack_offset as i32)], rax
                    );
                }
            }
            Operator::TeeLocal { local_index } => {
                let local_index = local_index as usize;
                if local_index >= self.locals.len() {
                    return Err(CodegenError {
                        message: "local out of bounds",
                    });
                }
                let local = self.locals[local_index];
                let ty = Self::emit_peek_into_ax(assembler, &self.value_stack)?;
                if ty != local.ty {
                    return Err(CodegenError {
                        message: "TeeLocal type mismatch",
                    });
                }

                if is_dword(get_size_of_type(&ty)?) {
                    dynasm!(
                        assembler
                        ; mov [rbp - (local.stack_offset as i32)], eax
                    );
                } else {
                    dynasm!(
                        assembler
                        ; mov [rbp - (local.stack_offset as i32)], rax
                    );
                }
            }
            Operator::I32Const { value } => {
                let location = self.value_stack.push(WpType::I32);
                match location {
                    ValueLocation::Register(x) => {
                        let reg = Register::from_scratch_reg(x);
                        dynasm!(
                            assembler
                            ; mov Rd(reg as u8), value
                        );
                    }
                    ValueLocation::Stack => {
                        dynasm!(
                            assembler
                            ; push value
                        );
                    }
                }
            }
|
2019-02-13 20:04:10 +08:00
|
|
|
Operator::I32Add => {
|
2019-02-19 20:25:09 +08:00
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-19 20:25:09 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; add Rd(left as u8), Rd(right as u8)
|
|
|
|
)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Sub => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-19 20:25:09 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; sub Rd(left as u8), Rd(right as u8)
|
|
|
|
)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Mul => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-19 20:25:09 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; imul Rd(left as u8), Rd(right as u8)
|
|
|
|
)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32DivU => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i32(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
false,
|
|
|
|
Register::RAX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32DivS => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i32(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
true,
|
|
|
|
Register::RAX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32RemU => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i32(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
false,
|
|
|
|
Register::RDX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32RemS => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i32(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
true,
|
|
|
|
Register::RDX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
2019-02-13 20:04:10 +08:00
|
|
|
}
|
2019-02-24 12:00:35 +08:00
|
|
|
Operator::I32And => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-24 12:00:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; and Rd(left as u8), Rd(right as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Or => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-24 12:00:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; or Rd(left as u8), Rd(right as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-14 17:11:35 +08:00
|
|
|
Operator::I32Xor => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; xor Rd(left as u8), Rd(right as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-02-23 01:54:16 +08:00
|
|
|
Operator::I32Eq => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp Rd(left as u8), Rd(right as u8)
|
|
|
|
; lahf
|
|
|
|
; shr ax, 14
|
|
|
|
; and eax, 1
|
|
|
|
; mov Rd(left as u8), eax
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-06 01:16:24 +08:00
|
|
|
Operator::I32Ne => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp Rd(left as u8), Rd(right as u8)
|
|
|
|
; lahf
|
|
|
|
; shr ax, 14
|
|
|
|
; and eax, 1
|
|
|
|
; xor eax, 1
|
|
|
|
; mov Rd(left as u8), eax
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-02-23 01:54:16 +08:00
|
|
|
Operator::I32Eqz => {
|
|
|
|
Self::emit_unop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-02-23 01:54:16 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp Rd(reg as u8), 0
|
|
|
|
; lahf
|
|
|
|
; shr ax, 14
|
|
|
|
; and eax, 1
|
|
|
|
);
|
|
|
|
if reg != Register::RAX {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), eax
|
|
|
|
);
|
|
|
|
}
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-06 01:16:24 +08:00
|
|
|
Operator::I32Clz => {
|
|
|
|
Self::emit_unop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; lzcnt Rd(reg as u8), Rd(reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Ctz => {
|
|
|
|
Self::emit_unop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; tzcnt Rd(reg as u8), Rd(reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Popcnt => {
|
|
|
|
Self::emit_unop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; popcnt Rd(reg as u8), Rd(reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-13 18:23:50 +08:00
|
|
|
Operator::I32Shl => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; shl Rd(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32ShrU => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; shr Rd(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32ShrS => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; sar Rd(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Rotl => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; rol Rd(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Rotr => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; ror Rd(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-02-23 01:54:16 +08:00
|
|
|
// Comparison operators.
|
|
|
|
// https://en.wikibooks.org/wiki/X86_Assembly/Control_Flow
|
|
|
|
// TODO: Is reading flag register directly faster?
|
|
|
|
Operator::I32LtS => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jl >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32LeS => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jle >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32GtS => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jg >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32GeS => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jge >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32LtU => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jb >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32LeU => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jbe >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32GtU => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; ja >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32GeU => {
|
|
|
|
Self::emit_binop_i32(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-02-23 01:54:16 +08:00
|
|
|
Self::emit_cmp_i32(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jae >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-05 00:59:05 +08:00
|
|
|
Operator::I64Const { value } => {
|
|
|
|
let location = self.value_stack.push(WpType::I64);
|
|
|
|
match location {
|
|
|
|
ValueLocation::Register(x) => {
|
|
|
|
let reg = Register::from_scratch_reg(x);
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rq(reg as u8), QWORD value
|
|
|
|
);
|
|
|
|
}
|
|
|
|
ValueLocation::Stack => {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov rax, QWORD value
|
|
|
|
; push rax
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Operator::I64Add => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; add Rq(left as u8), Rq(right as u8)
|
|
|
|
)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Sub => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; sub Rq(left as u8), Rq(right as u8)
|
|
|
|
)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Mul => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; imul Rq(left as u8), Rq(right as u8)
|
|
|
|
)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64DivU => {
|
2019-03-06 01:16:24 +08:00
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i64(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
false,
|
|
|
|
Register::RAX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
2019-03-05 00:59:05 +08:00
|
|
|
}
|
|
|
|
Operator::I64DivS => {
|
2019-03-06 01:16:24 +08:00
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i64(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
true,
|
|
|
|
Register::RAX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
2019-03-05 00:59:05 +08:00
|
|
|
}
|
|
|
|
Operator::I64RemU => {
|
2019-03-06 01:16:24 +08:00
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i64(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
false,
|
|
|
|
Register::RDX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
2019-03-05 00:59:05 +08:00
|
|
|
}
|
|
|
|
Operator::I64RemS => {
|
2019-03-06 01:16:24 +08:00
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_div_i64(
|
|
|
|
assembler,
|
|
|
|
value_stack,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
true,
|
|
|
|
Register::RDX,
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
2019-03-05 00:59:05 +08:00
|
|
|
}
|
|
|
|
Operator::I64And => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; and Rq(left as u8), Rq(right as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Or => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; or Rq(left as u8), Rq(right as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-14 17:11:35 +08:00
|
|
|
Operator::I64Xor => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; xor Rq(left as u8), Rq(right as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-05 00:59:05 +08:00
|
|
|
Operator::I64Eq => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp Rq(left as u8), Rq(right as u8)
|
|
|
|
; lahf
|
|
|
|
; shr ax, 14
|
|
|
|
; and eax, 1
|
|
|
|
; mov Rd(left as u8), eax
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-06 01:16:24 +08:00
|
|
|
Operator::I64Ne => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp Rq(left as u8), Rq(right as u8)
|
|
|
|
; lahf
|
|
|
|
; shr ax, 14
|
|
|
|
; and eax, 1
|
|
|
|
; xor eax, 1
|
|
|
|
; mov Rd(left as u8), eax
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-05 00:59:05 +08:00
|
|
|
Operator::I64Eqz => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp Rq(reg as u8), 0
|
|
|
|
; lahf
|
|
|
|
; shr ax, 14
|
|
|
|
; and eax, 1
|
|
|
|
);
|
|
|
|
if reg != Register::RAX {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), eax
|
|
|
|
);
|
|
|
|
}
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-06 01:16:24 +08:00
|
|
|
Operator::I64Clz => {
|
|
|
|
Self::emit_unop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; lzcnt Rq(reg as u8), Rq(reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Ctz => {
|
|
|
|
Self::emit_unop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; tzcnt Rq(reg as u8), Rq(reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Popcnt => {
|
|
|
|
Self::emit_unop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-06 01:16:24 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; popcnt Rq(reg as u8), Rq(reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-13 18:23:50 +08:00
|
|
|
Operator::I64Shl => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; shl Rq(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64ShrU => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; shr Rq(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64ShrS => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; sar Rq(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Rotl => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; rol Rq(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Rotr => {
|
|
|
|
Self::emit_binop_i64(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, value_stack, left, right| {
|
|
|
|
Self::emit_shift(assembler, value_stack, left, right, |assembler, left| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; ror Rq(left as u8), cl
|
|
|
|
)
|
|
|
|
});
|
2019-03-18 00:31:36 +08:00
|
|
|
},
|
2019-03-13 18:23:50 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-05 00:59:05 +08:00
|
|
|
// Comparison operators.
|
|
|
|
// https://en.wikibooks.org/wiki/X86_Assembly/Control_Flow
|
|
|
|
// TODO: Is reading flag register directly faster?
|
|
|
|
Operator::I64LtS => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jl >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64LeS => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jle >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64GtS => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jg >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64GeS => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jge >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64LtU => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jb >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64LeU => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jbe >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64GtU => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; ja >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64GeU => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-05 00:59:05 +08:00
|
|
|
Self::emit_cmp_i64(assembler, left, right, |assembler| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jae >label_true
|
|
|
|
);
|
|
|
|
});
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64ExtendSI32 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movsx Rq(reg as u8), Rd(reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64ExtendUI32 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|_assembler, _value_stack, _reg| {
|
2019-03-05 00:59:05 +08:00
|
|
|
// FIXME: Is it correct to do nothing here?
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32WrapI64 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-05 00:59:05 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), Rd(reg as u8) // clear upper 32 bits
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-02-21 22:04:43 +08:00
|
|
|
Operator::Block { ty } => {
|
|
|
|
self.control_stack
|
|
|
|
.as_mut()
|
|
|
|
.unwrap()
|
|
|
|
.frames
|
|
|
|
.push(ControlFrame {
|
|
|
|
label: assembler.new_dynamic_label(),
|
|
|
|
loop_like: false,
|
2019-02-27 23:38:45 +08:00
|
|
|
if_else: IfElseState::None,
|
2019-02-21 22:04:43 +08:00
|
|
|
returns: match ty {
|
|
|
|
WpType::EmptyBlockType => vec![],
|
|
|
|
_ => vec![ty],
|
|
|
|
},
|
|
|
|
value_stack_depth_before: self.value_stack.values.len(),
|
|
|
|
});
|
|
|
|
}
|
2019-02-23 01:54:16 +08:00
|
|
|
Operator::Unreachable => {
|
2019-03-13 18:23:50 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; ud2
|
|
|
|
);
|
2019-02-23 01:54:16 +08:00
|
|
|
self.unreachable_depth = 1;
|
|
|
|
}
|
2019-02-13 20:04:10 +08:00
|
|
|
Operator::Drop => {
|
|
|
|
let info = self.value_stack.pop()?;
|
|
|
|
Self::gen_rt_pop(assembler, &info)?;
|
2019-02-12 23:15:57 +08:00
|
|
|
}
|
2019-02-21 21:14:10 +08:00
|
|
|
Operator::Return => {
|
|
|
|
Self::emit_return(assembler, &mut self.value_stack, &self.returns)?;
|
|
|
|
self.unreachable_depth = 1;
|
|
|
|
}
|
2019-02-24 00:52:32 +08:00
|
|
|
Operator::Call { function_index } => {
|
|
|
|
let function_index = function_index as usize;
|
2019-03-09 02:57:23 +08:00
|
|
|
let label = self
|
2019-02-24 00:52:32 +08:00
|
|
|
.function_labels
|
|
|
|
.as_mut()
|
|
|
|
.unwrap()
|
|
|
|
.entry(function_index)
|
2019-03-09 02:57:23 +08:00
|
|
|
.or_insert_with(|| (assembler.new_dynamic_label(), None))
|
|
|
|
.0;
|
2019-02-24 00:52:32 +08:00
|
|
|
let sig_index = match self.function_signatures.get(FuncIndex::new(function_index)) {
|
|
|
|
Some(x) => *x,
|
|
|
|
None => {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "signature not found",
|
2019-03-18 00:48:50 +08:00
|
|
|
});
|
2019-02-24 00:52:32 +08:00
|
|
|
}
|
|
|
|
};
|
|
|
|
let sig = match self.signatures.get(sig_index) {
|
|
|
|
Some(x) => x,
|
|
|
|
None => {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "signature does not exist",
|
2019-03-18 00:48:50 +08:00
|
|
|
});
|
2019-02-24 00:52:32 +08:00
|
|
|
}
|
|
|
|
};
|
|
|
|
let param_types: Vec<WpType> =
|
|
|
|
sig.params().iter().cloned().map(type_to_wp_type).collect();
|
|
|
|
let return_types: Vec<WpType> =
|
|
|
|
sig.returns().iter().cloned().map(type_to_wp_type).collect();
|
|
|
|
Self::emit_call_raw(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
label,
|
|
|
|
¶m_types,
|
|
|
|
&return_types,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-09 02:57:23 +08:00
|
|
|
Operator::CallIndirect { index, table_index } => {
|
|
|
|
if table_index != 0 {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "only one table is supported",
|
|
|
|
});
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
let local_or_import = if module_info.tables.len() > 0 {
|
|
|
|
if module_info.tables.len() != 1 || module_info.imported_tables.len() != 0 {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "only one table is supported",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
CallIndirectLocalOrImport::Local
|
|
|
|
} else if module_info.imported_tables.len() > 0 {
|
|
|
|
if module_info.tables.len() != 0 || module_info.imported_tables.len() != 1 {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "only one table is supported",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
CallIndirectLocalOrImport::Import
|
|
|
|
} else {
|
2019-03-09 02:57:23 +08:00
|
|
|
return Err(CodegenError {
|
|
|
|
message: "no tables",
|
|
|
|
});
|
2019-03-14 10:30:24 +08:00
|
|
|
};
|
2019-03-09 02:57:23 +08:00
|
|
|
let sig_index = SigIndex::new(index as usize);
|
|
|
|
let sig = match self.signatures.get(sig_index) {
|
|
|
|
Some(x) => x,
|
|
|
|
None => {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "signature does not exist",
|
2019-03-18 00:48:50 +08:00
|
|
|
});
|
2019-03-09 02:57:23 +08:00
|
|
|
}
|
|
|
|
};
|
|
|
|
let mut param_types: Vec<WpType> =
|
|
|
|
sig.params().iter().cloned().map(type_to_wp_type).collect();
|
|
|
|
let return_types: Vec<WpType> =
|
|
|
|
sig.returns().iter().cloned().map(type_to_wp_type).collect();
|
|
|
|
param_types.push(WpType::I32); // element index
|
|
|
|
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; jmp >after_trampoline
|
|
|
|
);
|
|
|
|
|
|
|
|
let trampoline_label = Self::emit_native_call_trampoline(
|
|
|
|
assembler,
|
|
|
|
call_indirect,
|
|
|
|
index as usize,
|
2019-03-14 10:30:24 +08:00
|
|
|
local_or_import,
|
2019-03-09 02:57:23 +08:00
|
|
|
);
|
|
|
|
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; after_trampoline:
|
|
|
|
);
|
|
|
|
|
|
|
|
Self::emit_call_raw(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
trampoline_label,
|
|
|
|
¶m_types,
|
|
|
|
&return_types,
|
|
|
|
)?;
|
|
|
|
}
|
2019-02-21 21:14:10 +08:00
|
|
|
Operator::End => {
|
|
|
|
if self.control_stack.as_ref().unwrap().frames.len() == 1 {
|
|
|
|
let frame = self.control_stack.as_mut().unwrap().frames.pop().unwrap();
|
2019-02-21 22:04:43 +08:00
|
|
|
|
|
|
|
if !was_unreachable {
|
|
|
|
Self::emit_leave_frame(assembler, &frame, &mut self.value_stack, false)?;
|
2019-03-13 18:23:50 +08:00
|
|
|
} else {
|
|
|
|
self.value_stack.reset_depth(0);
|
2019-02-21 22:04:43 +08:00
|
|
|
}
|
|
|
|
|
2019-02-20 23:21:33 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-02-21 21:14:10 +08:00
|
|
|
; =>frame.label
|
2019-02-20 23:21:33 +08:00
|
|
|
);
|
2019-02-21 21:14:10 +08:00
|
|
|
} else {
|
|
|
|
Self::emit_block_end(
|
|
|
|
assembler,
|
|
|
|
self.control_stack.as_mut().unwrap(),
|
|
|
|
&mut self.value_stack,
|
2019-02-21 22:04:43 +08:00
|
|
|
was_unreachable,
|
2019-02-21 21:14:10 +08:00
|
|
|
)?;
|
2019-02-14 00:53:06 +08:00
|
|
|
}
|
|
|
|
}
|
2019-02-23 01:54:16 +08:00
|
|
|
Operator::Loop { ty } => {
|
|
|
|
let label = assembler.new_dynamic_label();
|
|
|
|
self.control_stack
|
|
|
|
.as_mut()
|
|
|
|
.unwrap()
|
|
|
|
.frames
|
|
|
|
.push(ControlFrame {
|
|
|
|
label: label,
|
|
|
|
loop_like: true,
|
2019-02-27 23:38:45 +08:00
|
|
|
if_else: IfElseState::None,
|
2019-02-23 01:54:16 +08:00
|
|
|
returns: match ty {
|
|
|
|
WpType::EmptyBlockType => vec![],
|
|
|
|
_ => vec![ty],
|
|
|
|
},
|
|
|
|
value_stack_depth_before: self.value_stack.values.len(),
|
|
|
|
});
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; =>label
|
|
|
|
);
|
|
|
|
}
|
2019-02-27 23:38:45 +08:00
|
|
|
Operator::If { ty } => {
|
|
|
|
let label_end = assembler.new_dynamic_label();
|
|
|
|
let label_else = assembler.new_dynamic_label();
|
|
|
|
|
|
|
|
Self::emit_pop_into_ax(assembler, &mut self.value_stack)?; // TODO: typeck?
|
|
|
|
|
|
|
|
self.control_stack
|
|
|
|
.as_mut()
|
|
|
|
.unwrap()
|
|
|
|
.frames
|
|
|
|
.push(ControlFrame {
|
|
|
|
label: label_end,
|
|
|
|
loop_like: false,
|
|
|
|
if_else: IfElseState::If(label_else),
|
|
|
|
returns: match ty {
|
|
|
|
WpType::EmptyBlockType => vec![],
|
|
|
|
_ => vec![ty],
|
|
|
|
},
|
|
|
|
value_stack_depth_before: self.value_stack.values.len(),
|
|
|
|
});
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp eax, 0
|
|
|
|
; je =>label_else
|
|
|
|
);
|
|
|
|
}
|
|
|
|
Operator::Else => {
|
|
|
|
Self::emit_else(
|
|
|
|
assembler,
|
|
|
|
self.control_stack.as_mut().unwrap(),
|
|
|
|
&mut self.value_stack,
|
|
|
|
was_unreachable,
|
|
|
|
)?;
|
|
|
|
}
|
2019-02-28 23:12:42 +08:00
|
|
|
Operator::Select => {
|
|
|
|
Self::emit_pop_into_ax(assembler, &mut self.value_stack)?;
|
|
|
|
let v_b = self.value_stack.pop()?;
|
|
|
|
let v_a = self.value_stack.pop()?;
|
|
|
|
|
|
|
|
if v_b.ty != v_a.ty {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "select: type mismatch",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp eax, 0
|
|
|
|
);
|
|
|
|
match v_b.location {
|
|
|
|
ValueLocation::Stack => {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmove rax, [rsp]
|
|
|
|
; add rsp, 8
|
|
|
|
);
|
|
|
|
}
|
|
|
|
ValueLocation::Register(x) => {
|
|
|
|
let reg = Register::from_scratch_reg(x);
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmove rax, Rq(reg as u8)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
match v_a.location {
|
|
|
|
ValueLocation::Stack => {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmovne rax, [rsp]
|
|
|
|
; add rsp, 8
|
|
|
|
);
|
|
|
|
}
|
|
|
|
ValueLocation::Register(x) => {
|
|
|
|
let reg = Register::from_scratch_reg(x);
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmovne rax, Rq(reg as u8)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Self::emit_push_from_ax(assembler, &mut self.value_stack, v_a.ty)?;
|
|
|
|
}
|
2019-02-20 23:21:33 +08:00
|
|
|
Operator::Br { relative_depth } => {
|
|
|
|
Self::emit_jmp(
|
|
|
|
assembler,
|
|
|
|
self.control_stack.as_ref().unwrap(),
|
|
|
|
&mut self.value_stack,
|
|
|
|
relative_depth as usize,
|
|
|
|
)?;
|
2019-02-21 21:14:10 +08:00
|
|
|
self.unreachable_depth = 1;
|
2019-02-20 23:21:33 +08:00
|
|
|
}
|
2019-02-21 22:04:43 +08:00
|
|
|
Operator::BrIf { relative_depth } => {
|
|
|
|
let no_br_label = assembler.new_dynamic_label();
|
|
|
|
Self::emit_pop_into_ax(assembler, &mut self.value_stack)?; // TODO: typeck?
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp eax, 0
|
|
|
|
; je =>no_br_label
|
|
|
|
);
|
|
|
|
Self::emit_jmp(
|
|
|
|
assembler,
|
|
|
|
self.control_stack.as_ref().unwrap(),
|
|
|
|
&mut self.value_stack,
|
|
|
|
relative_depth as usize,
|
|
|
|
)?;
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; =>no_br_label
|
|
|
|
);
|
|
|
|
}
|
2019-02-24 12:00:35 +08:00
|
|
|
Operator::BrTable { table } => {
|
|
|
|
let (targets, default_target) = match table.read_table() {
|
|
|
|
Ok(x) => x,
|
|
|
|
Err(_) => {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "cannot read br table",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
};
|
|
|
|
let cond_ty = Self::emit_pop_into_ax(assembler, &mut self.value_stack)?;
|
|
|
|
if cond_ty != WpType::I32 {
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "expecting i32 for BrTable condition",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
let mut table = vec![0usize; targets.len()];
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cmp eax, targets.len() as i32
|
|
|
|
; jae >default_br
|
|
|
|
; shl rax, 3
|
|
|
|
; push rcx
|
|
|
|
; mov rcx, QWORD table.as_ptr() as usize as i64
|
|
|
|
; add rax, rcx
|
|
|
|
; pop rcx
|
|
|
|
; mov rax, [rax] // assuming upper 32 bits of rax are zeroed
|
|
|
|
; jmp rax
|
|
|
|
);
|
|
|
|
for (i, target) in targets.iter().enumerate() {
|
|
|
|
let AssemblyOffset(offset) = assembler.offset();
|
|
|
|
table[i] = offset;
|
|
|
|
Self::emit_jmp(
|
|
|
|
assembler,
|
|
|
|
self.control_stack.as_ref().unwrap(),
|
|
|
|
&mut self.value_stack,
|
|
|
|
*target as usize,
|
|
|
|
)?; // This does not actually modify value_stack.
|
|
|
|
}
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; default_br:
|
|
|
|
);
|
|
|
|
Self::emit_jmp(
|
|
|
|
assembler,
|
|
|
|
self.control_stack.as_ref().unwrap(),
|
|
|
|
&mut self.value_stack,
|
|
|
|
default_target as usize,
|
|
|
|
)?;
|
|
|
|
self.br_table_data.as_mut().unwrap().push(table);
|
|
|
|
self.unreachable_depth = 1;
|
|
|
|
}
|
2019-02-28 23:58:02 +08:00
|
|
|
Operator::I32Load { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-02 01:41:31 +08:00
|
|
|
|assembler, reg| {
|
2019-02-28 23:58:02 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-03-02 01:41:31 +08:00
|
|
|
; mov Rd(reg as u8), [Rq(reg as u8) + memarg.offset as i32]
|
2019-02-28 23:58:02 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Load8U { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-02 01:41:31 +08:00
|
|
|
|assembler, reg| {
|
2019-02-28 23:58:02 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-03-02 01:41:31 +08:00
|
|
|
; movzx Rd(reg as u8), BYTE [Rq(reg as u8) + memarg.offset as i32]
|
2019-02-28 23:58:02 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Load8S { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-02 01:41:31 +08:00
|
|
|
|assembler, reg| {
|
2019-02-28 23:58:02 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-03-02 01:41:31 +08:00
|
|
|
; movsx Rd(reg as u8), BYTE [Rq(reg as u8) + memarg.offset as i32]
|
2019-02-28 23:58:02 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Load16U { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-02 01:41:31 +08:00
|
|
|
|assembler, reg| {
|
2019-02-28 23:58:02 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-03-02 01:41:31 +08:00
|
|
|
; movzx Rd(reg as u8), WORD [Rq(reg as u8) + memarg.offset as i32]
|
2019-02-28 23:58:02 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Load16S { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-02 01:41:31 +08:00
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movsx Rd(reg as u8), WORD [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Store { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rd(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Store8 { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rb(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32Store16 { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
2019-02-28 23:58:02 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-03-02 01:41:31 +08:00
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rw(value_reg as u8)
|
2019-02-28 23:58:02 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-05 00:59:05 +08:00
|
|
|
Operator::I64Load { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rq(reg as u8), [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Load8U { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movzx Rq(reg as u8), BYTE [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Load8S { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movsx Rq(reg as u8), BYTE [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Load16U { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movzx Rq(reg as u8), WORD [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Load16S { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movsx Rq(reg as u8), WORD [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Load32U { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), DWORD [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Load32S { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movsx Rq(reg as u8), DWORD [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Store { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rq(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Store8 { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rb(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Store16 { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rw(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64Store32 { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rd(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-14 17:11:35 +08:00
|
|
|
Operator::F32Const { value } => {
|
|
|
|
let location = self.value_stack.push(WpType::F32);
|
|
|
|
match location {
|
|
|
|
ValueLocation::Register(x) => {
|
|
|
|
let reg = Register::from_scratch_reg(x);
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), value.bits() as i32
|
|
|
|
);
|
|
|
|
}
|
|
|
|
ValueLocation::Stack => {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; push value.bits() as i32
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Operator::F64Const { value } => {
|
|
|
|
let location = self.value_stack.push(WpType::F64);
|
|
|
|
match location {
|
|
|
|
ValueLocation::Register(x) => {
|
|
|
|
let reg = Register::from_scratch_reg(x);
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rq(reg as u8), QWORD value.bits() as i64
|
|
|
|
);
|
|
|
|
}
|
|
|
|
ValueLocation::Stack => {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov rax, QWORD value.bits() as i64
|
|
|
|
; push rax
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Operator::F32Load { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Store { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rd(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Load { memarg } => {
|
|
|
|
Self::emit_memory_load(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rq(reg as u8), [Rq(reg as u8) + memarg.offset as i32]
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Store { memarg } => {
|
|
|
|
Self::emit_memory_store(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
|assembler, addr_reg, value_reg| {
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov [Rq(addr_reg as u8) + memarg.offset as i32], Rq(value_reg as u8)
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32ReinterpretF32 => {
|
|
|
|
Self::emit_reinterpret(&mut self.value_stack, WpType::F32, WpType::I32)?;
|
|
|
|
}
|
|
|
|
Operator::F32ReinterpretI32 => {
|
|
|
|
Self::emit_reinterpret(&mut self.value_stack, WpType::I32, WpType::F32)?;
|
|
|
|
}
|
|
|
|
Operator::I64ReinterpretF64 => {
|
|
|
|
Self::emit_reinterpret(&mut self.value_stack, WpType::F64, WpType::I64)?;
|
|
|
|
}
|
|
|
|
Operator::F64ReinterpretI64 => {
|
|
|
|
Self::emit_reinterpret(&mut self.value_stack, WpType::I64, WpType::F64)?;
|
|
|
|
}
|
2019-03-14 19:10:22 +08:00
|
|
|
Operator::F32ConvertSI32 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cvtsi2ss xmm1, Rd(reg as u8)
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32ConvertUI32 => {
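// A u32 always fits in the non-negative range of i64, so the value is
// zero-extended (via the 32-bit self-move below) and then converted with
// the 64-bit signed cvtsi2ss.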
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), Rd(reg as u8) // clear upper 32 bits
|
|
|
|
; cvtsi2ss xmm1, Rq(reg as u8)
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32ConvertSI64 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cvtsi2ss xmm1, Rq(reg as u8)
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
/*
|
2019-03-18 00:31:36 +08:00
|
|
|
0: 48 85 ff test %rdi,%rdi
|
|
|
|
3: 78 0b js 10 <ulong2double+0x10>
|
|
|
|
5: c4 e1 fb 2a c7 vcvtsi2sd %rdi,%xmm0,%xmm0
|
|
|
|
a: c3 retq
|
|
|
|
b: 0f 1f 44 00 00 nopl 0x0(%rax,%rax,1)
|
|
|
|
10: 48 89 f8 mov %rdi,%rax
|
|
|
|
13: 83 e7 01 and $0x1,%edi
|
|
|
|
16: 48 d1 e8 shr %rax
|
|
|
|
19: 48 09 f8 or %rdi,%rax
|
|
|
|
1c: c4 e1 fb 2a c0 vcvtsi2sd %rax,%xmm0,%xmm0
|
|
|
|
21: c5 fb 58 c0 vaddsd %xmm0,%xmm0,%xmm0
|
|
|
|
25: c3 retq
|
|
|
|
*/
|
2019-03-14 19:10:22 +08:00
|
|
|
Operator::F32ConvertUI64 => {
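// u64 -> f32 without an unsigned convert instruction: if the sign bit is
// clear, cvtsi2ss is used directly; otherwise the value is halved (with the
// lowest bit OR-ed back in so rounding stays correct) before the signed
// convert, and the result is doubled with addss. r15 (the memory base
// register) is temporarily parked in xmm5 and restored afterwards.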
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; test Rq(reg as u8), Rq(reg as u8)
|
|
|
|
; js >do_convert
|
|
|
|
; cvtsi2ss xmm1, Rq(reg as u8)
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
; jmp >end_convert
|
|
|
|
; do_convert:
|
|
|
|
; movq xmm5, r15
|
|
|
|
; mov r15, Rq(reg as u8)
|
|
|
|
; and r15, 1
|
|
|
|
; shr Rq(reg as u8), 1
|
|
|
|
; or Rq(reg as u8), r15
|
|
|
|
; cvtsi2ss xmm1, Rq(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; addss xmm1, xmm1
|
2019-03-14 19:10:22 +08:00
|
|
|
; movq r15, xmm5
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
; end_convert:
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64ConvertSI32 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cvtsi2sd xmm1, Rd(reg as u8)
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64ConvertUI32 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; mov Rd(reg as u8), Rd(reg as u8) // clear upper 32 bits
|
|
|
|
; cvtsi2sd xmm1, Rq(reg as u8)
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I32,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64ConvertSI64 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; cvtsi2sd xmm1, Rq(reg as u8)
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64ConvertUI64 => {
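// Same shift-and-double trick as F32ConvertUI64 above, but producing an
// f64 (cvtsi2sd / addsd); see the commented-out reference disassembly
// earlier in this match arm list.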
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; test Rq(reg as u8), Rq(reg as u8)
|
|
|
|
; js >do_convert
|
|
|
|
; cvtsi2sd xmm1, Rq(reg as u8)
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
; jmp >end_convert
|
|
|
|
; do_convert:
|
|
|
|
; movq xmm5, r15
|
|
|
|
; mov r15, Rq(reg as u8)
|
|
|
|
; and r15, 1
|
|
|
|
; shr Rq(reg as u8), 1
|
|
|
|
; or Rq(reg as u8), r15
|
|
|
|
; cvtsi2sd xmm1, Rq(reg as u8)
|
|
|
|
; addsd xmm1, xmm1
|
|
|
|
; movq r15, xmm5
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
; end_convert:
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::I64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64PromoteF32 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
|
|
|
; cvtss2sd xmm1, xmm1
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32DemoteF64 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
|
|
|
; cvtsd2ss xmm1, xmm1
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-14 17:11:35 +08:00
|
|
|
Operator::F32Add => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; addss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Sub => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; subss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Mul => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; mulss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Div => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; divss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Max => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; maxss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Min => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; minss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Eq => {
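// cmpeqss leaves an all-ones (or all-zeros) 32-bit mask in xmm1; the
// trailing `and ..., 1` collapses that mask into the 0/1 value wasm expects
// for comparison results. The other floating-point comparisons below use
// the same pattern.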
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; cmpeqss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Ne => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; cmpneqss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Gt => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; vcmpgtss xmm1, xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Ge => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; vcmpgess xmm1, xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Lt => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; cmpltss xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Le => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; cmpless xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-14 19:10:22 +08:00
|
|
|
Operator::F32Copysign => {
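// copysign via bit masks: keep the magnitude bits of the left operand
// (mask 0x7fffffff) and the sign bit of the right operand (mask 0x80000000),
// then OR the two together.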
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(left as u8)
|
|
|
|
; movd xmm2, Rd(right as u8)
|
|
|
|
; mov eax, 0x7fffffffu32 as i32
|
|
|
|
; movd xmm3, eax
|
|
|
|
; pand xmm1, xmm3
|
|
|
|
; mov eax, 0x80000000u32 as i32
|
|
|
|
; movd xmm3, eax
|
|
|
|
; pand xmm2, xmm3
|
|
|
|
; por xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
|
|
|
WpType::F32,
|
|
|
|
)?;
|
|
|
|
}
|
2019-03-14 17:11:35 +08:00
|
|
|
Operator::F32Sqrt => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
|
|
|
; sqrtss xmm1, xmm1
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F32,
|
2019-03-14 17:11:35 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Abs => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; and Rd(reg as u8), 0x7fffffffu32 as i32
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F32,
|
2019-03-14 17:11:35 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Neg => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 17:11:35 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; btc Rd(reg as u8), 31
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F32,
|
2019-03-14 17:11:35 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-15 01:10:31 +08:00
|
|
|
Operator::F32Nearest => {
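// roundss immediate encodings used by Nearest/Floor/Ceil/Trunc below:
// 0 = round to nearest (ties to even), 1 = toward -inf, 2 = toward +inf,
// 3 = toward zero.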
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
|
|
|
; roundss xmm1, xmm1, 0
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F32,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Floor => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
|
|
|
; roundss xmm1, xmm1, 1
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F32,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Ceil => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
|
|
|
; roundss xmm1, xmm1, 2
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F32,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F32Trunc => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
|
|
|
; roundss xmm1, xmm1, 3
|
|
|
|
; movd Rd(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F32,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-17 10:27:14 +08:00
|
|
|
Operator::I32TruncUF32 => {
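// The range check is assumed to trap for NaN and for values at or outside
// (-1.0, 4294967296.0), i.e. anything that cannot truncate into u32. The
// truncation itself goes through a 64-bit cvttss2si and then keeps only the
// low 32 bits.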
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-18 00:31:36 +08:00
|
|
|
Self::emit_f32_int_conv_check(assembler, reg, -1.0, 4294967296.0);
|
2019-03-17 10:27:14 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttss2si Rq(reg as u8), xmm1
|
|
|
|
; mov Rd(reg as u8), Rd(reg as u8) // keep only the low 32 bits of the result
|
2019-03-17 10:27:14 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I32,
|
2019-03-17 10:27:14 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I32TruncSF32 => {
|
2019-03-15 01:10:31 +08:00
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-18 00:31:36 +08:00
|
|
|
Self::emit_f32_int_conv_check(assembler, reg, -2147483904.0, 2147483648.0);
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttss2si Rd(reg as u8), xmm1
|
2019-03-15 01:10:31 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I32,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-17 10:27:14 +08:00
|
|
|
Operator::I64TruncUF32 => {
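// u64 result from an f32: values >= 2^63 cannot go through the signed
// cvttss2si directly, so both cvttss2si(x) and cvttss2si(x - 2^63) with the
// sign bit flipped are computed, and cmovae picks the adjusted value when
// x >= 2^63 (compare against the 2^63 constant loaded below). r15 is again
// saved in xmm5 around the sequence.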
|
2019-03-15 01:10:31 +08:00
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-18 00:31:36 +08:00
|
|
|
Self::emit_f32_int_conv_check(assembler, reg, -1.0, 18446744073709551616.0);
|
2019-03-17 19:54:20 +08:00
|
|
|
/*
|
|
|
|
LCPI0_0:
|
|
|
|
.long 1593835520 ## float 9.22337203E+18
|
|
|
|
|
|
|
|
movss LCPI0_0(%rip), %xmm1 ## xmm1 = mem[0],zero,zero,zero
|
|
|
|
movaps %xmm0, %xmm2
|
|
|
|
subss %xmm1, %xmm2
|
|
|
|
cvttss2si %xmm2, %rax
|
|
|
|
movabsq $-9223372036854775808, %rcx ## imm = 0x8000000000000000
|
|
|
|
xorq %rax, %rcx
|
|
|
|
cvttss2si %xmm0, %rax
|
|
|
|
ucomiss %xmm1, %xmm0
|
|
|
|
cmovaeq %rcx, %rax
|
|
|
|
*/
|
2019-03-17 10:27:14 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-03-17 21:27:19 +08:00
|
|
|
; movq xmm5, r15
|
|
|
|
; mov r15d, 1593835520u32 as i32 // f32 bit pattern of 9.22337203E+18 (2^63)
|
|
|
|
; movd xmm1, r15d
|
2019-03-17 19:54:20 +08:00
|
|
|
; movd xmm2, Rd(reg as u8)
|
|
|
|
; movd xmm3, Rd(reg as u8)
|
|
|
|
; subss xmm2, xmm1
|
|
|
|
; cvttss2si Rq(reg as u8), xmm2
|
2019-03-17 21:27:19 +08:00
|
|
|
; mov r15, QWORD 0x8000000000000000u64 as i64
|
|
|
|
; xor r15, Rq(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttss2si Rq(reg as u8), xmm3
|
|
|
|
; ucomiss xmm3, xmm1
|
2019-03-17 21:27:19 +08:00
|
|
|
; cmovae Rq(reg as u8), r15
|
|
|
|
; movq r15, xmm5
|
2019-03-17 10:27:14 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I64,
|
2019-03-17 10:27:14 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64TruncSF32 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-17 10:27:14 +08:00
|
|
|
Self::emit_f32_int_conv_check(
|
|
|
|
assembler,
|
|
|
|
reg,
|
|
|
|
-9223373136366403584.0,
|
|
|
|
9223372036854775808.0,
|
|
|
|
);
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movd xmm1, Rd(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttss2si Rq(reg as u8), xmm1
|
2019-03-15 01:10:31 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F32,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I64,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-14 19:10:22 +08:00
|
|
|
Operator::F64Add => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; addsd xmm1, xmm2
|
|
|
|
; movq Rq(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Sub => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; subsd xmm1, xmm2
|
|
|
|
; movq Rq(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Mul => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; mulsd xmm1, xmm2
|
|
|
|
; movq Rq(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Div => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; divsd xmm1, xmm2
|
|
|
|
; movq Rq(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Max => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; maxsd xmm1, xmm2
|
|
|
|
; movq Rq(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Min => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; minsd xmm1, xmm2
|
|
|
|
; movq Rq(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Eq => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; cmpeqsd xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Ne => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; cmpneqsd xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Gt => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; vcmpgtsd xmm1, xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Ge => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; vcmpgesd xmm1, xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Lt => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; cmpltsd xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Le => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; cmplesd xmm1, xmm2
|
|
|
|
; movd Rd(left as u8), xmm1
|
|
|
|
; and Rd(left as u8), 1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::I32,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Copysign => {
|
|
|
|
Self::emit_binop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, left, right| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(left as u8)
|
|
|
|
; movq xmm2, Rq(right as u8)
|
|
|
|
; mov rax, QWORD 0x7fffffffffffffffu64 as i64
|
|
|
|
; movq xmm3, rax
|
|
|
|
; pand xmm1, xmm3
|
|
|
|
; mov rax, QWORD 0x8000000000000000u64 as i64
|
|
|
|
; movq xmm3, rax
|
|
|
|
; pand xmm2, xmm3
|
|
|
|
; por xmm1, xmm2
|
|
|
|
; movq Rq(left as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
|
|
|
WpType::F64,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Sqrt => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
|
|
|
; sqrtsd xmm1, xmm1
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F64,
|
2019-03-14 19:10:22 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Abs => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
|
|
|
; mov rax, QWORD 0x7fffffffffffffff
|
|
|
|
; movq xmm2, rax
|
2019-03-17 19:54:20 +08:00
|
|
|
; pand xmm1, xmm2
|
2019-03-14 19:10:22 +08:00
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F64,
|
2019-03-14 19:10:22 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Neg => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-14 19:10:22 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; btc Rq(reg as u8), 63
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F64,
|
2019-03-14 19:10:22 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-15 01:10:31 +08:00
|
|
|
Operator::F64Nearest => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
|
|
|
; roundsd xmm1, xmm1, 0
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F64,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Floor => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
|
|
|
; roundsd xmm1, xmm1, 1
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F64,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Ceil => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
|
|
|
; roundsd xmm1, xmm1, 2
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F64,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::F64Trunc => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
|
|
|
; roundsd xmm1, xmm1, 3
|
|
|
|
; movq Rq(reg as u8), xmm1
|
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::F64,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-17 10:27:14 +08:00
|
|
|
Operator::I32TruncUF64 => {
|
2019-03-15 01:10:31 +08:00
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-18 00:31:36 +08:00
|
|
|
Self::emit_f64_int_conv_check(assembler, reg, -1.0, 4294967296.0);
|
2019-03-17 10:27:14 +08:00
|
|
|
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttsd2si Rq(reg as u8), xmm1
|
|
|
|
; mov Rd(reg as u8), Rd(reg as u8) // keep only the low 32 bits of the result
|
2019-03-15 01:10:31 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I32,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-17 10:27:14 +08:00
|
|
|
Operator::I32TruncSF64 => {
|
2019-03-15 01:10:31 +08:00
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-18 00:31:36 +08:00
|
|
|
Self::emit_f64_int_conv_check(assembler, reg, -2147483649.0, 2147483648.0);
|
2019-03-17 10:27:14 +08:00
|
|
|
|
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttsd2si Rd(reg as u8), xmm1
|
2019-03-17 10:27:14 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I32,
|
2019-03-17 10:27:14 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64TruncUF64 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-18 00:31:36 +08:00
|
|
|
Self::emit_f64_int_conv_check(assembler, reg, -1.0, 18446744073709551616.0);
|
2019-03-17 10:27:14 +08:00
|
|
|
|
2019-03-17 19:54:20 +08:00
|
|
|
/*
|
|
|
|
LCPI0_0:
|
|
|
|
.quad 4890909195324358656 ## double 9.2233720368547758E+18
|
|
|
|
|
|
|
|
movsd LCPI0_0(%rip), %xmm1 ## xmm1 = mem[0],zero
|
|
|
|
movapd %xmm0, %xmm2
|
|
|
|
subsd %xmm1, %xmm2
|
|
|
|
cvttsd2si %xmm2, %rax
|
|
|
|
movabsq $-9223372036854775808, %rcx ## imm = 0x8000000000000000
|
|
|
|
xorq %rax, %rcx
|
|
|
|
cvttsd2si %xmm0, %rax
|
|
|
|
ucomisd %xmm1, %xmm0
|
|
|
|
cmovaeq %rcx, %rax
|
|
|
|
*/
|
|
|
|
|
2019-03-17 10:27:14 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-03-17 21:27:19 +08:00
|
|
|
; movq xmm5, r15
|
|
|
|
; mov r15, QWORD 4890909195324358656u64 as i64 // f64 bit pattern of 9.2233720368547758E+18 (2^63)
|
|
|
|
; movq xmm1, r15
|
2019-03-17 19:54:20 +08:00
|
|
|
; movq xmm2, Rq(reg as u8)
|
|
|
|
; movq xmm3, Rq(reg as u8)
|
|
|
|
; subsd xmm2, xmm1
|
|
|
|
; cvttsd2si Rq(reg as u8), xmm2
|
2019-03-17 21:27:19 +08:00
|
|
|
; mov r15, QWORD 0x8000000000000000u64 as i64
|
|
|
|
; xor r15, Rq(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttsd2si Rq(reg as u8), xmm3
|
|
|
|
; ucomisd xmm3, xmm1
|
2019-03-17 21:27:19 +08:00
|
|
|
; cmovae Rq(reg as u8), r15
|
|
|
|
; movq r15, xmm5
|
2019-03-17 10:27:14 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I64,
|
2019-03-17 10:27:14 +08:00
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Operator::I64TruncSF64 => {
|
|
|
|
Self::emit_unop(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
2019-03-17 10:54:50 +08:00
|
|
|
|assembler, _value_stack, reg| {
|
2019-03-17 10:27:14 +08:00
|
|
|
Self::emit_f64_int_conv_check(
|
|
|
|
assembler,
|
|
|
|
reg,
|
|
|
|
-9223372036854777856.0,
|
|
|
|
9223372036854775808.0,
|
|
|
|
);
|
|
|
|
|
2019-03-15 01:10:31 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
|
|
|
; movq xmm1, Rq(reg as u8)
|
2019-03-17 19:54:20 +08:00
|
|
|
; cvttsd2si Rq(reg as u8), xmm1
|
2019-03-15 01:10:31 +08:00
|
|
|
);
|
|
|
|
},
|
|
|
|
WpType::F64,
|
2019-03-18 00:31:36 +08:00
|
|
|
WpType::I64,
|
2019-03-15 01:10:31 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-13 18:23:50 +08:00
|
|
|
Operator::Nop => {}
|
2019-03-14 10:30:24 +08:00
|
|
|
Operator::MemorySize { reserved } => {
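// memory.size dispatches to a native trampoline chosen by whether the
// memory is local or imported and by its MemoryType; dynamic memories are
// not implemented in this backend yet.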
|
|
|
|
let memory_index = MemoryIndex::new(reserved as usize);
|
|
|
|
let label = match memory_index.local_or_import(module_info) {
|
|
|
|
LocalOrImport::Local(local_mem_index) => {
|
|
|
|
let mem_desc = &module_info.memories[local_mem_index];
|
|
|
|
match mem_desc.memory_type() {
|
2019-03-17 10:27:14 +08:00
|
|
|
//MemoryType::Dynamic => self.native_trampolines.memory_size_dynamic_local,
|
|
|
|
MemoryType::Dynamic => unimplemented!(),
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryType::Static => self.native_trampolines.memory_size_static_local,
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryType::SharedStatic => {
|
|
|
|
self.native_trampolines.memory_size_shared_local
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
LocalOrImport::Import(import_mem_index) => {
|
|
|
|
let mem_desc = &module_info.imported_memories[import_mem_index].1;
|
|
|
|
match mem_desc.memory_type() {
|
2019-03-17 10:27:14 +08:00
|
|
|
//MemoryType::Dynamic => self.native_trampolines.memory_size_dynamic_import,
|
|
|
|
MemoryType::Dynamic => unimplemented!(),
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryType::Static => self.native_trampolines.memory_size_static_import,
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryType::SharedStatic => {
|
|
|
|
self.native_trampolines.memory_size_shared_import
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
2019-03-18 00:31:36 +08:00
|
|
|
Self::emit_call_raw(assembler, &mut self.value_stack, label, &[], &[WpType::I32])?;
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
|
|
|
Operator::MemoryGrow { reserved } => {
|
|
|
|
let memory_index = MemoryIndex::new(reserved as usize);
|
|
|
|
let label = match memory_index.local_or_import(module_info) {
|
|
|
|
LocalOrImport::Local(local_mem_index) => {
|
|
|
|
let mem_desc = &module_info.memories[local_mem_index];
|
|
|
|
match mem_desc.memory_type() {
|
2019-03-17 10:27:14 +08:00
|
|
|
//MemoryType::Dynamic => self.native_trampolines.memory_grow_dynamic_local,
|
|
|
|
MemoryType::Dynamic => unimplemented!(),
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryType::Static => self.native_trampolines.memory_grow_static_local,
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryType::SharedStatic => {
|
|
|
|
self.native_trampolines.memory_grow_shared_local
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
LocalOrImport::Import(import_mem_index) => {
|
|
|
|
let mem_desc = &module_info.imported_memories[import_mem_index].1;
|
|
|
|
match mem_desc.memory_type() {
|
2019-03-17 10:27:14 +08:00
|
|
|
//MemoryType::Dynamic => self.native_trampolines.memory_grow_dynamic_import,
|
|
|
|
MemoryType::Dynamic => unimplemented!(),
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryType::Static => self.native_trampolines.memory_grow_static_import,
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryType::SharedStatic => {
|
|
|
|
self.native_trampolines.memory_grow_shared_import
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
Self::emit_call_raw(
|
|
|
|
assembler,
|
|
|
|
&mut self.value_stack,
|
|
|
|
label,
|
|
|
|
&[WpType::I32],
|
2019-03-18 00:31:36 +08:00
|
|
|
&[WpType::I32],
|
2019-03-14 10:30:24 +08:00
|
|
|
)?;
|
|
|
|
}
|
2019-03-13 18:23:50 +08:00
|
|
|
_ => {
|
|
|
|
panic!("unimplemented operator: {:?}", op);
|
2019-03-18 00:31:36 +08:00
|
|
|
}
|
2019-02-12 23:15:57 +08:00
|
|
|
}
|
2019-02-12 00:52:17 +08:00
|
|
|
Ok(())
|
|
|
|
}
|
2019-02-20 22:56:32 +08:00
|
|
|
|
2019-02-12 00:52:17 +08:00
|
|
|
fn finalize(&mut self) -> Result<(), CodegenError> {
|
2019-02-12 23:15:57 +08:00
|
|
|
let assembler = self.assembler.as_mut().unwrap();
|
2019-02-20 22:56:32 +08:00
|
|
|
|
2019-02-12 23:15:57 +08:00
|
|
|
dynasm!(
|
|
|
|
assembler
|
2019-02-21 21:14:10 +08:00
|
|
|
; mov rsp, rbp
|
|
|
|
; pop rbp
|
|
|
|
; ret
|
2019-02-20 22:56:32 +08:00
|
|
|
);
|
|
|
|
|
2019-02-20 23:21:33 +08:00
|
|
|
if self.value_stack.values.len() != 0
|
|
|
|
|| self.control_stack.as_ref().unwrap().frames.len() != 0
|
|
|
|
{
|
|
|
|
return Err(CodegenError {
|
|
|
|
message: "control/value stack not empty at end of function",
|
|
|
|
});
|
2019-02-20 22:56:32 +08:00
|
|
|
}
|
|
|
|
|
2019-02-12 00:52:17 +08:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn get_size_of_type(ty: &WpType) -> Result<usize, CodegenError> {
|
|
|
|
match *ty {
|
|
|
|
WpType::I32 | WpType::F32 => Ok(4),
|
|
|
|
WpType::I64 | WpType::F64 => Ok(8),
|
|
|
|
_ => Err(CodegenError {
|
|
|
|
message: "unknown type",
|
|
|
|
}),
|
|
|
|
}
|
|
|
|
}
|
2019-02-13 20:04:10 +08:00
|
|
|
|
|
|
|
fn is_dword(n: usize) -> bool {
|
|
|
|
n == 4
|
|
|
|
}
|
2019-02-15 02:21:52 +08:00
|
|
|
|
2019-02-24 00:52:32 +08:00
|
|
|
fn type_to_wp_type(ty: Type) -> WpType {
|
|
|
|
match ty {
|
|
|
|
Type::I32 => WpType::I32,
|
|
|
|
Type::I64 => WpType::I64,
|
|
|
|
Type::F32 => WpType::F32,
|
|
|
|
Type::F64 => WpType::F64,
|
|
|
|
}
|
|
|
|
}
|
2019-02-26 20:56:10 +08:00
|
|
|
|
2019-03-08 01:31:37 +08:00
|
|
|
unsafe extern "C" fn invoke_import(
|
2019-03-09 00:07:13 +08:00
|
|
|
_unused: usize,
|
|
|
|
import_id: usize,
|
2019-03-08 01:31:37 +08:00
|
|
|
stack_top: *mut u8,
|
|
|
|
stack_base: *mut u8,
|
2019-03-09 00:07:13 +08:00
|
|
|
vmctx: *mut vm::Ctx,
|
2019-03-17 10:54:50 +08:00
|
|
|
_memory_base: *mut u8,
|
2019-03-08 01:31:37 +08:00
|
|
|
) -> u64 {
|
2019-03-09 00:07:13 +08:00
|
|
|
let vmctx: &mut vm::Ctx = &mut *vmctx;
|
|
|
|
let import = (*vmctx.imported_funcs.offset(import_id as isize)).func;
|
|
|
|
|
2019-03-17 10:54:50 +08:00
|
|
|
/*let n_args = (stack_base as usize - stack_top as usize) / 8;
|
2019-03-17 03:07:27 +08:00
|
|
|
|
2019-03-17 10:54:50 +08:00
|
|
|
println!("Calling import: {:?} with vmctx = {:?}, n_args = {}",
|
2019-03-17 03:07:27 +08:00
|
|
|
import,
|
|
|
|
vmctx as *mut _,
|
|
|
|
n_args,
|
|
|
|
);
|
|
|
|
|
|
|
|
for i in 0..n_args {
|
|
|
|
println!("Arg: {:?}", * ((stack_top as usize + i * 8) as *const *const ()));
|
|
|
|
}*/
|
2019-03-13 18:23:50 +08:00
|
|
|
|
2019-03-09 00:07:13 +08:00
|
|
|
CONSTRUCT_STACK_AND_CALL_NATIVE(stack_top, stack_base, vmctx, import)
|
2019-03-08 01:31:37 +08:00
|
|
|
}
|
2019-03-09 02:57:23 +08:00
|
|
|
|
2019-03-14 10:30:24 +08:00
|
|
|
#[repr(u64)]
|
|
|
|
#[derive(Copy, Clone, Debug)]
|
|
|
|
enum CallIndirectLocalOrImport {
|
|
|
|
Local,
|
2019-03-18 00:31:36 +08:00
|
|
|
Import,
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
|
|
|
|
2019-03-09 02:57:23 +08:00
|
|
|
unsafe extern "C" fn call_indirect(
|
|
|
|
sig_index: usize,
|
2019-03-14 10:30:24 +08:00
|
|
|
local_or_import: CallIndirectLocalOrImport,
|
2019-03-09 02:57:23 +08:00
|
|
|
mut stack_top: *mut u8,
|
|
|
|
stack_base: *mut u8,
|
|
|
|
vmctx: *mut vm::Ctx,
|
|
|
|
memory_base: *mut u8,
|
|
|
|
) -> u64 {
|
|
|
|
let elem_index = *(stack_top as *mut u32) as usize;
|
|
|
|
stack_top = stack_top.offset(8);
|
|
|
|
assert!(stack_top as usize <= stack_base as usize);
|
|
|
|
|
2019-03-14 10:30:24 +08:00
|
|
|
let table: &LocalTable = match local_or_import {
|
|
|
|
CallIndirectLocalOrImport::Local => &*(*(*vmctx).tables),
|
|
|
|
CallIndirectLocalOrImport::Import => &*(*(*vmctx).imported_tables),
|
2019-03-18 00:31:36 +08:00
|
|
|
};
|
2019-03-09 02:57:23 +08:00
|
|
|
if elem_index >= table.count as usize {
|
2019-03-17 10:27:14 +08:00
|
|
|
eprintln!("element index out of bounds");
|
2019-03-17 10:54:50 +08:00
|
|
|
protect_unix::trigger_trap();
|
2019-03-09 02:57:23 +08:00
|
|
|
}
|
2019-03-12 11:31:55 +08:00
|
|
|
let anyfunc = &*(table.base as *mut vm::Anyfunc).offset(elem_index as isize);
|
2019-03-09 02:57:23 +08:00
|
|
|
let ctx: &X64ExecutionContext =
|
|
|
|
&*CURRENT_EXECUTION_CONTEXT.with(|x| *x.borrow().last().unwrap());
|
|
|
|
|
2019-03-17 10:27:14 +08:00
|
|
|
let func_index = match anyfunc.func_index {
|
|
|
|
Some(x) => x,
|
|
|
|
None => {
|
|
|
|
eprintln!("empty table entry");
|
2019-03-17 10:54:50 +08:00
|
|
|
protect_unix::trigger_trap();
|
2019-03-17 10:27:14 +08:00
|
|
|
}
|
|
|
|
};
|
2019-03-09 02:57:23 +08:00
|
|
|
|
2019-03-12 11:31:55 +08:00
|
|
|
/*println!(
|
|
|
|
"SIG INDEX = {}, FUNC INDEX = {:?}, ELEM INDEX = {}",
|
|
|
|
sig_index, func_index, elem_index
|
|
|
|
);*/
|
2019-03-09 02:57:23 +08:00
|
|
|
|
|
|
|
if ctx.signatures[SigIndex::new(sig_index)]
|
2019-03-12 11:31:55 +08:00
|
|
|
!= ctx.signatures[ctx.function_signatures[func_index]]
|
2019-03-09 02:57:23 +08:00
|
|
|
{
|
2019-03-17 10:27:14 +08:00
|
|
|
eprintln!("signature mismatch");
|
2019-03-17 10:54:50 +08:00
|
|
|
protect_unix::trigger_trap();
|
2019-03-09 02:57:23 +08:00
|
|
|
}
|
|
|
|
|
2019-03-12 11:31:55 +08:00
|
|
|
let func = ctx.function_pointers[func_index.index() as usize].0;
|
2019-03-09 02:57:23 +08:00
|
|
|
CALL_WASM(
|
|
|
|
stack_top,
|
|
|
|
stack_base as usize - stack_top as usize,
|
|
|
|
func as _,
|
|
|
|
memory_base,
|
|
|
|
vmctx,
|
|
|
|
) as u64
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
|
|
|
|
#[repr(u64)]
|
|
|
|
#[derive(Copy, Clone, Debug)]
|
|
|
|
enum MemoryKind {
|
|
|
|
DynamicLocal,
|
|
|
|
StaticLocal,
|
|
|
|
SharedLocal,
|
|
|
|
DynamicImport,
|
|
|
|
StaticImport,
|
|
|
|
SharedImport,
|
|
|
|
}
|
|
|
|
|
|
|
|
unsafe extern "C" fn _memory_size(
|
|
|
|
op: MemoryKind,
|
|
|
|
index: usize,
|
2019-03-17 10:54:50 +08:00
|
|
|
_stack_top: *mut u8,
|
|
|
|
_stack_base: *mut u8,
|
2019-03-14 10:30:24 +08:00
|
|
|
vmctx: *mut vm::Ctx,
|
2019-03-17 10:54:50 +08:00
|
|
|
_memory_base: *mut u8,
|
2019-03-14 10:30:24 +08:00
|
|
|
) -> u64 {
|
|
|
|
use wasmer_runtime_core::vmcalls;
|
|
|
|
let ret = match op {
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryKind::DynamicLocal => {
|
|
|
|
vmcalls::local_dynamic_memory_size(&*vmctx, LocalMemoryIndex::new(index))
|
|
|
|
}
|
|
|
|
MemoryKind::StaticLocal => {
|
|
|
|
vmcalls::local_static_memory_size(&*vmctx, LocalMemoryIndex::new(index))
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryKind::SharedLocal => unreachable!(),
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryKind::DynamicImport => {
|
|
|
|
vmcalls::imported_dynamic_memory_size(&*vmctx, ImportedMemoryIndex::new(index))
|
|
|
|
}
|
|
|
|
MemoryKind::StaticImport => {
|
|
|
|
vmcalls::imported_static_memory_size(&*vmctx, ImportedMemoryIndex::new(index))
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryKind::SharedImport => unreachable!(),
|
|
|
|
};
|
|
|
|
ret.0 as u32 as u64
|
|
|
|
}
|
|
|
|
|
|
|
|
unsafe extern "C" fn _memory_grow(
|
|
|
|
op: MemoryKind,
|
|
|
|
index: usize,
|
2019-03-17 10:54:50 +08:00
|
|
|
stack_top: *mut u8,
|
2019-03-14 10:30:24 +08:00
|
|
|
stack_base: *mut u8,
|
|
|
|
vmctx: *mut vm::Ctx,
|
2019-03-17 10:54:50 +08:00
|
|
|
_memory_base: *mut u8,
|
2019-03-14 10:30:24 +08:00
|
|
|
) -> u64 {
|
|
|
|
use wasmer_runtime_core::vmcalls;
|
|
|
|
assert_eq!(stack_base as usize - stack_top as usize, 8);
|
|
|
|
let pages = Pages(*(stack_top as *mut u32));
|
|
|
|
let ret = match op {
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryKind::DynamicLocal => {
|
|
|
|
vmcalls::local_dynamic_memory_grow(&mut *vmctx, LocalMemoryIndex::new(index), pages)
|
|
|
|
}
|
|
|
|
MemoryKind::StaticLocal => {
|
|
|
|
vmcalls::local_static_memory_grow(&mut *vmctx, LocalMemoryIndex::new(index), pages)
|
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryKind::SharedLocal => unreachable!(),
|
2019-03-18 00:31:36 +08:00
|
|
|
MemoryKind::DynamicImport => vmcalls::imported_dynamic_memory_grow(
|
|
|
|
&mut *vmctx,
|
|
|
|
ImportedMemoryIndex::new(index),
|
|
|
|
pages,
|
|
|
|
),
|
|
|
|
MemoryKind::StaticImport => vmcalls::imported_static_memory_grow(
|
|
|
|
&mut *vmctx,
|
|
|
|
ImportedMemoryIndex::new(index),
|
|
|
|
pages,
|
|
|
|
),
|
2019-03-14 10:30:24 +08:00
|
|
|
MemoryKind::SharedImport => unreachable!(),
|
|
|
|
};
|
|
|
|
ret as u32 as u64
|
|
|
|
}
|