2019-03-20 01:52:00 +08:00
|
|
|
#![allow(clippy::forget_copy)] // Used by dynasm.
|
|
|
|
|
2019-02-12 00:52:17 +08:00
|
|
|
use super::codegen::*;
|
2019-03-18 00:31:36 +08:00
|
|
|
use crate::protect_unix;
|
2019-02-24 00:52:32 +08:00
|
|
|
use byteorder::{ByteOrder, LittleEndian};
|
2019-02-15 02:21:52 +08:00
|
|
|
use dynasmrt::{
|
|
|
|
x64::Assembler, AssemblyOffset, DynamicLabel, DynasmApi, DynasmLabelApi, ExecutableBuffer,
|
|
|
|
};
|
2019-03-09 02:57:23 +08:00
|
|
|
use std::cell::RefCell;
|
|
|
|
use std::ptr::NonNull;
|
2019-03-12 20:59:10 -05:00
|
|
|
use std::{any::Any, collections::HashMap, sync::Arc};
|
2019-02-15 02:21:52 +08:00
|
|
|
use wasmer_runtime_core::{
|
2019-03-17 10:54:50 +08:00
|
|
|
backend::{FuncResolver, ProtectedCaller, Token, UserTrapper},
|
|
|
|
error::{RuntimeError, RuntimeResult},
|
2019-03-18 00:31:36 +08:00
|
|
|
memory::MemoryType,
|
2019-03-17 10:54:50 +08:00
|
|
|
module::{ModuleInfo, ModuleInner},
|
2019-02-15 02:21:52 +08:00
|
|
|
structures::{Map, TypedIndex},
|
|
|
|
types::{
|
2019-03-18 00:31:36 +08:00
|
|
|
FuncIndex, FuncSig, ImportedMemoryIndex, LocalFuncIndex, LocalGlobalIndex,
|
|
|
|
LocalMemoryIndex, LocalOrImport, MemoryIndex, SigIndex, Type, Value,
|
2019-02-15 02:21:52 +08:00
|
|
|
},
|
2019-03-14 10:30:24 +08:00
|
|
|
units::Pages,
|
2019-03-18 00:31:36 +08:00
|
|
|
vm::{self, ImportBacking, LocalGlobal, LocalMemory, LocalTable},
|
2019-02-15 02:21:52 +08:00
|
|
|
};
|
2019-02-12 00:52:17 +08:00
|
|
|
use wasmparser::{Operator, Type as WpType};
|
2019-04-01 20:33:33 +08:00
|
|
|
use crate::machine::*;
|
|
|
|
use crate::emitter_x64::*;
|
2019-03-09 02:57:23 +08:00
|
|
|
|
2019-02-24 00:52:32 +08:00
|
|
|
lazy_static! {
    /// Trampoline that builds a Wasm call frame and jumps into generated code.
    ///
    /// Arguments (System V AMD64 integer-register order):
    /// - `stack_top`:  lowest address of the argument buffer (exclusive lower bound).
    /// - `stack_base`: one past the highest element; the buffer is consumed
    ///   downward from here, 8 bytes at a time (the caller pushes params reversed).
    /// - `ctx`:        the `vm::Ctx` pointer, forwarded as the callee's first argument.
    /// - `target`:     entry point of the generated Wasm function.
    ///
    /// Up to five argument values are moved into rsi, rdx, rcx, r8, r9 (rdi is
    /// reserved for `ctx`); any remaining values are copied onto the native stack.
    /// Returns the callee's raw u64 return value (whatever it left in rax).
    static ref CONSTRUCT_STACK_AND_CALL_WASM: unsafe extern "C" fn (stack_top: *const u8, stack_base: *const u8, ctx: *mut vm::Ctx, target: *const vm::Func) -> u64 = {
        let mut assembler = Assembler::new().unwrap();
        let offset = assembler.offset();
        dynasm!(
            assembler
            // Save callee-saved registers (r11 is saved too so the pop sequence
            // below mirrors this push sequence) and establish a frame pointer.
            ; push r15
            ; push r14
            ; push r13
            ; push r12
            ; push r11
            ; push rbp
            ; mov rbp, rsp

            // Stash the C arguments in callee-saved registers:
            // r15 = stack_top, r14 = cursor (starts at stack_base),
            // r13 = ctx, r12 = target.
            ; mov r15, rdi
            ; mov r14, rsi
            ; mov r13, rdx
            ; mov r12, rcx

            ; mov rdi, r13 // ctx is always the callee's first argument

            // Pop up to five values into the remaining integer argument
            // registers; jump to stack_ready as soon as the cursor passes
            // stack_top (buffer exhausted).
            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready

            ; mov rsi, [r14]
            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready

            ; mov rdx, [r14]
            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready

            ; mov rcx, [r14]
            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready

            ; mov r8, [r14]
            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready

            ; mov r9, [r14]
            ; sub r14, 8
            ; cmp r14, r15
            ; jb >stack_ready

            // More than six arguments: reserve space for the remainder
            // (r14 - r15 bytes, plus slack) and align rsp down to 16 bytes,
            // then copy the remaining values onto the native stack.
            ; mov rax, r14
            ; sub rax, r15
            ; sub rsp, rax
            ; sub rsp, 8
            ; mov rax, QWORD 0xfffffffffffffff0u64 as i64
            ; and rsp, rax
            ; mov rax, rsp
            ; loop_begin:
            ; mov r11, [r14]
            ; mov [rax], r11
            ; sub r14, 8
            ; add rax, 8
            ; cmp r14, r15
            ; jb >stack_ready
            ; jmp <loop_begin

            ; stack_ready:
            // Re-align rsp (required by the SysV ABI at call sites) and call in.
            ; mov rax, QWORD 0xfffffffffffffff0u64 as i64
            ; and rsp, rax
            ; call r12

            // Tear down the frame and restore saved registers (reverse of the
            // push sequence above).
            ; mov rsp, rbp
            ; pop rbp
            ; pop r11
            ; pop r12
            ; pop r13
            ; pop r14
            ; pop r15
            ; ret
        );
        let buf = assembler.finalize().unwrap();
        // Transmute the buffer's entry offset into a callable fn pointer.
        let ret = unsafe { ::std::mem::transmute(buf.ptr(offset)) };
        // Leak the buffer on purpose: the fn pointer must stay valid for the
        // life of the process (this is why clippy::forget_copy is allowed).
        ::std::mem::forget(buf);
        ret
    };
}
|
|
|
|
|
2019-02-12 00:52:17 +08:00
|
|
|
/// Module-level code generator: accumulates one `X64FunctionCode` per local
/// function plus the module-wide state shared between them.
pub struct X64ModuleCodeGenerator {
    /// Per-function code generators, in declaration order.
    functions: Vec<X64FunctionCode>,
    /// All function signatures in the module; populated by `feed_signatures`.
    signatures: Option<Arc<Map<SigIndex, FuncSig>>>,
    /// Function-index -> signature-index map; populated by `feed_function_signatures`.
    function_signatures: Option<Arc<Map<FuncIndex, SigIndex>>>,
    /// Function index -> (entry label, entry offset once emitted). Held here
    /// only until the first function takes ownership (see `next_function`).
    function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
    /// The single assembler, held here only before the first function is begun.
    assembler: Option<Assembler>,
    /// Number of imported functions seen so far; local function indices are
    /// offset by this amount.
    func_import_count: usize,
}
|
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
/// Classifies an entry on the value stack: whether its `Location` aliases a
/// Wasm local's storage (must not be freed when popped) or is a temporary
/// owned by the value stack itself.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum LocalOrTemp {
    /// The location belongs to a local variable.
    Local,
    /// The location is a scratch slot/register owned by the value stack.
    Temp
}
|
|
|
|
|
2019-02-12 00:52:17 +08:00
|
|
|
/// Per-function code generator state for the x86-64 singlepass backend.
pub struct X64FunctionCode {
    /// Shared view of all signatures in the module.
    signatures: Arc<Map<SigIndex, FuncSig>>,
    /// Shared function-index -> signature-index map.
    function_signatures: Arc<Map<FuncIndex, SigIndex>>,

    /// Offset of this function's entry point inside the shared assembler.
    begin_offset: AssemblyOffset,
    /// The module's single assembler; only the most recently begun function
    /// holds `Some` (it is `take`n when the next function starts).
    assembler: Option<Assembler>,
    /// Function index -> (entry label, entry offset); threaded through
    /// functions the same way as `assembler`.
    function_labels: Option<HashMap<usize, (DynamicLabel, Option<AssemblyOffset>)>>,
    /// Collected br_table jump tables; entries are code offsets until
    /// `finalize` patches them into absolute addresses.
    br_table_data: Option<Vec<Vec<usize>>>,
    /// Declared result types of this function.
    returns: Vec<WpType>,
    /// Storage location of each local (params first, then declared locals).
    locals: Vec<Location>,
    /// Stack slot holding the `vm::Ctx` pointer; set in `begin_body`.
    vmctx_location: Option<Location>,
    /// Number of Wasm parameters.
    num_params: usize,
    /// Total number of locals (parameters included).
    num_locals: usize,
    /// Abstract Wasm value stack with ownership tags for each entry.
    value_stack: Vec<(Location, LocalOrTemp)>,
    /// Open block/loop/if frames.
    control_stack: Vec<ControlFrame>,
    /// Register/stack allocator state.
    machine: Machine,
    /// Depth of nested blocks currently being skipped as unreachable code.
    unreachable_depth: usize,
}
|
|
|
|
|
2019-03-09 02:57:23 +08:00
|
|
|
/// Uninhabited marker type: a `FuncPtr` points at generated machine code,
/// never at a Rust value of this type.
enum FuncPtrInner {}
/// Raw pointer to a compiled function's entry point.
#[repr(transparent)]
#[derive(Copy, Clone, Debug)]
struct FuncPtr(*const FuncPtrInner);
// SAFETY: the pointer targets code inside an `ExecutableBuffer` that is never
// mutated after `finalize`, so sharing/sending the address between threads is
// sound. NOTE(review): this relies on the buffer outliving every FuncPtr —
// confirm the ExecutableBuffer in X64ExecutionContext is kept alive as long
// as any resolver clone of these pointers.
unsafe impl Send for FuncPtr {}
unsafe impl Sync for FuncPtr {}
|
|
|
|
|
2019-02-15 02:21:52 +08:00
|
|
|
/// Finalized compilation result: the executable code plus the metadata needed
/// to call into it.
pub struct X64ExecutionContext {
    /// The finalized machine code; owns the memory the function pointers target.
    code: ExecutableBuffer,
    /// Per-function metadata (entry offsets, return types, ...).
    functions: Vec<X64FunctionCode>,
    /// All signatures in the module.
    signatures: Arc<Map<SigIndex, FuncSig>>,
    /// Function-index -> signature-index map.
    function_signatures: Arc<Map<FuncIndex, SigIndex>>,
    /// Entry pointer for each function, indexed by function index.
    function_pointers: Vec<FuncPtr>,
    /// Patched br_table jump tables; kept alive because generated code holds
    /// raw pointers into them (hence the underscore: never read from Rust).
    _br_table_data: Vec<Vec<usize>>,
    /// Number of imported functions; local indices are offset by this.
    func_import_count: usize,
}
|
|
|
|
|
2019-03-17 03:07:27 +08:00
|
|
|
/// `FuncResolver` implementation handed to the runtime; a snapshot of the
/// execution context's function entry pointers.
pub struct X64RuntimeResolver {
    /// Entry pointer per function index (imports included).
    function_pointers: Vec<FuncPtr>,
}
|
|
|
|
|
|
|
|
/// State for one open control-flow construct (block / loop / if).
#[derive(Debug)]
pub struct ControlFrame {
    /// Branch target for `br`-family instructions aiming at this frame.
    pub label: DynamicLabel,
    /// True for `loop` frames — presumably branches re-enter at the top
    /// rather than exit at the bottom; confirm against the operator handlers.
    pub loop_like: bool,
    /// Which arm of an `if` this frame is currently in, if any.
    pub if_else: IfElseState,
    /// Result types this frame yields when exited.
    pub returns: Vec<WpType>,
    /// `value_stack` length when the frame was opened; used to truncate on exit.
    pub value_stack_depth: usize,
}
|
|
|
|
|
|
|
|
/// Tracks which part of an `if`/`else` construct a control frame is in.
#[derive(Debug, Copy, Clone)]
pub enum IfElseState {
    /// Not an `if` frame (plain block or loop).
    None,
    /// Inside the `then` arm; the label marks the start of the `else` arm.
    If(DynamicLabel),
    /// Inside the `else` arm.
    Else,
}
|
|
|
|
|
|
|
|
impl X64ExecutionContext {
|
2019-03-18 00:31:36 +08:00
|
|
|
fn get_runtime_resolver(
|
|
|
|
&self,
|
|
|
|
module_info: &ModuleInfo,
|
|
|
|
) -> Result<X64RuntimeResolver, CodegenError> {
|
2019-03-17 03:07:27 +08:00
|
|
|
Ok(X64RuntimeResolver {
|
2019-04-01 20:33:33 +08:00
|
|
|
function_pointers: self.function_pointers.clone(),
|
2019-03-17 03:07:27 +08:00
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl FuncResolver for X64RuntimeResolver {
|
2019-03-09 02:57:23 +08:00
|
|
|
fn get(
|
|
|
|
&self,
|
|
|
|
_module: &ModuleInner,
|
|
|
|
_local_func_index: LocalFuncIndex,
|
|
|
|
) -> Option<NonNull<vm::Func>> {
|
2019-04-01 20:33:33 +08:00
|
|
|
NonNull::new(self.function_pointers[_local_func_index.index() as usize].0 as *mut vm::Func)
|
2019-03-09 02:57:23 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-15 02:21:52 +08:00
|
|
|
impl ProtectedCaller for X64ExecutionContext {
    /// Calls a compiled local function with the given parameters.
    ///
    /// Parameters are flattened to raw u64s (floats bit-cast) in *reverse*
    /// order — the trampoline consumes the buffer from its high end — and the
    /// single raw u64 result is decoded using the function's last declared
    /// return type. Multi-value returns are not supported here.
    fn call(
        &self,
        _module: &ModuleInner,
        _func_index: FuncIndex,
        _params: &[Value],
        _import_backing: &ImportBacking,
        _vmctx: *mut vm::Ctx,
        _: Token,
    ) -> RuntimeResult<Vec<Value>> {
        // Translate the global function index into a local one.
        let index = _func_index.index() - self.func_import_count;
        let ptr = self.code.ptr(self.functions[index].begin_offset);
        let return_ty = self.functions[index].returns.last().cloned();
        // Flatten params to raw 64-bit words, reversed for the trampoline.
        let buffer: Vec<u64> = _params.iter().rev().map(|x| {
            match *x {
                Value::I32(x) => x as u32 as u64,
                Value::I64(x) => x as u64,
                Value::F32(x) => f32::to_bits(x) as u64,
                Value::F64(x) => f64::to_bits(x),
            }
        }).collect();
        let ret = unsafe {
            // The trampoline receives [start, end) bounds of the buffer and
            // pops values from the end; see CONSTRUCT_STACK_AND_CALL_WASM.
            CONSTRUCT_STACK_AND_CALL_WASM(
                buffer.as_ptr() as *const u8,
                buffer.as_ptr().offset(buffer.len() as isize) as *const u8,
                _vmctx,
                ptr as _,
            )
        };
        // Decode the raw return word according to the declared result type.
        Ok(if let Some(ty) = return_ty {
            vec![match ty {
                WpType::I32 => Value::I32(ret as i32),
                WpType::I64 => Value::I64(ret as i64),
                WpType::F32 => Value::F32(f32::from_bits(ret as u32)),
                WpType::F64 => Value::F64(f64::from_bits(ret as u64)),
                _ => unreachable!(),
            }]
        } else {
            vec![]
        })
    }

    /// Returns the early-trap handler for this backend.
    /// Currently a stub that panics instead of raising a proper runtime trap.
    fn get_early_trapper(&self) -> Box<dyn UserTrapper> {
        pub struct Trapper;

        impl UserTrapper for Trapper {
            unsafe fn do_early_trap(&self, _data: Box<Any>) -> ! {
                // TODO: propagate `_data` as a real trap instead of panicking.
                panic!("do_early_trap");
            }
        }

        Box::new(Trapper)
    }
}
|
|
|
|
|
2019-02-12 00:52:17 +08:00
|
|
|
impl X64ModuleCodeGenerator {
|
|
|
|
pub fn new() -> X64ModuleCodeGenerator {
|
2019-02-26 20:56:10 +08:00
|
|
|
let mut assembler = Assembler::new().unwrap();
|
|
|
|
|
|
|
|
X64ModuleCodeGenerator {
|
|
|
|
functions: vec![],
|
|
|
|
signatures: None,
|
|
|
|
function_signatures: None,
|
2019-03-08 01:31:37 +08:00
|
|
|
function_labels: Some(HashMap::new()),
|
2019-02-26 20:56:10 +08:00
|
|
|
assembler: Some(assembler),
|
2019-03-08 01:31:37 +08:00
|
|
|
func_import_count: 0,
|
2019-02-26 20:56:10 +08:00
|
|
|
}
|
2019-02-12 00:52:17 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-03-18 00:31:36 +08:00
|
|
|
impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, X64RuntimeResolver>
    for X64ModuleCodeGenerator
{
    /// This backend has no module-level preconditions to check.
    fn check_precondition(&mut self, _module_info: &ModuleInfo) -> Result<(), CodegenError> {
        Ok(())
    }

    /// Begins code generation for the next local function and returns its
    /// `X64FunctionCode`.
    ///
    /// The single `Assembler` and the shared label / br-table state are
    /// threaded through the function list: they are taken out of the most
    /// recent `X64FunctionCode` (or out of `self` for the first function)
    /// and moved into the new entry, so exactly one owner exists at a time.
    fn next_function(&mut self) -> Result<&mut X64FunctionCode, CodegenError> {
        let (mut assembler, mut function_labels, br_table_data) = match self.functions.last_mut() {
            Some(x) => (
                x.assembler.take().unwrap(),
                x.function_labels.take().unwrap(),
                x.br_table_data.take().unwrap(),
            ),
            None => (
                self.assembler.take().unwrap(),
                self.function_labels.take().unwrap(),
                vec![],
            ),
        };
        let begin_offset = assembler.offset();
        // Global function index = local position + number of imports.
        // The label may already exist if earlier code referenced this function.
        let begin_label_info = function_labels
            .entry(self.functions.len() + self.func_import_count)
            .or_insert_with(|| (assembler.new_dynamic_label(), None));

        begin_label_info.1 = Some(begin_offset);
        let begin_label = begin_label_info.0;

        // Bind the function's entry label at the current offset.
        dynasm!(
            assembler
            ; => begin_label
            //; int 3
        );
        let code = X64FunctionCode {
            signatures: self.signatures.as_ref().unwrap().clone(),
            function_signatures: self.function_signatures.as_ref().unwrap().clone(),

            begin_offset: begin_offset,
            assembler: Some(assembler),
            function_labels: Some(function_labels),
            br_table_data: Some(br_table_data),
            returns: vec![],
            locals: vec![],
            num_params: 0,
            num_locals: 0,
            vmctx_location: None,
            value_stack: vec! [],
            control_stack: vec! [],
            machine: Machine::new(),
            unreachable_depth: 0,
        };
        self.functions.push(code);
        Ok(self.functions.last_mut().unwrap())
    }

    /// Finalizes the module: assembles all code, patches br_table entries to
    /// absolute addresses, resolves every function label to an entry pointer,
    /// and builds the execution context plus its runtime resolver.
    fn finalize(
        mut self,
        module_info: &ModuleInfo,
    ) -> Result<(X64ExecutionContext, X64RuntimeResolver), CodegenError> {
        // The last function holds the assembler and br-table state.
        let (assembler, mut br_table_data) = match self.functions.last_mut() {
            Some(x) => (x.assembler.take().unwrap(), x.br_table_data.take().unwrap()),
            None => {
                return Err(CodegenError {
                    message: "no function",
                });
            }
        };
        let output = assembler.finalize().unwrap();

        // br_table entries were recorded as code offsets; rewrite them as
        // absolute addresses now that the buffer's base is known.
        for table in &mut br_table_data {
            for entry in table {
                *entry = output.ptr(AssemblyOffset(*entry)) as usize;
            }
        }

        // The label map was threaded into the last function (if any).
        let function_labels = if let Some(x) = self.functions.last() {
            x.function_labels.as_ref().unwrap()
        } else {
            self.function_labels.as_ref().unwrap()
        };
        let mut out_labels: Vec<FuncPtr> = vec![];

        // Resolve labels densely by index; every index must have an offset.
        for i in 0..function_labels.len() {
            let (_, offset) = match function_labels.get(&i) {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "label not found",
                    });
                }
            };
            let offset = match offset {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "offset is none",
                    });
                }
            };
            out_labels.push(FuncPtr(output.ptr(*offset) as _));
        }

        let ctx = X64ExecutionContext {
            code: output,
            functions: self.functions,
            _br_table_data: br_table_data,
            func_import_count: self.func_import_count,
            signatures: match self.signatures {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "no signatures",
                    });
                }
            },
            function_pointers: out_labels,
            function_signatures: match self.function_signatures {
                Some(x) => x,
                None => {
                    return Err(CodegenError {
                        message: "no function signatures",
                    });
                }
            },
        };
        let resolver = ctx.get_runtime_resolver(module_info)?;

        Ok((ctx, resolver))
    }

    /// Stores the module's signature table.
    fn feed_signatures(&mut self, signatures: Map<SigIndex, FuncSig>) -> Result<(), CodegenError> {
        self.signatures = Some(Arc::new(signatures));
        Ok(())
    }

    /// Stores the function-index -> signature-index association.
    fn feed_function_signatures(
        &mut self,
        assoc: Map<FuncIndex, SigIndex>,
    ) -> Result<(), CodegenError> {
        self.function_signatures = Some(Arc::new(assoc));
        Ok(())
    }

    /// Registers an imported function. Import trampolines are not yet
    /// implemented in this backend; the previous implementation is kept
    /// below for reference.
    fn feed_import_function(&mut self) -> Result<(), CodegenError> {
        /*
        let labels = match self.function_labels.as_mut() {
            Some(x) => x,
            None => {
                return Err(CodegenError {
                    message: "got function import after code",
                });
            }
        };
        let id = labels.len();

        let assembler = self.assembler.as_mut().unwrap();

        let offset = assembler.offset();

        let label = X64FunctionCode::emit_native_call(
            self.assembler.as_mut().unwrap(),
            invoke_import,
            0,
            id,
        );
        labels.insert(id, (label, Some(offset)));

        self.func_import_count += 1;

        Ok(())
        */
        unimplemented!()
    }
}
|
|
|
|
|
2019-02-13 20:04:10 +08:00
|
|
|
impl X64FunctionCode {
    /// Emits a division-family instruction (`op`), first materializing an
    /// immediate operand into RCX since div/idiv cannot take immediates.
    /// RCX is used because RAX/RDX are implicitly clobbered by x86 division.
    /// NOTE(review): `m` is unused here; kept for signature symmetry with the
    /// other emit helpers.
    fn emit_relaxed_xdiv(
        a: &mut Assembler,
        m: &mut Machine,
        op: fn(&mut Assembler, Size, Location),
        sz: Size,
        loc: Location,
    ) {
        match loc {
            Location::Imm64(_) | Location::Imm32(_) => {
                a.emit_mov(sz, loc, Location::GPR(GPR::RCX)); // must not be used during div (rax, rdx)
                op(a, sz, Location::GPR(GPR::RCX));
            }
            _ => {
                op(a, sz, loc);
            }
        }
    }

    /// Emits a two-operand instruction, rewriting operand combinations that
    /// x86-64 cannot encode (mem->mem, 64-bit immediate sources, immediate
    /// destinations) by staging one or both operands through scratch GPRs.
    fn emit_relaxed_binop(
        a: &mut Assembler,
        m: &mut Machine,
        op: fn(&mut Assembler, Size, Location, Location),
        sz: Size,
        src: Location,
        dst: Location,
    ) {
        // How the operand pair must be rewritten before emitting `op`.
        enum RelaxMode {
            Direct,
            SrcToGPR,
            DstToGPR,
            BothToGPR,
        }
        let mode = match (src, dst) {
            (Location::Memory(_, _), Location::Memory(_, _)) => RelaxMode::SrcToGPR,
            (Location::Imm64(_), Location::Imm64(_)) | (Location::Imm64(_), Location::Imm32(_)) => RelaxMode::BothToGPR,
            // An immediate destination means the op only reads it (e.g. cmp);
            // stage it in a register.
            (_, Location::Imm32(_)) | (_, Location::Imm64(_)) => RelaxMode::DstToGPR,
            (Location::Imm64(_), Location::Memory(_, _)) => RelaxMode::SrcToGPR,
            // mov is the only op here that accepts a 64-bit immediate source
            // into a GPR, so everything else must stage imm64 sources.
            // NOTE(review): fn-pointer identity comparison — may misbehave if
            // the compiler merges/duplicates identical functions; confirm.
            (Location::Imm64(_), Location::GPR(_)) if (op as *const u8 != Assembler::emit_mov as *const u8) => RelaxMode::SrcToGPR,
            _ => RelaxMode::Direct,
        };

        match mode {
            RelaxMode::SrcToGPR => {
                let temp = m.acquire_temp_gpr().unwrap();
                a.emit_mov(sz, src, Location::GPR(temp));
                op(a, sz, Location::GPR(temp), dst);
                m.release_temp_gpr(temp);
            },
            RelaxMode::DstToGPR => {
                // NOTE(review): the op's result stays in the scratch GPR and
                // is not written back to `dst`; callers appear to use this
                // mode only for flag-setting ops like cmp — confirm.
                let temp = m.acquire_temp_gpr().unwrap();
                a.emit_mov(sz, dst, Location::GPR(temp));
                op(a, sz, src, Location::GPR(temp));
                m.release_temp_gpr(temp);
            },
            RelaxMode::BothToGPR => {
                // Same caveat as DstToGPR: the destination copy is discarded.
                let temp_src = m.acquire_temp_gpr().unwrap();
                let temp_dst = m.acquire_temp_gpr().unwrap();
                a.emit_mov(sz, src, Location::GPR(temp_src));
                a.emit_mov(sz, dst, Location::GPR(temp_dst));
                op(a, sz, Location::GPR(temp_src), Location::GPR(temp_dst));
                m.release_temp_gpr(temp_dst);
                m.release_temp_gpr(temp_src);
            },
            RelaxMode::Direct => {
                op(a, sz, src, dst);
            }
        }
    }

    /// Pops two i32 operands, applies binary op `f` (dst op= src form), and
    /// pushes the result as a temporary. When the result slot differs from
    /// the first operand, the computation is staged through a scratch GPR.
    fn emit_binop_i32(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        f: fn(&mut Assembler, Size, Location, Location),
    ) {
        // Using Red Zone here.
        let loc_b = get_location_released(a, m, value_stack.pop().unwrap());
        let loc_a = get_location_released(a, m, value_stack.pop().unwrap());
        let ret = m.acquire_locations(a, &[WpType::I32], false)[0];

        if loc_a != ret {
            let tmp = m.acquire_temp_gpr().unwrap();
            Self::emit_relaxed_binop(
                a, m, Assembler::emit_mov,
                Size::S32, loc_a, Location::GPR(tmp),
            );
            Self::emit_relaxed_binop(
                a, m, f,
                Size::S32, loc_b, Location::GPR(tmp),
            );
            Self::emit_relaxed_binop(
                a, m, Assembler::emit_mov,
                Size::S32, Location::GPR(tmp), ret,
            );
            m.release_temp_gpr(tmp);
        } else {
            Self::emit_relaxed_binop(
                a, m, f,
                Size::S32, loc_b, ret,
            );
        }

        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// 64-bit counterpart of `emit_binop_i32`; identical structure with
    /// `Size::S64` and an I64 result slot.
    fn emit_binop_i64(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        f: fn(&mut Assembler, Size, Location, Location),
    ) {
        // Using Red Zone here.
        let loc_b = get_location_released(a, m, value_stack.pop().unwrap());
        let loc_a = get_location_released(a, m, value_stack.pop().unwrap());
        let ret = m.acquire_locations(a, &[WpType::I64], false)[0];

        if loc_a != ret {
            let tmp = m.acquire_temp_gpr().unwrap();
            Self::emit_relaxed_binop(
                a, m, Assembler::emit_mov,
                Size::S64, loc_a, Location::GPR(tmp),
            );
            Self::emit_relaxed_binop(
                a, m, f,
                Size::S64, loc_b, Location::GPR(tmp),
            );
            Self::emit_relaxed_binop(
                a, m, Assembler::emit_mov,
                Size::S64, Location::GPR(tmp), ret,
            );
            m.release_temp_gpr(tmp);
        } else {
            Self::emit_relaxed_binop(
                a, m, f,
                Size::S64, loc_b, ret,
            );
        }

        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// Pops one i32 operand, compares it against `loc_b` with condition `c`,
    /// and pushes a 0/1 i32 result (setcc + mask to the low byte).
    fn emit_cmpop_i32_dynamic_b(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        c: Condition,
        loc_b: Location,
    ) {
        // Using Red Zone here.
        let loc_a = get_location_released(a, m, value_stack.pop().unwrap());

        let ret = m.acquire_locations(a, &[WpType::I32], false)[0];
        match ret {
            Location::GPR(x) => {
                Self::emit_relaxed_binop(
                    a, m, Assembler::emit_cmp,
                    Size::S32, loc_b, loc_a,
                );
                // setcc writes only the low byte; mask the rest to get 0/1.
                a.emit_set(c, x);
                a.emit_and(Size::S32, Location::Imm32(0xff), Location::GPR(x));
            },
            Location::Memory(_, _) => {
                // setcc cannot target memory sizes we want; go through a GPR.
                let tmp = m.acquire_temp_gpr().unwrap();
                Self::emit_relaxed_binop(
                    a, m, Assembler::emit_cmp,
                    Size::S32, loc_b, loc_a,
                );
                a.emit_set(c, tmp);
                a.emit_and(Size::S32, Location::Imm32(0xff), Location::GPR(tmp));
                a.emit_mov(Size::S32, Location::GPR(tmp), ret);
                m.release_temp_gpr(tmp);
            },
            _ => unreachable!()
        }
        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// Pops two i32 operands and pushes the 0/1 result of comparing them
    /// with condition `c`.
    fn emit_cmpop_i32(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        c: Condition,
    ) {
        let loc_b = get_location_released(a, m, value_stack.pop().unwrap());
        Self::emit_cmpop_i32_dynamic_b(a, m, value_stack, c, loc_b);
    }

    /// 64-bit counterpart of `emit_cmpop_i32_dynamic_b`: the comparison is
    /// S64 but the pushed result is still a 0/1 i32.
    fn emit_cmpop_i64_dynamic_b(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        c: Condition,
        loc_b: Location,
    ) {
        // Using Red Zone here.
        let loc_a = get_location_released(a, m, value_stack.pop().unwrap());

        let ret = m.acquire_locations(a, &[WpType::I32], false)[0];
        match ret {
            Location::GPR(x) => {
                Self::emit_relaxed_binop(
                    a, m, Assembler::emit_cmp,
                    Size::S64, loc_b, loc_a,
                );
                a.emit_set(c, x);
                a.emit_and(Size::S32, Location::Imm32(0xff), Location::GPR(x));
            },
            Location::Memory(_, _) => {
                let tmp = m.acquire_temp_gpr().unwrap();
                Self::emit_relaxed_binop(
                    a, m, Assembler::emit_cmp,
                    Size::S64, loc_b, loc_a,
                );
                a.emit_set(c, tmp);
                a.emit_and(Size::S32, Location::Imm32(0xff), Location::GPR(tmp));
                a.emit_mov(Size::S32, Location::GPR(tmp), ret);
                m.release_temp_gpr(tmp);
            },
            _ => unreachable!()
        }
        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// Pops two i64 operands and pushes the 0/1 i32 result of comparing them
    /// with condition `c`.
    fn emit_cmpop_i64(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        c: Condition,
    ) {
        let loc_b = get_location_released(a, m, value_stack.pop().unwrap());
        Self::emit_cmpop_i64_dynamic_b(a, m, value_stack, c, loc_b);
    }

    /// Pops one i32 operand and applies a bit-count style unary op `f`
    /// (e.g. clz/ctz/popcnt emitters) whose source must not be an immediate
    /// and whose destination must be a register; stages through scratch GPRs
    /// as needed, then pushes the result.
    fn emit_xcnt_i32(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        f: fn(&mut Assembler, Size, Location, Location),
    ) {
        let loc = get_location_released(a, m, value_stack.pop().unwrap());
        let ret = m.acquire_locations(a, &[WpType::I32], false)[0];

        match loc {
            Location::Imm32(_) => {
                // Immediate source: materialize it in a register first.
                let tmp = m.acquire_temp_gpr().unwrap();
                a.emit_mov(Size::S32, loc, Location::GPR(tmp));
                if let Location::Memory(_, _) = ret {
                    // Destination must be a register; spill afterwards.
                    let out_tmp = m.acquire_temp_gpr().unwrap();
                    f(a, Size::S32, Location::GPR(tmp), Location::GPR(out_tmp));
                    a.emit_mov(Size::S32, Location::GPR(out_tmp), ret);
                    m.release_temp_gpr(out_tmp);
                } else {
                    f(a, Size::S32, Location::GPR(tmp), ret);
                }
                m.release_temp_gpr(tmp);
            },
            Location::Memory(_, _) | Location::GPR(_) => {
                if let Location::Memory(_, _) = ret {
                    let out_tmp = m.acquire_temp_gpr().unwrap();
                    f(a, Size::S32, loc, Location::GPR(out_tmp));
                    a.emit_mov(Size::S32, Location::GPR(out_tmp), ret);
                    m.release_temp_gpr(out_tmp);
                } else {
                    f(a, Size::S32, loc, ret);
                }
            },
            _ => unreachable!(),
        }
        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// 64-bit counterpart of `emit_xcnt_i32`.
    fn emit_xcnt_i64(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        f: fn(&mut Assembler, Size, Location, Location),
    ) {
        let loc = get_location_released(a, m, value_stack.pop().unwrap());
        let ret = m.acquire_locations(a, &[WpType::I64], false)[0];

        match loc {
            Location::Imm64(_) | Location::Imm32(_) => {
                let tmp = m.acquire_temp_gpr().unwrap();
                a.emit_mov(Size::S64, loc, Location::GPR(tmp));
                if let Location::Memory(_, _) = ret {
                    let out_tmp = m.acquire_temp_gpr().unwrap();
                    f(a, Size::S64, Location::GPR(tmp), Location::GPR(out_tmp));
                    a.emit_mov(Size::S64, Location::GPR(out_tmp), ret);
                    m.release_temp_gpr(out_tmp);
                } else {
                    f(a, Size::S64, Location::GPR(tmp), ret);
                }
                m.release_temp_gpr(tmp);
            },
            Location::Memory(_, _) | Location::GPR(_) => {
                if let Location::Memory(_, _) = ret {
                    let out_tmp = m.acquire_temp_gpr().unwrap();
                    f(a, Size::S64, loc, Location::GPR(out_tmp));
                    a.emit_mov(Size::S64, Location::GPR(out_tmp), ret);
                    m.release_temp_gpr(out_tmp);
                } else {
                    f(a, Size::S64, loc, ret);
                }
            },
            _ => unreachable!(),
        }
        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// Pops shift amount then value, emits shift op `f` with the amount in CL
    /// (x86 variable shifts require the count in CL), and pushes the result.
    fn emit_shift_i32(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        f: fn(&mut Assembler, Size, Location, Location),
    ) {
        let loc_b = get_location_released(a, m, value_stack.pop().unwrap());
        let loc_a = get_location_released(a, m, value_stack.pop().unwrap());
        let ret = m.acquire_locations(a, &[WpType::I32], false)[0];

        a.emit_mov(Size::S32, loc_b, Location::GPR(GPR::RCX));

        if loc_a != ret {
            Self::emit_relaxed_binop(
                a, m, Assembler::emit_mov,
                Size::S32, loc_a, ret
            );
        }

        f(a, Size::S32, Location::GPR(GPR::RCX), ret);
        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// 64-bit counterpart of `emit_shift_i32`. The shift count is still moved
    /// as 32 bits — only CL is consulted by the shift instruction.
    fn emit_shift_i64(
        a: &mut Assembler,
        m: &mut Machine,
        value_stack: &mut Vec<(Location, LocalOrTemp)>,
        f: fn(&mut Assembler, Size, Location, Location),
    ) {
        let loc_b = get_location_released(a, m, value_stack.pop().unwrap());
        let loc_a = get_location_released(a, m, value_stack.pop().unwrap());
        let ret = m.acquire_locations(a, &[WpType::I64], false)[0];

        a.emit_mov(Size::S32, loc_b, Location::GPR(GPR::RCX));

        if loc_a != ret {
            Self::emit_relaxed_binop(
                a, m, Assembler::emit_mov,
                Size::S64, loc_a, ret
            );
        }

        f(a, Size::S64, Location::GPR(GPR::RCX), ret);
        value_stack.push((ret, LocalOrTemp::Temp));
    }

    /// Maps an integer-argument index to its System V AMD64 location:
    /// the first six go in registers, the rest on the caller's stack
    /// (rbp + 16 skips the saved rbp and return address).
    fn get_param_location(
        idx: usize
    ) -> Location {
        match idx {
            0 => Location::GPR(GPR::RDI),
            1 => Location::GPR(GPR::RSI),
            2 => Location::GPR(GPR::RDX),
            3 => Location::GPR(GPR::RCX),
            4 => Location::GPR(GPR::R8),
            5 => Location::GPR(GPR::R9),
            _ => Location::Memory(GPR::RBP, (16 + (idx - 6) * 8) as i32),
        }
    }
}
|
2019-02-20 22:56:32 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
impl FunctionCodeGenerator for X64FunctionCode {
|
|
|
|
fn feed_return(&mut self, ty: WpType) -> Result<(), CodegenError> {
|
|
|
|
self.returns.push(ty);
|
|
|
|
Ok(())
|
2019-02-23 01:54:16 +08:00
|
|
|
}
|
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
fn feed_param(&mut self, ty: WpType) -> Result<(), CodegenError> {
|
|
|
|
self.num_params += 1;
|
|
|
|
self.num_locals += 1;
|
|
|
|
Ok(())
|
2019-03-05 00:59:05 +08:00
|
|
|
}
|
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
fn feed_local(&mut self, ty: WpType, n: usize) -> Result<(), CodegenError> {
|
|
|
|
self.num_locals += n;
|
|
|
|
Ok(())
|
2019-02-21 22:04:43 +08:00
|
|
|
}
|
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
    /// Emits the function prologue and sets up the compile-time state for the
    /// body: frame setup, vmctx/parameter spilling, local zeroing, and the
    /// root control frame.
    fn begin_body(&mut self) -> Result<(), CodegenError> {
        let a = self.assembler.as_mut().unwrap();
        // Standard frame: save caller's RBP, then anchor RBP at the new top.
        a.emit_push(Size::S64, Location::GPR(GPR::RBP));
        a.emit_mov(Size::S64, Location::GPR(GPR::RSP), Location::GPR(GPR::RBP));

        // One extra slot (index 0) holds the vmctx pointer; the rest are the
        // function's locals (parameters first, then declared locals).
        let locations = self.machine.acquire_stack_locations(a, 1 + self.num_locals, false);
        self.vmctx_location = Some(locations[0]);
        self.locals = locations[1..].to_vec();

        // Argument 0 of the generated function is the vm::Ctx pointer.
        a.emit_mov(Size::S64, Self::get_param_location(0), self.vmctx_location.unwrap());

        // Wasm parameters arrive shifted by one native argument slot because
        // of the leading vmctx argument.
        for i in 0..self.num_params {
            a.emit_mov(Size::S64, Self::get_param_location(i + 1), self.locals[i]);
        }
        // Declared (non-parameter) locals start zero-initialized per the
        // WebAssembly spec.
        for i in self.num_params..self.num_locals {
            a.emit_mov(Size::S32, Location::Imm32(0), self.locals[i]);
        }

        // Root control frame: its label is the function's epilogue target and
        // its returns are the function's declared results.
        self.control_stack.push(ControlFrame {
            label: a.get_label(),
            loop_like: false,
            if_else: IfElseState::None,
            returns: self.returns.clone(),
            value_stack_depth: 0,
        });
        Ok(())
    }
|
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
fn finalize(&mut self) -> Result<(), CodegenError> {
|
|
|
|
let a = self.assembler.as_mut().unwrap();
|
|
|
|
a.emit_ud2();
|
|
|
|
Ok(())
|
2019-03-02 01:41:31 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
|
|
|
|
fn feed_opcode(&mut self, op: Operator, module_info: &ModuleInfo) -> Result<(), CodegenError> {
|
|
|
|
let was_unreachable;
|
2019-02-21 21:14:10 +08:00
|
|
|
|
|
|
|
if self.unreachable_depth > 0 {
|
|
|
|
was_unreachable = true;
|
|
|
|
match op {
|
|
|
|
Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } => {
|
|
|
|
self.unreachable_depth += 1;
|
|
|
|
}
|
2019-03-14 17:11:35 +08:00
|
|
|
Operator::End => {
|
2019-02-21 21:14:10 +08:00
|
|
|
self.unreachable_depth -= 1;
|
|
|
|
}
|
2019-03-14 17:11:35 +08:00
|
|
|
Operator::Else => {
|
|
|
|
// We are in a reachable true branch
|
|
|
|
if self.unreachable_depth == 1 {
|
2019-03-18 00:31:36 +08:00
|
|
|
if let Some(IfElseState::If(_)) = self
|
|
|
|
.control_stack
|
|
|
|
.last()
|
|
|
|
.map(|x| x.if_else)
|
|
|
|
{
|
2019-03-14 17:11:35 +08:00
|
|
|
self.unreachable_depth -= 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-02-21 21:14:10 +08:00
|
|
|
_ => {}
|
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
if self.unreachable_depth > 0 {
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
was_unreachable = false;
|
|
|
|
}
|
2019-03-09 02:57:23 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
let a = self.assembler.as_mut().unwrap();
|
|
|
|
match op {
|
|
|
|
Operator::GetGlobal { global_index } => {
|
|
|
|
let mut global_index = global_index as usize;
|
|
|
|
let loc = self.machine.acquire_locations(
|
|
|
|
a,
|
|
|
|
&[type_to_wp_type(
|
|
|
|
module_info.globals[LocalGlobalIndex::new(global_index)]
|
|
|
|
.desc
|
|
|
|
.ty,
|
|
|
|
)],
|
|
|
|
false
|
|
|
|
)[0];
|
|
|
|
self.value_stack.push((loc, LocalOrTemp::Temp));
|
2019-03-09 02:57:23 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
let tmp = self.machine.acquire_temp_gpr().unwrap();
|
2019-03-09 02:57:23 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
if global_index < module_info.imported_globals.len() {
|
|
|
|
a.emit_mov(Size::S64, Location::Memory(GPR::RDI, vm::Ctx::offset_imported_globals() as i32), Location::GPR(tmp));
|
|
|
|
} else {
|
|
|
|
global_index -= module_info.imported_globals.len();
|
|
|
|
assert!(global_index < module_info.globals.len());
|
|
|
|
a.emit_mov(Size::S64, Location::Memory(GPR::RDI, vm::Ctx::offset_globals() as i32), Location::GPR(tmp));
|
|
|
|
}
|
|
|
|
a.emit_mov(Size::S64, Location::Memory(tmp, (global_index as i32) * 8), Location::GPR(tmp));
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, Location::Memory(tmp, LocalGlobal::offset_data() as i32), loc
|
2019-03-09 02:57:23 +08:00
|
|
|
);
|
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
self.machine.release_temp_gpr(tmp);
|
2019-03-09 02:57:23 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::SetGlobal { global_index } => {
|
|
|
|
let mut global_index = global_index as usize;
|
|
|
|
let loc = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
2019-02-21 22:04:43 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
let tmp = self.machine.acquire_temp_gpr().unwrap();
|
2019-02-21 22:04:43 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
if global_index < module_info.imported_globals.len() {
|
|
|
|
a.emit_mov(Size::S64, Location::Memory(GPR::RDI, vm::Ctx::offset_imported_globals() as i32), Location::GPR(tmp));
|
2019-02-21 21:14:10 +08:00
|
|
|
} else {
|
2019-04-01 20:33:33 +08:00
|
|
|
global_index -= module_info.imported_globals.len();
|
|
|
|
assert!(global_index < module_info.globals.len());
|
|
|
|
a.emit_mov(Size::S64, Location::Memory(GPR::RDI, vm::Ctx::offset_globals() as i32), Location::GPR(tmp));
|
2019-02-14 00:53:06 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_mov(Size::S64, Location::Memory(tmp, (global_index as i32) * 8), Location::GPR(tmp));
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, loc, Location::Memory(tmp, LocalGlobal::offset_data() as i32)
|
|
|
|
);
|
|
|
|
|
|
|
|
self.machine.release_temp_gpr(tmp);
|
2019-02-14 00:53:06 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::GetLocal { local_index } => {
|
|
|
|
let local_index = local_index as usize;
|
|
|
|
self.value_stack.push((self.locals[local_index], LocalOrTemp::Local));
|
|
|
|
}
|
|
|
|
Operator::SetLocal { local_index } => {
|
|
|
|
let local_index = local_index as usize;
|
|
|
|
let loc = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, loc, self.locals[local_index],
|
|
|
|
);
|
|
|
|
}
|
|
|
|
Operator::TeeLocal { local_index } => {
|
|
|
|
let local_index = local_index as usize;
|
|
|
|
let (loc, _) = *self.value_stack.last().unwrap();
|
|
|
|
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, loc, self.locals[local_index],
|
2019-02-23 01:54:16 +08:00
|
|
|
);
|
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::I32Const { value } => self.value_stack.push((Location::Imm32(value as u32), LocalOrTemp::Temp)),
|
|
|
|
Operator::I32Add => Self::emit_binop_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_add),
|
|
|
|
Operator::I32Sub => Self::emit_binop_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_sub),
|
|
|
|
Operator::I32Mul => Self::emit_binop_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_imul),
|
|
|
|
Operator::I32DivU => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I32], false)[0];
|
|
|
|
a.emit_mov(Size::S32, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S32, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
2019-04-01 23:49:46 +08:00
|
|
|
Self::emit_relaxed_xdiv(a, &mut self.machine, Assembler::emit_div, Size::S32, loc_b);
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_mov(Size::S32, Location::GPR(GPR::RAX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I32DivS => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I32], false)[0];
|
|
|
|
a.emit_mov(Size::S32, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S32, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
2019-04-01 23:49:46 +08:00
|
|
|
Self::emit_relaxed_xdiv(a, &mut self.machine, Assembler::emit_idiv, Size::S32, loc_b);
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_mov(Size::S32, Location::GPR(GPR::RAX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I32RemU => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I32], false)[0];
|
|
|
|
a.emit_mov(Size::S32, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S32, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
|
|
|
a.emit_div(Size::S32, loc_b);
|
|
|
|
a.emit_mov(Size::S32, Location::GPR(GPR::RDX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I32RemS => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I32], false)[0];
|
|
|
|
a.emit_mov(Size::S32, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S32, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
|
|
|
a.emit_idiv(Size::S32, loc_b);
|
|
|
|
a.emit_mov(Size::S32, Location::GPR(GPR::RDX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I32And => Self::emit_binop_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_and),
|
|
|
|
Operator::I32Or => Self::emit_binop_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_or),
|
|
|
|
Operator::I32Xor => Self::emit_binop_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_xor),
|
|
|
|
Operator::I32Eq => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::Equal),
|
|
|
|
Operator::I32Ne => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::NotEqual),
|
|
|
|
Operator::I32Eqz => Self::emit_cmpop_i32_dynamic_b(a, &mut self.machine, &mut self.value_stack, Condition::Equal, Location::Imm32(0)),
|
|
|
|
Operator::I32Clz => Self::emit_xcnt_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_lzcnt),
|
|
|
|
Operator::I32Ctz => Self::emit_xcnt_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_tzcnt),
|
|
|
|
Operator::I32Popcnt => Self::emit_xcnt_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_popcnt),
|
|
|
|
Operator::I32Shl => Self::emit_shift_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_shl),
|
|
|
|
Operator::I32ShrU => Self::emit_shift_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_sar),
|
|
|
|
Operator::I32ShrS => Self::emit_shift_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_shr),
|
|
|
|
Operator::I32Rotl => Self::emit_shift_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_rol),
|
|
|
|
Operator::I32Rotr => Self::emit_shift_i32(a, &mut self.machine, &mut self.value_stack, Assembler::emit_ror),
|
|
|
|
Operator::I32LtU => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::Below),
|
|
|
|
Operator::I32LeU => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::BelowEqual),
|
|
|
|
Operator::I32GtU => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::Above),
|
|
|
|
Operator::I32GeU => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::AboveEqual),
|
|
|
|
Operator::I32LtS => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::Less),
|
|
|
|
Operator::I32LeS => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::LessEqual),
|
|
|
|
Operator::I32GtS => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::Greater),
|
|
|
|
Operator::I32GeS => Self::emit_cmpop_i32(a, &mut self.machine, &mut self.value_stack, Condition::GreaterEqual),
|
2019-04-01 23:49:46 +08:00
|
|
|
Operator::I64Const { value } => {
|
|
|
|
let value = value as u64;
|
|
|
|
if value <= ::std::u32::MAX as u64 {
|
|
|
|
self.value_stack.push((Location::Imm32(value as u32), LocalOrTemp::Temp))
|
|
|
|
} else {
|
|
|
|
self.value_stack.push((Location::Imm64(value), LocalOrTemp::Temp))
|
|
|
|
}
|
|
|
|
},
|
|
|
|
Operator::I64Add => Self::emit_binop_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_add),
|
|
|
|
Operator::I64Sub => Self::emit_binop_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_sub),
|
|
|
|
Operator::I64Mul => Self::emit_binop_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_imul),
|
|
|
|
Operator::I64DivU => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I64], false)[0];
|
|
|
|
a.emit_mov(Size::S64, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S64, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
|
|
|
Self::emit_relaxed_xdiv(a, &mut self.machine, Assembler::emit_div, Size::S64, loc_b);
|
|
|
|
a.emit_mov(Size::S64, Location::GPR(GPR::RAX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I64DivS => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I64], false)[0];
|
|
|
|
a.emit_mov(Size::S64, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S64, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
|
|
|
Self::emit_relaxed_xdiv(a, &mut self.machine, Assembler::emit_idiv, Size::S64, loc_b);
|
|
|
|
a.emit_mov(Size::S64, Location::GPR(GPR::RAX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I64RemU => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I64], false)[0];
|
|
|
|
a.emit_mov(Size::S64, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S64, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
|
|
|
a.emit_div(Size::S64, loc_b);
|
|
|
|
a.emit_mov(Size::S64, Location::GPR(GPR::RDX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I64RemS => {
|
|
|
|
// We assume that RAX and RDX are temporary registers here.
|
|
|
|
let loc_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let loc_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I64], false)[0];
|
|
|
|
a.emit_mov(Size::S64, loc_a, Location::GPR(GPR::RAX));
|
|
|
|
a.emit_xor(Size::S64, Location::GPR(GPR::RDX), Location::GPR(GPR::RDX));
|
|
|
|
a.emit_idiv(Size::S64, loc_b);
|
|
|
|
a.emit_mov(Size::S64, Location::GPR(GPR::RDX), ret);
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
}
|
|
|
|
Operator::I64And => Self::emit_binop_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_and),
|
|
|
|
Operator::I64Or => Self::emit_binop_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_or),
|
|
|
|
Operator::I64Xor => Self::emit_binop_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_xor),
|
|
|
|
Operator::I64Eq => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::Equal),
|
|
|
|
Operator::I64Ne => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::NotEqual),
|
|
|
|
Operator::I64Eqz => Self::emit_cmpop_i64_dynamic_b(a, &mut self.machine, &mut self.value_stack, Condition::Equal, Location::Imm64(0)),
|
|
|
|
Operator::I64Clz => Self::emit_xcnt_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_lzcnt),
|
|
|
|
Operator::I64Ctz => Self::emit_xcnt_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_tzcnt),
|
|
|
|
Operator::I64Popcnt => Self::emit_xcnt_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_popcnt),
|
|
|
|
Operator::I64Shl => Self::emit_shift_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_shl),
|
|
|
|
Operator::I64ShrU => Self::emit_shift_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_sar),
|
|
|
|
Operator::I64ShrS => Self::emit_shift_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_shr),
|
|
|
|
Operator::I64Rotl => Self::emit_shift_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_rol),
|
|
|
|
Operator::I64Rotr => Self::emit_shift_i64(a, &mut self.machine, &mut self.value_stack, Assembler::emit_ror),
|
|
|
|
Operator::I64LtU => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::Below),
|
|
|
|
Operator::I64LeU => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::BelowEqual),
|
|
|
|
Operator::I64GtU => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::Above),
|
|
|
|
Operator::I64GeU => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::AboveEqual),
|
|
|
|
Operator::I64LtS => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::Less),
|
|
|
|
Operator::I64LeS => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::LessEqual),
|
|
|
|
Operator::I64GtS => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::Greater),
|
|
|
|
Operator::I64GeS => Self::emit_cmpop_i64(a, &mut self.machine, &mut self.value_stack, Condition::GreaterEqual),
|
|
|
|
Operator::I64ExtendUI32 => {
|
|
|
|
let loc = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I64], false)[0];
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S32, loc, ret,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
Operator::I32WrapI64 => {
|
|
|
|
let loc = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I32], false)[0];
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S32, loc, ret,
|
|
|
|
);
|
|
|
|
}
|
2019-02-27 23:38:45 +08:00
|
|
|
Operator::If { ty } => {
|
2019-04-01 20:33:33 +08:00
|
|
|
let label_end = a.get_label();
|
|
|
|
let label_else = a.get_label();
|
2019-02-27 23:38:45 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
let cond = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
2019-02-27 23:38:45 +08:00
|
|
|
|
|
|
|
self.control_stack
|
|
|
|
.push(ControlFrame {
|
|
|
|
label: label_end,
|
|
|
|
loop_like: false,
|
|
|
|
if_else: IfElseState::If(label_else),
|
|
|
|
returns: match ty {
|
|
|
|
WpType::EmptyBlockType => vec![],
|
|
|
|
_ => vec![ty],
|
|
|
|
},
|
2019-04-01 20:33:33 +08:00
|
|
|
value_stack_depth: self.value_stack.len(),
|
2019-02-27 23:38:45 +08:00
|
|
|
});
|
2019-04-01 20:33:33 +08:00
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_cmp,
|
|
|
|
Size::S32, Location::Imm32(0), cond,
|
2019-02-27 23:38:45 +08:00
|
|
|
);
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_jmp(Condition::Equal, label_else);
|
2019-02-27 23:38:45 +08:00
|
|
|
}
|
|
|
|
Operator::Else => {
|
2019-04-01 20:33:33 +08:00
|
|
|
let mut frame = self.control_stack.last_mut().unwrap();
|
2019-04-01 23:49:46 +08:00
|
|
|
let released: Vec<Location> = self.value_stack.drain(frame.value_stack_depth..)
|
2019-04-01 20:33:33 +08:00
|
|
|
.filter(|&(_, lot)| lot == LocalOrTemp::Temp)
|
|
|
|
.map(|(x, _)| x)
|
|
|
|
.collect();
|
|
|
|
self.machine.release_locations(a, &released);
|
|
|
|
|
|
|
|
match frame.if_else {
|
|
|
|
IfElseState::If(label) => {
|
|
|
|
a.emit_jmp(Condition::None, frame.label);
|
|
|
|
a.emit_label(label);
|
|
|
|
frame.if_else = IfElseState::Else;
|
|
|
|
}
|
|
|
|
_ => unreachable!()
|
|
|
|
}
|
2019-02-27 23:38:45 +08:00
|
|
|
}
|
2019-02-28 23:12:42 +08:00
|
|
|
Operator::Select => {
|
2019-04-01 20:33:33 +08:00
|
|
|
let cond = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let v_b = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let v_a = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
let ret = self.machine.acquire_locations(a, &[WpType::I64], false)[0];
|
|
|
|
self.value_stack.push((ret, LocalOrTemp::Temp));
|
|
|
|
|
|
|
|
let end_label = a.get_label();
|
|
|
|
let zero_label = a.get_label();
|
|
|
|
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_cmp,
|
|
|
|
Size::S32, Location::Imm32(0), cond,
|
2019-02-28 23:12:42 +08:00
|
|
|
);
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_jmp(Condition::Equal, zero_label);
|
|
|
|
if v_a != ret {
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, v_a, ret,
|
|
|
|
);
|
2019-02-28 23:12:42 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_jmp(Condition::None, end_label);
|
|
|
|
a.emit_label(zero_label);
|
|
|
|
if v_b != ret {
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, v_b, ret,
|
|
|
|
);
|
2019-02-28 23:12:42 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_label(end_label);
|
2019-02-28 23:12:42 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::Block { ty } => {
|
|
|
|
self.control_stack
|
|
|
|
.push(ControlFrame {
|
|
|
|
label: a.get_label(),
|
|
|
|
loop_like: false,
|
|
|
|
if_else: IfElseState::None,
|
|
|
|
returns: match ty {
|
|
|
|
WpType::EmptyBlockType => vec![],
|
|
|
|
_ => vec![ty],
|
|
|
|
},
|
|
|
|
value_stack_depth: self.value_stack.len(),
|
|
|
|
});
|
2019-02-20 23:21:33 +08:00
|
|
|
}
|
2019-04-02 00:11:01 +08:00
|
|
|
Operator::Loop { ty } => {
|
|
|
|
let label = a.get_label();
|
|
|
|
self.control_stack
|
|
|
|
.push(ControlFrame {
|
|
|
|
label: label,
|
|
|
|
loop_like: true,
|
|
|
|
if_else: IfElseState::None,
|
|
|
|
returns: match ty {
|
|
|
|
WpType::EmptyBlockType => vec![],
|
|
|
|
_ => vec![ty],
|
|
|
|
},
|
|
|
|
value_stack_depth: self.value_stack.len(),
|
|
|
|
});
|
|
|
|
a.emit_label(label);
|
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::Unreachable => {
|
|
|
|
a.emit_ud2();
|
|
|
|
self.unreachable_depth = 1;
|
2019-02-21 22:04:43 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::Return => {
|
|
|
|
let frame = &self.control_stack[0];
|
|
|
|
let has_return = if frame.returns.len() > 0 {
|
|
|
|
assert_eq!(frame.returns.len(), 1);
|
|
|
|
let (loc, _) = *self.value_stack.last().unwrap();
|
|
|
|
a.emit_mov(Size::S64, loc, Location::GPR(GPR::RAX));
|
|
|
|
true
|
|
|
|
} else {
|
|
|
|
false
|
2019-02-24 12:00:35 +08:00
|
|
|
};
|
2019-04-01 23:49:46 +08:00
|
|
|
let released: Vec<Location> = self.value_stack.drain(frame.value_stack_depth..)
|
2019-04-01 20:33:33 +08:00
|
|
|
.filter(|&(_, lot)| lot == LocalOrTemp::Temp)
|
|
|
|
.map(|(x, _)| x)
|
|
|
|
.collect();
|
|
|
|
self.machine.release_locations(a, &released);
|
|
|
|
a.emit_jmp(Condition::None, frame.label);
|
2019-02-24 12:00:35 +08:00
|
|
|
self.unreachable_depth = 1;
|
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::Br { relative_depth } => {
|
|
|
|
let frame = &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
|
2019-04-02 00:11:01 +08:00
|
|
|
let has_return = if !frame.loop_like && frame.returns.len() > 0 {
|
2019-04-01 20:33:33 +08:00
|
|
|
assert_eq!(frame.returns.len(), 1);
|
|
|
|
let (loc, _) = *self.value_stack.last().unwrap();
|
|
|
|
a.emit_mov(Size::S64, loc, Location::GPR(GPR::RAX));
|
|
|
|
true
|
|
|
|
} else {
|
|
|
|
false
|
|
|
|
};
|
2019-04-01 23:49:46 +08:00
|
|
|
let released: Vec<Location> = self.value_stack.drain(frame.value_stack_depth..)
|
2019-04-01 20:33:33 +08:00
|
|
|
.filter(|&(_, lot)| lot == LocalOrTemp::Temp)
|
|
|
|
.map(|(x, _)| x)
|
|
|
|
.collect();
|
|
|
|
self.machine.release_locations(a, &released);
|
|
|
|
a.emit_jmp(Condition::None, frame.label);
|
|
|
|
self.unreachable_depth = 1;
|
2019-03-17 10:27:14 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::BrIf { relative_depth }=> {
|
|
|
|
let after = a.get_label();
|
|
|
|
let cond = get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
|
|
|
a.emit_cmp(Size::S32, Location::Imm32(0), cond);
|
|
|
|
a.emit_jmp(Condition::Equal, after);
|
|
|
|
|
|
|
|
let frame = &self.control_stack[self.control_stack.len() - 1 - (relative_depth as usize)];
|
2019-04-02 00:11:01 +08:00
|
|
|
let has_return = if !frame.loop_like && frame.returns.len() > 0 {
|
2019-04-01 20:33:33 +08:00
|
|
|
assert_eq!(frame.returns.len(), 1);
|
|
|
|
let (loc, _) = *self.value_stack.last().unwrap();
|
2019-04-02 00:11:01 +08:00
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, loc, Location::GPR(GPR::RAX),
|
|
|
|
);
|
2019-04-01 20:33:33 +08:00
|
|
|
true
|
|
|
|
} else {
|
|
|
|
false
|
|
|
|
};
|
|
|
|
let released: Vec<Location> = self.value_stack[frame.value_stack_depth..].iter()
|
|
|
|
.filter(|&&(_, lot)| lot == LocalOrTemp::Temp)
|
|
|
|
.map(|&(x, _)| x)
|
|
|
|
.collect();
|
|
|
|
self.machine.release_locations_keep_state(a, &released);
|
|
|
|
a.emit_jmp(Condition::None, frame.label);
|
2019-03-17 19:54:20 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
a.emit_label(after);
|
2019-03-17 10:27:14 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::Drop => {
|
|
|
|
get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
|
2019-03-15 01:10:31 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
Operator::End => {
|
|
|
|
let frame = self.control_stack.pop().unwrap();
|
2019-04-02 00:11:01 +08:00
|
|
|
|
|
|
|
if !was_unreachable && frame.returns.len() > 0 {
|
|
|
|
let (loc, _) = *self.value_stack.last().unwrap();
|
|
|
|
Self::emit_relaxed_binop(
|
|
|
|
a, &mut self.machine, Assembler::emit_mov,
|
|
|
|
Size::S64, loc, Location::GPR(GPR::RAX),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
if self.control_stack.len() == 0 {
|
|
|
|
a.emit_label(frame.label);
|
|
|
|
a.emit_mov(Size::S64, Location::GPR(GPR::RBP), Location::GPR(GPR::RSP));
|
|
|
|
a.emit_pop(Size::S64, Location::GPR(GPR::RBP));
|
|
|
|
a.emit_ret();
|
|
|
|
} else {
|
2019-04-01 23:49:46 +08:00
|
|
|
let released: Vec<Location> = self.value_stack.drain(frame.value_stack_depth..)
|
2019-04-01 20:33:33 +08:00
|
|
|
.filter(|&(_, lot)| lot == LocalOrTemp::Temp)
|
|
|
|
.map(|(x, _)| x)
|
|
|
|
.collect();
|
|
|
|
self.machine.release_locations(a, &released);
|
|
|
|
|
|
|
|
if !frame.loop_like {
|
|
|
|
a.emit_label(frame.label);
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
|
|
|
|
if let IfElseState::If(label) = frame.if_else {
|
|
|
|
a.emit_label(label);
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
|
|
|
|
if frame.returns.len() > 0 {
|
|
|
|
assert_eq!(frame.returns.len(), 1);
|
|
|
|
let loc = self.machine.acquire_locations(a, &frame.returns, false)[0];
|
|
|
|
a.emit_mov(Size::S64, Location::GPR(GPR::RAX), loc);
|
|
|
|
self.value_stack.push((loc, LocalOrTemp::Temp));
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
}
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|
2019-03-13 18:23:50 +08:00
|
|
|
_ => {
|
2019-04-01 20:33:33 +08:00
|
|
|
panic!("not yet implemented: {:?}", op);
|
2019-03-18 00:31:36 +08:00
|
|
|
}
|
2019-02-12 23:15:57 +08:00
|
|
|
}
|
2019-02-20 22:56:32 +08:00
|
|
|
|
2019-02-12 00:52:17 +08:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-24 00:52:32 +08:00
|
|
|
fn type_to_wp_type(ty: Type) -> WpType {
|
|
|
|
match ty {
|
|
|
|
Type::I32 => WpType::I32,
|
|
|
|
Type::I64 => WpType::I64,
|
|
|
|
Type::F32 => WpType::F32,
|
|
|
|
Type::F64 => WpType::F64,
|
|
|
|
}
|
|
|
|
}
|
2019-02-26 20:56:10 +08:00
|
|
|
|
2019-04-01 20:33:33 +08:00
|
|
|
fn get_location_released(a: &mut Assembler, m: &mut Machine, (loc, lot): (Location, LocalOrTemp)) -> Location {
|
|
|
|
if lot == LocalOrTemp::Temp {
|
|
|
|
m.release_locations(a, &[loc]);
|
2019-03-09 02:57:23 +08:00
|
|
|
}
|
2019-04-01 20:33:33 +08:00
|
|
|
loc
|
2019-03-14 10:30:24 +08:00
|
|
|
}
|