mirror of
https://github.com/fluencelabs/wasmer
synced 2025-06-24 22:21:32 +00:00
Cleanup.
@@ -2,24 +2,21 @@
 
 use super::codegen::*;
 use crate::protect_unix;
 use byteorder::{ByteOrder, LittleEndian};
 use dynasmrt::{
     x64::Assembler, AssemblyOffset, DynamicLabel, DynasmApi, DynasmLabelApi, ExecutableBuffer,
 };
 use std::cell::RefCell;
 use std::ptr::NonNull;
 use std::{any::Any, collections::HashMap, sync::Arc};
 use wasmer_runtime_core::{
     backend::{FuncResolver, ProtectedCaller, Token, UserTrapper},
-    error::{RuntimeError, RuntimeResult},
+    error::RuntimeResult,
     memory::MemoryType,
     module::{ModuleInfo, ModuleInner},
     structures::{Map, TypedIndex},
     types::{
-        FuncIndex, FuncSig, ImportedMemoryIndex, LocalFuncIndex, LocalGlobalIndex, GlobalIndex,
-        LocalMemoryIndex, LocalOrImport, MemoryIndex, SigIndex, Type, Value, TableIndex,
+        FuncIndex, FuncSig, LocalFuncIndex, GlobalIndex,
+        LocalOrImport, MemoryIndex, SigIndex, Type, Value, TableIndex,
     },
     units::Pages,
     vm::{self, ImportBacking, LocalGlobal, LocalMemory, LocalTable},
     vmcalls,
 };
@@ -158,8 +155,6 @@ unsafe impl Sync for FuncPtr {}
 pub struct X64ExecutionContext {
     code: ExecutableBuffer,
     functions: Vec<X64FunctionCode>,
-    signatures: Arc<Map<SigIndex, FuncSig>>,
-    function_signatures: Arc<Map<FuncIndex, SigIndex>>,
     function_pointers: Vec<FuncPtr>,
     _br_table_data: Vec<Vec<usize>>,
     func_import_count: usize,
@@ -188,7 +183,7 @@ pub enum IfElseState {
 impl X64ExecutionContext {
     fn get_runtime_resolver(
         &self,
-        module_info: &ModuleInfo,
+        _module_info: &ModuleInfo,
     ) -> Result<X64RuntimeResolver, CodegenError> {
         Ok(X64RuntimeResolver {
             local_function_pointers: self.function_pointers[self.func_import_count..].to_vec(),
@@ -265,14 +260,12 @@ impl ProtectedCaller for X64ExecutionContext {
 
 impl X64ModuleCodeGenerator {
     pub fn new() -> X64ModuleCodeGenerator {
-        let mut assembler = Assembler::new().unwrap();
-
         X64ModuleCodeGenerator {
             functions: vec![],
             signatures: None,
             function_signatures: None,
             function_labels: Some(HashMap::new()),
-            assembler: Some(assembler),
+            assembler: Some(Assembler::new().unwrap()),
             func_import_count: 0,
         }
     }
@@ -384,23 +377,7 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, X64RuntimeResolve
             functions: self.functions,
             _br_table_data: br_table_data,
             func_import_count: self.func_import_count,
-            signatures: match self.signatures {
-                Some(x) => x,
-                None => {
-                    return Err(CodegenError {
-                        message: "no signatures",
-                    });
-                }
-            },
             function_pointers: out_labels,
-            function_signatures: match self.function_signatures {
-                Some(x) => x,
-                None => {
-                    return Err(CodegenError {
-                        message: "no function signatures",
-                    });
-                }
-            },
         };
         let resolver = ctx.get_runtime_resolver(module_info)?;
 
@@ -453,7 +430,7 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, X64RuntimeResolve
 impl X64FunctionCode {
     fn emit_relaxed_xdiv(
         a: &mut Assembler,
-        m: &mut Machine,
+        _m: &mut Machine,
         op: fn(&mut Assembler, Size, Location),
         sz: Size,
         loc: Location,
@@ -999,7 +976,7 @@ impl X64FunctionCode {
         let mut stack_offset: usize = 0;
 
         // Calculate stack offset.
-        for (i, param) in params.iter().enumerate() {
+        for (i, _param) in params.iter().enumerate() {
             let loc = Machine::get_param_location(1 + i);
             match loc {
                 Location::Memory(_, _) => {
@@ -1245,13 +1222,13 @@ impl FunctionCodeGenerator for X64FunctionCode {
         Ok(())
     }
 
-    fn feed_param(&mut self, ty: WpType) -> Result<(), CodegenError> {
+    fn feed_param(&mut self, _ty: WpType) -> Result<(), CodegenError> {
         self.num_params += 1;
         self.num_locals += 1;
         Ok(())
     }
 
-    fn feed_local(&mut self, ty: WpType, n: usize) -> Result<(), CodegenError> {
+    fn feed_local(&mut self, _ty: WpType, n: usize) -> Result<(), CodegenError> {
        self.num_locals += n;
        Ok(())
     }
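The `_ty` renames above rely on rustc's convention that an identifier beginning with an underscore is exempt from the unused_variables lint while still satisfying the trait signature. A standalone sketch of the idea (the trait and names here are illustrative, not from this codebase):

// The underscore prefix tells rustc the parameter is intentionally unused,
// so the trait signature can be kept without a warning.
trait FeedParam {
    fn feed_param(&mut self, ty: u32) -> Result<(), ()>;
}

struct Counter {
    num_params: usize,
}

impl FeedParam for Counter {
    // `_ty` is deliberately ignored: only the count matters here.
    fn feed_param(&mut self, _ty: u32) -> Result<(), ()> {
        self.num_params += 1;
        Ok(())
    }
}

fn main() {
    let mut c = Counter { num_params: 0 };
    c.feed_param(42).unwrap();
    assert_eq!(c.num_params, 1);
}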
@@ -1316,7 +1293,7 @@ impl FunctionCodeGenerator for X64FunctionCode {
         let a = self.assembler.as_mut().unwrap();
         match op {
             Operator::GetGlobal { global_index } => {
-                let mut global_index = global_index as usize;
+                let global_index = global_index as usize;
 
                 let tmp = self.machine.acquire_temp_gpr().unwrap();
 
@@ -2678,17 +2655,14 @@ impl FunctionCodeGenerator for X64FunctionCode {
             }
             Operator::Return => {
                 let frame = &self.control_stack[0];
-                let has_return = if frame.returns.len() > 0 {
+                if frame.returns.len() > 0 {
                     assert_eq!(frame.returns.len(), 1);
                     let (loc, _) = *self.value_stack.last().unwrap();
                     Self::emit_relaxed_binop(
                         a, &mut self.machine, Assembler::emit_mov,
                         Size::S64, loc, Location::GPR(GPR::RAX),
                     );
-                    true
-                } else {
-                    false
-                };
+                }
                 let released: Vec<Location> = self.value_stack[frame.value_stack_depth..].iter()
                     .filter(|&&(_, lot)| lot == LocalOrTemp::Temp)
                     .map(|&(x, _)| x)
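The rewrite above drops an if-expression whose boolean result (`has_return`) was never read afterwards; a statement-level `if` expresses the same control flow. A minimal sketch of the shape (illustrative only):

// Before: `let has_return = if cond { ...; true } else { false };` with
// `has_return` never used again. After: a plain statement `if`.
fn emit_return(returns: &[u32]) {
    if !returns.is_empty() {
        assert_eq!(returns.len(), 1);
        // ... move the top-of-stack value into the return register ...
    }
}

fn main() {
    emit_return(&[1]);
    emit_return(&[]);
}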
[File diff suppressed because it is too large]
@@ -71,12 +71,14 @@ pub enum Size {
 }
 
 #[derive(Copy, Clone, Debug)]
+#[allow(dead_code)]
 pub enum XMMOrMemory {
     XMM(XMM),
     Memory(GPR, i32),
 }
 
 #[derive(Copy, Clone, Debug)]
+#[allow(dead_code)]
 pub enum GPROrMemory {
     GPR(GPR),
     Memory(GPR, i32),
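`#[allow(dead_code)]` scopes the suppression to the annotated item, so variants that nothing constructs yet compile without warnings while the rest of the crate keeps the lint. A standalone sketch (types simplified):

// Without the attribute, rustc warns that `Memory` is never constructed.
#[derive(Copy, Clone, Debug)]
#[allow(dead_code)]
enum XmmOrMemory {
    Xmm(u8),
    Memory(u8, i32),
}

fn main() {
    println!("{:?}", XmmOrMemory::Xmm(0));
}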
@@ -788,11 +790,11 @@ impl Emitter for Assembler {
     }
 
     fn emit_btc_gpr_imm8_32(&mut self, src: u8, dst: GPR) {
-        dynasm!(self ; btc Rd(dst as u8), BYTE (src as i8));
+        dynasm!(self ; btc Rd(dst as u8), BYTE src as i8);
     }
 
     fn emit_btc_gpr_imm8_64(&mut self, src: u8, dst: GPR) {
-        dynasm!(self ; btc Rq(dst as u8), BYTE (src as i8));
+        dynasm!(self ; btc Rq(dst as u8), BYTE src as i8);
     }
 
     fn emit_cmovae_gpr_32(&mut self, src: GPR, dst: GPR) {
@@ -22,10 +22,8 @@ mod codegen;
 mod codegen_x64;
 mod parse;
 mod protect_unix;
-mod stack;
 mod emitter_x64;
 mod machine;
-mod codegen_x64_v1;
 
 use crate::codegen::{CodegenError, ModuleCodeGenerator};
 use crate::parse::LoadError;
@@ -1,6 +1,7 @@
 use crate::emitter_x64::*;
 use std::collections::HashSet;
 use wasmparser::Type as WpType;
+use smallvec::SmallVec;
 
 struct MachineStackOffset(usize);
 
@@ -142,36 +143,6 @@ impl Machine {
         assert_eq!(self.used_xmms.remove(&xmm), true);
     }
 
-    /// Acquires stack locations from the machine state.
-    pub fn acquire_stack_locations<E: Emitter>(
-        &mut self,
-        assembler: &mut E,
-        n: usize,
-        zeroed: bool,
-    ) -> Vec<Location> {
-        let mut ret = vec![];
-        let mut delta_stack_offset: usize = 0;
-
-        for _ in 0..n {
-            let loc = {
-                self.stack_offset.0 += 8;
-                delta_stack_offset += 8;
-                Location::Memory(GPR::RBP, -(self.stack_offset.0 as i32))
-            };
-            ret.push(loc);
-        }
-
-        if delta_stack_offset != 0 {
-            assembler.emit_sub(Size::S64, Location::Imm32(delta_stack_offset as u32), Location::GPR(GPR::RSP));
-        }
-        if zeroed {
-            for i in 0..n {
-                assembler.emit_mov(Size::S64, Location::Imm32(0), ret[i]);
-            }
-        }
-        ret
-    }
-
     /// Acquires locations from the machine state.
     ///
     /// If the returned locations are used for stack value, `release_location` needs to be called on them;
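The deleted helper bump-allocated 8-byte slots at descending RBP-relative offsets and adjusted RSP once for the whole batch. A self-contained model of that bookkeeping (plain data standing in for emitted instructions; names are illustrative):

// Model of the deleted allocator: each new slot sits 8 bytes below the
// previous one; the accumulated delta would be applied to RSP in one `sub`.
#[derive(Debug, PartialEq)]
enum Location {
    Memory(&'static str, i32), // (base register, offset from it)
}

struct Machine {
    stack_offset: usize, // bytes already reserved below RBP
}

impl Machine {
    fn acquire_stack_slots(&mut self, n: usize) -> (Vec<Location>, u32) {
        let mut ret = vec![];
        let mut delta_stack_offset: u32 = 0;
        for _ in 0..n {
            self.stack_offset += 8;
            delta_stack_offset += 8;
            ret.push(Location::Memory("rbp", -(self.stack_offset as i32)));
        }
        // The real code emitted `sub rsp, delta` here, and optionally
        // zeroed each slot with a `mov`.
        (ret, delta_stack_offset)
    }
}

fn main() {
    let mut m = Machine { stack_offset: 0 };
    let (slots, delta) = m.acquire_stack_slots(2);
    assert_eq!(delta, 16);
    assert_eq!(slots[1], Location::Memory("rbp", -16));
}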
@@ -181,8 +152,8 @@
         assembler: &mut E,
         tys: &[WpType],
         zeroed: bool,
-    ) -> Vec<Location> {
-        let mut ret = vec![];
+    ) -> SmallVec<[Location; 1]> {
+        let mut ret = smallvec![];
         let mut delta_stack_offset: usize = 0;
 
         for ty in tys {
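`SmallVec<[Location; 1]>` stores its first element inline and only allocates on the heap once it grows past that, which suits a helper that usually returns a single location. A small demonstration with the smallvec crate (its `spilled()` method reports whether the heap fallback happened):

// SmallVec keeps up to N elements (here 1) inline; pushing more spills
// the contents to a heap buffer.
use smallvec::{smallvec, SmallVec};

fn acquire(n: usize) -> SmallVec<[i32; 1]> {
    let mut ret: SmallVec<[i32; 1]> = smallvec![];
    for i in 0..n {
        ret.push(i as i32);
    }
    ret
}

fn main() {
    assert!(!acquire(1).spilled()); // single element: no heap allocation
    assert!(acquire(3).spilled()); // grew past the inline capacity
}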
@@ -351,7 +322,7 @@
         for i in 0..n_params {
             let loc = Self::get_param_location(i + 1);
             locations.push(match loc {
-                Location::GPR(x) => {
+                Location::GPR(_) => {
                     let old_idx = allocated;
                     allocated += 1;
                     get_local_location(old_idx)
@@ -362,7 +333,7 @@
         }
 
         // Determine locations for normal locals.
-        for i in n_params..n {
+        for _ in n_params..n {
             locations.push(get_local_location(allocated));
             allocated += 1;
         }
@@ -383,7 +354,7 @@
 
         // Save callee-saved registers.
         for loc in locations.iter() {
-            if let Location::GPR(x) = *loc {
+            if let Location::GPR(_) = *loc {
                 a.emit_push(Size::S64, *loc);
                 self.stack_offset.0 += 8;
             }
@@ -400,7 +371,7 @@
         for i in 0..n_params {
             let loc = Self::get_param_location(i + 1);
             match loc {
-                Location::GPR(x) => {
+                Location::GPR(_) => {
                     a.emit_mov(Size::S64, loc, locations[i]);
                 },
                 _ => break
@@ -427,7 +398,7 @@
 
         // Restore callee-saved registers.
         for loc in locations.iter().rev() {
-            if let Location::GPR(x) = *loc {
+            if let Location::GPR(_) = *loc {
                 a.emit_pop(Size::S64, *loc);
             }
         }
@@ -54,12 +54,6 @@ thread_local! {
     pub static CURRENT_EXECUTABLE_BUFFER: Cell<*const c_void> = Cell::new(ptr::null());
 }
 
-pub unsafe fn trigger_trap() -> ! {
-    let jmp_buf = SETJMP_BUFFER.with(|buf| buf.get());
-
-    longjmp(jmp_buf as *mut c_void, 0)
-}
-
 pub fn call_protected<T>(f: impl FnOnce() -> T) -> RuntimeResult<T> {
     unsafe {
         let jmp_buf = SETJMP_BUFFER.with(|buf| buf.get());
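For context, `call_protected` (kept above) brackets native execution with the C setjmp/longjmp pair; the removed `trigger_trap` was a helper that jumped back into that buffer. A rough sketch of the surviving pattern (the extern declaration and buffer size are assumptions modeled on this file; a real signal handler would perform the longjmp):

// Save a jump target, run the closure, and report a trap if a handler
// ever longjmps back to the saved buffer.
use std::cell::UnsafeCell;
use std::os::raw::{c_int, c_void};

extern "C" {
    fn setjmp(env: *mut c_void) -> c_int;
}

const SETJMP_BUFFER_LEN: usize = 27; // platform-dependent guess

thread_local! {
    static SETJMP_BUFFER: UnsafeCell<[c_int; SETJMP_BUFFER_LEN]> =
        UnsafeCell::new([0; SETJMP_BUFFER_LEN]);
}

fn call_protected<T>(f: impl FnOnce() -> T) -> Result<T, String> {
    unsafe {
        let jmp_buf = SETJMP_BUFFER.with(|buf| buf.get());
        if setjmp(jmp_buf as *mut c_void) != 0 {
            Err("trap".into()) // reached via longjmp from a trap handler
        } else {
            Ok(f())
        }
    }
}

fn main() {
    assert_eq!(call_protected(|| 1 + 1), Ok(2));
}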
@@ -1,164 +0,0 @@
-use crate::codegen::CodegenError;
-use dynasmrt::DynamicLabel;
-use wasmparser::Type as WpType;
-
-/*#[repr(u8)]
-#[derive(Copy, Clone, Debug)]
-pub enum RegisterName {
-    RDI,
-    RSI,
-    RDX,
-    RCX,
-    R8,
-    R9,
-    R10,
-    R11,
-    RBX,
-    R12,
-    R13,
-    R14,
-    R15,
-    Invalid,
-}*/
-
-#[derive(Debug, Copy, Clone)]
-pub enum IfElseState {
-    None,
-    If(DynamicLabel),
-    Else,
-}
-
-#[derive(Debug)]
-pub struct ControlFrame {
-    pub label: DynamicLabel,
-    pub loop_like: bool,
-    pub if_else: IfElseState,
-    pub returns: Vec<WpType>,
-    pub value_stack_depth_before: usize,
-}
-
-#[derive(Debug)]
-pub struct ControlStack {
-    pub frames: Vec<ControlFrame>,
-}
-
-#[derive(Debug)]
-pub struct ValueStack {
-    pub num_regs: u8,
-    pub values: Vec<ValueInfo>,
-}
-
-#[derive(Copy, Clone, Debug)]
-pub struct ValueInfo {
-    pub ty: WpType,
-    pub location: ValueLocation,
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum ValueLocation {
-    Register(ScratchRegister),
-    Stack,
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub struct ScratchRegister(u8);
-
-impl ScratchRegister {
-    pub fn raw_id(&self) -> u8 {
-        self.0
-    }
-}
-
-impl ValueLocation {
-    pub fn is_register(&self) -> bool {
-        if let ValueLocation::Register(_) = *self {
-            true
-        } else {
-            false
-        }
-    }
-
-    pub fn get_register(&self) -> Result<ScratchRegister, CodegenError> {
-        if let ValueLocation::Register(id) = *self {
-            Ok(id)
-        } else {
-            Err(CodegenError {
-                message: "not a register location",
-            })
-        }
-    }
-}
-
-impl ValueStack {
-    pub fn new(num_regs: u8) -> ValueStack {
-        ValueStack {
-            num_regs: num_regs,
-            values: vec![],
-        }
-    }
-
-    fn next_location(&self, loc: &ValueLocation) -> ValueLocation {
-        match *loc {
-            ValueLocation::Register(ScratchRegister(x)) => {
-                if x >= self.num_regs - 1 {
-                    ValueLocation::Stack
-                } else {
-                    ValueLocation::Register(ScratchRegister(x + 1))
-                }
-            }
-            ValueLocation::Stack => ValueLocation::Stack,
-        }
-    }
-
-    pub fn push(&mut self, ty: WpType) -> ValueLocation {
-        let loc = self
-            .values
-            .last()
-            .map(|x| self.next_location(&x.location))
-            .unwrap_or(ValueLocation::Register(ScratchRegister(0)));
-        self.values.push(ValueInfo {
-            ty: ty,
-            location: loc,
-        });
-        loc
-    }
-
-    pub fn pop(&mut self) -> Result<ValueInfo, CodegenError> {
-        match self.values.pop() {
-            Some(x) => Ok(x),
-            None => Err(CodegenError {
-                message: "no value on top of stack",
-            }),
-        }
-    }
-
-    pub fn pop2(&mut self) -> Result<(ValueInfo, ValueInfo), CodegenError> {
-        if self.values.len() < 2 {
-            Err(CodegenError {
-                message: "less than 2 values on top of stack",
-            })
-        } else {
-            let v2 = self.values.pop().unwrap();
-            let v1 = self.values.pop().unwrap();
-            Ok((v1, v2))
-        }
-    }
-
-    pub fn reset_depth(&mut self, target_depth: usize) {
-        self.values.truncate(target_depth);
-    }
-}
-
-impl ControlStack {
-    pub fn new(label: DynamicLabel, returns: Vec<WpType>) -> ControlStack {
-        ControlStack {
-            frames: vec![ControlFrame {
-                label: label,
-                loop_like: false,
-                if_else: IfElseState::None,
-                returns: returns,
-                value_stack_depth_before: 0,
-            }],
-        }
-    }
-}
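The deleted ValueStack encoded a simple allocation policy: the first num_regs values occupy scratch registers 0 through num_regs - 1 in order, and anything deeper lives on the stack (see next_location above). A compact standalone sketch of that policy:

// Values claim scratch registers in order and spill to the stack once
// `num_regs` are in use, mirroring the deleted ValueStack::next_location.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum ValueLocation {
    Register(u8),
    Stack,
}

struct ValueStack {
    num_regs: u8,
    values: Vec<ValueLocation>,
}

impl ValueStack {
    fn push(&mut self) -> ValueLocation {
        let loc = match self.values.last() {
            Some(ValueLocation::Register(x)) if *x < self.num_regs - 1 => {
                ValueLocation::Register(x + 1)
            }
            Some(_) => ValueLocation::Stack,
            None => ValueLocation::Register(0),
        };
        self.values.push(loc);
        loc
    }
}

fn main() {
    let mut vs = ValueStack { num_regs: 2, values: vec![] };
    assert_eq!(vs.push(), ValueLocation::Register(0));
    assert_eq!(vs.push(), ValueLocation::Register(1));
    assert_eq!(vs.push(), ValueLocation::Stack); // spilled after num_regs values
}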