runtime-core: Move ArgumentRegisterAllocator into state.

losfair committed 2020-03-06 00:37:48 +08:00
parent a5de17fb18
commit e62095da5d
2 changed files with 53 additions and 49 deletions

View File

@@ -480,10 +480,11 @@ impl InstanceImage {
     }
 }
 
-/// Declarations for x86-64 registers.
+/// X64-specific structures and methods that do not depend on an x64 machine to run.
 #[cfg(unix)]
 pub mod x64_decl {
     use super::*;
+    use crate::types::Type;
 
     /// General-purpose registers.
     #[repr(u8)]
@@ -640,8 +641,58 @@ pub mod x64_decl {
             })
         }
     }
+
+    /// An allocator that allocates registers for function arguments according to the System V ABI.
+    #[derive(Default)]
+    pub struct ArgumentRegisterAllocator {
+        n_gprs: usize,
+        n_xmms: usize,
+    }
+
+    impl ArgumentRegisterAllocator {
+        /// Allocates a register for argument type `ty`. Returns `None` if no register is available for this type.
+        pub fn next(&mut self, ty: Type) -> Option<X64Register> {
+            static GPR_SEQ: &'static [GPR] =
+                &[GPR::RDI, GPR::RSI, GPR::RDX, GPR::RCX, GPR::R8, GPR::R9];
+            static XMM_SEQ: &'static [XMM] = &[
+                XMM::XMM0,
+                XMM::XMM1,
+                XMM::XMM2,
+                XMM::XMM3,
+                XMM::XMM4,
+                XMM::XMM5,
+                XMM::XMM6,
+                XMM::XMM7,
+            ];
+            match ty {
+                Type::I32 | Type::I64 => {
+                    if self.n_gprs < GPR_SEQ.len() {
+                        let gpr = GPR_SEQ[self.n_gprs];
+                        self.n_gprs += 1;
+                        Some(X64Register::GPR(gpr))
+                    } else {
+                        None
+                    }
+                }
+                Type::F32 | Type::F64 => {
+                    if self.n_xmms < XMM_SEQ.len() {
+                        let xmm = XMM_SEQ[self.n_xmms];
+                        self.n_xmms += 1;
+                        Some(X64Register::XMM(xmm))
+                    } else {
+                        None
+                    }
+                }
+                _ => todo!(
+                    "ArgumentRegisterAllocator::next: Unsupported type: {:?}",
+                    ty
+                ),
+            }
+        }
+    }
 }
 
+/// X64-specific structures and methods that only work on an x64 machine.
 #[cfg(unix)]
 pub mod x64 {
     //! The x64 state module contains functions to generate state and code for x64 targets.
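Now that the allocator is `pub` in `state::x64_decl`, code outside the trampoline module can reuse it. A minimal caller sketch, assuming only the crate-internal paths visible in the diff; the helper name `assign_argument_registers` is hypothetical and not part of this commit:

    use crate::state::x64_decl::{ArgumentRegisterAllocator, X64Register};
    use crate::types::Type;

    /// Maps each parameter type to a System V argument register, or `None`
    /// once the six GPRs / eight XMMs for that class are exhausted (stack-passed).
    fn assign_argument_registers(params: &[Type]) -> Vec<Option<X64Register>> {
        let mut alloc = ArgumentRegisterAllocator::default();
        params.iter().map(|&ty| alloc.next(ty)).collect()
    }

For a signature like `(i32, f64, i64)` this yields RDI, XMM0, RSI, since the integer and floating-point register sequences advance independently.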

View File

@@ -7,7 +7,7 @@
 //! Variadic functions are not supported because `rax` is used by the trampoline code.
 
 use crate::loader::CodeMemory;
-use crate::state::x64_decl::{X64Register, GPR, XMM};
+use crate::state::x64_decl::ArgumentRegisterAllocator;
 use crate::types::Type;
 use crate::vm::Ctx;
 use std::collections::BTreeMap;
@@ -147,53 +147,6 @@ pub struct TrampolineBuffer {
     offsets: Vec<usize>,
 }
 
-#[derive(Default)]
-struct ArgumentRegisterAllocator {
-    n_gprs: usize,
-    n_xmms: usize,
-}
-
-impl ArgumentRegisterAllocator {
-    fn next(&mut self, ty: Type) -> Option<X64Register> {
-        static GPR_SEQ: &'static [GPR] =
-            &[GPR::RDI, GPR::RSI, GPR::RDX, GPR::RCX, GPR::R8, GPR::R9];
-        static XMM_SEQ: &'static [XMM] = &[
-            XMM::XMM0,
-            XMM::XMM1,
-            XMM::XMM2,
-            XMM::XMM3,
-            XMM::XMM4,
-            XMM::XMM5,
-            XMM::XMM6,
-            XMM::XMM7,
-        ];
-        match ty {
-            Type::I32 | Type::I64 => {
-                if self.n_gprs < GPR_SEQ.len() {
-                    let gpr = GPR_SEQ[self.n_gprs];
-                    self.n_gprs += 1;
-                    Some(X64Register::GPR(gpr))
-                } else {
-                    None
-                }
-            }
-            Type::F32 | Type::F64 => {
-                if self.n_xmms < XMM_SEQ.len() {
-                    let xmm = XMM_SEQ[self.n_xmms];
-                    self.n_xmms += 1;
-                    Some(X64Register::XMM(xmm))
-                } else {
-                    None
-                }
-            }
-            _ => todo!(
-                "ArgumentRegisterAllocator::next: Unsupported type: {:?}",
-                ty
-            ),
-        }
-    }
-}
-
 fn value_to_bytes<T: Copy>(ptr: &T) -> &[u8] {
     unsafe { slice::from_raw_parts(ptr as *const T as *const u8, mem::size_of::<T>()) }
 }