Merge branch 'master' into deterministic

This commit is contained in:
Mark McCaskey
2019-12-05 11:50:16 -08:00
committed by GitHub
189 changed files with 14826 additions and 4724 deletions

View File

@ -28,6 +28,7 @@ pub enum Backend {
Cranelift,
Singlepass,
LLVM,
Auto,
}
impl Backend {
@ -40,6 +41,7 @@ impl Backend {
"singlepass",
#[cfg(feature = "backend-llvm")]
"llvm",
"auto",
]
}
@ -50,6 +52,7 @@ impl Backend {
Backend::Cranelift => "cranelift",
Backend::Singlepass => "singlepass",
Backend::LLVM => "llvm",
Backend::Auto => "auto",
}
}
}
@ -67,11 +70,88 @@ impl std::str::FromStr for Backend {
"singlepass" => Ok(Backend::Singlepass),
"cranelift" => Ok(Backend::Cranelift),
"llvm" => Ok(Backend::LLVM),
"auto" => Ok(Backend::Auto),
_ => Err(format!("The backend {} doesn't exist", s)),
}
}
}
/// The target processor architecture, used to select
/// architecture-specific behavior such as the inline breakpoint encoding
/// (see `get_inline_breakpoint_size` / `read_inline_breakpoint`).
#[derive(Copy, Clone, Debug)]
pub enum Architecture {
    /// x86-64.
    X64,
    /// 64-bit ARM.
    Aarch64,
}
/// The kind of an inline breakpoint embedded in generated code.
///
/// `#[repr(u8)]` so the variant maps to the single type byte that follows
/// the breakpoint magic sequence (see `read_inline_breakpoint`).
#[repr(u8)]
#[derive(Copy, Clone, Debug)]
pub enum InlineBreakpointType {
    /// Type byte 0.
    Trace,
    /// Type byte 1; dispatched to the registered breakpoint handlers in the
    /// signal trap handler.
    Middleware,
    /// Any unrecognized type byte.
    Unknown,
}
/// An inline breakpoint decoded from generated code.
#[derive(Clone, Debug)]
pub struct InlineBreakpoint {
    /// Total size in bytes of the breakpoint's encoding in the code stream;
    /// the instruction pointer is advanced by this amount to skip it.
    pub size: usize,
    /// The kind of breakpoint.
    pub ty: InlineBreakpointType,
}
/// Returns the size in bytes of an inline breakpoint for the given
/// architecture/backend pair, or `None` when that pair does not emit
/// inline breakpoints.
pub fn get_inline_breakpoint_size(arch: Architecture, backend: Backend) -> Option<usize> {
    // Only the Singlepass backend emits inline breakpoints; the encoding
    // length depends on the target instruction set.
    match backend {
        Backend::Singlepass => match arch {
            Architecture::X64 => Some(7),
            Architecture::Aarch64 => Some(12),
        },
        _ => None,
    }
}
/// Attempts to decode an inline breakpoint from the start of `code` for the
/// given architecture/backend pair.
///
/// Returns `None` when the pair does not support inline breakpoints, when
/// `code` is shorter than the full encoding, or when the magic byte
/// sequence does not match.
pub fn read_inline_breakpoint(
    arch: Architecture,
    backend: Backend,
    code: &[u8],
) -> Option<InlineBreakpoint> {
    // Each supported (arch, backend) pair is described by its magic byte
    // sequence, the total encoded size, and the offset of the type byte.
    let (magic, size, ty_offset): (&[u8], usize, usize) = match (arch, backend) {
        // ud2 ud (int 0xff) code
        (Architecture::X64, Backend::Singlepass) => {
            (&[0x0f, 0x0b, 0x0f, 0xb9, 0xcd, 0xff], 7, 6)
        }
        (Architecture::Aarch64, Backend::Singlepass) => {
            (&[0, 0, 0, 0, 0xff, 0xff, 0xff, 0xff], 12, 8)
        }
        _ => return None,
    };

    if code.len() < size || code[..magic.len()] != *magic {
        return None;
    }

    Some(InlineBreakpoint {
        size,
        ty: match code[ty_offset] {
            0 => InlineBreakpointType::Trace,
            1 => InlineBreakpointType::Middleware,
            _ => InlineBreakpointType::Unknown,
        },
    })
}
#[cfg(test)]
mod backend_test {
use super::*;
@ -129,6 +209,11 @@ pub struct CompilerConfig {
pub enforce_stack_check: bool,
pub track_state: bool,
pub features: Features,
// target info used by LLVM
pub triple: Option<String>,
pub cpu_name: Option<String>,
pub cpu_features: Option<String>,
}
pub trait Compiler {

View File

@ -15,8 +15,13 @@ use crate::{
},
vm,
};
use std::{fmt::Debug, slice};
use std::{
fmt::Debug,
ptr::{self, NonNull},
slice,
};
/// Size of the array for internal instance usage
pub const INTERNALS_SIZE: usize = 256;
pub(crate) struct Internals(pub(crate) [u64; INTERNALS_SIZE]);
@ -72,7 +77,7 @@ impl LocalBacking {
}
};
let mut tables = Self::generate_tables(module);
let mut globals = Self::generate_globals(module, imports);
let mut globals = Self::generate_globals(module, imports)?;
// Ensure all initializers are valid before running finalizers
Self::validate_memories(module, imports)?;
@ -382,9 +387,9 @@ impl LocalBacking {
vmctx,
),
LocalOrImport::Import(imported_func_index) => {
let vm::ImportedFunc { func, vmctx } =
let vm::ImportedFunc { func, func_ctx } =
imports.vm_functions[imported_func_index];
(func, vmctx)
(func, unsafe { func_ctx.as_ref() }.vmctx.as_ptr())
}
};
@ -415,9 +420,9 @@ impl LocalBacking {
vmctx,
),
LocalOrImport::Import(imported_func_index) => {
let vm::ImportedFunc { func, vmctx } =
let vm::ImportedFunc { func, func_ctx } =
imports.vm_functions[imported_func_index];
(func, vmctx)
(func, unsafe { func_ctx.as_ref() }.vmctx.as_ptr())
}
};
@ -438,13 +443,22 @@ impl LocalBacking {
fn generate_globals(
module: &ModuleInner,
imports: &ImportBacking,
) -> BoxedMap<LocalGlobalIndex, Global> {
) -> LinkResult<BoxedMap<LocalGlobalIndex, Global>> {
let mut globals = Map::with_capacity(module.info.globals.len());
for (_, global_init) in module.info.globals.iter() {
let value = match &global_init.init {
Initializer::Const(value) => value.clone(),
Initializer::GetGlobal(import_global_index) => {
if imports.globals.len() <= import_global_index.index() {
return Err(vec![LinkError::Generic {
message: format!(
"Trying to read the `{:?}` global that is not properly initialized.",
import_global_index.index()
),
}]);
}
imports.globals[*import_global_index].get()
}
};
@ -458,7 +472,7 @@ impl LocalBacking {
globals.push(global);
}
globals.into_boxed_map()
Ok(globals.into_boxed_map())
}
fn finalize_globals(
@ -472,6 +486,8 @@ impl LocalBacking {
}
}
/// The `ImportBacking` stores references to the imported resources of an Instance. This includes
/// imported memories, tables, globals and functions.
#[derive(Debug)]
pub struct ImportBacking {
pub(crate) memories: BoxedMap<ImportedMemoryIndex, Memory>,
@ -488,6 +504,7 @@ pub struct ImportBacking {
unsafe impl Send for ImportBacking {}
impl ImportBacking {
/// Creates a new `ImportBacking` from the given `ModuleInner`, `ImportObject`, and `Ctx`.
pub fn new(
module: &ModuleInner,
imports: &ImportObject,
@ -536,11 +553,21 @@ impl ImportBacking {
}
}
/// Gets a `ImportedFunc` from the given `ImportedFuncIndex`.
pub fn imported_func(&self, index: ImportedFuncIndex) -> vm::ImportedFunc {
self.vm_functions[index].clone()
}
}
impl Drop for ImportBacking {
    fn drop(&mut self) {
        // Properly drop the `vm::FuncCtx` in `vm::ImportedFunc`.
        //
        // Each `func_ctx` was leaked with `Box::into_raw` in
        // `import_functions`, so `ImportBacking` owns the allocation and must
        // reconstitute the `Box` here to free it.
        //
        // NOTE(review): when `allow_missing_functions` is set,
        // `import_functions` builds `func_ctx` from a *null* pointer via
        // `NonNull::new_unchecked`; calling `Box::from_raw` on that null
        // pointer here would be undefined behavior — confirm that path can
        // never reach this drop.
        for (_imported_func_index, imported_func) in (*self.vm_functions).iter_mut() {
            let _: Box<vm::FuncCtx> = unsafe { Box::from_raw(imported_func.func_ctx.as_ptr()) };
        }
    }
}
fn import_functions(
module: &ModuleInner,
imports: &ImportObject,
@ -564,6 +591,7 @@ fn import_functions(
let import =
imports.maybe_with_namespace(namespace, |namespace| namespace.get_export(name));
match import {
Some(Export::Function {
func,
@ -573,10 +601,28 @@ fn import_functions(
if *expected_sig == *signature {
functions.push(vm::ImportedFunc {
func: func.inner(),
vmctx: match ctx {
Context::External(ctx) => ctx,
Context::Internal => vmctx,
},
func_ctx: NonNull::new(Box::into_raw(Box::new(vm::FuncCtx {
// ^^^^^^^^ `vm::FuncCtx` is purposely leaked.
// It is dropped by the specific `Drop`
// implementation of `ImportBacking`.
vmctx: NonNull::new(match ctx {
Context::External(vmctx) => vmctx,
Context::ExternalWithEnv(vmctx_, _) => {
if vmctx_.is_null() {
vmctx
} else {
vmctx_
}
}
Context::Internal => vmctx,
})
.expect("`vmctx` must not be null."),
func_env: match ctx {
Context::ExternalWithEnv(_, func_env) => func_env,
_ => None,
},
})))
.unwrap(),
});
} else {
link_errors.push(LinkError::IncorrectImportSignature {
@ -605,8 +651,8 @@ fn import_functions(
None => {
if imports.allow_missing_functions {
functions.push(vm::ImportedFunc {
func: ::std::ptr::null(),
vmctx: ::std::ptr::null_mut(),
func: ptr::null(),
func_ctx: unsafe { NonNull::new_unchecked(ptr::null_mut()) }, // TODO: Non-sense…
});
} else {
link_errors.push(LinkError::ImportNotFound {

View File

@ -1,3 +1,7 @@
//! The cache module provides the common data structures used by compiler backends to allow
//! serializing compiled wasm code to a binary format. The binary format can be persisted,
//! and loaded to allow skipping compilation and fast startup.
use crate::{
backend::Backend,
module::{Module, ModuleInfo},
@ -6,20 +10,31 @@ use crate::{
use blake2b_simd::blake2bp;
use std::{fmt, io, mem, slice};
/// Indicates the kind of invalidity found in a cache file
#[derive(Debug)]
pub enum InvalidFileType {
/// Given cache header slice does not match the expected size of an `ArtifactHeader`
InvalidSize,
/// Given cache header slice does not contain the expected magic bytes
InvalidMagic,
}
/// Kinds of caching errors
#[derive(Debug)]
pub enum Error {
/// An IO error while reading/writing a cache binary.
IoError(io::Error),
/// An error deserializing bytes into a cache data structure.
DeserializeError(String),
/// An error serializing bytes from a cache data structure.
SerializeError(String),
/// An undefined caching error with a message.
Unknown(String),
/// An invalid cache binary given.
InvalidFile(InvalidFileType),
/// The cached binary has been invalidated.
InvalidatedCache,
/// The current backend does not support caching.
UnsupportedBackend(Backend),
}
@ -164,6 +179,8 @@ struct ArtifactInner {
compiled_code: Memory,
}
/// Artifact are produced by caching, are serialized/deserialized to binaries, and contain
/// module info, backend metadata, and compiled code.
pub struct Artifact {
inner: ArtifactInner,
}
@ -183,6 +200,7 @@ impl Artifact {
}
}
/// Deserializes an `Artifact` from the given byte slice.
pub fn deserialize(bytes: &[u8]) -> Result<Self, Error> {
let (_, body_slice) = ArtifactHeader::read_from_slice(bytes)?;
@ -192,6 +210,7 @@ impl Artifact {
Ok(Artifact { inner })
}
/// A reference to the `Artifact`'s stored `ModuleInfo`
pub fn info(&self) -> &ModuleInfo {
&self.inner.info
}
@ -205,6 +224,7 @@ impl Artifact {
)
}
/// Serializes the `Artifact` into a vector of bytes
pub fn serialize(&self) -> Result<Vec<u8>, Error> {
let cache_header = ArtifactHeader {
magic: WASMER_CACHE_MAGIC,
@ -230,7 +250,9 @@ impl Artifact {
///
/// The `wasmer-runtime` supplies a naive `FileSystemCache` api.
pub trait Cache {
/// Error type to return when load error occurs
type LoadError: fmt::Debug;
/// Error type to return when store error occurs
type StoreError: fmt::Debug;
/// loads a module using the default `Backend`
@ -238,6 +260,7 @@ pub trait Cache {
/// loads a cached module using a specific `Backend`
fn load_with_backend(&self, key: WasmHash, backend: Backend)
-> Result<Module, Self::LoadError>;
/// Store a module into the cache with the given key
fn store(&mut self, key: WasmHash, module: Module) -> Result<(), Self::StoreError>;
}

View File

@ -1,3 +1,7 @@
//! The codegen module provides common functions and data structures used by multiple backends
//! during the code generation process.
#[cfg(unix)]
use crate::fault::FaultInfo;
use crate::{
backend::RunnableModule,
backend::{Backend, CacheGen, Compiler, CompilerConfig, Features, Token},
@ -17,22 +21,35 @@ use std::sync::{Arc, RwLock};
use wasmparser::{self, WasmDecoder};
use wasmparser::{Operator, Type as WpType};
/// A type that defines a function pointer, which is called when breakpoints occur.
pub type BreakpointHandler =
Box<dyn Fn(BreakpointInfo) -> Result<(), Box<dyn Any>> + Send + Sync + 'static>;
/// Maps instruction pointers to their breakpoint handlers.
pub type BreakpointMap = Arc<HashMap<usize, BreakpointHandler>>;
/// An event generated during parsing of a wasm binary
#[derive(Debug)]
pub enum Event<'a, 'b> {
/// An internal event created by the parser used to provide hooks during code generation.
Internal(InternalEvent),
/// An event generated by parsing a wasm operator
Wasm(&'b Operator<'a>),
/// An event generated by parsing a wasm operator that contains an owned `Operator`
WasmOwned(Operator<'a>),
}
/// Kinds of `InternalEvent`s created during parsing.
pub enum InternalEvent {
/// A function parse is about to begin.
FunctionBegin(u32),
/// A function parsing has just completed.
FunctionEnd,
/// A breakpoint emitted during parsing.
Breakpoint(BreakpointHandler),
/// Indicates setting an internal field.
SetInternal(u32),
/// Indicates getting an internal field.
GetInternal(u32),
}
@ -48,14 +65,32 @@ impl fmt::Debug for InternalEvent {
}
}
/// Information for a breakpoint
#[cfg(unix)]
pub struct BreakpointInfo<'a> {
pub fault: Option<&'a dyn Any>,
/// Fault.
pub fault: Option<&'a FaultInfo>,
}
/// Information for a breakpoint
#[cfg(not(unix))]
pub struct BreakpointInfo {
/// Fault placeholder.
pub fault: Option<()>,
}
/// A trait that represents the functions needed to be implemented to generate code for a module.
pub trait ModuleCodeGenerator<FCG: FunctionCodeGenerator<E>, RM: RunnableModule, E: Debug> {
/// Creates a new module code generator.
fn new() -> Self;
/// Creates a new module code generator for specified target.
fn new_with_target(
triple: Option<String>,
cpu_name: Option<String>,
cpu_features: Option<String>,
) -> Self;
/// Returns the backend id associated with this MCG.
fn backend_id() -> Backend;
@ -65,7 +100,7 @@ pub trait ModuleCodeGenerator<FCG: FunctionCodeGenerator<E>, RM: RunnableModule,
}
/// Adds an import function.
fn feed_import_function(&mut self) -> Result<(), E>;
/// Sets the signatures.
fn feed_signatures(&mut self, signatures: Map<SigIndex, FuncSig>) -> Result<(), E>;
/// Sets function signatures.
fn feed_function_signatures(&mut self, assoc: Map<FuncIndex, SigIndex>) -> Result<(), E>;
@ -80,6 +115,8 @@ pub trait ModuleCodeGenerator<FCG: FunctionCodeGenerator<E>, RM: RunnableModule,
unsafe fn from_cache(cache: Artifact, _: Token) -> Result<ModuleInner, CacheError>;
}
/// A streaming compiler which is designed to generated code for a module based on a stream
/// of wasm parser events.
pub struct StreamingCompiler<
MCG: ModuleCodeGenerator<FCG, RM, E>,
FCG: FunctionCodeGenerator<E>,
@ -94,6 +131,7 @@ pub struct StreamingCompiler<
_phantom_e: PhantomData<E>,
}
/// A simple generator for a `StreamingCompiler`.
pub struct SimpleStreamingCompilerGen<
MCG: ModuleCodeGenerator<FCG, RM, E>,
FCG: FunctionCodeGenerator<E>,
@ -113,6 +151,7 @@ impl<
E: Debug,
> SimpleStreamingCompilerGen<MCG, FCG, RM, E>
{
/// Create a new `StreamingCompiler`.
pub fn new() -> StreamingCompiler<MCG, FCG, RM, E, impl Fn() -> MiddlewareChain> {
StreamingCompiler::new(|| MiddlewareChain::new())
}
@ -126,6 +165,7 @@ impl<
CGEN: Fn() -> MiddlewareChain,
> StreamingCompiler<MCG, FCG, RM, E, CGEN>
{
/// Create a new `StreamingCompiler` with the given `MiddlewareChain`.
pub fn new(chain_gen: CGEN) -> Self {
Self {
middleware_chain_generator: chain_gen,
@ -137,6 +177,7 @@ impl<
}
}
/// Create a new `ValidatingParserConfig` with the given features.
pub fn validating_parser_config(features: &Features) -> wasmparser::ValidatingParserConfig {
wasmparser::ValidatingParserConfig {
operator_config: wasmparser::OperatorValidatorConfig {
@ -185,7 +226,14 @@ impl<
validate_with_features(wasm, &compiler_config.features)?;
}
let mut mcg = MCG::new();
let mut mcg = match MCG::backend_id() {
Backend::LLVM => MCG::new_with_target(
compiler_config.triple.clone(),
compiler_config.cpu_name.clone(),
compiler_config.cpu_features.clone(),
),
_ => MCG::new(),
};
let mut chain = (self.middleware_chain_generator)();
let info = crate::parse::read_module(
wasm,
@ -218,34 +266,41 @@ impl<
fn requires_pre_validation(backend: Backend) -> bool {
match backend {
Backend::Cranelift => true,
Backend::LLVM => false,
Backend::LLVM => true,
Backend::Singlepass => false,
Backend::Auto => false,
}
}
/// A sink for parse events.
pub struct EventSink<'a, 'b> {
buffer: SmallVec<[Event<'a, 'b>; 2]>,
}
impl<'a, 'b> EventSink<'a, 'b> {
/// Push a new `Event` to this sink.
pub fn push(&mut self, ev: Event<'a, 'b>) {
self.buffer.push(ev);
}
}
/// A container for a chain of middlewares.
pub struct MiddlewareChain {
chain: Vec<Box<dyn GenericFunctionMiddleware>>,
}
impl MiddlewareChain {
/// Create a new empty `MiddlewareChain`.
pub fn new() -> MiddlewareChain {
MiddlewareChain { chain: vec![] }
}
/// Push a new `FunctionMiddleware` to this `MiddlewareChain`.
pub fn push<M: FunctionMiddleware + 'static>(&mut self, m: M) {
self.chain.push(Box::new(m));
}
/// Run this chain with the provided function code generator, event and module info.
pub(crate) fn run<E: Debug, FCG: FunctionCodeGenerator<E>>(
&mut self,
fcg: Option<&mut FCG>,
@ -273,8 +328,11 @@ impl MiddlewareChain {
}
}
/// A trait that represents the signature required to implement middleware for a function.
pub trait FunctionMiddleware {
/// The error type for this middleware's functions.
type Error: Debug;
/// Processes the given event, module info and sink.
fn feed_event<'a, 'b: 'a>(
&mut self,
op: Event<'a, 'b>,

View File

@ -1,13 +1,28 @@
//! The error module contains the data structures and helper functions used to implement errors that
//! are produced and returned from the wasmer runtime core.
use crate::types::{FuncSig, GlobalDescriptor, MemoryDescriptor, TableDescriptor, Type};
use core::borrow::Borrow;
use std::any::Any;
/// Aliases the standard `Result` type as `Result` within this module.
pub type Result<T> = std::result::Result<T, Error>;
/// Result of an attempt to compile the provided WebAssembly module into a `Module`.
/// Aliases the standard `Result` with `CompileError` as the default error type.
pub type CompileResult<T> = std::result::Result<T, CompileError>;
/// Result of an attempt to link the provided WebAssembly instance.
/// Aliases the standard `Result` with `Vec<LinkError>` as the default error type.
pub type LinkResult<T> = std::result::Result<T, Vec<LinkError>>;
/// Result of an attempt to run the provided WebAssembly instance.
/// Aliases the standard `Result` with `RuntimeError` as the default error type.
pub type RuntimeResult<T> = std::result::Result<T, RuntimeError>;
/// Result of an attempt to call the provided WebAssembly instance.
/// Aliases the standard `Result` with `CallError` as the default error type.
pub type CallResult<T> = std::result::Result<T, CallError>;
/// Result of an attempt to resolve a WebAssembly function by name.
/// Aliases the standard `Result` with `ResolveError` as the default error type.
pub type ResolveResult<T> = std::result::Result<T, ResolveError>;
/// Result of an attempt to parse bytes into a WebAssembly module.
/// Aliases the standard `Result` with `ParseError` as the default error type.
pub type ParseResult<T> = std::result::Result<T, ParseError>;
/// This is returned when the chosen compiler is unable to
@ -17,8 +32,16 @@ pub type ParseResult<T> = std::result::Result<T, ParseError>;
/// Comparing two `CompileError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum CompileError {
ValidationError { msg: String },
InternalError { msg: String },
/// A validation error containing an error message.
ValidationError {
/// An error message.
msg: String,
},
/// An internal error containing an error message.
InternalError {
/// An error message.
msg: String,
},
}
impl PartialEq for CompileError {
@ -46,41 +69,71 @@ impl std::error::Error for CompileError {}
/// Comparing two `LinkError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum LinkError {
/// The type of the provided import does not match the expected type.
IncorrectImportType {
/// Namespace.
namespace: String,
/// Name.
name: String,
/// Expected.
expected: String,
/// Found.
found: String,
},
/// The signature of the provided import does not match the expected signature.
IncorrectImportSignature {
/// Namespace.
namespace: String,
/// Name.
name: String,
/// Expected.
expected: FuncSig,
/// Found.
found: FuncSig,
},
/// An expected import was not provided.
ImportNotFound {
/// Namespace.
namespace: String,
/// Name.
name: String,
},
/// The memory descriptor provided does not match the expected descriptor.
IncorrectMemoryDescriptor {
/// Namespace.
namespace: String,
/// Name.
name: String,
/// Expected.
expected: MemoryDescriptor,
/// Found.
found: MemoryDescriptor,
},
/// The table descriptor provided does not match the expected descriptor.
IncorrectTableDescriptor {
/// Namespace.
namespace: String,
/// Name.
name: String,
/// Expected.
expected: TableDescriptor,
/// Found.
found: TableDescriptor,
},
/// The global descriptor provided does not match the expected descriptor.
IncorrectGlobalDescriptor {
/// Namespace.
namespace: String,
/// Name.
name: String,
/// Expected.
expected: GlobalDescriptor,
/// Found.
found: GlobalDescriptor,
},
/// A generic error with a message.
Generic {
/// Error message.
message: String,
},
}
@ -126,8 +179,16 @@ impl std::error::Error for LinkError {}
///
/// Comparing two `RuntimeError`s always evaluates to false.
pub enum RuntimeError {
Trap { msg: Box<str> },
Error { data: Box<dyn Any> },
/// Trap.
Trap {
/// Trap message.
msg: Box<str>,
},
/// Error.
Error {
/// Error data.
data: Box<dyn Any>,
},
}
impl PartialEq for RuntimeError {
@ -169,9 +230,23 @@ impl std::error::Error for RuntimeError {}
/// Comparing two `ResolveError`s always evaluates to false.
#[derive(Debug, Clone)]
pub enum ResolveError {
Signature { expected: FuncSig, found: Vec<Type> },
ExportNotFound { name: String },
ExportWrongType { name: String },
/// Found signature did not match expected signature.
Signature {
/// Expected `FuncSig`.
expected: FuncSig,
/// Found type.
found: Vec<Type>,
},
/// Export not found.
ExportNotFound {
/// Name.
name: String,
},
/// Export found with the wrong type.
ExportWrongType {
/// Name.
name: String,
},
}
impl PartialEq for ResolveError {
@ -213,7 +288,9 @@ impl std::error::Error for ResolveError {}
///
/// Comparing two `CallError`s always evaluates to false.
pub enum CallError {
/// An error occurred resolving the function's name or types.
Resolve(ResolveError),
/// A runtime error occurred during the function call.
Runtime(RuntimeError),
}
@ -247,8 +324,11 @@ impl std::error::Error for CallError {}
/// like a `Memory` or a `Table`.
#[derive(Debug, Clone)]
pub enum CreationError {
/// Unable to create memory error.
UnableToCreateMemory,
/// Unable to create table error.
UnableToCreateTable,
/// Invalid descriptor error with message.
InvalidDescriptor(String),
}
@ -281,11 +361,17 @@ impl std::error::Error for CreationError {}
/// Comparing two `Error`s always evaluates to false.
#[derive(Debug)]
pub enum Error {
/// Compile error.
CompileError(CompileError),
/// Link errors.
LinkError(Vec<LinkError>),
/// Runtime error.
RuntimeError(RuntimeError),
/// Resolve error.
ResolveError(ResolveError),
/// Call error.
CallError(CallError),
/// Creation error.
CreationError(CreationError),
}
@ -368,13 +454,20 @@ impl std::fmt::Display for Error {
impl std::error::Error for Error {}
/// An error occurred while growing a memory or table.
#[derive(Debug)]
pub enum GrowError {
/// Error growing memory.
MemoryGrowError,
/// Error growing table.
TableGrowError,
/// Max pages were exceeded.
ExceededMaxPages(PageError),
/// Max pages for memory were exceeded.
ExceededMaxPagesForMemory(usize, usize),
/// Error protecting memory.
CouldNotProtectMemory(MemoryProtectionError),
/// Error creating memory.
CouldNotCreateMemory(MemoryCreationError),
}
@ -393,9 +486,11 @@ impl std::fmt::Display for GrowError {
impl std::error::Error for GrowError {}
/// A kind of page error.
#[derive(Debug)]
pub enum PageError {
// left, right, added
/// Max pages were exceeded error.
ExceededMaxPages(usize, usize, usize),
}
@ -414,9 +509,12 @@ impl Into<GrowError> for PageError {
}
}
/// Error occurred while creating memory.
#[derive(Debug)]
pub enum MemoryCreationError {
/// Allocation of virtual memory failed error.
VirtualMemoryAllocationFailed(usize, String),
/// Error creating memory from file.
CouldNotCreateMemoryFromFile(std::io::Error),
}
@ -446,8 +544,10 @@ impl From<std::io::Error> for MemoryCreationError {
}
}
/// Error protecting memory.
#[derive(Debug)]
pub enum MemoryProtectionError {
/// Protection failed error.
ProtectionFailed(usize, usize, String),
}
@ -470,8 +570,10 @@ impl Into<GrowError> for MemoryProtectionError {
}
}
/// Parse Error.
#[derive(Debug)]
pub enum ParseError {
/// Error reading binary.
BinaryReadError,
}

View File

@ -1,31 +1,51 @@
//! The export module contains the implementation data structures and helper functions used to
//! manipulate and access a wasm module's exports including memories, tables, globals, and
//! functions.
use crate::{
global::Global, instance::InstanceInner, memory::Memory, module::ExportIndex,
module::ModuleInner, table::Table, types::FuncSig, vm,
};
use indexmap::map::Iter as IndexMapIter;
use std::sync::Arc;
use std::{ptr::NonNull, sync::Arc};
/// A kind of Context.
#[derive(Debug, Copy, Clone)]
pub enum Context {
/// External context include a mutable pointer to `Ctx`.
External(*mut vm::Ctx),
/// External context with an environment include a mutable pointer
/// to `Ctx` and an optional non-null pointer to `FuncEnv`.
ExternalWithEnv(*mut vm::Ctx, Option<NonNull<vm::FuncEnv>>),
/// Internal context.
Internal,
}
// Manually implemented because context contains a raw pointer to Ctx
unsafe impl Send for Context {}
/// Kind of WebAssembly export.
#[derive(Debug, Clone)]
pub enum Export {
/// Function export.
Function {
/// A pointer to a function.
func: FuncPointer,
/// A kind of context.
ctx: Context,
/// The signature of the function.
signature: Arc<FuncSig>,
},
/// Memory export.
Memory(Memory),
/// Table export.
Table(Table),
/// Global export.
Global(Global),
}
/// Const pointer to a `Func`.
#[derive(Debug, Clone)]
pub struct FuncPointer(*const vm::Func);
@ -45,6 +65,7 @@ impl FuncPointer {
}
}
/// An iterator to an instance's exports.
pub struct ExportIter<'a> {
inner: &'a InstanceInner,
iter: IndexMapIter<'a, String, ExportIndex>,

View File

@ -1,17 +1,35 @@
//! The fault module contains the implementation for handling breakpoints, traps, and signals
//! for wasm code.
pub mod raw {
//! The raw module contains required externed function interfaces for the fault module.
use std::ffi::c_void;
#[cfg(target_arch = "x86_64")]
extern "C" {
/// Load registers and return on the stack [stack_end..stack_begin].
pub fn run_on_alternative_stack(stack_end: *mut u64, stack_begin: *mut u64) -> u64;
/// Internal routine for switching into a backend without information about where registers are preserved.
pub fn register_preservation_trampoline(); // NOT safe to call directly
}
/// Internal routine for switching into a backend without information about where registers are preserved.
#[cfg(not(target_arch = "x86_64"))]
pub extern "C" fn register_preservation_trampoline() {
unimplemented!("register_preservation_trampoline");
}
extern "C" {
/// libc setjmp
pub fn setjmp(env: *mut c_void) -> i32;
/// libc longjmp
pub fn longjmp(env: *mut c_void, val: i32) -> !;
}
}
use crate::codegen::{BreakpointInfo, BreakpointMap};
use crate::state::x64::{build_instance_image, read_stack, X64Register, GPR, XMM};
use crate::state::CodeVersion;
use crate::state::x64::{build_instance_image, read_stack, X64Register, GPR};
use crate::state::{CodeVersion, ExecutionStateImage};
use crate::vm;
use libc::{mmap, mprotect, siginfo_t, MAP_ANON, MAP_PRIVATE, PROT_NONE, PROT_READ, PROT_WRITE};
use nix::sys::signal::{
@ -25,13 +43,19 @@ use std::process;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Once;
/// Runs on the alternative stack `[stack_end..stack_begin]`.
///
/// Thin wrapper over the externed assembly routine
/// `raw::run_on_alternative_stack`, which loads registers and returns on
/// that stack.
#[cfg(target_arch = "x86_64")]
pub(crate) unsafe fn run_on_alternative_stack(stack_end: *mut u64, stack_begin: *mut u64) -> u64 {
    raw::run_on_alternative_stack(stack_end, stack_begin)
}
/// Stub for non-x86_64 targets: the alternative-stack routine is only
/// provided as x86_64 assembly, so this always panics.
#[cfg(not(target_arch = "x86_64"))]
pub(crate) unsafe fn run_on_alternative_stack(_stack_end: *mut u64, _stack_begin: *mut u64) -> u64 {
    unimplemented!("run_on_alternative_stack");
}
const TRAP_STACK_SIZE: usize = 1048576; // 1MB
const SETJMP_BUFFER_LEN: usize = 27;
const SETJMP_BUFFER_LEN: usize = 128;
type SetJmpBuffer = [i32; SETJMP_BUFFER_LEN];
struct UnwindInfo {
@ -40,13 +64,19 @@ struct UnwindInfo {
payload: Option<Box<dyn Any>>, // out
}
/// A store for boundary register preservation.
///
/// NOTE(review): `#[repr(packed)]` and the `#[no_mangle] extern "C"`
/// accessor `get_boundary_register_preservation` suggest this layout is
/// read/written from the assembly trampoline — confirm before reordering
/// fields.
#[repr(packed)]
#[derive(Default, Copy, Clone)]
pub struct BoundaryRegisterPreservation {
    /// R15.
    pub r15: u64,
    /// R14.
    pub r14: u64,
    /// R13.
    pub r13: u64,
    /// R12.
    pub r12: u64,
    /// RBX.
    pub rbx: u64,
}
@ -58,6 +88,7 @@ thread_local! {
static BOUNDARY_REGISTER_PRESERVATION: UnsafeCell<BoundaryRegisterPreservation> = UnsafeCell::new(BoundaryRegisterPreservation::default());
}
/// Gets a mutable pointer to the `BoundaryRegisterPreservation`.
#[no_mangle]
pub unsafe extern "C" fn get_boundary_register_preservation() -> *mut BoundaryRegisterPreservation {
BOUNDARY_REGISTER_PRESERVATION.with(|x| x.get())
@ -89,10 +120,12 @@ lazy_static! {
}
static INTERRUPT_SIGNAL_DELIVERED: AtomicBool = AtomicBool::new(false);
/// Returns a boolean indicating if SIGINT triggered the fault.
///
/// Reads the thread-local `WAS_SIGINT_TRIGGERED` flag, which is cleared and
/// set by the signal trap handler.
pub fn was_sigint_triggered_fault() -> bool {
    WAS_SIGINT_TRIGGERED.with(|x| x.get())
}
/// Runs a callback function with the given `Ctx`.
pub unsafe fn with_ctx<R, F: FnOnce() -> R>(ctx: *mut vm::Ctx, cb: F) -> R {
let addr = CURRENT_CTX.with(|x| x.get());
let old = *addr;
@ -102,18 +135,22 @@ pub unsafe fn with_ctx<R, F: FnOnce() -> R>(ctx: *mut vm::Ctx, cb: F) -> R {
ret
}
/// Pushes a new `CodeVersion` to the current thread's code-version stack
/// (`CURRENT_CODE_VERSIONS`), which the signal trap handler scans to decode
/// inline breakpoints.
pub fn push_code_version(version: CodeVersion) {
    CURRENT_CODE_VERSIONS.with(|x| x.borrow_mut().push(version));
}
/// Pops the most recently pushed `CodeVersion` from the current thread's
/// code-version stack, returning `None` when the stack is empty.
pub fn pop_code_version() -> Option<CodeVersion> {
    CURRENT_CODE_VERSIONS.with(|x| x.borrow_mut().pop())
}
/// Gets the base pointer of the wasm interrupt signal memory region
/// (`INTERRUPT_SIGNAL_MEM`), whose protection is toggled by
/// `set_wasm_interrupt`/`clear_wasm_interrupt`.
pub unsafe fn get_wasm_interrupt_signal_mem() -> *mut u8 {
    INTERRUPT_SIGNAL_MEM.0
}
/// Sets the wasm interrupt on the given `Ctx`.
pub unsafe fn set_wasm_interrupt_on_ctx(ctx: *mut vm::Ctx) {
if mprotect(
(&*ctx).internal.interrupt_signal_mem as _,
@ -125,6 +162,7 @@ pub unsafe fn set_wasm_interrupt_on_ctx(ctx: *mut vm::Ctx) {
}
}
/// Sets a wasm interrupt.
pub unsafe fn set_wasm_interrupt() {
let mem: *mut u8 = INTERRUPT_SIGNAL_MEM.0;
if mprotect(mem as _, INTERRUPT_SIGNAL_MEM_SIZE, PROT_NONE) < 0 {
@ -132,6 +170,7 @@ pub unsafe fn set_wasm_interrupt() {
}
}
/// Clears the wasm interrupt.
pub unsafe fn clear_wasm_interrupt() {
let mem: *mut u8 = INTERRUPT_SIGNAL_MEM.0;
if mprotect(mem as _, INTERRUPT_SIGNAL_MEM_SIZE, PROT_READ | PROT_WRITE) < 0 {
@ -139,6 +178,7 @@ pub unsafe fn clear_wasm_interrupt() {
}
}
/// Catches an unsafe unwind with the given functions and breakpoints.
pub unsafe fn catch_unsafe_unwind<R, F: FnOnce() -> R>(
f: F,
breakpoints: Option<BreakpointMap>,
@ -164,6 +204,7 @@ pub unsafe fn catch_unsafe_unwind<R, F: FnOnce() -> R>(
}
}
/// Begins an unsafe unwind.
pub unsafe fn begin_unsafe_unwind(e: Box<dyn Any>) -> ! {
let unwind = UNWIND.with(|x| x.get());
let inner = (*unwind)
@ -181,6 +222,14 @@ unsafe fn with_breakpoint_map<R, F: FnOnce(Option<&BreakpointMap>) -> R>(f: F) -
f(inner.breakpoints.as_ref())
}
#[cfg(not(target_arch = "x86_64"))]
/// Allocates and runs with the given stack size and closure.
///
/// Non-x86_64 fallback: no separate stack is allocated; `_size` is ignored
/// and the closure runs directly on the current stack.
pub fn allocate_and_run<R, F: FnOnce() -> R>(_size: usize, f: F) -> R {
    f()
}
#[cfg(target_arch = "x86_64")]
/// Allocates and runs with the given stack size and closure.
pub fn allocate_and_run<R, F: FnOnce() -> R>(size: usize, f: F) -> R {
struct Context<F: FnOnce() -> R, R> {
f: Option<F>,
@ -207,7 +256,7 @@ pub fn allocate_and_run<R, F: FnOnce() -> R>(size: usize, f: F) -> R {
// NOTE: Keep this consistent with `image-loading-*.s`.
stack[end_offset - 4 - 10] = &mut ctx as *mut Context<F, R> as usize as u64; // rdi
const NUM_SAVED_REGISTERS: usize = 23;
const NUM_SAVED_REGISTERS: usize = 31;
let stack_begin = stack
.as_mut_ptr()
.offset((end_offset - 4 - NUM_SAVED_REGISTERS) as isize);
@ -223,12 +272,75 @@ extern "C" fn signal_trap_handler(
siginfo: *mut siginfo_t,
ucontext: *mut c_void,
) {
use crate::backend::{
get_inline_breakpoint_size, read_inline_breakpoint, Architecture, InlineBreakpointType,
};
#[cfg(target_arch = "x86_64")]
static ARCH: Architecture = Architecture::X64;
#[cfg(target_arch = "aarch64")]
static ARCH: Architecture = Architecture::Aarch64;
let mut should_unwind = false;
let mut unwind_result: Box<dyn Any> = Box::new(());
unsafe {
let fault = get_fault_info(siginfo as _, ucontext);
let early_return = allocate_and_run(TRAP_STACK_SIZE, || {
CURRENT_CODE_VERSIONS.with(|versions| {
let versions = versions.borrow();
for v in versions.iter() {
let magic_size = if let Some(x) = get_inline_breakpoint_size(ARCH, v.backend) {
x
} else {
continue;
};
let ip = fault.ip.get();
let end = v.base + v.msm.total_size;
if ip >= v.base && ip < end && ip + magic_size <= end {
if let Some(ib) = read_inline_breakpoint(
ARCH,
v.backend,
std::slice::from_raw_parts(ip as *const u8, magic_size),
) {
match ib.ty {
InlineBreakpointType::Trace => {}
InlineBreakpointType::Middleware => {
let out: Option<Result<(), Box<dyn Any>>> =
with_breakpoint_map(|bkpt_map| {
bkpt_map.and_then(|x| x.get(&ip)).map(|x| {
x(BreakpointInfo {
fault: Some(&fault),
})
})
});
if let Some(Ok(())) = out {
} else if let Some(Err(e)) = out {
should_unwind = true;
unwind_result = e;
}
}
_ => println!("Unknown breakpoint type: {:?}", ib.ty),
}
let mut unwind_result: Box<dyn Any> = Box::new(());
fault.ip.set(ip + magic_size);
return true;
}
break;
}
}
false
})
});
if should_unwind {
begin_unsafe_unwind(unwind_result);
}
if early_return {
return;
}
let should_unwind = allocate_and_run(TRAP_STACK_SIZE, || {
should_unwind = allocate_and_run(TRAP_STACK_SIZE, || {
let mut is_suspend_signal = false;
WAS_SIGINT_TRIGGERED.with(|x| x.set(false));
@ -237,7 +349,7 @@ extern "C" fn signal_trap_handler(
Ok(SIGTRAP) => {
// breakpoint
let out: Option<Result<(), Box<dyn Any>>> = with_breakpoint_map(|bkpt_map| {
bkpt_map.and_then(|x| x.get(&(fault.ip as usize))).map(|x| {
bkpt_map.and_then(|x| x.get(&(fault.ip.get()))).map(|x| {
x(BreakpointInfo {
fault: Some(&fault),
})
@ -267,17 +379,9 @@ extern "C" fn signal_trap_handler(
}
let ctx: &mut vm::Ctx = &mut **CURRENT_CTX.with(|x| x.get());
let rsp = fault.known_registers[X64Register::GPR(GPR::RSP).to_index().0].unwrap();
let es_image = CURRENT_CODE_VERSIONS.with(|versions| {
let versions = versions.borrow();
read_stack(
|| versions.iter(),
rsp as usize as *const u64,
fault.known_registers,
Some(fault.ip as usize as u64),
)
});
let es_image = fault
.read_stack(None)
.expect("fault.read_stack() failed. Broken invariants?");
if is_suspend_signal {
let image = build_instance_image(ctx, es_image);
@ -290,7 +394,7 @@ extern "C" fn signal_trap_handler(
);
es_image.print_backtrace_if_needed();
}
// Just let the error propagate otherrwise
// Just let the error propagate otherwise
}
true
@ -316,6 +420,7 @@ extern "C" fn sigint_handler(
}
}
/// Ensure the signal handler is installed.
pub fn ensure_sighandler() {
INSTALL_SIGHANDLER.call_once(|| unsafe {
install_sighandler();
@ -344,14 +449,109 @@ unsafe fn install_sighandler() {
sigaction(SIGINT, &sa_interrupt).unwrap();
}
#[derive(Debug, Clone)]
/// Info about the fault
pub struct FaultInfo {
/// Faulting address.
pub faulting_addr: *const c_void,
pub ip: *const c_void,
pub known_registers: [Option<u64>; 24],
/// Instruction pointer.
pub ip: &'static Cell<usize>,
/// Values of known registers.
pub known_registers: [Option<u64>; 32],
}
impl FaultInfo {
    /// Parses the stack and builds an execution state image.
    ///
    /// Returns `None` when the RSP value is not present in `known_registers`,
    /// since the stack cannot be walked without a stack pointer. Otherwise the
    /// frames are decoded against every code version currently registered in
    /// the thread-local `CURRENT_CODE_VERSIONS`.
    ///
    /// `max_depth` — optional cap on how many frames the free function
    /// `read_stack` will decode; `None` means unbounded (forwarded as-is).
    ///
    /// # Safety
    ///
    /// This dereferences the saved RSP as a raw `*const u64` stack pointer.
    /// The caller must guarantee the registers in `self` describe a live,
    /// readable stack (e.g. captured inside a signal handler for the current
    /// thread); a stale or forged RSP leads to invalid reads.
    pub unsafe fn read_stack(&self, max_depth: Option<usize>) -> Option<ExecutionStateImage> {
        // Stack walking is impossible without a known stack pointer.
        let rsp = match self.known_registers[X64Register::GPR(GPR::RSP).to_index().0] {
            Some(x) => x,
            None => return None,
        };
        Some(CURRENT_CODE_VERSIONS.with(|versions| {
            let versions = versions.borrow();
            read_stack(
                // Lazily re-borrowed iterator over all registered code versions.
                || versions.iter(),
                rsp as usize as *const u64,
                self.known_registers,
                // The faulting instruction pointer seeds the top frame.
                Some(self.ip.get() as u64),
                max_depth,
            )
        }))
    }
}
#[cfg(all(target_os = "linux", target_arch = "aarch64"))]
/// Get fault info from siginfo and ucontext.
///
/// Linux/aarch64 variant: hand-declared, minimal mirrors of the kernel's
/// `siginfo_t`, `sigcontext` and `ucontext` layouts are used to pull out the
/// faulting address, the saved program counter, and the general-purpose
/// registers. The aarch64 registers are then mapped onto the x64 register
/// slots of `known_registers` (the runtime's state machinery is indexed by
/// `X64Register`), so e.g. x64's RSP slot is filled from `gregs[28]`.
///
/// # Safety
///
/// `siginfo` and `ucontext` must be the valid pointers handed to a signal
/// handler by the kernel; the raw-pointer reads and the transmute below are
/// only sound for those.
pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *mut c_void) -> FaultInfo {
    #[allow(dead_code)]
    #[allow(non_camel_case_types)]
    #[repr(packed)]
    // Mirrors the kernel's aarch64 `sigcontext`; `reserved` pads out the
    // FP/SIMD context we do not inspect.
    struct sigcontext {
        fault_address: u64,
        regs: [u64; 31],
        sp: u64,
        pc: u64,
        pstate: u64,
        reserved: [u8; 4096],
    }
    #[allow(dead_code)]
    #[allow(non_camel_case_types)]
    #[repr(packed)]
    // `unknown` skips the ucontext fields preceding `uc_mcontext`
    // (offset 176 here) — assumes glibc's aarch64 layout; TODO confirm.
    struct ucontext {
        unknown: [u8; 176],
        uc_mcontext: sigcontext,
    }
    #[allow(dead_code)]
    #[allow(non_camel_case_types)]
    #[repr(C)]
    // Only the leading fields of `siginfo_t` are needed; `si_addr` holds the
    // faulting address for memory faults.
    struct siginfo_t {
        si_signo: i32,
        si_errno: i32,
        si_code: i32,
        si_addr: u64,
        // ...
    }
    let siginfo = siginfo as *const siginfo_t;
    let si_addr = (*siginfo).si_addr;
    let ucontext = ucontext as *mut ucontext;
    let gregs = &(*ucontext).uc_mcontext.regs;
    let mut known_registers: [Option<u64>; 32] = [None; 32];
    // Map aarch64 GPRs into the x64 register slots the runtime indexes by.
    // The particular aarch64 -> x64 assignment below is the backend's chosen
    // emulation mapping — presumably matching the singlepass aarch64 codegen;
    // TODO confirm against the code emitter.
    known_registers[X64Register::GPR(GPR::R15).to_index().0] = Some(gregs[15] as _);
    known_registers[X64Register::GPR(GPR::R14).to_index().0] = Some(gregs[14] as _);
    known_registers[X64Register::GPR(GPR::R13).to_index().0] = Some(gregs[13] as _);
    known_registers[X64Register::GPR(GPR::R12).to_index().0] = Some(gregs[12] as _);
    known_registers[X64Register::GPR(GPR::R11).to_index().0] = Some(gregs[11] as _);
    known_registers[X64Register::GPR(GPR::R10).to_index().0] = Some(gregs[10] as _);
    known_registers[X64Register::GPR(GPR::R9).to_index().0] = Some(gregs[9] as _);
    known_registers[X64Register::GPR(GPR::R8).to_index().0] = Some(gregs[8] as _);
    known_registers[X64Register::GPR(GPR::RSI).to_index().0] = Some(gregs[6] as _);
    known_registers[X64Register::GPR(GPR::RDI).to_index().0] = Some(gregs[7] as _);
    known_registers[X64Register::GPR(GPR::RDX).to_index().0] = Some(gregs[2] as _);
    known_registers[X64Register::GPR(GPR::RCX).to_index().0] = Some(gregs[1] as _);
    known_registers[X64Register::GPR(GPR::RBX).to_index().0] = Some(gregs[3] as _);
    known_registers[X64Register::GPR(GPR::RAX).to_index().0] = Some(gregs[0] as _);
    known_registers[X64Register::GPR(GPR::RBP).to_index().0] = Some(gregs[5] as _);
    known_registers[X64Register::GPR(GPR::RSP).to_index().0] = Some(gregs[28] as _);
    FaultInfo {
        faulting_addr: si_addr as usize as _,
        // Expose the saved PC as a writable Cell so the trap handler can
        // advance it past inline breakpoints before returning.
        // NOTE(review): this takes a reference into a #[repr(packed)] struct
        // field, which is undefined behavior if `pc` ends up misaligned, and
        // the transmute fakes a 'static lifetime for a stack-frame-bound
        // value — confirm alignment and that the Cell never outlives the
        // signal handler.
        ip: std::mem::transmute::<&mut u64, &'static Cell<usize>>(&mut (*ucontext).uc_mcontext.pc),
        known_registers,
    }
}
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) -> FaultInfo {
/// Get fault info from siginfo and ucontext.
pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *mut c_void) -> FaultInfo {
use crate::state::x64::XMM;
use libc::{
_libc_xmmreg, ucontext_t, REG_R10, REG_R11, REG_R12, REG_R13, REG_R14, REG_R15, REG_R8,
REG_R9, REG_RAX, REG_RBP, REG_RBX, REG_RCX, REG_RDI, REG_RDX, REG_RIP, REG_RSI, REG_RSP,
@ -374,11 +574,10 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
let siginfo = siginfo as *const siginfo_t;
let si_addr = (*siginfo).si_addr;
let ucontext = ucontext as *const ucontext_t;
let gregs = &(*ucontext).uc_mcontext.gregs;
let fpregs = &*(*ucontext).uc_mcontext.fpregs;
let ucontext = ucontext as *mut ucontext_t;
let gregs = &mut (*ucontext).uc_mcontext.gregs;
let mut known_registers: [Option<u64>; 24] = [None; 24];
let mut known_registers: [Option<u64>; 32] = [None; 32];
known_registers[X64Register::GPR(GPR::R15).to_index().0] = Some(gregs[REG_R15 as usize] as _);
known_registers[X64Register::GPR(GPR::R14).to_index().0] = Some(gregs[REG_R14 as usize] as _);
known_registers[X64Register::GPR(GPR::R13).to_index().0] = Some(gregs[REG_R13 as usize] as _);
@ -397,24 +596,43 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
known_registers[X64Register::GPR(GPR::RBP).to_index().0] = Some(gregs[REG_RBP as usize] as _);
known_registers[X64Register::GPR(GPR::RSP).to_index().0] = Some(gregs[REG_RSP as usize] as _);
known_registers[X64Register::XMM(XMM::XMM0).to_index().0] = Some(read_xmm(&fpregs._xmm[0]));
known_registers[X64Register::XMM(XMM::XMM1).to_index().0] = Some(read_xmm(&fpregs._xmm[1]));
known_registers[X64Register::XMM(XMM::XMM2).to_index().0] = Some(read_xmm(&fpregs._xmm[2]));
known_registers[X64Register::XMM(XMM::XMM3).to_index().0] = Some(read_xmm(&fpregs._xmm[3]));
known_registers[X64Register::XMM(XMM::XMM4).to_index().0] = Some(read_xmm(&fpregs._xmm[4]));
known_registers[X64Register::XMM(XMM::XMM5).to_index().0] = Some(read_xmm(&fpregs._xmm[5]));
known_registers[X64Register::XMM(XMM::XMM6).to_index().0] = Some(read_xmm(&fpregs._xmm[6]));
known_registers[X64Register::XMM(XMM::XMM7).to_index().0] = Some(read_xmm(&fpregs._xmm[7]));
if !(*ucontext).uc_mcontext.fpregs.is_null() {
let fpregs = &*(*ucontext).uc_mcontext.fpregs;
known_registers[X64Register::XMM(XMM::XMM0).to_index().0] = Some(read_xmm(&fpregs._xmm[0]));
known_registers[X64Register::XMM(XMM::XMM1).to_index().0] = Some(read_xmm(&fpregs._xmm[1]));
known_registers[X64Register::XMM(XMM::XMM2).to_index().0] = Some(read_xmm(&fpregs._xmm[2]));
known_registers[X64Register::XMM(XMM::XMM3).to_index().0] = Some(read_xmm(&fpregs._xmm[3]));
known_registers[X64Register::XMM(XMM::XMM4).to_index().0] = Some(read_xmm(&fpregs._xmm[4]));
known_registers[X64Register::XMM(XMM::XMM5).to_index().0] = Some(read_xmm(&fpregs._xmm[5]));
known_registers[X64Register::XMM(XMM::XMM6).to_index().0] = Some(read_xmm(&fpregs._xmm[6]));
known_registers[X64Register::XMM(XMM::XMM7).to_index().0] = Some(read_xmm(&fpregs._xmm[7]));
known_registers[X64Register::XMM(XMM::XMM8).to_index().0] = Some(read_xmm(&fpregs._xmm[8]));
known_registers[X64Register::XMM(XMM::XMM9).to_index().0] = Some(read_xmm(&fpregs._xmm[9]));
known_registers[X64Register::XMM(XMM::XMM10).to_index().0] =
Some(read_xmm(&fpregs._xmm[10]));
known_registers[X64Register::XMM(XMM::XMM11).to_index().0] =
Some(read_xmm(&fpregs._xmm[11]));
known_registers[X64Register::XMM(XMM::XMM12).to_index().0] =
Some(read_xmm(&fpregs._xmm[12]));
known_registers[X64Register::XMM(XMM::XMM13).to_index().0] =
Some(read_xmm(&fpregs._xmm[13]));
known_registers[X64Register::XMM(XMM::XMM14).to_index().0] =
Some(read_xmm(&fpregs._xmm[14]));
known_registers[X64Register::XMM(XMM::XMM15).to_index().0] =
Some(read_xmm(&fpregs._xmm[15]));
}
FaultInfo {
faulting_addr: si_addr as usize as _,
ip: gregs[REG_RIP as usize] as _,
ip: std::mem::transmute::<&mut i64, &'static Cell<usize>>(&mut gregs[REG_RIP as usize]),
known_registers,
}
}
/// Get fault info from siginfo and ucontext.
#[cfg(all(target_os = "macos", target_arch = "x86_64"))]
pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) -> FaultInfo {
pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *mut c_void) -> FaultInfo {
use crate::state::x64::XMM;
#[allow(dead_code)]
#[repr(C)]
struct ucontext_t {
@ -423,7 +641,7 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
uc_stack: libc::stack_t,
uc_link: *const ucontext_t,
uc_mcsize: u64,
uc_mcontext: *const mcontext_t,
uc_mcontext: *mut mcontext_t,
}
#[repr(C)]
struct exception_state {
@ -458,8 +676,17 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
}
#[repr(C)]
struct fpstate {
_unused: [u8; 168],
xmm: [[u64; 2]; 8],
_cwd: u16,
_swd: u16,
_ftw: u16,
_fop: u16,
_rip: u64,
_rdp: u64,
_mxcsr: u32,
_mxcr_mask: u32,
_st: [[u16; 8]; 8],
xmm: [[u64; 2]; 16],
_padding: [u32; 24],
}
#[allow(dead_code)]
#[repr(C)]
@ -472,11 +699,11 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
let siginfo = siginfo as *const siginfo_t;
let si_addr = (*siginfo).si_addr;
let ucontext = ucontext as *const ucontext_t;
let ss = &(*(*ucontext).uc_mcontext).ss;
let ucontext = ucontext as *mut ucontext_t;
let ss = &mut (*(*ucontext).uc_mcontext).ss;
let fs = &(*(*ucontext).uc_mcontext).fs;
let mut known_registers: [Option<u64>; 24] = [None; 24];
let mut known_registers: [Option<u64>; 32] = [None; 32];
known_registers[X64Register::GPR(GPR::R15).to_index().0] = Some(ss.r15);
known_registers[X64Register::GPR(GPR::R14).to_index().0] = Some(ss.r14);
@ -504,10 +731,18 @@ pub unsafe fn get_fault_info(siginfo: *const c_void, ucontext: *const c_void) ->
known_registers[X64Register::XMM(XMM::XMM5).to_index().0] = Some(fs.xmm[5][0]);
known_registers[X64Register::XMM(XMM::XMM6).to_index().0] = Some(fs.xmm[6][0]);
known_registers[X64Register::XMM(XMM::XMM7).to_index().0] = Some(fs.xmm[7][0]);
known_registers[X64Register::XMM(XMM::XMM8).to_index().0] = Some(fs.xmm[8][0]);
known_registers[X64Register::XMM(XMM::XMM9).to_index().0] = Some(fs.xmm[9][0]);
known_registers[X64Register::XMM(XMM::XMM10).to_index().0] = Some(fs.xmm[10][0]);
known_registers[X64Register::XMM(XMM::XMM11).to_index().0] = Some(fs.xmm[11][0]);
known_registers[X64Register::XMM(XMM::XMM12).to_index().0] = Some(fs.xmm[12][0]);
known_registers[X64Register::XMM(XMM::XMM13).to_index().0] = Some(fs.xmm[13][0]);
known_registers[X64Register::XMM(XMM::XMM14).to_index().0] = Some(fs.xmm[14][0]);
known_registers[X64Register::XMM(XMM::XMM15).to_index().0] = Some(fs.xmm[15][0]);
FaultInfo {
faulting_addr: si_addr,
ip: ss.rip as _,
ip: std::mem::transmute::<&mut u64, &'static Cell<usize>>(&mut ss.rip),
known_registers,
}
}

View File

@ -1,3 +1,5 @@
//! The global module contains the implementation data structures and helper functions used to
//! manipulate and access wasm globals.
use crate::{
export::Export,
import::IsExport,
@ -9,6 +11,7 @@ use std::{
sync::{Arc, Mutex},
};
/// Container with a descriptor and a reference to a global value.
pub struct Global {
desc: GlobalDescriptor,
storage: Arc<Mutex<vm::LocalGlobal>>,

View File

@ -1,3 +1,6 @@
//! The import module contains the implementation data structures and helper functions used to
//! manipulate and access a wasm module's imports including memories, tables, globals, and
//! functions.
use crate::export::Export;
use std::collections::VecDeque;
use std::collections::{hash_map::Entry, HashMap};
@ -7,13 +10,20 @@ use std::{
sync::{Arc, Mutex},
};
/// This trait represents objects that act as a namespace for imports. For example, an `Instance`
/// or `ImportObject` could be considered namespaces that could provide imports to an instance.
pub trait LikeNamespace {
/// Gets an export by name.
fn get_export(&self, name: &str) -> Option<Export>;
/// Gets all exports in the namespace.
fn get_exports(&self) -> Vec<(String, Export)>;
/// Maybe insert an `Export` by name into the namespace.
fn maybe_insert(&mut self, name: &str, export: Export) -> Option<()>;
}
/// A trait that represents `Export` values.
pub trait IsExport {
/// Gets self as `Export`.
fn to_export(&self) -> Export;
}
@ -48,6 +58,8 @@ pub struct ImportObject {
map: Arc<Mutex<HashMap<String, Box<dyn LikeNamespace + Send>>>>,
pub(crate) state_creator:
Option<Arc<dyn Fn() -> (*mut c_void, fn(*mut c_void)) + Send + Sync + 'static>>,
/// Allow missing functions to be generated and instantiation to continue when required
/// functions are not provided.
pub allow_missing_functions: bool,
}
@ -61,6 +73,7 @@ impl ImportObject {
}
}
/// Create a new `ImportObject` which generates data from the provided state creator.
pub fn new_with_data<F>(state_creator: F) -> Self
where
F: Fn() -> (*mut c_void, fn(*mut c_void)) + 'static + Send + Sync,
@ -145,6 +158,7 @@ impl ImportObject {
.and_then(|ns| f(ns))
}
/// Create a clone ref of this namespace.
pub fn clone_ref(&self) -> Self {
Self {
map: Arc::clone(&self.map),
@ -166,6 +180,7 @@ impl ImportObject {
}
}
/// Iterator for an `ImportObject`'s exports.
pub struct ImportObjectIterator {
elements: VecDeque<(String, String, Export)>,
}
@ -204,17 +219,20 @@ impl Extend<(String, String, Export)> for ImportObject {
}
}
/// The top-level container for the two-level wasm imports
pub struct Namespace {
map: HashMap<String, Box<dyn IsExport + Send>>,
}
impl Namespace {
/// Create a new empty `Namespace`.
pub fn new() -> Self {
Self {
map: HashMap::new(),
}
}
/// Insert a new `Export` into the namespace with the given name.
pub fn insert<S, E>(&mut self, name: S, export: E) -> Option<Box<dyn IsExport + Send>>
where
S: Into<String>,
@ -223,6 +241,7 @@ impl Namespace {
self.map.insert(name.into(), Box::new(export))
}
/// Returns true if the `Namespace` contains the given name.
pub fn contains_key<S>(&mut self, key: S) -> bool
where
S: Into<String>,

View File

@ -1,3 +1,5 @@
//! The instance module contains the implementation data structures and helper functions used to
//! manipulate and access wasm instances.
use crate::{
backend::RunnableModule,
backing::{ImportBacking, LocalBacking},
@ -48,6 +50,7 @@ impl Drop for InstanceInner {
///
/// [`ImportObject`]: struct.ImportObject.html
pub struct Instance {
/// Reference to the module used to instantiate this instance.
pub module: Arc<ModuleInner>,
inner: Pin<Box<InstanceInner>>,
#[allow(dead_code)]
@ -110,9 +113,13 @@ impl Instance {
let ctx_ptr = match start_index.local_or_import(&instance.module.info) {
LocalOrImport::Local(_) => instance.inner.vmctx,
LocalOrImport::Import(imported_func_index) => {
instance.inner.import_backing.vm_functions[imported_func_index].vmctx
LocalOrImport::Import(imported_func_index) => unsafe {
instance.inner.import_backing.vm_functions[imported_func_index]
.func_ctx
.as_ref()
}
.vmctx
.as_ptr(),
};
let sig_index = *instance
@ -129,7 +136,7 @@ impl Instance {
.expect("wasm trampoline");
let start_func: Func<(), (), Wasm> =
unsafe { Func::from_raw_parts(wasm_trampoline, func_ptr, ctx_ptr) };
unsafe { Func::from_raw_parts(wasm_trampoline, func_ptr, None, ctx_ptr) };
start_func.call()?;
}
@ -137,6 +144,7 @@ impl Instance {
Ok(instance)
}
/// Load an `Instance` using the given loader.
pub fn load<T: Loader>(&self, loader: T) -> ::std::result::Result<T::Instance, T::Error> {
loader.load(&*self.module.runnable_module, &self.module.info, unsafe {
&*self.inner.vmctx
@ -195,9 +203,13 @@ impl Instance {
let ctx = match func_index.local_or_import(&self.module.info) {
LocalOrImport::Local(_) => self.inner.vmctx,
LocalOrImport::Import(imported_func_index) => {
self.inner.import_backing.vm_functions[imported_func_index].vmctx
LocalOrImport::Import(imported_func_index) => unsafe {
self.inner.import_backing.vm_functions[imported_func_index]
.func_ctx
.as_ref()
}
.vmctx
.as_ptr(),
};
let func_wasm_inner = self
@ -206,20 +218,26 @@ impl Instance {
.get_trampoline(&self.module.info, sig_index)
.unwrap();
let func_ptr = match func_index.local_or_import(&self.module.info) {
LocalOrImport::Local(local_func_index) => self
.module
.runnable_module
.get_func(&self.module.info, local_func_index)
.unwrap(),
LocalOrImport::Import(import_func_index) => NonNull::new(
self.inner.import_backing.vm_functions[import_func_index].func as *mut _,
)
.unwrap(),
let (func_ptr, func_env) = match func_index.local_or_import(&self.module.info) {
LocalOrImport::Local(local_func_index) => (
self.module
.runnable_module
.get_func(&self.module.info, local_func_index)
.unwrap(),
None,
),
LocalOrImport::Import(import_func_index) => {
let imported_func = &self.inner.import_backing.vm_functions[import_func_index];
(
NonNull::new(imported_func.func as *mut _).unwrap(),
unsafe { imported_func.func_ctx.as_ref() }.func_env,
)
}
};
let typed_func: Func<Args, Rets, Wasm> =
unsafe { Func::from_raw_parts(func_wasm_inner, func_ptr, ctx) };
unsafe { Func::from_raw_parts(func_wasm_inner, func_ptr, func_env, ctx) };
Ok(typed_func)
} else {
@ -230,6 +248,7 @@ impl Instance {
}
}
/// Resolve a function by name.
pub fn resolve_func(&self, name: &str) -> ResolveResult<usize> {
let export_index =
self.module
@ -381,10 +400,12 @@ impl Instance {
Module::new(Arc::clone(&self.module))
}
/// Get the value of an internal field
pub fn get_internal(&self, field: &InternalField) -> u64 {
self.inner.backing.internals.0[field.index()]
}
/// Set the value of an internal field.
pub fn set_internal(&mut self, field: &InternalField, value: u64) {
self.inner.backing.internals.0[field.index()] = value;
}
@ -405,6 +426,7 @@ impl InstanceInner {
ctx: match ctx {
Context::Internal => Context::External(self.vmctx),
ctx @ Context::External(_) => ctx,
ctx @ Context::ExternalWithEnv(_, _) => ctx,
},
signature,
}
@ -447,15 +469,16 @@ impl InstanceInner {
),
LocalOrImport::Import(imported_func_index) => {
let imported_func = &self.import_backing.vm_functions[imported_func_index];
let func_ctx = unsafe { imported_func.func_ctx.as_ref() };
(
imported_func.func as *const _,
Context::External(imported_func.vmctx),
Context::ExternalWithEnv(func_ctx.vmctx.as_ptr(), func_ctx.func_env),
)
}
};
let signature = SigRegistry.lookup_signature_ref(&module.info.signatures[sig_index]);
// let signature = &module.info.signatures[sig_index];
(unsafe { FuncPointer::new(func_ptr) }, ctx, signature)
}
@ -574,9 +597,13 @@ fn call_func_with_index(
let ctx_ptr = match func_index.local_or_import(info) {
LocalOrImport::Local(_) => local_ctx,
LocalOrImport::Import(imported_func_index) => {
import_backing.vm_functions[imported_func_index].vmctx
LocalOrImport::Import(imported_func_index) => unsafe {
import_backing.vm_functions[imported_func_index]
.func_ctx
.as_ref()
}
.vmctx
.as_ptr(),
};
let wasm = runnable
@ -774,10 +801,12 @@ impl<'a> DynFunc<'a> {
Ok(results)
}
/// Gets the signature of this `Dynfunc`.
pub fn signature(&self) -> &FuncSig {
&*self.signature
}
/// Gets a const pointer to the function represent by this `DynFunc`.
pub fn raw(&self) -> *const vm::Func {
match self.func_index.local_or_import(&self.module.info) {
LocalOrImport::Local(local_func_index) => self

View File

@ -1,5 +1,20 @@
//! Wasmer Runtime Core Library
//!
//! The runtime core library provides common data structures which are shared by compiler backends
//! to implement a Web Assembly runtime.
//!
//! The runtime core also provides an API for users who use wasmer as an embedded wasm runtime which
//! allows operations like compiling, instantiating, providing imports, access exports, memories,
//! and tables for example.
//!
//! The runtime core library is recommended to be used by only power users who wish to customize the
//! wasmer runtime. Most wasmer users should prefer the API which is re-exported by the wasmer
//! runtime library which provides common defaults and a friendly API.
//!
#![deny(
dead_code,
missing_docs,
nonstandard_style,
unused_imports,
unused_mut,
@ -11,10 +26,6 @@
#![doc(html_favicon_url = "https://wasmer.io/static/icons/favicon.ico")]
#![doc(html_logo_url = "https://avatars3.githubusercontent.com/u/44205449?s=200&v=4")]
#[cfg(test)]
#[macro_use]
extern crate field_offset;
#[macro_use]
extern crate serde_derive;
@ -53,7 +64,7 @@ pub mod vm;
pub mod vmcalls;
#[cfg(all(unix, target_arch = "x86_64"))]
pub use trampoline_x64 as trampoline;
#[cfg(all(unix, target_arch = "x86_64"))]
#[cfg(unix)]
pub mod fault;
pub mod state;
#[cfg(feature = "managed")]
@ -77,6 +88,9 @@ pub use wasmparser;
use self::cache::{Artifact, Error as CacheError};
pub mod prelude {
//! The prelude module is a helper module used to bring commonly used runtime core imports into
//! scope.
pub use crate::import::{ImportObject, Namespace};
pub use crate::types::{
FuncIndex, GlobalIndex, ImportedFuncIndex, ImportedGlobalIndex, ImportedMemoryIndex,
@ -161,6 +175,7 @@ pub fn validate_and_report_errors_with_features(
}
}
/// Creates a new module from the given cache `Artifact` for the specified compiler backend
pub unsafe fn load_cache_with(
cache: Artifact,
compiler: &dyn backend::Compiler,

View File

@ -1,3 +1,4 @@
//! The loader module functions are used to load an instance.
use crate::{backend::RunnableModule, module::ModuleInfo, types::Type, types::Value, vm::Ctx};
#[cfg(unix)]
use libc::{mmap, mprotect, munmap, MAP_ANON, MAP_PRIVATE, PROT_EXEC, PROT_READ, PROT_WRITE};
@ -6,10 +7,14 @@ use std::{
ops::{Deref, DerefMut},
};
/// The loader trait represents the functions used to load an instance.
pub trait Loader {
/// The type of `Instance` for the loader.
type Instance: Instance;
/// The error type returned by the loader.
type Error: Debug;
/// Loads the given module and context into an instance.
fn load(
&self,
rm: &dyn RunnableModule,
@ -18,18 +23,23 @@ pub trait Loader {
) -> Result<Self::Instance, Self::Error>;
}
/// This trait represents an instance used by the loader.
pub trait Instance {
/// The error type returned by this instance.
type Error: Debug;
/// Call a function by id with the given args.
fn call(&mut self, id: usize, args: &[Value]) -> Result<u128, Self::Error>;
/// Read memory at the given offset and length.
fn read_memory(&mut self, _offset: u32, _len: u32) -> Result<Vec<u8>, Self::Error> {
unimplemented!()
unimplemented!("Instance::read_memory")
}
/// Write memory at the given offset and length.
fn write_memory(&mut self, _offset: u32, _len: u32, _buf: &[u8]) -> Result<(), Self::Error> {
unimplemented!()
unimplemented!("Instance::write_memory")
}
}
/// A local implementation for `Loader`.
pub struct LocalLoader;
impl Loader for LocalLoader {
@ -54,6 +64,7 @@ impl Loader for LocalLoader {
}
}
/// A local instance.
pub struct LocalInstance {
code: CodeMemory,
offsets: Vec<usize>,
@ -111,6 +122,7 @@ impl Instance for LocalInstance {
}
}
/// A pointer to code in memory.
pub struct CodeMemory {
ptr: *mut u8,
size: usize,
@ -121,21 +133,25 @@ unsafe impl Sync for CodeMemory {}
#[cfg(not(unix))]
impl CodeMemory {
/// Creates a new code memory with the given size.
pub fn new(_size: usize) -> CodeMemory {
unimplemented!();
unimplemented!("CodeMemory::new");
}
/// Makes this code memory executable.
pub fn make_executable(&self) {
unimplemented!();
unimplemented!("CodeMemory::make_executable");
}
/// Makes this code memory writable.
pub fn make_writable(&self) {
unimplemented!();
unimplemented!("CodeMemory::make_writable");
}
}
#[cfg(unix)]
impl CodeMemory {
/// Creates a new code memory with the given size.
pub fn new(size: usize) -> CodeMemory {
if size == 0 {
return CodeMemory {
@ -167,12 +183,14 @@ impl CodeMemory {
}
}
/// Makes this code memory executable.
pub fn make_executable(&self) {
if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_EXEC) } != 0 {
panic!("cannot set code memory to executable");
}
}
/// Makes this code memory writable.
pub fn make_writable(&self) {
if unsafe { mprotect(self.ptr as _, self.size, PROT_READ | PROT_WRITE) } != 0 {
panic!("cannot set code memory to writable");

View File

@ -1,3 +1,5 @@
/// Prints a log message with args, similar to println, when the debug feature is enabled.
/// If the debug feature is disabled, arguments are not evaluated or printed.
#[macro_export]
#[cfg(feature = "debug")]
macro_rules! debug {
@ -11,6 +13,8 @@ macro_rules! debug {
}, line!(), $($arg)*));
}
/// Prints a log message with args, similar to println, when the debug feature is enabled.
/// If the debug feature is disabled, arguments are not evaluated or printed.
#[macro_export]
#[cfg(not(feature = "debug"))]
macro_rules! debug {
@ -18,6 +22,8 @@ macro_rules! debug {
($fmt:expr, $($arg:tt)*) => {};
}
/// Prints a log message with args, similar to println, when the trace feature is enabled.
/// If the trace feature is disabled, arguments are not evaluated or printed.
#[macro_export]
#[cfg(feature = "trace")]
macro_rules! trace {
@ -29,6 +35,8 @@ macro_rules! trace {
}
}
/// Prints a log message with args, similar to println, when the trace feature is enabled.
/// If the trace feature is disabled, arguments are not evaluated or printed.
#[macro_export]
#[cfg(not(feature = "trace"))]
macro_rules! trace {
@ -36,9 +44,48 @@ macro_rules! trace {
($fmt:expr, $($arg:tt)*) => {};
}
/// Helper macro to create a new `Func` object using the provided function pointer.
///
/// # Usage
///
/// Function pointers or closures are supported. Closures can capture
/// their environment (with `move`). The first parameter is likely to
/// be of kind `vm::Ctx`, though it can be optional.
///
/// ```
/// # use wasmer_runtime_core::{imports, func};
/// # use wasmer_runtime_core::vm;
///
/// // A function that has access to `vm::Ctx`.
/// fn func_with_vmctx(_: &mut vm::Ctx, n: i32) -> i32 {
/// n
/// }
///
/// // A function that cannot access `vm::Ctx`.
/// fn func(n: i32) -> i32 {
/// n
/// }
///
/// let i = 7;
///
/// let import_object = imports! {
/// "env" => {
/// "foo" => func!(func_with_vmctx),
/// "bar" => func!(func),
/// // A closure with a captured environment, and an access to `vm::Ctx`.
/// "baz" => func!(move |_: &mut vm::Ctx, n: i32| -> i32 {
/// n + i
/// }),
/// // A closure without a captured environment, and no access to `vm::Ctx`.
/// "qux" => func!(|n: i32| -> i32 {
/// n
/// }),
/// },
/// };
/// ```
#[macro_export]
macro_rules! func {
($func:path) => {{
($func:expr) => {{
$crate::Func::new($func)
}};
}
@ -47,12 +94,12 @@ macro_rules! func {
///
/// [`ImportObject`]: struct.ImportObject.html
///
/// # Note:
/// # Note
/// The `import` macro currently only supports
/// importing functions.
///
///
/// # Usage:
/// # Usage
/// ```
/// # use wasmer_runtime_core::{imports, func};
/// # use wasmer_runtime_core::vm::Ctx;

View File

@ -62,10 +62,12 @@ impl DynamicMemory {
Ok(storage)
}
/// The size of this memory in `Pages`.
pub fn size(&self) -> Pages {
self.current
}
/// Try to grow self by the given number of delta pages.
pub fn grow(&mut self, delta: Pages, local: &mut vm::LocalMemory) -> Result<Pages, GrowError> {
if delta == Pages(0) {
return Ok(self.current);
@ -104,10 +106,12 @@ impl DynamicMemory {
Ok(old_pages)
}
/// Get this memory represented as a slice of bytes.
pub fn as_slice(&self) -> &[u8] {
unsafe { &self.memory.as_slice()[0..self.current.bytes().0] }
}
/// Get this memory represented as a mutable slice of bytes
pub fn as_slice_mut(&mut self) -> &mut [u8] {
unsafe { &mut self.memory.as_slice_mut()[0..self.current.bytes().0] }
}

View File

@ -1,3 +1,5 @@
//! The memory module contains the implementation data structures and helper functions used to
//! manipulate and access wasm memory.
use crate::{
error::{CreationError, GrowError},
export::Export,
@ -170,10 +172,14 @@ impl fmt::Debug for Memory {
}
}
/// A kind of memory.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MemoryType {
/// A dynamic memory.
Dynamic,
/// A static memory.
Static,
/// A shared static memory.
SharedStatic,
}
@ -200,6 +206,7 @@ enum UnsharedMemoryStorage {
Static(Box<StaticMemory>),
}
/// A reference to an unshared memory.
pub struct UnsharedMemory {
internal: Arc<UnsharedMemoryInternal>,
}
@ -214,6 +221,7 @@ struct UnsharedMemoryInternal {
unsafe impl Sync for UnsharedMemoryInternal {}
impl UnsharedMemory {
/// Create a new `UnsharedMemory` from the given memory descriptor.
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
let mut local = vm::LocalMemory {
base: std::ptr::null_mut(),
@ -243,6 +251,7 @@ impl UnsharedMemory {
})
}
/// Try to grow this memory by the given number of delta pages.
pub fn grow(&self, delta: Pages) -> Result<Pages, GrowError> {
let mut storage = self.internal.storage.lock().unwrap();
@ -260,6 +269,7 @@ impl UnsharedMemory {
pages
}
/// Size of this memory in pages.
pub fn size(&self) -> Pages {
let storage = self.internal.storage.lock().unwrap();
@ -282,10 +292,12 @@ impl Clone for UnsharedMemory {
}
}
/// A reference to a shared memory.
pub struct SharedMemory {
internal: Arc<SharedMemoryInternal>,
}
/// Data structure for a shared internal memory.
pub struct SharedMemoryInternal {
memory: StdMutex<Box<StaticMemory>>,
local: Cell<vm::LocalMemory>,
@ -315,6 +327,7 @@ impl SharedMemory {
})
}
/// Try to grow this memory by the given number of delta pages.
pub fn grow(&self, delta: Pages) -> Result<Pages, GrowError> {
let _guard = self.internal.lock.lock();
let mut local = self.internal.local.get();
@ -323,12 +336,14 @@ impl SharedMemory {
pages
}
/// Size of this memory in pages.
pub fn size(&self) -> Pages {
let _guard = self.internal.lock.lock();
let memory = self.internal.memory.lock().unwrap();
memory.size()
}
/// Gets a mutable pointer to the `LocalMemory`.
// This function is scary, because the mutex is not locked here
pub(crate) fn vm_local_memory(&self) -> *mut vm::LocalMemory {
self.internal.local.as_ptr()

View File

@ -12,9 +12,12 @@ use crate::{
};
use std::{cell::Cell, fmt, marker::PhantomData, mem};
/// Array.
pub struct Array;
/// Item.
pub struct Item;
/// A pointer to a Wasm item.
#[repr(transparent)]
pub struct WasmPtr<T: Copy, Ty = Item> {
offset: u32,
@ -22,6 +25,7 @@ pub struct WasmPtr<T: Copy, Ty = Item> {
}
impl<T: Copy, Ty> WasmPtr<T, Ty> {
/// Create a new `WasmPtr` at the given offset.
#[inline]
pub fn new(offset: u32) -> Self {
Self {
@ -30,6 +34,7 @@ impl<T: Copy, Ty> WasmPtr<T, Ty> {
}
}
/// Get the offset for this `WasmPtr`.
#[inline]
pub fn offset(self) -> u32 {
self.offset
@ -44,6 +49,7 @@ fn align_pointer(ptr: usize, align: usize) -> usize {
}
impl<T: Copy + ValueType> WasmPtr<T, Item> {
/// Dereference this `WasmPtr`.
#[inline]
pub fn deref<'a>(self, memory: &'a Memory) -> Option<&'a Cell<T>> {
if (self.offset as usize) + mem::size_of::<T>() >= memory.size().bytes().0 {
@ -58,6 +64,7 @@ impl<T: Copy + ValueType> WasmPtr<T, Item> {
}
}
/// Mutable dereference this `WasmPtr`.
#[inline]
pub unsafe fn deref_mut<'a>(self, memory: &'a Memory) -> Option<&'a mut Cell<T>> {
if (self.offset as usize) + mem::size_of::<T>() >= memory.size().bytes().0 {
@ -72,6 +79,7 @@ impl<T: Copy + ValueType> WasmPtr<T, Item> {
}
impl<T: Copy + ValueType> WasmPtr<T, Array> {
/// Dereference this `WasmPtr`.
#[inline]
pub fn deref<'a>(self, memory: &'a Memory, index: u32, length: u32) -> Option<&'a [Cell<T>]> {
// gets the size of the item in the array with padding added such that
@ -94,6 +102,7 @@ impl<T: Copy + ValueType> WasmPtr<T, Array> {
}
}
/// Mutable dereference this `WasmPtr`.
#[inline]
pub unsafe fn deref_mut<'a>(
self,
@ -119,6 +128,7 @@ impl<T: Copy + ValueType> WasmPtr<T, Array> {
Some(cell_ptrs)
}
/// Get a UTF-8 string representation of this `WasmPtr` with the given length.
pub fn get_utf8_string<'a>(self, memory: &'a Memory, str_len: u32) -> Option<&'a str> {
if self.offset as usize + str_len as usize > memory.size().bytes().0 {
return None;

View File

@ -56,10 +56,12 @@ impl StaticMemory {
Ok(storage)
}
/// The size of this memory in `Pages`.
pub fn size(&self) -> Pages {
self.current
}
/// Try to grow this memory by the given number of delta pages.
pub fn grow(&mut self, delta: Pages, local: &mut vm::LocalMemory) -> Result<Pages, GrowError> {
if delta == Pages(0) {
return Ok(self.current);
@ -94,10 +96,12 @@ impl StaticMemory {
Ok(old_pages)
}
/// Get this memory represented as a slice of bytes.
pub fn as_slice(&self) -> &[u8] {
unsafe { &self.memory.as_slice()[0..self.current.bytes().0] }
}
/// Get this memory represented as a mutable slice of bytes.
pub fn as_slice_mut(&mut self) -> &mut [u8] {
unsafe { &mut self.memory.as_slice_mut()[0..self.current.bytes().0] }
}

View File

@ -39,12 +39,16 @@ impl Atomic for f64 {
type Output = AtomicU64;
}
/// A trait that represents an atomic type.
pub trait Atomicity {}
/// Atomically.
pub struct Atomically;
impl Atomicity for Atomically {}
/// Non-atomically.
pub struct NonAtomically;
impl Atomicity for NonAtomically {}
/// A view into a memory.
pub struct MemoryView<'a, T: 'a, A = NonAtomically> {
ptr: *mut T,
length: usize,
@ -65,6 +69,7 @@ where
}
impl<'a, T: Atomic> MemoryView<'a, T> {
/// Get atomic access to a memory view.
pub fn atomically(&self) -> MemoryView<'a, T::Output, Atomically> {
MemoryView {
ptr: self.ptr as *mut T::Output,

View File

@ -1,3 +1,5 @@
//! The module module contains the implementation data structures and helper functions used to
//! manipulate and access wasm modules.
use crate::{
backend::{Backend, RunnableModule},
cache::{Artifact, Error as CacheError},
@ -27,40 +29,59 @@ pub struct ModuleInner {
pub info: ModuleInfo,
}
/// Container for module data including memories, globals, tables, imports, and exports.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ModuleInfo {
// This are strictly local and the typsystem ensures that.
/// Map of memory index to memory descriptors.
// These are strictly local and the typesystem ensures that.
pub memories: Map<LocalMemoryIndex, MemoryDescriptor>,
/// Map of global index to global descriptors.
pub globals: Map<LocalGlobalIndex, GlobalInit>,
/// Map of table index to table descriptors.
pub tables: Map<LocalTableIndex, TableDescriptor>,
/// Map of imported function index to import name.
// These are strictly imported and the typesystem ensures that.
pub imported_functions: Map<ImportedFuncIndex, ImportName>,
/// Map of imported memory index to import name and memory descriptor.
pub imported_memories: Map<ImportedMemoryIndex, (ImportName, MemoryDescriptor)>,
/// Map of imported table index to import name and table descriptor.
pub imported_tables: Map<ImportedTableIndex, (ImportName, TableDescriptor)>,
/// Map of imported global index to import name and global descriptor.
pub imported_globals: Map<ImportedGlobalIndex, (ImportName, GlobalDescriptor)>,
/// Map of string to export index.
pub exports: IndexMap<String, ExportIndex>,
/// Vector of data initializers.
pub data_initializers: Vec<DataInitializer>,
/// Vector of table initializers.
pub elem_initializers: Vec<TableInitializer>,
/// Index of optional start function.
pub start_func: Option<FuncIndex>,
/// Map function index to signature index.
pub func_assoc: Map<FuncIndex, SigIndex>,
/// Map signature index to function signature.
pub signatures: Map<SigIndex, FuncSig>,
/// Backend.
pub backend: Backend,
/// Table of namespace indexes.
pub namespace_table: StringTable<NamespaceIndex>,
/// Table of name indexes.
pub name_table: StringTable<NameIndex>,
/// Symbol information from emscripten
/// Symbol information from emscripten.
pub em_symbol_map: Option<HashMap<u32, String>>,
/// Custom sections.
pub custom_sections: HashMap<String, Vec<u8>>,
}
impl ModuleInfo {
/// Creates custom section info from the given wasm file.
pub fn import_custom_sections(&mut self, wasm: &[u8]) -> crate::error::ParseResult<()> {
let mut parser = wasmparser::ModuleReader::new(wasm)?;
while !parser.eof() {
@ -120,6 +141,7 @@ impl Module {
Instance::new(Arc::clone(&self.inner), import_object)
}
/// Create a cache artifact from this module.
pub fn cache(&self) -> Result<Artifact, CacheError> {
let (backend_metadata, code) = self.inner.cache_gen.generate_cache()?;
Ok(Artifact::from_parts(
@ -129,6 +151,7 @@ impl Module {
))
}
/// Get the module data for this module.
pub fn info(&self) -> &ModuleInfo {
&self.inner.info
}
@ -151,11 +174,16 @@ pub struct ImportName {
pub name_index: NameIndex,
}
/// Kinds of export indexes.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExportIndex {
/// Function export index.
Func(FuncIndex),
/// Memory export index.
Memory(MemoryIndex),
/// Global export index.
Global(GlobalIndex),
/// Table export index.
Table(TableIndex),
}
@ -182,6 +210,7 @@ pub struct TableInitializer {
pub elements: Vec<FuncIndex>,
}
/// String table builder.
pub struct StringTableBuilder<K: TypedIndex> {
map: IndexMap<String, (K, u32, u32)>,
buffer: String,
@ -189,6 +218,7 @@ pub struct StringTableBuilder<K: TypedIndex> {
}
impl<K: TypedIndex> StringTableBuilder<K> {
/// Creates a new `StringTableBuilder`.
pub fn new() -> Self {
Self {
map: IndexMap::new(),
@ -197,6 +227,7 @@ impl<K: TypedIndex> StringTableBuilder<K> {
}
}
/// Register a new string into table.
pub fn register<S>(&mut self, s: S) -> K
where
S: Into<String> + AsRef<str>,
@ -219,6 +250,7 @@ impl<K: TypedIndex> StringTableBuilder<K> {
}
}
/// Finish building the `StringTable`.
pub fn finish(self) -> StringTable<K> {
let table = self
.map
@ -233,6 +265,7 @@ impl<K: TypedIndex> StringTableBuilder<K> {
}
}
/// A map of index to string.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StringTable<K: TypedIndex> {
table: Map<K, (u32, u32)>,
@ -240,6 +273,7 @@ pub struct StringTable<K: TypedIndex> {
}
impl<K: TypedIndex> StringTable<K> {
/// Creates a `StringTable`.
pub fn new() -> Self {
Self {
table: Map::new(),
@ -247,6 +281,7 @@ impl<K: TypedIndex> StringTable<K> {
}
}
/// Gets a reference to a string at the given index.
pub fn get(&self, index: K) -> &str {
let (offset, length) = self.table[index];
let offset = offset as usize;
@ -256,6 +291,7 @@ impl<K: TypedIndex> StringTable<K> {
}
}
/// Namespace index.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct NamespaceIndex(u32);
@ -271,6 +307,7 @@ impl TypedIndex for NamespaceIndex {
}
}
/// Name index.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct NameIndex(u32);

View File

@ -1,3 +1,6 @@
//! The parse module contains common data structures and functions used to parse wasm files into
//! runtime data structures.
use crate::codegen::*;
use crate::{
backend::{Backend, CompilerConfig, RunnableModule},
@ -22,9 +25,12 @@ use wasmparser::{
WasmDecoder,
};
/// Kind of load error.
#[derive(Debug)]
pub enum LoadError {
/// Parse error.
Parse(BinaryReaderError),
/// Code generation error.
Codegen(String),
}
@ -42,6 +48,8 @@ impl From<BinaryReaderError> for LoadError {
}
}
/// Read wasm binary into module data using the given backend, module code generator, middlewares,
/// and compiler configuration.
pub fn read_module<
MCG: ModuleCodeGenerator<FCG, RM, E>,
FCG: FunctionCodeGenerator<E>,
@ -394,6 +402,7 @@ pub fn read_module<
Ok(info)
}
/// Convert given `WpType` to `Type`.
pub fn wp_type_to_type(ty: WpType) -> Result<Type, BinaryReaderError> {
match ty {
WpType::I32 => Ok(Type::I32),
@ -410,6 +419,7 @@ pub fn wp_type_to_type(ty: WpType) -> Result<Type, BinaryReaderError> {
}
}
/// Convert given `Type` to `WpType`.
pub fn type_to_wp_type(ty: Type) -> WpType {
match ty {
Type::I32 => WpType::I32,

View File

@ -1,119 +1,195 @@
//! The state module is used to track the state of a running WebAssembly instance so that
//! the state can be read or updated at runtime. Use cases include generating stack traces,
//! switching generated code from one tier to another, or serializing the state of a running instance.
use crate::backend::Backend;
use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
/// An index to a register
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct RegisterIndex(pub usize);
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
/// A kind of wasm or constant value
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum WasmAbstractValue {
/// A wasm runtime value
Runtime,
/// A wasm constant value
Const(u64),
}
#[derive(Clone, Debug)]
/// A container for the state of a running wasm instance.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MachineState {
/// Stack values.
pub stack_values: Vec<MachineValue>,
/// Register values.
pub register_values: Vec<MachineValue>,
/// Previous frame.
pub prev_frame: BTreeMap<usize, MachineValue>,
/// Wasm stack.
pub wasm_stack: Vec<WasmAbstractValue>,
/// Private depth of the wasm stack.
pub wasm_stack_private_depth: usize,
/// Wasm instruction offset.
pub wasm_inst_offset: usize,
}
#[derive(Clone, Debug, Default)]
/// A diff of two `MachineState`s.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct MachineStateDiff {
/// Last.
pub last: Option<usize>,
/// Stack push.
pub stack_push: Vec<MachineValue>,
/// Stack pop.
pub stack_pop: usize,
/// Register diff.
pub reg_diff: Vec<(RegisterIndex, MachineValue)>,
/// Previous frame diff.
pub prev_frame_diff: BTreeMap<usize, Option<MachineValue>>, // None for removal
/// Wasm stack push.
pub wasm_stack_push: Vec<WasmAbstractValue>,
/// Wasm stack pop.
pub wasm_stack_pop: usize,
/// Private depth of the wasm stack.
pub wasm_stack_private_depth: usize, // absolute value; not a diff.
/// Wasm instruction offset.
pub wasm_inst_offset: usize, // absolute value; not a diff.
}
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
/// A kind of machine value.
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum MachineValue {
/// Undefined.
Undefined,
/// Vmctx.
Vmctx,
/// Vmctx Deref.
VmctxDeref(Vec<usize>),
/// Preserve Register.
PreserveRegister(RegisterIndex),
/// Copy Stack BP Relative.
CopyStackBPRelative(i32), // relative to Base Pointer, in byte offset
ExplicitShadow, // indicates that all values above this are above the shadow region
/// Explicit Shadow.
ExplicitShadow, // indicates that all values above this are above the shadow region
/// Wasm Stack.
WasmStack(usize),
/// Wasm Local.
WasmLocal(usize),
/// Two Halves.
TwoHalves(Box<(MachineValue, MachineValue)>), // 32-bit values. TODO: optimize: add another type for inner "half" value to avoid boxing?
}
#[derive(Clone, Debug)]
/// A map of function states.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FunctionStateMap {
/// Initial.
pub initial: MachineState,
/// Local Function Id.
pub local_function_id: usize,
/// Locals.
pub locals: Vec<WasmAbstractValue>,
/// Shadow size.
pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
/// Diffs.
pub diffs: Vec<MachineStateDiff>,
/// Wasm Function Header target offset.
pub wasm_function_header_target_offset: Option<SuspendOffset>,
/// Wasm offset to target offset
pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
/// Loop offsets.
pub loop_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
/// Call offsets.
pub call_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
/// Trappable offsets.
pub trappable_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
}
#[derive(Clone, Copy, Debug)]
/// A kind of suspend offset.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum SuspendOffset {
/// A loop.
Loop(usize),
/// A call.
Call(usize),
/// A trappable.
Trappable(usize),
}
#[derive(Clone, Debug)]
/// Info for an offset.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OffsetInfo {
/// End offset.
pub end_offset: usize, // excluded bound
/// Diff Id.
pub diff_id: usize,
/// Activate offset.
pub activate_offset: usize,
}
#[derive(Clone, Debug)]
/// A map of module state.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ModuleStateMap {
/// Local functions.
pub local_functions: BTreeMap<usize, FunctionStateMap>,
/// Total size.
pub total_size: usize,
}
/// State dump of a wasm function.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WasmFunctionStateDump {
/// Local function id.
pub local_function_id: usize,
/// Wasm instruction offset.
pub wasm_inst_offset: usize,
/// Stack.
pub stack: Vec<Option<u64>>,
/// Locals.
pub locals: Vec<Option<u64>>,
}
/// An image of the execution state.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExecutionStateImage {
/// Frames.
pub frames: Vec<WasmFunctionStateDump>,
}
/// Represents an image of an `Instance` including its memory, globals, and execution state.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceImage {
/// Memory for this `InstanceImage`
pub memory: Option<Vec<u8>>,
/// Stored globals for this `InstanceImage`
pub globals: Vec<u128>,
/// `ExecutionStateImage` for this `InstanceImage`
pub execution_state: ExecutionStateImage,
}
/// A `CodeVersion` is a container for a unit of generated code for a module.
#[derive(Debug, Clone)]
pub struct CodeVersion {
/// Indicates if this code version is the baseline version.
pub baseline: bool,
/// `ModuleStateMap` for this code version.
pub msm: ModuleStateMap,
/// A pointer to the machine code for this module.
pub base: usize,
/// The backend used to compile this module.
pub backend: Backend,
}
impl ModuleStateMap {
/// Looks up an ip from self using the given ip, base, and offset table provider.
pub fn lookup_ip<F: FnOnce(&FunctionStateMap) -> &BTreeMap<usize, OffsetInfo>>(
&self,
ip: usize,
@ -146,6 +222,7 @@ impl ModuleStateMap {
}
}
}
/// Looks up a call ip from self using the given ip and base values.
pub fn lookup_call_ip(
&self,
ip: usize,
@ -154,6 +231,7 @@ impl ModuleStateMap {
self.lookup_ip(ip, base, |fsm| &fsm.call_offsets)
}
/// Looks up a trappable ip from self using the given ip and base values.
pub fn lookup_trappable_ip(
&self,
ip: usize,
@ -162,6 +240,7 @@ impl ModuleStateMap {
self.lookup_ip(ip, base, |fsm| &fsm.trappable_offsets)
}
/// Looks up a loop ip from self using the given ip and base values.
pub fn lookup_loop_ip(
&self,
ip: usize,
@ -172,6 +251,7 @@ impl ModuleStateMap {
}
impl FunctionStateMap {
/// Creates a new `FunctionStateMap` with the given parameters.
pub fn new(
initial: MachineState,
local_function_id: usize,
@ -194,6 +274,7 @@ impl FunctionStateMap {
}
impl MachineState {
/// Creates a `MachineStateDiff` from self and the given `&MachineState`.
pub fn diff(&self, old: &MachineState) -> MachineStateDiff {
let first_diff_stack_depth: usize = self
.stack_values
@ -256,6 +337,7 @@ impl MachineState {
}
impl MachineStateDiff {
/// Creates a `MachineState` from the given `&FunctionStateMap`.
pub fn build_state(&self, m: &FunctionStateMap) -> MachineState {
let mut chain: Vec<&MachineStateDiff> = vec![];
chain.push(self);
@ -298,6 +380,7 @@ impl MachineStateDiff {
}
impl ExecutionStateImage {
/// Prints a backtrace if the `WASMER_BACKTRACE` environment variable is 1.
pub fn print_backtrace_if_needed(&self) {
use std::env;
@ -311,6 +394,7 @@ impl ExecutionStateImage {
eprintln!("Run with `WASMER_BACKTRACE=1` environment variable to display a backtrace.");
}
/// Converts self into a `String`, used for display purposes.
pub fn output(&self) -> String {
fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
let mut ret = String::new();
@ -376,6 +460,7 @@ impl ExecutionStateImage {
}
impl InstanceImage {
/// Converts a slice of bytes into an `Option<InstanceImage>`
pub fn from_bytes(input: &[u8]) -> Option<InstanceImage> {
use bincode::deserialize;
match deserialize(input) {
@ -384,14 +469,150 @@ impl InstanceImage {
}
}
/// Converts self into a vector of bytes.
pub fn to_bytes(&self) -> Vec<u8> {
use bincode::serialize;
serialize(self).unwrap()
}
}
#[cfg(all(unix, target_arch = "x86_64"))]
/// Declarations for x86-64 registers.
#[cfg(unix)]
pub mod x64_decl {
use super::*;
/// General-purpose registers.
///
/// Declaration order fixes each variant's `u8` discriminant, which
/// `X64Register::to_index` uses as the register's flat index — do not
/// reorder variants.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum GPR {
    /// RAX register
    RAX,
    /// RCX register
    RCX,
    /// RDX register
    RDX,
    /// RBX register
    RBX,
    /// RSP register
    RSP,
    /// RBP register
    RBP,
    /// RSI register
    RSI,
    /// RDI register
    RDI,
    /// R8 register
    R8,
    /// R9 register
    R9,
    /// R10 register
    R10,
    /// R11 register
    R11,
    /// R12 register
    R12,
    /// R13 register
    R13,
    /// R14 register
    R14,
    /// R15 register
    R15,
}
/// XMM registers.
///
/// Declaration order fixes each variant's `u8` discriminant;
/// `X64Register::to_index` offsets it by 16 to place XMM registers after
/// the GPRs — do not reorder variants.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum XMM {
    /// XMM register 0
    XMM0,
    /// XMM register 1
    XMM1,
    /// XMM register 2
    XMM2,
    /// XMM register 3
    XMM3,
    /// XMM register 4
    XMM4,
    /// XMM register 5
    XMM5,
    /// XMM register 6
    XMM6,
    /// XMM register 7
    XMM7,
    /// XMM register 8
    XMM8,
    /// XMM register 9
    XMM9,
    /// XMM register 10
    XMM10,
    /// XMM register 11
    XMM11,
    /// XMM register 12
    XMM12,
    /// XMM register 13
    XMM13,
    /// XMM register 14
    XMM14,
    /// XMM register 15
    XMM15,
}
/// A machine register under the x86-64 architecture.
///
/// See `X64Register::to_index` for how both variants map onto a single
/// flat `RegisterIndex` space (GPRs first, then XMM registers).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum X64Register {
    /// General-purpose registers.
    GPR(GPR),
    /// XMM (floating point/SIMD) registers.
    XMM(XMM),
}
impl X64Register {
/// Returns the index of the register.
pub fn to_index(&self) -> RegisterIndex {
match *self {
X64Register::GPR(x) => RegisterIndex(x as usize),
X64Register::XMM(x) => RegisterIndex(x as usize + 16),
}
}
/// Converts a DWARD regnum to X64Register.
pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
Some(match x {
0 => X64Register::GPR(GPR::RAX),
1 => X64Register::GPR(GPR::RDX),
2 => X64Register::GPR(GPR::RCX),
3 => X64Register::GPR(GPR::RBX),
4 => X64Register::GPR(GPR::RSI),
5 => X64Register::GPR(GPR::RDI),
6 => X64Register::GPR(GPR::RBP),
7 => X64Register::GPR(GPR::RSP),
8 => X64Register::GPR(GPR::R8),
9 => X64Register::GPR(GPR::R9),
10 => X64Register::GPR(GPR::R10),
11 => X64Register::GPR(GPR::R11),
12 => X64Register::GPR(GPR::R12),
13 => X64Register::GPR(GPR::R13),
14 => X64Register::GPR(GPR::R14),
15 => X64Register::GPR(GPR::R15),
17 => X64Register::XMM(XMM::XMM0),
18 => X64Register::XMM(XMM::XMM1),
19 => X64Register::XMM(XMM::XMM2),
20 => X64Register::XMM(XMM::XMM3),
21 => X64Register::XMM(XMM::XMM4),
22 => X64Register::XMM(XMM::XMM5),
23 => X64Register::XMM(XMM::XMM6),
24 => X64Register::XMM(XMM::XMM7),
_ => return None,
})
}
}
}
#[cfg(unix)]
pub mod x64 {
//! The x64 state module contains functions to generate state and code for x64 targets.
pub use super::x64_decl::*;
use super::*;
use crate::codegen::BreakpointMap;
use crate::fault::{
@ -410,6 +631,7 @@ pub mod x64 {
ptr as usize as u64
}
/// Create a new `MachineState` with default values.
pub fn new_machine_state() -> MachineState {
MachineState {
stack_values: vec![],
@ -421,6 +643,8 @@ pub mod x64 {
}
}
/// Invokes a call return on the stack for the given module state map, code base, instance
/// image and context.
#[warn(unused_variables)]
pub unsafe fn invoke_call_return_on_stack(
msm: &ModuleStateMap,
@ -436,7 +660,7 @@ pub mod x64 {
let mut last_stack_offset: u64 = 0; // rbp
let mut known_registers: [Option<u64>; 24] = [None; 24];
let mut known_registers: [Option<u64>; 32] = [None; 32];
let local_functions_vec: Vec<&FunctionStateMap> =
msm.local_functions.iter().map(|(_, v)| v).collect();
@ -672,6 +896,37 @@ pub mod x64 {
stack_offset -= 1;
stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // rbp
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM15).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM14).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM13).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM12).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM11).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM10).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM9).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM8).to_index().0].unwrap_or(0);
stack_offset -= 1;
stack[stack_offset] =
known_registers[X64Register::XMM(XMM::XMM7).to_index().0].unwrap_or(0);
@ -741,6 +996,7 @@ pub mod x64 {
)
}
/// Builds an `InstanceImage` for the given `Ctx` and `ExecutionStateImage`.
pub fn build_instance_image(
vmctx: &mut Ctx,
execution_state: ExecutionStateImage,
@ -776,18 +1032,27 @@ pub mod x64 {
}
}
/// Returns a `ExecutionStateImage` for the given versions, stack, initial registers and
/// initial address.
#[warn(unused_variables)]
pub unsafe fn read_stack<'a, I: Iterator<Item = &'a CodeVersion>, F: Fn() -> I + 'a>(
versions: F,
mut stack: *const u64,
initially_known_registers: [Option<u64>; 24],
initially_known_registers: [Option<u64>; 32],
mut initial_address: Option<u64>,
max_depth: Option<usize>,
) -> ExecutionStateImage {
let mut known_registers: [Option<u64>; 24] = initially_known_registers;
let mut known_registers: [Option<u64>; 32] = initially_known_registers;
let mut results: Vec<WasmFunctionStateDump> = vec![];
let mut was_baseline = true;
for _ in 0.. {
for depth in 0.. {
if let Some(max_depth) = max_depth {
if depth >= max_depth {
return ExecutionStateImage { frames: results };
}
}
let ret_addr = initial_address.take().unwrap_or_else(|| {
let x = *stack;
stack = stack.offset(1);
@ -798,6 +1063,7 @@ pub mod x64 {
let mut is_baseline: Option<bool> = None;
for version in versions() {
//println!("Lookup IP: {:x}", ret_addr);
match version
.msm
.lookup_call_ip(ret_addr as usize, version.base)
@ -985,89 +1251,10 @@ pub mod x64 {
stack: wasm_stack,
locals: wasm_locals,
};
//println!("WFS = {:?}", wfs);
results.push(wfs);
}
unreachable!();
}
/// General-purpose registers.
// NOTE(review): this duplicates `x64_decl::GPR`, which is glob re-exported
// into this module via `pub use super::x64_decl::*;` — this local
// definition shadows the re-export; confirm whether this copy is still
// needed.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum GPR {
    RAX,
    RCX,
    RDX,
    RBX,
    RSP,
    RBP,
    RSI,
    RDI,
    R8,
    R9,
    R10,
    R11,
    R12,
    R13,
    R14,
    R15,
}
/// XMM registers XMM0 through XMM7.
// NOTE(review): duplicates `x64_decl::XMM` but only covers the first
// eight XMM registers, while the `x64_decl` version covers sixteen —
// confirm which definition is canonical.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum XMM {
    XMM0,
    XMM1,
    XMM2,
    XMM3,
    XMM4,
    XMM5,
    XMM6,
    XMM7,
}
/// A machine register under the x86-64 architecture.
// NOTE(review): duplicates `x64_decl::X64Register`; this local definition
// shadows the glob re-export from `x64_decl` above.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum X64Register {
    /// General-purpose registers.
    GPR(GPR),
    /// XMM (floating point/SIMD) registers.
    XMM(XMM),
}
impl X64Register {
    /// Returns the flat index of the register: GPRs map to 0..16, XMM
    /// registers to 16.. (discriminant + 16).
    pub fn to_index(&self) -> RegisterIndex {
        match *self {
            X64Register::GPR(x) => RegisterIndex(x as usize),
            X64Register::XMM(x) => RegisterIndex(x as usize + 16),
        }
    }

    /// Converts a DWARF register number to an `X64Register`.
    ///
    /// Covers GPRs (DWARF 0..=15) and XMM0..XMM7 (DWARF 17..=24);
    /// any other regnum yields `None`.
    // NOTE(review): XMM8–XMM15 (DWARF 25..=32) are not mapped here —
    // confirm whether callers ever see those regnums.
    pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
        Some(match x {
            0 => X64Register::GPR(GPR::RAX),
            1 => X64Register::GPR(GPR::RDX),
            2 => X64Register::GPR(GPR::RCX),
            3 => X64Register::GPR(GPR::RBX),
            4 => X64Register::GPR(GPR::RSI),
            5 => X64Register::GPR(GPR::RDI),
            6 => X64Register::GPR(GPR::RBP),
            7 => X64Register::GPR(GPR::RSP),
            8 => X64Register::GPR(GPR::R8),
            9 => X64Register::GPR(GPR::R9),
            10 => X64Register::GPR(GPR::R10),
            11 => X64Register::GPR(GPR::R11),
            12 => X64Register::GPR(GPR::R12),
            13 => X64Register::GPR(GPR::R13),
            14 => X64Register::GPR(GPR::R14),
            15 => X64Register::GPR(GPR::R15),
            17 => X64Register::XMM(XMM::XMM0),
            18 => X64Register::XMM(XMM::XMM1),
            19 => X64Register::XMM(XMM::XMM2),
            20 => X64Register::XMM(XMM::XMM3),
            21 => X64Register::XMM(XMM::XMM4),
            22 => X64Register::XMM(XMM::XMM5),
            23 => X64Register::XMM(XMM::XMM6),
            24 => X64Register::XMM(XMM::XMM7),
            _ => return None,
        })
    }
}
}

View File

@ -5,6 +5,7 @@ use std::{
ops::{Deref, DerefMut},
};
/// Boxed map.
#[derive(Debug, Clone)]
pub struct BoxedMap<K, V>
where

View File

@ -21,6 +21,7 @@ impl<K, V> Map<K, V>
where
K: TypedIndex,
{
/// Creates a new `Map`.
pub fn new() -> Self {
Self {
elems: Vec::new(),
@ -28,6 +29,7 @@ where
}
}
/// Creates a new empty `Map` with the given capacity.
pub fn with_capacity(capacity: usize) -> Self {
Self {
elems: Vec::with_capacity(capacity),
@ -35,32 +37,44 @@ where
}
}
/// Clears the map. Keeps the allocated memory for future use.
pub fn clear(&mut self) {
self.elems.clear();
}
/// Returns the size of this map.
pub fn len(&self) -> usize {
self.elems.len()
}
/// Returns true if this map is empty.
pub fn is_empty(&self) -> bool {
self.elems.is_empty()
}
/// Adds a new value to this map.
pub fn push(&mut self, value: V) -> K {
let len = self.len();
self.elems.push(value);
K::new(len)
}
/// Returns the next index into the map.
pub fn next_index(&self) -> K {
K::new(self.len())
}
/// Reserves the given size.
pub fn reserve_exact(&mut self, size: usize) {
self.elems.reserve_exact(size);
}
/// Convert this into a `BoxedMap`.
pub fn into_boxed_map(self) -> BoxedMap<K, V> {
BoxedMap::new(self.elems.into_boxed_slice())
}
/// Convert this into a `Vec`.
pub fn into_vec(self) -> Vec<V> {
self.elems
}
@ -71,6 +85,7 @@ where
K: TypedIndex,
V: Clone,
{
/// Resize this map to the given new length and value.
pub fn resize(&mut self, new_len: usize, value: V) {
self.elems.resize(new_len, value);
}
@ -184,6 +199,7 @@ where
}
}
/// Iterator for a `Map`.
pub struct Iter<'a, K: TypedIndex, V: 'a> {
enumerated: iter::Enumerate<slice::Iter<'a, V>>,
_marker: PhantomData<K>,
@ -206,6 +222,7 @@ impl<'a, K: TypedIndex, V: 'a> Iterator for Iter<'a, K, V> {
}
}
/// Mutable iterator for a `Map`.
pub struct IterMut<'a, K: TypedIndex, V: 'a> {
enumerated: iter::Enumerate<slice::IterMut<'a, V>>,
_marker: PhantomData<K>,

View File

@ -1,3 +1,4 @@
//! The structures module contains commonly used data structures.
mod boxed;
mod map;
mod slice;
@ -6,6 +7,7 @@ pub use self::boxed::BoxedMap;
pub use self::map::{Iter, IterMut, Map};
pub use self::slice::SliceMap;
/// Represents a typed index.
pub trait TypedIndex: Copy + Clone {
#[doc(hidden)]
fn new(index: usize) -> Self;

View File

@ -20,30 +20,37 @@ impl<K, V> SliceMap<K, V>
where
K: TypedIndex,
{
/// Gets a reference to the value at the given index.
pub fn get(&self, index: K) -> Option<&V> {
self.slice.get(index.index())
}
/// Gets a mutable reference to the value at the given index.
pub fn get_mut(&mut self, index: K) -> Option<&mut V> {
self.slice.get_mut(index.index())
}
/// Gets the length of this slice map.
pub fn len(&self) -> usize {
self.slice.len()
}
/// Returns an iterator for this slice map.
pub fn iter(&self) -> Iter<K, V> {
Iter::new(self.slice.iter())
}
/// Returns a mutable iterator for this slice map.
pub fn iter_mut(&mut self) -> IterMut<K, V> {
IterMut::new(self.slice.iter_mut())
}
/// Gets a pointer to the `SliceMap`.
pub fn as_ptr(&self) -> *const V {
self as *const SliceMap<K, V> as *const V
}
/// Gets a mutable pointer to the `SliceMap`.
pub fn as_mut_ptr(&mut self) -> *mut V {
self as *mut SliceMap<K, V> as *mut V
}

View File

@ -9,6 +9,7 @@ use std::{fs::File, os::unix::io::IntoRawFd, path::Path, ptr, slice, sync::Arc};
unsafe impl Send for Memory {}
unsafe impl Sync for Memory {}
/// Data for a sized and protected region of memory.
#[derive(Debug)]
pub struct Memory {
ptr: *mut u8,
@ -18,6 +19,7 @@ pub struct Memory {
}
impl Memory {
/// Create a new memory from the given path value and protection.
pub fn from_file_path<P>(path: P, protection: Protect) -> Result<Self, MemoryCreationError>
where
P: AsRef<Path>,
@ -54,6 +56,7 @@ impl Memory {
}
}
/// Create a new memory with the given size and protection.
pub fn with_size_protect(size: usize, protection: Protect) -> Result<Self, String> {
if size == 0 {
return Ok(Self {
@ -89,6 +92,7 @@ impl Memory {
}
}
/// Create a new memory with the given size.
pub fn with_size(size: usize) -> Result<Self, MemoryCreationError> {
if size == 0 {
return Ok(Self {
@ -127,6 +131,7 @@ impl Memory {
}
}
/// Protect this memory with the given range bounds and protection.
pub unsafe fn protect(
&mut self,
range: impl RangeBounds<usize>,
@ -166,6 +171,7 @@ impl Memory {
}
}
/// Split this memory into multiple memories by the given offset.
pub fn split_at(mut self, offset: usize) -> (Memory, Memory) {
let page_size = page_size::get();
if offset % page_size == 0 {
@ -187,22 +193,27 @@ impl Memory {
}
}
/// Gets the size of this memory.
pub fn size(&self) -> usize {
self.size
}
/// Gets a slice for this memory.
pub unsafe fn as_slice(&self) -> &[u8] {
slice::from_raw_parts(self.ptr, self.size)
}
/// Gets a mutable slice for this memory.
pub unsafe fn as_slice_mut(&mut self) -> &mut [u8] {
slice::from_raw_parts_mut(self.ptr, self.size)
}
/// Gets the protect kind of this memory.
pub fn protection(&self) -> Protect {
self.protection
}
/// Gets mutable pointer to the memory.
pub fn as_ptr(&self) -> *mut u8 {
self.ptr
}
@ -238,13 +249,19 @@ impl Clone for Memory {
}
}
/// Kinds of memory protection.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]
#[allow(dead_code)]
pub enum Protect {
/// Read/write/exec allowed.
None,
/// Read only.
Read,
/// Read/write only.
ReadWrite,
/// Read/exec only.
ReadExec,
/// Read/write/exec only.
ReadWriteExec,
}
@ -259,6 +276,7 @@ impl Protect {
}
}
/// Returns true if this memory is readable.
pub fn is_readable(self) -> bool {
match self {
Protect::Read | Protect::ReadWrite | Protect::ReadExec | Protect::ReadWriteExec => true,
@ -266,6 +284,7 @@ impl Protect {
}
}
/// Returns true if this memory is writable.
pub fn is_writable(self) -> bool {
match self {
Protect::ReadWrite | Protect::ReadWriteExec => true,

View File

@ -12,6 +12,7 @@ use winapi::um::winnt::{
unsafe impl Send for Memory {}
unsafe impl Sync for Memory {}
/// Data for a sized and protected region of memory.
#[derive(Debug)]
pub struct Memory {
ptr: *mut u8,
@ -20,6 +21,7 @@ pub struct Memory {
}
impl Memory {
/// Create a new memory from the given path value and protection.
pub fn with_size_protect(size: usize, protection: Protect) -> Result<Self, String> {
if size == 0 {
return Ok(Self {
@ -52,6 +54,7 @@ impl Memory {
}
}
/// Create a new memory with the given size.
pub fn with_size(size: usize) -> Result<Self, MemoryCreationError> {
if size == 0 {
return Ok(Self {
@ -79,6 +82,7 @@ impl Memory {
}
}
/// Protect this memory with the given range bounds and protection.
pub unsafe fn protect(
&mut self,
range: impl RangeBounds<usize>,
@ -120,6 +124,7 @@ impl Memory {
}
}
/// Split this memory into multiple memories by the given offset.
pub fn split_at(mut self, offset: usize) -> (Memory, Memory) {
let page_size = page_size::get();
if offset % page_size == 0 {
@ -140,22 +145,27 @@ impl Memory {
}
}
/// Gets the size of this memory.
pub fn size(&self) -> usize {
self.size
}
/// Gets a slice for this memory.
pub unsafe fn as_slice(&self) -> &[u8] {
slice::from_raw_parts(self.ptr, self.size)
}
/// Gets a mutable slice for this memory.
pub unsafe fn as_slice_mut(&mut self) -> &mut [u8] {
slice::from_raw_parts_mut(self.ptr, self.size)
}
/// Gets the protect kind of this memory.
pub fn protection(&self) -> Protect {
self.protection
}
/// Gets mutable pointer to the memory.
pub fn as_ptr(&self) -> *mut u8 {
self.ptr
}
@ -192,12 +202,17 @@ impl Clone for Memory {
}
}
/// Kinds of memory protection.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]
#[allow(dead_code)]
pub enum Protect {
/// Read/write/exec allowed.
None,
/// Read only.
Read,
/// Read/write only.
ReadWrite,
/// Read/exec only.
ReadExec,
}
@ -211,6 +226,7 @@ impl Protect {
}
}
/// Returns true if this memory is readable.
pub fn is_readable(self) -> bool {
match self {
Protect::Read | Protect::ReadWrite | Protect::ReadExec => true,
@ -218,6 +234,7 @@ impl Protect {
}
}
/// Returns true if this memory is writable.
pub fn is_writable(self) -> bool {
match self {
Protect::ReadWrite => true,

View File

@ -17,11 +17,13 @@ enum AnyfuncInner<'a> {
Managed(DynFunc<'a>),
}
/// Anyfunc data type.
pub struct Anyfunc<'a> {
inner: AnyfuncInner<'a>,
}
impl<'a> Anyfunc<'a> {
/// Create a new `Anyfunc`.
pub unsafe fn new<Sig>(func: *const vm::Func, signature: Sig) -> Self
where
Sig: Into<Arc<FuncSig>>,

View File

@ -1,3 +1,5 @@
//! The runtime table module contains data structures and functions used to create and update wasm
//! tables.
use crate::{
error::CreationError,
export::Export,
@ -16,16 +18,20 @@ pub use self::anyfunc::Anyfunc;
pub(crate) use self::anyfunc::AnyfuncTable;
use crate::error::GrowError;
/// Kind of table element.
pub enum Element<'a> {
/// Anyfunc.
Anyfunc(Anyfunc<'a>),
}
/// Kind of table storage.
// #[derive(Debug)]
pub enum TableStorage {
/// This is intended to be a caller-checked Anyfunc.
Anyfunc(Box<AnyfuncTable>),
}
/// Container with a descriptor and a reference to a table storage.
pub struct Table {
desc: TableDescriptor,
storage: Arc<Mutex<(TableStorage, vm::LocalTable)>>,
@ -128,6 +134,7 @@ impl Table {
}
}
/// Get a mutable pointer to underlying table storage.
pub fn vm_local_table(&mut self) -> *mut vm::LocalTable {
let mut storage = self.storage.lock().unwrap();
&mut storage.1

View File

@ -1,4 +1,6 @@
use crate::backend::{Compiler, CompilerConfig};
//! The tiering module supports switching between code compiled with different optimization levels
//! as runtime.
use crate::backend::{Backend, Compiler, CompilerConfig};
use crate::compile_with_config;
use crate::fault::{
catch_unsafe_unwind, ensure_sighandler, pop_code_version, push_code_version, with_ctx,
@ -22,12 +24,17 @@ impl<F: FnOnce()> Drop for Defer<F> {
}
}
/// Kind of shell exit operation.
pub enum ShellExitOperation {
/// Operation to continue with an instance image.
ContinueWith(InstanceImage),
}
/// Context for an interactive shell.
pub struct InteractiveShellContext {
/// Optional instance image.
pub image: Option<InstanceImage>,
/// Flag to indicate patching.
pub patched: bool,
}
@ -36,6 +43,7 @@ struct OptimizationState {
}
struct OptimizationOutcome {
backend_id: Backend,
module: Module,
}
@ -46,6 +54,7 @@ unsafe impl Sync for CtxWrapper {}
unsafe fn do_optimize(
binary: &[u8],
backend_id: Backend,
compiler: Box<dyn Compiler>,
ctx: &Mutex<CtxWrapper>,
state: &OptimizationState,
@ -65,11 +74,12 @@ unsafe fn do_optimize(
let ctx_inner = ctx.lock().unwrap();
if !ctx_inner.0.is_null() {
*state.outcome.lock().unwrap() = Some(OptimizationOutcome { module });
*state.outcome.lock().unwrap() = Some(OptimizationOutcome { backend_id, module });
set_wasm_interrupt_on_ctx(ctx_inner.0);
}
}
/// Runs an instance with tiering.
pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
module_info: &ModuleInfo,
wasm_binary: &[u8],
@ -77,7 +87,8 @@ pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
import_object: &ImportObject,
start_raw: extern "C" fn(&mut Ctx),
baseline: &mut Instance,
optimized_backends: Vec<Box<dyn Fn() -> Box<dyn Compiler> + Send>>,
baseline_backend: Backend,
optimized_backends: Vec<(Backend, Box<dyn Fn() -> Box<dyn Compiler> + Send>)>,
interactive_shell: F,
) -> Result<(), String> {
ensure_sighandler();
@ -99,9 +110,9 @@ pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
let ctx_box = ctx_box.clone();
let opt_state = opt_state.clone();
::std::thread::spawn(move || {
for backend in optimized_backends {
for (backend_id, backend) in optimized_backends {
if !ctx_box.lock().unwrap().0.is_null() {
do_optimize(&wasm_binary, backend(), &ctx_box, &opt_state);
do_optimize(&wasm_binary, backend_id, backend(), &ctx_box, &opt_state);
}
}
});
@ -117,6 +128,7 @@ pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
.get_module_state_map()
.unwrap(),
base: baseline.module.runnable_module.get_code().unwrap().as_ptr() as usize,
backend: baseline_backend,
});
let n_versions: Cell<usize> = Cell::new(1);
@ -127,7 +139,7 @@ pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
}));
loop {
let new_optimized: Option<&mut Instance> = {
let new_optimized: Option<(Backend, &mut Instance)> = {
let mut outcome = opt_state.outcome.lock().unwrap();
if let Some(x) = outcome.take() {
let instance = x
@ -136,12 +148,12 @@ pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
.map_err(|e| format!("Can't instantiate module: {:?}", e))?;
// Keep the optimized code alive.
optimized_instances.push(instance);
optimized_instances.last_mut()
optimized_instances.last_mut().map(|y| (x.backend_id, y))
} else {
None
}
};
if let Some(optimized) = new_optimized {
if let Some((backend_id, optimized)) = new_optimized {
let base = module_info.imported_functions.len();
let code_ptr = optimized
.module
@ -178,6 +190,7 @@ pub unsafe fn run_tiering<F: Fn(InteractiveShellContext) -> ShellExitOperation>(
.get_code()
.unwrap()
.as_ptr() as usize,
backend: backend_id,
});
n_versions.set(n_versions.get() + 1);

View File

@ -62,6 +62,7 @@ pub fn get_context() -> *const CallContext {
}
impl TrampolineBufferBuilder {
/// Creates a new empty `TrampolineBufferBuilder`.
pub fn new() -> TrampolineBufferBuilder {
TrampolineBufferBuilder {
code: vec![],
@ -100,6 +101,7 @@ impl TrampolineBufferBuilder {
idx
}
/// Adds context RSP state preserving trampoline to the buffer.
pub fn add_context_rsp_state_preserving_trampoline(
&mut self,
target: unsafe extern "C" fn(&mut Ctx, *const CallContext, *const u64),

View File

@ -1,9 +1,11 @@
//! The typed func module implements a way of representing a wasm function
//! with the correct types from rust. Function calls using a typed func have a low overhead.
use crate::{
error::RuntimeError,
export::{Context, Export, FuncPointer},
import::IsExport,
types::{FuncSig, NativeWasmType, Type, WasmExternType},
vm::{self, Ctx},
vm,
};
use std::{
any::Any,
@ -16,14 +18,22 @@ use std::{
sync::Arc,
};
/// Wasm trap info.
#[repr(C)]
pub enum WasmTrapInfo {
/// Unreachable trap.
Unreachable = 0,
/// Call indirect incorrect signature trap.
IncorrectCallIndirectSignature = 1,
/// Memory out of bounds trap.
MemoryOutOfBounds = 2,
/// Call indirect out of bounds trap.
CallIndirectOOB = 3,
/// Illegal arithmetic trap.
IllegalArithmetic = 4,
/// Misaligned atomic access trap.
MisalignedAtomicAccess = 5,
/// Unknown trap.
Unknown,
}
@ -52,16 +62,24 @@ impl fmt::Display for WasmTrapInfo {
/// of the `Func` struct.
pub trait Kind {}
pub type Trampoline = unsafe extern "C" fn(*mut Ctx, NonNull<vm::Func>, *const u64, *mut u64);
/// Aliases to an extern "C" type used as a trampoline to a function.
pub type Trampoline = unsafe extern "C" fn(
vmctx: *mut vm::Ctx,
func: NonNull<vm::Func>,
args: *const u64,
rets: *mut u64,
);
/// Aliases to an extern "C" type used to invoke a function.
pub type Invoke = unsafe extern "C" fn(
Trampoline,
*mut Ctx,
NonNull<vm::Func>,
*const u64,
*mut u64,
*mut WasmTrapInfo,
*mut Option<Box<dyn Any>>,
Option<NonNull<c_void>>,
trampoline: Trampoline,
vmctx: *mut vm::Ctx,
func: NonNull<vm::Func>,
args: *const u64,
rets: *mut u64,
trap_info: *mut WasmTrapInfo,
user_error: *mut Option<Box<dyn Any>>,
extra: Option<NonNull<c_void>>,
) -> bool;
/// TODO(lachlan): Naming TBD.
@ -75,6 +93,7 @@ pub struct Wasm {
}
impl Wasm {
/// Create new `Wasm` from given parts.
pub unsafe fn from_raw_parts(
trampoline: Trampoline,
invoke: Invoke,
@ -91,40 +110,99 @@ impl Wasm {
/// This type, as part of the `Func` type signature, represents a function that is created
/// by the host.
pub struct Host(());
impl Kind for Wasm {}
impl Kind for Host {}
/// Represents a list of WebAssembly values.
pub trait WasmTypeList {
/// CStruct type.
type CStruct;
/// Array of return values.
type RetArray: AsMut<[u64]>;
/// Construct `Self` based on an array of returned values.
fn from_ret_array(array: Self::RetArray) -> Self;
/// Generates an empty array that will hold the returned values of
/// the WebAssembly function.
fn empty_ret_array() -> Self::RetArray;
/// Transforms C values into Rust values.
fn from_c_struct(c_struct: Self::CStruct) -> Self;
/// Transforms Rust values into C values.
fn into_c_struct(self) -> Self::CStruct;
/// Get types of the current values.
fn types() -> &'static [Type];
/// This method is used to distribute the values onto a function,
/// e.g. `(1, 2).call(func, …)`. This form is unlikely to be used
/// directly in the code, see the `Func:call` implementation.
unsafe fn call<Rets>(
self,
f: NonNull<vm::Func>,
wasm: Wasm,
ctx: *mut Ctx,
ctx: *mut vm::Ctx,
) -> Result<Rets, RuntimeError>
where
Rets: WasmTypeList;
}
pub trait ExternalFunction<Args, Rets>
/// Empty trait to specify the kind of `ExternalFunction`: With or
/// without a `vm::Ctx` argument. See the `ExplicitVmCtx` and the
/// `ImplicitVmCtx` structures.
///
/// This type is never aimed to be used by a user. It is used by the
/// trait system to automatically generate an appropriate `wrap`
/// function.
pub trait ExternalFunctionKind {}
/// This empty structure indicates that an external function must
/// contain an explicit `vm::Ctx` argument (at first position).
///
/// ```rs,ignore
/// fn add_one(_: mut &vm::Ctx, x: i32) -> i32 {
/// x + 1
/// }
/// ```
pub struct ExplicitVmCtx {}
/// This empty structure indicates that an external function has no
/// `vm::Ctx` argument (at first position). Its signature is:
///
/// ```rs,ignore
/// fn add_one(x: i32) -> i32 {
/// x + 1
/// }
/// ```
pub struct ImplicitVmCtx {}
impl ExternalFunctionKind for ExplicitVmCtx {}
impl ExternalFunctionKind for ImplicitVmCtx {}
/// Represents a function that can be converted to a `vm::Func`
/// (function pointer) that can be called within WebAssembly.
pub trait ExternalFunction<Kind, Args, Rets>
where
Kind: ExternalFunctionKind,
Args: WasmTypeList,
Rets: WasmTypeList,
{
fn to_raw(&self) -> NonNull<vm::Func>;
/// Conver to function pointer.
fn to_raw(self) -> (NonNull<vm::Func>, Option<NonNull<vm::FuncEnv>>);
}
/// Represents a TrapEarly type.
pub trait TrapEarly<Rets>
where
Rets: WasmTypeList,
{
/// The error type for this trait.
type Error: 'static;
/// Get returns or error result.
fn report(self) -> Result<Rets, Self::Error>;
}
@ -149,19 +227,12 @@ where
}
}
// pub fn Func<'a, Args, Rets, F>(f: F) -> Func<'a, Args, Rets, Unsafe>
// where
// Args: WasmTypeList,
// Rets: WasmTypeList,
// F: ExternalFunction<Args, Rets>
// {
// Func::new(f)
// }
/// Represents a function that can be used by WebAssembly.
pub struct Func<'a, Args = (), Rets = (), Inner: Kind = Wasm> {
inner: Inner,
f: NonNull<vm::Func>,
ctx: *mut Ctx,
func: NonNull<vm::Func>,
func_env: Option<NonNull<vm::FuncEnv>>,
vmctx: *mut vm::Ctx,
_phantom: PhantomData<(&'a (), Args, Rets)>,
}
@ -175,19 +246,22 @@ where
{
pub(crate) unsafe fn from_raw_parts(
inner: Wasm,
f: NonNull<vm::Func>,
ctx: *mut Ctx,
func: NonNull<vm::Func>,
func_env: Option<NonNull<vm::FuncEnv>>,
vmctx: *mut vm::Ctx,
) -> Func<'a, Args, Rets, Wasm> {
Func {
inner,
f,
ctx,
func,
func_env,
vmctx,
_phantom: PhantomData,
}
}
/// Get the underlying func pointer.
pub fn get_vm_func(&self) -> NonNull<vm::Func> {
self.f
self.func
}
}
@ -196,14 +270,19 @@ where
Args: WasmTypeList,
Rets: WasmTypeList,
{
pub fn new<F>(f: F) -> Func<'a, Args, Rets, Host>
/// Creates a new `Func`.
pub fn new<F, Kind>(func: F) -> Func<'a, Args, Rets, Host>
where
F: ExternalFunction<Args, Rets>,
Kind: ExternalFunctionKind,
F: ExternalFunction<Kind, Args, Rets>,
{
let (func, func_env) = func.to_raw();
Func {
inner: Host(()),
f: f.to_raw(),
ctx: ptr::null_mut(),
func,
func_env,
vmctx: ptr::null_mut(),
_phantom: PhantomData,
}
}
@ -215,9 +294,12 @@ where
Rets: WasmTypeList,
Inner: Kind,
{
/// Returns the types of the function inputs.
pub fn params(&self) -> &'static [Type] {
Args::types()
}
/// Returns the types of the function outputs.
pub fn returns(&self) -> &'static [Type] {
Rets::types()
}
@ -226,139 +308,112 @@ where
impl WasmTypeList for Infallible {
type CStruct = Infallible;
type RetArray = [u64; 0];
fn from_ret_array(_: Self::RetArray) -> Self {
unreachable!()
}
fn empty_ret_array() -> Self::RetArray {
unreachable!()
}
fn from_c_struct(_: Self::CStruct) -> Self {
unreachable!()
}
fn into_c_struct(self) -> Self::CStruct {
unreachable!()
}
fn types() -> &'static [Type] {
&[]
}
#[allow(non_snake_case)]
unsafe fn call<Rets: WasmTypeList>(
unsafe fn call<Rets>(
self,
_: NonNull<vm::Func>,
_: Wasm,
_: *mut Ctx,
) -> Result<Rets, RuntimeError> {
_: *mut vm::Ctx,
) -> Result<Rets, RuntimeError>
where
Rets: WasmTypeList,
{
unreachable!()
}
}
impl<A: WasmExternType> WasmTypeList for (A,) {
type CStruct = S1<A>;
type RetArray = [u64; 1];
fn from_ret_array(array: Self::RetArray) -> Self {
(WasmExternType::from_native(NativeWasmType::from_binary(
array[0],
)),)
}
fn empty_ret_array() -> Self::RetArray {
[0u64]
}
fn from_c_struct(c_struct: Self::CStruct) -> Self {
let S1(a) = c_struct;
(WasmExternType::from_native(a),)
}
fn into_c_struct(self) -> Self::CStruct {
#[allow(unused_parens, non_snake_case)]
let (a,) = self;
S1(WasmExternType::to_native(a))
}
fn types() -> &'static [Type] {
&[A::Native::TYPE]
}
#[allow(non_snake_case)]
unsafe fn call<Rets: WasmTypeList>(
self,
f: NonNull<vm::Func>,
wasm: Wasm,
ctx: *mut Ctx,
) -> Result<Rets, RuntimeError> {
let (a,) = self;
let args = [a.to_native().to_binary()];
let mut rets = Rets::empty_ret_array();
let mut trap = WasmTrapInfo::Unknown;
let mut user_error = None;
if (wasm.invoke)(
wasm.trampoline,
ctx,
f,
args.as_ptr(),
rets.as_mut().as_mut_ptr(),
&mut trap,
&mut user_error,
wasm.invoke_env,
) {
Ok(Rets::from_ret_array(rets))
} else {
if let Some(data) = user_error {
Err(RuntimeError::Error { data })
} else {
Err(RuntimeError::Trap {
msg: trap.to_string().into(),
})
}
}
}
}
impl<'a, A: WasmExternType, Rets> Func<'a, (A,), Rets, Wasm>
where
Rets: WasmTypeList,
{
pub fn call(&self, a: A) -> Result<Rets, RuntimeError> {
unsafe { <A as WasmTypeList>::call(a, self.f, self.inner, self.ctx) }
}
}
macro_rules! impl_traits {
( [$repr:ident] $struct_name:ident, $( $x:ident ),* ) => {
/// Struct for typed funcs.
#[repr($repr)]
pub struct $struct_name <$( $x: WasmExternType ),*> ( $( <$x as WasmExternType>::Native ),* );
pub struct $struct_name< $( $x ),* > ( $( <$x as WasmExternType>::Native ),* )
where
$( $x: WasmExternType ),*;
impl< $( $x: WasmExternType, )* > WasmTypeList for ( $( $x ),* ) {
impl< $( $x ),* > WasmTypeList for ( $( $x ),* )
where
$( $x: WasmExternType ),*
{
type CStruct = $struct_name<$( $x ),*>;
type RetArray = [u64; count_idents!( $( $x ),* )];
fn from_ret_array(array: Self::RetArray) -> Self {
#[allow(non_snake_case)]
let [ $( $x ),* ] = array;
( $( WasmExternType::from_native(NativeWasmType::from_binary($x)) ),* )
}
fn empty_ret_array() -> Self::RetArray {
[0; count_idents!( $( $x ),* )]
}
fn from_c_struct(c_struct: Self::CStruct) -> Self {
#[allow(non_snake_case)]
let $struct_name ( $( $x ),* ) = c_struct;
( $( WasmExternType::from_native($x) ),* )
}
#[allow(unused_parens, non_snake_case)]
fn into_c_struct(self) -> Self::CStruct {
#[allow(unused_parens, non_snake_case)]
let ( $( $x ),* ) = self;
$struct_name ( $( WasmExternType::to_native($x) ),* )
}
fn types() -> &'static [Type] {
&[$( $x::Native::TYPE, )*]
&[$( $x::Native::TYPE ),*]
}
#[allow(non_snake_case)]
unsafe fn call<Rets: WasmTypeList>(self, f: NonNull<vm::Func>, wasm: Wasm, ctx: *mut Ctx) -> Result<Rets, RuntimeError> {
#[allow(unused_parens)]
#[allow(unused_parens, non_snake_case)]
unsafe fn call<Rets>(
self,
f: NonNull<vm::Func>,
wasm: Wasm,
ctx: *mut vm::Ctx,
) -> Result<Rets, RuntimeError>
where
Rets: WasmTypeList
{
let ( $( $x ),* ) = self;
let args = [ $( $x.to_native().to_binary()),* ];
let mut rets = Rets::empty_ret_array();
let mut trap = WasmTrapInfo::Unknown;
let mut user_error = None;
if (wasm.invoke)(wasm.trampoline, ctx, f, args.as_ptr(), rets.as_mut().as_mut_ptr(), &mut trap, &mut user_error, wasm.invoke_env) {
if (wasm.invoke)(
wasm.trampoline,
ctx,
f,
args.as_ptr(),
rets.as_mut().as_mut_ptr(),
&mut trap,
&mut user_error,
wasm.invoke_env
) {
Ok(Rets::from_ret_array(rets))
} else {
if let Some(data) = user_error {
@ -370,49 +425,250 @@ macro_rules! impl_traits {
}
}
impl< $( $x: WasmExternType, )* Rets: WasmTypeList, Trap: TrapEarly<Rets>, FN: Fn( &mut Ctx $( ,$x )* ) -> Trap> ExternalFunction<($( $x ),*), Rets> for FN {
impl< $( $x, )* Rets, Trap, FN > ExternalFunction<ExplicitVmCtx, ( $( $x ),* ), Rets> for FN
where
$( $x: WasmExternType, )*
Rets: WasmTypeList,
Trap: TrapEarly<Rets>,
FN: Fn(&mut vm::Ctx $( , $x )*) -> Trap + 'static,
{
#[allow(non_snake_case)]
fn to_raw(&self) -> NonNull<vm::Func> {
if mem::size_of::<Self>() == 0 {
/// This is required for the llvm backend to be able to unwind through this function.
#[cfg_attr(nightly, unwind(allowed))]
extern fn wrap<$( $x: WasmExternType, )* Rets: WasmTypeList, Trap: TrapEarly<Rets>, FN: Fn( &mut Ctx $( ,$x )* ) -> Trap>( ctx: &mut Ctx $( ,$x: <$x as WasmExternType>::Native )* ) -> Rets::CStruct {
let f: FN = unsafe { mem::transmute_copy(&()) };
fn to_raw(self) -> (NonNull<vm::Func>, Option<NonNull<vm::FuncEnv>>) {
// The `wrap` function is a wrapper around the
// imported function. It manages the argument passed
// to the imported function (in this case, the
// `vmctx` along with the regular WebAssembly
// arguments), and it manages the trapping.
//
// It is also required for the LLVM backend to be
// able to unwind through this function.
#[cfg_attr(nightly, unwind(allowed))]
extern fn wrap<$( $x, )* Rets, Trap, FN>(
vmctx: &vm::Ctx $( , $x: <$x as WasmExternType>::Native )*
) -> Rets::CStruct
where
$( $x: WasmExternType, )*
Rets: WasmTypeList,
Trap: TrapEarly<Rets>,
FN: Fn(&mut vm::Ctx, $( $x, )*) -> Trap,
{
// Get the pointer to this `wrap` function.
let self_pointer = wrap::<$( $x, )* Rets, Trap, FN> as *const vm::Func;
let err = match panic::catch_unwind(panic::AssertUnwindSafe(|| {
f( ctx $( ,WasmExternType::from_native($x) )* ).report()
})) {
Ok(Ok(returns)) => return returns.into_c_struct(),
Ok(Err(err)) => {
let b: Box<_> = err.into();
b as Box<dyn Any>
},
Err(err) => err,
};
// Get the collection of imported functions.
let vm_imported_functions = unsafe { &(*vmctx.import_backing).vm_functions };
unsafe {
(&*ctx.module).runnable_module.do_early_trap(err)
}
// Retrieve the `vm::FuncCtx`.
let mut func_ctx: NonNull<vm::FuncCtx> = vm_imported_functions
.iter()
.find_map(|(_, imported_func)| {
if imported_func.func == self_pointer {
Some(imported_func.func_ctx)
} else {
None
}
})
.expect("Import backing is not well-formed, cannot find `func_ctx`.");
let func_ctx = unsafe { func_ctx.as_mut() };
// Extract `vm::Ctx` from `vm::FuncCtx`. The
// pointer is always non-null.
let vmctx = unsafe { func_ctx.vmctx.as_mut() };
// Extract `vm::FuncEnv` from `vm::FuncCtx`.
let func_env = func_ctx.func_env;
let func: &FN = match func_env {
// The imported function is a regular
// function, a closure without a captured
// environment, or a closure with a captured
// environment.
Some(func_env) => unsafe {
let func: NonNull<FN> = func_env.cast();
&*func.as_ptr()
},
// This branch is supposed to be unreachable.
None => unreachable!()
};
// Catch unwind in case of errors.
let err = match panic::catch_unwind(
panic::AssertUnwindSafe(
|| {
func(vmctx $( , WasmExternType::from_native($x) )* ).report()
// ^^^^^ The imported function
// expects `vm::Ctx` as first
// argument; provide it.
}
)
) {
Ok(Ok(returns)) => return returns.into_c_struct(),
Ok(Err(err)) => {
let b: Box<_> = err.into();
b as Box<dyn Any>
},
Err(err) => err,
};
// At this point, there is an error that needs to
// be trapped.
unsafe {
(&*vmctx.module).runnable_module.do_early_trap(err)
}
NonNull::new(wrap::<$( $x, )* Rets, Trap, Self> as *mut vm::Func).unwrap()
} else {
assert_eq!(mem::size_of::<Self>(), mem::size_of::<usize>(), "you cannot use a closure that captures state for `Func`.");
NonNull::new(unsafe {
::std::mem::transmute_copy::<_, *mut vm::Func>(self)
}).unwrap()
}
// Extract the captured environment of the imported
// function if any.
let func_env: Option<NonNull<vm::FuncEnv>> =
// `FN` is a function pointer, or a closure
// _without_ a captured environment.
if mem::size_of::<Self>() == 0 {
NonNull::new(&self as *const _ as *mut vm::FuncEnv)
}
// `FN` is a closure _with_ a captured
// environment.
else {
NonNull::new(Box::into_raw(Box::new(self))).map(NonNull::cast)
};
(
NonNull::new(wrap::<$( $x, )* Rets, Trap, Self> as *mut vm::Func).unwrap(),
func_env
)
}
}
impl<'a, $( $x: WasmExternType, )* Rets> Func<'a, ( $( $x ),* ), Rets, Wasm>
impl< $( $x, )* Rets, Trap, FN > ExternalFunction<ImplicitVmCtx, ( $( $x ),* ), Rets> for FN
where
$( $x: WasmExternType, )*
Rets: WasmTypeList,
Trap: TrapEarly<Rets>,
FN: Fn($( $x, )*) -> Trap + 'static,
{
#[allow(non_snake_case)]
fn to_raw(self) -> (NonNull<vm::Func>, Option<NonNull<vm::FuncEnv>>) {
// The `wrap` function is a wrapper around the
// imported function. It manages the argument passed
// to the imported function (in this case, only the
// regular WebAssembly arguments), and it manages the
// trapping.
//
// It is also required for the LLVM backend to be
// able to unwind through this function.
#[cfg_attr(nightly, unwind(allowed))]
extern fn wrap<$( $x, )* Rets, Trap, FN>(
vmctx: &vm::Ctx $( , $x: <$x as WasmExternType>::Native )*
) -> Rets::CStruct
where
$( $x: WasmExternType, )*
Rets: WasmTypeList,
Trap: TrapEarly<Rets>,
FN: Fn($( $x, )*) -> Trap,
{
// Get the pointer to this `wrap` function.
let self_pointer = wrap::<$( $x, )* Rets, Trap, FN> as *const vm::Func;
// Get the collection of imported functions.
let vm_imported_functions = unsafe { &(*vmctx.import_backing).vm_functions };
// Retrieve the `vm::FuncCtx`.
let mut func_ctx: NonNull<vm::FuncCtx> = vm_imported_functions
.iter()
.find_map(|(_, imported_func)| {
if imported_func.func == self_pointer {
Some(imported_func.func_ctx)
} else {
None
}
})
.expect("Import backing is not well-formed, cannot find `func_ctx`.");
let func_ctx = unsafe { func_ctx.as_mut() };
// Extract `vm::Ctx` from `vm::FuncCtx`. The
// pointer is always non-null.
let vmctx = unsafe { func_ctx.vmctx.as_mut() };
// Extract `vm::FuncEnv` from `vm::FuncCtx`.
let func_env = func_ctx.func_env;
let func: &FN = match func_env {
// The imported function is a regular
// function, a closure without a captured
// environment, or a closure with a captured
// environment.
Some(func_env) => unsafe {
let func: NonNull<FN> = func_env.cast();
&*func.as_ptr()
},
// This branch is supposed to be unreachable.
None => unreachable!()
};
// Catch unwind in case of errors.
let err = match panic::catch_unwind(
panic::AssertUnwindSafe(
|| {
func($( WasmExternType::from_native($x), )* ).report()
}
)
) {
Ok(Ok(returns)) => return returns.into_c_struct(),
Ok(Err(err)) => {
let b: Box<_> = err.into();
b as Box<dyn Any>
},
Err(err) => err,
};
// At this point, there is an error that needs to
// be trapped.
unsafe {
(&*vmctx.module).runnable_module.do_early_trap(err)
}
}
// Extract the captured environment of the imported
// function if any.
let func_env: Option<NonNull<vm::FuncEnv>> =
// `FN` is a function pointer, or a closure
// _without_ a captured environment.
if mem::size_of::<Self>() == 0 {
NonNull::new(&self as *const _ as *mut vm::FuncEnv)
}
// `FN` is a closure _with_ a captured
// environment.
else {
NonNull::new(Box::into_raw(Box::new(self))).map(NonNull::cast)
};
(
NonNull::new(wrap::<$( $x, )* Rets, Trap, Self> as *mut vm::Func).unwrap(),
func_env
)
}
}
impl<'a $( , $x )*, Rets> Func<'a, ( $( $x ),* ), Rets, Wasm>
where
$( $x: WasmExternType, )*
Rets: WasmTypeList,
{
/// Call the typed func and return results.
#[allow(non_snake_case)]
pub fn call(&self, $( $x: $x, )* ) -> Result<Rets, RuntimeError> {
#[allow(unused_parens)]
unsafe { <( $( $x ),* ) as WasmTypeList>::call(( $($x),* ), self.f, self.inner, self.ctx) }
unsafe {
<( $( $x ),* ) as WasmTypeList>::call(
( $( $x ),* ),
self.func,
self.inner,
self.vmctx
)
}
}
}
};
@ -448,8 +704,11 @@ where
Inner: Kind,
{
fn to_export(&self) -> Export {
let func = unsafe { FuncPointer::new(self.f.as_ptr()) };
let ctx = Context::Internal;
let func = unsafe { FuncPointer::new(self.func.as_ptr()) };
let ctx = match self.func_env {
func_env @ Some(_) => Context::ExternalWithEnv(self.vmctx, func_env),
None => Context::Internal,
};
let signature = Arc::new(FuncSig::new(Args::types(), Rets::types()));
Export::Function {
@ -463,9 +722,65 @@ where
#[cfg(test)]
mod tests {
use super::*;
macro_rules! test_func_arity_n {
($test_name:ident, $($x:ident),*) => {
#[test]
fn $test_name() {
use crate::vm;
fn with_vmctx(_: &mut vm::Ctx, $($x: i32),*) -> i32 {
vec![$($x),*].iter().sum()
}
fn without_vmctx($($x: i32),*) -> i32 {
vec![$($x),*].iter().sum()
}
let _ = Func::new(with_vmctx);
let _ = Func::new(without_vmctx);
let _ = Func::new(|_: &mut vm::Ctx, $($x: i32),*| -> i32 {
vec![$($x),*].iter().sum()
});
let _ = Func::new(|$($x: i32),*| -> i32 {
vec![$($x),*].iter().sum()
});
}
}
}
#[test]
fn test_func_arity_0() {
fn foo(_: &mut vm::Ctx) -> i32 {
0
}
fn bar() -> i32 {
0
}
let _ = Func::new(foo);
let _ = Func::new(bar);
let _ = Func::new(|_: &mut vm::Ctx| -> i32 { 0 });
let _ = Func::new(|| -> i32 { 0 });
}
test_func_arity_n!(test_func_arity_1, a);
test_func_arity_n!(test_func_arity_2, a, b);
test_func_arity_n!(test_func_arity_3, a, b, c);
test_func_arity_n!(test_func_arity_4, a, b, c, d);
test_func_arity_n!(test_func_arity_5, a, b, c, d, e);
test_func_arity_n!(test_func_arity_6, a, b, c, d, e, f);
test_func_arity_n!(test_func_arity_7, a, b, c, d, e, f, g);
test_func_arity_n!(test_func_arity_8, a, b, c, d, e, f, g, h);
test_func_arity_n!(test_func_arity_9, a, b, c, d, e, f, g, h, i);
test_func_arity_n!(test_func_arity_10, a, b, c, d, e, f, g, h, i, j);
test_func_arity_n!(test_func_arity_11, a, b, c, d, e, f, g, h, i, j, k);
test_func_arity_n!(test_func_arity_12, a, b, c, d, e, f, g, h, i, j, k, l);
#[test]
fn test_call() {
fn foo(_ctx: &mut Ctx, a: i32, b: i32) -> (i32, i32) {
fn foo(_ctx: &mut vm::Ctx, a: i32, b: i32) -> (i32, i32) {
(a, b)
}
@ -476,7 +791,7 @@ mod tests {
fn test_imports() {
use crate::{func, imports};
fn foo(_ctx: &mut Ctx, a: i32) -> i32 {
fn foo(_ctx: &mut vm::Ctx, a: i32) -> i32 {
a
}

View File

@ -1,3 +1,6 @@
//! The runtime types modules represent type used within the wasm runtime and helper functions to
//! convert to other represenations.
use crate::{memory::MemoryType, module::ModuleInfo, structures::TypedIndex, units::Pages};
use std::borrow::Cow;
@ -41,6 +44,7 @@ pub enum Value {
}
impl Value {
/// The `Type` of this `Value`.
pub fn ty(&self) -> Type {
match self {
Value::I32(_) => Type::I32,
@ -51,6 +55,7 @@ impl Value {
}
}
/// Convert this `Value` to a u128 binary representation.
pub fn to_u128(&self) -> u128 {
match *self {
Value::I32(x) => x as u128,
@ -92,12 +97,16 @@ impl From<u128> for Value {
}
}
/// Represents a native wasm type.
pub unsafe trait NativeWasmType: Copy + Into<Value>
where
Self: Sized,
{
/// Type for this `NativeWasmType`.
const TYPE: Type;
/// Convert from u64 bites to self.
fn from_binary(bits: u64) -> Self;
/// Convert self to u64 binary representation.
fn to_binary(self) -> u64;
}
@ -138,12 +147,16 @@ unsafe impl NativeWasmType for f64 {
}
}
/// A trait to represent a wasm extern type.
pub unsafe trait WasmExternType: Copy
where
Self: Sized,
{
/// Native wasm type for this `WasmExternType`.
type Native: NativeWasmType;
/// Convert from given `Native` type to self.
fn from_native(native: Self::Native) -> Self;
/// Convert self to `Native` type.
fn to_native(self) -> Self::Native;
}
@ -255,6 +268,7 @@ unsafe impl WasmExternType for f64 {
// fn swap(&self, other: Self::Primitive) -> Self::Primitive;
// }
/// Trait for a Value type.
pub unsafe trait ValueType: Copy
where
Self: Sized,
@ -274,12 +288,15 @@ macro_rules! convert_value_impl {
convert_value_impl!(u8, i8, u16, i16, u32, i32, u64, i64, f32, f64);
/// Kinds of element types.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
pub enum ElementType {
/// Any wasm function.
Anyfunc,
}
/// Describes the properties of a table including the element types, minimum and optional maximum,
/// number of elements in the table.
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct TableDescriptor {
/// Type of data stored in this table.
@ -315,14 +332,18 @@ pub enum Initializer {
/// Describes the mutability and type of a Global
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
pub struct GlobalDescriptor {
/// Mutable flag.
pub mutable: bool,
/// Wasm type.
pub ty: Type,
}
/// A wasm global.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct GlobalInit {
/// Global descriptor.
pub desc: GlobalDescriptor,
/// Global initializer.
pub init: Initializer,
}
@ -340,6 +361,7 @@ pub struct MemoryDescriptor {
}
impl MemoryDescriptor {
/// Create a new memory descriptor with the given min/max pages and shared flag.
pub fn new(minimum: Pages, maximum: Option<Pages>, shared: bool) -> Result<Self, String> {
let memory_type = match (maximum.is_some(), shared) {
(true, true) => MemoryType::SharedStatic,
@ -357,6 +379,7 @@ impl MemoryDescriptor {
})
}
/// Returns the `MemoryType` for this descriptor.
pub fn memory_type(&self) -> MemoryType {
self.memory_type
}
@ -380,6 +403,7 @@ pub struct FuncSig {
}
impl FuncSig {
/// Creates a new function signatures with the given parameter and return types.
pub fn new<Params, Returns>(params: Params, returns: Returns) -> Self
where
Params: Into<Cow<'static, [Type]>>,
@ -391,14 +415,17 @@ impl FuncSig {
}
}
/// Parameter types.
pub fn params(&self) -> &[Type] {
&self.params
}
/// Return types.
pub fn returns(&self) -> &[Type] {
&self.returns
}
/// Returns true if parameter types match the function signature.
pub fn check_param_value_types(&self, params: &[Value]) -> bool {
self.params.len() == params.len()
&& self
@ -427,14 +454,18 @@ impl std::fmt::Display for FuncSig {
}
}
/// Trait that represents Local or Import.
pub trait LocalImport {
/// Local type.
type Local: TypedIndex;
/// Import type.
type Import: TypedIndex;
}
#[rustfmt::skip]
macro_rules! define_map_index {
($ty:ident) => {
/// Typed Index
#[derive(Serialize, Deserialize)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct $ty (u32);
@ -475,6 +506,7 @@ define_map_index![
macro_rules! define_local_or_import {
($ty:ident, $local_ty:ident, $imported_ty:ident, $imports:ident) => {
impl $ty {
/// Converts self into `LocalOrImport`.
pub fn local_or_import(self, info: &ModuleInfo) -> LocalOrImport<$ty> {
if self.index() < info.$imports.len() {
LocalOrImport::Import(<Self as LocalImport>::Import::new(self.index()))
@ -485,12 +517,14 @@ macro_rules! define_local_or_import {
}
impl $local_ty {
/// Convert up.
pub fn convert_up(self, info: &ModuleInfo) -> $ty {
$ty ((self.index() + info.$imports.len()) as u32)
}
}
impl $imported_ty {
/// Convert up.
pub fn convert_up(self, _info: &ModuleInfo) -> $ty {
$ty (self.index() as u32)
}
@ -511,6 +545,7 @@ define_local_or_import![
(GlobalIndex | (LocalGlobalIndex, ImportedGlobalIndex): imported_globals),
];
/// Index for signature.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct SigIndex(u32);
impl TypedIndex for SigIndex {
@ -525,11 +560,14 @@ impl TypedIndex for SigIndex {
}
}
/// Kind of local or import type.
///
/// The resolved form of a typed index: either the entity is defined in
/// this module (`Local`) or it is provided by an import (`Import`).
pub enum LocalOrImport<T>
where
T: LocalImport,
{
/// Local.
Local(T::Local),
/// Import.
Import(T::Import),
}
@ -537,6 +575,7 @@ impl<T> LocalOrImport<T>
where
T: LocalImport,
{
/// Returns `Some` if self is local, `None` if self is an import.
pub fn local(self) -> Option<T::Local> {
match self {
LocalOrImport::Local(local) => Some(local),
@ -544,6 +583,7 @@ where
}
}
/// Returns `Some` if self is an import, `None` if self is local.
pub fn import(self) -> Option<T::Import> {
match self {
LocalOrImport::Import(import) => Some(import),

View File

@ -1,12 +1,17 @@
//! The units module provides common WebAssembly units like `Pages` and conversion functions into
//! other units.
use crate::error::PageError;
use std::{
fmt,
ops::{Add, Sub},
};
/// The page size in bytes of a wasm page.
pub const WASM_PAGE_SIZE: usize = 65_536;
/// The maximum number of wasm pages allowed.
pub const WASM_MAX_PAGES: usize = 65_536;
// From emscripten resize_heap implementation
/// The minimum number of wasm pages allowed.
pub const WASM_MIN_PAGES: usize = 256;
/// Units of WebAssembly pages (as specified to be 65,536 bytes).
@ -14,6 +19,7 @@ pub const WASM_MIN_PAGES: usize = 256;
pub struct Pages(pub u32);
impl Pages {
/// Checked add of Pages to Pages.
pub fn checked_add(self, rhs: Pages) -> Result<Pages, PageError> {
let added = (self.0 as usize) + (rhs.0 as usize);
if added <= WASM_MAX_PAGES {
@ -27,6 +33,7 @@ impl Pages {
}
}
/// Calculate number of bytes from pages.
///
/// Delegates to the `Pages` -> `Bytes` conversion defined elsewhere in
/// this module (one wasm page is `WASM_PAGE_SIZE` bytes).
pub fn bytes(self) -> Bytes {
self.into()
}

View File

@ -1,3 +1,5 @@
//! The runtime vm module contains data structures and helper functions used during runtime to
//! execute wasm instance functions.
pub use crate::backing::{ImportBacking, LocalBacking, INTERNALS_SIZE};
use crate::{
error::CallResult,
@ -36,6 +38,7 @@ use std::collections::HashMap;
#[repr(C)]
pub struct Ctx {
// `internal` must be the first field of `Ctx`.
/// InternalCtx data field
pub internal: InternalCtx,
pub(crate) local_functions: *const *const Func,
@ -43,7 +46,9 @@ pub struct Ctx {
/// These are pointers to things that are known to be owned
/// by the owning `Instance`.
pub local_backing: *mut LocalBacking,
/// Mutable pointer to import data
pub import_backing: *mut ImportBacking,
/// Const pointer to module inner data
pub module: *const ModuleInner,
/// This is intended to be user-supplied, per-instance
@ -110,22 +115,31 @@ pub struct InternalCtx {
/// modules safely.
pub dynamic_sigindices: *const SigId,
/// Const pointer to Intrinsics.
pub intrinsics: *const Intrinsics,
/// Stack lower bound.
pub stack_lower_bound: *mut u8,
/// Mutable pointer to memory base.
pub memory_base: *mut u8,
/// Memory bound.
pub memory_bound: usize,
/// Mutable pointer to internal fields.
pub internals: *mut [u64; INTERNALS_SIZE], // TODO: Make this dynamic?
/// Mutable pointer to the interrupt signal memory.
pub interrupt_signal_mem: *mut u8,
}
static INTERNAL_FIELDS: AtomicUsize = AtomicUsize::new(0);
/// An internal field.
pub struct InternalField {
/// Init once field; guards the one-time initialization performed in
/// `index()`.
init: Once,
/// Inner field; written exactly once under `init` (presumably the
/// field's allocated slot index — see `index()` and `INTERNAL_FIELDS`).
inner: UnsafeCell<usize>,
}
@ -133,6 +147,7 @@ unsafe impl Send for InternalField {}
unsafe impl Sync for InternalField {}
impl InternalField {
/// Allocate and return an `InternalField`.
pub const fn allocate() -> InternalField {
InternalField {
init: Once::new(),
@ -140,6 +155,7 @@ impl InternalField {
}
}
/// Get the index of this `InternalField`.
pub fn index(&self) -> usize {
let inner: *mut usize = self.inner.get();
self.init.call_once(|| {
@ -157,9 +173,12 @@ impl InternalField {
}
}
/// A container for VM intrinsic functions.
#[repr(C)]
pub struct Intrinsics {
/// Const pointer to memory grow `Func`.
pub memory_grow: *const Func,
/// Const pointer to memory size `Func`.
pub memory_size: *const Func,
/*pub memory_grow: unsafe extern "C" fn(
ctx: &mut Ctx,
@ -176,27 +195,33 @@ unsafe impl Send for Intrinsics {}
unsafe impl Sync for Intrinsics {}
impl Intrinsics {
/// Byte offset of the pointer-sized field at position `index` inside
/// `Intrinsics` (`#[repr(C)]`; both fields are function pointers).
fn field_offset(index: usize) -> u8 {
(index * ::std::mem::size_of::<usize>()) as u8
}

/// Memory grow offset
pub fn offset_memory_grow() -> u8 {
Self::field_offset(0)
}

/// Memory size offset
pub fn offset_memory_size() -> u8 {
Self::field_offset(1)
}
}
/// Local static memory intrinsics
///
/// `Intrinsics` table wired to the `local_static_memory_*` vmcalls.
pub static INTRINSICS_LOCAL_STATIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::local_static_memory_grow as _,
memory_size: vmcalls::local_static_memory_size as _,
};
/// Local dynamic memory intrinsics
///
/// `Intrinsics` table wired to the `local_dynamic_memory_*` vmcalls.
pub static INTRINSICS_LOCAL_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::local_dynamic_memory_grow as _,
memory_size: vmcalls::local_dynamic_memory_size as _,
};
/// Imported static memory intrinsics
///
/// `Intrinsics` table wired to the `imported_static_memory_*` vmcalls.
pub static INTRINSICS_IMPORTED_STATIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::imported_static_memory_grow as _,
memory_size: vmcalls::imported_static_memory_size as _,
};
/// Imported dynamic memory intrinsics
pub static INTRINSICS_IMPORTED_DYNAMIC_MEMORY: Intrinsics = Intrinsics {
memory_grow: vmcalls::imported_dynamic_memory_grow as _,
memory_size: vmcalls::imported_dynamic_memory_size as _,
@ -498,34 +523,80 @@ impl Ctx {
}
}
enum InnerFunc {}
/// Used to provide type safety (ish) for passing around function pointers.
/// The typesystem ensures this cannot be dereferenced since an
/// empty enum cannot actually exist.
#[repr(C)]
pub struct Func(InnerFunc);
/// Represents a function pointer. It is mostly used in the
/// `typed_func` module within the `wrap` functions, to wrap imported
/// functions.
#[repr(transparent)]
pub struct Func(pub(self) *mut c_void);
/// An imported function, which contains the vmctx that owns this function.
/// Represents a function environment pointer, like a captured
/// environment of a closure. It is mostly used in the `typed_func`
/// module within the `wrap` functions, to wrap imported functions.
#[repr(transparent)]
pub struct FuncEnv(pub(self) *mut c_void);
/// Represents a function context. It is used by imported functions
/// only.
///
/// `#[repr(C)]` keeps the field order fixed; the byte offsets are
/// exposed by `FuncCtx::offset_vmctx` / `FuncCtx::offset_func_env`.
#[derive(Debug)]
#[repr(C)]
pub struct FuncCtx {
/// The `Ctx` pointer.
pub(crate) vmctx: NonNull<Ctx>,
/// A pointer to the function environment. It is used by imported
/// functions only to store the pointer to the real host function,
/// whether it is a regular function, or a closure with or without
/// a captured environment.
pub(crate) func_env: Option<NonNull<FuncEnv>>,
}
impl FuncCtx {
/// Byte offset of the pointer-sized field at `index` in `FuncCtx`.
fn field_offset(index: u8) -> u8 {
index * (mem::size_of::<usize>() as u8)
}

/// Offset to `vmctx`.
pub fn offset_vmctx() -> u8 {
Self::field_offset(0)
}

/// Offset to `func_env`.
pub fn offset_func_env() -> u8 {
Self::field_offset(1)
}

/// Size of a `FuncCtx`.
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
/// An imported function is a function pointer associated to a
/// function context.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct ImportedFunc {
pub func: *const Func,
pub vmctx: *mut Ctx,
/// Const pointer to `Func`.
pub(crate) func: *const Func,
/// Mutable non-null pointer to `FuncCtx`.
pub(crate) func_ctx: NonNull<FuncCtx>,
}
// manually implemented because ImportedFunc contains raw pointers directly; `Func` is marked Send (But `Ctx` actually isn't! (TODO: review this, shouldn't `Ctx` be Send?))
// Manually implemented because ImportedFunc contains raw pointers
// directly; `Func` is marked Send (But `Ctx` actually isn't! (TODO:
// review this, shouldn't `Ctx` be Send?))
unsafe impl Send for ImportedFunc {}
impl ImportedFunc {
/// Offset to func.
///
/// `func` is the first field of `#[repr(C)]` `ImportedFunc`, so zero.
#[allow(clippy::erasing_op)] // TODO
pub fn offset_func() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
pub fn offset_vmctx() -> u8 {
/// Offset to func_ctx.
pub fn offset_func_ctx() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
/// Size of an `ImportedFunc`.
///
/// In bytes, truncated to `u8` (the struct is two pointers wide).
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
@ -547,15 +618,18 @@ pub struct LocalTable {
unsafe impl Send for LocalTable {}
impl LocalTable {
/// Offset to base.
///
/// `base` is the first field of `LocalTable`, so the offset is zero.
#[allow(clippy::erasing_op)] // TODO
pub fn offset_base() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
/// Offset count.
///
/// `count` sits one pointer width past `base`.
pub fn offset_count() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
/// Size of a `LocalTable`.
///
/// In bytes, truncated to `u8`.
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
@ -579,15 +653,18 @@ pub struct LocalMemory {
unsafe impl Send for LocalMemory {}
impl LocalMemory {
/// Offset base.
///
/// `base` is the first field of `LocalMemory`, so the offset is zero.
#[allow(clippy::erasing_op)] // TODO
pub fn offset_base() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
/// Offset bound.
///
/// `bound` sits one pointer width past `base`.
pub fn offset_bound() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
/// Size of a `LocalMemory`.
///
/// In bytes, truncated to `u8`.
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
@ -597,24 +674,29 @@ impl LocalMemory {
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct LocalGlobal {
/// Data. Raw 128-bit storage for the global's value; wide enough to
/// hold any wasm value type.
pub data: u128,
}
impl LocalGlobal {
/// Offset data.
#[allow(clippy::erasing_op)] // TODO
pub fn offset_data() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
/// A null `LocalGlobal`.
pub fn null() -> Self {
Self { data: 0 }
}
/// Size of a `LocalGlobal`.
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
}
/// Identifier for a function signature.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct SigId(pub u32);
@ -623,8 +705,11 @@ pub struct SigId(pub u32);
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Anyfunc {
/// Const pointer to `Func`.
pub func: *const Func,
/// Mutable pointer to `Ctx`.
pub ctx: *mut Ctx,
/// Sig id of this function; see `SigId` and `Anyfunc::offset_sig_id`.
pub sig_id: SigId,
}
@ -632,6 +717,7 @@ pub struct Anyfunc {
unsafe impl Send for Anyfunc {}
impl Anyfunc {
/// A null `Anyfunc` value.
pub fn null() -> Self {
Self {
func: ptr::null(),
@ -640,19 +726,23 @@ impl Anyfunc {
}
}
/// The offset for this func.
///
/// `func` is the first field of `#[repr(C)]` `Anyfunc`, so zero.
#[allow(clippy::erasing_op)] // TODO
pub fn offset_func() -> u8 {
0 * (mem::size_of::<usize>() as u8)
}
/// The offset of the vmctx.
///
/// The `ctx` field sits one pointer width past `func`.
pub fn offset_vmctx() -> u8 {
1 * (mem::size_of::<usize>() as u8)
}
/// The offset of the sig id.
///
/// The `sig_id` field sits two pointer widths past the start.
pub fn offset_sig_id() -> u8 {
2 * (mem::size_of::<usize>() as u8)
}
/// The size of `Anyfunc`.
///
/// In bytes, truncated to `u8`.
pub fn size() -> u8 {
mem::size_of::<Self>() as u8
}
@ -660,93 +750,160 @@ impl Anyfunc {
#[cfg(test)]
mod vm_offset_tests {
use super::{Anyfunc, Ctx, ImportedFunc, InternalCtx, LocalGlobal, LocalMemory, LocalTable};
use super::{
Anyfunc, Ctx, FuncCtx, ImportedFunc, InternalCtx, LocalGlobal, LocalMemory, LocalTable,
};
// Inspired by https://internals.rust-lang.org/t/discussion-on-offset-of/7440/2.
// Inspired by https://internals.rust-lang.org/t/discussion-on-offset-of/7440/2.
//
// Computes the byte offset of `$field` within `$struct` without ever
// reading the (uninitialized) storage: the struct is destructured by
// reference so only the field's *address* is taken.
macro_rules! offset_of {
($struct:path, $field:ident) => {{
fn field_byte_offset() -> usize {
use std::mem;
// Correctly sized and aligned, but uninitialized, storage.
let storage = mem::MaybeUninit::<$struct>::uninit();
// Destructure by reference to obtain a pointer to the field.
let &$struct {
$field: ref field, ..
} = unsafe { &*storage.as_ptr() };
let delta =
(field as *const _ as usize).wrapping_sub(&storage as *const _ as usize);
// Sanity check: the field must lie inside the struct.
assert!((0..=mem::size_of_val(&storage)).contains(&delta));
delta
}
field_byte_offset()
}};
}
#[test]
fn offset_of() {
// Validates the `offset_of!` macro against a `#[repr(C)]` struct
// containing every size/alignment class used by the VM structs.
// NOTE(review): the expected values after `f7` assume a 64-bit target
// (pointer-sized fields at 8-byte alignment, `u128` at 16) — the
// assertions would not hold as written on 32-bit; confirm if 32-bit
// targets are supported.
use std::{mem, ptr::NonNull};
struct S0;
#[repr(C)]
struct S1 {
f1: u8,
f2: u16,
f3: u32,
f4: u64,
f5: u128,
f6: f32,
f7: f64,
f8: NonNull<S0>,
f9: Option<NonNull<S0>>,
f10: *mut S0,
z: u8,
}
assert_eq!(offset_of!(S1, f1), 0);
assert_eq!(offset_of!(S1, f2), 2);
assert_eq!(offset_of!(S1, f3), 4);
assert_eq!(offset_of!(S1, f4), 8);
assert_eq!(offset_of!(S1, f5), 16);
assert_eq!(offset_of!(S1, f6), 32);
assert_eq!(offset_of!(S1, f7), 40);
assert_eq!(offset_of!(S1, f8), 40 + mem::size_of::<usize>());
assert_eq!(offset_of!(S1, f9), 48 + mem::size_of::<usize>());
assert_eq!(offset_of!(S1, f10), 56 + mem::size_of::<usize>());
assert_eq!(offset_of!(S1, z), 64 + mem::size_of::<usize>());
}
#[test]
fn vmctx() {
assert_eq!(0usize, offset_of!(Ctx => internal).get_byte_offset(),);
assert_eq!(0usize, offset_of!(Ctx, internal));
assert_eq!(
Ctx::offset_memories() as usize,
offset_of!(InternalCtx => memories).get_byte_offset(),
offset_of!(InternalCtx, memories),
);
assert_eq!(
Ctx::offset_tables() as usize,
offset_of!(InternalCtx => tables).get_byte_offset(),
offset_of!(InternalCtx, tables),
);
assert_eq!(
Ctx::offset_globals() as usize,
offset_of!(InternalCtx => globals).get_byte_offset(),
offset_of!(InternalCtx, globals),
);
assert_eq!(
Ctx::offset_imported_memories() as usize,
offset_of!(InternalCtx => imported_memories).get_byte_offset(),
offset_of!(InternalCtx, imported_memories),
);
assert_eq!(
Ctx::offset_imported_tables() as usize,
offset_of!(InternalCtx => imported_tables).get_byte_offset(),
offset_of!(InternalCtx, imported_tables),
);
assert_eq!(
Ctx::offset_imported_globals() as usize,
offset_of!(InternalCtx => imported_globals).get_byte_offset(),
offset_of!(InternalCtx, imported_globals),
);
assert_eq!(
Ctx::offset_imported_funcs() as usize,
offset_of!(InternalCtx => imported_funcs).get_byte_offset(),
offset_of!(InternalCtx, imported_funcs),
);
assert_eq!(
Ctx::offset_intrinsics() as usize,
offset_of!(InternalCtx => intrinsics).get_byte_offset(),
offset_of!(InternalCtx, intrinsics),
);
assert_eq!(
Ctx::offset_stack_lower_bound() as usize,
offset_of!(InternalCtx => stack_lower_bound).get_byte_offset(),
offset_of!(InternalCtx, stack_lower_bound),
);
assert_eq!(
Ctx::offset_memory_base() as usize,
offset_of!(InternalCtx => memory_base).get_byte_offset(),
offset_of!(InternalCtx, memory_base),
);
assert_eq!(
Ctx::offset_memory_bound() as usize,
offset_of!(InternalCtx => memory_bound).get_byte_offset(),
offset_of!(InternalCtx, memory_bound),
);
assert_eq!(
Ctx::offset_internals() as usize,
offset_of!(InternalCtx => internals).get_byte_offset(),
offset_of!(InternalCtx, internals),
);
assert_eq!(
Ctx::offset_interrupt_signal_mem() as usize,
offset_of!(InternalCtx => interrupt_signal_mem).get_byte_offset(),
offset_of!(InternalCtx, interrupt_signal_mem),
);
assert_eq!(
Ctx::offset_local_functions() as usize,
offset_of!(Ctx => local_functions).get_byte_offset(),
offset_of!(Ctx, local_functions),
);
}
#[test]
fn func_ctx() {
// `FuncCtx` is `#[repr(C)]` with two pointer-sized fields, so the
// second field sits one pointer past the start. Fix: the expected
// value was hard-coded to 8, which only holds on 64-bit targets;
// derive it from the pointer width instead.
assert_eq!(FuncCtx::offset_vmctx() as usize, 0);
assert_eq!(FuncCtx::offset_func_env() as usize, std::mem::size_of::<usize>());
}
#[test]
fn imported_func() {
assert_eq!(
ImportedFunc::offset_func() as usize,
offset_of!(ImportedFunc => func).get_byte_offset(),
offset_of!(ImportedFunc, func),
);
assert_eq!(
ImportedFunc::offset_vmctx() as usize,
offset_of!(ImportedFunc => vmctx).get_byte_offset(),
ImportedFunc::offset_func_ctx() as usize,
offset_of!(ImportedFunc, func_ctx),
);
}
@ -754,12 +911,12 @@ mod vm_offset_tests {
fn local_table() {
assert_eq!(
LocalTable::offset_base() as usize,
offset_of!(LocalTable => base).get_byte_offset(),
offset_of!(LocalTable, base),
);
assert_eq!(
LocalTable::offset_count() as usize,
offset_of!(LocalTable => count).get_byte_offset(),
offset_of!(LocalTable, count),
);
}
@ -767,12 +924,12 @@ mod vm_offset_tests {
fn local_memory() {
assert_eq!(
LocalMemory::offset_base() as usize,
offset_of!(LocalMemory => base).get_byte_offset(),
offset_of!(LocalMemory, base),
);
assert_eq!(
LocalMemory::offset_bound() as usize,
offset_of!(LocalMemory => bound).get_byte_offset(),
offset_of!(LocalMemory, bound),
);
}
@ -780,25 +937,19 @@ mod vm_offset_tests {
fn local_global() {
assert_eq!(
LocalGlobal::offset_data() as usize,
offset_of!(LocalGlobal => data).get_byte_offset(),
offset_of!(LocalGlobal, data),
);
}
#[test]
fn cc_anyfunc() {
assert_eq!(
Anyfunc::offset_func() as usize,
offset_of!(Anyfunc => func).get_byte_offset(),
);
assert_eq!(Anyfunc::offset_func() as usize, offset_of!(Anyfunc, func),);
assert_eq!(
Anyfunc::offset_vmctx() as usize,
offset_of!(Anyfunc => ctx).get_byte_offset(),
);
assert_eq!(Anyfunc::offset_vmctx() as usize, offset_of!(Anyfunc, ctx),);
assert_eq!(
Anyfunc::offset_sig_id() as usize,
offset_of!(Anyfunc => sig_id).get_byte_offset(),
offset_of!(Anyfunc, sig_id),
);
}
}
@ -916,15 +1067,15 @@ mod vm_ctx_tests {
}
fn get_trampoline(&self, _module: &ModuleInfo, _sig_index: SigIndex) -> Option<Wasm> {
// Test placeholder: panics with a message identifying the stub.
// Fix: the old bare `unimplemented!()` line was left overlaid above
// the new message form, making the second statement unreachable.
unimplemented!("generate_module::get_trampoline")
}
unsafe fn do_early_trap(&self, _: Box<dyn Any>) -> ! {
// Test placeholder: panics with a message identifying the stub.
// Fix: removed the stale duplicate `unimplemented!()` left overlaid
// above the message form.
unimplemented!("generate_module::do_early_trap")
}
}
impl CacheGen for Placeholder {
fn generate_cache(&self) -> Result<(Box<[u8]>, Memory), CacheError> {
// Test placeholder: panics with a message identifying the stub.
// Fix: removed the stale duplicate `unimplemented!()` left overlaid
// above the message form.
unimplemented!("generate_module::generate_cache")
}
}

View File

@ -10,7 +10,7 @@ use crate::{
// +*****************************+
// | LOCAL MEMORIES |
// +****************************+
// +*****************************+
pub unsafe extern "C" fn local_static_memory_grow(
ctx: &mut vm::Ctx,
@ -72,7 +72,7 @@ pub unsafe extern "C" fn local_dynamic_memory_size(
// +*****************************+
// | IMPORTED MEMORIES |
// +****************************+
// +*****************************+
pub unsafe extern "C" fn imported_static_memory_grow(
ctx: &mut vm::Ctx,
@ -140,7 +140,7 @@ pub unsafe extern "C" fn imported_dynamic_memory_size(
// +*****************************+
// | LOCAL TABLES |
// +****************************+
// +*****************************+
pub unsafe extern "C" fn local_table_grow(
ctx: &mut vm::Ctx,
@ -150,11 +150,11 @@ pub unsafe extern "C" fn local_table_grow(
let _ = table_index;
let _ = delta;
let _ = ctx;
unimplemented!()
unimplemented!("vmcalls::local_table_grow")
}
/// Intrinsic stub for querying the size of a local table. Not yet
/// implemented: always panics with an identifying message.
pub unsafe extern "C" fn local_table_size(ctx: &vm::Ctx, table_index: LocalTableIndex) -> u32 {
// Parameters intentionally unused until table support is implemented.
let _ = table_index;
let _ = ctx;
// Fix: the old bare `unimplemented!()` (missing its semicolon) was
// left overlaid above the message form; keep only the latter.
unimplemented!("vmcalls::local_table_size")
}