Merge branch 'master' into ventuzelo/fix-800-index-oob-backing

This commit is contained in:
Patrick Ventuzelo
2019-09-25 09:30:33 +02:00
committed by GitHub
74 changed files with 1043 additions and 674 deletions

View File

@ -8,47 +8,46 @@ repository = "https://github.com/wasmerio/wasmer"
edition = "2018"
[dependencies]
nix = "0.15.0"
page_size = "0.4.1"
wasmparser = "0.35.1"
parking_lot = "0.9.0"
lazy_static = "1.4.0"
errno = "0.2.4"
nix = "0.15"
page_size = "0.4"
wasmparser = "0.37.0"
parking_lot = "0.9"
lazy_static = "1.4"
errno = "0.2"
libc = "0.2.60"
hex = "0.3.2"
smallvec = "0.6.10"
hex = "0.3"
smallvec = "0.6"
bincode = "1.1"
colored = "1.8"
[dependencies.indexmap]
version = "1.2.0"
version = "1.2"
features = ["serde-1"]
# Dependencies for caching.
[dependencies.serde]
version = "1.0.99"
version = "1.0"
# This feature is required for serde to support serializing/deserializing reference counted pointers (e.g. Rc and Arc).
features = ["rc"]
[dependencies.serde_derive]
version = "1.0.98"
version = "1.0"
[dependencies.serde_bytes]
version = "0.11.2"
version = "0.11"
[dependencies.serde-bench]
version = "0.0.7"
[dependencies.blake2b_simd]
version = "0.5.8"
version = "0.5"
[dependencies.digest]
version = "0.8.1"
version = "0.8"
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3.8", features = ["memoryapi"] }
winapi = { version = "0.3", features = ["memoryapi"] }
[dev-dependencies]
field-offset = "0.1.1"
field-offset = "0.1"
[build-dependencies]
blake2b_simd = "0.5.8"
rustc_version = "0.2.3"
blake2b_simd = "0.5"
rustc_version = "0.2"
cc = "1.0"
[features]

View File

@ -1,21 +1,21 @@
<p align="center">
<a href="https://wasmer.io" target="_blank" rel="noopener noreferrer">
<img width="400" src="https://raw.githubusercontent.com/wasmerio/wasmer/master/logo.png" alt="Wasmer logo">
<img width="300" src="https://raw.githubusercontent.com/wasmerio/wasmer/master/logo.png" alt="Wasmer logo">
</a>
</p>
<p align="center">
<a href="https://circleci.com/gh/wasmerio/wasmer/">
<img src="https://img.shields.io/circleci/project/github/wasmerio/wasmer/master.svg" alt="Build Status">
<a href="https://dev.azure.com/wasmerio/wasmer/_build/latest?definitionId=3&branchName=master">
<img src="https://img.shields.io/azure-devops/build/wasmerio/wasmer/3.svg?style=flat-square" alt="Build Status">
</a>
<a href="https://github.com/wasmerio/wasmer/blob/master/LICENSE">
<img src="https://img.shields.io/github/license/wasmerio/wasmer.svg" alt="License">
<img src="https://img.shields.io/github/license/wasmerio/wasmer.svg?style=flat-square" alt="License">
</a>
<a href="https://spectrum.chat/wasmer">
<img src="https://withspectrum.github.io/badge/badge.svg" alt="Join the Wasmer Community">
</a>
<a href="https://crates.io/crates/wasmer-runtime-core">
<img src="https://img.shields.io/crates/d/wasmer-runtime-core.svg" alt="Number of downloads from crates.io">
<img src="https://img.shields.io/crates/d/wasmer-runtime-core.svg?style=flat-square" alt="Number of downloads from crates.io">
</a>
<a href="https://docs.rs/wasmer-runtime-core">
<img src="https://docs.rs/wasmer-runtime-core/badge.svg" alt="Read our API documentation">

View File

@ -81,7 +81,7 @@ pushq %r8
pushq %r9
pushq %r10
callq get_boundary_register_preservation
callq get_boundary_register_preservation@PLT
# Keep this consistent with BoundaryRegisterPreservation
movq %r15, 0(%rax)

View File

@ -54,6 +54,9 @@ pub struct LocalBacking {
pub(crate) internals: Internals,
}
// Manually implemented because LocalBacking contains raw pointers directly
unsafe impl Send for LocalBacking {}
impl LocalBacking {
pub(crate) fn new(
module: &ModuleInner,
@ -481,6 +484,9 @@ pub struct ImportBacking {
pub(crate) vm_globals: BoxedMap<ImportedGlobalIndex, *mut vm::LocalGlobal>,
}
// manually implemented because ImportBacking contains raw pointers directly
unsafe impl Send for ImportBacking {}
impl ImportBacking {
pub fn new(
module: &ModuleInner,
@ -556,9 +562,8 @@ fn import_functions(
let namespace = module.info.namespace_table.get(*namespace_index);
let name = module.info.name_table.get(*name_index);
let import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
let import =
imports.maybe_with_namespace(namespace, |namespace| namespace.get_export(name));
match import {
Some(Export::Function {
func,
@ -644,9 +649,8 @@ fn import_memories(
let namespace = module.info.namespace_table.get(*namespace_index);
let name = module.info.name_table.get(*name_index);
let memory_import = imports
.get_namespace(&namespace)
.and_then(|namespace| namespace.get_export(&name));
let memory_import =
imports.maybe_with_namespace(namespace, |namespace| namespace.get_export(name));
match memory_import {
Some(Export::Memory(memory)) => {
if expected_memory_desc.fits_in_imported(memory.descriptor()) {
@ -716,9 +720,8 @@ fn import_tables(
let namespace = module.info.namespace_table.get(*namespace_index);
let name = module.info.name_table.get(*name_index);
let table_import = imports
.get_namespace(&namespace)
.and_then(|namespace| namespace.get_export(&name));
let table_import =
imports.maybe_with_namespace(namespace, |namespace| namespace.get_export(name));
match table_import {
Some(Export::Table(mut table)) => {
if expected_table_desc.fits_in_imported(table.descriptor()) {
@ -787,9 +790,8 @@ fn import_globals(
{
let namespace = module.info.namespace_table.get(*namespace_index);
let name = module.info.name_table.get(*name_index);
let import = imports
.get_namespace(namespace)
.and_then(|namespace| namespace.get_export(name));
let import =
imports.maybe_with_namespace(namespace, |namespace| namespace.get_export(name));
match import {
Some(Export::Global(mut global)) => {
if global.descriptor() == *imported_global_desc {

View File

@ -146,7 +146,6 @@ pub fn validating_parser_config(features: &Features) -> wasmparser::ValidatingPa
enable_bulk_memory: false,
enable_multi_value: false,
},
mutable_global_imports: true,
}
}

View File

@ -11,6 +11,9 @@ pub enum Context {
Internal,
}
// Manually implemented because Context contains a raw pointer to Ctx
unsafe impl Send for Context {}
#[derive(Debug, Clone)]
pub enum Export {
Function {
@ -26,6 +29,9 @@ pub enum Export {
#[derive(Debug, Clone)]
pub struct FuncPointer(*const vm::Func);
// Manually implemented because FuncPointer contains a raw pointer to a function (`*const vm::Func`)
unsafe impl Send for FuncPointer {}
impl FuncPointer {
/// This needs to be unsafe because there is
/// no way to check whether the passed function

View File

@ -283,13 +283,10 @@ extern "C" fn signal_trap_handler(
let image = build_instance_image(ctx, es_image);
unwind_result = Box::new(image);
} else {
use colored::*;
if es_image.frames.len() > 0 {
eprintln!(
"\n{}",
"Wasmer encountered an error while running your WebAssembly program."
.bold()
.red()
);
es_image.print_backtrace_if_needed();
}

View File

@ -4,11 +4,14 @@ use crate::{
types::{GlobalDescriptor, Type, Value},
vm,
};
use std::{cell::RefCell, fmt, rc::Rc};
use std::{
fmt,
sync::{Arc, Mutex},
};
pub struct Global {
desc: GlobalDescriptor,
storage: Rc<RefCell<vm::LocalGlobal>>,
storage: Arc<Mutex<vm::LocalGlobal>>,
}
impl Global {
@ -56,7 +59,7 @@ impl Global {
Self {
desc,
storage: Rc::new(RefCell::new(local_global)),
storage: Arc::new(Mutex::new(local_global)),
}
}
@ -83,7 +86,8 @@ impl Global {
Value::V128(x) => x,
},
};
*self.storage.borrow_mut() = local_global;
let mut storage = self.storage.lock().unwrap();
*storage = local_global;
} else {
panic!("Wrong type for setting this global")
}
@ -94,7 +98,8 @@ impl Global {
/// Get the value held by this global.
pub fn get(&self) -> Value {
let data = self.storage.borrow().data;
let storage = self.storage.lock().unwrap();
let data = storage.data;
match self.desc.ty {
Type::I32 => Value::I32(data as i32),
@ -105,8 +110,10 @@ impl Global {
}
}
// TODO: think about this and if this should now be unsafe
pub(crate) fn vm_local_global(&mut self) -> *mut vm::LocalGlobal {
&mut *self.storage.borrow_mut()
let mut storage = self.storage.lock().unwrap();
&mut *storage
}
}
@ -120,7 +127,7 @@ impl Clone for Global {
fn clone(&self) -> Self {
Self {
desc: self.desc,
storage: Rc::clone(&self.storage),
storage: Arc::clone(&self.storage),
}
}
}

View File

@ -2,9 +2,9 @@ use crate::export::Export;
use std::collections::VecDeque;
use std::collections::{hash_map::Entry, HashMap};
use std::{
cell::{Ref, RefCell},
borrow::{Borrow, BorrowMut},
ffi::c_void,
rc::Rc,
sync::{Arc, Mutex},
};
pub trait LikeNamespace {
@ -45,8 +45,9 @@ impl IsExport for Export {
/// }
/// ```
pub struct ImportObject {
map: Rc<RefCell<HashMap<String, Box<dyn LikeNamespace>>>>,
pub(crate) state_creator: Option<Rc<dyn Fn() -> (*mut c_void, fn(*mut c_void))>>,
map: Arc<Mutex<HashMap<String, Box<dyn LikeNamespace + Send>>>>,
pub(crate) state_creator:
Option<Arc<dyn Fn() -> (*mut c_void, fn(*mut c_void)) + Send + Sync + 'static>>,
pub allow_missing_functions: bool,
}
@ -54,7 +55,7 @@ impl ImportObject {
/// Create a new `ImportObject`.
pub fn new() -> Self {
Self {
map: Rc::new(RefCell::new(HashMap::new())),
map: Arc::new(Mutex::new(HashMap::new())),
state_creator: None,
allow_missing_functions: false,
}
@ -62,11 +63,11 @@ impl ImportObject {
pub fn new_with_data<F>(state_creator: F) -> Self
where
F: Fn() -> (*mut c_void, fn(*mut c_void)) + 'static,
F: Fn() -> (*mut c_void, fn(*mut c_void)) + 'static + Send + Sync,
{
Self {
map: Rc::new(RefCell::new(HashMap::new())),
state_creator: Some(Rc::new(state_creator)),
map: Arc::new(Mutex::new(HashMap::new())),
state_creator: Some(Arc::new(state_creator)),
allow_missing_functions: false,
}
}
@ -92,9 +93,10 @@ impl ImportObject {
pub fn register<S, N>(&mut self, name: S, namespace: N) -> Option<Box<dyn LikeNamespace>>
where
S: Into<String>,
N: LikeNamespace + 'static,
N: LikeNamespace + Send + 'static,
{
let mut map = self.map.borrow_mut();
let mut guard = self.map.lock().unwrap();
let map = guard.borrow_mut();
match map.entry(name.into()) {
Entry::Vacant(empty) => {
@ -105,19 +107,39 @@ impl ImportObject {
}
}
pub fn get_namespace(&self, namespace: &str) -> Option<Ref<dyn LikeNamespace + 'static>> {
let map_ref = self.map.borrow();
/// Apply a function on the namespace if it exists
/// If your function can fail, consider using `maybe_with_namespace`
pub fn with_namespace<Func, InnerRet>(&self, namespace: &str, f: Func) -> Option<InnerRet>
where
Func: FnOnce(&(dyn LikeNamespace + Send)) -> InnerRet,
InnerRet: Sized,
{
let guard = self.map.lock().unwrap();
let map_ref = guard.borrow();
if map_ref.contains_key(namespace) {
Some(Ref::map(map_ref, |map| &*map[namespace]))
Some(f(map_ref[namespace].as_ref()))
} else {
None
}
}
/// The same as `with_namespace` but takes a function that may fail
pub fn maybe_with_namespace<Func, InnerRet>(&self, namespace: &str, f: Func) -> Option<InnerRet>
where
Func: FnOnce(&(dyn LikeNamespace + Send)) -> Option<InnerRet>,
InnerRet: Sized,
{
let guard = self.map.lock().unwrap();
let map_ref = guard.borrow();
map_ref
.get(namespace)
.map(|ns| ns.as_ref())
.and_then(|ns| f(ns))
}
pub fn clone_ref(&self) -> Self {
Self {
map: Rc::clone(&self.map),
map: Arc::clone(&self.map),
state_creator: self.state_creator.clone(),
allow_missing_functions: false,
}
@ -125,7 +147,9 @@ impl ImportObject {
fn get_objects(&self) -> VecDeque<(String, String, Export)> {
let mut out = VecDeque::new();
for (name, ns) in self.map.borrow().iter() {
let guard = self.map.lock().unwrap();
let map = guard.borrow();
for (name, ns) in map.iter() {
for (id, exp) in ns.get_exports() {
out.push_back((name.clone(), id, exp));
}
@ -158,7 +182,8 @@ impl IntoIterator for ImportObject {
impl Extend<(String, String, Export)> for ImportObject {
fn extend<T: IntoIterator<Item = (String, String, Export)>>(&mut self, iter: T) {
let mut map = self.map.borrow_mut();
let mut guard = self.map.lock().unwrap();
let map = guard.borrow_mut();
for (ns, id, exp) in iter.into_iter() {
if let Some(like_ns) = map.get_mut(&ns) {
like_ns.maybe_insert(&id, exp);
@ -172,7 +197,7 @@ impl Extend<(String, String, Export)> for ImportObject {
}
pub struct Namespace {
map: HashMap<String, Box<dyn IsExport>>,
map: HashMap<String, Box<dyn IsExport + Send>>,
}
impl Namespace {
@ -182,10 +207,10 @@ impl Namespace {
}
}
pub fn insert<S, E>(&mut self, name: S, export: E) -> Option<Box<dyn IsExport>>
pub fn insert<S, E>(&mut self, name: S, export: E) -> Option<Box<dyn IsExport + Send>>
where
S: Into<String>,
E: IsExport + 'static,
E: IsExport + Send + 'static,
{
self.map.insert(name.into(), Box::new(export))
}
@ -241,12 +266,14 @@ mod test {
imports1.extend(imports2);
let cat_ns = imports1.get_namespace("cat").unwrap();
assert!(cat_ns.get_export("small").is_some());
let small_cat_export =
imports1.maybe_with_namespace("cat", |cat_ns| cat_ns.get_export("small"));
assert!(small_cat_export.is_some());
let dog_ns = imports1.get_namespace("dog").unwrap();
assert!(dog_ns.get_export("happy").is_some());
assert!(dog_ns.get_export("small").is_some());
let entries = imports1.maybe_with_namespace("dog", |dog_ns| {
Some((dog_ns.get_export("happy")?, dog_ns.get_export("small")?))
});
assert!(entries.is_some());
}
#[test]
@ -264,14 +291,14 @@ mod test {
};
imports1.extend(imports2);
let dog_ns = imports1.get_namespace("dog").unwrap();
let happy_dog_entry = imports1
.maybe_with_namespace("dog", |dog_ns| dog_ns.get_export("happy"))
.unwrap();
assert!(
if let Export::Global(happy_dog_global) = dog_ns.get_export("happy").unwrap() {
happy_dog_global.get() == Value::I32(4)
} else {
false
}
);
assert!(if let Export::Global(happy_dog_global) = happy_dog_entry {
happy_dog_global.get() == Value::I32(4)
} else {
false
});
}
}

View File

@ -16,7 +16,12 @@ use crate::{
vm::{self, InternalField},
};
use smallvec::{smallvec, SmallVec};
use std::{mem, pin::Pin, ptr::NonNull, sync::Arc};
use std::{
mem,
pin::Pin,
ptr::NonNull,
sync::{Arc, Mutex},
};
pub(crate) struct InstanceInner {
#[allow(dead_code)]
@ -25,6 +30,9 @@ pub(crate) struct InstanceInner {
pub(crate) vmctx: *mut vm::Ctx,
}
// manually implemented because InstanceInner contains a raw pointer to Ctx
unsafe impl Send for InstanceInner {}
impl Drop for InstanceInner {
fn drop(&mut self) {
// Drop the vmctx.
@ -517,6 +525,27 @@ impl LikeNamespace for Rc<Instance> {
}
}
impl LikeNamespace for Arc<Mutex<Instance>> {
fn get_export(&self, name: &str) -> Option<Export> {
let instance = self.lock().unwrap();
let export_index = instance.module.info.exports.get(name)?;
Some(
instance
.inner
.get_export_from_index(&instance.module, export_index),
)
}
fn get_exports(&self) -> Vec<(String, Export)> {
unimplemented!("Use the exports method instead");
}
fn maybe_insert(&mut self, _name: &str, _export: Export) -> Option<()> {
None
}
}
#[must_use]
fn call_func_with_index(
info: &ModuleInfo,
@ -527,29 +556,12 @@ fn call_func_with_index(
args: &[Value],
rets: &mut Vec<Value>,
) -> CallResult<()> {
rets.clear();
let sig_index = *info
.func_assoc
.get(func_index)
.expect("broken invariant, incorrect func index");
let signature = &info.signatures[sig_index];
let num_results = signature.returns().len();
let num_results = num_results
+ signature
.returns()
.iter()
.filter(|&&ty| ty == Type::V128)
.count();
rets.reserve(num_results);
if !signature.check_param_value_types(args) {
Err(ResolveError::Signature {
expected: signature.clone(),
found: args.iter().map(|val| val.ty()).collect(),
})?
}
let func_ptr = match func_index.local_or_import(info) {
LocalOrImport::Local(local_func_index) => {
@ -567,6 +579,39 @@ fn call_func_with_index(
}
};
let wasm = runnable
.get_trampoline(info, sig_index)
.expect("wasm trampoline");
call_func_with_index_inner(ctx_ptr, func_ptr, signature, wasm, args, rets)
}
pub(crate) fn call_func_with_index_inner(
ctx_ptr: *mut vm::Ctx,
func_ptr: NonNull<vm::Func>,
signature: &FuncSig,
wasm: Wasm,
args: &[Value],
rets: &mut Vec<Value>,
) -> CallResult<()> {
rets.clear();
let num_results = signature.returns().len();
let num_results = num_results
+ signature
.returns()
.iter()
.filter(|&&ty| ty == Type::V128)
.count();
rets.reserve(num_results);
if !signature.check_param_value_types(args) {
Err(ResolveError::Signature {
expected: signature.clone(),
found: args.iter().map(|val| val.ty()).collect(),
})?
}
let mut raw_args: SmallVec<[u64; 8]> = SmallVec::new();
for v in args {
match v {
@ -598,9 +643,7 @@ fn call_func_with_index(
trampoline,
invoke,
invoke_env,
} = runnable
.get_trampoline(info, sig_index)
.expect("wasm trampoline");
} = wasm;
let run_wasm = |result_space: *mut u64| unsafe {
let mut trap_info = WasmTrapInfo::Unknown;
@ -749,3 +792,15 @@ impl<'a> DynFunc<'a> {
}
}
}
#[cfg(test)]
mod test {
use super::*;
fn is_send<T: Send>() {}
#[test]
fn test_instance_is_send() {
is_send::<Instance>();
}
}

View File

@ -146,7 +146,6 @@ pub fn validate_and_report_errors_with_features(
enable_reference_types: false,
enable_threads: features.threads,
},
mutable_global_imports: true,
};
let mut parser = wasmparser::ValidatingParser::new(wasm, Some(config));
loop {

View File

@ -8,12 +8,9 @@ use crate::{
units::Pages,
vm,
};
use std::{
cell::{Cell, RefCell},
fmt, mem,
rc::Rc,
sync::Arc,
};
use std::{cell::Cell, fmt, mem, sync::Arc};
use std::sync::Mutex as StdMutex;
pub use self::dynamic::DynamicMemory;
pub use self::static_::StaticMemory;
@ -53,16 +50,12 @@ impl Memory {
/// # use wasmer_runtime_core::memory::Memory;
/// # use wasmer_runtime_core::error::Result;
/// # use wasmer_runtime_core::units::Pages;
/// # fn create_memory() -> Result<()> {
/// let descriptor = MemoryDescriptor {
/// minimum: Pages(10),
/// maximum: None,
/// shared: false,
/// };
/// fn create_memory() -> Result<()> {
/// let descriptor = MemoryDescriptor::new(Pages(10), None, false).unwrap();
///
/// let memory = Memory::new(descriptor)?;
/// # Ok(())
/// # }
/// let memory = Memory::new(descriptor)?;
/// Ok(())
/// }
/// ```
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
if let Some(max) = desc.maximum {
@ -131,11 +124,11 @@ impl Memory {
///
/// ```
/// # use wasmer_runtime_core::memory::{Memory, MemoryView};
/// # use std::sync::atomic::Ordering;
/// # use std::{cell::Cell, sync::atomic::Ordering};
/// # fn view_memory(memory: Memory) {
/// // Without synchronization.
/// let view: MemoryView<u8> = memory.view();
/// for byte in view[0x1000 .. 0x1010].iter().map(|cell| cell.get()) {
/// for byte in view[0x1000 .. 0x1010].iter().map(Cell::get) {
/// println!("byte: {}", byte);
/// }
///
@ -177,7 +170,7 @@ impl fmt::Debug for Memory {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MemoryType {
Dynamic,
Static,
@ -208,14 +201,18 @@ enum UnsharedMemoryStorage {
}
pub struct UnsharedMemory {
internal: Rc<UnsharedMemoryInternal>,
internal: Arc<UnsharedMemoryInternal>,
}
struct UnsharedMemoryInternal {
storage: RefCell<UnsharedMemoryStorage>,
storage: StdMutex<UnsharedMemoryStorage>,
local: Cell<vm::LocalMemory>,
}
// Manually implemented because UnsharedMemoryInternal uses `Cell` and is used in an Arc;
// this is safe because the lock for storage can be used to protect (seems like a weak reason: PLEASE REVIEW!)
unsafe impl Sync for UnsharedMemoryInternal {}
impl UnsharedMemory {
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
let mut local = vm::LocalMemory {
@ -231,19 +228,23 @@ impl UnsharedMemory {
MemoryType::Static => {
UnsharedMemoryStorage::Static(StaticMemory::new(desc, &mut local)?)
}
MemoryType::SharedStatic => panic!("attempting to create shared unshared memory"),
MemoryType::SharedStatic => {
return Err(CreationError::InvalidDescriptor(
"attempting to create shared unshared memory".to_string(),
));
}
};
Ok(Self {
internal: Rc::new(UnsharedMemoryInternal {
storage: RefCell::new(storage),
internal: Arc::new(UnsharedMemoryInternal {
storage: StdMutex::new(storage),
local: Cell::new(local),
}),
})
}
pub fn grow(&self, delta: Pages) -> Result<Pages, GrowError> {
let mut storage = self.internal.storage.borrow_mut();
let mut storage = self.internal.storage.lock().unwrap();
let mut local = self.internal.local.get();
@ -260,7 +261,7 @@ impl UnsharedMemory {
}
pub fn size(&self) -> Pages {
let storage = self.internal.storage.borrow();
let storage = self.internal.storage.lock().unwrap();
match &*storage {
UnsharedMemoryStorage::Dynamic(ref dynamic_memory) => dynamic_memory.size(),
@ -276,7 +277,7 @@ impl UnsharedMemory {
impl Clone for UnsharedMemory {
fn clone(&self) -> Self {
UnsharedMemory {
internal: Rc::clone(&self.internal),
internal: Arc::clone(&self.internal),
}
}
}
@ -286,11 +287,15 @@ pub struct SharedMemory {
}
pub struct SharedMemoryInternal {
memory: RefCell<Box<StaticMemory>>,
memory: StdMutex<Box<StaticMemory>>,
local: Cell<vm::LocalMemory>,
lock: Mutex<()>,
}
// Manually implemented because SharedMemoryInternal uses `Cell` and is used in Arc;
// this is safe because of `lock`; accessing `local` without locking `lock` is not safe (Maybe we could put the lock on Local then?)
unsafe impl Sync for SharedMemoryInternal {}
impl SharedMemory {
fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
let mut local = vm::LocalMemory {
@ -303,7 +308,7 @@ impl SharedMemory {
Ok(Self {
internal: Arc::new(SharedMemoryInternal {
memory: RefCell::new(memory),
memory: StdMutex::new(memory),
local: Cell::new(local),
lock: Mutex::new(()),
}),
@ -313,15 +318,18 @@ impl SharedMemory {
pub fn grow(&self, delta: Pages) -> Result<Pages, GrowError> {
let _guard = self.internal.lock.lock();
let mut local = self.internal.local.get();
let pages = self.internal.memory.borrow_mut().grow(delta, &mut local);
let mut memory = self.internal.memory.lock().unwrap();
let pages = memory.grow(delta, &mut local);
pages
}
pub fn size(&self) -> Pages {
let _guard = self.internal.lock.lock();
self.internal.memory.borrow_mut().size()
let memory = self.internal.memory.lock().unwrap();
memory.size()
}
// This function is scary, because the mutex is not locked here
pub(crate) fn vm_local_memory(&self) -> *mut vm::LocalMemory {
self.internal.local.as_ptr()
}
@ -342,24 +350,16 @@ mod memory_tests {
#[test]
fn test_initial_memory_size() {
let unshared_memory = Memory::new(MemoryDescriptor {
minimum: Pages(10),
maximum: Some(Pages(20)),
shared: false,
})
.unwrap();
let memory_desc = MemoryDescriptor::new(Pages(10), Some(Pages(20)), false).unwrap();
let unshared_memory = Memory::new(memory_desc).unwrap();
assert_eq!(unshared_memory.size(), Pages(10));
}
#[test]
fn test_invalid_descriptor_returns_error() {
let result = Memory::new(MemoryDescriptor {
minimum: Pages(10),
maximum: None,
shared: true,
});
let memory_desc = MemoryDescriptor::new(Pages(10), None, true);
assert!(
result.is_err(),
memory_desc.is_err(),
"Max number of pages is required for shared memory"
)
}

View File

@ -46,9 +46,7 @@ fn align_pointer(ptr: usize, align: usize) -> usize {
impl<T: Copy + ValueType> WasmPtr<T, Item> {
#[inline]
pub fn deref<'a>(self, memory: &'a Memory) -> Option<&'a Cell<T>> {
if self.offset == 0
|| (self.offset as usize) + mem::size_of::<T>() >= memory.size().bytes().0
{
if (self.offset as usize) + mem::size_of::<T>() >= memory.size().bytes().0 {
return None;
}
unsafe {
@ -62,9 +60,7 @@ impl<T: Copy + ValueType> WasmPtr<T, Item> {
#[inline]
pub unsafe fn deref_mut<'a>(self, memory: &'a Memory) -> Option<&'a mut Cell<T>> {
if self.offset == 0
|| (self.offset as usize) + mem::size_of::<T>() >= memory.size().bytes().0
{
if (self.offset as usize) + mem::size_of::<T>() >= memory.size().bytes().0 {
return None;
}
let cell_ptr = align_pointer(
@ -83,9 +79,7 @@ impl<T: Copy + ValueType> WasmPtr<T, Array> {
let item_size = mem::size_of::<T>() + (mem::size_of::<T>() % mem::align_of::<T>());
let slice_full_len = index as usize + length as usize;
if self.offset == 0
|| (self.offset as usize) + (item_size * slice_full_len) >= memory.size().bytes().0
{
if (self.offset as usize) + (item_size * slice_full_len) >= memory.size().bytes().0 {
return None;
}
@ -112,9 +106,7 @@ impl<T: Copy + ValueType> WasmPtr<T, Array> {
let item_size = mem::size_of::<T>() + (mem::size_of::<T>() % mem::align_of::<T>());
let slice_full_len = index as usize + length as usize;
if self.offset == 0
|| (self.offset as usize) + (item_size * slice_full_len) >= memory.size().bytes().0
{
if (self.offset as usize) + (item_size * slice_full_len) >= memory.size().bytes().0 {
return None;
}

View File

@ -136,11 +136,13 @@ pub fn read_module<
.push((import_name, table_desc));
}
ImportSectionEntryType::Memory(memory_ty) => {
let mem_desc = MemoryDescriptor {
minimum: Pages(memory_ty.limits.initial),
maximum: memory_ty.limits.maximum.map(|max| Pages(max)),
shared: memory_ty.shared,
};
let mem_desc = MemoryDescriptor::new(
Pages(memory_ty.limits.initial),
memory_ty.limits.maximum.map(|max| Pages(max)),
memory_ty.shared,
)
.map_err(|x| LoadError::Codegen(format!("{:?}", x)))?;
info.write()
.unwrap()
.imported_memories
@ -172,11 +174,12 @@ pub fn read_module<
info.write().unwrap().tables.push(table_desc);
}
ParserState::MemorySectionEntry(memory_ty) => {
let mem_desc = MemoryDescriptor {
minimum: Pages(memory_ty.limits.initial),
maximum: memory_ty.limits.maximum.map(|max| Pages(max)),
shared: memory_ty.shared,
};
let mem_desc = MemoryDescriptor::new(
Pages(memory_ty.limits.initial),
memory_ty.limits.maximum.map(|max| Pages(max)),
memory_ty.shared,
)
.map_err(|x| LoadError::Codegen(format!("{:?}", x)))?;
info.write().unwrap().memories.push(mem_desc);
}
@ -392,14 +395,19 @@ pub fn read_module<
}
pub fn wp_type_to_type(ty: WpType) -> Result<Type, BinaryReaderError> {
Ok(match ty {
WpType::I32 => Type::I32,
WpType::I64 => Type::I64,
WpType::F32 => Type::F32,
WpType::F64 => Type::F64,
WpType::V128 => Type::V128,
_ => panic!("broken invariant, invalid type"),
})
match ty {
WpType::I32 => Ok(Type::I32),
WpType::I64 => Ok(Type::I64),
WpType::F32 => Ok(Type::F32),
WpType::F64 => Ok(Type::F64),
WpType::V128 => Ok(Type::V128),
_ => {
return Err(BinaryReaderError {
message: "broken invariant, invalid type",
offset: -1isize as usize,
});
}
}
}
pub fn type_to_wp_type(ty: Type) -> WpType {

View File

@ -303,7 +303,7 @@ impl ExecutionStateImage {
if let Ok(x) = env::var("WASMER_BACKTRACE") {
if x == "1" {
eprintln!("{}", self.colored_output());
eprintln!("{}", self.output());
return;
}
}
@ -311,9 +311,7 @@ impl ExecutionStateImage {
eprintln!("Run with `WASMER_BACKTRACE=1` environment variable to display a backtrace.");
}
pub fn colored_output(&self) -> String {
use colored::*;
pub fn output(&self) -> String {
fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
let mut ret = String::new();
let mut first = true;
@ -341,8 +339,6 @@ impl ExecutionStateImage {
i,
x.map(|x| format!("{}", x))
.unwrap_or_else(|| "?".to_string())
.bold()
.cyan()
)
}),
", ",
@ -353,27 +349,23 @@ impl ExecutionStateImage {
let mut ret = String::new();
if self.frames.len() == 0 {
ret += &"Unknown fault address, cannot read stack.".yellow();
ret += &"Unknown fault address, cannot read stack.";
ret += "\n";
} else {
ret += &"Backtrace:".bold();
ret += &"Backtrace:";
ret += "\n";
for (i, f) in self.frames.iter().enumerate() {
ret += &format!("* Frame {} @ Local function {}", i, f.local_function_id).bold();
ret += &format!("* Frame {} @ Local function {}", i, f.local_function_id);
ret += "\n";
ret += &format!(" {} {}\n", "Offset:", format!("{}", f.wasm_inst_offset),);
ret += &format!(
" {} {}\n",
"Offset:".bold().yellow(),
format!("{}", f.wasm_inst_offset).bold().cyan(),
);
ret += &format!(
" {} {}\n",
"Locals:".bold().yellow(),
"Locals:",
format_optional_u64_sequence(&f.locals)
);
ret += &format!(
" {} {}\n\n",
"Stack:".bold().yellow(),
"Stack:",
format_optional_u64_sequence(&f.stack)
);
}

View File

@ -39,6 +39,10 @@ where
self.elems.len()
}
pub fn is_empty(&self) -> bool {
self.elems.is_empty()
}
pub fn push(&mut self, value: V) -> K {
let len = self.len();
self.elems.push(value);

View File

@ -4,7 +4,7 @@ use errno;
use nix::libc;
use page_size;
use std::ops::{Bound, RangeBounds};
use std::{fs::File, os::unix::io::IntoRawFd, path::Path, ptr, rc::Rc, slice};
use std::{fs::File, os::unix::io::IntoRawFd, path::Path, ptr, slice, sync::Arc};
unsafe impl Send for Memory {}
unsafe impl Sync for Memory {}
@ -14,7 +14,7 @@ pub struct Memory {
ptr: *mut u8,
size: usize,
protection: Protect,
fd: Option<Rc<RawFd>>,
fd: Option<Arc<RawFd>>,
}
impl Memory {
@ -49,7 +49,7 @@ impl Memory {
ptr: ptr as *mut u8,
size: file_len as usize,
protection,
fd: Some(Rc::new(raw_fd)),
fd: Some(Arc::new(raw_fd)),
})
}
}

View File

@ -44,7 +44,7 @@ impl<'a> From<DynFunc<'a>> for Anyfunc<'a> {
}
pub struct AnyfuncTable {
backing: Vec<vm::Anyfunc>,
pub(crate) backing: Vec<vm::Anyfunc>,
max: Option<u32>,
}

View File

@ -5,12 +5,15 @@ use crate::{
types::{ElementType, TableDescriptor},
vm,
};
use std::{cell::RefCell, fmt, ptr, rc::Rc};
use std::{
fmt, ptr,
sync::{Arc, Mutex},
};
mod anyfunc;
pub use self::anyfunc::Anyfunc;
use self::anyfunc::AnyfuncTable;
pub(crate) use self::anyfunc::AnyfuncTable;
use crate::error::GrowError;
pub enum Element<'a> {
@ -25,7 +28,7 @@ pub enum TableStorage {
pub struct Table {
desc: TableDescriptor,
storage: Rc<RefCell<(TableStorage, vm::LocalTable)>>,
storage: Arc<Mutex<(TableStorage, vm::LocalTable)>>,
}
impl Table {
@ -71,7 +74,7 @@ impl Table {
Ok(Self {
desc,
storage: Rc::new(RefCell::new((storage, local))),
storage: Arc::new(Mutex::new((storage, local))),
})
}
@ -82,7 +85,8 @@ impl Table {
/// Set the element at index.
pub fn set(&self, index: u32, element: Element) -> Result<(), ()> {
match &mut *self.storage.borrow_mut() {
let mut storage = self.storage.lock().unwrap();
match &mut *storage {
(TableStorage::Anyfunc(ref mut anyfunc_table), _) => {
match element {
Element::Anyfunc(anyfunc) => anyfunc_table.set(index, anyfunc),
@ -96,14 +100,16 @@ impl Table {
where
F: FnOnce(&mut [vm::Anyfunc]) -> R,
{
match &mut *self.storage.borrow_mut() {
let mut storage = self.storage.lock().unwrap();
match &mut *storage {
(TableStorage::Anyfunc(ref mut anyfunc_table), _) => f(anyfunc_table.internal_buffer()),
}
}
/// The current size of this table.
pub fn size(&self) -> u32 {
match &*self.storage.borrow() {
let storage = self.storage.lock().unwrap();
match &*storage {
(TableStorage::Anyfunc(ref anyfunc_table), _) => anyfunc_table.current_size(),
}
}
@ -114,7 +120,8 @@ impl Table {
return Ok(self.size());
}
match &mut *self.storage.borrow_mut() {
let mut storage = self.storage.lock().unwrap();
match &mut *storage {
(TableStorage::Anyfunc(ref mut anyfunc_table), ref mut local) => anyfunc_table
.grow(delta, local)
.ok_or(GrowError::TableGrowError),
@ -122,7 +129,8 @@ impl Table {
}
pub fn vm_local_table(&mut self) -> *mut vm::LocalTable {
&mut self.storage.borrow_mut().1
let mut storage = self.storage.lock().unwrap();
&mut storage.1
}
}
@ -136,7 +144,7 @@ impl Clone for Table {
fn clone(&self) -> Self {
Self {
desc: self.desc,
storage: Rc::clone(&self.storage),
storage: Arc::clone(&self.storage),
}
}
}

View File

@ -165,6 +165,9 @@ pub struct Func<'a, Args = (), Rets = (), Inner: Kind = Wasm> {
_phantom: PhantomData<(&'a (), Args, Rets)>,
}
unsafe impl<'a, Args, Rets> Send for Func<'a, Args, Rets, Wasm> {}
unsafe impl<'a, Args, Rets> Send for Func<'a, Args, Rets, Host> {}
impl<'a, Args, Rets> Func<'a, Args, Rets, Wasm>
where
Args: WasmTypeList,

View File

@ -326,7 +326,7 @@ pub struct GlobalInit {
pub init: Initializer,
}
/// A wasm memory.
/// A wasm memory descriptor.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
pub struct MemoryDescriptor {
/// The minimum number of allowed pages.
@ -335,16 +335,30 @@ pub struct MemoryDescriptor {
pub maximum: Option<Pages>,
/// This memory can be shared between wasm threads.
pub shared: bool,
/// The type of the memory
pub memory_type: MemoryType,
}
impl MemoryDescriptor {
pub fn memory_type(self) -> MemoryType {
match (self.maximum.is_some(), self.shared) {
pub fn new(minimum: Pages, maximum: Option<Pages>, shared: bool) -> Result<Self, String> {
let memory_type = match (maximum.is_some(), shared) {
(true, true) => MemoryType::SharedStatic,
(true, false) => MemoryType::Static,
(false, false) => MemoryType::Dynamic,
(false, true) => panic!("shared memory without a max is not allowed"),
}
(false, true) => {
return Err("Max number of pages is required for shared memory".to_string());
}
};
Ok(MemoryDescriptor {
minimum,
maximum,
shared,
memory_type,
})
}
pub fn memory_type(&self) -> MemoryType {
self.memory_type
}
pub(crate) fn fits_in_imported(&self, imported: MemoryDescriptor) -> bool {

View File

@ -1,15 +1,19 @@
pub use crate::backing::{ImportBacking, LocalBacking, INTERNALS_SIZE};
use crate::{
error::CallResult,
instance::call_func_with_index_inner,
memory::{Memory, MemoryType},
module::{ModuleInfo, ModuleInner},
sig_registry::SigRegistry,
structures::TypedIndex,
types::{LocalOrImport, MemoryIndex},
types::{LocalOrImport, MemoryIndex, TableIndex, Value},
vmcalls,
};
use std::{
cell::UnsafeCell,
ffi::c_void,
mem, ptr,
mem,
ptr::{self, NonNull},
sync::atomic::{AtomicUsize, Ordering},
sync::Once,
};
@ -393,6 +397,41 @@ impl Ctx {
(*self.internal.internals)[field.index()] = value;
}
}
/// Calls a host or Wasm function at the given table index
pub fn call_with_table_index(
&mut self,
index: TableIndex,
args: &[Value],
) -> CallResult<Vec<Value>> {
let anyfunc_table =
unsafe { &*((**self.internal.tables).table as *mut crate::table::AnyfuncTable) };
let Anyfunc { func, ctx, sig_id } = anyfunc_table.backing[index.index()];
let signature = SigRegistry.lookup_signature(unsafe { std::mem::transmute(sig_id.0) });
let mut rets = vec![];
let wasm = {
let module = unsafe { &*self.module };
let runnable = &module.runnable_module;
let sig_index = SigRegistry.lookup_sig_index(signature.clone());
runnable
.get_trampoline(&module.info, sig_index)
.expect("wasm trampoline")
};
call_func_with_index_inner(
ctx,
NonNull::new(func as *mut _).unwrap(),
&signature,
wasm,
args,
&mut rets,
)?;
Ok(rets)
}
}
#[doc(hidden)]
@ -474,6 +513,9 @@ pub struct ImportedFunc {
pub vmctx: *mut Ctx,
}
// manually implemented because ImportedFunc contains raw pointers directly; `Func` is marked Send (But `Ctx` actually isn't! (TODO: review this, shouldn't `Ctx` be Send?))
unsafe impl Send for ImportedFunc {}
impl ImportedFunc {
#[allow(clippy::erasing_op)] // TODO
pub fn offset_func() -> u8 {
@ -501,6 +543,9 @@ pub struct LocalTable {
pub table: *mut (),
}
// manually implemented because LocalTable contains raw pointers directly
unsafe impl Send for LocalTable {}
impl LocalTable {
#[allow(clippy::erasing_op)] // TODO
pub fn offset_base() -> u8 {
@ -530,6 +575,9 @@ pub struct LocalMemory {
pub memory: *mut (),
}
// manually implemented because LocalMemory contains raw pointers
unsafe impl Send for LocalMemory {}
impl LocalMemory {
#[allow(clippy::erasing_op)] // TODO
pub fn offset_base() -> u8 {
@ -580,6 +628,9 @@ pub struct Anyfunc {
pub sig_id: SigId,
}
// manually implemented because Anyfunc contains raw pointers directly
unsafe impl Send for Anyfunc {}
impl Anyfunc {
pub fn null() -> Self {
Self {