mirror of
https://github.com/fluencelabs/wasmer
synced 2025-06-29 00:21:34 +00:00
Change memory access api again
This commit is contained in:
262
lib/runtime-core/src/memory/atomic.rs
Normal file
262
lib/runtime-core/src/memory/atomic.rs
Normal file
@ -0,0 +1,262 @@
|
||||
//! This is mostly copied from https://docs.rs/integer-atomics/1.0.2/src/integer_atomics/atomic.rs.html
|
||||
//! Many thanks to "main()" for writing this.
|
||||
|
||||
use std::cell::UnsafeCell;
|
||||
use std::mem;
|
||||
use std::num::Wrapping;
|
||||
use std::ops::{Add, BitAnd, BitOr, BitXor, Sub};
|
||||
use std::panic::RefUnwindSafe;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
/// Conversion glue between a raw `usize` machine word (as stored in the
/// backing `AtomicUsize`) and a wrapping integer type that exposes a
/// plain primitive (`Self::Public`) to callers.
pub trait IntCast:
    Copy
    + Eq
    + Add<Output = Self>
    + BitAnd<Output = Self>
    + BitOr<Output = Self>
    + BitXor<Output = Self>
    + Sub<Output = Self>
{
    /// The primitive integer type exposed to callers.
    type Public: PartialEq + Copy;

    /// Reinterpret the low bits of a `usize` as `Self`.
    fn from(u: usize) -> Self;
    /// Convert `self` into a `usize` (signed values sign-extend).
    fn to(self) -> usize;

    /// Wrap a public value.
    fn new(p: Self::Public) -> Self;
    /// Unwrap back to the public value.
    fn unwrap(self) -> Self::Public;
}

/// Implements `IntCast` for `Wrapping<$t>` for each listed primitive.
macro_rules! intcast {
    ($($t:ident)+) => {$(
        impl IntCast for Wrapping<$t> {
            type Public = $t;

            fn from(u: usize) -> Self {
                Wrapping(u as $t)
            }

            fn to(self) -> usize {
                // NOTE: for signed types this sign-extends,
                // e.g. Wrapping(-1i8).to() == usize::MAX.
                self.0 as usize
            }

            fn new(p: $t) -> Self {
                Wrapping(p)
            }

            fn unwrap(self) -> $t {
                self.0
            }
        }
    )+};
}

intcast! { u8 i8 u16 i16 u32 i32 u64 i64 }
|
||||
|
||||
/// A wrapper providing atomic access to a (possibly under-aligned)
/// integer value, e.g. one living inside wasm linear memory.
///
/// All reads and writes are routed through the word-aligned `AtomicUsize`
/// that covers the value's bytes (see `proxy` in the `impl` below).
pub struct Atomic<T> {
    // Interior mutability; only ever accessed via atomic operations on
    // the containing machine word.
    v: UnsafeCell<Wrapping<T>>,
}

impl<T: Default + IntCast> Default for Atomic<T> {
    fn default() -> Self {
        Self::new(T::default().unwrap())
    }
}

// TODO: impl Debug

// NOTE(review): these impls are unconditional in `T`. Soundness presumably
// relies on `Atomic<T>` only ever being instantiated with the plain integer
// types for which all access goes through `AtomicUsize` — TODO confirm no
// other instantiations are possible outside this module.
unsafe impl<T> Sync for Atomic<T> {}
impl<T> RefUnwindSafe for Atomic<T> {}
|
||||
|
||||
/// Splices the low `size_of::<T>() * 8` bits of `b` into `a` at bit
/// position `offset`, leaving every other bit of `a` untouched.
///
/// Used to build the replacement word for a partial-width
/// compare-exchange on the containing `AtomicUsize`.
fn inject<T>(a: usize, b: usize, offset: usize) -> usize {
    let bits = mem::size_of::<T>() * 8;
    // `(1 << bits) - 1` overflows the shift when `T` is as wide as `usize`
    // (e.g. u64 on a 64-bit target), so special-case the full-word mask.
    let mask = if bits >= mem::size_of::<usize>() * 8 {
        !0
    } else {
        ((1usize << bits) - 1) << offset
    };
    // `b` must be masked too: `IntCast::to` sign-extends signed values
    // (e.g. -1i8 -> usize::MAX), which would otherwise clobber the
    // neighboring bytes of the shared word.
    (a & !mask) | ((b << offset) & mask)
}
|
||||
|
||||
// Straight from libcore's atomic.rs: the strongest memory ordering that
// `compare_exchange` may legally use on its failure path for a given
// success ordering (the failure ordering cannot be Release or AcqRel).
#[inline]
fn strongest_failure_ordering(order: Ordering) -> Ordering {
    match order {
        Ordering::Relaxed | Ordering::Release => Ordering::Relaxed,
        Ordering::Acquire | Ordering::AcqRel => Ordering::Acquire,
        Ordering::SeqCst => Ordering::SeqCst,
        // `Ordering` is #[non_exhaustive]; no other variants exist today.
        _ => unreachable!(),
    }
}
|
||||
|
||||
impl<T: IntCast> Atomic<T> {
    /// Returns the word-aligned `AtomicUsize` containing this value's
    /// bytes, together with the value's bit offset inside that word.
    ///
    /// NOTE(review): using `(ptr - aligned) * 8` as the bit offset assumes
    /// a little-endian target — TODO confirm this is guaranteed elsewhere.
    /// The aligned word may also cover bytes outside this `Atomic`'s own
    /// storage; presumably sound because the value lives inside wasm
    /// linear memory — verify against callers.
    #[inline]
    fn proxy(&self) -> (&AtomicUsize, usize) {
        let ptr = self.v.get() as usize;
        let aligned = ptr & !(mem::size_of::<usize>() - 1);
        (
            unsafe { &*(aligned as *const AtomicUsize) },
            (ptr - aligned) * 8,
        )
    }

    /// Creates a new `Atomic` holding `v`.
    #[inline]
    pub(super) fn new(v: T::Public) -> Self {
        Atomic {
            v: UnsafeCell::new(Wrapping(T::new(v))),
        }
    }

    /// Direct mutable access; safe because `&mut self` proves exclusivity.
    #[inline]
    pub fn get_mut(&mut self) -> &mut T::Public {
        unsafe { &mut *(self.v.get() as *mut T::Public) }
    }

    /// Consumes the atomic and returns the contained value.
    #[inline]
    pub fn into_inner(self) -> T::Public {
        self.v.into_inner().0.unwrap()
    }

    /// Atomically loads the value (via a load of the containing word).
    #[inline]
    pub fn load(&self, order: Ordering) -> T::Public {
        let (p, o) = self.proxy();
        T::from(p.load(order) >> o).unwrap()
    }

    /// Runs `f` in a CAS loop with `order` on success and the strongest
    /// legal ordering derived from it on failure; returns the previous
    /// value.
    #[inline]
    fn op<F: Fn(T) -> Option<T>>(&self, f: F, order: Ordering) -> T::Public {
        self.op_new(f, order, strongest_failure_ordering(order))
    }

    /// Core CAS loop: repeatedly applies `f` to the current value and
    /// tries to install the result. `f` returning `None` aborts the loop
    /// and returns the value observed at that point. On success, returns
    /// the value that was replaced (fetch semantics).
    #[inline]
    fn op_new<F: Fn(T) -> Option<T>>(
        &self,
        f: F,
        success: Ordering,
        failure: Ordering,
    ) -> T::Public {
        let (p, o) = self.proxy();
        let mut old = p.load(Ordering::Relaxed);
        loop {
            let old_t = T::from(old >> o);
            let new_t = match f(old_t) {
                Some(x) => x,
                // Caller declined to update: report what we saw.
                None => return old_t.unwrap(),
            };

            match Self::op_weak(p, o, old, new_t, success, failure) {
                // Exchange succeeded: `old` was the previous word.
                Ok(()) => return T::from(old >> o).unwrap(),
                // Lost the race: retry with the freshly observed word.
                Err(prev) => old = prev,
            };
        }
    }

    /// One weak compare-exchange attempt on the containing word, writing
    /// `new_t` into the value's bit range while preserving the rest.
    #[inline]
    fn op_weak(
        p: &AtomicUsize,
        o: usize,
        old: usize,
        new_t: T,
        success: Ordering,
        failure: Ordering,
    ) -> Result<(), usize> {
        let new = inject::<T>(old, new_t.to(), o);
        p.compare_exchange_weak(old, new, success, failure)
            .map(|_| ())
    }

    /// Atomically stores `val`. Implemented as a CAS loop, so unlike
    /// `AtomicUsize::store` this accepts any ordering (including
    /// Acquire/AcqRel) without panicking.
    #[inline]
    pub fn store(&self, val: T::Public, order: Ordering) {
        self.op(|_| Some(T::new(val)), order);
    }

    /// Atomically replaces the value with `val`, returning the previous
    /// value.
    #[inline]
    pub fn swap(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|_| Some(T::new(val)), order)
    }

    /// Stores `new` if the current value equals `current`; always returns
    /// the previous value (equal to `current` iff the swap happened).
    #[inline]
    pub fn compare_and_swap(
        &self,
        current: T::Public,
        new: T::Public,
        order: Ordering,
    ) -> T::Public {
        self.op(
            |x| {
                if x == T::new(current) {
                    Some(T::new(new))
                } else {
                    None
                }
            },
            order,
        )
    }

    /// Stores `new` if the current value equals `current`.
    /// Returns `Ok(previous)` on success, `Err(actual)` otherwise.
    #[inline]
    pub fn compare_exchange(
        &self,
        current: T::Public,
        new: T::Public,
        success: Ordering,
        failure: Ordering,
    ) -> Result<T::Public, T::Public> {
        match self.op_new(
            |x| {
                if x == T::new(current) {
                    Some(T::new(new))
                } else {
                    None
                }
            },
            success,
            failure,
        ) {
            // op_new returns the observed value; equality with `current`
            // means the exchange took place.
            x if x == current => Ok(x),
            x => Err(x),
        }
    }

    /// Like `compare_exchange`, but may fail spuriously even when the
    /// comparison succeeds (single weak CAS attempt, no retry loop).
    #[inline]
    pub fn compare_exchange_weak(
        &self,
        current: T::Public,
        new: T::Public,
        success: Ordering,
        failure: Ordering,
    ) -> Result<T::Public, T::Public> {
        let (p, o) = self.proxy();
        let old = p.load(Ordering::Relaxed);
        let old_t = T::from(old >> o).unwrap();
        if old_t != current {
            return Err(old_t);
        }

        Self::op_weak(p, o, old, T::new(new), success, failure)
            .map(|()| current)
            .map_err(|x| T::from(x >> o).unwrap())
    }

    /// Atomically adds `val` (wrapping); returns the previous value.
    #[inline]
    pub fn fetch_add(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x + T::new(val)), order)
    }

    /// Atomically subtracts `val` (wrapping); returns the previous value.
    #[inline]
    pub fn fetch_sub(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x - T::new(val)), order)
    }

    /// Atomically bitwise-ANDs with `val`; returns the previous value.
    #[inline]
    pub fn fetch_and(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x & T::new(val)), order)
    }

    /// Atomically bitwise-ORs with `val`; returns the previous value.
    #[inline]
    pub fn fetch_or(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x | T::new(val)), order)
    }

    /// Atomically bitwise-XORs with `val`; returns the previous value.
    #[inline]
    pub fn fetch_xor(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x ^ T::new(val)), order)
    }
}
|
@ -4,7 +4,7 @@ use crate::{
|
||||
import::IsExport,
|
||||
memory::dynamic::DYNAMIC_GUARD_SIZE,
|
||||
memory::static_::{SAFE_STATIC_GUARD_SIZE, SAFE_STATIC_HEAP_SIZE},
|
||||
types::MemoryDescriptor,
|
||||
types::{MemoryDescriptor, ValueType},
|
||||
units::Pages,
|
||||
vm,
|
||||
};
|
||||
@ -12,79 +12,36 @@ use std::{
|
||||
cell::{Cell, Ref, RefCell, RefMut},
|
||||
fmt,
|
||||
marker::PhantomData,
|
||||
ops::{Deref, DerefMut},
|
||||
mem,
|
||||
ops::{Bound, Deref, DerefMut, Index, RangeBounds},
|
||||
ptr,
|
||||
rc::Rc,
|
||||
slice,
|
||||
};
|
||||
|
||||
pub use self::atomic::Atomic;
|
||||
pub use self::dynamic::DynamicMemory;
|
||||
pub use self::static_::{SharedStaticMemory, StaticMemory};
|
||||
pub use self::view::{Atomically, MemoryView};
|
||||
|
||||
mod atomic;
|
||||
mod dynamic;
|
||||
mod static_;
|
||||
mod view;
|
||||
|
||||
pub trait MemoryImpl<'a>: Clone {
|
||||
type Access: Deref<Target = [u8]>;
|
||||
type AccessMut: DerefMut<Target = [u8]>;
|
||||
|
||||
fn new(desc: MemoryDescriptor) -> Result<Self, CreationError>;
|
||||
fn grow(&'a self, delta: Pages) -> Option<Pages>;
|
||||
fn size(&'a self) -> Pages;
|
||||
fn vm_local_memory(&'a self) -> *mut vm::LocalMemory;
|
||||
fn access(&'a self) -> Self::Access;
|
||||
fn access_mut(&'a self) -> Self::AccessMut;
|
||||
#[derive(Clone)]
|
||||
enum MemoryVariant {
|
||||
Unshared(UnsharedMemory),
|
||||
Shared(SharedMemory),
|
||||
}
|
||||
|
||||
/// Compile-time policy tying a `Memory<S>` wrapper to its backing
/// implementation (shared vs. unshared).
pub trait SharedPolicy
where
    Self: Sized,
    for<'a> Self::Memory: MemoryImpl<'a>,
{
    /// Must agree with `MemoryDescriptor::shared` at construction time.
    const SHARED: bool;
    /// Concrete backing memory type selected by this policy.
    type Memory;
    /// Extracts the typed `Memory<Self>` out of a type-erased
    /// `MemoryVariant`; implementations panic if the variant does not
    /// match the policy.
    fn transform_variant(variants: &MemoryVariant) -> &Memory<Self>;
}
|
||||
/// Marker type selecting the shared-memory policy.
pub struct Shared;
impl SharedPolicy for Shared {
    const SHARED: bool = true;
    type Memory = SharedMemory;
    fn transform_variant(variants: &MemoryVariant) -> &Memory<Self> {
        match variants {
            MemoryVariant::Shared(shared_mem) => shared_mem,
            // A policy/variant mismatch is a logic error in the caller.
            MemoryVariant::Unshared(_) => {
                panic!("cannot transform unshared memory to shared memory")
            }
        }
    }
}
|
||||
/// Marker type selecting the unshared-memory policy.
pub struct Unshared;
impl SharedPolicy for Unshared {
    const SHARED: bool = false;
    type Memory = UnsharedMemory;
    fn transform_variant(variants: &MemoryVariant) -> &Memory<Self> {
        match variants {
            MemoryVariant::Unshared(unshared_mem) => unshared_mem,
            // A policy/variant mismatch is a logic error in the caller.
            MemoryVariant::Shared(_) => panic!("cannot transform shared memory to unshared memory"),
        }
    }
}
|
||||
|
||||
unsafe impl Send for Memory<Shared> {}
|
||||
unsafe impl Sync for Memory<Shared> {}
|
||||
|
||||
pub struct Memory<S = Unshared>
|
||||
where
|
||||
S: SharedPolicy,
|
||||
{
|
||||
#[derive(Clone)]
|
||||
pub struct Memory {
|
||||
desc: MemoryDescriptor,
|
||||
memory: S::Memory,
|
||||
_phantom: PhantomData<S>,
|
||||
variant: MemoryVariant,
|
||||
}
|
||||
|
||||
impl<S> Memory<S>
|
||||
where
|
||||
S: SharedPolicy,
|
||||
{
|
||||
impl Memory {
|
||||
/// Create a new `Memory` from a [`MemoryDescriptor`]
|
||||
///
|
||||
/// [`MemoryDescriptor`]: struct.MemoryDescriptor.html
|
||||
@ -103,22 +60,18 @@ where
|
||||
/// shared: false,
|
||||
/// };
|
||||
///
|
||||
/// let memory: Memory = Memory::new(descriptor)?;
|
||||
/// let memory = Memory::new(descriptor)?;
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// ```
|
||||
pub fn new(desc: MemoryDescriptor) -> Result<Memory<S>, CreationError> {
|
||||
assert_eq!(
|
||||
desc.shared,
|
||||
S::SHARED,
|
||||
"type parameter must match description"
|
||||
);
|
||||
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
|
||||
let variant = if !desc.shared {
|
||||
MemoryVariant::Unshared(UnsharedMemory::new(desc)?)
|
||||
} else {
|
||||
MemoryVariant::Shared(SharedMemory::new(desc)?)
|
||||
};
|
||||
|
||||
Ok(Memory {
|
||||
desc,
|
||||
memory: S::Memory::new(desc)?,
|
||||
_phantom: PhantomData,
|
||||
})
|
||||
Ok(Memory { desc, variant })
|
||||
}
|
||||
|
||||
/// Return the [`MemoryDescriptor`] that this memory
|
||||
@ -131,51 +84,81 @@ where
|
||||
|
||||
/// Grow this memory by the specfied number of pages.
|
||||
pub fn grow(&self, delta: Pages) -> Option<Pages> {
|
||||
self.memory.grow(delta)
|
||||
match &self.variant {
|
||||
MemoryVariant::Unshared(unshared_mem) => unshared_mem.grow(delta),
|
||||
MemoryVariant::Shared(shared_mem) => shared_mem.grow(delta),
|
||||
}
|
||||
}
|
||||
|
||||
/// The size, in wasm pages, of this memory.
|
||||
pub fn size(&self) -> Pages {
|
||||
self.memory.size()
|
||||
match &self.variant {
|
||||
MemoryVariant::Unshared(unshared_mem) => unshared_mem.size(),
|
||||
MemoryVariant::Shared(shared_mem) => shared_mem.size(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn access(&self) -> <S::Memory as MemoryImpl>::Access {
|
||||
self.memory.access()
|
||||
/// Returns a typed view over a range of this memory, or `None` when the
/// requested range does not fit inside the memory's current bound.
///
/// NOTE(review): several things here look suspect — confirm intended
/// semantics before relying on this:
/// - the returned view always starts at `base`, ignoring `range_start`;
/// - `range_start + size_in_bytes` adds an element index to a byte count,
///   and the `>=` comparison also rejects a view ending exactly at `bound`;
/// - an inverted range is only rejected after `range_end - range_start`,
///   which underflows (panics in debug) first.
pub fn view<T: ValueType, R: RangeBounds<usize>>(&self, range: R) -> Option<MemoryView<T>> {
    // Snapshot the VM's view of this memory (base pointer and byte bound).
    let vm::LocalMemory {
        base,
        bound,
        memory: _,
    } = unsafe { *self.vm_local_memory() };

    // Normalize the start bound to an inclusive index.
    let range_start = match range.start_bound() {
        Bound::Included(start) => *start,
        Bound::Excluded(start) => *start + 1,
        Bound::Unbounded => 0,
    };

    // Normalize the end bound to an exclusive index.
    let range_end = match range.end_bound() {
        Bound::Included(end) => *end + 1,
        Bound::Excluded(end) => *end,
        Bound::Unbounded => bound as usize,
    };

    let length = range_end - range_start;

    let size_in_bytes = mem::size_of::<T>() * length;

    if range_end < range_start || range_start + size_in_bytes >= bound {
        return None;
    }

    Some(unsafe { MemoryView::new(base as _, length as u32) })
}
|
||||
|
||||
pub fn access_mut(&self) -> <S::Memory as MemoryImpl>::AccessMut {
|
||||
self.memory.access_mut()
|
||||
/// Consumes this memory and returns a shared handle, or `None` if the
/// underlying descriptor is not shared.
///
/// NOTE(review): this constructs a fresh `SharedMemory` from the
/// descriptor alone rather than reusing the existing variant — confirm
/// that is intended and that no backing state is lost.
pub fn shared(self) -> Option<SharedMemory> {
    if self.desc.shared {
        Some(SharedMemory { desc: self.desc })
    } else {
        None
    }
}
|
||||
|
||||
pub(crate) fn vm_local_memory(&self) -> *mut vm::LocalMemory {
|
||||
self.memory.vm_local_memory()
|
||||
}
|
||||
}
|
||||
|
||||
impl IsExport for Memory<Unshared> {
|
||||
fn to_export(&self) -> Export {
|
||||
Export::Memory(MemoryVariant::Unshared(self.clone()))
|
||||
}
|
||||
}
|
||||
impl IsExport for Memory<Shared> {
|
||||
fn to_export(&self) -> Export {
|
||||
Export::Memory(MemoryVariant::Shared(self.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Clone for Memory<S>
|
||||
where
|
||||
S: SharedPolicy,
|
||||
{
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
desc: self.desc,
|
||||
memory: self.memory.clone(),
|
||||
_phantom: PhantomData,
|
||||
match &self.variant {
|
||||
MemoryVariant::Unshared(unshared_mem) => unshared_mem.vm_local_memory(),
|
||||
MemoryVariant::Shared(shared_mem) => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IsExport for Memory {
|
||||
fn to_export(&self) -> Export {
|
||||
Export::Memory(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Memory {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.debug_struct("Memory")
|
||||
.field("desc", &self.desc)
|
||||
.field("size", &self.size())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum MemoryType {
|
||||
Dynamic,
|
||||
@ -201,24 +184,6 @@ impl MemoryType {
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> fmt::Debug for Memory<S>
|
||||
where
|
||||
S: SharedPolicy,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.debug_struct("Memory")
|
||||
.field("desc", &self.desc)
|
||||
.field("size", &self.size())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum MemoryVariant {
|
||||
Unshared(Memory<Unshared>),
|
||||
Shared(Memory<Shared>),
|
||||
}
|
||||
|
||||
enum UnsharedMemoryStorage {
|
||||
Dynamic(Box<DynamicMemory>),
|
||||
Static(Box<StaticMemory>),
|
||||
@ -233,11 +198,8 @@ struct UnsharedMemoryInternal {
|
||||
local: Cell<vm::LocalMemory>,
|
||||
}
|
||||
|
||||
impl<'a> MemoryImpl<'a> for UnsharedMemory {
|
||||
type Access = Ref<'a, [u8]>;
|
||||
type AccessMut = RefMut<'a, [u8]>;
|
||||
|
||||
fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
|
||||
impl UnsharedMemory {
|
||||
pub fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
|
||||
let mut local = vm::LocalMemory {
|
||||
base: ptr::null_mut(),
|
||||
bound: 0,
|
||||
@ -262,7 +224,7 @@ impl<'a> MemoryImpl<'a> for UnsharedMemory {
|
||||
})
|
||||
}
|
||||
|
||||
fn grow(&self, delta: Pages) -> Option<Pages> {
|
||||
pub fn grow(&self, delta: Pages) -> Option<Pages> {
|
||||
let mut storage = self.internal.storage.borrow_mut();
|
||||
|
||||
let mut local = self.internal.local.get();
|
||||
@ -279,7 +241,7 @@ impl<'a> MemoryImpl<'a> for UnsharedMemory {
|
||||
pages
|
||||
}
|
||||
|
||||
fn size(&self) -> Pages {
|
||||
pub fn size(&self) -> Pages {
|
||||
let storage = self.internal.storage.borrow();
|
||||
|
||||
match &*storage {
|
||||
@ -288,29 +250,9 @@ impl<'a> MemoryImpl<'a> for UnsharedMemory {
|
||||
}
|
||||
}
|
||||
|
||||
fn vm_local_memory(&self) -> *mut vm::LocalMemory {
|
||||
pub(crate) fn vm_local_memory(&self) -> *mut vm::LocalMemory {
|
||||
self.internal.local.as_ptr()
|
||||
}
|
||||
|
||||
/// Borrows the whole memory as an immutable byte slice; the interior
/// `RefCell` stays borrowed for as long as the returned `Ref` lives.
fn access(&'a self) -> Ref<'a, [u8]> {
    // Ref::map projects the storage borrow down to its byte slice
    // without releasing the RefCell guard.
    Ref::map(
        self.internal.storage.borrow(),
        |memory_storage| match memory_storage {
            UnsharedMemoryStorage::Dynamic(dynamic_memory) => dynamic_memory.as_slice(),
            UnsharedMemoryStorage::Static(static_memory) => static_memory.as_slice(),
        },
    )
}

/// Mutable counterpart of `access`; holds the `RefCell`'s unique borrow
/// until the returned `RefMut` is dropped (re-entrant access panics).
fn access_mut(&'a self) -> RefMut<'a, [u8]> {
    RefMut::map(
        self.internal.storage.borrow_mut(),
        |memory_storage| match memory_storage {
            UnsharedMemoryStorage::Dynamic(dynamic_memory) => dynamic_memory.as_slice_mut(),
            UnsharedMemoryStorage::Static(static_memory) => static_memory.as_slice_mut(),
        },
    )
}
|
||||
}
|
||||
|
||||
impl Clone for UnsharedMemory {
|
||||
@ -321,33 +263,28 @@ impl Clone for UnsharedMemory {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SharedMemory {}
|
||||
pub struct SharedMemory {
|
||||
desc: MemoryDescriptor,
|
||||
}
|
||||
|
||||
impl<'a> MemoryImpl<'a> for SharedMemory {
|
||||
type Access = Vec<u8>;
|
||||
type AccessMut = Vec<u8>;
|
||||
impl SharedMemory {
|
||||
fn new(desc: MemoryDescriptor) -> Result<Self, CreationError> {
|
||||
Ok(Self { desc })
|
||||
}
|
||||
|
||||
fn new(_desc: MemoryDescriptor) -> Result<Self, CreationError> {
|
||||
pub fn grow(&self, _delta: Pages) -> Option<Pages> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn grow(&self, _delta: Pages) -> Option<Pages> {
|
||||
pub fn size(&self) -> Pages {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn size(&self) -> Pages {
|
||||
pub unsafe fn as_slice(&self) -> &[u8] {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn vm_local_memory(&self) -> *mut vm::LocalMemory {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn access(&self) -> Vec<u8> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn access_mut(&self) -> Vec<u8> {
|
||||
pub unsafe fn as_slice_mut(&self) -> &mut [u8] {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
53
lib/runtime-core/src/memory/view.rs
Normal file
53
lib/runtime-core/src/memory/view.rs
Normal file
@ -0,0 +1,53 @@
|
||||
use super::atomic::{Atomic, IntCast};
|
||||
use crate::types::ValueType;
|
||||
|
||||
use std::{cell::Cell, marker::PhantomData, ops::Deref, slice};
|
||||
|
||||
/// Marker trait distinguishing the two access modes of a `MemoryView`.
pub trait Atomicity {}
/// Access mode: element access goes through atomic operations.
pub struct Atomically;
impl Atomicity for Atomically {}
/// Access mode: plain, unsynchronized `Cell`-based access (the default).
pub struct NonAtomically;
impl Atomicity for NonAtomically {}
|
||||
|
||||
/// A typed window into a region of memory.
///
/// `T` is the element type; `A` is the access-mode marker
/// (`NonAtomically` by default, `Atomically` after calling `atomically`).
pub struct MemoryView<'a, T: 'a, A = NonAtomically> {
    // Raw base pointer of the viewed region (not owned by the view).
    ptr: *mut T,
    // Number of `T` elements covered by the view.
    length: usize,
    // Ties the view to the underlying memory's lifetime `'a` and carries
    // the access-mode marker, without storing any data.
    _phantom: PhantomData<(&'a [Cell<T>], A)>,
}
|
||||
|
||||
impl<'a, T> MemoryView<'a, T, NonAtomically>
where
    T: ValueType,
{
    /// Constructs a view over `length` elements of type `T` starting at
    /// `ptr`.
    ///
    /// # Safety
    ///
    /// The caller must guarantee `ptr` is valid for reads and writes of
    /// `length` contiguous `T`s for the lifetime `'a` (e.g. derived from
    /// the VM's local memory, as in `Memory::view`).
    pub(super) unsafe fn new(ptr: *mut T, length: u32) -> Self {
        Self {
            ptr,
            length: length as usize,
            _phantom: PhantomData,
        }
    }
}
|
||||
|
||||
impl<'a, T> MemoryView<'a, T, NonAtomically> {
|
||||
pub fn atomically(self) -> MemoryView<'a, T, Atomically> {
|
||||
MemoryView {
|
||||
ptr: self.ptr,
|
||||
length: self.length,
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Non-atomic views dereference to a slice of `Cell<T>`, allowing plain
/// (unsynchronized) reads and writes into the underlying memory.
impl<'a, T> Deref for MemoryView<'a, T, NonAtomically> {
    type Target = [Cell<T>];
    fn deref(&self) -> &[Cell<T>] {
        // SAFETY(review): relies on `Cell<T>` being layout-compatible with
        // `T` and on `ptr`/`length` upholding `MemoryView::new`'s contract.
        unsafe { slice::from_raw_parts(self.ptr as *const Cell<T>, self.length) }
    }
}
|
||||
|
||||
/// Atomic views dereference to a slice of `Atomic<T>`, routing element
/// access through atomic operations on the containing machine word.
impl<'a, T: IntCast> Deref for MemoryView<'a, T, Atomically> {
    type Target = [Atomic<T>];
    fn deref(&self) -> &[Atomic<T>] {
        // NOTE(review): `IntCast` is only implemented for `Wrapping<$int>`
        // in atomic.rs, not for the bare primitives that `Memory::view`
        // uses for `T`, so it is unclear which `T` can satisfy this bound
        // in practice — TODO confirm.
        // SAFETY(review): assumes `Atomic<T>` (an `UnsafeCell<Wrapping<T>>`)
        // has the same size and layout as `T` — verify.
        unsafe { slice::from_raw_parts(self.ptr as *const Atomic<T>, self.length) }
    }
}
|
Reference in New Issue
Block a user