diff --git a/src/spec/tests/call.wast b/src/spec/tests/call.wast
index dbfcecfd5..877d70c62 100644
--- a/src/spec/tests/call.wast
+++ b/src/spec/tests/call.wast
@@ -13,17 +13,25 @@
 ;;   )
 ;; )

-  (func $multiply (; 1 ;) (param i32 i32) (result i32)
+  (func $multiply (; 1 ;) (param i32 i32) (result i32)
     (i32.mul
       (get_local 0)
       (get_local 1)
     )
   )

+  (func (export "multiply_direct") (; 1 ;) (param $0 i32) (result i32)
+    (i32.mul
+      (get_local 0)
+      (i32.const 3)
+    )
+  )
+
   (func (export "multiply_by_3") (; 1 ;) (param $0 i32) (result i32)
     (call $multiply
-      (get_local $0) (i32.const 3)
+      (get_local $0)
+;;      (i32.const 2)
     )
   )
@@ -37,3 +45,5 @@
 )

 (assert_return (invoke "multiply_by_3_raw" (i32.const 2)) (i32.const 6))
+
+;; (assert_return (invoke "multiply_by_3_raw" (i32.const 2)) (i32.const 6))
diff --git a/src/webassembly/instance.rs b/src/webassembly/instance.rs
index ac4cc3545..55f914e52 100644
--- a/src/webassembly/instance.rs
+++ b/src/webassembly/instance.rs
@@ -13,16 +13,17 @@
 use memmap::MmapMut;
 use region;
 use spin::RwLock;
 use std::marker::PhantomData;
-use std::ptr;
+use std::ptr::{self, write_unaligned};
 use std::sync::Arc;
 use std::{mem, slice};
+use std::iter::Iterator;

 use super::super::common::slice::{BoundedSlice, UncheckedSlice};
 use super::errors::ErrorKind;
 use super::memory::LinearMemory;
 use super::module::Module;
 use super::module::{DataInitializer, Exportable, Export};
-use super::relocation::{RelocSink, TrapSink};
+use super::relocation::{RelocSink, TrapSink, RelocationType, Reloc};

 pub fn protect_codebuf(code_buf: &Vec<u8>) -> Result<(), String> {
@@ -149,7 +150,7 @@ impl Instance {
         // let mut total_size: usize = 0;
         let mut context_and_offsets = Vec::with_capacity(module.info.function_bodies.len());
-
+        let mut relocations = Vec::new();
         // Compile the functions (from cranelift IR to machine code)
         for function_body in module.info.function_bodies.values() {
             let mut func_context = Context::for_function(function_body.to_owned());
@@ -175,9 +176,98 @@ impl Instance {
             functions.push(func_offset);
             context_and_offsets.push(func_context);
+            relocations.push(reloc_sink.func_relocs);
+            // println!("FUNCTION RELOCATIONS {:?}", reloc_sink.func_relocs)
             // total_size += code_size_offset;
         }

+        // For each compiled function, we find the calls it makes and relocate
+        // each call to the proper memory address.
+        // The relocations are relative to the relocation's address plus four bytes.
+        // TODO: Support architectures other than x64, and other reloc kinds.
+        for (i, function_relocs) in relocations.iter().enumerate() {
+            // for r in function_relocs {
+            for (ref reloc, ref reloc_type) in function_relocs {
+                let target_func_address: isize = match reloc_type {
+                    RelocationType::Normal(func_index) => {
+                        functions[*func_index as usize].as_ptr() as isize
+                    },
+                    _ => unimplemented!()
+                    // RelocationType::Intrinsic(name) => {
+                    //     get_abi_intrinsic(name)?
+                    // },
+                    // RelocationTarget::UserFunc(index) => {
+                    //     functions[module.defined_func_index(index).expect(
+                    //         "relocation to imported function not supported yet",
+                    //     )].as_ptr() as isize
+                    // }
+                    // RelocationTarget::GrowMemory => grow_memory as isize,
+                    // RelocationTarget::CurrentMemory => current_memory as isize,
+                };
+                // print!("FUNCTION {:?}", target_func_address);
+                let body = &mut functions[i];
+                match reloc.reloc {
+                    Reloc::Abs8 => unsafe {
+                        let reloc_address = body.as_mut_ptr().offset(reloc.offset as isize) as i64;
+                        let reloc_addend = reloc.addend;
+                        let reloc_abs = target_func_address as i64 + reloc_addend;
+                        write_unaligned(reloc_address as *mut i64, reloc_abs);
+                    },
+                    Reloc::X86PCRel4 => unsafe {
+                        let reloc_address = body.as_mut_ptr().offset(reloc.offset as isize) as isize;
+                        let reloc_addend = reloc.addend as isize;
+                        // TODO: Handle overflow.
+                        let reloc_delta_i32 =
+                            (target_func_address - reloc_address + reloc_addend) as i32;
+                        write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
+                    },
+                    _ => panic!("unsupported reloc kind"),
+                }
+                // let reloc_address = unsafe {
+                //     (target_func_address.to_owned().as_mut_ptr() as *const u8).offset(reloc.offset as isize)
+                // };
+
+                // match reloc.reloc {
+                //     Reloc::Abs8 => {
+                //         unsafe {
+                //             // (reloc_address as *mut usize).write(target_func_address.to_owned().as_ptr() as usize);
+                //         }
+                //     }
+                //     _ => unimplemented!()
+                // }
+
+                // let target_func_address: isize = match r.reloc_target {
+                //     RelocationTarget::UserFunc(index) => {
+                //         functions[module.defined_func_index(index).expect(
+                //             "relocation to imported function not supported yet",
+                //         )].as_ptr() as isize
+                //     }
+                //     RelocationTarget::GrowMemory => grow_memory as isize,
+                //     RelocationTarget::CurrentMemory => current_memory as isize,
+                // };
+
+                // let body = &mut functions[i];
+                // match r.reloc {
+                //     Reloc::Abs8 => unsafe {
+                //         let reloc_address = body.as_mut_ptr().offset(r.offset as isize) as i64;
+                //         let reloc_addend = r.addend;
+                //         let reloc_abs = target_func_address as i64 + reloc_addend;
+                //         write_unaligned(reloc_address as *mut i64, reloc_abs);
+                //     },
+                //     Reloc::X86PCRel4 => unsafe {
+                //         let reloc_address = body.as_mut_ptr().offset(r.offset as isize) as isize;
+                //         let reloc_addend = r.addend as isize;
+                //         // TODO: Handle overflow.
+                //         let reloc_delta_i32 =
+                //             (target_func_address - reloc_address + reloc_addend) as i32;
+                //         write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
+                //     },
+                //     _ => panic!("unsupported reloc kind"),
+                // }
+            }
+        }
+
+
         // We only want to allocate in memory if there is more than
         // 0 functions. Otherwise reserving a 0-sized memory region
         // cause a panic error
@@ -338,9 +428,11 @@ impl Instance {
         }
     }

-    pub fn invoke(&self, func_index: FuncIndex, _args: Vec) -> u32 {
-        let func: fn() -> u32 = self.get_function(func_index);
-        func()
+    pub fn invoke(&self, func_index: FuncIndex, _args: Vec) -> i32 {
+        let func: fn() -> i32 = self.get_function(func_index);
+        let result = func();
+        println!("RESULT {:?}", result);
+        result
     }

     pub fn start(&self) {
@@ -402,3 +494,25 @@ impl Clone for Instance {
         }
     }
 }
+
+
+extern "C" fn grow_memory(size: u32, memory_index: u32, vmctx: *mut *mut u8) -> u32 {
+    unimplemented!();
+    // unsafe {
+    //     let instance = (*vmctx.offset(4)) as *mut Instance;
+    //     (*instance)
+    //         .memory_mut(memory_index as MemoryIndex)
+    //         .grow(size)
+    //         .unwrap_or(u32::max_value())
+    // }
+}
+
+extern "C" fn current_memory(memory_index: u32, vmctx: *mut *mut u8) -> u32 {
+    unimplemented!();
+    // unsafe {
+    //     let instance = (*vmctx.offset(4)) as *mut Instance;
+    //     (*instance)
+    //         .memory_mut(memory_index as MemoryIndex)
+    //         .current_size()
+    // }
+}
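// A minimal, standalone sketch (not part of the patch above) of the X86PCRel4
// arithmetic that the relocation loop in instance.rs applies. The helper name
// `patch_pcrel4`, the fake 16-byte body buffer and the -4 addend are assumptions
// made for illustration; in the real code Cranelift records the offset, addend
// and reloc kind in `reloc_sink.func_relocs`.
use std::ptr::write_unaligned;

/// Patch the 4-byte PC-relative immediate at `offset` inside `body` so that a
/// `call rel32` emitted there lands on `target`.
fn patch_pcrel4(body: &mut [u8], offset: usize, target: isize, addend: isize) {
    assert!(offset + 4 <= body.len());
    let reloc_address = body.as_mut_ptr() as isize + offset as isize;
    // The CPU resolves `call rel32` relative to the *next* instruction
    // (reloc_address + 4); an addend of -4 folds that into the stored delta,
    // which is why the comment above says "the relocation's address plus four bytes".
    let reloc_delta_i32 = (target - reloc_address + addend) as i32;
    // `write_unaligned` is needed because the immediate is rarely 4-byte aligned.
    unsafe { write_unaligned(reloc_address as *mut i32, reloc_delta_i32) };
}

fn main() {
    // Pretend this is an emitted function body with a call site at offset 5,
    // and that the callee begins 64 bytes past the body's base address.
    let mut body = [0u8; 16];
    let offset = 5;
    let target = body.as_ptr() as isize + 64;
    patch_pcrel4(&mut body, offset, target, -4);

    // The invariant the relocation establishes: next_instruction + rel32 == target.
    let rel32 = i32::from_le_bytes([body[5], body[6], body[7], body[8]]) as isize;
    assert_eq!(body.as_ptr() as isize + offset as isize + 4 + rel32, target);
}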
diff --git a/src/webassembly/relocation.rs b/src/webassembly/relocation.rs
index 9343b935d..347cd4106 100644
--- a/src/webassembly/relocation.rs
+++ b/src/webassembly/relocation.rs
@@ -1,6 +1,9 @@
 use cranelift_codegen::binemit;
 use cranelift_codegen::ir::{self, ExternalName, SourceLoc, TrapCode};

+pub use cranelift_codegen::binemit::Reloc;
+use cranelift_wasm::FuncIndex;
+
 #[derive(Debug)]
 pub struct Relocation {
     /// The relocation code.
@@ -92,6 +95,90 @@ impl RelocSink {
     }
 }

+
+/// Implementation of a relocation sink that just saves all the information for later
+// pub struct RelocSink {
+//     /// Relocations recorded for the function.
+//     pub func_relocs: Vec<Relocation>,
+// }

+// impl binemit::RelocSink for RelocSink {
+//     fn reloc_ebb(
+//         &mut self,
+//         _offset: binemit::CodeOffset,
+//         _reloc: binemit::Reloc,
+//         _ebb_offset: binemit::CodeOffset,
+//     ) {
+//         // This should use the `offsets` field of `ir::Function`.
+//         panic!("ebb headers not yet implemented");
+//     }
+//     fn reloc_external(
+//         &mut self,
+//         offset: binemit::CodeOffset,
+//         reloc: binemit::Reloc,
+//         name: &ExternalName,
+//         addend: binemit::Addend,
+//     ) {
+//         let reloc_target = if let ExternalName::User { namespace, index } = *name {
+//             debug_assert!(namespace == 0);
+//             RelocationTarget::UserFunc(FuncIndex::new(index as usize))
+//         } else if *name == ExternalName::testcase("grow_memory") {
+//             RelocationTarget::GrowMemory
+//         } else if *name == ExternalName::testcase("current_memory") {
+//             RelocationTarget::CurrentMemory
+//         } else {
+//             panic!("unrecognized external name")
+//         };
+//         self.func_relocs.push(Relocation {
+//             reloc,
+//             reloc_target,
+//             offset,
+//             addend,
+//         });
+//     }
+//     fn reloc_jt(
+//         &mut self,
+//         _offset: binemit::CodeOffset,
+//         _reloc: binemit::Reloc,
+//         _jt: ir::JumpTable,
+//     ) {
+//         panic!("jump tables not yet implemented");
+//     }
+// }
+
+// impl RelocSink {
+//     pub fn new() -> Self {
+//         Self {
+//             func_relocs: Vec::new(),
+//         }
+//     }
+// }
+
+// /// A record of a relocation to perform.
+// #[derive(Debug, Clone)]
+// pub struct Relocation {
+//     /// The relocation code.
+//     pub reloc: binemit::Reloc,
+//     /// Relocation target.
+//     pub reloc_target: RelocationTarget,
+//     /// The offset where to apply the relocation.
+//     pub offset: binemit::CodeOffset,
+//     /// The addend to add to the relocation value.
+//     pub addend: binemit::Addend,
+// }
+
+// /// Destination function. Can be either user function or some special one, like grow_memory.
+// #[derive(Debug, Copy, Clone)]
+// pub enum RelocationTarget {
+//     /// The user function index.
+//     UserFunc(FuncIndex),
+//     /// Function for growing the default memory by the specified amount of pages.
+//     GrowMemory,
+//     /// Function for query current size of the default linear memory.
+//     CurrentMemory,
+// }
+
+
 pub struct TrapData {
     pub offset: usize,
     pub code: TrapCode,
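// A rough sketch (assumed for illustration, not taken from this patch) of the
// shapes that the instance.rs loop above relies on: `reloc_sink.func_relocs` is
// iterated as `(Relocation, RelocationType)` pairs, and the code matches on
// `RelocationType::Normal(func_index)` and `RelocationType::Intrinsic(name)`.
// The real definitions live in relocation.rs outside the hunks shown here, so
// the payload and field types below are guesses based only on how they are used.
use cranelift_codegen::binemit::{Addend, CodeOffset, Reloc};

/// Where a recorded call should ultimately point.
#[derive(Debug, Clone)]
pub enum RelocationType {
    /// A call to another function defined in this module, by function index.
    Normal(u32),
    /// A call to a runtime intrinsic such as `grow_memory`, by name.
    Intrinsic(String),
}

/// How and where to patch the emitted code for one call site.
#[derive(Debug, Clone)]
pub struct Relocation {
    /// The relocation kind (e.g. Reloc::X86PCRel4 for a call immediate).
    pub reloc: Reloc,
    /// Byte offset of the immediate within the function body.
    pub offset: CodeOffset,
    /// Constant added to the resolved address (-4 for PC-relative calls).
    pub addend: Addend,
}

/// Collects one `(Relocation, RelocationType)` entry per call site while a
/// function body is emitted; instance.rs drains this into `relocations`.
pub struct RelocSink {
    pub func_relocs: Vec<(Relocation, RelocationType)>,
}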