mirror of
https://github.com/fluencelabs/wasmer
synced 2025-06-14 01:21:19 +00:00
Populating LLVM stack maps into MSM/FSM.
Cargo.lock (generated): 1 change
@@ -1495,6 +1495,7 @@ dependencies = [
 name = "wasmer-llvm-backend"
 version = "0.5.6"
 dependencies = [
+ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "capstone 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
  "goblin 0.0.20 (registry+https://github.com/rust-lang/crates.io-index)",

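Note: the Cargo.lock hunk adds byteorder to wasmer-llvm-backend's dependencies; the stackmap module below pulls it in (use byteorder::{LittleEndian, ReadBytesExt}) to decode the little-endian __llvm_stackmaps section. A minimal sketch of reading that section's header with byteorder, assuming the version-3 layout documented at https://llvm.org/docs/StackMaps.html; the struct and field names are illustrative, not the ones used in the crate:

    use byteorder::{LittleEndian, ReadBytesExt};
    use std::io::{self, Cursor};

    // Header of the __llvm_stackmaps section (stack map format version 3):
    // u8 version, u8 reserved, u16 reserved, then three u32 counts.
    struct StackMapHeader {
        version: u8,
        num_functions: u32,
        num_constants: u32,
        num_records: u32,
    }

    fn read_header(raw: &[u8]) -> io::Result<StackMapHeader> {
        let mut r = Cursor::new(raw);
        let version = r.read_u8()?;
        let _reserved_u8 = r.read_u8()?;
        let _reserved_u16 = r.read_u16::<LittleEndian>()?;
        Ok(StackMapHeader {
            version,
            num_functions: r.read_u32::<LittleEndian>()?,
            num_constants: r.read_u32::<LittleEndian>()?,
            num_records: r.read_u32::<LittleEndian>()?,
        })
    }

    fn main() {
        // 16 zeroed bytes after the version byte: an empty version-3 section.
        let bytes = [3u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        let header = read_header(&bytes).unwrap();
        assert_eq!(header.version, 3);
        assert_eq!(header.num_functions, 0);
        let _ = (header.num_constants, header.num_records);
    }
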
@@ -244,7 +244,7 @@ impl LLVMBackend {
     pub fn new(
         module: Module,
         _intrinsics: Intrinsics,
-        _stackmaps: &StackmapRegistry,
+        stackmaps: &StackmapRegistry,
     ) -> (Self, LLVMCache) {
         Target::initialize_x86(&InitializationConfig {
             asm_parser: true,
@@ -303,8 +303,36 @@ impl LLVMBackend {
             )
         };
         if raw_stackmap.len() > 0 {
-            let map = stackmap::StackMap::parse(raw_stackmap);
+            let map = stackmap::StackMap::parse(raw_stackmap).unwrap();
             eprintln!("{:?}", map);
+
+            let (code_ptr, code_size) = unsafe {
+                (
+                    llvm_backend_get_code_ptr(module),
+                    llvm_backend_get_code_size(module),
+                )
+            };
+            let mut msm = ModuleStateMap {
+                local_functions: Default::default(),
+                total_size: code_size,
+            };
+            let mut map_record_idx: usize = 0;
+            for size_record in &map.stk_size_records {
+                for _ in 0..size_record.record_count {
+                    let map_record = &map.stk_map_records[map_record_idx];
+                    let map_entry = &stackmaps.entries[map_record_idx];
+                    assert_eq!(map_record.patchpoint_id, map_record_idx as u64);
+                    map_record_idx += 1;
+
+                    map_entry.populate_msm(
+                        code_ptr as usize,
+                        &map,
+                        size_record,
+                        map_record,
+                        &mut msm,
+                    );
+                }
+            }
         } else {
            eprintln!("WARNING: No stack map");
         }

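Note: MSM and FSM in the commit title are ModuleStateMap and FunctionStateMap from wasmer_runtime_core::state. The population loop above leans on one invariant: emit_stack_map (in the code generator below) gives every llvm.experimental.stackmap call an id equal to its index in StackmapRegistry::entries, and the loop assumes LLVM emits the StkMapRecords in that same order, flattened across the per-function StkSizeRecords, so a single running index pairs each record with its registry entry; the assert_eq! on patchpoint_id guards exactly that. A reduced, self-contained sketch of the traversal, with hypothetical trimmed-down record types standing in for the parser's:

    struct StkSizeRecord { record_count: u64 }
    struct StkMapRecord { patchpoint_id: u64 }
    struct RegistryEntry; // stands in for StackmapEntry (kind, counts, semantics, ...)

    fn pair_records(
        size_records: &[StkSizeRecord],
        map_records: &[StkMapRecord],
        entries: &[RegistryEntry],
    ) {
        let mut idx = 0usize;
        for size_record in size_records {
            // Records are flattened across functions, one group per size record.
            for _ in 0..size_record.record_count {
                let record = &map_records[idx];
                let _entry = &entries[idx];
                // The intrinsic id was the registry index, so a mismatch means
                // the record stream and the registry have diverged.
                assert_eq!(record.patchpoint_id, idx as u64);
                idx += 1;
            }
        }
    }

    fn main() {
        let sizes = [StkSizeRecord { record_count: 2 }, StkSizeRecord { record_count: 1 }];
        let records = [
            StkMapRecord { patchpoint_id: 0 },
            StkMapRecord { patchpoint_id: 1 },
            StkMapRecord { patchpoint_id: 2 },
        ];
        let entries = [RegistryEntry, RegistryEntry, RegistryEntry];
        pair_records(&sizes, &records, &entries);
    }
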
@@ -27,7 +27,7 @@ use wasmparser::{BinaryReaderError, MemoryImmediate, Operator, Type as WpType};
 use crate::backend::LLVMBackend;
 use crate::intrinsics::{CtxType, GlobalCache, Intrinsics, MemoryCache};
 use crate::read_info::{blocktype_to_type, type_to_type};
-use crate::stackmap::{StackmapEntry, StackmapEntryKind, StackmapRegistry};
+use crate::stackmap::{StackmapEntry, StackmapEntryKind, StackmapRegistry, ValueSemantic};
 use crate::state::{ControlFrame, IfElseState, State};
 use crate::trampolines::generate_trampolines;

@@ -310,6 +310,7 @@ fn emit_stack_map(
     kind: StackmapEntryKind,
     locals: &[PointerValue],
     state: &State,
+    opcode_offset: usize,
 ) {
     let stackmap_id = target.entries.len();

@@ -324,9 +325,13 @@ fn emit_stack_map(
     params.push(intrinsics.i32_ty.const_int(0, false).as_basic_value_enum());

     let locals: Vec<_> = locals.iter().map(|x| x.as_basic_value_enum()).collect();
+    let mut value_semantics: Vec<ValueSemantic> = vec![];
+
     params.extend_from_slice(&locals);
+    value_semantics.extend((0..locals.len()).map(ValueSemantic::WasmLocal));

     params.extend_from_slice(&state.stack);
+    value_semantics.extend((0..state.stack.len()).map(ValueSemantic::WasmStack));

     builder.build_call(intrinsics.experimental_stackmap, &params, &state.var_name());

@@ -335,6 +340,8 @@ fn emit_stack_map(
         local_function_id,
         local_count: locals.len(),
         stack_count: state.stack.len(),
+        opcode_offset,
+        value_semantics,
     });
 }

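Note: value_semantics is built in lockstep with the live values appended to params after the id/shadow-bytes arguments (llvm.experimental.stackmap takes an i64 id, an i32 shadow-byte count, and then an arbitrary list of live values; the i32 constant pushed above is presumably the shadow-byte count). When LLVM later reports one Location per live value in the stack map record, locations[i] can therefore be interpreted through value_semantics[i]: locals first in index order, then the wasm value stack bottom-up. A self-contained sketch of that parallel construction, with the LLVM values left out:

    #[derive(Debug, Clone, Copy)]
    enum ValueSemantic {
        WasmLocal(usize),
        WasmStack(usize),
    }

    fn live_value_semantics(local_count: usize, stack_depth: usize) -> Vec<ValueSemantic> {
        let mut semantics = Vec::with_capacity(local_count + stack_depth);
        // Locals are recorded first, in index order...
        semantics.extend((0..local_count).map(ValueSemantic::WasmLocal));
        // ...followed by the operand stack, bottom to top.
        semantics.extend((0..stack_depth).map(ValueSemantic::WasmStack));
        semantics
    }

    fn main() {
        // Two locals and a one-deep value stack at the stackmap site:
        // [WasmLocal(0), WasmLocal(1), WasmStack(0)]
        println!("{:?}", live_value_semantics(2, 1));
    }
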
@@ -371,6 +378,7 @@ pub struct LLVMFunctionCodeGenerator {
     unreachable_depth: usize,
     stackmaps: Rc<RefCell<StackmapRegistry>>,
     index: usize,
+    opcode_offset: usize,
 }

 impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
@@ -443,8 +451,13 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
     }

     fn feed_event(&mut self, event: Event, module_info: &ModuleInfo) -> Result<(), CodegenError> {
+        let mut opcode_offset: Option<usize> = None;
         let op = match event {
-            Event::Wasm(x) => x,
+            Event::Wasm(x) => {
+                opcode_offset = Some(self.opcode_offset);
+                self.opcode_offset += 1;
+                x
+            }
             Event::Internal(_x) => {
                 return Ok(());
             }
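Note: the Option keeps opcode offsets aligned with wasmparser's operator stream: only Event::Wasm advances the per-function counter, while backend-internal events leave opcode_offset as None, so no stackmap entry is recorded for a position that has no wasm opcode behind it. The same pattern in isolation, with placeholder event payloads:

    enum Event<'a> {
        Wasm(&'a str),     // stands in for a wasmparser operator
        Internal(&'a str), // backend-generated, no wasm opcode behind it
    }

    struct OpcodeCounter {
        opcode_offset: usize,
    }

    impl OpcodeCounter {
        fn feed(&mut self, event: &Event) -> Option<usize> {
            match event {
                Event::Wasm(_) => {
                    let offset = self.opcode_offset;
                    self.opcode_offset += 1;
                    Some(offset)
                }
                Event::Internal(_) => None,
            }
        }
    }

    fn main() {
        let mut c = OpcodeCounter { opcode_offset: 0 };
        let events = [Event::Wasm("loop"), Event::Internal("breakpoint"), Event::Wasm("call")];
        let offsets: Vec<Option<usize>> = events.iter().map(|e| c.feed(e)).collect();
        println!("{:?}", offsets); // [Some(0), None, Some(1)]
    }
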
@@ -530,7 +543,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {

                 builder.position_at_end(&loop_body);

-                {
+                if let Some(offset) = opcode_offset {
                     let mut stackmaps = self.stackmaps.borrow_mut();
                     emit_stack_map(
                         intrinsics,
@@ -540,6 +553,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                         StackmapEntryKind::Loop,
                         &self.locals,
                         state,
+                        offset,
                     )
                 }

@@ -804,7 +818,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                 // If llvm cannot prove that this is never touched,
                 // it will emit a `ud2` instruction on x86_64 arches.

-                {
+                if let Some(offset) = opcode_offset {
                     let mut stackmaps = self.stackmaps.borrow_mut();
                     emit_stack_map(
                         intrinsics,
@@ -814,6 +828,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                         StackmapEntryKind::Trappable,
                         &self.locals,
                         state,
+                        offset,
                     )
                 }

@@ -954,7 +969,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                 };

                 state.popn(func_sig.params().len())?;
-                {
+                if let Some(offset) = opcode_offset {
                     let mut stackmaps = self.stackmaps.borrow_mut();
                     emit_stack_map(
                         intrinsics,
@@ -964,6 +979,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                         StackmapEntryKind::Call,
                         &self.locals,
                         state,
+                        offset,
                     )
                 }
                 let call_site = builder.build_call(func_ptr, &params, &state.var_name());
@@ -1131,7 +1147,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                     "typed_func_ptr",
                 );

-                {
+                if let Some(offset) = opcode_offset {
                     let mut stackmaps = self.stackmaps.borrow_mut();
                     emit_stack_map(
                         intrinsics,
@@ -1141,6 +1157,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                         StackmapEntryKind::Call,
                         &self.locals,
                         state,
+                        offset,
                     )
                 }
                 let call_site = builder.build_call(typed_func_ptr, &args, "indirect_call");
@@ -2565,6 +2582,20 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>

         let local_func_index = self.functions.len();

+        {
+            let mut stackmaps = self.stackmaps.borrow_mut();
+            emit_stack_map(
+                &intrinsics,
+                &builder,
+                local_func_index,
+                &mut *stackmaps,
+                StackmapEntryKind::FunctionHeader,
+                &locals,
+                &state,
+                ::std::usize::MAX,
+            );
+        }
+
         let code = LLVMFunctionCodeGenerator {
             state,
             context: Some(context),
@@ -2579,6 +2610,7 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>
             unreachable_depth: 0,
             stackmaps: self.stackmaps.clone(),
             index: local_func_index,
+            opcode_offset: 0,
         };
         self.functions.push(code);
         Ok(self.functions.last_mut().unwrap())

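Note: besides the per-opcode sites (Loop, Trappable, Call), every local function also gets a StackmapEntryKind::FunctionHeader entry emitted before any operator is fed; its opcode offset is ::std::usize::MAX because there is no operator index to attach it to, and populate_msm below treats that kind specially (it seeds fsm.locals and sets wasm_function_header_target_offset instead of one of the per-offset tables). A tiny sketch of what the sentinel encodes, shown only as a more explicit spelling of the same idea, not as code from the repository:

    // The header entry's opcode offset is a sentinel, not a real operator index.
    #[derive(Debug, Clone, Copy)]
    enum SitePosition {
        FunctionHeader,  // emitted at function entry (usize::MAX in the diff)
        AtOpcode(usize), // emitted for the operator at this offset
    }

    fn from_raw_offset(raw: usize) -> SitePosition {
        if raw == usize::MAX {
            SitePosition::FunctionHeader
        } else {
            SitePosition::AtOpcode(raw)
        }
    }

    fn main() {
        println!("{:?}", from_raw_offset(usize::MAX)); // FunctionHeader
        println!("{:?}", from_raw_offset(7));          // AtOpcode(7)
    }
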
@@ -1,7 +1,13 @@
 // https://llvm.org/docs/StackMaps.html#stackmap-section

 use byteorder::{LittleEndian, ReadBytesExt};
+use std::collections::HashMap;
 use std::io::{self, Cursor};
+use wasmer_runtime_core::state::{
+    x64::{new_machine_state, X64Register, GPR},
+    FunctionStateMap, MachineStateDiff, MachineValue, ModuleStateMap, OffsetInfo, RegisterIndex,
+    SuspendOffset, WasmAbstractValue,
+};

 #[derive(Default, Debug, Clone)]
 pub struct StackmapRegistry {
@@ -12,17 +18,231 @@ pub struct StackmapRegistry {
 pub struct StackmapEntry {
     pub kind: StackmapEntryKind,
     pub local_function_id: usize,
+    pub opcode_offset: usize,
+    pub value_semantics: Vec<ValueSemantic>,
     pub local_count: usize,
     pub stack_count: usize,
 }

+#[derive(Debug, Clone)]
+pub enum ValueSemantic {
+    WasmLocal(usize),
+    WasmStack(usize),
+}
+
 #[derive(Debug, Clone, Copy)]
 pub enum StackmapEntryKind {
+    FunctionHeader,
     Loop,
     Call,
     Trappable,
 }

+/*
+pub struct FunctionStateMap {
+    pub initial: MachineState,
+    pub local_function_id: usize,
+    pub locals: Vec<WasmAbstractValue>,
+    pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
+    pub diffs: Vec<MachineStateDiff>,
+    pub wasm_function_header_target_offset: Option<SuspendOffset>,
+    pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
+    pub loop_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
+    pub call_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
+    pub trappable_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
+}
+pub struct MachineStateDiff {
+    pub last: Option<usize>,
+    pub stack_push: Vec<MachineValue>,
+    pub stack_pop: usize,
+    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,
+
+    pub wasm_stack_push: Vec<WasmAbstractValue>,
+    pub wasm_stack_pop: usize,
+    pub wasm_stack_private_depth: usize, // absolute value; not a diff.
+
+    pub wasm_inst_offset: usize, // absolute value; not a diff.
+}
+*/
+
+impl StackmapEntry {
+    pub fn populate_msm(
+        &self,
+        code_addr: usize,
+        llvm_map: &StackMap,
+        size_record: &StkSizeRecord,
+        map_record: &StkMapRecord,
+        msm: &mut ModuleStateMap,
+    ) {
+        #[derive(Copy, Clone, Debug)]
+        enum RuntimeOrConstant {
+            Runtime(MachineValue),
+            Constant(u64),
+        }
+
+        let fsm = msm
+            .local_functions
+            .entry(self.local_function_id)
+            .or_insert_with(|| {
+                FunctionStateMap::new(new_machine_state(), self.local_function_id, 0, vec![])
+            });
+
+        assert_eq!(self.value_semantics.len(), map_record.locations.len());
+        assert!(size_record.stack_size % 8 == 0);
+
+        let mut machine_stack_layout: Vec<MachineValue> =
+            vec![MachineValue::Undefined; (size_record.stack_size as usize) / 8];
+        let mut regs: Vec<(RegisterIndex, MachineValue)> = vec![];
+        let mut stack_constants: HashMap<usize, u64> = HashMap::new();

+        let mut wasm_locals: Vec<WasmAbstractValue> = vec![];
+        let mut wasm_stack: Vec<WasmAbstractValue> = vec![];
+
+        for (i, loc) in map_record.locations.iter().enumerate() {
+            let mv = match self.value_semantics[i] {
+                ValueSemantic::WasmLocal(x) => {
+                    if x != wasm_locals.len() {
+                        panic!("unordered local values");
+                    }
+                    wasm_locals.push(WasmAbstractValue::Runtime);
+                    MachineValue::WasmLocal(x)
+                }
+                ValueSemantic::WasmStack(x) => {
+                    if x != wasm_stack.len() {
+                        panic!("unordered stack values");
+                    }
+                    wasm_stack.push(WasmAbstractValue::Runtime);
+                    MachineValue::WasmStack(x)
+                }
+            };
+            match loc.ty {
+                LocationType::Register => {
+                    let index = X64Register::from_dwarf_regnum(loc.dwarf_regnum)
+                        .expect("invalid regnum")
+                        .to_index();
+                    regs.push((index, mv));
+                }
+                LocationType::Constant => {
+                    let v = loc.offset_or_small_constant as u32 as u64;
+                    match mv {
+                        MachineValue::WasmStack(x) => {
+                            stack_constants.insert(x, v);
+                            *wasm_stack.last_mut().unwrap() = WasmAbstractValue::Const(v);
+                        }
+                        _ => {} // TODO
+                    }
+                }
+                LocationType::ConstantIndex => {
+                    let v =
+                        llvm_map.constants[loc.offset_or_small_constant as usize].large_constant;
+                    match mv {
+                        MachineValue::WasmStack(x) => {
+                            stack_constants.insert(x, v);
+                            *wasm_stack.last_mut().unwrap() = WasmAbstractValue::Const(v);
+                        }
+                        _ => {} // TODO
+                    }
+                }
+                LocationType::Direct => match mv {
+                    MachineValue::WasmLocal(_) => {
+                        assert_eq!(loc.location_size, 8);
+                        assert!(loc.offset_or_small_constant < 0);
+                        assert!(
+                            X64Register::from_dwarf_regnum(loc.dwarf_regnum).unwrap()
+                                == X64Register::GPR(GPR::RBP)
+                        );
+                        let stack_offset = ((-loc.offset_or_small_constant) % 8) as usize;
+                        assert!(stack_offset > 0 && stack_offset <= machine_stack_layout.len());
+                        machine_stack_layout[stack_offset - 1] = mv;
+                    }
+                    _ => unreachable!(
+                        "Direct location type is not expected for values other than local"
+                    ),
+                },
+                LocationType::Indirect => {
+                    assert_eq!(loc.location_size, 8);
+                    assert!(loc.offset_or_small_constant < 0);
+                    assert!(
+                        X64Register::from_dwarf_regnum(loc.dwarf_regnum).unwrap()
+                            == X64Register::GPR(GPR::RBP)
+                    );
+                    let stack_offset = ((-loc.offset_or_small_constant) % 8) as usize;
+                    assert!(stack_offset > 0 && stack_offset <= machine_stack_layout.len());
+                    machine_stack_layout[stack_offset - 1] = mv;
+                }
+            }
+        }
+
+        assert_eq!(wasm_stack.len(), self.stack_count);
+        assert_eq!(wasm_locals.len(), self.local_count);
+
+        let diff = MachineStateDiff {
+            last: None,
+            stack_push: machine_stack_layout,
+            stack_pop: 0,
+            reg_diff: regs,
+            wasm_stack_push: wasm_stack,
+            wasm_stack_pop: 0,
+            wasm_stack_private_depth: 0,
+            wasm_inst_offset: self.opcode_offset,
+        };
+        let diff_id = fsm.diffs.len();
+        fsm.diffs.push(diff);
+
+        match self.kind {
+            StackmapEntryKind::FunctionHeader => {
+                fsm.locals = wasm_locals;
+            }
+            _ => {
+                assert_eq!(fsm.locals, wasm_locals);
+            }
+        }
+        let target_offset = (size_record.function_address as usize)
+            .checked_sub(code_addr)
+            .unwrap()
+            + map_record.instruction_offset as usize;
+
+        match self.kind {
+            StackmapEntryKind::Loop => {
+                fsm.wasm_offset_to_target_offset
+                    .insert(self.opcode_offset, SuspendOffset::Loop(target_offset));
+                fsm.loop_offsets.insert(
+                    target_offset,
+                    OffsetInfo {
+                        diff_id,
+                        activate_offset: target_offset,
+                    },
+                );
+            }
+            StackmapEntryKind::Call => {
+                fsm.wasm_offset_to_target_offset
+                    .insert(self.opcode_offset, SuspendOffset::Call(target_offset));
+                fsm.call_offsets.insert(
+                    target_offset,
+                    OffsetInfo {
+                        diff_id,
+                        activate_offset: target_offset,
+                    },
+                );
+            }
+            StackmapEntryKind::Trappable => {
+                fsm.wasm_offset_to_target_offset
+                    .insert(self.opcode_offset, SuspendOffset::Trappable(target_offset));
+                fsm.trappable_offsets.insert(
+                    target_offset,
+                    OffsetInfo {
+                        diff_id,
+                        activate_offset: target_offset,
+                    },
+                );
+            }
+            StackmapEntryKind::FunctionHeader => {
+                fsm.wasm_function_header_target_offset = Some(SuspendOffset::Loop(target_offset));
+            }
+        }
+    }
+}
+
 #[derive(Clone, Debug, Default)]
 pub struct StackMap {
     pub version: u8,

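Note: the loc.ty match in populate_msm follows the Location encoding of the LLVM stack map format: Register (value live in a register), Direct (the value is the frame address reg + offset itself, which is why it is only accepted for locals here), Indirect (value spilled at [reg + offset]), Constant (small constant inline in the offset field), and ConstantIndex (index into the section's large-constant table). A hedged decoding sketch using the numeric tags from the LLVM documentation; the in-repo LocationType representation may differ:

    // Location kinds as documented for LLVM stack map format version 3
    // (https://llvm.org/docs/StackMaps.html#stack-map-format).
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum LocationType {
        Register,      // 0x1: value lives in the named DWARF register
        Direct,        // 0x2: value is reg + offset itself (a frame slot address)
        Indirect,      // 0x3: value is spilled and loaded from [reg + offset]
        Constant,      // 0x4: small constant stored inline in the offset field
        ConstantIndex, // 0x5: index into the stack map's large-constant table
    }

    fn location_type(kind: u8) -> Option<LocationType> {
        Some(match kind {
            0x1 => LocationType::Register,
            0x2 => LocationType::Direct,
            0x3 => LocationType::Indirect,
            0x4 => LocationType::Constant,
            0x5 => LocationType::ConstantIndex,
            _ => return None,
        })
    }

    fn main() {
        assert_eq!(location_type(0x3), Some(LocationType::Indirect));
    }
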
@@ -842,6 +842,7 @@ pub mod x64 {
         XMM7,
     }

+    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
     pub enum X64Register {
         GPR(GPR),
         XMM(XMM),
@@ -854,5 +855,36 @@ pub mod x64 {
                 X64Register::XMM(x) => RegisterIndex(x as usize + 16),
             }
         }
+
+        pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
+            Some(match x {
+                0 => X64Register::GPR(GPR::RAX),
+                1 => X64Register::GPR(GPR::RDX),
+                2 => X64Register::GPR(GPR::RCX),
+                3 => X64Register::GPR(GPR::RBX),
+                4 => X64Register::GPR(GPR::RSI),
+                5 => X64Register::GPR(GPR::RDI),
+                6 => X64Register::GPR(GPR::RBP),
+                7 => X64Register::GPR(GPR::RSP),
+                8 => X64Register::GPR(GPR::R8),
+                9 => X64Register::GPR(GPR::R9),
+                10 => X64Register::GPR(GPR::R10),
+                11 => X64Register::GPR(GPR::R11),
+                12 => X64Register::GPR(GPR::R12),
+                13 => X64Register::GPR(GPR::R13),
+                14 => X64Register::GPR(GPR::R14),
+                15 => X64Register::GPR(GPR::R15),
+
+                17 => X64Register::XMM(XMM::XMM0),
+                18 => X64Register::XMM(XMM::XMM1),
+                19 => X64Register::XMM(XMM::XMM2),
+                20 => X64Register::XMM(XMM::XMM3),
+                21 => X64Register::XMM(XMM::XMM4),
+                22 => X64Register::XMM(XMM::XMM5),
+                23 => X64Register::XMM(XMM::XMM6),
+                24 => X64Register::XMM(XMM::XMM7),
+                _ => return None,
+            })
+        }
     }
 }

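Note: the regnum table above follows the System V x86-64 DWARF register numbering (0=RAX, 1=RDX, 2=RCX, 3=RBX, 4=RSI, 5=RDI, 6=RBP, 7=RSP, 8-15=R8-R15; 16 is the return-address column, which is why the XMM block starts at 17). A short usage sketch, assuming wasmer_runtime_core as a dependency and the paths shown in the diff:

    use wasmer_runtime_core::state::x64::{X64Register, GPR};

    fn main() {
        // DWARF regnum 6 is RBP, which the Direct/Indirect branches of
        // populate_msm assert as the base register for frame slots.
        assert_eq!(
            X64Register::from_dwarf_regnum(6),
            Some(X64Register::GPR(GPR::RBP))
        );
        // Numbers outside the mapped ranges (e.g. 16, the return address)
        // come back as None.
        assert!(X64Register::from_dwarf_regnum(16).is_none());
    }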