Populating LLVM stack maps into MSM/FSM.
@@ -244,7 +244,7 @@ impl LLVMBackend {
    pub fn new(
        module: Module,
        _intrinsics: Intrinsics,
        _stackmaps: &StackmapRegistry,
        stackmaps: &StackmapRegistry,
    ) -> (Self, LLVMCache) {
        Target::initialize_x86(&InitializationConfig {
            asm_parser: true,
@@ -303,8 +303,36 @@ impl LLVMBackend {
            )
        };
        if raw_stackmap.len() > 0 {
            let map = stackmap::StackMap::parse(raw_stackmap);
            let map = stackmap::StackMap::parse(raw_stackmap).unwrap();
            eprintln!("{:?}", map);

            let (code_ptr, code_size) = unsafe {
                (
                    llvm_backend_get_code_ptr(module),
                    llvm_backend_get_code_size(module),
                )
            };
            let mut msm = ModuleStateMap {
                local_functions: Default::default(),
                total_size: code_size,
            };
            let mut map_record_idx: usize = 0;
            for size_record in &map.stk_size_records {
                for _ in 0..size_record.record_count {
                    let map_record = &map.stk_map_records[map_record_idx];
                    let map_entry = &stackmaps.entries[map_record_idx];
                    assert_eq!(map_record.patchpoint_id, map_record_idx as u64);
                    map_record_idx += 1;

                    map_entry.populate_msm(
                        code_ptr as usize,
                        &map,
                        size_record,
                        map_record,
                        &mut msm,
                    );
                }
            }
        } else {
            eprintln!("WARNING: No stack map");
        }
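
The population loop above relies on how version 3 of the LLVM stackmap format groups its records: StkMapRecords are emitted in function order, and each function's StkSizeRecord carries a record count saying how many of the following map records belong to it, so a single running index pairs every map record with both its size record and the StackmapEntry registered at IR-emission time. A minimal, self-contained sketch of that pairing; StkSizeRecord and StkMapRecord here are hypothetical stand-ins for whatever this crate's parser actually exposes:

// Sketch, not part of the commit: hypothetical stand-ins for the parsed
// stackmap records, showing why one running index is enough to pair them.
struct StkSizeRecord {
    function_address: u64,
    stack_size: u64,
    record_count: u64,
}

struct StkMapRecord {
    patchpoint_id: u64,
    instruction_offset: u32,
}

// Pairs each StkMapRecord with the StkSizeRecord of the function it belongs to.
fn pair_records<'a>(
    stk_size_records: &'a [StkSizeRecord],
    stk_map_records: &'a [StkMapRecord],
) -> Vec<(&'a StkSizeRecord, &'a StkMapRecord)> {
    let mut pairs = Vec::new();
    let mut map_record_idx = 0;
    for size_record in stk_size_records {
        // Each function owns the next `record_count` map records, in order.
        for _ in 0..size_record.record_count {
            pairs.push((size_record, &stk_map_records[map_record_idx]));
            map_record_idx += 1;
        }
    }
    // Every map record must be claimed by exactly one function.
    assert_eq!(map_record_idx, stk_map_records.len());
    pairs
}

fn main() {
    let sizes = [StkSizeRecord { function_address: 0x1000, stack_size: 32, record_count: 2 }];
    let maps = [
        StkMapRecord { patchpoint_id: 0, instruction_offset: 4 },
        StkMapRecord { patchpoint_id: 1, instruction_offset: 20 },
    ];
    assert_eq!(pair_records(&sizes, &maps).len(), 2);
}

The assert_eq! on patchpoint_id in the hunk works for the same reason: emit_stack_map below uses target.entries.len() as the stackmap id, so ids are assigned in exactly the order the records are later emitted.
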
@@ -27,7 +27,7 @@ use wasmparser::{BinaryReaderError, MemoryImmediate, Operator, Type as WpType};
use crate::backend::LLVMBackend;
use crate::intrinsics::{CtxType, GlobalCache, Intrinsics, MemoryCache};
use crate::read_info::{blocktype_to_type, type_to_type};
use crate::stackmap::{StackmapEntry, StackmapEntryKind, StackmapRegistry};
use crate::stackmap::{StackmapEntry, StackmapEntryKind, StackmapRegistry, ValueSemantic};
use crate::state::{ControlFrame, IfElseState, State};
use crate::trampolines::generate_trampolines;

@@ -310,6 +310,7 @@ fn emit_stack_map(
    kind: StackmapEntryKind,
    locals: &[PointerValue],
    state: &State,
    opcode_offset: usize,
) {
    let stackmap_id = target.entries.len();

@@ -324,9 +325,13 @@ fn emit_stack_map(
    params.push(intrinsics.i32_ty.const_int(0, false).as_basic_value_enum());

    let locals: Vec<_> = locals.iter().map(|x| x.as_basic_value_enum()).collect();
    let mut value_semantics: Vec<ValueSemantic> = vec![];

    params.extend_from_slice(&locals);
    value_semantics.extend((0..locals.len()).map(ValueSemantic::WasmLocal));

    params.extend_from_slice(&state.stack);
    value_semantics.extend((0..state.stack.len()).map(ValueSemantic::WasmStack));

    builder.build_call(intrinsics.experimental_stackmap, &params, &state.var_name());

@@ -335,6 +340,8 @@ fn emit_stack_map(
        local_function_id,
        local_count: locals.len(),
        stack_count: state.stack.len(),
        opcode_offset,
        value_semantics,
    });
}

@@ -371,6 +378,7 @@ pub struct LLVMFunctionCodeGenerator {
    unreachable_depth: usize,
    stackmaps: Rc<RefCell<StackmapRegistry>>,
    index: usize,
    opcode_offset: usize,
}

impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
@@ -443,8 +451,13 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
    }

    fn feed_event(&mut self, event: Event, module_info: &ModuleInfo) -> Result<(), CodegenError> {
        let mut opcode_offset: Option<usize> = None;
        let op = match event {
            Event::Wasm(x) => x,
            Event::Wasm(x) => {
                opcode_offset = Some(self.opcode_offset);
                self.opcode_offset += 1;
                x
            }
            Event::Internal(_x) => {
                return Ok(());
            }
@@ -530,7 +543,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {

                builder.position_at_end(&loop_body);

                {
                if let Some(offset) = opcode_offset {
                    let mut stackmaps = self.stackmaps.borrow_mut();
                    emit_stack_map(
                        intrinsics,
@@ -540,6 +553,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                        StackmapEntryKind::Loop,
                        &self.locals,
                        state,
                        offset,
                    )
                }

@@ -804,7 +818,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                // If llvm cannot prove that this is never touched,
                // it will emit a `ud2` instruction on x86_64 arches.

                {
                if let Some(offset) = opcode_offset {
                    let mut stackmaps = self.stackmaps.borrow_mut();
                    emit_stack_map(
                        intrinsics,
@@ -814,6 +828,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                        StackmapEntryKind::Trappable,
                        &self.locals,
                        state,
                        offset,
                    )
                }

@@ -954,7 +969,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                };

                state.popn(func_sig.params().len())?;
                {
                if let Some(offset) = opcode_offset {
                    let mut stackmaps = self.stackmaps.borrow_mut();
                    emit_stack_map(
                        intrinsics,
@@ -964,6 +979,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                        StackmapEntryKind::Call,
                        &self.locals,
                        state,
                        offset,
                    )
                }
                let call_site = builder.build_call(func_ptr, &params, &state.var_name());
@@ -1131,7 +1147,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                    "typed_func_ptr",
                );

                {
                if let Some(offset) = opcode_offset {
                    let mut stackmaps = self.stackmaps.borrow_mut();
                    emit_stack_map(
                        intrinsics,
@@ -1141,6 +1157,7 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                        StackmapEntryKind::Call,
                        &self.locals,
                        state,
                        offset,
                    )
                }
                let call_site = builder.build_call(typed_func_ptr, &args, "indirect_call");
@@ -2565,6 +2582,20 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>

        let local_func_index = self.functions.len();

        {
            let mut stackmaps = self.stackmaps.borrow_mut();
            emit_stack_map(
                &intrinsics,
                &builder,
                local_func_index,
                &mut *stackmaps,
                StackmapEntryKind::FunctionHeader,
                &locals,
                &state,
                ::std::usize::MAX,
            );
        }

        let code = LLVMFunctionCodeGenerator {
            state,
            context: Some(context),
@@ -2579,6 +2610,7 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>
            unreachable_depth: 0,
            stackmaps: self.stackmaps.clone(),
            index: local_func_index,
            opcode_offset: 0,
        };
        self.functions.push(code);
        Ok(self.functions.last_mut().unwrap())
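
In the emit_stack_map changes above, every live value passed to the llvm.experimental.stackmap intrinsic (after the two fixed operands, the i64 stackmap id and the i32 shadow-byte count) now gets a matching ValueSemantic describing whether it is a wasm local or a wasm stack slot. The scheme depends on the two sequences staying index-aligned, since LLVM records one Location per live operand and populate_msm later pairs value_semantics[i] with locations[i]. A minimal sketch of that invariant, with LiveValue as a hypothetical placeholder for an LLVM SSA value:

// Sketch, not part of the commit: the i-th semantic must describe the
// i-th live operand, because LLVM emits one Location per live operand
// and populate_msm pairs value_semantics[i] with locations[i].
#[derive(Debug, Clone, Copy)]
enum ValueSemantic {
    WasmLocal(usize),
    WasmStack(usize),
}

#[derive(Debug, Clone, Copy)]
struct LiveValue(u64); // placeholder for an LLVM SSA value

fn collect_live_values(
    locals: &[LiveValue],
    stack: &[LiveValue],
) -> (Vec<LiveValue>, Vec<ValueSemantic>) {
    let mut values = Vec::new();
    let mut semantics = Vec::new();

    values.extend_from_slice(locals);
    semantics.extend((0..locals.len()).map(ValueSemantic::WasmLocal));

    values.extend_from_slice(stack);
    semantics.extend((0..stack.len()).map(ValueSemantic::WasmStack));

    // One semantic per live value, in the same order.
    assert_eq!(values.len(), semantics.len());
    (values, semantics)
}

fn main() {
    let locals = [LiveValue(1), LiveValue(2)];
    let stack = [LiveValue(3)];
    let (values, semantics) = collect_live_values(&locals, &stack);
    println!("{} live values, semantics = {:?}", values.len(), semantics);
}
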
@@ -1,7 +1,13 @@
// https://llvm.org/docs/StackMaps.html#stackmap-section

use byteorder::{LittleEndian, ReadBytesExt};
use std::collections::HashMap;
use std::io::{self, Cursor};
use wasmer_runtime_core::state::{
    x64::{new_machine_state, X64Register, GPR},
    FunctionStateMap, MachineStateDiff, MachineValue, ModuleStateMap, OffsetInfo, RegisterIndex,
    SuspendOffset, WasmAbstractValue,
};

#[derive(Default, Debug, Clone)]
pub struct StackmapRegistry {
@@ -12,17 +18,231 @@ pub struct StackmapRegistry {
pub struct StackmapEntry {
    pub kind: StackmapEntryKind,
    pub local_function_id: usize,
    pub opcode_offset: usize,
    pub value_semantics: Vec<ValueSemantic>,
    pub local_count: usize,
    pub stack_count: usize,
}

#[derive(Debug, Clone)]
pub enum ValueSemantic {
    WasmLocal(usize),
    WasmStack(usize),
}

#[derive(Debug, Clone, Copy)]
pub enum StackmapEntryKind {
    FunctionHeader,
    Loop,
    Call,
    Trappable,
}

/*
pub struct FunctionStateMap {
    pub initial: MachineState,
    pub local_function_id: usize,
    pub locals: Vec<WasmAbstractValue>,
    pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
    pub diffs: Vec<MachineStateDiff>,
    pub wasm_function_header_target_offset: Option<SuspendOffset>,
    pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
    pub loop_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
    pub call_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
    pub trappable_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
}
pub struct MachineStateDiff {
    pub last: Option<usize>,
    pub stack_push: Vec<MachineValue>,
    pub stack_pop: usize,
    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,

    pub wasm_stack_push: Vec<WasmAbstractValue>,
    pub wasm_stack_pop: usize,
    pub wasm_stack_private_depth: usize, // absolute value; not a diff.

    pub wasm_inst_offset: usize, // absolute value; not a diff.
}
*/

impl StackmapEntry {
    pub fn populate_msm(
        &self,
        code_addr: usize,
        llvm_map: &StackMap,
        size_record: &StkSizeRecord,
        map_record: &StkMapRecord,
        msm: &mut ModuleStateMap,
    ) {
        #[derive(Copy, Clone, Debug)]
        enum RuntimeOrConstant {
            Runtime(MachineValue),
            Constant(u64),
        }

        let fsm = msm
            .local_functions
            .entry(self.local_function_id)
            .or_insert_with(|| {
                FunctionStateMap::new(new_machine_state(), self.local_function_id, 0, vec![])
            });

        assert_eq!(self.value_semantics.len(), map_record.locations.len());
        assert!(size_record.stack_size % 8 == 0);

        let mut machine_stack_layout: Vec<MachineValue> =
            vec![MachineValue::Undefined; (size_record.stack_size as usize) / 8];
        let mut regs: Vec<(RegisterIndex, MachineValue)> = vec![];
        let mut stack_constants: HashMap<usize, u64> = HashMap::new();

        let mut wasm_locals: Vec<WasmAbstractValue> = vec![];
        let mut wasm_stack: Vec<WasmAbstractValue> = vec![];

        for (i, loc) in map_record.locations.iter().enumerate() {
            let mv = match self.value_semantics[i] {
                ValueSemantic::WasmLocal(x) => {
                    if x != wasm_locals.len() {
                        panic!("unordered local values");
                    }
                    wasm_locals.push(WasmAbstractValue::Runtime);
                    MachineValue::WasmLocal(x)
                }
                ValueSemantic::WasmStack(x) => {
                    if x != wasm_stack.len() {
                        panic!("unordered stack values");
                    }
                    wasm_stack.push(WasmAbstractValue::Runtime);
                    MachineValue::WasmStack(x)
                }
            };
            match loc.ty {
                LocationType::Register => {
                    let index = X64Register::from_dwarf_regnum(loc.dwarf_regnum)
                        .expect("invalid regnum")
                        .to_index();
                    regs.push((index, mv));
                }
                LocationType::Constant => {
                    let v = loc.offset_or_small_constant as u32 as u64;
                    match mv {
                        MachineValue::WasmStack(x) => {
                            stack_constants.insert(x, v);
                            *wasm_stack.last_mut().unwrap() = WasmAbstractValue::Const(v);
                        }
                        _ => {} // TODO
                    }
                }
                LocationType::ConstantIndex => {
                    let v =
                        llvm_map.constants[loc.offset_or_small_constant as usize].large_constant;
                    match mv {
                        MachineValue::WasmStack(x) => {
                            stack_constants.insert(x, v);
                            *wasm_stack.last_mut().unwrap() = WasmAbstractValue::Const(v);
                        }
                        _ => {} // TODO
                    }
                }
                LocationType::Direct => match mv {
                    MachineValue::WasmLocal(_) => {
                        assert_eq!(loc.location_size, 8);
                        assert!(loc.offset_or_small_constant < 0);
                        assert!(
                            X64Register::from_dwarf_regnum(loc.dwarf_regnum).unwrap()
                                == X64Register::GPR(GPR::RBP)
                        );
                        let stack_offset = ((-loc.offset_or_small_constant) % 8) as usize;
                        assert!(stack_offset > 0 && stack_offset <= machine_stack_layout.len());
                        machine_stack_layout[stack_offset - 1] = mv;
                    }
                    _ => unreachable!(
                        "Direct location type is not expected for values other than local"
                    ),
                },
                LocationType::Indirect => {
                    assert_eq!(loc.location_size, 8);
                    assert!(loc.offset_or_small_constant < 0);
                    assert!(
                        X64Register::from_dwarf_regnum(loc.dwarf_regnum).unwrap()
                            == X64Register::GPR(GPR::RBP)
                    );
                    let stack_offset = ((-loc.offset_or_small_constant) % 8) as usize;
                    assert!(stack_offset > 0 && stack_offset <= machine_stack_layout.len());
                    machine_stack_layout[stack_offset - 1] = mv;
                }
            }
        }

        assert_eq!(wasm_stack.len(), self.stack_count);
        assert_eq!(wasm_locals.len(), self.local_count);

        let diff = MachineStateDiff {
            last: None,
            stack_push: machine_stack_layout,
            stack_pop: 0,
            reg_diff: regs,
            wasm_stack_push: wasm_stack,
            wasm_stack_pop: 0,
            wasm_stack_private_depth: 0,
            wasm_inst_offset: self.opcode_offset,
        };
        let diff_id = fsm.diffs.len();
        fsm.diffs.push(diff);

        match self.kind {
            StackmapEntryKind::FunctionHeader => {
                fsm.locals = wasm_locals;
            }
            _ => {
                assert_eq!(fsm.locals, wasm_locals);
            }
        }
        let target_offset = (size_record.function_address as usize)
            .checked_sub(code_addr)
            .unwrap()
            + map_record.instruction_offset as usize;

        match self.kind {
            StackmapEntryKind::Loop => {
                fsm.wasm_offset_to_target_offset
                    .insert(self.opcode_offset, SuspendOffset::Loop(target_offset));
                fsm.loop_offsets.insert(
                    target_offset,
                    OffsetInfo {
                        diff_id,
                        activate_offset: target_offset,
                    },
                );
            }
            StackmapEntryKind::Call => {
                fsm.wasm_offset_to_target_offset
                    .insert(self.opcode_offset, SuspendOffset::Call(target_offset));
                fsm.call_offsets.insert(
                    target_offset,
                    OffsetInfo {
                        diff_id,
                        activate_offset: target_offset,
                    },
                );
            }
            StackmapEntryKind::Trappable => {
                fsm.wasm_offset_to_target_offset
                    .insert(self.opcode_offset, SuspendOffset::Trappable(target_offset));
                fsm.trappable_offsets.insert(
                    target_offset,
                    OffsetInfo {
                        diff_id,
                        activate_offset: target_offset,
                    },
                );
            }
            StackmapEntryKind::FunctionHeader => {
                fsm.wasm_function_header_target_offset = Some(SuspendOffset::Loop(target_offset));
            }
        }
    }
}
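
populate_msm above matches on loc.ty variants (Register, Direct, Indirect, Constant, ConstantIndex) that mirror the one-byte location type tags of the stackmap format; the crate's own LocationType and location structs fall outside the hunks shown here. A small illustrative decoder, assuming the numeric tags documented for format version 3; parse_location_type is a hypothetical helper, not this crate's API:

// Sketch, not part of the commit: the one-byte location type tags from the
// LLVM stackmap format (version 3), as consumed by populate_msm above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LocationType {
    Register,      // value lives in the given DWARF register
    Direct,        // register + offset is the value's address (an RBP-relative spill slot here)
    Indirect,      // value must be loaded from [register + offset]
    Constant,      // small constant stored inline in the record
    ConstantIndex, // index into the stackmap's large-constant table
}

// Hypothetical helper mapping the documented tag values to the enum.
pub fn parse_location_type(tag: u8) -> Option<LocationType> {
    Some(match tag {
        0x1 => LocationType::Register,
        0x2 => LocationType::Direct,
        0x3 => LocationType::Indirect,
        0x4 => LocationType::Constant,
        0x5 => LocationType::ConstantIndex,
        _ => return None,
    })
}

fn main() {
    assert_eq!(parse_location_type(0x3), Some(LocationType::Indirect));
    assert_eq!(parse_location_type(0x9), None);
}
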
#[derive(Clone, Debug, Default)]
pub struct StackMap {
    pub version: u8,
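
stackmap::StackMap::parse, called from the backend hunk above, consumes the raw bytes of LLVM's stackmap section, which begin with a fixed header. A minimal sketch of reading that header with the same byteorder and Cursor imports this file pulls in; StackMapHeader and read_header are illustrative names rather than the crate's own API, and the field layout is the version-3 layout from https://llvm.org/docs/StackMaps.html (little-endian, matching the x86-64 target this backend emits for):

// Sketch, not part of the commit: reading the version-3 stackmap header.
// StackMapHeader and read_header are illustrative, not this crate's API.
use byteorder::{LittleEndian, ReadBytesExt};
use std::io::{self, Cursor};

#[derive(Debug)]
struct StackMapHeader {
    version: u8,
    num_functions: u32,
    num_constants: u32,
    num_records: u32,
}

fn read_header(raw: &[u8]) -> io::Result<StackMapHeader> {
    let mut reader = Cursor::new(raw);
    let version = reader.read_u8()?; // expected to be 3
    let _reserved8 = reader.read_u8()?; // reserved, expected 0
    let _reserved16 = reader.read_u16::<LittleEndian>()?; // reserved, expected 0
    Ok(StackMapHeader {
        version,
        num_functions: reader.read_u32::<LittleEndian>()?,
        num_constants: reader.read_u32::<LittleEndian>()?,
        num_records: reader.read_u32::<LittleEndian>()?,
    })
}

fn main() -> io::Result<()> {
    // A 16-byte header: version 3, one function, no constants, two records.
    let raw = [3u8, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0];
    let header = read_header(&raw)?;
    assert_eq!(header.version, 3);
    assert_eq!(header.num_functions, 1);
    println!("{:?}", header);
    Ok(())
}

After the header come num_functions size records (three u64 values each), num_constants large constants, and num_records map records, which is where the stk_size_records and stk_map_records walked in the backend hunk come from.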