Improve stack trace collection and rendering.

This commit is contained in:
losfair
2019-06-25 03:55:33 +08:00
parent 73eb04d269
commit fb7c3eee8a
6 changed files with 377 additions and 136 deletions

View File

@ -24,7 +24,7 @@ use wasmer_runtime_core::{
module::{ModuleInfo, ModuleInner},
state::{
x64::new_machine_state, x64::X64Register, FunctionStateMap, MachineState, MachineStateDiff,
MachineValue, ModuleStateMap,
MachineValue, ModuleStateMap, WasmAbstractValue,
},
structures::{Map, TypedIndex},
typed_func::Wasm,
@ -141,23 +141,12 @@ enum LocalOrTemp {
Temp,
}
#[derive(Copy, Clone, Debug)]
pub struct InstMetadata {
offset: usize,
special: Option<(SpecialInst, usize /* state_diff_id */)>,
}
#[derive(Copy, Clone, Debug)]
pub enum SpecialInst {
Loop, /* header state */
Call { mid_offset: usize }, /* inside state */
}
pub struct X64FunctionCode {
local_function_id: usize,
signatures: Arc<Map<SigIndex, FuncSig>>,
function_signatures: Arc<Map<FuncIndex, SigIndex>>,
fsm: FunctionStateMap,
inst_metadata: Vec<InstMetadata>,
offset: usize,
assembler: Option<Assembler>,
@ -382,10 +371,11 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
//; int 3
);
let code = X64FunctionCode {
local_function_id: self.functions.len(),
signatures: self.signatures.as_ref().unwrap().clone(),
function_signatures: self.function_signatures.as_ref().unwrap().clone(),
fsm: FunctionStateMap::new(new_machine_state(), 32),
inst_metadata: vec![],
fsm: FunctionStateMap::new(new_machine_state(), self.functions.len(), 32, vec![]), // only a placeholder; this is initialized later in `begin_body`
offset: begin_offset.0,
assembler: Some(assembler),
@ -550,23 +540,35 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
}
impl X64FunctionCode {
/// Records the current assembler offset as a trappable location.
///
/// A fresh machine-state diff is taken first and associated with the offset in
/// `fsm.trappable_offsets`, so that the runtime can reconstruct the abstract
/// wasm state if a trap is raised at this instruction.
fn mark_trappable(a: &mut Assembler, m: &Machine, fsm: &mut FunctionStateMap, control_stack: &mut [ControlFrame]) {
    let diff_id = Self::get_state_diff(m, fsm, control_stack);
    let trap_offset = a.get_offset().0;
    fsm.trappable_offsets.insert(trap_offset, diff_id);
}
/// Moves `loc` to a valid location for `div`/`idiv`.
fn emit_relaxed_xdiv(
a: &mut Assembler,
_m: &mut Machine,
m: &mut Machine,
op: fn(&mut Assembler, Size, Location),
sz: Size,
loc: Location,
fsm: &mut FunctionStateMap,
control_stack: &mut [ControlFrame],
) {
m.state.wasm_stack_private_depth += 1;
match loc {
Location::Imm64(_) | Location::Imm32(_) => {
a.emit_mov(sz, loc, Location::GPR(GPR::RCX)); // must not be used during div (rax, rdx)
Self::mark_trappable(a, m, fsm, control_stack);
op(a, sz, Location::GPR(GPR::RCX));
}
_ => {
Self::mark_trappable(a, m, fsm, control_stack);
op(a, sz, loc);
}
}
m.state.wasm_stack_private_depth -= 1;
}
/// Moves `src` and `dst` to valid locations for `movzx`/`movsx`.
@ -1141,7 +1143,7 @@ impl X64FunctionCode {
m: &mut Machine,
cb: F,
params: I,
state_context: Option<(&mut FunctionStateMap, &mut InstMetadata, &[ControlFrame])>,
state_context: Option<(&mut FunctionStateMap, &mut [ControlFrame])>,
) {
// Values pushed in this function are above the shadow region.
m.state.stack_values.push(MachineValue::ExplicitShadow);
@ -1295,10 +1297,9 @@ impl X64FunctionCode {
cb(a);
// Offset needs to be after the 'call' instruction.
if let Some((fsm, inst_metadata, control_stack)) = state_context {
if let Some((fsm, control_stack)) = state_context {
let state_diff_id = Self::get_state_diff(m, fsm, control_stack);
let offset = a.get_offset().0;
inst_metadata.special = Some((SpecialInst::Call { mid_offset: offset }, state_diff_id));
fsm.call_offsets.insert(offset, state_diff_id);
}
@ -1354,7 +1355,7 @@ impl X64FunctionCode {
m: &mut Machine,
label: DynamicLabel,
params: I,
state_context: Option<(&mut FunctionStateMap, &mut InstMetadata, &[ControlFrame])>,
state_context: Option<(&mut FunctionStateMap, &mut [ControlFrame])>,
) {
Self::emit_call_sysv(a, m, |a| a.emit_call_label(label), params, state_context)
}
@ -1554,12 +1555,14 @@ impl X64FunctionCode {
pub fn get_state_diff(
m: &Machine,
fsm: &mut FunctionStateMap,
control_stack: &[ControlFrame],
control_stack: &mut [ControlFrame],
) -> usize {
let last_frame = control_stack.last().unwrap();
let last_frame = control_stack.last_mut().unwrap();
let mut diff = m.state.diff(&last_frame.state);
diff.last = Some(last_frame.state_diff_id);
let id = fsm.diffs.len();
last_frame.state = m.state.clone();
last_frame.state_diff_id = id;
fsm.diffs.push(diff);
id
}
@ -1604,6 +1607,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
.machine
.init_locals(a, self.num_locals, self.num_params);
self.fsm = FunctionStateMap::new(new_machine_state(), self.local_function_id, 32, (0..self.locals.len()).map(|_| WasmAbstractValue::Runtime).collect());
let diff = self.machine.state.diff(&new_machine_state());
let state_diff_id = self.fsm.diffs.len();
self.fsm.diffs.push(diff);
@ -1749,10 +1754,6 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
}
};
let mut inst_metadata = InstMetadata {
offset: a.get_offset().0,
special: None,
};
match *op {
Operator::GetGlobal { global_index } => {
let global_index = global_index as usize;
@ -1868,6 +1869,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
let local_index = local_index as usize;
self.value_stack
.push((self.locals[local_index], LocalOrTemp::Local));
self.machine.state.wasm_stack.push(WasmAbstractValue::Runtime);
}
Operator::SetLocal { local_index } => {
let local_index = local_index as usize;
@ -1896,9 +1898,12 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
self.locals[local_index],
);
}
Operator::I32Const { value } => self
Operator::I32Const { value } => {
self
.value_stack
.push((Location::Imm32(value as u32), LocalOrTemp::Temp)),
.push((Location::Imm32(value as u32), LocalOrTemp::Temp));
self.machine.state.wasm_stack.push(WasmAbstractValue::Const(value as u32 as u64));
},
Operator::I32Add => Self::emit_binop_i32(
a,
&mut self.machine,
@ -1936,6 +1941,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_div,
Size::S32,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S32, Location::GPR(GPR::RAX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -1959,6 +1966,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_idiv,
Size::S32,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S32, Location::GPR(GPR::RAX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -1982,6 +1991,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_div,
Size::S32,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S32, Location::GPR(GPR::RDX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -2031,6 +2042,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_idiv,
Size::S32,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S32, Location::GPR(GPR::RDX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -2171,6 +2184,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
let value = value as u64;
self.value_stack
.push((Location::Imm64(value), LocalOrTemp::Temp));
self.machine.state.wasm_stack.push(WasmAbstractValue::Const(value));
}
Operator::I64Add => Self::emit_binop_i64(
a,
@ -2209,6 +2223,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_div,
Size::S64,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S64, Location::GPR(GPR::RAX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -2232,6 +2248,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_idiv,
Size::S64,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S64, Location::GPR(GPR::RAX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -2255,6 +2273,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_div,
Size::S64,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S64, Location::GPR(GPR::RDX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -2305,6 +2325,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Assembler::emit_idiv,
Size::S64,
loc_b,
&mut self.fsm,
&mut self.control_stack,
);
a.emit_mov(Size::S64, Location::GPR(GPR::RDX), ret);
self.value_stack.push((ret, LocalOrTemp::Temp));
@ -2496,9 +2518,12 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
);
}
Operator::F32Const { value } => self
Operator::F32Const { value } => {
self
.value_stack
.push((Location::Imm32(value.bits()), LocalOrTemp::Temp)),
.push((Location::Imm32(value.bits()), LocalOrTemp::Temp));
self.machine.state.wasm_stack.push(WasmAbstractValue::Const(value.bits() as u64));
},
Operator::F32Add => Self::emit_fp_binop_avx(
a,
&mut self.machine,
@ -2670,9 +2695,12 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
self.machine.release_temp_gpr(tmp);
}
Operator::F64Const { value } => self
Operator::F64Const { value } => {
self
.value_stack
.push((Location::Imm64(value.bits()), LocalOrTemp::Temp)),
.push((Location::Imm64(value.bits()), LocalOrTemp::Temp));
self.machine.state.wasm_stack.push(WasmAbstractValue::Const(value.bits()));
},
Operator::F64Add => Self::emit_fp_binop_avx(
a,
&mut self.machine,
@ -3509,14 +3537,15 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
.collect();
self.machine.release_locations_only_regs(&released);
self.machine.release_locations_only_osr_state(params.len());
Self::emit_call_sysv_label(
a,
&mut self.machine,
label,
params.iter().map(|&(x, _)| x),
Some((&mut self.fsm, &mut inst_metadata, &self.control_stack)),
Some((&mut self.fsm, &mut self.control_stack)),
);
assert!(inst_metadata.special.is_some());
self.machine.release_locations_only_stack(a, &released);
@ -3626,6 +3655,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
);
}
self.machine.release_locations_only_osr_state(params.len());
Self::emit_call_sysv(
a,
&mut self.machine,
@ -3636,9 +3667,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
));
},
params.iter().map(|&(x, _)| x),
Some((&mut self.fsm, &mut inst_metadata, &self.control_stack)),
Some((&mut self.fsm, &mut self.control_stack)),
);
assert!(inst_metadata.special.is_some());
self.machine.release_locations_only_stack(a, &released);
@ -3662,7 +3692,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
let cond =
get_location_released(a, &mut self.machine, self.value_stack.pop().unwrap());
self.control_stack.push(ControlFrame {
let frame = ControlFrame {
label: label_end,
loop_like: false,
if_else: IfElseState::If(label_else),
@ -3675,9 +3705,10 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
state_diff_id: Self::get_state_diff(
&self.machine,
&mut self.fsm,
&self.control_stack,
&mut self.control_stack,
),
});
};
self.control_stack.push(frame);
Self::emit_relaxed_binop(
a,
&mut self.machine,
@ -3771,7 +3802,7 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
a.emit_label(end_label);
}
Operator::Block { ty } => {
self.control_stack.push(ControlFrame {
let frame = ControlFrame {
label: a.get_label(),
loop_like: false,
if_else: IfElseState::None,
@ -3784,14 +3815,15 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
state_diff_id: Self::get_state_diff(
&self.machine,
&mut self.fsm,
&self.control_stack,
&mut self.control_stack,
),
});
};
self.control_stack.push(frame);
}
Operator::Loop { ty } => {
let label = a.get_label();
let state_diff_id =
Self::get_state_diff(&self.machine, &mut self.fsm, &self.control_stack);
Self::get_state_diff(&self.machine, &mut self.fsm, &mut self.control_stack);
self.control_stack.push(ControlFrame {
label: label,
loop_like: true,
@ -3804,7 +3836,6 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
state: self.machine.state.clone(),
state_diff_id,
});
inst_metadata.special = Some((SpecialInst::Loop, state_diff_id));
self.fsm
.loop_offsets
.insert(a.get_offset().0, state_diff_id);
@ -3865,6 +3896,8 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
Location::GPR(GPR::RAX),
);
self.machine.release_locations_only_osr_state(1);
Self::emit_call_sysv(
a,
&mut self.machine,
@ -4558,6 +4591,10 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
);
}
Operator::Unreachable => {
let state_diff_id = Self::get_state_diff(&self.machine, &mut self.fsm, &mut self.control_stack);
let offset = a.get_offset().0;
self.fsm.trappable_offsets.insert(offset, state_diff_id);
a.emit_ud2();
self.unreachable_depth = 1;
}
@ -4760,8 +4797,6 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
}
}
self.inst_metadata.push(inst_metadata);
Ok(())
}
}
@ -4782,6 +4817,8 @@ fn get_location_released(
) -> Location {
if lot == LocalOrTemp::Temp {
m.release_locations(a, &[loc]);
} else {
m.state.wasm_stack.pop().unwrap();
}
loc
}
@ -4796,4 +4833,38 @@ fn sort_call_movs(movs: &mut [(Location, GPR)]) {
}
}
}
/*
{
use std::collections::{HashMap, HashSet, VecDeque};
let mut mov_map: HashMap<GPR, HashSet<GPR>> = HashMap::new();
for mov in movs.iter() {
if let Location::GPR(src_gpr) = mov.0 {
if src_gpr != mov.1 {
mov_map.entry(src_gpr).or_insert_with(|| HashSet::new()).insert(mov.1);
}
}
}
for (start, _) in mov_map.iter() {
let mut q: VecDeque<GPR> = VecDeque::new();
let mut black: HashSet<GPR> = HashSet::new();
q.push_back(*start);
black.insert(*start);
while q.len() > 0 {
let reg = q.pop_front().unwrap();
let empty_set = HashSet::new();
for x in mov_map.get(&reg).unwrap_or(&empty_set).iter() {
if black.contains(x) {
panic!("cycle detected");
}
q.push_back(*x);
black.insert(*x);
}
}
}
}
*/
}

View File

@ -162,6 +162,7 @@ impl Machine {
} else {
self.state.stack_values.push(*mv);
}
self.state.wasm_stack.push(WasmAbstractValue::Runtime);
ret.push(loc);
}
@ -210,6 +211,7 @@ impl Machine {
}
_ => {}
}
self.state.wasm_stack.pop().unwrap();
}
if delta_stack_offset != 0 {
@ -236,6 +238,7 @@ impl Machine {
}
_ => {}
}
// Wasm state popping is deferred to `release_locations_only_osr_state`.
}
}
@ -262,6 +265,7 @@ impl Machine {
}
_ => {}
}
// Wasm state popping is deferred to `release_locations_only_osr_state`.
}
if delta_stack_offset != 0 {
@ -273,6 +277,15 @@ impl Machine {
}
}
/// Pops `n` abstract values from the wasm value-stack state.
///
/// The `release_locations_only_*` helpers above deliberately defer this
/// popping (see their "popping is deferred" notes), so callers invoke this
/// once the OSR-relevant state has been captured.
///
/// Panics if fewer than `n` values are currently on `state.wasm_stack`.
pub fn release_locations_only_osr_state(&mut self, n: usize) {
    (0..n).for_each(|_| {
        self.state.wasm_stack.pop().unwrap();
    });
}
pub fn release_locations_keep_state<E: Emitter>(&self, assembler: &mut E, locs: &[Location]) {
let mut delta_stack_offset: usize = 0;
let mut stack_offset = self.stack_offset.0;

View File

@ -22,6 +22,39 @@ use std::sync::Arc;
use std::sync::Once;
use wasmer_runtime_core::codegen::BkptInfo;
use wasmer_runtime_core::typed_func::WasmTrapInfo;
use wasmer_runtime_core::state::x64::{X64Register, GPR, read_stack};
use wasmer_runtime_core::vm;
/// Concatenates the strings yielded by `x`, inserting `sep` between
/// consecutive items (never before the first or after the last).
///
/// Returns an empty `String` for an empty iterator.
fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
    // `slice::join` already implements the "separator only between items"
    // logic; prefer it over a hand-rolled first-item flag.
    x.collect::<Vec<String>>().join(sep)
}
/// Renders a slice of optionally-known `u64` values (e.g. recovered locals or
/// stack slots) as a human-readable, comma-separated list.
///
/// Unknown values are shown as `?`; each value is colorized bold cyan.
/// An empty slice renders as `(empty)`.
fn format_optional_u64_sequence(x: &[Option<u64>]) -> String {
    use colored::*;
    if x.is_empty() {
        return "(empty)".into();
    }
    let rendered = x.iter().enumerate().map(|(idx, item)| {
        let value_text = match item {
            Some(v) => format!("{}", v),
            None => "?".to_string(),
        };
        format!("[{}] = {}", idx, value_text.bold().cyan())
    });
    join_strings(rendered, ", ")
}
extern "C" fn signal_trap_handler(
signum: ::nix::libc::c_int,
@ -29,12 +62,13 @@ extern "C" fn signal_trap_handler(
ucontext: *mut c_void,
) {
unsafe {
let fault = get_fault_info(siginfo as _, ucontext);
match Signal::from_c_int(signum) {
Ok(SIGTRAP) => {
let (_, ip) = get_faulting_addr_and_ip(siginfo as _, ucontext);
let bkpt_map = BKPT_MAP.with(|x| x.borrow().last().map(|x| x.clone()));
if let Some(bkpt_map) = bkpt_map {
if let Some(ref x) = bkpt_map.get(&(ip as usize)) {
if let Some(ref x) = bkpt_map.get(&(fault.ip as usize)) {
(x)(BkptInfo { throw: throw });
return;
}
@ -43,6 +77,32 @@ extern "C" fn signal_trap_handler(
_ => {}
}
// TODO: make this safer
let ctx = &*(fault.known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap() as *mut vm::Ctx);
let rsp = fault.known_registers[X64Register::GPR(GPR::RSP).to_index().0].unwrap();
let msm = (*ctx.module)
.runnable_module
.get_module_state_map()
.unwrap();
let code_base = (*ctx.module).runnable_module.get_code().unwrap().as_ptr() as usize;
let frames = self::read_stack(&msm, code_base, rsp as usize as *const u64, fault.known_registers, Some(fault.ip as usize as u64));
use colored::*;
eprintln!("\n{}\n", "Wasmer encountered an error while running your WebAssembly program.".bold().red());
if frames.len() == 0 {
eprintln!("{}", "Unknown fault address, cannot read stack.".yellow());
} else {
use colored::*;
eprintln!("{}\n", "Backtrace:".bold());
for (i, f) in frames.iter().enumerate() {
eprintln!("{}", format!("* Frame {} @ Local function {}", i, f.local_function_id).bold());
eprintln!(" {} {}", "Locals:".bold().yellow(), format_optional_u64_sequence(&f.locals));
eprintln!(" {} {}", "Stack:".bold().yellow(), format_optional_u64_sequence(&f.stack));
eprintln!("");
}
}
do_unwind(signum, siginfo as _, ucontext);
}
}
@ -70,7 +130,7 @@ pub static SIGHANDLER_INIT: Once = Once::new();
thread_local! {
pub static SETJMP_BUFFER: UnsafeCell<[c_int; SETJMP_BUFFER_LEN]> = UnsafeCell::new([0; SETJMP_BUFFER_LEN]);
pub static CAUGHT_ADDRESSES: Cell<(*const c_void, *const c_void)> = Cell::new((ptr::null(), ptr::null()));
pub static CAUGHT_FAULTS: Cell<Option<FaultInfo>> = Cell::new(None);
pub static CURRENT_EXECUTABLE_BUFFER: Cell<*const c_void> = Cell::new(ptr::null());
pub static TRAP_EARLY_DATA: Cell<Option<Box<dyn Any>>> = Cell::new(None);
pub static BKPT_MAP: RefCell<Vec<Arc<HashMap<usize, Box<Fn(BkptInfo) + Send + Sync + 'static>>>>> = RefCell::new(Vec::new());
@ -148,11 +208,17 @@ pub unsafe fn do_unwind(signum: i32, siginfo: *const c_void, ucontext: *const c_
::std::process::abort();
}
CAUGHT_ADDRESSES.with(|cell| cell.set(get_faulting_addr_and_ip(siginfo, ucontext)));
CAUGHT_FAULTS.with(|cell| cell.set(Some(get_fault_info(siginfo, ucontext))));
longjmp(jmp_buf as *mut ::nix::libc::c_void, signum)
}
/// State captured at the site of a hardware fault (signal), used later to
/// reconstruct a wasm-level stack trace.
pub struct FaultInfo {
    // Faulting memory address (`si_addr` from the platform siginfo).
    faulting_addr: *const c_void,
    // Instruction pointer at the moment of the fault.
    ip: *const c_void,
    // Register snapshot indexed by `X64Register::to_index()`; `None` means the
    // register's value was not recovered. 24 slots — presumably 16 GPRs plus 8
    // more registers; confirm against `X64Register`'s index layout.
    known_registers: [Option<u64>; 24],
}
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
unsafe fn get_faulting_addr_and_ip(
siginfo: *const c_void,
@ -180,10 +246,10 @@ unsafe fn get_faulting_addr_and_ip(
}
#[cfg(all(target_os = "macos", target_arch = "x86_64"))]
unsafe fn get_faulting_addr_and_ip(
unsafe fn get_fault_info(
siginfo: *const c_void,
ucontext: *const c_void,
) -> (*const c_void, *const c_void) {
) -> FaultInfo {
#[allow(dead_code)]
#[repr(C)]
struct ucontext_t {
@ -237,7 +303,31 @@ unsafe fn get_faulting_addr_and_ip(
let si_addr = (*siginfo).si_addr;
let ucontext = ucontext as *const ucontext_t;
let rip = (*(*ucontext).uc_mcontext).ss.rip;
let ss = &(*(*ucontext).uc_mcontext).ss;
(si_addr, rip as _)
let mut known_registers: [Option<u64>; 24] = [None; 24];
known_registers[X64Register::GPR(GPR::R15).to_index().0] = Some(ss.r15);
known_registers[X64Register::GPR(GPR::R14).to_index().0] = Some(ss.r14);
known_registers[X64Register::GPR(GPR::R13).to_index().0] = Some(ss.r13);
known_registers[X64Register::GPR(GPR::R12).to_index().0] = Some(ss.r12);
known_registers[X64Register::GPR(GPR::R11).to_index().0] = Some(ss.r11);
known_registers[X64Register::GPR(GPR::R10).to_index().0] = Some(ss.r10);
known_registers[X64Register::GPR(GPR::R9).to_index().0] = Some(ss.r9);
known_registers[X64Register::GPR(GPR::R8).to_index().0] = Some(ss.r8);
known_registers[X64Register::GPR(GPR::RSI).to_index().0] = Some(ss.rsi);
known_registers[X64Register::GPR(GPR::RDI).to_index().0] = Some(ss.rdi);
known_registers[X64Register::GPR(GPR::RDX).to_index().0] = Some(ss.rdx);
known_registers[X64Register::GPR(GPR::RCX).to_index().0] = Some(ss.rcx);
known_registers[X64Register::GPR(GPR::RBX).to_index().0] = Some(ss.rbx);
known_registers[X64Register::GPR(GPR::RAX).to_index().0] = Some(ss.rax);
known_registers[X64Register::GPR(GPR::RBP).to_index().0] = Some(ss.rbp);
known_registers[X64Register::GPR(GPR::RSP).to_index().0] = Some(ss.rsp);
FaultInfo {
faulting_addr: si_addr,
ip: ss.rip as _,
known_registers,
}
}