Add headless runtime tooling and Campaign.win analysis

This commit is contained in:
Jan Petykiewicz 2026-04-10 01:22:47 -07:00
commit 27172e3786
37 changed files with 11867 additions and 302 deletions

View file

@ -5,7 +5,9 @@ edition.workspace = true
license.workspace = true
[dependencies]
rrt-fixtures = { path = "../rrt-fixtures" }
rrt-model = { path = "../rrt-model" }
rrt-runtime = { path = "../rrt-runtime" }
serde.workspace = true
serde_json.workspace = true
sha2.workspace = true

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,10 @@
# Fixture tooling crate: loads, validates, and diffs runtime fixtures.
[package]
name = "rrt-fixtures"
version.workspace = true
edition.workspace = true
license.workspace = true
# Depends on the runtime crate for state/snapshot types; serde/serde_json for
# the fixture JSON format.
[dependencies]
rrt-runtime = { path = "../rrt-runtime" }
serde.workspace = true
serde_json.workspace = true

View file

@ -0,0 +1,82 @@
use std::collections::BTreeSet;
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// A single point of divergence between two JSON documents.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct JsonDiffEntry {
    /// `$`-rooted path to the differing value (e.g. `$.a.b`, `$.items[0]`).
    pub path: String,
    /// Value on the left side; `Null` when the path is absent on the left.
    pub left: Value,
    /// Value on the right side; `Null` when the path is absent on the right.
    pub right: Value,
}
/// Compute a flat list of differences between two JSON values.
///
/// Paths in the returned entries use `$`-rooted dotted/indexed notation
/// (e.g. `$.a.b`, `$.items[2]`). An empty vector means the values are equal.
pub fn diff_json_values(left: &Value, right: &Value) -> Vec<JsonDiffEntry> {
    let mut entries = Vec::new();
    collect_json_differences("$", left, right, &mut entries);
    entries
}
/// Recursive worker for [`diff_json_values`].
///
/// Objects are compared key-by-key (keys visited in sorted order via a
/// `BTreeSet` union), arrays index-by-index up to the longer length, and all
/// other value pairs are compared wholesale. A key/index present on only one
/// side is reported with `Null` standing in for the missing side.
fn collect_json_differences(
    path: &str,
    left: &Value,
    right: &Value,
    differences: &mut Vec<JsonDiffEntry>,
) {
    match (left, right) {
        (Value::Object(lhs), Value::Object(rhs)) => {
            // Union of both key sets, iterated in sorted order.
            let keys: BTreeSet<String> = lhs.keys().chain(rhs.keys()).cloned().collect();
            for key in keys {
                let child_path = format!("{path}.{key}");
                let lhs_child = lhs.get(&key);
                let rhs_child = rhs.get(&key);
                if let (Some(lv), Some(rv)) = (lhs_child, rhs_child) {
                    collect_json_differences(&child_path, lv, rv, differences);
                } else {
                    differences.push(JsonDiffEntry {
                        path: child_path,
                        left: lhs_child.cloned().unwrap_or(Value::Null),
                        right: rhs_child.cloned().unwrap_or(Value::Null),
                    });
                }
            }
        }
        (Value::Array(lhs), Value::Array(rhs)) => {
            for index in 0..lhs.len().max(rhs.len()) {
                let child_path = format!("{path}[{index}]");
                let lhs_child = lhs.get(index);
                let rhs_child = rhs.get(index);
                if let (Some(lv), Some(rv)) = (lhs_child, rhs_child) {
                    collect_json_differences(&child_path, lv, rv, differences);
                } else {
                    differences.push(JsonDiffEntry {
                        path: child_path,
                        left: lhs_child.cloned().unwrap_or(Value::Null),
                        right: rhs_child.cloned().unwrap_or(Value::Null),
                    });
                }
            }
        }
        (lhs, rhs) if lhs != rhs => differences.push(JsonDiffEntry {
            path: path.to_string(),
            left: lhs.clone(),
            right: rhs.clone(),
        }),
        _ => {}
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A single nested scalar change should yield exactly one entry with the
    // full `$`-rooted path.
    #[test]
    fn diffs_nested_json_values() {
        let left = serde_json::json!({ "a": { "b": 1 } });
        let right = serde_json::json!({ "a": { "b": 2 } });
        let diff = diff_json_values(&left, &right);
        assert_eq!(diff.len(), 1);
        assert_eq!(diff[0].path, "$.a.b");
    }
}

View file

@ -0,0 +1,12 @@
//! Fixture tooling: JSON diffing, fixture document loading/resolution, state
//! normalization, and the fixture schema with its validation report.
pub mod diff;
pub mod load;
pub mod normalize;
pub mod schema;
pub use diff::{JsonDiffEntry, diff_json_values};
pub use load::{load_fixture_document, load_fixture_document_from_str};
pub use normalize::normalize_runtime_state;
pub use schema::{
    ExpectedRuntimeSummary, FIXTURE_FORMAT_VERSION, FixtureDocument, FixtureSource,
    FixtureStateOrigin, FixtureValidationReport, RawFixtureDocument, validate_fixture_document,
};

View file

@ -0,0 +1,162 @@
use std::path::{Path, PathBuf};
use rrt_runtime::{load_runtime_snapshot_document, validate_runtime_snapshot_document};
use crate::{FixtureDocument, FixtureStateOrigin, RawFixtureDocument};
/// Load a fixture document from `path`.
///
/// A `state_snapshot_path` inside the fixture is resolved relative to the
/// fixture file's own directory.
pub fn load_fixture_document(path: &Path) -> Result<FixtureDocument, Box<dyn std::error::Error>> {
    let base_dir = path.parent().unwrap_or_else(|| Path::new("."));
    let text = std::fs::read_to_string(path)?;
    load_fixture_document_from_str_with_base(&text, base_dir)
}
pub fn load_fixture_document_from_str(
text: &str,
) -> Result<FixtureDocument, Box<dyn std::error::Error>> {
load_fixture_document_from_str_with_base(text, Path::new("."))
}
/// Parse fixture JSON and resolve any snapshot reference against `base_dir`.
pub fn load_fixture_document_from_str_with_base(
    text: &str,
    base_dir: &Path,
) -> Result<FixtureDocument, Box<dyn std::error::Error>> {
    let raw = serde_json::from_str::<RawFixtureDocument>(text)?;
    resolve_raw_fixture_document(raw, base_dir)
}
/// Turn a parsed raw fixture into a fully-resolved `FixtureDocument`.
///
/// Exactly one of inline `state` and `state_snapshot_path` must be present; a
/// referenced snapshot is loaded from disk, validated, and inlined.
fn resolve_raw_fixture_document(
    raw: RawFixtureDocument,
    base_dir: &Path,
) -> Result<FixtureDocument, Box<dyn std::error::Error>> {
    let state = if raw.state.is_some() && raw.state_snapshot_path.is_some() {
        return Err("fixture must not specify both inline state and state_snapshot_path".into());
    } else if let Some(inline_state) = &raw.state {
        inline_state.clone()
    } else if let Some(relative) = &raw.state_snapshot_path {
        let snapshot_path = resolve_snapshot_path(base_dir, relative);
        let snapshot = load_runtime_snapshot_document(&snapshot_path)?;
        // Refuse to inline a snapshot that fails its own validation.
        validate_runtime_snapshot_document(&snapshot).map_err(|err| {
            format!(
                "invalid runtime snapshot {}: {err}",
                snapshot_path.display()
            )
        })?;
        snapshot.state
    } else {
        return Err("fixture must specify either inline state or state_snapshot_path".into());
    };
    // Record where the state came from so tooling can report provenance.
    let state_origin = raw
        .state_snapshot_path
        .map(FixtureStateOrigin::SnapshotPath)
        .unwrap_or(FixtureStateOrigin::Inline);
    Ok(FixtureDocument {
        format_version: raw.format_version,
        fixture_id: raw.fixture_id,
        source: raw.source,
        state,
        state_origin,
        commands: raw.commands,
        expected_summary: raw.expected_summary,
    })
}
/// Resolve a snapshot reference: absolute paths pass through unchanged,
/// relative paths are joined onto `base_dir`.
fn resolve_snapshot_path(base_dir: &Path, snapshot_path: &str) -> PathBuf {
    let raw = Path::new(snapshot_path);
    if raw.is_absolute() {
        raw.to_path_buf()
    } else {
        base_dir.join(raw)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::FixtureStateOrigin;
    use rrt_runtime::{
        CalendarPoint, RuntimeServiceState, RuntimeSnapshotDocument, RuntimeSnapshotSource,
        RuntimeState, SNAPSHOT_FORMAT_VERSION, save_runtime_snapshot_document,
    };
    use std::collections::BTreeMap;
    // End-to-end check that a fixture referencing a snapshot by relative path
    // loads the snapshot from the fixture's base directory and inlines it.
    #[test]
    fn loads_fixture_from_relative_snapshot_path() {
        // Nanosecond nonce keeps concurrent test runs from colliding on the
        // same temp directory.
        let nonce = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("system time should be after epoch")
            .as_nanos();
        let fixture_dir = std::env::temp_dir().join(format!("rrt-fixture-load-{nonce}"));
        std::fs::create_dir_all(&fixture_dir).expect("fixture dir should be created");
        let snapshot_path = fixture_dir.join("state.json");
        let snapshot = RuntimeSnapshotDocument {
            format_version: SNAPSHOT_FORMAT_VERSION,
            snapshot_id: "snapshot-backed-fixture-state".to_string(),
            source: RuntimeSnapshotSource {
                source_fixture_id: Some("snapshot-backed-fixture".to_string()),
                description: Some("test snapshot".to_string()),
            },
            state: RuntimeState {
                calendar: CalendarPoint {
                    year: 1830,
                    month_slot: 0,
                    phase_slot: 0,
                    tick_slot: 5,
                },
                world_flags: BTreeMap::new(),
                companies: Vec::new(),
                event_runtime_records: Vec::new(),
                service_state: RuntimeServiceState::default(),
            },
        };
        save_runtime_snapshot_document(&snapshot_path, &snapshot).expect("snapshot should save");
        // The fixture carries no inline state; only the relative snapshot path.
        let fixture_json = r#"
{
  "format_version": 1,
  "fixture_id": "snapshot-backed-fixture",
  "source": {
    "kind": "captured-runtime"
  },
  "state_snapshot_path": "state.json",
  "commands": [
    {
      "kind": "step_count",
      "steps": 1
    }
  ],
  "expected_summary": {
    "calendar": {
      "year": 1830,
      "month_slot": 0,
      "phase_slot": 0,
      "tick_slot": 6
    },
    "world_flag_count": 0,
    "company_count": 0,
    "event_runtime_record_count": 0,
    "total_company_cash": 0
  }
}
"#;
        let fixture = load_fixture_document_from_str_with_base(fixture_json, &fixture_dir)
            .expect("snapshot-backed fixture should load");
        assert_eq!(
            fixture.state_origin,
            FixtureStateOrigin::SnapshotPath("state.json".to_string())
        );
        // The inlined state must match what was saved, not the expected_summary.
        assert_eq!(fixture.state.calendar.tick_slot, 5);
        // Best-effort cleanup; failures here must not fail the test.
        let _ = std::fs::remove_file(snapshot_path);
        let _ = std::fs::remove_dir(fixture_dir);
    }
}

View file

@ -0,0 +1,7 @@
use serde_json::Value;
use rrt_runtime::RuntimeState;
/// Serialize a runtime state into a canonical `serde_json::Value` tree,
/// suitable for structural diffing.
pub fn normalize_runtime_state(state: &RuntimeState) -> Result<Value, Box<dyn std::error::Error>> {
    let value = serde_json::to_value(state)?;
    Ok(value)
}

View file

@ -0,0 +1,277 @@
use serde::{Deserialize, Serialize};
use rrt_runtime::{RuntimeState, RuntimeSummary, StepCommand};
/// Fixture schema version accepted by this crate.
pub const FIXTURE_FORMAT_VERSION: u32 = 1;
/// Where a fixture came from (e.g. "synthetic", "captured-runtime").
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct FixtureSource {
    /// Free-form source category; validation requires it to be non-empty.
    pub kind: String,
    /// Optional human-readable note about the fixture's origin.
    #[serde(default)]
    pub description: Option<String>,
}
/// Optional expectations to check against an actual `RuntimeSummary`.
///
/// Every field is optional: `None` means "don't check this field". See
/// `ExpectedRuntimeSummary::compare` for how mismatches are reported.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct ExpectedRuntimeSummary {
    #[serde(default)]
    pub calendar: Option<rrt_runtime::CalendarPoint>,
    #[serde(default)]
    pub world_flag_count: Option<usize>,
    #[serde(default)]
    pub company_count: Option<usize>,
    #[serde(default)]
    pub event_runtime_record_count: Option<usize>,
    #[serde(default)]
    pub total_event_record_service_count: Option<u64>,
    #[serde(default)]
    pub periodic_boundary_call_count: Option<u64>,
    #[serde(default)]
    pub total_trigger_dispatch_count: Option<u64>,
    #[serde(default)]
    pub dirty_rerun_count: Option<u64>,
    #[serde(default)]
    pub total_company_cash: Option<i64>,
}
impl ExpectedRuntimeSummary {
    /// Compare these expectations against an actual runtime summary.
    ///
    /// Only fields that are `Some(..)` participate; `None` means "don't
    /// care". Returns one human-readable message per mismatching field, so an
    /// empty vector means the summary matched. Message text is stable:
    /// `"<field> mismatch: expected <e>, got <a>"`.
    pub fn compare(&self, actual: &RuntimeSummary) -> Vec<String> {
        // All count/total fields share the same message shape; this helper
        // replaces eight copy-pasted compare-and-format blocks.
        fn check<T: PartialEq + std::fmt::Display>(
            mismatches: &mut Vec<String>,
            field: &str,
            expected: Option<T>,
            actual: T,
        ) {
            if let Some(expected) = expected {
                if actual != expected {
                    mismatches
                        .push(format!("{field} mismatch: expected {expected}, got {actual}"));
                }
            }
        }

        let mut mismatches = Vec::new();
        // The calendar is a struct, reported with its Debug representation.
        if let Some(calendar) = self.calendar {
            if actual.calendar != calendar {
                mismatches.push(format!(
                    "calendar mismatch: expected {:?}, got {:?}",
                    calendar, actual.calendar
                ));
            }
        }
        check(
            &mut mismatches,
            "world_flag_count",
            self.world_flag_count,
            actual.world_flag_count,
        );
        check(
            &mut mismatches,
            "company_count",
            self.company_count,
            actual.company_count,
        );
        check(
            &mut mismatches,
            "event_runtime_record_count",
            self.event_runtime_record_count,
            actual.event_runtime_record_count,
        );
        check(
            &mut mismatches,
            "total_event_record_service_count",
            self.total_event_record_service_count,
            actual.total_event_record_service_count,
        );
        check(
            &mut mismatches,
            "periodic_boundary_call_count",
            self.periodic_boundary_call_count,
            actual.periodic_boundary_call_count,
        );
        check(
            &mut mismatches,
            "total_trigger_dispatch_count",
            self.total_trigger_dispatch_count,
            actual.total_trigger_dispatch_count,
        );
        check(
            &mut mismatches,
            "dirty_rerun_count",
            self.dirty_rerun_count,
            actual.dirty_rerun_count,
        );
        check(
            &mut mismatches,
            "total_company_cash",
            self.total_company_cash,
            actual.total_company_cash,
        );
        mismatches
    }
}
/// A fully-resolved fixture: the state has been inlined even when it was
/// originally referenced via a snapshot path (see `load` module).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct FixtureDocument {
    pub format_version: u32,
    pub fixture_id: String,
    #[serde(default)]
    pub source: FixtureSource,
    /// Resolved runtime state (inline or loaded from a snapshot).
    pub state: RuntimeState,
    /// Provenance of `state`.
    pub state_origin: FixtureStateOrigin,
    #[serde(default)]
    pub commands: Vec<StepCommand>,
    #[serde(default)]
    pub expected_summary: ExpectedRuntimeSummary,
}
/// Records whether fixture state was embedded inline or loaded from a
/// snapshot file.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum FixtureStateOrigin {
    /// State was embedded directly in the fixture JSON.
    Inline,
    /// State was loaded from the snapshot file at this (possibly relative) path.
    SnapshotPath(String),
}
/// On-disk fixture shape before resolution: state may be inline (`state`) or
/// external (`state_snapshot_path`), but the loader requires exactly one.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RawFixtureDocument {
    pub format_version: u32,
    pub fixture_id: String,
    #[serde(default)]
    pub source: FixtureSource,
    /// Inline runtime state, mutually exclusive with `state_snapshot_path`.
    #[serde(default)]
    pub state: Option<RuntimeState>,
    /// Path to an external snapshot, mutually exclusive with `state`.
    #[serde(default)]
    pub state_snapshot_path: Option<String>,
    #[serde(default)]
    pub commands: Vec<StepCommand>,
    #[serde(default)]
    pub expected_summary: ExpectedRuntimeSummary,
}
/// Outcome of `validate_fixture_document`: all issues found, plus a summary
/// flag and count.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct FixtureValidationReport {
    pub fixture_id: String,
    /// True iff `issues` is empty.
    pub valid: bool,
    /// Always equals `issues.len()`.
    pub issue_count: usize,
    pub issues: Vec<String>,
}
/// Run all structural checks on a resolved fixture and collect every issue
/// (format version, non-empty ids, at least one command, valid state, and
/// each command validated individually).
pub fn validate_fixture_document(document: &FixtureDocument) -> FixtureValidationReport {
    let mut problems: Vec<String> = Vec::new();
    if document.format_version != FIXTURE_FORMAT_VERSION {
        problems.push(format!(
            "unsupported format_version {} (expected {})",
            document.format_version, FIXTURE_FORMAT_VERSION
        ));
    }
    // Simple presence checks, in stable report order.
    for (is_bad, message) in [
        (
            document.fixture_id.trim().is_empty(),
            "fixture_id must not be empty",
        ),
        (
            document.source.kind.trim().is_empty(),
            "source.kind must not be empty",
        ),
        (
            document.commands.is_empty(),
            "fixture must contain at least one command",
        ),
    ] {
        if is_bad {
            problems.push(message.to_string());
        }
    }
    if let Err(err) = document.state.validate() {
        problems.push(format!("invalid runtime state: {err}"));
    }
    problems.extend(
        document
            .commands
            .iter()
            .enumerate()
            .filter_map(|(index, command)| {
                command
                    .validate()
                    .err()
                    .map(|err| format!("invalid command at index {index}: {err}"))
            }),
    );
    FixtureValidationReport {
        fixture_id: document.fixture_id.clone(),
        valid: problems.is_empty(),
        issue_count: problems.len(),
        issues: problems,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::load_fixture_document_from_str;
    // Minimal inline-state fixture used by both tests below. Its expected
    // calendar (tick_slot 2) deliberately differs from the initial state
    // (tick_slot 0) so `compares_expected_summary` has exactly one mismatch.
    const FIXTURE_JSON: &str = r#"
{
  "format_version": 1,
  "fixture_id": "minimal-world-step-smoke",
  "source": {
    "kind": "synthetic",
    "description": "basic milestone parser smoke fixture"
  },
  "state": {
    "calendar": {
      "year": 1830,
      "month_slot": 0,
      "phase_slot": 0,
      "tick_slot": 0
    },
    "world_flags": {
      "sandbox": false
    },
    "companies": [
      {
        "company_id": 1,
        "current_cash": 250000,
        "debt": 0
      }
    ],
    "event_runtime_records": [],
    "service_state": {
      "periodic_boundary_calls": 0,
      "trigger_dispatch_counts": {},
      "total_event_record_services": 0,
      "dirty_rerun_count": 0
    }
  },
  "commands": [
    {
      "kind": "advance_to",
      "calendar": {
        "year": 1830,
        "month_slot": 0,
        "phase_slot": 0,
        "tick_slot": 2
      }
    }
  ],
  "expected_summary": {
    "calendar": {
      "year": 1830,
      "month_slot": 0,
      "phase_slot": 0,
      "tick_slot": 2
    },
    "world_flag_count": 1,
    "company_count": 1,
    "event_runtime_record_count": 0,
    "total_company_cash": 250000
  }
}
"#;
    #[test]
    fn parses_and_validates_fixture() {
        let fixture = load_fixture_document_from_str(FIXTURE_JSON).expect("fixture should parse");
        let report = validate_fixture_document(&fixture);
        assert!(report.valid, "report should be valid: {:?}", report.issues);
        assert_eq!(fixture.state_origin, FixtureStateOrigin::Inline);
    }
    // Summarizing the *initial* state (before commands run) should disagree
    // with the expected post-command calendar, and nothing else.
    #[test]
    fn compares_expected_summary() {
        let fixture = load_fixture_document_from_str(FIXTURE_JSON).expect("fixture should parse");
        let summary = RuntimeSummary::from_state(&fixture.state);
        let mismatches = fixture.expected_summary.compare(&summary);
        assert_eq!(mismatches.len(), 1);
        assert!(mismatches[0].contains("calendar mismatch"));
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,10 @@
[package]
name = "rrt-runtime"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
serde.workspace = true
serde_json.workspace = true
sha2.workspace = true

View file

@ -0,0 +1,115 @@
use serde::{Deserialize, Serialize};
/// Month slots in one calendar year.
pub const MONTH_SLOTS_PER_YEAR: u32 = 12;
/// Phase slots in one month.
pub const PHASE_SLOTS_PER_MONTH: u32 = 28;
/// Ticks in one phase.
pub const TICKS_PER_PHASE: u32 = 180;
/// A point on the runtime calendar. Ordering is lexicographic by field
/// declaration order (year, then month, phase, tick), which matches
/// chronological order.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct CalendarPoint {
    pub year: u32,
    /// 0-based, `< MONTH_SLOTS_PER_YEAR`.
    pub month_slot: u32,
    /// 0-based, `< PHASE_SLOTS_PER_MONTH`.
    pub phase_slot: u32,
    /// 0-based, `< TICKS_PER_PHASE`.
    pub tick_slot: u32,
}
impl CalendarPoint {
    /// Check that every slot lies inside its calendar range; the year is
    /// unbounded. Returns the first violation found.
    pub fn validate(&self) -> Result<(), String> {
        let check = |name: &str, value: u32, limit: u32| -> Result<(), String> {
            if value >= limit {
                Err(format!("{name} {value} is out of range 0..{}", limit - 1))
            } else {
                Ok(())
            }
        };
        check("month_slot", self.month_slot, MONTH_SLOTS_PER_YEAR)?;
        check("phase_slot", self.phase_slot, PHASE_SLOTS_PER_MONTH)?;
        check("tick_slot", self.tick_slot, TICKS_PER_PHASE)
    }
    /// Advance one tick, cascading rollovers upward, and report the
    /// highest-order boundary that was crossed.
    pub fn step_forward(&mut self) -> BoundaryEventKind {
        self.tick_slot += 1;
        if self.tick_slot >= TICKS_PER_PHASE {
            self.tick_slot = 0;
            self.phase_slot += 1;
        } else {
            return BoundaryEventKind::Tick;
        }
        if self.phase_slot >= PHASE_SLOTS_PER_MONTH {
            self.phase_slot = 0;
            self.month_slot += 1;
        } else {
            return BoundaryEventKind::PhaseRollover;
        }
        if self.month_slot >= MONTH_SLOTS_PER_YEAR {
            self.month_slot = 0;
            self.year += 1;
            BoundaryEventKind::YearRollover
        } else {
            BoundaryEventKind::MonthRollover
        }
    }
}
/// The highest-order calendar boundary crossed by a single `step_forward`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BoundaryEventKind {
    /// Tick advanced within the current phase.
    Tick,
    /// Tick wrapped; phase advanced.
    PhaseRollover,
    /// Phase wrapped; month advanced.
    MonthRollover,
    /// Month wrapped; year advanced.
    YearRollover,
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn validates_calendar_bounds() {
        let point = CalendarPoint {
            year: 1830,
            month_slot: 0,
            phase_slot: 0,
            tick_slot: 0,
        };
        assert!(point.validate().is_ok());
        // One past the last valid month slot must be rejected.
        let invalid = CalendarPoint {
            month_slot: MONTH_SLOTS_PER_YEAR,
            ..point
        };
        assert!(invalid.validate().is_err());
    }
    // Stepping from the last tick of the last phase of the last month must
    // cascade all the way to a year rollover.
    #[test]
    fn steps_across_year_boundary() {
        let mut point = CalendarPoint {
            year: 1830,
            month_slot: MONTH_SLOTS_PER_YEAR - 1,
            phase_slot: PHASE_SLOTS_PER_MONTH - 1,
            tick_slot: TICKS_PER_PHASE - 1,
        };
        let event = point.step_forward();
        assert_eq!(event, BoundaryEventKind::YearRollover);
        assert_eq!(
            point,
            CalendarPoint {
                year: 1831,
                month_slot: 0,
                phase_slot: 0,
                tick_slot: 0,
            }
        );
    }
}

View file

@ -0,0 +1,367 @@
use std::fs;
use std::path::Path;
use serde::{Deserialize, Serialize};
// VA of the 16-entry scenario-name pointer table inside RT3.exe.
const CAMPAIGN_SCENARIO_TABLE_VA: u32 = 0x0062_1cf0;
/// Number of campaign scenarios in the scenario table.
pub const CAMPAIGN_SCENARIO_COUNT: usize = 16;
// VA of the campaign save-filename format C string.
const CAMPAIGN_SAVE_FORMAT_VA: u32 = 0x005d_1a78;
// UI control id that mirrors campaign progress (see `notes` in the report).
const CAMPAIGN_PROGRESS_CONTROL_BASE_ID: u16 = 0x0c372;
// First of the 16 consecutive scenario-selector control ids.
const CAMPAIGN_SELECTOR_CONTROL_BASE_ID: u16 = 0x0c382;
const CAMPAIGN_SELECTOR_CONTROL_COUNT: usize = 16;
/// Scenario names as observed in the shipped RT3.exe, in table order.
pub const OBSERVED_CAMPAIGN_SCENARIO_NAMES: [&str; CAMPAIGN_SCENARIO_COUNT] = [
    "Go West!",
    "Germantown",
    "Central Pacific",
    "Texas Tea",
    "War Effort",
    "State of Germany",
    "Britain",
    "Crossing the Alps",
    "Third Republic",
    "Orient Express",
    "Argentina Opens Up",
    "Rhodes Unfinished",
    "Japan Trembles",
    "Greenland Growing",
    "Dutchlantis",
    "California Island",
];
/// One page band: the inclusive progress range shown on a campaign page.
/// `None` for `progress_end_inclusive` means the band is open-ended.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct CampaignPageBand {
    pub page_index: usize,
    pub progress_start_inclusive: u8,
    pub progress_end_inclusive: Option<u8>,
}
/// One slot of the scenario pointer table: index, the pointer's VA (raw and
/// hex-formatted), and the NUL-terminated name it points at.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct CampaignScenarioEntry {
    pub index: usize,
    pub pointer_va: u32,
    pub pointer_va_hex: String,
    pub name: String,
}
/// Everything `inspect_campaign_exe_bytes` extracts from the executable,
/// with hex twins of each numeric address/id for readable JSON output.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct CampaignExeInspectionReport {
    pub image_base: u32,
    pub image_base_hex: String,
    pub campaign_scenario_table_va: u32,
    pub campaign_scenario_table_va_hex: String,
    pub campaign_scenario_count: usize,
    pub campaign_save_format_va: u32,
    pub campaign_save_format_va_hex: String,
    pub campaign_save_format_string: String,
    pub campaign_progress_control_base_id: u16,
    pub campaign_progress_control_base_id_hex: String,
    pub campaign_selector_control_base_id: u16,
    pub campaign_selector_control_base_id_hex: String,
    pub campaign_selector_control_count: usize,
    pub campaign_page_bands: Vec<CampaignPageBand>,
    pub scenarios: Vec<CampaignScenarioEntry>,
    /// Free-form analysis notes attached to every report.
    pub notes: Vec<String>,
}
/// The PE section-header fields needed for VA -> file-offset mapping.
#[derive(Debug, Clone, PartialEq, Eq)]
struct PeSection {
    virtual_address: u32,
    virtual_size: u32,
    raw_data_pointer: u32,
    raw_data_size: u32,
}
/// Minimal parsed view of a 32-bit PE image: base address plus section table.
#[derive(Debug, Clone, PartialEq, Eq)]
struct PeView {
    image_base: u32,
    sections: Vec<PeSection>,
}
/// Read an executable from disk and inspect its campaign tables.
pub fn inspect_campaign_exe_file(
    path: &Path,
) -> Result<CampaignExeInspectionReport, Box<dyn std::error::Error>> {
    inspect_campaign_exe_bytes(&fs::read(path)?)
}
/// Inspect an in-memory RT3.exe image and report its campaign tables.
///
/// Reads the sixteen scenario-name pointers and the save-filename format
/// string out of the PE image, and attaches the statically-known control ids
/// and page bands (see `notes` for their provenance).
pub fn inspect_campaign_exe_bytes(
    bytes: &[u8],
) -> Result<CampaignExeInspectionReport, Box<dyn std::error::Error>> {
    let view = parse_pe_view(bytes)?;
    // Each table slot is a 4-byte VA pointing at a NUL-terminated name.
    let scenarios = (0..CAMPAIGN_SCENARIO_COUNT)
        .map(|index| {
            let pointer_va = read_u32_at_va(
                bytes,
                &view,
                CAMPAIGN_SCENARIO_TABLE_VA + (index as u32 * 4),
            )?;
            let name = read_c_string_at_va(bytes, &view, pointer_va)?;
            Ok(CampaignScenarioEntry {
                index,
                pointer_va,
                pointer_va_hex: format!("0x{pointer_va:08x}"),
                name,
            })
        })
        .collect::<Result<Vec<_>, Box<dyn std::error::Error>>>()?;
    let campaign_save_format_string = read_c_string_at_va(bytes, &view, CAMPAIGN_SAVE_FORMAT_VA)?;
    Ok(CampaignExeInspectionReport {
        image_base: view.image_base,
        image_base_hex: format!("0x{:08x}", view.image_base),
        campaign_scenario_table_va: CAMPAIGN_SCENARIO_TABLE_VA,
        campaign_scenario_table_va_hex: format!("0x{CAMPAIGN_SCENARIO_TABLE_VA:08x}"),
        campaign_scenario_count: CAMPAIGN_SCENARIO_COUNT,
        campaign_save_format_va: CAMPAIGN_SAVE_FORMAT_VA,
        campaign_save_format_va_hex: format!("0x{CAMPAIGN_SAVE_FORMAT_VA:08x}"),
        campaign_save_format_string,
        campaign_progress_control_base_id: CAMPAIGN_PROGRESS_CONTROL_BASE_ID,
        campaign_progress_control_base_id_hex: format!(
            "0x{CAMPAIGN_PROGRESS_CONTROL_BASE_ID:04x}"
        ),
        campaign_selector_control_base_id: CAMPAIGN_SELECTOR_CONTROL_BASE_ID,
        campaign_selector_control_base_id_hex: format!(
            "0x{CAMPAIGN_SELECTOR_CONTROL_BASE_ID:04x}"
        ),
        campaign_selector_control_count: CAMPAIGN_SELECTOR_CONTROL_COUNT,
        // Page bands observed in the disassembly (see notes); the last band
        // is open-ended.
        campaign_page_bands: vec![
            CampaignPageBand {
                page_index: 1,
                progress_start_inclusive: 0,
                progress_end_inclusive: Some(4),
            },
            CampaignPageBand {
                page_index: 2,
                progress_start_inclusive: 5,
                progress_end_inclusive: Some(9),
            },
            CampaignPageBand {
                page_index: 3,
                progress_start_inclusive: 10,
                progress_end_inclusive: Some(12),
            },
            CampaignPageBand {
                page_index: 4,
                progress_start_inclusive: 13,
                progress_end_inclusive: None,
            },
        ],
        scenarios,
        notes: vec![
            "Campaign.win mirrors [profile+0xc4] into control 0xc372 + progress.".to_string(),
            "Campaign.win mirrors the full sixteen-byte band [profile+0xc6..+0xd5] into controls 0xc382..0xc391.".to_string(),
            "The observed page-band thresholds come from direct RT3.exe disassembly at 0x004b8d49..0x004b8d69.".to_string(),
        ],
    })
}
/// Parse just enough of a 32-bit (PE32) image to map virtual addresses:
/// the image base and the section table.
fn parse_pe_view(bytes: &[u8]) -> Result<PeView, Box<dyn std::error::Error>> {
    // e_lfanew at 0x3c points at the "PE\0\0" signature.
    let pe_header_offset =
        read_u32_le(bytes, 0x3c).ok_or("missing DOS e_lfanew for PE header")? as usize;
    let signature = bytes
        .get(pe_header_offset..pe_header_offset + 4)
        .ok_or("truncated PE signature")?;
    if signature != b"PE\0\0" {
        return Err("invalid PE signature".into());
    }
    let file_header_offset = pe_header_offset + 4;
    let number_of_sections =
        read_u16_le(bytes, file_header_offset + 2).ok_or("missing PE section count")? as usize;
    let size_of_optional_header =
        read_u16_le(bytes, file_header_offset + 16).ok_or("missing optional header size")? as usize;
    let optional_header_offset = file_header_offset + 20;
    // 0x10b marks PE32; PE32+ (0x20b) is not supported.
    let optional_magic =
        read_u16_le(bytes, optional_header_offset).ok_or("missing optional header magic")?;
    if optional_magic != 0x10b {
        return Err(format!("unsupported PE optional-header magic 0x{optional_magic:04x}").into());
    }
    let image_base =
        read_u32_le(bytes, optional_header_offset + 28).ok_or("missing PE image base")?;
    let section_table_offset = optional_header_offset + size_of_optional_header;
    // Section headers are 40 bytes each; pull only the fields we need.
    let sections = (0..number_of_sections)
        .map(|index| {
            let base = section_table_offset + index * 40;
            Ok(PeSection {
                virtual_size: read_u32_le(bytes, base + 8)
                    .ok_or("truncated section virtual size")?,
                virtual_address: read_u32_le(bytes, base + 12).ok_or("truncated section RVA")?,
                raw_data_size: read_u32_le(bytes, base + 16)
                    .ok_or("truncated section raw size")?,
                raw_data_pointer: read_u32_le(bytes, base + 20)
                    .ok_or("truncated section raw pointer")?,
            })
        })
        .collect::<Result<Vec<_>, Box<dyn std::error::Error>>>()?;
    Ok(PeView {
        image_base,
        sections,
    })
}
/// Read a little-endian `u32` located at virtual address `va`.
fn read_u32_at_va(bytes: &[u8], view: &PeView, va: u32) -> Result<u32, Box<dyn std::error::Error>> {
    let offset = map_va_to_file_offset(view, va)?;
    match read_u32_le(bytes, offset) {
        Some(value) => Ok(value),
        None => Err(format!("truncated u32 at VA 0x{va:08x}").into()),
    }
}
/// Read a NUL-terminated, UTF-8 C string located at virtual address `va`.
fn read_c_string_at_va(
    bytes: &[u8],
    view: &PeView,
    va: u32,
) -> Result<String, Box<dyn std::error::Error>> {
    let start = map_va_to_file_offset(view, va)?;
    let tail = bytes
        .get(start..)
        .ok_or_else(|| format!("VA 0x{va:08x} mapped outside file"))?;
    let len = tail
        .iter()
        .position(|&byte| byte == 0)
        .ok_or_else(|| format!("unterminated C string at VA 0x{va:08x}"))?;
    Ok(String::from_utf8(tail[..len].to_vec())?)
}
/// Translate a virtual address into a file offset via the section table.
fn map_va_to_file_offset(view: &PeView, va: u32) -> Result<usize, Box<dyn std::error::Error>> {
    let rva = va
        .checked_sub(view.image_base)
        .ok_or_else(|| format!("VA 0x{va:08x} below image base 0x{:08x}", view.image_base))?;
    for section in &view.sections {
        // A section's virtual span and raw span can differ; accept the larger
        // of the two so either mapping resolves.
        let span = section.virtual_size.max(section.raw_data_size);
        let section_end = section
            .virtual_address
            .checked_add(span)
            .ok_or("section RVA range overflow")?;
        if (section.virtual_address..section_end).contains(&rva) {
            let file_offset = section
                .raw_data_pointer
                .checked_add(rva - section.virtual_address)
                .ok_or("section file offset overflow")?;
            return Ok(file_offset as usize);
        }
    }
    Err(format!("VA 0x{va:08x} did not map into any PE section").into())
}
/// Read a little-endian `u16` at `offset`, or `None` if out of bounds.
fn read_u16_le(bytes: &[u8], offset: usize) -> Option<u16> {
    let raw: [u8; 2] = bytes.get(offset..offset + 2)?.try_into().ok()?;
    Some(u16::from_le_bytes(raw))
}
/// Read a little-endian `u32` at `offset`, or `None` if out of bounds.
fn read_u32_le(bytes: &[u8], offset: usize) -> Option<u32> {
    let raw: [u8; 4] = bytes.get(offset..offset + 4)?.try_into().ok()?;
    Some(u32::from_le_bytes(raw))
}
#[cfg(test)]
mod tests {
    use super::{
        CAMPAIGN_SAVE_FORMAT_VA, CAMPAIGN_SCENARIO_COUNT, CAMPAIGN_SCENARIO_TABLE_VA,
        inspect_campaign_exe_bytes,
    };
    // Build a minimal synthetic PE32 image with a single ".data" section laid
    // out so that the hard-coded campaign VAs land inside it, then plant the
    // scenario pointer table, name strings, and save-format string there.
    fn build_test_pe() -> Vec<u8> {
        let image_base = 0x0040_0000u32;
        let section_rva = 0x001c_0000u32;
        let section_raw = 0x0000_0200u32;
        let section_size = 0x0007_0000usize;
        let mut bytes = vec![0u8; section_raw as usize + section_size];
        // DOS header: "MZ" plus e_lfanew -> PE header at 0x80.
        bytes[0..2].copy_from_slice(b"MZ");
        bytes[0x3c..0x40].copy_from_slice(&(0x80u32).to_le_bytes());
        bytes[0x80..0x84].copy_from_slice(b"PE\0\0");
        let file_header = 0x84usize;
        bytes[file_header + 2..file_header + 4].copy_from_slice(&(1u16).to_le_bytes());
        bytes[file_header + 16..file_header + 18].copy_from_slice(&(0xe0u16).to_le_bytes());
        let optional_header = file_header + 20;
        // 0x10b = PE32 optional-header magic.
        bytes[optional_header..optional_header + 2].copy_from_slice(&(0x10bu16).to_le_bytes());
        bytes[optional_header + 28..optional_header + 32]
            .copy_from_slice(&image_base.to_le_bytes());
        let section_header = optional_header + 0xe0;
        bytes[section_header..section_header + 5].copy_from_slice(b".data");
        bytes[section_header + 8..section_header + 12]
            .copy_from_slice(&(section_size as u32).to_le_bytes());
        bytes[section_header + 12..section_header + 16].copy_from_slice(&section_rva.to_le_bytes());
        bytes[section_header + 16..section_header + 20]
            .copy_from_slice(&(section_size as u32).to_le_bytes());
        bytes[section_header + 20..section_header + 24].copy_from_slice(&section_raw.to_le_bytes());
        // File offsets of the planted tables, derived from the section layout.
        let scenario_table_file =
            (CAMPAIGN_SCENARIO_TABLE_VA - image_base - section_rva + section_raw) as usize;
        let format_file =
            (CAMPAIGN_SAVE_FORMAT_VA - image_base - section_rva + section_raw) as usize;
        let scenario_names = [
            "Go West!",
            "Germantown",
            "Central Pacific",
            "Texas Tea",
            "War Effort",
            "State of Germany",
            "Britain",
            "Crossing the Alps",
            "Third Republic",
            "Orient Express",
            "Argentina Opens Up",
            "Rhodes Unfinished",
            "Japan Trembles",
            "Greenland Growing",
            "Dutchlantis",
            "California Island",
        ];
        // Pack the NUL-terminated name strings directly after the pointer
        // table, recording each one's VA in the corresponding table slot.
        let mut string_cursor = scenario_table_file + CAMPAIGN_SCENARIO_COUNT * 4;
        for (index, name) in scenario_names.iter().enumerate() {
            let pointer_va =
                image_base + section_rva + (string_cursor - section_raw as usize) as u32;
            bytes[scenario_table_file + index * 4..scenario_table_file + (index + 1) * 4]
                .copy_from_slice(&pointer_va.to_le_bytes());
            bytes[string_cursor..string_cursor + name.len()].copy_from_slice(name.as_bytes());
            bytes[string_cursor + name.len()] = 0;
            string_cursor += name.len() + 1;
        }
        let format = b"%s%02d.gmc\0";
        bytes[format_file..format_file + format.len()].copy_from_slice(format);
        bytes
    }
    #[test]
    fn inspects_campaign_exe_tables_from_synthetic_pe() {
        let bytes = build_test_pe();
        let report = inspect_campaign_exe_bytes(&bytes).expect("campaign exe inspection");
        assert_eq!(report.campaign_scenario_count, 16);
        assert_eq!(report.campaign_save_format_string, "%s%02d.gmc");
        assert_eq!(
            report.scenarios.first().map(|entry| entry.name.as_str()),
            Some("Go West!")
        );
        assert_eq!(
            report.scenarios.last().map(|entry| entry.name.as_str()),
            Some("California Island")
        );
        assert_eq!(report.campaign_page_bands.len(), 4);
        assert_eq!(report.campaign_page_bands[1].progress_start_inclusive, 5);
        assert_eq!(
            report.campaign_page_bands[1].progress_end_inclusive,
            Some(9)
        );
    }
}

View file

@ -0,0 +1,133 @@
use std::path::Path;
use serde::{Deserialize, Serialize};
use crate::RuntimeState;
/// State dump schema version accepted by this crate.
pub const STATE_DUMP_FORMAT_VERSION: u32 = 1;
/// Optional provenance metadata attached to a state dump.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct RuntimeStateDumpSource {
    #[serde(default)]
    pub description: Option<String>,
    /// Binary the state was captured from, if known.
    #[serde(default)]
    pub source_binary: Option<String>,
}
/// Wrapped on-disk form of a runtime state dump (versioned, with an id and
/// provenance). A bare `RuntimeState` JSON blob is also accepted by the
/// import loader.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RuntimeStateDumpDocument {
    pub format_version: u32,
    pub dump_id: String,
    #[serde(default)]
    pub source: RuntimeStateDumpSource,
    pub state: RuntimeState,
}
/// Result of importing a state, from either a dump document or a bare state.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RuntimeStateImport {
    /// The dump's own id, or a caller-supplied fallback for bare states.
    pub import_id: String,
    pub description: Option<String>,
    pub state: RuntimeState,
}
pub fn validate_runtime_state_dump_document(
document: &RuntimeStateDumpDocument,
) -> Result<(), String> {
if document.format_version != STATE_DUMP_FORMAT_VERSION {
return Err(format!(
"unsupported state dump format_version {} (expected {})",
document.format_version, STATE_DUMP_FORMAT_VERSION
));
}
if document.dump_id.trim().is_empty() {
return Err("dump_id must not be empty".to_string());
}
document.state.validate()
}
/// Load a runtime state import from a file, using the file stem as the
/// import id when the file contains a bare state (no dump id).
pub fn load_runtime_state_import(
    path: &Path,
) -> Result<RuntimeStateImport, Box<dyn std::error::Error>> {
    let fallback_id = path
        .file_stem()
        .and_then(|stem| stem.to_str())
        .unwrap_or("runtime-state");
    let text = std::fs::read_to_string(path)?;
    load_runtime_state_import_from_str(&text, fallback_id)
}
/// Parse a runtime state import from JSON text.
///
/// The wrapped dump-document shape is tried first; anything that does not
/// parse as a dump document is retried as a bare `RuntimeState` blob, which
/// takes `fallback_id` as its import id.
pub fn load_runtime_state_import_from_str(
    text: &str,
    fallback_id: &str,
) -> Result<RuntimeStateImport, Box<dyn std::error::Error>> {
    match serde_json::from_str::<RuntimeStateDumpDocument>(text) {
        Ok(document) => {
            validate_runtime_state_dump_document(&document)
                .map_err(|err| format!("invalid runtime state dump document: {err}"))?;
            Ok(RuntimeStateImport {
                import_id: document.dump_id,
                description: document.source.description,
                state: document.state,
            })
        }
        Err(_) => {
            let state: RuntimeState = serde_json::from_str(text)?;
            state
                .validate()
                .map_err(|err| format!("invalid runtime state: {err}"))?;
            Ok(RuntimeStateImport {
                import_id: fallback_id.to_string(),
                description: None,
                state,
            })
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{CalendarPoint, RuntimeServiceState};
    use std::collections::BTreeMap;
    // Minimal valid state shared by both tests.
    fn state() -> RuntimeState {
        RuntimeState {
            calendar: CalendarPoint {
                year: 1830,
                month_slot: 0,
                phase_slot: 0,
                tick_slot: 0,
            },
            world_flags: BTreeMap::new(),
            companies: Vec::new(),
            event_runtime_records: Vec::new(),
            service_state: RuntimeServiceState::default(),
        }
    }
    // Wrapped dump documents must keep their own id and description; the
    // caller-supplied fallback id is ignored.
    #[test]
    fn loads_dump_document() {
        let text = serde_json::to_string(&RuntimeStateDumpDocument {
            format_version: STATE_DUMP_FORMAT_VERSION,
            dump_id: "dump-smoke".to_string(),
            source: RuntimeStateDumpSource {
                description: Some("test dump".to_string()),
                source_binary: None,
            },
            state: state(),
        })
        .expect("dump should serialize");
        let import =
            load_runtime_state_import_from_str(&text, "fallback").expect("dump should load");
        assert_eq!(import.import_id, "dump-smoke");
        assert_eq!(import.description.as_deref(), Some("test dump"));
    }
    // Bare state blobs take the fallback id and carry no description.
    #[test]
    fn loads_bare_runtime_state() {
        let text = serde_json::to_string(&state()).expect("state should serialize");
        let import =
            load_runtime_state_import_from_str(&text, "fallback").expect("state should load");
        assert_eq!(import.import_id, "fallback");
        assert!(import.description.is_none());
    }
}

View file

@ -0,0 +1,47 @@
pub mod calendar;
pub mod campaign_exe;
pub mod import;
pub mod persistence;
pub mod pk4;
pub mod runtime;
pub mod smp;
pub mod step;
pub mod summary;
pub mod win;
pub use calendar::{CalendarPoint, MONTH_SLOTS_PER_YEAR, PHASE_SLOTS_PER_MONTH, TICKS_PER_PHASE};
pub use campaign_exe::{
CAMPAIGN_SCENARIO_COUNT, CampaignExeInspectionReport, CampaignPageBand, CampaignScenarioEntry,
OBSERVED_CAMPAIGN_SCENARIO_NAMES, inspect_campaign_exe_bytes, inspect_campaign_exe_file,
};
pub use import::{
RuntimeStateDumpDocument, RuntimeStateDumpSource, RuntimeStateImport,
STATE_DUMP_FORMAT_VERSION, load_runtime_state_import, validate_runtime_state_dump_document,
};
pub use persistence::{
RuntimeSnapshotDocument, RuntimeSnapshotSource, SNAPSHOT_FORMAT_VERSION,
load_runtime_snapshot_document, save_runtime_snapshot_document,
validate_runtime_snapshot_document,
};
pub use pk4::{
PK4_DIRECTORY_ENTRY_STRIDE, PK4_MAGIC, Pk4Entry, Pk4ExtractionReport, Pk4InspectionReport,
extract_pk4_entry_bytes, extract_pk4_entry_file, inspect_pk4_bytes, inspect_pk4_file,
};
pub use runtime::{RuntimeCompany, RuntimeEventRecord, RuntimeServiceState, RuntimeState};
pub use smp::{
SMP_FOUR_SIDECAR_BYTE_PLANES_MIN_BUNDLE_VERSION, SmpAsciiPreview, SmpClassicPackedProfileBlock,
SmpClassicRehydrateProfileProbe, SmpContainerProfile, SmpEarlyContentProbe,
SmpHeaderVariantProbe, SmpInspectionReport, SmpKnownTagHit, SmpPackedProfileWordLane,
SmpPreamble, SmpPreambleWord, SmpRt3105PackedProfileBlock, SmpRt3105PackedProfileProbe,
SmpRt3105PostSpanBridgeProbe, SmpRt3105SaveBridgePayloadProbe, SmpRt3105SaveNameTableEntry,
SmpRt3105SaveNameTableProbe, SmpRuntimeAnchorCycleBlock, SmpRuntimePostSpanHeaderCandidate,
SmpRuntimePostSpanProbe, SmpRuntimeTrailerBlock, SmpSaveAnchorRunBlock, SmpSaveBootstrapBlock,
SmpSecondaryVariantProbe, SmpSharedHeader, inspect_smp_bytes, inspect_smp_file,
};
pub use step::{BoundaryEvent, ServiceEvent, StepCommand, StepResult, execute_step_command};
pub use summary::RuntimeSummary;
pub use win::{
WinAnonymousSelectorRecord, WinHeaderWord, WinInspectionReport, WinReferenceDeltaFrequency,
WinResourceRecordSample, WinResourceReference, WinResourceSelectorRecord, inspect_win_bytes,
inspect_win_file,
};

View file

@ -0,0 +1,111 @@
use std::path::Path;
use serde::{Deserialize, Serialize};
use crate::{RuntimeState, RuntimeSummary};
pub const SNAPSHOT_FORMAT_VERSION: u32 = 1;
/// Optional provenance metadata stored alongside a snapshot.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct RuntimeSnapshotSource {
    /// Id of the fixture the snapshot was derived from, when known.
    #[serde(default)]
    pub source_fixture_id: Option<String>,
    /// Free-form human-readable description.
    #[serde(default)]
    pub description: Option<String>,
}
/// Versioned wrapper persisted to disk around a [`RuntimeState`].
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RuntimeSnapshotDocument {
    /// Must equal [`SNAPSHOT_FORMAT_VERSION`] to pass validation.
    pub format_version: u32,
    /// Non-blank identifier for this snapshot.
    pub snapshot_id: String,
    /// Provenance; defaults to empty when absent from serialized input.
    #[serde(default)]
    pub source: RuntimeSnapshotSource,
    /// The captured runtime state itself.
    pub state: RuntimeState,
}
impl RuntimeSnapshotDocument {
    /// Derives an aggregate [`RuntimeSummary`] from the wrapped state.
    pub fn summary(&self) -> RuntimeSummary {
        let Self { state, .. } = self;
        RuntimeSummary::from_state(state)
    }
}
/// Checks a snapshot document against the current schema.
///
/// # Errors
/// Returns a message when the format version is unsupported, the snapshot id
/// is blank, or the embedded state fails its own validation.
pub fn validate_runtime_snapshot_document(
    document: &RuntimeSnapshotDocument,
) -> Result<(), String> {
    // Version gate first: the remaining checks assume the current layout.
    let version = document.format_version;
    if version != SNAPSHOT_FORMAT_VERSION {
        return Err(format!(
            "unsupported snapshot format_version {version} (expected {SNAPSHOT_FORMAT_VERSION})"
        ));
    }
    if document.snapshot_id.trim().is_empty() {
        return Err("snapshot_id must not be empty".to_string());
    }
    // Structural invariants are delegated to the state itself.
    document.state.validate()
}
pub fn load_runtime_snapshot_document(
path: &Path,
) -> Result<RuntimeSnapshotDocument, Box<dyn std::error::Error>> {
let text = std::fs::read_to_string(path)?;
Ok(serde_json::from_str(&text)?)
}
/// Validates a snapshot document and writes it to `path` as pretty JSON,
/// creating parent directories as needed.
///
/// # Errors
/// Returns an error when the document is invalid, serialization fails, or any
/// filesystem operation fails.
pub fn save_runtime_snapshot_document(
    path: &Path,
    document: &RuntimeSnapshotDocument,
) -> Result<(), Box<dyn std::error::Error>> {
    // Refuse to persist an invalid document.
    validate_runtime_snapshot_document(document)
        .map_err(|err| format!("invalid runtime snapshot document: {err}"))?;
    let serialized = serde_json::to_vec_pretty(document)?;
    match path.parent() {
        Some(parent) => std::fs::create_dir_all(parent)?,
        None => {}
    }
    std::fs::write(path, serialized)?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{CalendarPoint, RuntimeServiceState};
    use std::collections::BTreeMap;
    // Minimal valid snapshot document shared by the tests below.
    fn snapshot() -> RuntimeSnapshotDocument {
        RuntimeSnapshotDocument {
            format_version: SNAPSHOT_FORMAT_VERSION,
            snapshot_id: "snapshot-smoke".to_string(),
            source: RuntimeSnapshotSource {
                source_fixture_id: Some("fixture-smoke".to_string()),
                description: Some("test snapshot".to_string()),
            },
            state: RuntimeState {
                calendar: CalendarPoint {
                    year: 1830,
                    month_slot: 0,
                    phase_slot: 0,
                    tick_slot: 0,
                },
                world_flags: BTreeMap::new(),
                companies: Vec::new(),
                event_runtime_records: Vec::new(),
                service_state: RuntimeServiceState::default(),
            },
        }
    }
    #[test]
    fn validates_snapshot_document() {
        let document = snapshot();
        assert!(validate_runtime_snapshot_document(&document).is_ok());
    }
    // Serialize then deserialize must reproduce the document exactly.
    #[test]
    fn roundtrips_snapshot_json() {
        let document = snapshot();
        let value = serde_json::to_string_pretty(&document).expect("snapshot should serialize");
        let reparsed: RuntimeSnapshotDocument =
            serde_json::from_str(&value).expect("snapshot should deserialize");
        assert_eq!(document, reparsed);
    }
}

View file

@ -0,0 +1,313 @@
use std::fs;
use std::path::Path;
use serde::{Deserialize, Serialize};
/// Little-endian magic word observed at offset 0 of pack4 containers.
pub const PK4_MAGIC: u32 = 0x0000_03eb;
/// Total size in bytes of one directory entry.
pub const PK4_DIRECTORY_ENTRY_STRIDE: usize = 0x4a;
/// Fixed metadata bytes per entry: crc32 (4) + length (4) + offset (4) + flag (1).
pub const PK4_DIRECTORY_METADATA_LEN: usize = 13;
/// Bytes left for the NUL-terminated entry name after the metadata.
pub const PK4_DIRECTORY_NAME_LEN: usize = PK4_DIRECTORY_ENTRY_STRIDE - PK4_DIRECTORY_METADATA_LEN;
/// One decoded pack4 directory entry. Every numeric field has a `_hex` twin
/// holding the same value pre-formatted for report output.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Pk4Entry {
    /// Zero-based position within the directory.
    pub index: usize,
    /// Absolute file offset of this directory entry.
    pub directory_offset: usize,
    pub directory_offset_hex: String,
    /// CRC32 stored in the directory (not recomputed here).
    pub crc32: u32,
    pub crc32_hex: String,
    /// Payload length in bytes.
    pub payload_len: u32,
    pub payload_len_hex: String,
    /// Payload offset relative to the payload base (end of directory).
    pub payload_offset: u32,
    pub payload_offset_hex: String,
    /// Absolute payload start: payload base + relative offset.
    pub payload_absolute_offset: usize,
    pub payload_absolute_offset_hex: String,
    /// Absolute payload end (exclusive).
    pub payload_end_offset: usize,
    pub payload_end_offset_hex: String,
    /// Single flag byte at entry offset 12; meaning not decoded here.
    pub flag: u8,
    pub flag_hex: String,
    /// Lowercased file extension parsed from the name, when present.
    pub extension: Option<String>,
    /// First payload bytes rendered as printable ASCII (non-printables as '.').
    pub payload_signature_ascii: String,
    /// First payload bytes rendered as lowercase hex.
    pub payload_signature_hex: String,
    /// NUL-terminated entry name decoded as UTF-8.
    pub name: String,
}
/// Result of inspecting a whole pack4 container.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Pk4InspectionReport {
    /// Magic word read from offset 0 (compared against [`PK4_MAGIC`]).
    pub magic: u32,
    pub magic_hex: String,
    /// Number of directory entries declared in the header.
    pub entry_count: usize,
    /// Always [`PK4_DIRECTORY_ENTRY_STRIDE`]; echoed for report readers.
    pub directory_entry_stride: usize,
    /// entry_count * stride, in bytes.
    pub directory_len: usize,
    pub directory_len_hex: String,
    /// Absolute offset where payload data begins (8-byte header + directory).
    pub payload_base_offset: usize,
    pub payload_base_offset_hex: String,
    pub file_size: usize,
    /// True when each payload ends exactly where the next begins.
    pub payloads_are_contiguous: bool,
    /// Human-readable observations about the container.
    pub notes: Vec<String>,
    pub entries: Vec<Pk4Entry>,
}
/// Result of extracting a single entry's payload from a pack4 container.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Pk4ExtractionReport {
    /// Actual directory name that matched the request.
    pub matched_entry_name: String,
    /// True when the match required ignoring ASCII case.
    pub case_insensitive_match: bool,
    /// Extracted payload size in bytes.
    pub extracted_len: usize,
    pub extracted_len_hex: String,
    /// Full directory entry for the extracted payload.
    pub entry: Pk4Entry,
}
/// Reads `path` into memory and inspects it as a pack4 container.
///
/// # Errors
/// Returns an error on I/O failure or when the bytes do not form a
/// well-structured pack4 container.
pub fn inspect_pk4_file(path: &Path) -> Result<Pk4InspectionReport, Box<dyn std::error::Error>> {
    inspect_pk4_bytes(&fs::read(path)?)
}
/// Inspects an in-memory pack4 container: decodes the 8-byte header, walks the
/// directory, bounds-checks every payload, and assembles a report with notes.
///
/// Layout assumed (grounded in the parsing below): `u32 magic`, `u32 entry
/// count`, then `entry_count` directory entries of [`PK4_DIRECTORY_ENTRY_STRIDE`]
/// bytes, then the packed payload region.
///
/// # Errors
/// Returns an error when the header or any directory entry is truncated,
/// arithmetic on declared sizes overflows, a payload extends past the end of
/// the buffer, or an entry name is empty/invalid UTF-8.
pub fn inspect_pk4_bytes(bytes: &[u8]) -> Result<Pk4InspectionReport, Box<dyn std::error::Error>> {
    let magic = read_u32_le(bytes, 0).ok_or("truncated pk4 magic")?;
    let entry_count = read_u32_le(bytes, 4).ok_or("truncated pk4 entry count")? as usize;
    // checked arithmetic: entry_count comes from the file and may be hostile.
    let directory_len = entry_count
        .checked_mul(PK4_DIRECTORY_ENTRY_STRIDE)
        .ok_or("pk4 directory length overflow")?;
    let payload_base_offset = 8usize
        .checked_add(directory_len)
        .ok_or("pk4 payload base overflow")?;
    if payload_base_offset > bytes.len() {
        return Err(format!(
            "pk4 directory extends past end of file: payload base 0x{payload_base_offset:x}, file size 0x{:x}",
            bytes.len()
        )
        .into());
    }
    let mut entries = Vec::with_capacity(entry_count);
    for index in 0..entry_count {
        let directory_offset = 8 + index * PK4_DIRECTORY_ENTRY_STRIDE;
        let directory_entry = bytes
            .get(directory_offset..directory_offset + PK4_DIRECTORY_ENTRY_STRIDE)
            .ok_or_else(|| {
                format!(
                    "truncated pk4 directory entry {} at offset 0x{directory_offset:x}",
                    index
                )
            })?;
        // Fixed metadata: crc32 @0, length @4, relative offset @8, flag @12,
        // NUL-terminated name from byte 13 onward.
        let crc32 = read_u32_le(directory_entry, 0).ok_or("truncated pk4 entry crc32")?;
        let payload_len = read_u32_le(directory_entry, 4).ok_or("truncated pk4 entry length")?;
        let payload_offset =
            read_u32_le(directory_entry, 8).ok_or("truncated pk4 entry payload offset")?;
        let flag = directory_entry[12];
        let name = parse_name(&directory_entry[13..])?;
        // Offsets in the directory are relative to the payload base.
        let payload_absolute_offset = payload_base_offset
            .checked_add(payload_offset as usize)
            .ok_or_else(|| format!("pk4 payload offset overflow for entry {name}"))?;
        let payload_end_offset = payload_absolute_offset
            .checked_add(payload_len as usize)
            .ok_or_else(|| format!("pk4 payload end overflow for entry {name}"))?;
        let payload = bytes.get(payload_absolute_offset..payload_end_offset).ok_or_else(|| {
            format!(
                "pk4 payload for entry {name} extends past end of file: 0x{payload_absolute_offset:x}..0x{payload_end_offset:x} > 0x{:x}",
                bytes.len()
            )
        })?;
        entries.push(Pk4Entry {
            index,
            directory_offset,
            directory_offset_hex: format!("0x{directory_offset:04x}"),
            crc32,
            crc32_hex: format!("0x{crc32:08x}"),
            payload_len,
            payload_len_hex: format!("0x{payload_len:08x}"),
            payload_offset,
            payload_offset_hex: format!("0x{payload_offset:08x}"),
            payload_absolute_offset,
            payload_absolute_offset_hex: format!("0x{payload_absolute_offset:08x}"),
            payload_end_offset,
            payload_end_offset_hex: format!("0x{payload_end_offset:08x}"),
            flag,
            flag_hex: format!("0x{flag:02x}"),
            extension: Path::new(&name)
                .extension()
                .and_then(|extension| extension.to_str())
                .map(|extension| extension.to_ascii_lowercase()),
            payload_signature_ascii: ascii_preview(payload, 8),
            payload_signature_hex: hex_preview(payload, 8),
            name,
        });
    }
    // Contiguity: each payload ends exactly where the next one begins.
    let payloads_are_contiguous = entries
        .windows(2)
        .all(|window| window[0].payload_end_offset == window[1].payload_absolute_offset);
    let mut notes = Vec::new();
    if magic == PK4_MAGIC {
        notes.push(
            "Header magic matches the observed RT3 pack4 container family (0x03eb).".to_string(),
        );
    } else {
        // Unexpected magic is reported but not treated as a hard error.
        notes.push(format!(
            "Header magic 0x{magic:08x} differs from the observed RT3 pack4 container family 0x{PK4_MAGIC:08x}."
        ));
    }
    notes.push(format!(
        "Payload base is derived as 8 + entry_count * 0x{PK4_DIRECTORY_ENTRY_STRIDE:02x}."
    ));
    if payloads_are_contiguous {
        notes.push(
            "Entry payload offsets form one contiguous packed data region in directory order."
                .to_string(),
        );
    }
    Ok(Pk4InspectionReport {
        magic,
        magic_hex: format!("0x{magic:08x}"),
        entry_count,
        directory_entry_stride: PK4_DIRECTORY_ENTRY_STRIDE,
        directory_len,
        directory_len_hex: format!("0x{directory_len:08x}"),
        payload_base_offset,
        payload_base_offset_hex: format!("0x{payload_base_offset:08x}"),
        file_size: bytes.len(),
        payloads_are_contiguous,
        notes,
        entries,
    })
}
/// Extracts one entry's payload from the pack4 file at `pk4_path` and writes
/// it to `output_path`.
///
/// # Errors
/// Returns an error on I/O failure, a malformed container, or when the entry
/// cannot be found unambiguously.
pub fn extract_pk4_entry_file(
    pk4_path: &Path,
    entry_name: &str,
    output_path: &Path,
) -> Result<Pk4ExtractionReport, Box<dyn std::error::Error>> {
    let container = fs::read(pk4_path)?;
    let (report, payload) = extract_pk4_entry_bytes(&container, entry_name)?;
    fs::write(output_path, payload)?;
    Ok(report)
}
/// Locates `entry_name` in an in-memory pack4 container and returns the
/// extraction report together with a copy of the payload bytes.
///
/// # Errors
/// Returns an error when the container is malformed or the entry is missing
/// (or matches more than one name case-insensitively).
pub fn extract_pk4_entry_bytes(
    bytes: &[u8],
    entry_name: &str,
) -> Result<(Pk4ExtractionReport, Vec<u8>), Box<dyn std::error::Error>> {
    let inspection = inspect_pk4_bytes(bytes)?;
    let Some((entry, case_insensitive_match)) = find_entry(&inspection.entries, entry_name) else {
        return Err(format!("pk4 entry not found: {entry_name}").into());
    };
    // Offsets were bounds-checked during inspection, so slicing is safe here.
    let payload = bytes[entry.payload_absolute_offset..entry.payload_end_offset].to_vec();
    let report = Pk4ExtractionReport {
        matched_entry_name: entry.name.clone(),
        case_insensitive_match,
        extracted_len: payload.len(),
        extracted_len_hex: format!("0x{:08x}", payload.len()),
        entry: entry.clone(),
    };
    Ok((report, payload))
}
/// Finds a directory entry by name. An exact match wins outright; otherwise a
/// case-insensitive match is accepted only when it is unambiguous. Returns the
/// entry and whether the match was case-insensitive.
fn find_entry<'a>(entries: &'a [Pk4Entry], requested_name: &str) -> Option<(&'a Pk4Entry, bool)> {
    if let Some(exact) = entries.iter().find(|entry| entry.name == requested_name) {
        return Some((exact, false));
    }
    let mut candidates = entries
        .iter()
        .filter(|entry| entry.name.eq_ignore_ascii_case(requested_name));
    // Exactly one candidate is required; two or more is ambiguous.
    match (candidates.next(), candidates.next()) {
        (Some(only), None) => Some((only, true)),
        _ => None,
    }
}
/// Decodes a NUL-terminated (or NUL-free) entry name as UTF-8.
///
/// # Errors
/// Returns an error for an empty name or invalid UTF-8.
fn parse_name(bytes: &[u8]) -> Result<String, Box<dyn std::error::Error>> {
    // Everything up to the first NUL; the whole slice when no NUL exists.
    let terminator = bytes.iter().position(|byte| *byte == 0).unwrap_or(bytes.len());
    let raw = &bytes[..terminator];
    if raw.is_empty() {
        return Err("empty pk4 entry name".into());
    }
    Ok(String::from_utf8(raw.to_vec())?)
}
/// Renders up to `limit` leading bytes as printable ASCII, substituting '.'
/// for anything outside the space..'~' range.
fn ascii_preview(bytes: &[u8], limit: usize) -> String {
    let mut preview = String::with_capacity(limit.min(bytes.len()));
    for &byte in bytes.iter().take(limit) {
        let shown = if byte.is_ascii_graphic() || byte == b' ' {
            byte as char
        } else {
            '.'
        };
        preview.push(shown);
    }
    preview
}
/// Renders up to `limit` leading bytes as lowercase hex with no separators.
fn hex_preview(bytes: &[u8], limit: usize) -> String {
    use std::fmt::Write;

    // Reserve the exact final length and append in place; the previous
    // format!-per-byte form allocated a temporary String for every byte.
    let mut output = String::with_capacity(2 * limit.min(bytes.len()));
    for byte in bytes.iter().take(limit) {
        write!(output, "{byte:02x}").expect("writing to a String cannot fail");
    }
    output
}
/// Reads a little-endian u32 at `offset`, returning None when the buffer is
/// too short or the offset is out of range.
fn read_u32_le(bytes: &[u8], offset: usize) -> Option<u32> {
    // checked_add avoids a debug-build overflow panic (and release-mode
    // wraparound) when a hostile file yields an offset near usize::MAX.
    let end = offset.checked_add(4)?;
    let slice = bytes.get(offset..end)?;
    Some(u32::from_le_bytes(slice.try_into().expect("slice is exactly 4 bytes")))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Builds one synthetic directory entry with the observed metadata layout:
    // crc32 @0, length @4, relative offset @8, name from byte 13.
    fn build_entry(
        crc32: u32,
        payload_len: u32,
        payload_offset: u32,
        name: &str,
    ) -> [u8; PK4_DIRECTORY_ENTRY_STRIDE] {
        let mut entry = [0u8; PK4_DIRECTORY_ENTRY_STRIDE];
        entry[0..4].copy_from_slice(&crc32.to_le_bytes());
        entry[4..8].copy_from_slice(&payload_len.to_le_bytes());
        entry[8..12].copy_from_slice(&payload_offset.to_le_bytes());
        let name_bytes = name.as_bytes();
        entry[13..13 + name_bytes.len()].copy_from_slice(name_bytes);
        entry
    }
    // Two back-to-back payloads should be reported as contiguous.
    #[test]
    fn inspects_synthetic_pk4_bytes() {
        let mut bytes = Vec::new();
        bytes.extend_from_slice(&PK4_MAGIC.to_le_bytes());
        bytes.extend_from_slice(&(2u32).to_le_bytes());
        bytes.extend_from_slice(&build_entry(0x11223344, 5, 0, "alpha.txt"));
        bytes.extend_from_slice(&build_entry(0x55667788, 4, 5, "beta.dds"));
        bytes.extend_from_slice(b"helloDDS!");
        let report = inspect_pk4_bytes(&bytes).expect("pk4 inspection should succeed");
        assert_eq!(report.entry_count, 2);
        assert_eq!(
            report.payload_base_offset,
            8 + 2 * PK4_DIRECTORY_ENTRY_STRIDE
        );
        assert!(report.payloads_are_contiguous);
        assert_eq!(report.entries[0].name, "alpha.txt");
        assert_eq!(report.entries[0].payload_signature_ascii, "hello");
        assert_eq!(report.entries[1].name, "beta.dds");
        assert_eq!(report.entries[1].payload_signature_ascii, "DDS!");
    }
    // A request that differs only in case must still match (and say so).
    #[test]
    fn extracts_case_insensitive_entry_match() {
        let mut bytes = Vec::new();
        bytes.extend_from_slice(&PK4_MAGIC.to_le_bytes());
        bytes.extend_from_slice(&(1u32).to_le_bytes());
        bytes.extend_from_slice(&build_entry(0x11223344, 5, 0, "Campaign.win"));
        bytes.extend_from_slice(b"HELLO");
        let (report, payload) =
            extract_pk4_entry_bytes(&bytes, "campaign.win").expect("pk4 extraction should succeed");
        assert!(report.case_insensitive_match);
        assert_eq!(report.matched_entry_name, "Campaign.win");
        assert_eq!(payload, b"HELLO");
    }
}

View file

@ -0,0 +1,110 @@
use std::collections::{BTreeMap, BTreeSet};
use serde::{Deserialize, Serialize};
use crate::CalendarPoint;
/// Per-company financial slice of the runtime state.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RuntimeCompany {
    /// Unique id; `RuntimeState::validate` rejects duplicates.
    pub company_id: u32,
    /// Cash on hand; signed, so it may go negative.
    pub current_cash: i64,
    pub debt: u64,
}
/// One event record tracked by the runtime service loop.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RuntimeEventRecord {
    /// Unique id; `RuntimeState::validate` rejects duplicates.
    pub record_id: u32,
    /// Trigger-dispatch kind that services this record.
    pub trigger_kind: u8,
    /// Inactive records are skipped by trigger dispatch.
    pub active: bool,
    /// Times this record has been serviced so far.
    #[serde(default)]
    pub service_count: u32,
    /// When set, servicing this record forces a dirty rerun of the collection.
    #[serde(default)]
    pub marks_collection_dirty: bool,
}
/// Monotonic service-loop counters; all fields default to zero/empty when
/// absent from serialized input.
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct RuntimeServiceState {
    /// Times the periodic-boundary service pass has run.
    #[serde(default)]
    pub periodic_boundary_calls: u64,
    /// Per-trigger-kind dispatch counts, keyed by trigger kind.
    #[serde(default)]
    pub trigger_dispatch_counts: BTreeMap<u8, u64>,
    /// Total individual event-record servicings across all dispatches.
    #[serde(default)]
    pub total_event_record_services: u64,
    /// Times a dirty-marked record forced a rerun dispatch.
    #[serde(default)]
    pub dirty_rerun_count: u64,
}
/// Complete headless runtime state: calendar position plus world flags,
/// companies, event records, and service counters. All collection fields
/// default to empty when absent from serialized input.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RuntimeState {
    /// Current simulated calendar position.
    pub calendar: CalendarPoint,
    /// Named boolean world flags; keys must be non-blank (see `validate`).
    #[serde(default)]
    pub world_flags: BTreeMap<String, bool>,
    #[serde(default)]
    pub companies: Vec<RuntimeCompany>,
    #[serde(default)]
    pub event_runtime_records: Vec<RuntimeEventRecord>,
    #[serde(default)]
    pub service_state: RuntimeServiceState,
}
impl RuntimeState {
    /// Checks structural invariants: a valid calendar point, unique company
    /// and event-record ids, and no blank world-flag keys.
    ///
    /// # Errors
    /// Returns a message describing the first violated invariant.
    pub fn validate(&self) -> Result<(), String> {
        self.calendar.validate()?;
        let mut company_ids = BTreeSet::new();
        for company in &self.companies {
            let is_new = company_ids.insert(company.company_id);
            if !is_new {
                return Err(format!("duplicate company_id {}", company.company_id));
            }
        }
        let mut record_ids = BTreeSet::new();
        for record in &self.event_runtime_records {
            let is_new = record_ids.insert(record.record_id);
            if !is_new {
                return Err(format!("duplicate record_id {}", record.record_id));
            }
        }
        if self.world_flags.keys().any(|key| key.trim().is_empty()) {
            return Err("world_flags contains an empty key".to_string());
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Two companies sharing an id must fail validation.
    #[test]
    fn rejects_duplicate_company_ids() {
        let state = RuntimeState {
            calendar: CalendarPoint {
                year: 1830,
                month_slot: 0,
                phase_slot: 0,
                tick_slot: 0,
            },
            world_flags: BTreeMap::new(),
            companies: vec![
                RuntimeCompany {
                    company_id: 1,
                    current_cash: 100,
                    debt: 0,
                },
                RuntimeCompany {
                    company_id: 1,
                    current_cash: 200,
                    debt: 0,
                },
            ],
            event_runtime_records: Vec::new(),
            service_state: RuntimeServiceState::default(),
        };
        assert!(state.validate().is_err());
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,304 @@
use serde::{Deserialize, Serialize};
use crate::{RuntimeState, RuntimeSummary, calendar::BoundaryEventKind};
const PERIODIC_TRIGGER_KIND_ORDER: [u8; 6] = [1, 0, 3, 2, 5, 4];
/// A single command executed against a `RuntimeState` by `execute_step_command`.
/// Serialized with an external `kind` tag in snake_case.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum StepCommand {
    /// Step the calendar forward until it reaches `calendar` exactly.
    AdvanceTo { calendar: crate::CalendarPoint },
    /// Step the calendar forward a fixed number of ticks (must be > 0).
    StepCount { steps: u32 },
    /// Dispatch one trigger kind without moving the calendar.
    ServiceTriggerKind { trigger_kind: u8 },
    /// Run the full periodic-boundary service pass without moving the calendar.
    ServicePeriodicBoundary,
}
impl StepCommand {
    /// Validates command parameters before execution.
    ///
    /// # Errors
    /// Returns a message when the target calendar is invalid or a step count
    /// of zero is requested.
    pub fn validate(&self) -> Result<(), String> {
        match self {
            Self::AdvanceTo { calendar } => calendar.validate(),
            Self::StepCount { steps: 0 } => {
                Err("step_count command requires steps > 0".to_string())
            }
            Self::StepCount { .. }
            | Self::ServiceTriggerKind { .. }
            | Self::ServicePeriodicBoundary => Ok(()),
        }
    }
}
/// A calendar rollover observed while stepping; plain ticks are not recorded.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BoundaryEvent {
    /// Rollover label: "phase_rollover", "month_rollover", or "year_rollover".
    pub kind: String,
    /// Calendar position immediately after the rollover step.
    pub calendar: crate::CalendarPoint,
}
/// One trigger-dispatch pass over the event records.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ServiceEvent {
    /// Event label; currently always "trigger_dispatch".
    pub kind: String,
    /// Trigger kind that was dispatched, when applicable.
    pub trigger_kind: Option<u8>,
    /// Ids of the records serviced by this dispatch, in record order.
    pub serviced_record_ids: Vec<u32>,
    /// True when a dirty-marked record forced a follow-up rerun dispatch.
    pub dirty_rerun: bool,
}
/// Outcome of executing one `StepCommand`: before/after summaries plus every
/// boundary and service event produced along the way.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct StepResult {
    /// Summary captured before the command ran.
    pub initial_summary: RuntimeSummary,
    /// Summary captured after the command ran.
    pub final_summary: RuntimeSummary,
    /// Calendar ticks advanced; zero for pure service commands.
    pub steps_executed: u64,
    pub boundary_events: Vec<BoundaryEvent>,
    pub service_events: Vec<ServiceEvent>,
}
/// Executes one command against `state`, returning before/after summaries and
/// the boundary/service events it produced.
///
/// # Errors
/// Returns a message when the state or command fails validation, or when an
/// `advance_to` target lies in the past.
pub fn execute_step_command(
    state: &mut RuntimeState,
    command: &StepCommand,
) -> Result<StepResult, String> {
    // Reject bad input before touching any counters.
    state.validate()?;
    command.validate()?;
    let initial_summary = RuntimeSummary::from_state(state);
    let mut boundary_events = Vec::new();
    let mut service_events = Vec::new();
    let steps_executed = match command {
        StepCommand::ServicePeriodicBoundary => {
            service_periodic_boundary(state, &mut service_events);
            0
        }
        StepCommand::ServiceTriggerKind { trigger_kind } => {
            service_trigger_kind(state, *trigger_kind, &mut service_events);
            0
        }
        StepCommand::StepCount { steps } => step_count(state, *steps, &mut boundary_events),
        StepCommand::AdvanceTo { calendar } => {
            advance_to_target_calendar_point(state, *calendar, &mut boundary_events)?
        }
    };
    Ok(StepResult {
        initial_summary,
        final_summary: RuntimeSummary::from_state(state),
        steps_executed,
        boundary_events,
        service_events,
    })
}
/// Steps the calendar forward one tick at a time until it reaches `target`,
/// recording rollover boundary events and returning the number of steps taken.
///
/// # Errors
/// Returns a message when `target` is invalid or earlier than the current
/// calendar position.
fn advance_to_target_calendar_point(
    state: &mut RuntimeState,
    target: crate::CalendarPoint,
    boundary_events: &mut Vec<BoundaryEvent>,
) -> Result<u64, String> {
    target.validate()?;
    if state.calendar > target {
        return Err(format!(
            "advance_to target {:?} is earlier than current calendar {:?}",
            target, state.calendar
        ));
    }
    let mut steps_taken = 0_u64;
    while state.calendar < target {
        step_once(state, boundary_events);
        steps_taken += 1;
    }
    Ok(steps_taken)
}
/// Advances the calendar exactly `steps` ticks, recording rollover boundary
/// events, and returns the step count widened to u64.
fn step_count(
    state: &mut RuntimeState,
    steps: u32,
    boundary_events: &mut Vec<BoundaryEvent>,
) -> u64 {
    (0..steps).for_each(|_| step_once(state, boundary_events));
    u64::from(steps)
}
/// Advances the calendar a single tick, recording a boundary event for any
/// rollover; plain ticks are not recorded.
fn step_once(state: &mut RuntimeState, boundary_events: &mut Vec<BoundaryEvent>) {
    let boundary = state.calendar.step_forward();
    if matches!(boundary, BoundaryEventKind::Tick) {
        return;
    }
    boundary_events.push(BoundaryEvent {
        kind: boundary_kind_label(boundary).to_string(),
        calendar: state.calendar,
    });
}
/// Maps a boundary event kind to its stable snake_case label.
fn boundary_kind_label(boundary: BoundaryEventKind) -> &'static str {
    match boundary {
        BoundaryEventKind::YearRollover => "year_rollover",
        BoundaryEventKind::MonthRollover => "month_rollover",
        BoundaryEventKind::PhaseRollover => "phase_rollover",
        BoundaryEventKind::Tick => "tick",
    }
}
/// Runs the full periodic-boundary service pass: counts the boundary itself,
/// then dispatches every periodic trigger kind in the fixed engine order.
fn service_periodic_boundary(state: &mut RuntimeState, service_events: &mut Vec<ServiceEvent>) {
    state.service_state.periodic_boundary_calls += 1;
    PERIODIC_TRIGGER_KIND_ORDER
        .into_iter()
        .for_each(|trigger_kind| service_trigger_kind(state, trigger_kind, service_events));
}
/// Dispatches one trigger kind against every active matching event record.
///
/// Increments the per-kind dispatch counter, bumps each serviced record's
/// `service_count`, and appends one `trigger_dispatch` event. When any
/// serviced record has `marks_collection_dirty` set — and the kind being
/// dispatched is not the rerun kind 0x0a itself — a single rerun dispatch of
/// kind 0x0a follows. The rerun cannot cascade: inside the 0x0a dispatch the
/// `trigger_kind != 0x0a` guard keeps `dirty_rerun` false, so recursion depth
/// is at most one.
fn service_trigger_kind(
    state: &mut RuntimeState,
    trigger_kind: u8,
    service_events: &mut Vec<ServiceEvent>,
) {
    let mut serviced_record_ids = Vec::new();
    let mut dirty_rerun = false;
    // Count the dispatch itself, even when no record matches.
    *state
        .service_state
        .trigger_dispatch_counts
        .entry(trigger_kind)
        .or_insert(0) += 1;
    for record in &mut state.event_runtime_records {
        if record.active && record.trigger_kind == trigger_kind {
            record.service_count += 1;
            serviced_record_ids.push(record.record_id);
            state.service_state.total_event_record_services += 1;
            if trigger_kind != 0x0a && record.marks_collection_dirty {
                dirty_rerun = true;
            }
        }
    }
    // The event is recorded before any rerun so events appear in dispatch order.
    service_events.push(ServiceEvent {
        kind: "trigger_dispatch".to_string(),
        trigger_kind: Some(trigger_kind),
        serviced_record_ids,
        dirty_rerun,
    });
    if dirty_rerun {
        state.service_state.dirty_rerun_count += 1;
        service_trigger_kind(state, 0x0a, service_events);
    }
}
#[cfg(test)]
mod tests {
    use std::collections::BTreeMap;
    use super::*;
    use crate::{CalendarPoint, RuntimeCompany, RuntimeEventRecord, RuntimeServiceState};
    // Minimal valid state with one company and no event records.
    fn state() -> RuntimeState {
        RuntimeState {
            calendar: CalendarPoint {
                year: 1830,
                month_slot: 0,
                phase_slot: 0,
                tick_slot: 0,
            },
            world_flags: BTreeMap::new(),
            companies: vec![RuntimeCompany {
                company_id: 1,
                current_cash: 10,
                debt: 0,
            }],
            event_runtime_records: Vec::new(),
            service_state: RuntimeServiceState::default(),
        }
    }
    // advance_to walks tick-by-tick until the target is reached exactly.
    #[test]
    fn advances_to_target() {
        let mut state = state();
        let result = execute_step_command(
            &mut state,
            &StepCommand::AdvanceTo {
                calendar: CalendarPoint {
                    year: 1830,
                    month_slot: 0,
                    phase_slot: 0,
                    tick_slot: 5,
                },
            },
        )
        .expect("advance_to should succeed");
        assert_eq!(result.steps_executed, 5);
        assert_eq!(state.calendar.tick_slot, 5);
    }
    // Targets earlier than the current calendar are rejected, not rewound.
    #[test]
    fn rejects_backward_target() {
        let mut state = state();
        state.calendar.tick_slot = 3;
        let result = execute_step_command(
            &mut state,
            &StepCommand::AdvanceTo {
                calendar: CalendarPoint {
                    year: 1830,
                    month_slot: 0,
                    phase_slot: 0,
                    tick_slot: 2,
                },
            },
        );
        assert!(result.is_err());
    }
    // A periodic boundary services all matching kinds, and record 1's dirty
    // mark forces exactly one rerun dispatch of kind 0x0a (servicing record 3).
    #[test]
    fn services_periodic_trigger_order_and_dirty_rerun() {
        let mut state = RuntimeState {
            event_runtime_records: vec![
                RuntimeEventRecord {
                    record_id: 1,
                    trigger_kind: 1,
                    active: true,
                    service_count: 0,
                    marks_collection_dirty: true,
                },
                RuntimeEventRecord {
                    record_id: 2,
                    trigger_kind: 4,
                    active: true,
                    service_count: 0,
                    marks_collection_dirty: false,
                },
                RuntimeEventRecord {
                    record_id: 3,
                    trigger_kind: 0x0a,
                    active: true,
                    service_count: 0,
                    marks_collection_dirty: false,
                },
            ],
            ..state()
        };
        let result = execute_step_command(&mut state, &StepCommand::ServicePeriodicBoundary)
            .expect("periodic boundary service should succeed");
        assert_eq!(result.steps_executed, 0);
        assert_eq!(state.service_state.periodic_boundary_calls, 1);
        assert_eq!(state.service_state.total_event_record_services, 3);
        assert_eq!(state.service_state.dirty_rerun_count, 1);
        assert_eq!(state.event_runtime_records[0].service_count, 1);
        assert_eq!(state.event_runtime_records[1].service_count, 1);
        assert_eq!(state.event_runtime_records[2].service_count, 1);
        assert_eq!(
            state.service_state.trigger_dispatch_counts.get(&1),
            Some(&1)
        );
        assert_eq!(
            state.service_state.trigger_dispatch_counts.get(&4),
            Some(&1)
        );
        assert_eq!(
            state.service_state.trigger_dispatch_counts.get(&0x0a),
            Some(&1)
        );
    }
}

View file

@ -0,0 +1,40 @@
use serde::{Deserialize, Serialize};
use crate::{CalendarPoint, RuntimeState};
/// Aggregate counts derived from a `RuntimeState`; see `from_state`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RuntimeSummary {
    pub calendar: CalendarPoint,
    pub world_flag_count: usize,
    pub company_count: usize,
    pub event_runtime_record_count: usize,
    /// Mirror of `service_state.total_event_record_services`.
    pub total_event_record_service_count: u64,
    /// Mirror of `service_state.periodic_boundary_calls`.
    pub periodic_boundary_call_count: u64,
    /// Sum over all per-kind trigger dispatch counts.
    pub total_trigger_dispatch_count: u64,
    pub dirty_rerun_count: u64,
    /// Sum of `current_cash` across companies; may be negative.
    pub total_company_cash: i64,
}
impl RuntimeSummary {
    /// Derives all aggregate counts from `state` without mutating it.
    pub fn from_state(state: &RuntimeState) -> Self {
        let service = &state.service_state;
        let dispatch_total: u64 = service.trigger_dispatch_counts.values().sum();
        let cash_total: i64 = state
            .companies
            .iter()
            .map(|company| company.current_cash)
            .sum();
        Self {
            calendar: state.calendar,
            world_flag_count: state.world_flags.len(),
            company_count: state.companies.len(),
            event_runtime_record_count: state.event_runtime_records.len(),
            total_event_record_service_count: service.total_event_record_services,
            periodic_boundary_call_count: service.periodic_boundary_calls,
            total_trigger_dispatch_count: dispatch_total,
            dirty_rerun_count: service.dirty_rerun_count,
            total_company_cash: cash_total,
        }
    }
}

View file

@ -0,0 +1,547 @@
use std::fs;
use std::path::Path;
use serde::{Deserialize, Serialize};
/// Length of the fixed header shared by observed .win blobs.
const WIN_COMMON_HEADER_LEN: usize = 0x50;
/// Offset immediately after the common header, where an inline root resource
/// name may appear.
const WIN_INLINE_RESOURCE_OFFSET: usize = 0x50;
/// One little-endian u32 read from a fixed offset, reported both numerically
/// and pre-formatted as hex text.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WinHeaderWord {
    pub offset: usize,
    pub offset_hex: String,
    pub value: u32,
    pub value_hex: String,
}
/// An embedded `.imb` resource-name string found in the blob.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WinResourceReference {
    /// Byte offset where the name was found.
    pub offset: usize,
    pub offset_hex: String,
    pub name: String,
}
/// Histogram bucket: how many resource-record samples share a given byte
/// delta from their predecessor.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WinReferenceDeltaFrequency {
    pub delta: usize,
    pub delta_hex: String,
    pub count: usize,
}
/// A sampled resource record: its name, spacing from the previous record, the
/// words preceding the name, and the two words following it (with the first
/// split into u16 lanes). Field semantics beyond this split are provisional —
/// the inspector notes that full control-record semantics are not decoded.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WinResourceRecordSample {
    pub offset: usize,
    pub offset_hex: String,
    pub name: String,
    /// Byte distance from the previous sampled record, when one exists.
    pub delta_from_previous: Option<usize>,
    pub delta_from_previous_hex: Option<String>,
    /// Words read immediately before the name.
    pub prelude_words: Vec<WinHeaderWord>,
    pub post_name_word_0: u32,
    pub post_name_word_0_hex: String,
    pub post_name_word_0_high_u16: u16,
    pub post_name_word_0_high_u16_hex: String,
    pub post_name_word_0_low_u16: u16,
    pub post_name_word_0_low_u16_hex: String,
    pub post_name_word_1: u32,
    pub post_name_word_1_hex: String,
}
/// A named record with its post-name words split into u16 "selector" lanes.
/// NOTE(review): lane names describe bit position only; their meanings are
/// provisional (the inspector does not decode control-record semantics).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WinResourceSelectorRecord {
    pub offset: usize,
    pub offset_hex: String,
    pub name: String,
    pub post_name_word_0: u32,
    pub post_name_word_0_hex: String,
    pub selector_high_u16: u16,
    pub selector_high_u16_hex: String,
    pub selector_low_u16: u16,
    pub selector_low_u16_hex: String,
    pub post_name_word_1: u32,
    pub post_name_word_1_hex: String,
    pub post_name_word_1_high_u16: u16,
    pub post_name_word_1_high_u16_hex: String,
    pub post_name_word_1_middle_u16: u16,
    pub post_name_word_1_middle_u16_hex: String,
    pub post_name_word_1_low_u16: u16,
    pub post_name_word_1_low_u16_hex: String,
}
/// A selector-shaped record with no name of its own, located between two
/// named records (both neighbors recorded when present). Word/lane labels
/// describe position only; semantics are provisional.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WinAnonymousSelectorRecord {
    pub record_offset: usize,
    pub record_offset_hex: String,
    /// Nearest named record before this one, when any.
    pub preceding_named_record_name: Option<String>,
    pub preceding_named_record_offset_hex: Option<String>,
    /// Nearest named record after this one, when any.
    pub following_named_record_name: Option<String>,
    pub following_named_record_offset_hex: Option<String>,
    pub selector_word_0: u32,
    pub selector_word_0_hex: String,
    pub selector_word_0_high_u16: u16,
    pub selector_word_0_high_u16_hex: String,
    pub selector_word_0_low_u16: u16,
    pub selector_word_0_low_u16_hex: String,
    pub selector_word_1: u32,
    pub selector_word_1_hex: String,
    pub selector_word_1_middle_u16: u16,
    pub selector_word_1_middle_u16_hex: String,
    pub body_word_0: u32,
    pub body_word_0_hex: String,
    pub body_word_1: u32,
    pub body_word_1_hex: String,
    pub body_word_2: u32,
    pub body_word_2_hex: String,
    pub body_word_3: u32,
    pub body_word_3_hex: String,
    pub footer_word_0: u32,
    pub footer_word_0_hex: String,
    pub footer_word_1: u32,
    pub footer_word_1_hex: String,
}
/// Result of inspecting a .win window-resource blob: the shared 0x50-byte
/// header, embedded `.imb` references, record samples, and selector records.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WinInspectionReport {
    pub file_size: usize,
    /// Always `WIN_COMMON_HEADER_LEN` (0x50); echoed for report readers.
    pub common_header_len: usize,
    pub common_header_len_hex: String,
    /// The twenty u32 words of the common header.
    pub shared_header_words: Vec<WinHeaderWord>,
    /// True when specific header words match the signature observed across
    /// known .win files (see the notes in the report).
    pub matches_observed_common_signature: bool,
    /// Hex prelude prefix shared by all record samples, when one exists.
    pub common_resource_record_prelude_prefix_words: Option<Vec<String>>,
    /// How many samples have prelude word 3 equal to name length + NUL.
    pub name_len_matches_prelude_word_3_plus_nul_count: usize,
    /// Root resource name found right after the header, when present.
    pub inline_root_resource_name: Option<String>,
    pub inline_root_resource_offset: Option<usize>,
    pub inline_root_resource_offset_hex: Option<String>,
    pub imb_reference_count: usize,
    pub unique_imb_reference_count: usize,
    /// Distinct reference names in first-seen order.
    pub unique_imb_references: Vec<String>,
    pub dominant_reference_deltas: Vec<WinReferenceDeltaFrequency>,
    pub resource_selector_records: Vec<WinResourceSelectorRecord>,
    pub anonymous_selector_records: Vec<WinAnonymousSelectorRecord>,
    /// At most the first 32 record samples.
    pub first_resource_record_samples: Vec<WinResourceRecordSample>,
    /// At most the first 32 embedded references.
    pub first_imb_references: Vec<WinResourceReference>,
    pub notes: Vec<String>,
}
/// Reads `path` into memory and inspects it as a .win window-resource blob.
///
/// # Errors
/// Returns an error on I/O failure or when the blob is shorter than the
/// common header.
pub fn inspect_win_file(path: &Path) -> Result<WinInspectionReport, Box<dyn std::error::Error>> {
    inspect_win_bytes(&fs::read(path)?)
}
/// Inspects an in-memory .win blob: reads the 0x50-byte common header,
/// compares it against the signature observed in known files, scans for
/// embedded `.imb` references, and builds record/selector samples.
///
/// # Errors
/// Returns an error only when the blob is shorter than the common header;
/// everything after that is best-effort reporting.
pub fn inspect_win_bytes(bytes: &[u8]) -> Result<WinInspectionReport, Box<dyn std::error::Error>> {
    if bytes.len() < WIN_COMMON_HEADER_LEN {
        return Err(format!(
            "window resource is too short for the observed common header: {} < 0x{WIN_COMMON_HEADER_LEN:x}",
            bytes.len()
        )
        .into());
    }
    // All twenty u32 slots of the 0x50-byte common header.
    let header_offsets = [
        0x00usize, 0x04, 0x08, 0x0c, 0x10, 0x14, 0x18, 0x1c, 0x20, 0x24, 0x28, 0x2c, 0x30, 0x34,
        0x38, 0x3c, 0x40, 0x44, 0x48, 0x4c,
    ];
    let shared_header_words = header_offsets
        .iter()
        .map(|offset| {
            // Safe: the length check above guarantees the header is present.
            let value = read_u32_le(bytes, *offset).expect("validated common header length")
            WinHeaderWord {
                offset: *offset,
                offset_hex: format!("0x{offset:02x}"),
                value,
                value_hex: format!("0x{value:08x}"),
            }
        })
        .collect::<Vec<_>>();
    // Specific word values observed across known .win files; divergence is
    // reported in the notes rather than treated as an error.
    let matches_observed_common_signature = read_u32_le(bytes, 0x00) == Some(0x0000_07d0)
        && read_u32_le(bytes, 0x04) == Some(0)
        && read_u32_le(bytes, 0x08) == Some(0)
        && read_u32_le(bytes, 0x0c) == Some(0x8000_0000)
        && read_u32_le(bytes, 0x10) == Some(0x8000_003f)
        && read_u32_le(bytes, 0x14) == Some(0x0000_003f)
        && read_u32_le(bytes, 0x34) == Some(0x0007_d100)
        && read_u32_le(bytes, 0x38) == Some(0x0007_d200)
        && read_u32_le(bytes, 0x40) == Some(0x000b_b800)
        && read_u32_le(bytes, 0x48) == Some(0x000b_b900);
    // An inline root resource name may sit directly after the header.
    let inline_root_resource_name = parse_inline_ascii_name(bytes, WIN_INLINE_RESOURCE_OFFSET);
    let inline_root_resource_offset = inline_root_resource_name
        .as_ref()
        .map(|_| WIN_INLINE_RESOURCE_OFFSET + 1);
    let inline_root_resource_offset_hex =
        inline_root_resource_offset.map(|offset| format!("0x{offset:04x}"));
    let all_imb_references = collect_imb_references(bytes);
    let resource_record_samples = build_resource_record_samples(bytes, &all_imb_references);
    let resource_selector_records = build_resource_selector_records(&resource_record_samples);
    let anonymous_selector_records = collect_anonymous_selector_records(bytes, &all_imb_references);
    let common_resource_record_prelude_prefix_words =
        shared_prelude_prefix_hex(&resource_record_samples);
    // Count samples whose prelude word 3 equals name length + NUL terminator.
    let name_len_matches_prelude_word_3_plus_nul_count = resource_record_samples
        .iter()
        .filter(|sample| {
            sample.prelude_words.len() == 4
                && sample.prelude_words[3].value == (sample.name.len() as u32 + 1)
        })
        .count();
    // Deduplicate reference names while preserving first-seen order.
    let mut unique_imb_references = Vec::new();
    for reference in &all_imb_references {
        if !unique_imb_references.contains(&reference.name) {
            unique_imb_references.push(reference.name.clone());
        }
    }
    let mut notes = Vec::new();
    if matches_observed_common_signature {
        notes.push(
            "Header matches the observed shared .win signature seen in Campaign.win, CompanyDetail.win, and setup.win."
                .to_string(),
        );
    } else {
        notes.push(
            "Header diverges from the currently observed shared .win signature; treat field meanings as provisional."
                .to_string(),
        );
    }
    if inline_root_resource_name.is_some() {
        notes.push(
            "The blob carries an inline root .imb resource name immediately after the common 0x50-byte header."
                .to_string(),
        );
    } else {
        notes.push(
            "No inline root .imb resource name appears at 0x50; this window likely starts directly with control records."
                .to_string(),
        );
    }
    notes.push(
        "Embedded .imb strings are reported as resource references with selector lanes; this inspector still does not decode full control record semantics."
            .to_string(),
    );
    Ok(WinInspectionReport {
        file_size: bytes.len(),
        common_header_len: WIN_COMMON_HEADER_LEN,
        common_header_len_hex: format!("0x{WIN_COMMON_HEADER_LEN:02x}"),
        shared_header_words,
        matches_observed_common_signature,
        common_resource_record_prelude_prefix_words,
        name_len_matches_prelude_word_3_plus_nul_count,
        inline_root_resource_name,
        inline_root_resource_offset,
        inline_root_resource_offset_hex,
        imb_reference_count: all_imb_references.len(),
        unique_imb_reference_count: unique_imb_references.len(),
        unique_imb_references,
        dominant_reference_deltas: build_delta_histogram(&resource_record_samples),
        resource_selector_records,
        anonymous_selector_records,
        first_resource_record_samples: resource_record_samples.into_iter().take(32).collect(),
        first_imb_references: all_imb_references.into_iter().take(32).collect(),
        notes,
    })
}
/// Scans the whole blob for `.imb` resource-name strings.
///
/// Every byte position is probed with [`parse_imb_reference_at`] (which
/// itself requires the name to sit at offset 0 or directly after a NUL
/// byte); hits are returned in ascending offset order, duplicates included.
fn collect_imb_references(bytes: &[u8]) -> Vec<WinResourceReference> {
    (0..bytes.len())
        .filter_map(|position| {
            parse_imb_reference_at(bytes, position).map(|name| WinResourceReference {
                offset: position,
                offset_hex: format!("0x{position:04x}"),
                name,
            })
        })
        .collect()
}
/// Expands each raw `.imb` reference into a richer sample capturing the bytes
/// around the name: the four 32-bit words immediately preceding it, the byte
/// distance from the previous reference, and the two 32-bit words that follow
/// the name's NUL terminator.
fn build_resource_record_samples(
    bytes: &[u8],
    references: &[WinResourceReference],
) -> Vec<WinResourceRecordSample> {
    let mut samples = Vec::with_capacity(references.len());
    for (index, reference) in references.iter().enumerate() {
        // References arrive in ascending offset order (collect_imb_references
        // scans forward), so the subtraction below cannot underflow.
        let previous_offset = index
            .checked_sub(1)
            .and_then(|previous| references.get(previous))
            .map(|previous| previous.offset);
        let delta_from_previous = previous_offset.map(|previous| reference.offset - previous);
        let delta_from_previous_hex = delta_from_previous.map(|delta| format!("0x{delta:x}"));
        // Four little-endian words immediately before the name; skipped when
        // the name sits too close to the start of the blob to have a full
        // 16-byte prelude. Out-of-range reads degrade to 0 via unwrap_or.
        let prelude_words = if reference.offset >= 16 {
            (0..4)
                .map(|index| {
                    let offset = reference.offset - 16 + index * 4;
                    let value = read_u32_le(bytes, offset).unwrap_or(0);
                    WinHeaderWord {
                        offset,
                        offset_hex: format!("0x{offset:04x}"),
                        value,
                        value_hex: format!("0x{value:08x}"),
                    }
                })
                .collect()
        } else {
            Vec::new()
        };
        // name_end points at the name's NUL terminator; the two sampled words
        // start one byte past it (name_end + 1 and name_end + 5).
        let name_end = reference.offset + reference.name.len();
        let post_name_word_0 = read_u32_le(bytes, name_end + 1).unwrap_or(0);
        let post_name_word_1 = read_u32_le(bytes, name_end + 5).unwrap_or(0);
        // Split word 0 into its two 16-bit halves for the selector-lane view.
        let post_name_word_0_high_u16 = ((post_name_word_0 >> 16) & 0xffff) as u16;
        let post_name_word_0_low_u16 = (post_name_word_0 & 0xffff) as u16;
        samples.push(WinResourceRecordSample {
            offset: reference.offset,
            offset_hex: reference.offset_hex.clone(),
            name: reference.name.clone(),
            delta_from_previous,
            delta_from_previous_hex,
            prelude_words,
            post_name_word_0,
            post_name_word_0_hex: format!("0x{post_name_word_0:08x}"),
            post_name_word_0_high_u16,
            post_name_word_0_high_u16_hex: format!("0x{post_name_word_0_high_u16:04x}"),
            post_name_word_0_low_u16,
            post_name_word_0_low_u16_hex: format!("0x{post_name_word_0_low_u16:04x}"),
            post_name_word_1,
            post_name_word_1_hex: format!("0x{post_name_word_1:08x}"),
        });
    }
    samples
}
/// Builds a frequency table of the byte distances between consecutive `.imb`
/// references, keeping at most the twelve most common deltas (ties broken by
/// the smaller delta).
fn build_delta_histogram(samples: &[WinResourceRecordSample]) -> Vec<WinReferenceDeltaFrequency> {
    let mut histogram = std::collections::BTreeMap::<usize, usize>::new();
    for sample in samples {
        if let Some(delta) = sample.delta_from_previous {
            *histogram.entry(delta).or_default() += 1;
        }
    }
    let mut ranked: Vec<WinReferenceDeltaFrequency> = histogram
        .into_iter()
        .map(|(delta, count)| WinReferenceDeltaFrequency {
            delta,
            delta_hex: format!("0x{delta:x}"),
            count,
        })
        .collect();
    // Highest count first; equal counts fall back to ascending delta so the
    // ordering is fully deterministic.
    ranked.sort_by(|a, b| b.count.cmp(&a.count).then_with(|| a.delta.cmp(&b.delta)));
    ranked.truncate(12);
    ranked
}
/// Projects each record sample into the selector-lane view: the two 16-bit
/// halves of post-name word 0 (already computed on the sample) plus three
/// overlapping 16-bit lanes carved out of post-name word 1.
fn build_resource_selector_records(
    samples: &[WinResourceRecordSample],
) -> Vec<WinResourceSelectorRecord> {
    let mut records = Vec::with_capacity(samples.len());
    for sample in samples {
        let word_1 = sample.post_name_word_1;
        let word_1_high = ((word_1 >> 16) & 0xffff) as u16;
        // Bits 8..24: a 16-bit lane straddling the byte boundary — matches the
        // lane used by the anonymous-record decoder; presumably intentional.
        let word_1_middle = ((word_1 >> 8) & 0xffff) as u16;
        let word_1_low = (word_1 & 0xffff) as u16;
        records.push(WinResourceSelectorRecord {
            offset: sample.offset,
            offset_hex: sample.offset_hex.clone(),
            name: sample.name.clone(),
            post_name_word_0: sample.post_name_word_0,
            post_name_word_0_hex: sample.post_name_word_0_hex.clone(),
            selector_high_u16: sample.post_name_word_0_high_u16,
            selector_high_u16_hex: sample.post_name_word_0_high_u16_hex.clone(),
            selector_low_u16: sample.post_name_word_0_low_u16,
            selector_low_u16_hex: sample.post_name_word_0_low_u16_hex.clone(),
            post_name_word_1: word_1,
            post_name_word_1_hex: sample.post_name_word_1_hex.clone(),
            post_name_word_1_high_u16: word_1_high,
            post_name_word_1_high_u16_hex: format!("0x{word_1_high:04x}"),
            post_name_word_1_middle_u16: word_1_middle,
            post_name_word_1_middle_u16_hex: format!("0x{word_1_middle:04x}"),
            post_name_word_1_low_u16: word_1_low,
            post_name_word_1_low_u16_hex: format!("0x{word_1_low:04x}"),
        });
    }
    records
}
/// Scans the blob for "anonymous" selector records: records opened by the
/// fixed `PRELUDE` byte signature whose name-length word is zero and whose
/// selector lane falls in the observed 0xc352..=0xc39b range. Each hit is
/// paired with the nearest named `.imb` reference before and after it for
/// orientation.
fn collect_anonymous_selector_records(
    bytes: &[u8],
    references: &[WinResourceReference],
) -> Vec<WinAnonymousSelectorRecord> {
    // Opening signature: the little-endian words 0x0000_0bb8 (3000),
    // 0x0000_0000, 0x0000_0bb9 (3001). Observed in captured .win blobs;
    // the words' meaning is unconfirmed.
    const PRELUDE: [u8; 12] = [
        0xb8, 0x0b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xb9, 0x0b, 0x00, 0x00,
    ];
    let mut records = Vec::new();
    let mut start = 0usize;
    // Each search resumes one byte past the previous hit (see the bottom of
    // the loop), so overlapping occurrences are not missed.
    while let Some(relative) = bytes
        .get(start..)
        .and_then(|slice| slice.windows(PRELUDE.len()).position(|window| window == PRELUDE))
    {
        let record_offset = start + relative;
        // Word directly after the prelude holds the inline name length; zero
        // marks an anonymous record. Truncated reads degrade to 0 and are
        // therefore treated as anonymous too.
        let name_len = read_u32_le(bytes, record_offset + PRELUDE.len()).unwrap_or(0);
        if name_len == 0 {
            let selector_word_0 = read_u32_le(bytes, record_offset + 0x10).unwrap_or(0);
            let selector_word_0_low_u16 = (selector_word_0 & 0xffff) as u16;
            // Range derived empirically from observed blobs; records outside
            // it are assumed to be unrelated data — TODO confirm.
            if (0xc352..=0xc39b).contains(&selector_word_0_low_u16) {
                // Linear scans are fine here: references are in ascending
                // offset order and anonymous hits are rare.
                let preceding_named_record =
                    references.iter().rev().find(|reference| reference.offset < record_offset);
                let following_named_record =
                    references.iter().find(|reference| reference.offset > record_offset);
                // Fixed offsets below (0x14, 0x18..0x24, 0x98/0x9c) describe
                // the provisional record layout inferred from observed blobs:
                // a second selector word, four body words, two footer words.
                let selector_word_1 = read_u32_le(bytes, record_offset + 0x14).unwrap_or(0);
                let selector_word_0_high_u16 = ((selector_word_0 >> 16) & 0xffff) as u16;
                // Bits 8..24 of word 1 — the byte-straddling 16-bit lane.
                let selector_word_1_middle_u16 = ((selector_word_1 >> 8) & 0xffff) as u16;
                let body_word_0 = read_u32_le(bytes, record_offset + 0x18).unwrap_or(0);
                let body_word_1 = read_u32_le(bytes, record_offset + 0x1c).unwrap_or(0);
                let body_word_2 = read_u32_le(bytes, record_offset + 0x20).unwrap_or(0);
                let body_word_3 = read_u32_le(bytes, record_offset + 0x24).unwrap_or(0);
                let footer_word_0 = read_u32_le(bytes, record_offset + 0x98).unwrap_or(0);
                let footer_word_1 = read_u32_le(bytes, record_offset + 0x9c).unwrap_or(0);
                records.push(WinAnonymousSelectorRecord {
                    record_offset,
                    record_offset_hex: format!("0x{record_offset:04x}"),
                    preceding_named_record_name: preceding_named_record
                        .map(|record| record.name.clone()),
                    preceding_named_record_offset_hex: preceding_named_record
                        .map(|record| record.offset_hex.clone()),
                    following_named_record_name: following_named_record
                        .map(|record| record.name.clone()),
                    following_named_record_offset_hex: following_named_record
                        .map(|record| record.offset_hex.clone()),
                    selector_word_0,
                    selector_word_0_hex: format!("0x{selector_word_0:08x}"),
                    selector_word_0_high_u16,
                    selector_word_0_high_u16_hex: format!("0x{selector_word_0_high_u16:04x}"),
                    selector_word_0_low_u16,
                    selector_word_0_low_u16_hex: format!("0x{selector_word_0_low_u16:04x}"),
                    selector_word_1,
                    selector_word_1_hex: format!("0x{selector_word_1:08x}"),
                    selector_word_1_middle_u16,
                    selector_word_1_middle_u16_hex: format!("0x{selector_word_1_middle_u16:04x}"),
                    body_word_0,
                    body_word_0_hex: format!("0x{body_word_0:08x}"),
                    body_word_1,
                    body_word_1_hex: format!("0x{body_word_1:08x}"),
                    body_word_2,
                    body_word_2_hex: format!("0x{body_word_2:08x}"),
                    body_word_3,
                    body_word_3_hex: format!("0x{body_word_3:08x}"),
                    footer_word_0,
                    footer_word_0_hex: format!("0x{footer_word_0:08x}"),
                    footer_word_1,
                    footer_word_1_hex: format!("0x{footer_word_1:08x}"),
                });
            }
        }
        start = record_offset + 1;
    }
    records
}
/// Returns the first three 32-bit prelude words formatted as hex strings when
/// *every* sample shares them, or `None` when the samples disagree, the list
/// is empty, or the first sample has fewer than three prelude words.
///
/// Unlike a per-sample `collect`-and-compare, this compares elements in place
/// so no temporary `Vec` is allocated for each sample.
fn shared_prelude_prefix_hex(samples: &[WinResourceRecordSample]) -> Option<Vec<String>> {
    let first = samples.first()?;
    // `get(..3)` doubles as the "at least three words" guard.
    let prefix = first.prelude_words.get(..3)?;
    let shared = samples.iter().all(|sample| {
        sample.prelude_words.len() >= 3
            && sample.prelude_words[..3]
                .iter()
                .zip(prefix)
                .all(|(word, expected)| word.value == expected.value)
    });
    if shared {
        Some(
            prefix
                .iter()
                .map(|word| format!("0x{:08x}", word.value))
                .collect(),
        )
    } else {
        None
    }
}
/// Attempts to read a NUL-terminated `.imb` resource name starting at
/// `offset`.
///
/// Accepts only names that (a) sit at the blob start or directly after a NUL
/// byte, (b) are at least five bytes long (one character plus ".imb"),
/// (c) end with `.imb`, and (d) use only ASCII alphanumerics, `_`, `-`, `.`,
/// or space. Returns `None` otherwise, including when no NUL terminator is
/// found before the end of the blob.
fn parse_imb_reference_at(bytes: &[u8], offset: usize) -> Option<String> {
    // Guard: the byte before the candidate must be a NUL separator.
    if offset > 0 && *bytes.get(offset - 1)? != 0 {
        return None;
    }
    let tail = bytes.get(offset..)?;
    let terminator = tail.iter().position(|byte| *byte == 0)?;
    let candidate = &tail[..terminator];
    if candidate.len() < 5 {
        return None;
    }
    let name = std::str::from_utf8(candidate).ok()?;
    let is_allowed =
        |byte: u8| byte.is_ascii_alphanumeric() || matches!(byte, b'_' | b'-' | b'.' | b' ');
    if name.ends_with(".imb") && name.bytes().all(is_allowed) {
        Some(name.to_string())
    } else {
        None
    }
}
/// Reads an inline resource name introduced by a single NUL byte at `offset`;
/// the name itself starts at `offset + 1`. Returns `None` when the prefix
/// byte is missing or non-zero, or when no valid `.imb` name follows.
fn parse_inline_ascii_name(bytes: &[u8], offset: usize) -> Option<String> {
    match bytes.get(offset) {
        Some(0) => parse_imb_reference_at(bytes, offset + 1),
        _ => None,
    }
}
/// Reads a little-endian `u32` at `offset`, returning `None` when the four
/// bytes are not fully inside `bytes`.
///
/// Uses `checked_add` so that an `offset` near `usize::MAX` yields `None`
/// instead of panicking on arithmetic overflow in debug builds.
fn read_u32_le(bytes: &[u8], offset: usize) -> Option<u32> {
    let end = offset.checked_add(4)?;
    let word: [u8; 4] = bytes.get(offset..end)?.try_into().ok()?;
    Some(u32::from_le_bytes(word))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Builds a minimal synthetic .win blob that exercises the three features
    // the inspector reports on: the shared header signature, an inline root
    // resource name at 0x50, and one additional embedded .imb reference.
    #[test]
    fn inspects_synthetic_window_blob() {
        let mut bytes = vec![0u8; 0x90];
        // Shared header words at their observed offsets; 0x04 and 0x08 stay
        // zero, which the signature check also requires.
        bytes[0x00..0x04].copy_from_slice(&0x0000_07d0u32.to_le_bytes());
        bytes[0x0c..0x10].copy_from_slice(&0x8000_0000u32.to_le_bytes());
        bytes[0x10..0x14].copy_from_slice(&0x8000_003fu32.to_le_bytes());
        bytes[0x14..0x18].copy_from_slice(&0x0000_003fu32.to_le_bytes());
        bytes[0x34..0x38].copy_from_slice(&0x0007_d100u32.to_le_bytes());
        bytes[0x38..0x3c].copy_from_slice(&0x0007_d200u32.to_le_bytes());
        bytes[0x40..0x44].copy_from_slice(&0x000b_b800u32.to_le_bytes());
        bytes[0x48..0x4c].copy_from_slice(&0x000b_b900u32.to_le_bytes());
        // Inline root name: NUL prefix at 0x50, "Root.imb" at 0x51, NUL
        // terminator at 0x59 (both NULs are already zero; set explicitly
        // to document the layout).
        bytes[0x50] = 0;
        bytes[0x51..0x51 + "Root.imb".len()].copy_from_slice(b"Root.imb");
        bytes[0x59] = 0;
        // Second reference appended at the end of the blob, NUL-preceded and
        // NUL-terminated as parse_imb_reference_at requires.
        bytes.extend_from_slice(b"\0Button.imb\0");
        let report = inspect_win_bytes(&bytes).expect("inspection should succeed");
        assert!(report.matches_observed_common_signature);
        assert_eq!(
            report.inline_root_resource_name.as_deref(),
            Some("Root.imb")
        );
        assert_eq!(report.imb_reference_count, 2);
        assert_eq!(report.unique_imb_reference_count, 2);
        assert_eq!(report.resource_selector_records.len(), 2);
        assert_eq!(report.resource_selector_records[0].name, "Root.imb");
        // No anonymous-record PRELUDE signature exists in the fixture.
        assert!(report.anonymous_selector_records.is_empty());
    }
}