Compare commits


14 Commits
v1.5.1 ... main

Author SHA1 Message Date
b56b399201 Support loading Wii Menu (BootStage) DOL files
Also adds support to elf2dol to extract the inner DOL
from a BootStage DOL.
2025-06-23 18:21:42 -06:00
8620099731 Version v1.6.2 2025-06-09 22:45:48 -06:00
ae00c35ec3 Better jump table error context 2025-06-09 22:45:48 -06:00
ba2589646e Relax prolog/epilog sequence checks
Some games (e.g. Excite Truck) have very aggressive float
scheduling that can create large gaps between prolog
instructions. This allows arbitrary instructions in the
sequence checks, provided they're not a branch and don't
touch r0/r1.

Resolves #105
2025-06-09 22:45:48 -06:00
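For illustration, a prologue of the kind this change now accepts might look like the following (hypothetical instruction sequence; registers and offsets invented). The scan skips the scheduled float instructions because they are neither branches nor touch r0/r1:

```
stwu  r1, -0x40(r1)  # first prologue instruction
fmuls f4, f2, f3     # float math scheduled into the gap: no branch, no r0/r1
fmr   f31, f1        # still safe to skip over
mflr  r0             # second prologue instruction, found by the relaxed scan
stw   r0, 0x44(r1)   # save LR
```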
cadmic 7bc0bc474d Continue analyzing functions after unknown jumps (#106)
* Continue analyzing functions after unknown jumps
2025-06-09 22:45:21 -06:00
cadmic d969819b78 Guess endianness of "erased" DWARF info (#104) 2025-06-09 22:44:39 -06:00
f4a67ee619 Version v1.6.1 2025-06-04 22:04:30 -06:00
Robin Avery d92a892c2b Relax string size requirement for auto symbols (#102) 2025-06-04 20:01:39 -07:00
cadmic 5e33fea49f Allow specifying replacement bytes in dtk extab clean (#103)
* Allow specifying replacement bytes in dtk extab clean

* Simplify extab padding replacement

* Reword log message

* clippy has bad taste

* Don't specify revision number for cwextab

---------

Co-authored-by: Amber Brault <celestialamber1@gmail.com>
2025-06-04 20:01:05 -07:00
9cafb77d3f Add dtk extab clean & config.yml clean_extab
It was discovered that certain extab actions contain
uninitialized data from the compiler. This provides
a way to zero out uninitialized data in DOL or object
files. Usage: `dtk extab clean input.dol output.dol`

A `clean_extab` setting was added to config.yml, so
projects can link the cleaned objects and target the
cleaned DOL hash.
2025-06-01 20:23:07 -06:00
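A minimal config.yml sketch of the new setting (paths and hash are placeholders; the `clean_extab` key itself comes from this changeset's `ModuleConfig`, assuming the usual flattened base-module layout):

```yaml
object: orig/GAMEID/sys/main.dol
hash: 0123456789abcdef0123456789abcdef01234567  # sha1 of the original DOL (placeholder)
clean_extab: true  # zero out uninitialized extab data before analysis
```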
Dávid Balatoni 20e877c9ec Some ProDG improvements (#101) 2025-06-01 16:43:13 -06:00
88d0e6b789 cargo clippy --fix 2025-06-01 16:42:00 -06:00
f212b35d28 Fix BSS symbol data check in add_padding_symbols 2025-06-01 16:40:40 -06:00
9c681557f5 Write ldscript_template path to output depfile 2025-05-30 19:18:46 -06:00
47 changed files with 1171 additions and 708 deletions

Cargo.lock (generated)

@@ -339,16 +339,16 @@ checksum = "c2e06f9bce634a3c898eb1e5cb949ff63133cbb218af93cc9b38b31d6f3ea285"

 [[package]]
 name = "cwextab"
-version = "1.0.3"
+version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "003567b96ff9d8ac3275831650385891bca370092937be625157778b1e58f755"
+checksum = "9dd95393b8cc20937e4757d9c22b89d016613e934c60dcb073bd8a5aade79fcf"
 dependencies = [
- "thiserror",
+ "thiserror 2.0.12",
 ]

 [[package]]
 name = "decomp-toolkit"
-version = "1.5.1"
+version = "1.7.0"
 dependencies = [
  "aes",
  "anyhow",
@@ -873,7 +873,7 @@ dependencies = [
  "miniz_oxide",
  "rayon",
  "sha1",
- "thiserror",
+ "thiserror 1.0.64",
  "zerocopy",
  "zstd",
 ]
@@ -946,7 +946,7 @@ dependencies = [
  "proc-macro-crate",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1110,7 +1110,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba"
 dependencies = [
  "proc-macro2",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1124,9 +1124,9 @@ dependencies = [

 [[package]]
 name = "proc-macro2"
-version = "1.0.88"
+version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9"
+checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
 dependencies = [
  "unicode-ident",
 ]
@@ -1158,7 +1158,7 @@ dependencies = [
  "prost",
  "prost-types",
  "regex",
- "syn 2.0.79",
+ "syn 2.0.101",
  "tempfile",
 ]
@@ -1172,7 +1172,7 @@ dependencies = [
  "itertools",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1374,7 +1374,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1385,7 +1385,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1408,7 +1408,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1527,7 +1527,7 @@ dependencies = [
  "heck",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1549,7 +1549,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "rustversion",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1584,9 +1584,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.79"
+version = "2.0.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
+checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1609,7 +1609,7 @@ dependencies = [
  "serde",
  "serde_derive",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "walkdir",
 ]
@@ -1632,7 +1632,16 @@ version = "1.0.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84"
 dependencies = [
- "thiserror-impl",
+ "thiserror-impl 1.0.64",
 ]

+[[package]]
+name = "thiserror"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
+dependencies = [
+ "thiserror-impl 2.0.12",
+]
+
 [[package]]
@@ -1643,7 +1652,18 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

+[[package]]
+name = "thiserror-impl"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.101",
+]
+
 [[package]]
@@ -1692,7 +1712,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1755,7 +1775,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "serde_derive_internals",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
@@ -1853,7 +1873,7 @@ dependencies = [
  "once_cell",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
  "wasm-bindgen-shared",
 ]
@@ -1875,7 +1895,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
@@ -2088,7 +2108,7 @@ checksum = "3ca22c4ad176b37bd81a565f66635bde3d654fe6832730c3e52e1018ae1655ee"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

Cargo.toml

@@ -3,7 +3,7 @@ name = "decomp-toolkit"
 description = "Yet another GameCube/Wii decompilation toolkit."
 authors = ["Luke Street <luke@street.dev>"]
 license = "MIT OR Apache-2.0"
-version = "1.5.1"
+version = "1.7.0"
 edition = "2021"
 publish = false
 repository = "https://github.com/encounter/decomp-toolkit"
@@ -37,7 +37,7 @@ typed-path = "0.9"
 cbc = "0.1"
 crossterm = "0.28"
 cwdemangle = "1.0"
-cwextab = "1.0"
+cwextab = "1.1"
 dyn-clone = "1.0"
 enable-ansi-support = "0.2"
 filetime = "0.2"

README.md

@@ -297,6 +297,8 @@ Dumps DWARF 1.1 information from an ELF file. (Does **not** support DWARF 2+)
 ```shell
 $ dtk dwarf dump input.elf
+# or, to include data that was stripped by MWLD
+$ dtk dwarf dump input.elf --include-erased
 ```

 ### elf disasm


@@ -1,6 +1,6 @@
 use std::{
     cmp::min,
-    collections::BTreeMap,
+    collections::{BTreeMap, BTreeSet},
     fmt::{Debug, Display, Formatter, UpperHex},
     ops::{Add, AddAssign, BitAnd, Sub},
 };
@@ -191,7 +191,7 @@ impl AnalyzerState {
         };
         obj.add_symbol(
             ObjSymbol {
-                name: format!("jumptable_{}", address_str),
+                name: format!("jumptable_{address_str}"),
                 address: addr.address as u64,
                 section: Some(addr.section),
                 size: size as u64,
@@ -275,7 +275,7 @@ impl AnalyzerState {
             };
             let (section_index, _) = obj
                 .sections
                 .at_address(entry)
-                .context(format!("Entry point {:#010X} outside of any section", entry))?;
+                .context(format!("Entry point {entry:#010X} outside of any section"))?;
             self.process_function_at(obj, SectionAddress::new(section_index, entry))?;
         }
         // Locate bounds for referenced functions until none are left
@@ -530,7 +530,7 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
     let (section_index, _) = obj
         .sections
         .at_address(entry as u32)
-        .context(format!("Entry point {:#010X} outside of any section", entry))?;
+        .context(format!("Entry point {entry:#010X} outside of any section"))?;
     let entry_addr = SectionAddress::new(section_index, entry as u32);

     let mut executor = Executor::new(obj);
@@ -572,6 +572,26 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
         Some((sda2_base, sda_base)) => {
             obj.sda2_base = Some(sda2_base);
             obj.sda_base = Some(sda_base);
+            obj.add_symbol(
+                ObjSymbol {
+                    name: "_SDA2_BASE_".to_string(),
+                    address: sda2_base as u64,
+                    size_known: true,
+                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
+                    ..Default::default()
+                },
+                true,
+            )?;
+            obj.add_symbol(
+                ObjSymbol {
+                    name: "_SDA_BASE_".to_string(),
+                    address: sda_base as u64,
+                    size_known: true,
+                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
+                    ..Default::default()
+                },
+                true,
+            )?;
             Ok(true)
         }
         None => Ok(false),
@@ -581,7 +601,7 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
 /// ProDG hardcodes .bss and .sbss section initialization in `entry`
 /// This function locates the memset calls and returns a list of
 /// (address, size) pairs for the .bss sections.
-pub fn locate_bss_memsets(obj: &mut ObjInfo) -> Result<Vec<(u32, u32)>> {
+pub fn locate_bss_memsets(obj: &ObjInfo) -> Result<Vec<(u32, u32)>> {
     let mut bss_sections: Vec<(u32, u32)> = Vec::new();
     let Some(entry) = obj.entry else {
         return Ok(bss_sections);
@@ -589,7 +609,7 @@ pub fn locate_bss_memsets(obj: &mut ObjInfo) -> Result<Vec<(u32, u32)>> {
     let (section_index, _) = obj
         .sections
         .at_address(entry as u32)
-        .context(format!("Entry point {:#010X} outside of any section", entry))?;
+        .context(format!("Entry point {entry:#010X} outside of any section"))?;
     let entry_addr = SectionAddress::new(section_index, entry as u32);

     let mut executor = Executor::new(obj);
@@ -632,3 +652,50 @@ pub fn locate_bss_memsets(obj: &mut ObjInfo) -> Result<Vec<(u32, u32)>> {
     )?;
     Ok(bss_sections)
 }
+
+/// Execute VM from specified entry point following inner-section branches and function calls,
+/// noting all branch targets outside the current section.
+pub fn locate_cross_section_branch_targets(
+    obj: &ObjInfo,
+    entry: SectionAddress,
+) -> Result<BTreeSet<SectionAddress>> {
+    let mut branch_targets = BTreeSet::<SectionAddress>::new();
+    let mut executor = Executor::new(obj);
+    executor.push(entry, VM::new(), false);
+    executor.run(
+        obj,
+        |ExecCbData { executor, vm, result, ins_addr, section: _, ins: _, block_start: _ }| {
+            match result {
+                StepResult::Continue | StepResult::LoadStore { .. } => {
+                    Ok(ExecCbResult::<()>::Continue)
+                }
+                StepResult::Illegal => bail!("Illegal instruction @ {}", ins_addr),
+                StepResult::Jump(target) => {
+                    if let BranchTarget::Address(RelocationTarget::Address(addr)) = target {
+                        if addr.section == entry.section {
+                            executor.push(addr, vm.clone_all(), true);
+                        } else {
+                            branch_targets.insert(addr);
+                        }
+                    }
+                    Ok(ExecCbResult::EndBlock)
+                }
+                StepResult::Branch(branches) => {
+                    for branch in branches {
+                        if let BranchTarget::Address(RelocationTarget::Address(addr)) =
+                            branch.target
+                        {
+                            if addr.section == entry.section {
+                                executor.push(addr, branch.vm, true);
+                            } else {
+                                branch_targets.insert(addr);
+                            }
+                        }
+                    }
+                    Ok(ExecCbResult::Continue)
+                }
+            }
+        },
+    )?;
+    Ok(branch_targets)
+}
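A usage sketch for the new helper (hypothetical caller; `obj`, `entry`, and the follow-up processing are assumptions, though `process_function_at` appears earlier in this file):

```rust
// Collect branch targets that leave the entry point's section, then feed
// each one back into function analysis (sketch, not the actual call site).
let targets = locate_cross_section_branch_targets(obj, entry)?;
for target in targets {
    state.process_function_at(obj, target)?;
}
```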


@@ -183,8 +183,7 @@ fn get_jump_table_entries(
         let (section_index, _) =
             obj.sections.at_address(entry_addr).with_context(|| {
-                format!(
-                    "Invalid jump table entry {:#010X} at {:#010X}",
-                    entry_addr, cur_addr
-                )
+                format!("Invalid jump table entry {entry_addr:#010X} at {cur_addr:#010X}")
             })?;
         entries.push(SectionAddress::new(section_index, entry_addr));
@@ -245,7 +244,9 @@ pub fn uniq_jump_table_entries(
         return Ok((BTreeSet::new(), 0));
     }
     let (entries, size) =
-        get_jump_table_entries(obj, addr, size, from, function_start, function_end)?;
+        get_jump_table_entries(obj, addr, size, from, function_start, function_end).with_context(
+            || format!("While fetching jump table entries starting at {addr:#010X}"),
+        )?;
     Ok((BTreeSet::from_iter(entries.iter().cloned()), size))
 }


@@ -2,7 +2,7 @@ use anyhow::Result;

 use crate::{
     obj::{ObjDataKind, ObjInfo, ObjSectionKind, ObjSymbolKind, SymbolIndex},
-    util::split::is_linker_generated_label,
+    util::{config::is_auto_symbol, split::is_linker_generated_label},
 };

 pub fn detect_objects(obj: &mut ObjInfo) -> Result<()> {
@@ -134,7 +134,9 @@ pub fn detect_strings(obj: &mut ObjInfo) -> Result<()> {
                 StringResult::None => {}
                 StringResult::String { length, terminated } => {
                     let size = if terminated { length + 1 } else { length };
-                    if !symbol.size_known || symbol.size == size as u64 {
+                    if symbol.size == size as u64
+                        || (is_auto_symbol(symbol) && symbol.size > size as u64)
+                    {
                         let str = String::from_utf8_lossy(&data[..length]);
                         log::debug!("Found string '{}' @ {}", str, symbol.name);
                         symbols_set.push((symbol_idx, ObjDataKind::String, size));
@@ -142,7 +144,9 @@ pub fn detect_strings(obj: &mut ObjInfo) -> Result<()> {
                     }
                 }
                 StringResult::WString { length, str } => {
                     let size = length + 2;
-                    if !symbol.size_known || symbol.size == size as u64 {
+                    if symbol.size == size as u64
+                        || (is_auto_symbol(symbol) && symbol.size > size as u64)
+                    {
                         log::debug!("Found wide string '{}' @ {}", str, symbol.name);
                         symbols_set.push((symbol_idx, ObjDataKind::String16, size));
                     }


@@ -101,7 +101,7 @@ impl AnalysisPass for FindSaveRestSleds {
         for i in reg_start..reg_end {
             let addr = start + (i - reg_start) * step_size;
             state.known_symbols.entry(addr).or_default().push(ObjSymbol {
-                name: format!("{}{}", label, i),
+                name: format!("{label}{i}"),
                 address: addr.address as u64,
                 section: Some(start.section),
                 size_known: true,


@@ -45,6 +45,17 @@ type BlockRange = Range<SectionAddress>;

 type InsCheck = dyn Fn(Ins) -> bool;

+/// Stop searching for prologue/epilogue sequences if the next instruction
+/// is a branch or uses r0 or r1.
+fn is_end_of_seq(next: &Ins) -> bool {
+    next.is_branch()
+        || next
+            .defs()
+            .iter()
+            .chain(next.uses().iter())
+            .any(|a| matches!(a, ppc750cl::Argument::GPR(ppc750cl::GPR(0 | 1))))
+}
+
 #[inline(always)]
 fn check_sequence(
     section: &ObjSection,
@@ -52,29 +63,26 @@ fn check_sequence(
     ins: Option<Ins>,
     sequence: &[(&InsCheck, &InsCheck)],
 ) -> Result<bool> {
-    let mut found = false;
+    let ins = ins
+        .or_else(|| disassemble(section, addr.address))
+        .with_context(|| format!("Failed to disassemble instruction at {addr:#010X}"))?;
     for &(first, second) in sequence {
-        let Some(ins) = ins.or_else(|| disassemble(section, addr.address)) else {
-            continue;
-        };
         if !first(ins) {
             continue;
         }
-        let Some(next) = disassemble(section, addr.address + 4) else {
-            continue;
-        };
-        if second(next)
-            // Also check the following instruction, in case the scheduler
-            // put something in between.
-            || (!next.is_branch()
-                && matches!(disassemble(section, addr.address + 8), Some(ins) if second(ins)))
-        {
-            found = true;
-            break;
+        let mut current_addr = addr.address + 4;
+        while let Some(next) = disassemble(section, current_addr) {
+            if second(next) {
+                return Ok(true);
+            }
+            if is_end_of_seq(&next) {
+                // If we hit a branch or an instruction that uses r0 or r1, stop searching.
+                break;
+            }
+            current_addr += 4;
         }
     }
-
-    Ok(found)
+    Ok(false)
 }

 fn check_prologue_sequence(
@@ -89,15 +97,19 @@ fn check_prologue_sequence(
     }

     #[inline(always)]
     fn is_stwu(ins: Ins) -> bool {
-        // stwu r1, d(r1)
-        ins.op == Opcode::Stwu && ins.field_rs() == 1 && ins.field_ra() == 1
+        // stwu[x] r1, d(r1)
+        matches!(ins.op, Opcode::Stwu | Opcode::Stwux) && ins.field_rs() == 1 && ins.field_ra() == 1
     }
     #[inline(always)]
     fn is_stw(ins: Ins) -> bool {
         // stw r0, d(r1)
         ins.op == Opcode::Stw && ins.field_rs() == 0 && ins.field_ra() == 1
     }

-    check_sequence(section, addr, ins, &[(&is_stwu, &is_mflr), (&is_mflr, &is_stw)])
+    check_sequence(section, addr, ins, &[
+        (&is_stwu, &is_mflr),
+        (&is_mflr, &is_stw),
+        (&is_mflr, &is_stwu),
+    ])
 }

 impl FunctionSlices {
@@ -148,7 +160,28 @@ impl FunctionSlices {
         }

         if check_prologue_sequence(section, addr, Some(ins))? {
             if let Some(prologue) = self.prologue {
-                if prologue != addr && prologue != addr - 4 {
+                let invalid_seq = if prologue == addr {
+                    false
+                } else if prologue > addr {
+                    true
+                } else {
+                    // Check if any instructions between the prologue and this address
+                    // are branches or use r0 or r1.
+                    let mut current_addr = prologue.address + 4;
+                    loop {
+                        if current_addr == addr.address {
+                            break false;
+                        }
+                        let next = disassemble(section, current_addr).with_context(|| {
+                            format!("Failed to disassemble {current_addr:#010X}")
+                        })?;
+                        if is_end_of_seq(&next) {
+                            break true;
+                        }
+                        current_addr += 4;
+                    }
+                };
+                if invalid_seq {
                     bail!("Found multiple functions inside a symbol: {:#010X} and {:#010X}. Check symbols.txt?", prologue, addr)
                 }
             } else {
@@ -180,7 +213,11 @@ impl FunctionSlices {
             ins.op == Opcode::Or && ins.field_rd() == 1
         }

-        if check_sequence(section, addr, Some(ins), &[(&is_mtlr, &is_addi), (&is_or, &is_mtlr)])? {
+        if check_sequence(section, addr, Some(ins), &[
+            (&is_mtlr, &is_addi),
+            (&is_mtlr, &is_or),
+            (&is_or, &is_mtlr),
+        ])? {
             if let Some(epilogue) = self.epilogue {
                 if epilogue != addr {
                     bail!("Found duplicate epilogue: {:#010X} and {:#010X}", epilogue, addr)
@@ -227,7 +264,7 @@ impl FunctionSlices {
             })?;
         }
         self.check_epilogue(section, ins_addr, ins)
-            .with_context(|| format!("While processing {:#010X}: {:#?}", function_start, self))?;
+            .with_context(|| format!("While processing {function_start:#010X}: {self:#?}"))?;
         if !self.has_conditional_blr && is_conditional_blr(ins) {
             self.has_conditional_blr = true;
         }
@@ -340,7 +377,14 @@ impl FunctionSlices {
             function_end.or_else(|| self.end()),
         )?;
         log::debug!("-> size {}: {:?}", size, entries);
-        if (entries.contains(&next_address) || self.blocks.contains_key(&next_address))
+        let max_block = self
+            .blocks
+            .keys()
+            .next_back()
+            .copied()
+            .unwrap_or(next_address)
+            .max(next_address);
+        if entries.iter().any(|&addr| addr > function_start && addr <= max_block)
             && !entries.iter().any(|&addr| {
                 self.is_known_function(known_functions, addr)
                     .is_some_and(|fn_addr| fn_addr != function_start)
@@ -703,7 +747,7 @@ impl FunctionSlices {
             }
         }
         // If we discovered a function prologue, known tail call.
-        if slices.prologue.is_some() {
+        if slices.prologue.is_some() || slices.has_r1_load {
             log::trace!("Prologue discovered; known tail call: {:#010X}", addr);
             return TailCallResult::Is;
         }


@@ -417,9 +417,13 @@ impl Tracker {
                 Ok(ExecCbResult::Continue)
             }
             StepResult::Jump(target) => match target {
+                BranchTarget::Return => Ok(ExecCbResult::EndBlock),
                 BranchTarget::Unknown
-                | BranchTarget::Return
                 | BranchTarget::JumpTable { address: RelocationTarget::External, .. } => {
+                    let next_addr = ins_addr + 4;
+                    if next_addr < function_end {
+                        possible_missed_branches.insert(ins_addr + 4, vm.clone_all());
+                    }
                     Ok(ExecCbResult::EndBlock)
                 }
                 BranchTarget::Address(addr) => {
@@ -576,7 +580,7 @@ impl Tracker {
         let relocation_target = relocation_target_for(obj, from, None).ok().flatten();
         if !matches!(relocation_target, None | Some(RelocationTarget::External)) {
             // VM should have already handled this
-            panic!("Relocation already exists for {:#010X} (from {:#010X})", addr, from);
+            panic!("Relocation already exists for {addr:#010X} (from {from:#010X})");
         }
     }
     // Remainder of this function is for executable objects only
@@ -668,7 +672,7 @@ impl Tracker {
             0
         };
         let new_name =
-            if module_id == 0 { name.to_string() } else { format!("{}:{}", name, module_id) };
+            if module_id == 0 { name.to_string() } else { format!("{name}:{module_id}") };
         log::debug!("Renaming {} to {}", section.name, new_name);
         section.name = new_name;
     }


@@ -127,16 +127,16 @@ fn extract(args: ExtractArgs) -> Result<()> {
         }
         std::fs::create_dir_all(&out_dir)?;
         if !args.quiet {
-            println!("Extracting {} to {}", path, out_dir);
+            println!("Extracting {path} to {out_dir}");
         }

         let mut file = open_file(path, false)?;
         let mut archive = ar::Archive::new(file.map()?);
         while let Some(entry) = archive.next_entry() {
-            let mut entry = entry.with_context(|| format!("Processing entry in {}", path))?;
+            let mut entry = entry.with_context(|| format!("Processing entry in {path}"))?;
             let file_name = std::str::from_utf8(entry.header().identifier())?;
             if !args.quiet && args.verbose {
-                println!("\t{}", file_name);
+                println!("\t{file_name}");
             }
             let mut file_path = out_dir.clone();
             for segment in file_name.split(&['/', '\\']) {
@@ -146,7 +146,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
                 std::fs::create_dir_all(parent)?;
             }
             let mut file = File::create(&file_path)
-                .with_context(|| format!("Failed to create file {}", file_path))?;
+                .with_context(|| format!("Failed to create file {file_path}"))?;
             std::io::copy(&mut entry, &mut file)?;
             file.flush()?;
@@ -154,7 +154,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
         }
     }
     if !args.quiet {
-        println!("Extracted {} files", num_files);
+        println!("Extracted {num_files} files");
     }
     Ok(())
 }


@@ -47,6 +47,7 @@ use crate::{
     diff::{calc_diff_ranges, print_diff, process_code},
     dol::process_dol,
     elf::{process_elf, write_elf},
+    extab::clean_extab,
     file::{
         buf_copy_with_hash, buf_writer, check_hash_str, touch, verify_hash, FileIterator,
         FileReadInfo,
@@ -293,6 +294,9 @@ pub struct ModuleConfig {
     pub block_relocations: Vec<BlockRelocationConfig>,
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
     pub add_relocations: Vec<AddRelocationConfig>,
+    /// Process exception tables and zero out uninitialized data.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub clean_extab: Option<bool>,
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
@@ -534,9 +538,11 @@ pub fn info(args: InfoArgs) -> Result<()> {
         apply_selfile(&mut obj, file.map()?)?;
     }

-    println!("{}:", obj.name);
+    if !obj.name.is_empty() {
+        println!("{}:", obj.name);
+    }
     if let Some(entry) = obj.entry {
-        println!("Entry point: {:#010X}", entry);
+        println!("Entry point: {entry:#010X}");
     }
     println!("\nSections:");
     println!("\t{: >10} | {: <10} | {: <10} | {: <10}", "Name", "Address", "Size", "File Off");
@@ -578,6 +584,7 @@ struct ModuleInfo<'a> {
     config: &'a ModuleConfig,
     symbols_cache: Option<FileReadInfo>,
     splits_cache: Option<FileReadInfo>,
+    dep: Vec<Utf8NativePathBuf>,
 }

 type ModuleMapByName<'a> = BTreeMap<String, ModuleInfo<'a>>;
@@ -817,17 +824,29 @@ struct AnalyzeResult {
     splits_cache: Option<FileReadInfo>,
 }

-fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
-    let object_path = object_base.join(&config.base.object);
+fn load_dol_module(
+    config: &ModuleConfig,
+    object_base: &ObjectBase,
+) -> Result<(ObjInfo, Utf8NativePathBuf)> {
+    let object_path = object_base.join(&config.object);
     log::debug!("Loading {}", object_path);
     let mut obj = {
-        let mut file = object_base.open(&config.base.object)?;
+        let mut file = object_base.open(&config.object)?;
         let data = file.map()?;
-        if let Some(hash_str) = &config.base.hash {
+        if let Some(hash_str) = &config.hash {
             verify_hash(data, hash_str)?;
         }
-        process_dol(data, config.base.name())?
+        process_dol(data, config.name())?
     };
+    if config.clean_extab.unwrap_or(false) {
+        log::debug!("Cleaning extab for {}", config.name());
+        clean_extab(&mut obj, std::iter::empty())?;
+    }
+    Ok((obj, object_path))
+}
+
+fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
+    let (mut obj, object_path) = load_dol_module(&config.base, object_base)?;
     let mut dep = vec![object_path];

     if let Some(comment_version) = config.mw_comment_version {
@@ -954,7 +973,7 @@ fn split_write_obj(
     DirBuilder::new()
         .recursive(true)
         .create(out_dir)
-        .with_context(|| format!("Failed to create out dir '{}'", out_dir))?;
+        .with_context(|| format!("Failed to create out dir '{out_dir}'"))?;
     let obj_dir = out_dir.join("obj");
     let entry = if module.obj.kind == ObjKind::Executable {
         module.obj.entry.and_then(|e| {
@@ -1055,9 +1074,10 @@ fn split_write_obj(
     // Generate ldscript.lcf
     let ldscript_template = if let Some(template_path) = &module.config.ldscript_template {
         let template_path = template_path.with_encoding();
-        Some(fs::read_to_string(&template_path).with_context(|| {
-            format!("Failed to read linker script template '{}'", template_path)
-        })?)
+        let template = fs::read_to_string(&template_path)
+            .with_context(|| format!("Failed to read linker script template '{template_path}'"))?;
+        module.dep.push(template_path);
+        Some(template)
     } else {
         None
     };
@@ -1073,8 +1093,7 @@ fn split_write_obj(
             let out_path = asm_dir.join(asm_path_for_unit(&unit.name));

             let mut w = buf_writer(&out_path)?;
-            write_asm(&mut w, split_obj)
-                .with_context(|| format!("Failed to write {}", out_path))?;
+            write_asm(&mut w, split_obj).with_context(|| format!("Failed to write {out_path}"))?;
             w.flush()?;
         }
     }
@@ -1091,7 +1110,7 @@ fn write_if_changed(path: &Utf8NativePath, contents: &[u8]) -> Result<()> {
             return Ok(());
         }
     }
-    fs::write(path, contents).with_context(|| format!("Failed to write file '{}'", path))?;
+    fs::write(path, contents).with_context(|| format!("Failed to write file '{path}'"))?;
     Ok(())
 }
@@ -1245,6 +1264,7 @@ fn split(args: SplitArgs) -> Result<()> {
             config: &config.base,
             symbols_cache: result.symbols_cache,
             splits_cache: result.splits_cache,
+            dep: Default::default(),
         }
     };
     let mut function_count = dol.obj.symbols.by_kind(ObjSymbolKind::Function).count();
@@ -1259,6 +1279,7 @@ fn split(args: SplitArgs) -> Result<()> {
                 config: &config.modules[idx],
                 symbols_cache: result.symbols_cache,
                 splits_cache: result.splits_cache,
+                dep: Default::default(),
             }),
             Entry::Occupied(_) => bail!("Duplicate module name {}", result.obj.name),
         };
@@ -1440,6 +1461,10 @@ fn split(args: SplitArgs) -> Result<()> {
     }

     // Write dep file
+    dep.extend(dol.dep);
+    for module in modules.into_values() {
+        dep.extend(module.dep);
+    }
     {
         let dep_path = args.out_dir.join("dep");
         let mut dep_file = buf_writer(&dep_path)?;
@@ -1651,15 +1676,7 @@ fn diff(args: DiffArgs) -> Result<()> {
     let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
     let object_base = find_object_base(&config)?;

-    log::info!("Loading {}", object_base.join(&config.base.object));
-    let mut obj = {
-        let mut file = object_base.open(&config.base.object)?;
-        let data = file.map()?;
-        if let Some(hash_str) = &config.base.hash {
-            verify_hash(data, hash_str)?;
-        }
-        process_dol(data, config.base.name())?
-    };
+    let (mut obj, _object_path) = load_dol_module(&config.base, &object_base)?;

     if let Some(symbols_path) = &config.base.symbols {
         apply_symbols_file(&symbols_path.with_encoding(), &mut obj)?;
@@ -1875,15 +1892,7 @@ fn apply(args: ApplyArgs) -> Result<()> {
     let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
     let object_base = find_object_base(&config)?;

-    log::info!("Loading {}", object_base.join(&config.base.object));
-    let mut obj = {
-        let mut file = object_base.open(&config.base.object)?;
-        let data = file.map()?;
-        if let Some(hash_str) = &config.base.hash {
-            verify_hash(data, hash_str)?;
-        }
-        process_dol(data, config.base.name())?
-    };
+    let (mut obj, _object_path) = load_dol_module(&config.base, &object_base)?;

     let Some(symbols_path) = &config.base.symbols else {
         bail!("No symbols file specified in config");
@@ -2157,7 +2166,7 @@ impl ObjectBase {
             }
             base.join(path.with_encoding())
         }
-        ObjectBase::Vfs(base, _) => Utf8NativePathBuf::from(format!("{}:{}", base, path)),
+        ObjectBase::Vfs(base, _) => Utf8NativePathBuf::from(format!("{base}:{path}")),
     }
 }
@@ -2174,7 +2183,7 @@ impl ObjectBase {
         }
         ObjectBase::Vfs(vfs_path, vfs) => {
             open_file_with_fs(vfs.clone(), &path.with_encoding(), true)
-                .with_context(|| format!("Using disc image {}", vfs_path))
+                .with_context(|| format!("Using disc image {vfs_path}"))
         }
     }
 }
@@ -2192,18 +2201,18 @@ pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
     if let Some(base) = &config.object_base {
         let base = base.with_encoding();
         // Search for disc images in the object base directory
-        for result in fs::read_dir(&base).with_context(|| format!("Reading directory {}", base))? {
-            let entry = result.with_context(|| format!("Reading entry in directory {}", base))?;
+        for result in fs::read_dir(&base).with_context(|| format!("Reading directory {base}"))? {
+            let entry = result.with_context(|| format!("Reading entry in directory {base}"))?;
             let Ok(path) = check_path_buf(entry.path()) else {
                 log::warn!("Path is not valid UTF-8: {:?}", entry.path());
                 continue;
             };
             let file_type =
-                entry.file_type().with_context(|| format!("Getting file type for {}", path))?;
+                entry.file_type().with_context(|| format!("Getting file type for {path}"))?;
             let is_file = if file_type.is_symlink() {
                 // Also traverse symlinks to files
                 fs::metadata(&path)
-                    .with_context(|| format!("Getting metadata for {}", path))?
+                    .with_context(|| format!("Getting metadata for {path}"))?
                     .is_file()
             } else {
                 file_type.is_file()
@@ -2211,7 +2220,7 @@ pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
             if is_file {
                 let mut file = open_file(&path, false)?;
                 let format = detect(file.as_mut())
-                    .with_context(|| format!("Detecting file type for {}", path))?;
+                    .with_context(|| format!("Detecting file type for {path}"))?;
                 match format {
                     FileFormat::Archive(ArchiveKind::Disc(format)) => {
                         let fs = open_fs(file, ArchiveKind::Disc(format))?;
@@ -2240,7 +2249,7 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
     {
         let target_path = extracted_path(&target_dir, &config.base.object);
         if !fs::exists(&target_path)
-            .with_context(|| format!("Failed to check path '{}'", target_path))?
+            .with_context(|| format!("Failed to check path '{target_path}'"))?
         {
             object_paths.push((&config.base.object, config.base.hash.as_deref(), target_path));
         }
@@ -2248,7 +2257,7 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
     if let Some(selfile) = &config.selfile {
         let target_path = extracted_path(&target_dir, selfile);
         if !fs::exists(&target_path)
-            .with_context(|| format!("Failed to check path '{}'", target_path))?
+            .with_context(|| format!("Failed to check path '{target_path}'"))?
         {
             object_paths.push((selfile, config.selfile_hash.as_deref(), target_path));
         }
@@ -2256,7 +2265,7 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
     for module_config in &config.modules {
         let target_path = extracted_path(&target_dir, &module_config.object);
         if !fs::exists(&target_path)
-            .with_context(|| format!("Failed to check path '{}'", target_path))?
+            .with_context(|| format!("Failed to check path '{target_path}'"))?
        {
            object_paths.push((&module_config.object, module_config.hash.as_deref(), target_path));
        }
@@ -2275,12 +2284,12 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
         let mut file = object_base.open(source_path)?;
         if let Some(parent) = target_path.parent() {
             fs::create_dir_all(parent)
-                .with_context(|| format!("Failed to create directory '{}'", parent))?;
+                .with_context(|| format!("Failed to create directory '{parent}'"))?;
         }
         let mut out = fs::File::create(&target_path)
-            .with_context(|| format!("Failed to create file '{}'", target_path))?;
+            .with_context(|| format!("Failed to create file '{target_path}'"))?;
         let hash_bytes = buf_copy_with_hash(&mut file, &mut out)
-            .with_context(|| format!("Failed to extract file '{}'", target_path))?;
+            .with_context(|| format!("Failed to extract file '{target_path}'"))?;
         if let Some(hash) = hash {
             check_hash_str(hash_bytes, hash).with_context(|| {
                 format!("Source file failed verification: '{}'", object_base.join(source_path))


@@ -104,16 +104,16 @@ fn dump(args: DumpArgs) -> Result<()> {
             // TODO make a basename method
             let name = name.trim_start_matches("D:").replace('\\', "/");
             let name = name.rsplit_once('/').map(|(_, b)| b).unwrap_or(&name);
-            let file_path = out_path.join(format!("{}.txt", name));
+            let file_path = out_path.join(format!("{name}.txt"));
             let mut file = buf_writer(&file_path)?;
             dump_debug_section(&args, &mut file, &obj_file, debug_section)?;
             file.flush()?;
         } else if args.no_color {
-            println!("\n// File {}:", name);
+            println!("\n// File {name}:");
             dump_debug_section(&args, &mut stdout(), &obj_file, debug_section)?;
         } else {
             let mut writer = HighlightWriter::new(syntax_set.clone(), syntax.clone(), theme);
-            writeln!(writer, "\n// File {}:", name)?;
+            writeln!(writer, "\n// File {name}:")?;
             dump_debug_section(&args, &mut writer, &obj_file, debug_section)?;
         }
     }
@@ -209,26 +209,25 @@ where
     }
     writeln!(w, "\n/*\n    Compile unit: {}", unit.name)?;
     if let Some(producer) = unit.producer {
-        writeln!(w, "    Producer: {}", producer)?;
+        writeln!(w, "    Producer: {producer}")?;
     }
     if let Some(comp_dir) = unit.comp_dir {
-        writeln!(w, "    Compile directory: {}", comp_dir)?;
+        writeln!(w, "    Compile directory: {comp_dir}")?;
     }
     if let Some(language) = unit.language {
-        writeln!(w, "    Language: {}", language)?;
+        writeln!(w, "    Language: {language}")?;
     }
     if let (Some(start), Some(end)) = (unit.start_address, unit.end_address) {
-        writeln!(w, "    Code range: {:#010X} -> {:#010X}", start, end)?;
+        writeln!(w, "    Code range: {start:#010X} -> {end:#010X}")?;
     }
     if let Some(gcc_srcfile_name_offset) = unit.gcc_srcfile_name_offset {
         writeln!(
             w,
-            "    GCC Source File Name Offset: {:#010X}",
-            gcc_srcfile_name_offset
+            "    GCC Source File Name Offset: {gcc_srcfile_name_offset:#010X}"
         )?;
     }
     if let Some(gcc_srcinfo_offset) = unit.gcc_srcinfo_offset {
-        writeln!(w, "    GCC Source Info Offset: {:#010X}", gcc_srcinfo_offset)?;
+        writeln!(w, "    GCC Source Info Offset: {gcc_srcinfo_offset:#010X}")?;
     }
     writeln!(w, "*/")?;
@@ -269,7 +268,7 @@ where
                 continue;
             }
             match tag_type_string(&info, &typedefs, &tag_type, child.is_erased) {
-                Ok(s) => writeln!(w, "{}", s)?,
+                Ok(s) => writeln!(w, "{s}")?,
                 Err(e) => {
                     log::error!(
                         "Failed to emit tag {:X} (unit {}): {}",


@@ -146,14 +146,14 @@ fn disasm(args: DisasmArgs) -> Result<()> {
             let mut files_out = buf_writer(&args.out.join("link_order.txt"))?;
             for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
                 let out_name = file_stem_from_unit(&unit.name);
-                let out_path = asm_dir.join(format!("{}.s", out_name));
+                let out_path = asm_dir.join(format!("{out_name}.s"));
                 log::info!("Writing {}", out_path);

                 let mut w = buf_writer(&out_path)?;
                 write_asm(&mut w, split_obj)?;
                 w.flush()?;

-                writeln!(files_out, "{}.o", out_name)?;
+                writeln!(files_out, "{out_name}.o")?;
             }
             files_out.flush()?;
         }
@@ -402,7 +402,7 @@ fn signatures(args: SignaturesArgs) -> Result<()> {
             Ok(Some(signature)) => signature,
             Ok(None) => continue,
             Err(e) => {
-                eprintln!("Failed: {:?}", e);
+                eprintln!("Failed: {e:?}");
                 continue;
             }
         };
@@ -545,13 +545,13 @@ fn info(args: InfoArgs) -> Result<()> {
         .context("While reading .note.split section")?;
     println!("\nSplit metadata (.note.split):");
     if let Some(generator) = &meta.generator {
-        println!("\tGenerator: {}", generator);
+        println!("\tGenerator: {generator}");
     }
     if let Some(module_name) = &meta.module_name {
-        println!("\tModule name: {}", module_name);
+        println!("\tModule name: {module_name}");
     }
     if let Some(module_id) = meta.module_id {
-        println!("\tModule ID: {}", module_id);
+        println!("\tModule ID: {module_id}");
     }
     if let Some(virtual_addresses) = &meta.virtual_addresses {
         println!("\tVirtual addresses:");


@@ -6,17 +6,20 @@ use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, Sectio
 use typed_path::Utf8NativePathBuf;

 use crate::{
-    obj::ObjSectionKind,
-    util::{alf::ALF_MAGIC, dol::process_dol, file::buf_writer, path::native_path},
+    util::{
+        dol::{process_dol, write_dol},
+        file::buf_writer,
+        path::native_path,
+    },
     vfs::open_file,
 };

 #[derive(FromArgs, PartialEq, Eq, Debug)]
-/// Converts an ELF (or ALF) file to a DOL file.
+/// Converts an ELF, ALF, or BootStage file to a DOL file.
 #[argp(subcommand, name = "elf2dol")]
 pub struct Args {
     #[argp(positional, from_str_fn(native_path))]
-    /// path to input ELF or ALF file
+    /// path to input ELF, ALF or BootStage file
     elf_file: Utf8NativePathBuf,
     #[argp(positional, from_str_fn(native_path))]
     /// path to output DOL
@@ -50,8 +53,8 @@ const MAX_DATA_SECTIONS: usize = 11;
 pub fn run(args: Args) -> Result<()> {
     let mut file = open_file(&args.elf_file, true)?;
     let data = file.map()?;
-    if data.len() >= 4 && data[0..4] == ALF_MAGIC {
-        return convert_alf(args, data);
+    if data.len() >= 4 && data[0..4] != object::elf::ELFMAG {
+        return convert_dol_like(args, data);
     }

     let obj_file = object::read::File::parse(data)?;
@@ -159,86 +162,11 @@ pub fn run(args: Args) -> Result<()> {
     Ok(())
 }

-fn convert_alf(args: Args, data: &[u8]) -> Result<()> {
+/// Converts a DOL-like format (ALF or BootStage) to a DOL file.
+fn convert_dol_like(args: Args, data: &[u8]) -> Result<()> {
     let obj = process_dol(data, "")?;
-    let mut header = DolHeader { entry_point: obj.entry.unwrap() as u32, ..Default::default() };
-    let mut offset = 0x100u32;
     let mut out = buf_writer(&args.dol_file)?;
-    out.seek(SeekFrom::Start(offset as u64))?;
-
-    // Text sections
-    for (_, section) in obj.sections.iter().filter(|(_, s)| s.kind == ObjSectionKind::Code) {
-        log::debug!("Processing text section '{}'", section.name);
-        let address = section.address as u32;
-        let size = align32(section.size as u32);
-        *header.text_sections.get_mut(header.text_section_count).ok_or_else(|| {
-            anyhow!("Too many text sections (while processing '{}')", section.name)
-        })? = DolSection { offset, address, size };
-        header.text_section_count += 1;
-        write_aligned(&mut out, &section.data, size)?;
-        offset += size;
-    }
-
-    // Data sections
-    for (_, section) in obj
-        .sections
-        .iter()
-        .filter(|(_, s)| matches!(s.kind, ObjSectionKind::Data | ObjSectionKind::ReadOnlyData))
-    {
-        log::debug!("Processing data section '{}'", section.name);
-        let address = section.address as u32;
-        let size = align32(section.size as u32);
-        *header.data_sections.get_mut(header.data_section_count).ok_or_else(|| {
-            anyhow!("Too many data sections (while processing '{}')", section.name)
-        })? = DolSection { offset, address, size };
-        header.data_section_count += 1;
-        write_aligned(&mut out, &section.data, size)?;
-        offset += size;
-    }
-
-    // BSS sections
-    for (_, section) in obj.sections.iter().filter(|(_, s)| s.kind == ObjSectionKind::Bss) {
-        let address = section.address as u32;
-        let size = section.size as u32;
-        if header.bss_address == 0 {
-            header.bss_address = address;
-        }
-        header.bss_size = (address + size) - header.bss_address;
-    }
-
-    // Offsets
-    out.rewind()?;
-    for section in &header.text_sections {
-        out.write_all(&section.offset.to_be_bytes())?;
-    }
-    for section in &header.data_sections {
-        out.write_all(&section.offset.to_be_bytes())?;
-    }
-
-    // Addresses
-    for section in &header.text_sections {
-        out.write_all(&section.address.to_be_bytes())?;
-    }
-    for section in &header.data_sections {
-        out.write_all(&section.address.to_be_bytes())?;
-    }
-
-    // Sizes
-    for section in &header.text_sections {
-        out.write_all(&section.size.to_be_bytes())?;
-    }
-    for section in &header.data_sections {
-        out.write_all(&section.size.to_be_bytes())?;
-    }
-
-    // BSS + entry
-    out.write_all(&header.bss_address.to_be_bytes())?;
-    out.write_all(&header.bss_size.to_be_bytes())?;
-    out.write_all(&header.entry_point.to_be_bytes())?;
-
-    // Done!
-    out.flush()?;
+    write_dol(&obj, &mut out)?;
     Ok(())
 }
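With the rewrite above, any input whose first four bytes are not the ELF magic is round-tripped through `process_dol`/`write_dol`, which covers both ALF and BootStage. A hypothetical invocation (file names invented):

```shell
$ dtk elf2dol BS2.dol main.dol
```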

src/cmd/extab.rs (new file)

@@ -0,0 +1,78 @@
use std::io::Write;

use anyhow::{Context, Result};
use argp::FromArgs;
use typed_path::Utf8NativePathBuf;

use crate::{
    util,
    util::{
        dol::{process_dol, write_dol},
        elf::{is_elf_file, process_elf, write_elf},
        file::buf_writer,
        path::native_path,
    },
    vfs::open_file,
};

#[derive(FromArgs, PartialEq, Debug)]
/// Commands for processing extab (exception table) data.
#[argp(subcommand, name = "extab")]
pub struct Args {
    #[argp(subcommand)]
    command: SubCommand,
}

#[derive(FromArgs, PartialEq, Debug)]
#[argp(subcommand)]
enum SubCommand {
    Clean(CleanArgs),
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Rewrites extab data in a DOL or ELF file, replacing any uninitialized padding bytes.
#[argp(subcommand, name = "clean")]
pub struct CleanArgs {
    #[argp(positional, from_str_fn(native_path))]
    /// Path to input file
    input: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// Path to output file
    output: Utf8NativePathBuf,
    #[argp(option, short = 'p')]
    /// Data to replace padding bytes with, encoded as a hexadecimal string. If not specified, padding bytes will be zeroed instead.
    padding: Option<String>,
}

pub fn run(args: Args) -> Result<()> {
    match args.command {
        SubCommand::Clean(clean_args) => clean_extab(clean_args),
    }
}

fn clean_extab(args: CleanArgs) -> Result<()> {
    let is_elf = is_elf_file(&args.input)?;
    let mut obj = if is_elf {
        process_elf(&args.input)?
    } else {
        let mut file = open_file(&args.input, true)?;
        let name = args.input.file_stem().unwrap_or_default();
        process_dol(file.map()?, name)?
    };
    let padding: Vec<u8> = match args.padding {
        None => Vec::new(),
        Some(padding_str) => {
            hex::decode(padding_str).context("Failed to decode padding bytes from hex")?
        }
    };
    let num_cleaned = util::extab::clean_extab(&mut obj, padding.iter().copied())?;
    tracing::debug!("Cleaned {num_cleaned} extab symbols");
    let mut out = buf_writer(&args.output)?;
    if is_elf {
        let data = write_elf(&obj, false)?;
        out.write_all(&data).context("Failed to write ELF")?;
    } else {
        write_dol(&obj, &mut out).context("Failed to write DOL")?;
    }
    Ok(())
}
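Hypothetical invocations of the new subcommand (file names and padding value invented; `dtk extab clean input.dol output.dol` itself is quoted from the commit message above, and `-p` matches the `padding` option in `CleanArgs`):

```shell
# Zero out uninitialized extab padding bytes (the default)
$ dtk extab clean input.dol output.dol

# Replace padding bytes with a specific hex pattern instead
$ dtk extab clean input.elf output.elf -p DEADBEEF
```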


@@ -175,7 +175,7 @@ fn symbol(args: SymbolArgs) -> Result<()> {
     if let Some(vec) = entries.unit_references.get_vec(&symbol_ref) {
         println!("\nGenerated in TUs:");
         for x in vec {
-            println!(">>> {}", x);
+            println!(">>> {x}");
         }
     }
     println!("\n");

src/cmd/mod.rs

@@ -6,6 +6,7 @@ pub mod dol;
 pub mod dwarf;
 pub mod elf;
 pub mod elf2dol;
+pub mod extab;
 pub mod map;
 pub mod nlzss;
 pub mod rarc;


@@ -59,7 +59,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }


@@ -316,7 +316,7 @@ fn make(args: MakeArgs) -> Result<()> {
                 .unwrap_or(idx as u32);
             load_obj(file.map()?)
                 .map(|o| LoadedModule { module_id, file: o, path: path.clone() })
-                .with_context(|| format!("Failed to load '{}'", path))
+                .with_context(|| format!("Failed to load '{path}'"))
         })
         .collect::<Result<Vec<_>>>()?;
@@ -395,7 +395,7 @@ fn make(args: MakeArgs) -> Result<()> {
         let rel_path = module_info.path.with_extension("rel");
         let mut w = buf_writer(&rel_path)?;
         write_rel(&mut w, &info, &module_info.file, relocations)
-            .with_context(|| format!("Failed to write '{}'", rel_path))?;
+            .with_context(|| format!("Failed to write '{rel_path}'"))?;
         w.flush()?;
     }


@@ -143,7 +143,7 @@ fn make_rso(
             let si = sym
                 .section_index()
-                .with_context(|| format!("Failed to find symbol `{}` section index", name))?;
+                .with_context(|| format!("Failed to find symbol `{name}` section index"))?;
             let addr = sym.address();
             *index = si.0 as u8;


@@ -45,14 +45,13 @@ pub fn run(args: Args) -> Result<()> {
             check(&args, file.as_mut())?;
         }
         if let Some(out_path) = &args.output {
-            touch(out_path)
-                .with_context(|| format!("Failed to touch output file '{}'", out_path))?;
+            touch(out_path).with_context(|| format!("Failed to touch output file '{out_path}'"))?;
         }
     } else {
         let mut w: Box<dyn Write> = if let Some(out_path) = &args.output {
             Box::new(
                 buf_writer(out_path)
-                    .with_context(|| format!("Failed to open output file '{}'", out_path))?,
+                    .with_context(|| format!("Failed to open output file '{out_path}'"))?,
             )
         } else {
             Box::new(stdout())


@@ -85,7 +85,7 @@ fn file_info(
     metadata: &VfsMetadata,
 ) -> anyhow::Result<Columns<5>> {
     let format =
-        detect(file).with_context(|| format!("Failed to detect file format for {}", filename))?;
+        detect(file).with_context(|| format!("Failed to detect file format for {filename}"))?;
     let mut info: Columns<5> = [
         Size::from_bytes(metadata.len).to_string(),
         filename.to_string(),
@@ -97,9 +97,9 @@ fn file_info(
         let mut decompressed = decompress_file(file, kind)?;
         let metadata = decompressed
             .metadata()
-            .with_context(|| format!("Failed to fetch metadata for {}", filename))?;
+            .with_context(|| format!("Failed to fetch metadata for {filename}"))?;
         let format = detect(decompressed.as_mut())
-            .with_context(|| format!("Failed to detect file format for {}", filename))?;
+            .with_context(|| format!("Failed to detect file format for {filename}"))?;
         info[3] = format!("Decompressed: {}", Size::from_bytes(metadata.len));
         info[4] = format.to_string();
     }
@@ -112,11 +112,11 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
         OpenResult::File(mut file, path) => {
             let filename = path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
             if args.short {
-                println!("{}", filename);
+                println!("{filename}");
             } else {
                 let metadata = file
                     .metadata()
-                    .with_context(|| format!("Failed to fetch metadata for {}", path))?;
+                    .with_context(|| format!("Failed to fetch metadata for {path}"))?;
                 files.push(file_info(filename, file.as_mut(), &metadata)?);
             }
         }
@@ -131,10 +131,10 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
         for (i, column) in entry.iter().enumerate() {
             if widths[i] > 0 {
                 if written > 0 {
-                    print!("{}", SEPARATOR);
+                    print!("{SEPARATOR}");
                 }
                 written += 1;
-                print!("{}", column);
+                print!("{column}");
                 let remain = widths[i].saturating_sub(column.width_cjk());
                 if remain > 0 {
                     print!("{:width$}", "", width = remain);
@@ -161,25 +161,25 @@ fn ls_directory(
         let display_path = base_filename.join(&filename);
         let metadata = fs
             .metadata(&entry_path)
-            .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
+            .with_context(|| format!("Failed to fetch metadata for {entry_path}"))?;
         match metadata.file_type {
             VfsFileType::File => {
                 let mut file = fs
                     .open(&entry_path)
-                    .with_context(|| format!("Failed to open file {}", entry_path))?;
+                    .with_context(|| format!("Failed to open file {entry_path}"))?;
                 if args.short {
-                    println!("{}", display_path);
+                    println!("{display_path}");
                 } else {
                     files.push(file_info(display_path.as_str(), file.as_mut(), &metadata)?);
                 }
             }
             VfsFileType::Directory => {
                 if args.short {
-                    println!("{}/", display_path);
+                    println!("{display_path}/");
                 } else {
                     files.push([
                         " ".to_string(),
-                        format!("{}/", display_path),
+                        format!("{display_path}/"),
                         "Directory".to_string(),
                         String::new(),
                         String::new(),
@@ -206,7 +206,7 @@ pub fn cp(mut args: CpArgs) -> anyhow::Result<()> {
             OpenResult::File(file, path) => {
                 let dest = if dest_is_dir {
                     fs::create_dir_all(&dest)
-                        .with_context(|| format!("Failed to create directory {}", dest))?;
+                        .with_context(|| format!("Failed to create directory {dest}"))?;
                     let filename =
                         path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
                     dest.join(filename)
@@ -234,12 +234,12 @@ fn cp_file(
     if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
         if auto_decompress {
             file = decompress_file(file.as_mut(), kind)
-                .with_context(|| format!("Failed to decompress file {}", dest))?;
+                .with_context(|| format!("Failed to decompress file {dest}"))?;
             compression = Some(kind);
         }
     }
     let metadata =
-        file.metadata().with_context(|| format!("Failed to fetch metadata for {}", dest))?;
+        file.metadata().with_context(|| format!("Failed to fetch metadata for {dest}"))?;
     if !quiet {
         if let Some(kind) = compression {
             println!(
@@ -254,10 +254,10 @@ fn cp_file(
         }
     }
     let mut dest_file =
-        File::create(dest).with_context(|| format!("Failed to create file {}", dest))?;
+        File::create(dest).with_context(|| format!("Failed to create file {dest}"))?;
     buf_copy(file.as_mut(), &mut dest_file)
-        .with_context(|| format!("Failed to copy file {}", dest))?;
-    dest_file.flush().with_context(|| format!("Failed to flush file {}", dest))?;
+        .with_context(|| format!("Failed to copy file {dest}"))?;
+    dest_file.flush().with_context(|| format!("Failed to flush file {dest}"))?;
     Ok(())
 }
@@ -268,18 +268,18 @@ fn cp_recursive(
     auto_decompress: bool,
     quiet: bool,
 ) -> anyhow::Result<()> {
-    fs::create_dir_all(dest).with_context(|| format!("Failed to create directory {}", dest))?;
+    fs::create_dir_all(dest).with_context(|| format!("Failed to create directory {dest}"))?;
     let entries = fs.read_dir(path)?;
     for filename in entries {
         let entry_path = path.join(&filename);
         let metadata = fs
             .metadata(&entry_path)
-            .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
+            .with_context(|| format!("Failed to fetch metadata for {entry_path}"))?;
         match metadata.file_type {
             VfsFileType::File => {
                 let file = fs
                     .open(&entry_path)
-                    .with_context(|| format!("Failed to open file {}", entry_path))?;
+                    .with_context(|| format!("Failed to open file {entry_path}"))?;
                 cp_file(file, &entry_path, &dest.join(filename), auto_decompress, quiet)?;
             }
             VfsFileType::Directory => {


@@ -80,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }
@@ -92,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
         let data = {
             let mut file = open_file(&path, true)?;
             decompress_yay0(file.map()?)
-                .with_context(|| format!("Failed to decompress '{}' using Yay0", path))?
+                .with_context(|| format!("Failed to decompress '{path}' using Yay0"))?
         };
         let out_path = if let Some(output) = &args.output {
             if single_file {
@@ -104,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }


@@ -80,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }
@@ -92,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
         let data = {
             let mut file = open_file(&path, false)?;
             decompress_yaz0(file.map()?)
-                .with_context(|| format!("Failed to decompress '{}' using Yaz0", path))?
+                .with_context(|| format!("Failed to decompress '{path}' using Yaz0"))?
         };
         let out_path = if let Some(output) = &args.output {
             if single_file {
@@ -104,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }


@@ -96,6 +96,7 @@ enum SubCommand {
     Dwarf(cmd::dwarf::Args),
     Elf(cmd::elf::Args),
     Elf2Dol(cmd::elf2dol::Args),
+    Extab(cmd::extab::Args),
     Map(cmd::map::Args),
     Nlzss(cmd::nlzss::Args),
     Rarc(cmd::rarc::Args),
@@ -172,6 +173,7 @@ fn main() {
        SubCommand::Dwarf(c_args) => cmd::dwarf::run(c_args),
         SubCommand::Elf(c_args) => cmd::elf::run(c_args),
         SubCommand::Elf2Dol(c_args) => cmd::elf2dol::run(c_args),
+        SubCommand::Extab(c_args) => cmd::extab::run(c_args),
         SubCommand::Map(c_args) => cmd::map::run(c_args),
         SubCommand::Nlzss(c_args) => cmd::nlzss::run(c_args),
         SubCommand::Rarc(c_args) => cmd::rarc::run(c_args),


@@ -403,7 +403,7 @@ impl ObjSymbols {
     pub fn iter_ordered(&self) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
         self.symbols_by_section
             .iter()
-            .flat_map(|v| v.iter().map(|(_, v)| v))
+            .flat_map(|v| v.values())
             .flat_map(move |v| v.iter().map(move |u| (*u, &self.symbols[*u as usize])))
     }
@@ -450,7 +450,7 @@ impl ObjSymbols {
         self.symbols_by_section
             .get(section_idx as usize)
             .into_iter()
-            .flat_map(|v| v.iter().map(|(_, v)| v))
+            .flat_map(|v| v.values())
             .flat_map(move |v| v.iter().map(move |u| (*u, &self.symbols[*u as usize])))
     }


@@ -161,7 +161,7 @@ impl FromReader for AlfSymbolKind {
         match u32::from_reader(reader, e)? {
             0 => Ok(Self::Function),
             1 => Ok(Self::Object),
-            v => Err(Error::new(ErrorKind::InvalidData, format!("invalid ALF symbol kind: {}", v))),
+            v => Err(Error::new(ErrorKind::InvalidData, format!("invalid ALF symbol kind: {v}"))),
         }
     }
 }


@@ -442,12 +442,12 @@ where
             match parse_extab(symbols, entry, section) {
                 Ok(s) => {
                     for line in s.trim_end().lines() {
-                        writeln!(w, " * {}", line)?;
+                        writeln!(w, " * {line}")?;
                     }
                 }
                 Err(e) => {
                     log::warn!("Failed to decode extab entry {}: {}", symbol.name, e);
-                    writeln!(w, " * Failed to decode extab entry: {}", e)?;
+                    writeln!(w, " * Failed to decode extab entry: {e}")?;
                 }
             }
             writeln!(w, " */")?;
@@ -505,7 +505,7 @@ where
             }
             current_symbol_kind = find_symbol_kind(current_symbol_kind, symbols, vec)?;
             current_data_kind = find_data_kind(current_data_kind, symbols, vec)
-                .with_context(|| format!("At address {:#010X}", sym_addr))?;
+                .with_context(|| format!("At address {sym_addr:#010X}"))?;
             entry = entry_iter.next();
         } else if current_address > sym_addr {
             let dbg_symbols = vec.iter().map(|e| &symbols[e.index as usize]).collect_vec();
@@ -660,8 +660,8 @@ where W: Write + ?Sized {
             '\x0D' => write!(w, "\\r")?,
             '\\' => write!(w, "\\\\")?,
             '"' => write!(w, "\\\"")?,
-            c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
-            _ => write!(w, "\\{:03o}", b)?,
+            c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{c}")?,
+            _ => write!(w, "\\{b:03o}")?,
         }
     }
     writeln!(w, "\"")?;
@@ -684,13 +684,13 @@ where W: Write + ?Sized {
     for c in cow.chars() {
         match c {
             '#' => write!(w, "\\#")?,
-            _ => write!(w, "{}", c)?,
+            _ => write!(w, "{c}")?,
         }
     }
     write!(w, "\n\t.byte ")?;
     for (i, &b) in data.iter().enumerate() {
-        write!(w, "0x{:02X}", b)?;
+        write!(w, "0x{b:02X}")?;
         if i + 1 != data.len() {
             write!(w, ", ")?;
         }
@@ -721,7 +721,7 @@ where W: Write + ?Sized {
             '\x0D' => write!(w, "\\r")?,
             '\\' => write!(w, "\\\\")?,
             '"' => write!(w, "\\\"")?,
-            c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
+            c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{c}")?,
             _ => write!(w, "\\{:#X}", c as u32)?,
         }
     }
@@ -793,7 +793,7 @@ where W: Write + ?Sized {
     };
     for chunk in remain.chunks(chunk_size) {
         if data_kind == ObjDataKind::Byte || matches!(chunk.len(), 1 | 3 | 5..=7) {
-            let bytes = chunk.iter().map(|c| format!("{:#04X}", c)).collect::<Vec<String>>();
+            let bytes = chunk.iter().map(|c| format!("{c:#04X}")).collect::<Vec<String>>();
             writeln!(w, "\t.byte {}", bytes.join(", "))?;
         } else {
             match chunk.len() {


@@ -95,7 +95,7 @@ fn bin2c_symbol(
         } else {
             output.push(' ');
         }
-        output.push_str(&format!("0x{:02X},", byte));
+        output.push_str(&format!("0x{byte:02X},"));
     }
     output.push_str("\n};\n");
     output
@@ -111,7 +111,7 @@ fn bin2c_raw(data: &[u8]) -> String {
                 output.push(' ');
             }
         }
-        output.push_str(&format!("0x{:02X},", byte));
+        output.push_str(&format!("0x{byte:02X},"));
     output.push('\n');
     output


@@ -58,7 +58,7 @@ impl FromReader for MWComment {
         if magic != MAGIC {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Invalid .comment section magic: {:?}", magic),
+                format!("Invalid .comment section magic: {magic:?}"),
             ));
         }
         // 0xB
@@ -78,7 +78,7 @@ impl FromReader for MWComment {
             value => {
                 return Err(io::Error::new(
                     io::ErrorKind::InvalidData,
-                    format!("Invalid value for pool_data: {}", value),
+                    format!("Invalid value for pool_data: {value}"),
                 ))
             }
         };
@@ -93,7 +93,7 @@ impl FromReader for MWComment {
             v => {
                 return Err(io::Error::new(
                     io::ErrorKind::InvalidData,
-                    format!("Expected header size {:#X}, got {:#X}", HEADER_SIZE, v),
+                    format!("Expected header size {HEADER_SIZE:#X}, got {v:#X}"),
                 ))
             }
         }
@@ -102,7 +102,7 @@ impl FromReader for MWComment {
         if flags & !7 != 0 {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Unexpected flag value {:#X}", flags),
+                format!("Unexpected flag value {flags:#X}"),
             ));
         }
         if flags & 1 == 1 {
@@ -221,14 +221,14 @@ impl FromReader for CommentSym {
         if value != 0 {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Unexpected value after active_flags (1): {:#X}", value),
+                format!("Unexpected value after active_flags (1): {value:#X}"),
             ));
         }
         let value = u8::from_reader(reader, e)?;
         if value != 0 {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Unexpected value after active_flags (2): {:#X}", value),
+                format!("Unexpected value after active_flags (2): {value:#X}"),
             ));
         }
         Ok(out)


@@ -282,11 +282,11 @@ where W: Write + ?Sized {
         write!(w, " data:{kind}")?;
     }
     if let Some(hash) = symbol.name_hash {
-        write!(w, " hash:{:#010X}", hash)?;
+        write!(w, " hash:{hash:#010X}")?;
     }
     if let Some(hash) = symbol.demangled_name_hash {
         if symbol.name_hash != symbol.demangled_name_hash {
-            write!(w, " dhash:{:#010X}", hash)?;
+            write!(w, " dhash:{hash:#010X}")?;
         }
     }
     if symbol.flags.is_hidden() {
@@ -439,10 +439,10 @@ where W: Write + ?Sized {
     for unit in obj.link_order.iter().filter(|unit| all || !unit.autogenerated) {
         write!(w, "\n{}:", unit.name)?;
         if let Some(comment_version) = unit.comment_version {
-            write!(w, " comment:{}", comment_version)?;
+            write!(w, " comment:{comment_version}")?;
         }
         if let Some(order) = unit.order {
-            write!(w, " order:{}", order)?;
+            write!(w, " order:{order}")?;
         }
         writeln!(w)?;
         let mut split_iter = obj.sections.all_splits().peekable();
@@ -458,14 +458,14 @@ where W: Write + ?Sized {
         write!(w, "\t{:<11} start:{:#010X} end:{:#010X}", section.name, addr, end)?;
         if let Some(align) = split.align {
             if align != default_section_align(section) as u32 {
-                write!(w, " align:{}", align)?;
+                write!(w, " align:{align}")?;
             }
         }
         if split.common {
             write!(w, " common")?;
         }
         if let Some(name) = &split.rename {
-            write!(w, " rename:{}", name)?;
+            write!(w, " rename:{name}")?;
         }
         if split.skip {
             write!(w, " skip")?;
@@ -783,7 +783,7 @@ pub mod signed_hex_serde {
         if *value < 0 {
             serializer.serialize_str(&format!("-{:#X}", -value))
         } else {
-            serializer.serialize_str(&format!("{:#X}", value))
+            serializer.serialize_str(&format!("{value:#X}"))
         }
     }


@@ -209,7 +209,7 @@ fn print_line(ins_diff: &ObjInsDiff, base_addr: u64) -> Vec<Span> {
             pad_to = 5;
         }
         DiffText::Address(addr) => {
-            label_text = format!("{:x}:", addr);
+            label_text = format!("{addr:x}:");
             pad_to = 5;
         }
         DiffText::Opcode(mnemonic, _op) => {

File diff suppressed because it is too large.


@@ -358,6 +358,7 @@ pub struct Tag {
     pub kind: TagKind,
     pub is_erased: bool,      // Tag was deleted but has been reconstructed
     pub is_erased_root: bool, // Tag is erased and is the root of a tree of erased tags
+    pub data_endian: Endian,  // Endianness of the tag data (could be different from the address endianness for erased tags)
     pub attributes: Vec<Attribute>,
 }
@@ -554,6 +555,7 @@ where
             kind: TagKind::Padding,
             is_erased,
             is_erased_root: false,
+            data_endian,
             attributes: Vec::new(),
         });
         return Ok(tags);
@@ -563,26 +565,42 @@ where
     let tag = TagKind::try_from(tag_num).context("Unknown DWARF tag type")?;
     if tag == TagKind::Padding {
         if include_erased {
-            // Erased entries that have become padding are little-endian, and we
-            // have to guess the length and tag of the first entry. We assume
-            // the entry is either a variable or a function, and read until we
-            // find the high_pc attribute. Only MwGlobalRef will follow, and
-            // these are unlikely to be confused with the length of the next
-            // entry.
+            // Erased entries that have become padding could be either
+            // little-endian or big-endian, and we have to guess the length and
+            // tag of the first entry. We assume the entry is either a variable
+            // or a function, and read until we find the high_pc attribute. Only
+            // MwGlobalRef will follow, and these are unlikely to be confused
+            // with the length of the next entry.
             let mut attributes = Vec::new();
             let mut is_function = false;
+            // Guess endianness based on first attribute
+            let data_endian = if is_erased {
+                data_endian
+            } else {
+                // Peek next two bytes
+                let mut buf = [0u8; 2];
+                reader.read_exact(&mut buf)?;
+                let attr_tag = u16::from_reader(&mut Cursor::new(&buf), data_endian)?;
+                reader.seek(SeekFrom::Current(-2))?;
+                match AttributeKind::try_from(attr_tag) {
+                    Ok(_) => data_endian,
+                    Err(_) => data_endian.flip(),
+                }
+            };
             while reader.stream_position()? < position + size as u64 {
                 // Peek next two bytes
                 let mut buf = [0u8; 2];
                 reader.read_exact(&mut buf)?;
-                let attr_tag = u16::from_reader(&mut Cursor::new(&buf), Endian::Little)?;
+                let attr_tag = u16::from_reader(&mut Cursor::new(&buf), data_endian)?;
                 reader.seek(SeekFrom::Current(-2))?;
                 if is_function && attr_tag != AttributeKind::MwGlobalRef as u16 {
                     break;
                 }
-                let attr = read_attribute(reader, Endian::Little, addr_endian)?;
+                let attr = read_attribute(reader, data_endian, addr_endian)?;
                 if attr.kind == AttributeKind::HighPc {
                     is_function = true;
                 }
@@ -594,12 +612,13 @@ where
                 kind,
                 is_erased: true,
                 is_erased_root: true,
+                data_endian,
                 attributes,
             });
             // Read the rest of the tags
             while reader.stream_position()? < position + size as u64 {
-                for tag in read_tags(reader, Endian::Little, addr_endian, include_erased, true)? {
+                for tag in read_tags(reader, data_endian, addr_endian, include_erased, true)? {
                     tags.push(tag);
                 }
             }
@@ -616,6 +635,7 @@ where
             kind: tag,
             is_erased,
             is_erased_root: false,
+            data_endian,
             attributes,
         });
     }
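
As a standalone illustration of the endianness guess introduced above: peek the next two-byte attribute tag, try to decode it under the assumed byte order, and flip if it does not decode. This is a minimal sketch with toy attribute tag values and a hand-rolled Endian type standing in for the crate's own types; it is not the real AttributeKind table.

use std::io::{Cursor, Read, Result, Seek, SeekFrom};

#[derive(Copy, Clone, Debug, PartialEq)]
enum Endian {
    Big,
    Little,
}

impl Endian {
    fn flip(self) -> Self {
        match self {
            Endian::Big => Endian::Little,
            Endian::Little => Endian::Big,
        }
    }

    fn read_u16(self, buf: [u8; 2]) -> u16 {
        match self {
            Endian::Big => u16::from_be_bytes(buf),
            Endian::Little => u16::from_le_bytes(buf),
        }
    }
}

// Toy stand-in for AttributeKind::try_from: accept a few known tag values.
fn is_known_attribute(tag: u16) -> bool {
    matches!(tag, 0x0110 | 0x0111 | 0x0112)
}

/// Peek the next attribute tag without consuming it; if it does not decode to
/// a known attribute under `assumed`, guess the opposite byte order.
fn guess_data_endian<R: Read + Seek>(reader: &mut R, assumed: Endian) -> Result<Endian> {
    let mut buf = [0u8; 2];
    reader.read_exact(&mut buf)?;
    reader.seek(SeekFrom::Current(-2))?; // rewind: this was only a peek
    Ok(if is_known_attribute(assumed.read_u16(buf)) { assumed } else { assumed.flip() })
}

fn main() -> Result<()> {
    // Tag 0x0110 stored little-endian: a big-endian read sees 0x1001 (unknown) and flips.
    let mut reader = Cursor::new(vec![0x10u8, 0x01]);
    assert_eq!(guess_data_endian(&mut reader, Endian::Big)?, Endian::Little);
    Ok(())
}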
@@ -1145,8 +1165,8 @@ fn structure_type_string(
         struct_def_string(info, typedefs, t)?
     } else if include_keyword {
         match t.kind {
-            StructureKind::Struct => format!("struct {}", name),
-            StructureKind::Class => format!("class {}", name),
+            StructureKind::Struct => format!("struct {name}"),
+            StructureKind::Class => format!("class {name}"),
         }
     } else {
         name.clone()
@@ -1178,7 +1198,7 @@ fn enumeration_type_string(
     if name.starts_with('@') {
         enum_def_string(t)?
     } else if include_keyword {
-        format!("enum {}", name)
+        format!("enum {name}")
     } else {
         name.clone()
     }
@@ -1203,7 +1223,7 @@ fn union_type_string(
     if name.starts_with('@') {
         union_def_string(info, typedefs, t)?
     } else if include_keyword {
-        format!("union {}", name)
+        format!("union {name}")
     } else {
         name.clone()
     }
@@ -1306,7 +1326,7 @@ pub fn subroutine_type_string(
             write!(parameters, "{}{}", ts.prefix, ts.suffix)?;
         }
         if let Some(location) = &parameter.location {
-            write!(parameters, " /* {} */", location)?;
+            write!(parameters, " /* {location} */")?;
         }
     }
     if t.var_args {
@@ -1322,7 +1342,7 @@ pub fn subroutine_type_string(
         let base_name = tag
             .string_attribute(AttributeKind::Name)
             .ok_or_else(|| anyhow!("member_of tag {} has no name attribute", member_of))?;
-        out.member = format!("{}::", base_name);
+        out.member = format!("{base_name}::");
     }
     Ok(out)
 }
@@ -1337,7 +1357,7 @@ pub fn subroutine_def_string(
     if is_erased {
         out.push_str("// Erased\n");
     } else if let (Some(start), Some(end)) = (t.start_address, t.end_address) {
-        writeln!(out, "// Range: {:#X} -> {:#X}", start, end)?;
+        writeln!(out, "// Range: {start:#X} -> {end:#X}")?;
     }
     let rt = type_string(info, typedefs, &t.return_type, true)?;
     if t.local {
@@ -1361,15 +1381,15 @@ pub fn subroutine_def_string(
         let base_name = tag
             .string_attribute(AttributeKind::Name)
             .ok_or_else(|| anyhow!("member_of tag {} has no name attribute", member_of))?;
-        write!(out, "{}::", base_name)?;
+        write!(out, "{base_name}::")?;
         // Handle constructors and destructors
         if let Some(name) = t.name.as_ref() {
             if name == "__dt" {
-                write!(out, "~{}", base_name)?;
+                write!(out, "~{base_name}")?;
                 name_written = true;
             } else if name == "__ct" {
-                write!(out, "{}", base_name)?;
+                write!(out, "{base_name}")?;
                 name_written = true;
             }
         }
@@ -1398,7 +1418,7 @@ pub fn subroutine_def_string(
             write!(parameters, "{}{}", ts.prefix, ts.suffix)?;
         }
         if let Some(location) = &parameter.location {
-            write!(parameters, " /* {} */", location)?;
+            write!(parameters, " /* {location} */")?;
         }
     }
     if t.var_args {
@@ -1420,7 +1440,7 @@ pub fn subroutine_def_string(
             ts.suffix
         )?;
         if let Some(location) = &variable.location {
-            write!(var_out, " // {}", location)?;
+            write!(var_out, " // {location}")?;
         }
         writeln!(var_out)?;
     }
@@ -1435,7 +1455,7 @@ pub fn subroutine_def_string(
             .get(&reference)
             .ok_or_else(|| anyhow!("Failed to locate reference tag {}", reference))?;
         if tag.kind == TagKind::Padding {
-            writeln!(out, " // -> ??? ({})", reference)?;
+            writeln!(out, " // -> ??? ({reference})")?;
             continue;
         }
         let variable = process_variable_tag(info, tag)?;
@@ -1477,13 +1497,13 @@ fn subroutine_block_string(
 ) -> Result<String> {
     let mut out = String::new();
     if let Some(name) = &block.name {
-        write!(out, "{}: ", name)?;
+        write!(out, "{name}: ")?;
     } else {
         out.push_str("/* anonymous block */ ");
     }
     out.push_str("{\n");
     if let (Some(start), Some(end)) = (block.start_address, block.end_address) {
-        writeln!(out, " // Range: {:#X} -> {:#X}", start, end)?;
+        writeln!(out, " // Range: {start:#X} -> {end:#X}")?;
     }
     let mut var_out = String::new();
     for variable in &block.variables {
@@ -1496,7 +1516,7 @@ fn subroutine_block_string(
             ts.suffix
         )?;
         if let Some(location) = &variable.location {
-            write!(var_out, " // {}", location)?;
+            write!(var_out, " // {location}")?;
         }
         writeln!(var_out)?;
     }
@@ -1635,9 +1655,9 @@ pub fn struct_def_string(
     };
     if let Some(name) = t.name.as_ref() {
         if name.starts_with('@') {
-            write!(out, " /* {} */", name)?;
+            write!(out, " /* {name} */")?;
         } else {
-            write!(out, " {}", name)?;
+            write!(out, " {name}")?;
         }
     }
     let mut wrote_base = false;
@@ -1665,7 +1685,7 @@ pub fn struct_def_string(
     }
     out.push_str(" {\n");
     if let Some(byte_size) = t.byte_size {
-        writeln!(out, " // total size: {:#X}", byte_size)?;
+        writeln!(out, " // total size: {byte_size:#X}")?;
     }
     let mut vis = match t.kind {
         StructureKind::Struct => Visibility::Public,
@@ -1751,9 +1771,9 @@ pub fn enum_def_string(t: &EnumerationType) -> Result<String> {
     let mut out = match t.name.as_ref() {
         Some(name) => {
             if name.starts_with('@') {
-                format!("enum /* {} */ {{\n", name)
+                format!("enum /* {name} */ {{\n")
             } else {
-                format!("enum {} {{\n", name)
+                format!("enum {name} {{\n")
             }
         }
         None => "enum {\n".to_string(),
@@ -1769,9 +1789,9 @@ pub fn union_def_string(info: &DwarfInfo, typedefs: &TypedefMap, t: &UnionType)
     let mut out = match t.name.as_ref() {
         Some(name) => {
            if name.starts_with('@') {
-                format!("union /* {} */ {{\n", name)
+                format!("union /* {name} */ {{\n")
             } else {
-                format!("union {} {{\n", name)
+                format!("union {name} {{\n")
             }
         }
         None => "union {\n".to_string(),
@@ -2028,9 +2048,9 @@ fn process_array_tag(info: &DwarfInfo, tag: &Tag) -> Result<ArrayType> {
             (AttributeKind::Sibling, _) => {}
             (AttributeKind::SubscrData, AttributeValue::Block(data)) => {
                 subscr_data =
-                    Some(process_array_subscript_data(data, info.e, tag.is_erased).with_context(
-                        || format!("Failed to process SubscrData for tag: {:?}", tag),
-                    )?)
+                    Some(process_array_subscript_data(data, info.e).with_context(|| {
+                        format!("Failed to process SubscrData for tag: {tag:?}")
+                    })?)
             }
             (AttributeKind::Ordering, val) => match val {
                 AttributeValue::Data2(d2) => {
@@ -2056,11 +2076,7 @@ fn process_array_tag(info: &DwarfInfo, tag: &Tag) -> Result<ArrayType> {
     Ok(ArrayType { element_type: Box::from(element_type), dimensions })
 }

-fn process_array_subscript_data(
-    data: &[u8],
-    e: Endian,
-    is_erased: bool,
-) -> Result<(Type, Vec<ArrayDimension>)> {
+fn process_array_subscript_data(data: &[u8], e: Endian) -> Result<(Type, Vec<ArrayDimension>)> {
     let mut element_type = None;
     let mut dimensions = Vec::new();
     let mut data = data;
@@ -2101,8 +2117,7 @@ fn process_array_subscript_data(
             SubscriptFormat::ElementType => {
                 let mut cursor = Cursor::new(data);
                 // TODO: is this the right endianness to use for erased tags?
-                let type_attr =
-                    read_attribute(&mut cursor, if is_erased { Endian::Little } else { e }, e)?;
+                let type_attr = read_attribute(&mut cursor, e, e)?;
                 element_type = Some(process_type(&type_attr, e)?);
                 data = &data[cursor.position() as usize..];
             }
@@ -2456,10 +2471,7 @@ fn process_subroutine_parameter_tag(info: &DwarfInfo, tag: &Tag) -> Result<Subro
             ) => kind = Some(process_type(attr, info.e)?),
             (AttributeKind::Location, AttributeValue::Block(block)) => {
                 if !block.is_empty() {
-                    location = Some(process_variable_location(
-                        block,
-                        if tag.is_erased { Endian::Little } else { info.e },
-                    )?);
+                    location = Some(process_variable_location(block, tag.data_endian)?);
                 }
             }
             (AttributeKind::MwDwarf2Location, AttributeValue::Block(_block)) => {
@@ -2514,10 +2526,7 @@ fn process_local_variable_tag(info: &DwarfInfo, tag: &Tag) -> Result<SubroutineV
             ) => kind = Some(process_type(attr, info.e)?),
             (AttributeKind::Location, AttributeValue::Block(block)) => {
                 if !block.is_empty() {
-                    location = Some(process_variable_location(
-                        block,
-                        if tag.is_erased { Endian::Little } else { info.e },
-                    )?);
+                    location = Some(process_variable_location(block, tag.data_endian)?);
                 }
             }
             (AttributeKind::MwDwarf2Location, AttributeValue::Block(_block)) => {
@@ -2615,13 +2624,13 @@ pub fn process_type(attr: &Attribute, e: Endian) -> Result<Type> {
     match (attr.kind, &attr.value) {
         (AttributeKind::FundType, &AttributeValue::Data2(type_id)) => {
             let fund_type = FundType::parse_int(type_id)
-                .with_context(|| format!("Invalid fundamental type ID '{:04X}'", type_id))?;
+                .with_context(|| format!("Invalid fundamental type ID '{type_id:04X}'"))?;
             Ok(Type { kind: TypeKind::Fundamental(fund_type), modifiers: vec![] })
         }
         (AttributeKind::ModFundType, AttributeValue::Block(ops)) => {
             let type_id = u16::from_bytes(ops[ops.len() - 2..].try_into()?, e);
             let fund_type = FundType::parse_int(type_id)
-                .with_context(|| format!("Invalid fundamental type ID '{:04X}'", type_id))?;
+                .with_context(|| format!("Invalid fundamental type ID '{type_id:04X}'"))?;
             let modifiers = process_modifiers(&ops[..ops.len() - 2])?;
             Ok(Type { kind: TypeKind::Fundamental(fund_type), modifiers })
         }
@@ -2762,7 +2771,7 @@ pub fn tag_type_string(
             match ud {
                 UserDefinedType::Structure(_)
                 | UserDefinedType::Enumeration(_)
-                | UserDefinedType::Union(_) => Ok(format!("{};", ud_str)),
+                | UserDefinedType::Union(_) => Ok(format!("{ud_str};")),
                 _ => Ok(ud_str),
             }
         }
@@ -2789,9 +2798,9 @@ fn variable_string(
     out.push(';');
     if include_extra {
         let size = variable.kind.size(info)?;
-        out.push_str(&format!(" // size: {:#X}", size));
+        out.push_str(&format!(" // size: {size:#X}"));
         if let Some(addr) = variable.address {
-            out.push_str(&format!(", address: {:#X}", addr));
+            out.push_str(&format!(", address: {addr:#X}"));
         }
     }
     Ok(out)


@@ -20,7 +20,7 @@ use object::{
     Architecture, Endianness, File, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation,
     RelocationFlags, RelocationTarget, SectionKind, Symbol, SymbolKind, SymbolScope, SymbolSection,
 };
-use typed_path::Utf8NativePath;
+use typed_path::{Utf8NativePath, Utf8NativePathBuf};

 use crate::{
     array_ref,
@@ -164,7 +164,7 @@ pub fn process_elf(path: &Utf8NativePath) -> Result<ObjInfo> {
                 hash_map::Entry::Vacant(e) => e.insert(0),
             };
             *index += 1;
-            let new_name = format!("{}_{}", file_name, index);
+            let new_name = format!("{file_name}_{index}");
             // log::info!("Renaming {} to {}", file_name, new_name);
             file_name.clone_from(&new_name);
             match section_starts.entry(new_name.clone()) {
@@ -275,8 +275,8 @@ pub fn process_elf(path: &Utf8NativePath) -> Result<ObjInfo> {
             continue;
         }
         symbol_indexes.push(Some(symbols.len() as ObjSymbolIndex));
-        let align = mw_comment.as_ref().map(|(_, vec)| vec[symbol.index().0].align);
-        symbols.push(to_obj_symbol(&obj_file, &symbol, &section_indexes, align)?);
+        let comment_sym = mw_comment.as_ref().map(|(_, vec)| &vec[symbol.index().0 - 1]);
+        symbols.push(to_obj_symbol(&obj_file, &symbol, &section_indexes, comment_sym)?);
     }

     let mut link_order = Vec::<ObjUnit>::new();
@@ -374,6 +374,7 @@ fn load_comment(obj_file: &File) -> Result<Option<(MWComment, Vec<CommentSym>)>>
     let mut reader = Cursor::new(&*data);
     let header = MWComment::from_reader(&mut reader, Endian::Big)?;
     log::debug!("Loaded .comment section header {:?}", header);
+    CommentSym::from_reader(&mut reader, Endian::Big)?; // Null symbol
     let mut comment_syms = Vec::with_capacity(obj_file.symbols().count());
     for symbol in obj_file.symbols() {
         let comment_sym = CommentSym::from_reader(&mut reader, Endian::Big)?;
@@ -861,7 +862,7 @@ fn to_obj_symbol(
     obj_file: &object::File<'_>,
     symbol: &Symbol<'_, '_>,
     section_indexes: &[Option<usize>],
-    align: Option<u32>,
+    comment_sym: Option<&CommentSym>,
 ) -> Result<ObjSymbol> {
     let section = match symbol.section_index() {
         Some(idx) => Some(obj_file.section_by_index(idx)?),
@@ -891,6 +892,9 @@ fn to_obj_symbol(
     if symbol.scope() == SymbolScope::Linkage {
         flags = ObjSymbolFlagSet(flags.0 | ObjSymbolFlags::Hidden);
     }
+    if comment_sym.is_some_and(|c| c.active_flags & 0x8 != 0) {
+        flags = ObjSymbolFlagSet(flags.0 | ObjSymbolFlags::Exported);
+    }
     let section_idx = section.as_ref().and_then(|section| section_indexes[section.index().0]);
     Ok(ObjSymbol {
         name: name.to_string(),
@@ -907,7 +911,7 @@ fn to_obj_symbol(
             SymbolKind::Section => ObjSymbolKind::Section,
             _ => bail!("Unsupported symbol kind: {:?}", symbol),
         },
-        align,
+        align: comment_sym.map(|c| c.align),
         ..Default::default()
     })
 }
@@ -1005,3 +1009,10 @@ fn write_relocatable_section_data(w: &mut Writer, section: &ObjSection) -> Resul
     w.write(&section.data[current_address..]);
     Ok(())
 }
+
+pub fn is_elf_file(path: &Utf8NativePathBuf) -> Result<bool> {
+    let mut file = open_file(path, true)?;
+    let mut magic = [0; 4];
+    file.read_exact(&mut magic)?;
+    Ok(magic == elf::ELFMAG)
+}

src/util/extab.rs (new file)

@@ -0,0 +1,57 @@
use anyhow::{Context, Result};
use itertools::Itertools;

use crate::obj::ObjInfo;

pub fn clean_extab(obj: &mut ObjInfo, mut padding: impl Iterator<Item = u8>) -> Result<usize> {
    let (extab_section_index, extab_section) = obj
        .sections
        .iter_mut()
        .find(|(_, s)| s.name == "extab")
        .ok_or_else(|| anyhow::anyhow!("No extab section found"))?;
    let mut num_cleaned = 0;
    for (_symbol_index, symbol) in obj
        .symbols
        .for_section(extab_section_index)
        .filter(|(_, s)| s.size > 0)
        .sorted_by_key(|(_, s)| s.address)
    {
        let data = extab_section.symbol_data(symbol)?;
        let decoded = cwextab::decode_extab(data).with_context(|| {
            format!(
                "Failed to decode {} (extab {:#010X}..{:#010X})",
                symbol.name,
                symbol.address,
                symbol.address + symbol.size
            )
        })?;
        let mut updated = false;
        for action in &decoded.exception_actions {
            // Check if the current action has padding
            if let Some(padding_offset) = action.get_struct_padding_offset() {
                let index = padding_offset as usize;
                let section_offset = (symbol.address - extab_section.address) as usize
                    + action.action_offset as usize;
                let mut clean_data: Vec<u8> = action.get_exaction_bytes(false);
                // Write the two padding bytes
                clean_data[index] = padding.next().unwrap_or(0);
                clean_data[index + 1] = padding.next().unwrap_or(0);
                let orig_data =
                    &mut extab_section.data[section_offset..section_offset + clean_data.len()];
                orig_data.copy_from_slice(&clean_data);
                updated = true;
            }
        }
        if updated {
            tracing::debug!(
                "Replaced uninitialized bytes in {} (extab {:#010X}..{:#010X})",
                symbol.name,
                symbol.address,
                symbol.address + symbol.size
            );
            num_cleaned += 1;
        }
    }
    Ok(num_cleaned)
}
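
A minimal illustration of how the padding parameter above behaves: each padding slot consumes the next byte from the iterator, and an exhausted iterator falls back to zero via unwrap_or(0). The byte values here are arbitrary examples.

fn main() {
    // e.g. the bytes hex::decode produced in cmd/extab.rs for `-p BAAD`
    let padding = vec![0xBAu8, 0xAD];
    let mut iter = padding.into_iter();
    // Four padding slots but only two supplied bytes: the rest become zero.
    let slots: Vec<u8> = (0..4).map(|_| iter.next().unwrap_or(0)).collect();
    assert_eq!(slots, vec![0xBA, 0xAD, 0x00, 0x00]);
}

Since the parameter is any impl Iterator<Item = u8>, a caller that wants the pattern to repeat across every slot could pass bytes.into_iter().cycle() rather than a one-shot iterator.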


@@ -26,7 +26,7 @@ pub fn buf_writer(path: &Utf8NativePath) -> Result<BufWriter<File>> {
     if let Some(parent) = path.parent() {
         DirBuilder::new().recursive(true).create(parent)?;
     }
-    let file = File::create(path).with_context(|| format!("Failed to create file '{}'", path))?;
+    let file = File::create(path).with_context(|| format!("Failed to create file '{path}'"))?;
     Ok(BufWriter::new(file))
 }


@@ -47,11 +47,11 @@ pub fn generate_ldscript(
     let out = template
         .unwrap_or(LCF_TEMPLATE)
-        .replace("$ORIGIN", &format!("{:#X}", origin))
+        .replace("$ORIGIN", &format!("{origin:#X}"))
         .replace("$SECTIONS", &section_defs)
         .replace("$LAST_SECTION_SYMBOL", &last_section_symbol)
         .replace("$LAST_SECTION_NAME", &last_section_name)
-        .replace("$STACKSIZE", &format!("{:#X}", stack_size))
+        .replace("$STACKSIZE", &format!("{stack_size:#X}"))
         .replace("$FORCEACTIVE", &force_active.join("\n    "))
         .replace("$ARENAHI", &format!("{:#X}", obj.arena_hi.unwrap_or(0x81700000)));
     Ok(out)
@@ -74,7 +74,7 @@ pub fn generate_ldscript_partial(
     // Some RELs have no entry point (`.text` was stripped) so mwld requires at least an empty
     // `.init` section to be present in the linker script, for some reason.
     if obj.entry.is_none() {
-        section_defs = format!(".init :{{}}\n    {}", section_defs);
+        section_defs = format!(".init :{{}}\n    {section_defs}");
     }
     let mut force_files = Vec::with_capacity(obj.link_order.len());


@@ -10,6 +10,7 @@ pub mod diff;
 pub mod dol;
 pub mod dwarf;
 pub mod elf;
+pub mod extab;
 pub mod file;
 pub mod lcf;
 pub mod map;


@@ -209,7 +209,7 @@ impl<'a> RarcView<'a> {
             )
         })?;
         let c_string = CStr::from_bytes_until_nul(name_buf)
-            .map_err(|_| format!("RARC: name at offset {} not null-terminated", offset))?;
+            .map_err(|_| format!("RARC: name at offset {offset} not null-terminated"))?;
         Ok(c_string.to_string_lossy())
     }


@@ -20,6 +20,15 @@ impl From<object::Endianness> for Endian {
     }
 }

+impl Endian {
+    pub fn flip(self) -> Self {
+        match self {
+            Endian::Big => Endian::Little,
+            Endian::Little => Endian::Big,
+        }
+    }
+}
+
 pub const DYNAMIC_SIZE: usize = 0;

 pub const fn struct_size<const N: usize>(fields: [usize; N]) -> usize {
@@ -263,6 +272,18 @@ impl ToWriter for Vec<u8> {
     fn write_size(&self) -> usize { self.len() }
 }

+impl<const N: usize> ToWriter for [u32; N] {
+    fn to_writer<W>(&self, writer: &mut W, e: Endian) -> io::Result<()>
+    where W: Write + ?Sized {
+        for &value in self {
+            value.to_writer(writer, e)?;
+        }
+        Ok(())
+    }
+
+    fn write_size(&self) -> usize { N * u32::STATIC_SIZE }
+}
+
 pub fn write_vec<T, W>(writer: &mut W, vec: &[T], e: Endian) -> io::Result<()>
 where
     T: ToWriter,


@@ -364,7 +364,7 @@ where
     reader.seek(SeekFrom::Start(header.section_info_offset as u64))?;
     for idx in 0..header.num_sections {
         let section = RelSectionHeader::from_reader(reader, Endian::Big)
-            .with_context(|| format!("Failed to read REL section header {}", idx))?;
+            .with_context(|| format!("Failed to read REL section header {idx}"))?;
         sections.push(section);
     }
     Ok(sections)
@@ -390,7 +390,7 @@ where R: Read + Seek + ?Sized {
             reader.seek(SeekFrom::Start(offset as u64))?;
             let mut data = vec![0u8; size as usize];
             reader.read_exact(&mut data).with_context(|| {
-                format!("Failed to read REL section {} data with size {:#X}", idx, size)
+                format!("Failed to read REL section {idx} data with size {size:#X}")
             })?;
             reader.seek(SeekFrom::Start(position))?;
             data
@@ -405,7 +405,7 @@ where R: Read + Seek + ?Sized {
             text_section = Some(idx as u8);
             (".text".to_string(), ObjSectionKind::Code, true)
         } else {
-            (format!(".section{}", idx), ObjSectionKind::Data, false)
+            (format!(".section{idx}"), ObjSectionKind::Data, false)
         };
         sections.push(ObjSection {
             name,


@@ -147,14 +147,14 @@ impl FromReader for RsoHeader {
         if next != 0 {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Expected 'next' to be 0, got {:#X}", next),
+                format!("Expected 'next' to be 0, got {next:#X}"),
             ));
         }
         let prev = u32::from_reader(reader, e)?;
         if prev != 0 {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Expected 'prev' to be 0, got {:#X}", prev),
+                format!("Expected 'prev' to be 0, got {prev:#X}"),
             ));
         }
         let num_sections = u32::from_reader(reader, e)?;
@@ -170,7 +170,7 @@ impl FromReader for RsoHeader {
         if bss_section != 0 {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Expected 'bssSection' to be 0, got {:#X}", bss_section),
+                format!("Expected 'bssSection' to be 0, got {bss_section:#X}"),
             ));
         }
         let prolog_offset = u32::from_reader(reader, e)?;
@@ -440,7 +440,7 @@ where R: Read + Seek + ?Sized {
         // println!("Section {} offset {:#X} size {:#X}", idx, offset, size);
         sections.push(ObjSection {
-            name: format!(".section{}", idx),
+            name: format!(".section{idx}"),
             kind: if offset == 0 {
                 ObjSectionKind::Bss
             } else if section.exec() {


@@ -26,7 +26,25 @@ fn split_ctors_dtors(obj: &mut ObjInfo, start: SectionAddress, end: SectionAddre
     let mut current_address = start;
     let mut referenced_symbols = vec![];

+    // ProDG ctor list can start with -1
+    if matches!(read_u32(ctors_section, current_address.address), Some(0xFFFFFFFF)) {
+        current_address += 4;
+    }
+
     while current_address < end {
+        // ProDG hack when the end address is not known
+        if matches!(read_u32(ctors_section, current_address.address), Some(0)) {
+            while current_address < end {
+                ensure!(
+                    matches!(read_u32(ctors_section, current_address.address), Some(0)),
+                    "{} data detected at {:#010X} after null pointer",
+                    ctors_section.name,
+                    current_address,
+                );
+                current_address += 4;
+            }
+            break;
+        }
+
         let function_addr = read_address(obj, ctors_section, current_address.address)?;
         log::debug!("Found {} entry: {:#010X}", ctors_section.name, function_addr);
@@ -644,7 +662,9 @@ fn add_padding_symbols(obj: &mut ObjInfo) -> Result<()> {
         // Check if symbol is missing data between the end of the symbol and the next symbol
         let symbol_end = (symbol.address + symbol.size) as u32;
-        if section.kind != ObjSectionKind::Code && next_address > symbol_end {
+        if !matches!(section.kind, ObjSectionKind::Code | ObjSectionKind::Bss)
+            && next_address > symbol_end
+        {
             let data = section.data_range(symbol_end, next_address)?;
             if data.iter().any(|&x| x != 0) {
                 log::debug!(
@@ -653,7 +673,7 @@ fn add_padding_symbols(obj: &mut ObjInfo) -> Result<()> {
                     next_address
                 );
                 let name = if obj.module_id == 0 {
-                    format!("lbl_{:08X}", symbol_end)
+                    format!("lbl_{symbol_end:08X}")
                 } else {
                     format!(
                         "lbl_{}_{}_{:X}",
@@ -1463,7 +1483,7 @@ fn auto_unit_name(
     if unit_exists(&unit_name, obj, new_splits) {
         let mut i = 1;
         loop {
-            let new_unit_name = format!("{}_{}", unit_name, i);
+            let new_unit_name = format!("{unit_name}_{i}");
             if !unit_exists(&new_unit_name, obj, new_splits) {
                 unit_name = new_unit_name;
                 break;


@@ -333,7 +333,7 @@ impl VfsFile for DiscFile {
 pub fn nod_to_io_error(e: nod::Error) -> io::Error {
     match e {
-        nod::Error::Io(msg, e) => io::Error::new(e.kind(), format!("{}: {}", msg, e)),
+        nod::Error::Io(msg, e) => io::Error::new(e.kind(), format!("{msg}: {e}")),
         e => io::Error::new(io::ErrorKind::InvalidData, e),
     }
 }


@ -108,8 +108,8 @@ impl Display for VfsError {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
match self { match self {
VfsError::NotFound => write!(f, "File or directory not found"), VfsError::NotFound => write!(f, "File or directory not found"),
VfsError::IoError(e) => write!(f, "{}", e), VfsError::IoError(e) => write!(f, "{e}"),
VfsError::Other(e) => write!(f, "{}", e), VfsError::Other(e) => write!(f, "{e}"),
VfsError::NotADirectory => write!(f, "Path is a file, not a directory"), VfsError::NotADirectory => write!(f, "Path is a file, not a directory"),
VfsError::IsADirectory => write!(f, "Path is a directory, not a file"), VfsError::IsADirectory => write!(f, "Path is a directory, not a file"),
} }
@ -129,8 +129,8 @@ impl Display for FileFormat {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
match self { match self {
FileFormat::Regular => write!(f, "File"), FileFormat::Regular => write!(f, "File"),
FileFormat::Compressed(kind) => write!(f, "Compressed: {}", kind), FileFormat::Compressed(kind) => write!(f, "Compressed: {kind}"),
FileFormat::Archive(kind) => write!(f, "Archive: {}", kind), FileFormat::Archive(kind) => write!(f, "Archive: {kind}"),
} }
} }
} }
@ -165,7 +165,7 @@ impl Display for ArchiveKind {
match self { match self {
ArchiveKind::Rarc => write!(f, "RARC"), ArchiveKind::Rarc => write!(f, "RARC"),
ArchiveKind::U8 => write!(f, "U8"), ArchiveKind::U8 => write!(f, "U8"),
ArchiveKind::Disc(format) => write!(f, "Disc ({})", format), ArchiveKind::Disc(format) => write!(f, "Disc ({format})"),
ArchiveKind::Wad => write!(f, "WAD"), ArchiveKind::Wad => write!(f, "WAD"),
} }
} }
@@ -228,13 +228,13 @@ pub fn open_path_with_fs(
         let file_type = match fs.metadata(segment) {
             Ok(metadata) => metadata.file_type,
             Err(VfsError::NotFound) => return Err(anyhow!("{} not found", current_path)),
-            Err(e) => return Err(e).context(format!("Failed to open {}", current_path)),
+            Err(e) => return Err(e).context(format!("Failed to open {current_path}")),
         };
         match file_type {
             VfsFileType::File => {
                 file = Some(
                     fs.open(segment)
-                        .with_context(|| format!("Failed to open {}", current_path))?,
+                        .with_context(|| format!("Failed to open {current_path}"))?,
                 );
             }
             VfsFileType::Directory => {
@@ -248,7 +248,7 @@ pub fn open_path_with_fs(
         }
         let mut current_file = file.take().unwrap();
         let format = detect(current_file.as_mut())
-            .with_context(|| format!("Failed to detect file type for {}", current_path))?;
+            .with_context(|| format!("Failed to detect file type for {current_path}"))?;
         if let Some(&next) = split.peek() {
             match next {
                 "nlzss" => {
@@ -256,7 +256,7 @@ pub fn open_path_with_fs(
                     file = Some(
                         decompress_file(current_file.as_mut(), CompressionKind::Nlzss)
                             .with_context(|| {
-                                format!("Failed to decompress {} with NLZSS", current_path)
+                                format!("Failed to decompress {current_path} with NLZSS")
                             })?,
                     );
                 }
@@ -265,7 +265,7 @@ pub fn open_path_with_fs(
                    file = Some(
                        decompress_file(current_file.as_mut(), CompressionKind::Yay0)
                            .with_context(|| {
-                                format!("Failed to decompress {} with Yay0", current_path)
+                                format!("Failed to decompress {current_path} with Yay0")
                            })?,
                    );
                }
@@ -274,7 +274,7 @@ pub fn open_path_with_fs(
                    file = Some(
                        decompress_file(current_file.as_mut(), CompressionKind::Yaz0)
                            .with_context(|| {
-                                format!("Failed to decompress {} with Yaz0", current_path)
+                                format!("Failed to decompress {current_path} with Yaz0")
                            })?,
                    );
                }
@@ -283,16 +283,15 @@ pub fn open_path_with_fs(
                     return Err(anyhow!("{} is not an archive", current_path))
                 }
                 FileFormat::Compressed(kind) => {
-                    file =
-                        Some(decompress_file(current_file.as_mut(), kind).with_context(
-                            || format!("Failed to decompress {}", current_path),
-                        )?);
+                    file = Some(
+                        decompress_file(current_file.as_mut(), kind)
+                            .with_context(|| format!("Failed to decompress {current_path}"))?,
+                    );
                     // Continue the loop to detect the new format
                 }
                 FileFormat::Archive(kind) => {
-                    fs = open_fs(current_file, kind).with_context(|| {
-                        format!("Failed to open container {}", current_path)
-                    })?;
+                    fs = open_fs(current_file, kind)
+                        .with_context(|| format!("Failed to open container {current_path}"))?;
                     // Continue the loop to open the next segment
                 }
             },
@@ -302,7 +301,7 @@ pub fn open_path_with_fs(
         return match format {
             FileFormat::Compressed(kind) if auto_decompress => Ok(OpenResult::File(
                 decompress_file(current_file.as_mut(), kind)
-                    .with_context(|| format!("Failed to decompress {}", current_path))?,
+                    .with_context(|| format!("Failed to decompress {current_path}"))?,
                 segment.to_path_buf(),
             )),
             _ => Ok(OpenResult::File(current_file, segment.to_path_buf())),
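
Throughout open_path_with_fs, context is attached with anyhow's with_context, which takes a closure so the format! only runs on the error path; with inlined captures each call site becomes a one-liner. A small sketch of the pattern, assuming the anyhow crate and an arbitrary file name:

    use anyhow::{Context, Result};
    use std::fs;

    fn read_config(path: &str) -> Result<String> {
        // The closure is evaluated only if read_to_string fails,
        // so the happy path pays no formatting cost.
        fs::read_to_string(path).with_context(|| format!("Failed to open {path}"))
    }

    fn main() {
        if let Err(e) = read_config("definitely/missing.yml") {
            eprintln!("{e:#}"); // context line plus the underlying io::Error
        }
    }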

View File

@@ -118,11 +118,11 @@ impl Vfs for WadFs {
         }
         let title_id = hex::encode(self.wad.ticket().title_id);
         let mut entries = Vec::new();
-        entries.push(format!("{}.tik", title_id));
-        entries.push(format!("{}.tmd", title_id));
-        entries.push(format!("{}.cert", title_id));
+        entries.push(format!("{title_id}.tik"));
+        entries.push(format!("{title_id}.tmd"));
+        entries.push(format!("{title_id}.cert"));
         if self.wad.header.footer_size.get() > 0 {
-            entries.push(format!("{}.trailer", title_id));
+            entries.push(format!("{title_id}.trailer"));
         }
         for content in self.wad.contents() {
             entries.push(format!("{:08x}.app", content.content_index.get()));
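
The listing above derives the WAD's synthetic directory entries from the hex-encoded 8-byte title ID. A standalone sketch of the naming scheme, with made-up ID bytes and assuming the hex crate:

    fn main() {
        let title_id: [u8; 8] = [0x00, 0x01, 0x00, 0x08, 0x48, 0x41, 0x43, 0x41];
        let title_id = hex::encode(title_id); // "0001000848414341"
        let mut entries = Vec::new();
        for ext in ["tik", "tmd", "cert"] {
            entries.push(format!("{title_id}.{ext}"));
        }
        // Content entries keep a positional arg: the index comes from a
        // method call, which cannot be captured inline.
        entries.push(format!("{:08x}.app", 0u16));
        assert_eq!(entries[0], "0001000848414341.tik");
    }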