mirror of https://github.com/encounter/decomp-toolkit.git (synced 2025-08-06 04:05:37 +00:00)

Compare commits: 14 commits
b56b399201
8620099731
ae00c35ec3
ba2589646e
7bc0bc474d
d969819b78
f4a67ee619
d92a892c2b
5e33fea49f
9cafb77d3f
20e877c9ec
88d0e6b789
f212b35d28
9c681557f5
Cargo.lock (generated, 72 lines changed)

@@ -339,16 +339,16 @@ checksum = "c2e06f9bce634a3c898eb1e5cb949ff63133cbb218af93cc9b38b31d6f3ea285"

 [[package]]
 name = "cwextab"
-version = "1.0.3"
+version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "003567b96ff9d8ac3275831650385891bca370092937be625157778b1e58f755"
+checksum = "9dd95393b8cc20937e4757d9c22b89d016613e934c60dcb073bd8a5aade79fcf"
 dependencies = [
- "thiserror",
+ "thiserror 2.0.12",
 ]

 [[package]]
 name = "decomp-toolkit"
-version = "1.5.1"
+version = "1.7.0"
 dependencies = [
  "aes",
  "anyhow",

@@ -873,7 +873,7 @@ dependencies = [
  "miniz_oxide",
  "rayon",
  "sha1",
- "thiserror",
+ "thiserror 1.0.64",
  "zerocopy",
  "zstd",
 ]

@@ -946,7 +946,7 @@ dependencies = [
  "proc-macro-crate",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1110,7 +1110,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba"
 dependencies = [
  "proc-macro2",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1124,9 +1124,9 @@ dependencies = [

 [[package]]
 name = "proc-macro2"
-version = "1.0.88"
+version = "1.0.95"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9"
+checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
 dependencies = [
  "unicode-ident",
 ]

@@ -1158,7 +1158,7 @@ dependencies = [
  "prost",
  "prost-types",
  "regex",
- "syn 2.0.79",
+ "syn 2.0.101",
  "tempfile",
 ]

@@ -1172,7 +1172,7 @@ dependencies = [
  "itertools",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1374,7 +1374,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1385,7 +1385,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1408,7 +1408,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1527,7 +1527,7 @@ dependencies = [
  "heck",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1549,7 +1549,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "rustversion",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1584,9 +1584,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.79"
+version = "2.0.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
+checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -1609,7 +1609,7 @@ dependencies = [
  "serde",
  "serde_derive",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.64",
  "walkdir",
 ]

@@ -1632,7 +1632,16 @@ version = "1.0.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84"
 dependencies = [
- "thiserror-impl",
+ "thiserror-impl 1.0.64",
 ]

+[[package]]
+name = "thiserror"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
+dependencies = [
+ "thiserror-impl 2.0.12",
+]
+
 [[package]]

@@ -1643,7 +1652,18 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

+[[package]]
+name = "thiserror-impl"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.101",
+]
+
 [[package]]

@@ -1692,7 +1712,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1755,7 +1775,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "serde_derive_internals",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]

@@ -1853,7 +1873,7 @@ dependencies = [
  "once_cell",
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
  "wasm-bindgen-shared",
 ]

@@ -1875,7 +1895,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]

@@ -2088,7 +2108,7 @@ checksum = "3ca22c4ad176b37bd81a565f66635bde3d654fe6832730c3e52e1018ae1655ee"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.79",
+ "syn 2.0.101",
 ]

 [[package]]
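
The lockfile now carries thiserror 1.0.64 and 2.0.12 side by side: Cargo permits semver-incompatible versions of one crate in a dependency graph, and entries like `"thiserror 2.0.12"` pin which copy a package links against (2.x arrives via the cwextab 1.1.2 bump). As an illustration only, not taken from the repository, the basic derive surface is the same across both majors, which is why a dependency can make this jump without source churn:

```rust
// Illustrative sketch: this compiles identically against thiserror 1.x and 2.x.
use thiserror::Error;

#[derive(Debug, Error)]
enum DemoError {
    #[error("invalid entry at {0:#010X}")]
    InvalidEntry(u32),
}

fn main() {
    println!("{}", DemoError::InvalidEntry(0x8000_0000));
}
```
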
Cargo.toml

@@ -3,7 +3,7 @@ name = "decomp-toolkit"
 description = "Yet another GameCube/Wii decompilation toolkit."
 authors = ["Luke Street <luke@street.dev>"]
 license = "MIT OR Apache-2.0"
-version = "1.5.1"
+version = "1.7.0"
 edition = "2021"
 publish = false
 repository = "https://github.com/encounter/decomp-toolkit"

@@ -37,7 +37,7 @@ typed-path = "0.9"
 cbc = "0.1"
 crossterm = "0.28"
 cwdemangle = "1.0"
-cwextab = "1.0"
+cwextab = "1.1"
 dyn-clone = "1.0"
 enable-ansi-support = "0.2"
 filetime = "0.2"

README.md

@@ -297,6 +297,8 @@ Dumps DWARF 1.1 information from an ELF file. (Does **not** support DWARF 2+)

 ```shell
 $ dtk dwarf dump input.elf
+# or, to include data that was stripped by MWLD
+$ dtk dwarf dump input.elf --include-erased
 ```

 ### elf disasm

@@ -1,6 +1,6 @@
 use std::{
     cmp::min,
-    collections::BTreeMap,
+    collections::{BTreeMap, BTreeSet},
     fmt::{Debug, Display, Formatter, UpperHex},
     ops::{Add, AddAssign, BitAnd, Sub},
 };

@@ -191,7 +191,7 @@ impl AnalyzerState {
         };
         obj.add_symbol(
             ObjSymbol {
-                name: format!("jumptable_{}", address_str),
+                name: format!("jumptable_{address_str}"),
                 address: addr.address as u64,
                 section: Some(addr.section),
                 size: size as u64,

@@ -275,7 +275,7 @@ impl AnalyzerState {
             let (section_index, _) = obj
                 .sections
                 .at_address(entry)
-                .context(format!("Entry point {:#010X} outside of any section", entry))?;
+                .context(format!("Entry point {entry:#010X} outside of any section"))?;
             self.process_function_at(obj, SectionAddress::new(section_index, entry))?;
         }
         // Locate bounds for referenced functions until none are left

@@ -530,7 +530,7 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
     let (section_index, _) = obj
         .sections
         .at_address(entry as u32)
-        .context(format!("Entry point {:#010X} outside of any section", entry))?;
+        .context(format!("Entry point {entry:#010X} outside of any section"))?;
     let entry_addr = SectionAddress::new(section_index, entry as u32);

     let mut executor = Executor::new(obj);

@@ -572,6 +572,26 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
         Some((sda2_base, sda_base)) => {
             obj.sda2_base = Some(sda2_base);
             obj.sda_base = Some(sda_base);
+            obj.add_symbol(
+                ObjSymbol {
+                    name: "_SDA2_BASE_".to_string(),
+                    address: sda2_base as u64,
+                    size_known: true,
+                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
+                    ..Default::default()
+                },
+                true,
+            )?;
+            obj.add_symbol(
+                ObjSymbol {
+                    name: "_SDA_BASE_".to_string(),
+                    address: sda_base as u64,
+                    size_known: true,
+                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
+                    ..Default::default()
+                },
+                true,
+            )?;
             Ok(true)
         }
         None => Ok(false),

@@ -581,7 +601,7 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
 /// ProDG hardcodes .bss and .sbss section initialization in `entry`
 /// This function locates the memset calls and returns a list of
 /// (address, size) pairs for the .bss sections.
-pub fn locate_bss_memsets(obj: &mut ObjInfo) -> Result<Vec<(u32, u32)>> {
+pub fn locate_bss_memsets(obj: &ObjInfo) -> Result<Vec<(u32, u32)>> {
     let mut bss_sections: Vec<(u32, u32)> = Vec::new();
     let Some(entry) = obj.entry else {
         return Ok(bss_sections);

@@ -589,7 +609,7 @@ pub fn locate_bss_memsets(obj: &mut ObjInfo) -> Result<Vec<(u32, u32)>> {
     let (section_index, _) = obj
         .sections
         .at_address(entry as u32)
-        .context(format!("Entry point {:#010X} outside of any section", entry))?;
+        .context(format!("Entry point {entry:#010X} outside of any section"))?;
     let entry_addr = SectionAddress::new(section_index, entry as u32);

     let mut executor = Executor::new(obj);

@@ -632,3 +652,50 @@ pub fn locate_bss_memsets(obj: &mut ObjInfo) -> Result<Vec<(u32, u32)>> {
     )?;
     Ok(bss_sections)
 }
+
+/// Execute VM from specified entry point following inner-section branches and function calls,
+/// noting all branch targets outside the current section.
+pub fn locate_cross_section_branch_targets(
+    obj: &ObjInfo,
+    entry: SectionAddress,
+) -> Result<BTreeSet<SectionAddress>> {
+    let mut branch_targets = BTreeSet::<SectionAddress>::new();
+    let mut executor = Executor::new(obj);
+    executor.push(entry, VM::new(), false);
+    executor.run(
+        obj,
+        |ExecCbData { executor, vm, result, ins_addr, section: _, ins: _, block_start: _ }| {
+            match result {
+                StepResult::Continue | StepResult::LoadStore { .. } => {
+                    Ok(ExecCbResult::<()>::Continue)
+                }
+                StepResult::Illegal => bail!("Illegal instruction @ {}", ins_addr),
+                StepResult::Jump(target) => {
+                    if let BranchTarget::Address(RelocationTarget::Address(addr)) = target {
+                        if addr.section == entry.section {
+                            executor.push(addr, vm.clone_all(), true);
+                        } else {
+                            branch_targets.insert(addr);
+                        }
+                    }
+                    Ok(ExecCbResult::EndBlock)
+                }
+                StepResult::Branch(branches) => {
+                    for branch in branches {
+                        if let BranchTarget::Address(RelocationTarget::Address(addr)) =
+                            branch.target
+                        {
+                            if addr.section == entry.section {
+                                executor.push(addr, branch.vm, true);
+                            } else {
+                                branch_targets.insert(addr);
+                            }
+                        }
+                    }
+                    Ok(ExecCbResult::Continue)
+                }
+            }
+        },
+    )?;
+    Ok(branch_targets)
+}
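
Per its doc comment, the new `locate_cross_section_branch_targets` follows branches that stay inside the entry point's section and records the targets that leave it. A self-contained toy of just that partitioning rule (the `SectionAddress` shape mirrors the diff; the executor machinery is elided):

```rust
use std::collections::BTreeSet;

// Toy model: targets in the entry's section are followed, the rest recorded.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct SectionAddress {
    section: u32,
    address: u32,
}

fn cross_section_targets(
    entry: SectionAddress,
    branch_targets: &[SectionAddress],
) -> BTreeSet<SectionAddress> {
    branch_targets.iter().copied().filter(|t| t.section != entry.section).collect()
}

fn main() {
    let entry = SectionAddress { section: 0, address: 0x8000_3100 };
    let seen = [
        SectionAddress { section: 0, address: 0x8000_3200 }, // same section: followed
        SectionAddress { section: 2, address: 0x8004_0000 }, // other section: recorded
    ];
    println!("{:?}", cross_section_targets(entry, &seen));
}
```
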
@@ -183,8 +183,7 @@ fn get_jump_table_entries(
         let (section_index, _) =
             obj.sections.at_address(entry_addr).with_context(|| {
                 format!(
-                    "Invalid jump table entry {:#010X} at {:#010X}",
-                    entry_addr, cur_addr
+                    "Invalid jump table entry {entry_addr:#010X} at {cur_addr:#010X}"
                 )
             })?;
         entries.push(SectionAddress::new(section_index, entry_addr));

@@ -245,7 +244,9 @@ pub fn uniq_jump_table_entries(
         return Ok((BTreeSet::new(), 0));
     }
     let (entries, size) =
-        get_jump_table_entries(obj, addr, size, from, function_start, function_end)?;
+        get_jump_table_entries(obj, addr, size, from, function_start, function_end).with_context(
+            || format!("While fetching jump table entries starting at {addr:#010X}"),
+        )?;
     Ok((BTreeSet::from_iter(entries.iter().cloned()), size))
 }
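
The `uniq_jump_table_entries` change wraps the inner call in an `anyhow` context so a failure reports which jump table was being read. This is standard `Context::with_context` chaining; a minimal self-contained demonstration:

```rust
use anyhow::{bail, Context, Result};

fn read_entries(addr: u32) -> Result<()> {
    bail!("invalid jump table entry at {addr:#010X}")
}

fn uniq_entries(addr: u32) -> Result<()> {
    // Same pattern as the diff: the closure is only evaluated on error.
    read_entries(addr)
        .with_context(|| format!("While fetching jump table entries starting at {addr:#010X}"))
}

fn main() {
    let err = uniq_entries(0x8025_0000).unwrap_err();
    println!("{err:#}"); // alternate format prints the context, then the root cause
}
```
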
@@ -2,7 +2,7 @@ use anyhow::Result;

 use crate::{
     obj::{ObjDataKind, ObjInfo, ObjSectionKind, ObjSymbolKind, SymbolIndex},
-    util::split::is_linker_generated_label,
+    util::{config::is_auto_symbol, split::is_linker_generated_label},
 };

 pub fn detect_objects(obj: &mut ObjInfo) -> Result<()> {

@@ -134,7 +134,9 @@ pub fn detect_strings(obj: &mut ObjInfo) -> Result<()> {
             StringResult::None => {}
             StringResult::String { length, terminated } => {
                 let size = if terminated { length + 1 } else { length };
-                if !symbol.size_known || symbol.size == size as u64 {
+                if symbol.size == size as u64
+                    || (is_auto_symbol(symbol) && symbol.size > size as u64)
+                {
                     let str = String::from_utf8_lossy(&data[..length]);
                     log::debug!("Found string '{}' @ {}", str, symbol.name);
                     symbols_set.push((symbol_idx, ObjDataKind::String, size));

@@ -142,7 +144,9 @@ pub fn detect_strings(obj: &mut ObjInfo) -> Result<()> {
             }
             StringResult::WString { length, str } => {
                 let size = length + 2;
-                if !symbol.size_known || symbol.size == size as u64 {
+                if symbol.size == size as u64
+                    || (is_auto_symbol(symbol) && symbol.size > size as u64)
+                {
                     log::debug!("Found wide string '{}' @ {}", str, symbol.name);
                     symbols_set.push((symbol_idx, ObjDataKind::String16, size));
                 }
|
||||
for i in reg_start..reg_end {
|
||||
let addr = start + (i - reg_start) * step_size;
|
||||
state.known_symbols.entry(addr).or_default().push(ObjSymbol {
|
||||
name: format!("{}{}", label, i),
|
||||
name: format!("{label}{i}"),
|
||||
address: addr.address as u64,
|
||||
section: Some(start.section),
|
||||
size_known: true,
|
||||
|
@@ -45,6 +45,17 @@ type BlockRange = Range<SectionAddress>;

 type InsCheck = dyn Fn(Ins) -> bool;

+/// Stop searching for prologue/epilogue sequences if the next instruction
+/// is a branch or uses r0 or r1.
+fn is_end_of_seq(next: &Ins) -> bool {
+    next.is_branch()
+        || next
+            .defs()
+            .iter()
+            .chain(next.uses().iter())
+            .any(|a| matches!(a, ppc750cl::Argument::GPR(ppc750cl::GPR(0 | 1))))
+}
+
 #[inline(always)]
 fn check_sequence(
     section: &ObjSection,

@@ -52,29 +63,26 @@ fn check_sequence(
     ins: Option<Ins>,
     sequence: &[(&InsCheck, &InsCheck)],
 ) -> Result<bool> {
-    let mut found = false;
+    let ins = ins
+        .or_else(|| disassemble(section, addr.address))
+        .with_context(|| format!("Failed to disassemble instruction at {addr:#010X}"))?;
     for &(first, second) in sequence {
-        let Some(ins) = ins.or_else(|| disassemble(section, addr.address)) else {
-            continue;
-        };
         if !first(ins) {
             continue;
         }
-        let Some(next) = disassemble(section, addr.address + 4) else {
-            continue;
-        };
-        if second(next)
-            // Also check the following instruction, in case the scheduler
-            // put something in between.
-            || (!next.is_branch()
-                && matches!(disassemble(section, addr.address + 8), Some(ins) if second(ins)))
-        {
-            found = true;
-            break;
+        let mut current_addr = addr.address + 4;
+        while let Some(next) = disassemble(section, current_addr) {
+            if second(next) {
+                return Ok(true);
+            }
+            if is_end_of_seq(&next) {
+                // If we hit a branch or an instruction that uses r0 or r1, stop searching.
+                break;
+            }
+            current_addr += 4;
         }
     }

-    Ok(found)
+    Ok(false)
 }

 fn check_prologue_sequence(

@@ -89,15 +97,19 @@ fn check_prologue_sequence(
     }
     #[inline(always)]
     fn is_stwu(ins: Ins) -> bool {
-        // stwu r1, d(r1)
-        ins.op == Opcode::Stwu && ins.field_rs() == 1 && ins.field_ra() == 1
+        // stwu[x] r1, d(r1)
+        matches!(ins.op, Opcode::Stwu | Opcode::Stwux) && ins.field_rs() == 1 && ins.field_ra() == 1
     }
     #[inline(always)]
     fn is_stw(ins: Ins) -> bool {
         // stw r0, d(r1)
         ins.op == Opcode::Stw && ins.field_rs() == 0 && ins.field_ra() == 1
     }

-    check_sequence(section, addr, ins, &[(&is_stwu, &is_mflr), (&is_mflr, &is_stw)])
+    check_sequence(section, addr, ins, &[
+        (&is_stwu, &is_mflr),
+        (&is_mflr, &is_stw),
+        (&is_mflr, &is_stwu),
+    ])
 }

 impl FunctionSlices {

@@ -148,7 +160,28 @@ impl FunctionSlices {
         }
         if check_prologue_sequence(section, addr, Some(ins))? {
             if let Some(prologue) = self.prologue {
-                if prologue != addr && prologue != addr - 4 {
+                let invalid_seq = if prologue == addr {
+                    false
+                } else if prologue > addr {
+                    true
+                } else {
+                    // Check if any instructions between the prologue and this address
+                    // are branches or use r0 or r1.
+                    let mut current_addr = prologue.address + 4;
+                    loop {
+                        if current_addr == addr.address {
+                            break false;
+                        }
+                        let next = disassemble(section, current_addr).with_context(|| {
+                            format!("Failed to disassemble {current_addr:#010X}")
+                        })?;
+                        if is_end_of_seq(&next) {
+                            break true;
+                        }
+                        current_addr += 4;
+                    }
+                };
+                if invalid_seq {
                     bail!("Found multiple functions inside a symbol: {:#010X} and {:#010X}. Check symbols.txt?", prologue, addr)
                 }
             } else {

@@ -180,7 +213,11 @@ impl FunctionSlices {
         ins.op == Opcode::Or && ins.field_rd() == 1
     }

-    if check_sequence(section, addr, Some(ins), &[(&is_mtlr, &is_addi), (&is_or, &is_mtlr)])? {
+    if check_sequence(section, addr, Some(ins), &[
+        (&is_mtlr, &is_addi),
+        (&is_mtlr, &is_or),
+        (&is_or, &is_mtlr),
+    ])? {
         if let Some(epilogue) = self.epilogue {
             if epilogue != addr {
                 bail!("Found duplicate epilogue: {:#010X} and {:#010X}", epilogue, addr)

@@ -227,7 +264,7 @@ impl FunctionSlices {
             })?;
         }
         self.check_epilogue(section, ins_addr, ins)
-            .with_context(|| format!("While processing {:#010X}: {:#?}", function_start, self))?;
+            .with_context(|| format!("While processing {function_start:#010X}: {self:#?}"))?;
         if !self.has_conditional_blr && is_conditional_blr(ins) {
             self.has_conditional_blr = true;
         }

@@ -340,7 +377,14 @@ impl FunctionSlices {
             function_end.or_else(|| self.end()),
         )?;
         log::debug!("-> size {}: {:?}", size, entries);
-        if (entries.contains(&next_address) || self.blocks.contains_key(&next_address))
+        let max_block = self
+            .blocks
+            .keys()
+            .next_back()
+            .copied()
+            .unwrap_or(next_address)
+            .max(next_address);
+        if entries.iter().any(|&addr| addr > function_start && addr <= max_block)
             && !entries.iter().any(|&addr| {
                 self.is_known_function(known_functions, addr)
                     .is_some_and(|fn_addr| fn_addr != function_start)

@@ -703,7 +747,7 @@ impl FunctionSlices {
             }
         }
         // If we discovered a function prologue, known tail call.
-        if slices.prologue.is_some() {
+        if slices.prologue.is_some() || slices.has_r1_load {
             log::trace!("Prologue discovered; known tail call: {:#010X}", addr);
             return TailCallResult::Is;
         }
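
`check_sequence` previously looked at most two instructions ahead for the second half of a prologue/epilogue pair; it now scans forward until `is_end_of_seq` sees a branch or an instruction touching r0/r1. A self-contained toy of that bounded scan over a fake instruction stream:

```rust
// Toy model of the relaxed sequence scan: look for `first` followed by
// `second`, skipping unrelated instructions until a sequence-ending one.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Ins {
    Stwu,
    Mflr,
    Stw,
    Branch,
    Other,
}

fn is_end_of_seq(ins: Ins) -> bool {
    matches!(ins, Ins::Branch)
}

fn check_sequence(code: &[Ins], first: Ins, second: Ins) -> bool {
    let Some(pos) = code.iter().position(|&i| i == first) else { return false };
    for &next in &code[pos + 1..] {
        if next == second {
            return true;
        }
        if is_end_of_seq(next) {
            return false;
        }
    }
    false
}

fn main() {
    // stwu, <scheduler-inserted filler>, mflr still counts as a prologue pair.
    let code = [Ins::Stwu, Ins::Other, Ins::Mflr, Ins::Stw];
    assert!(check_sequence(&code, Ins::Stwu, Ins::Mflr));
}
```
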
@@ -417,9 +417,13 @@ impl Tracker {
                 Ok(ExecCbResult::Continue)
             }
             StepResult::Jump(target) => match target {
-                BranchTarget::Return => Ok(ExecCbResult::EndBlock),
                 BranchTarget::Unknown
+                | BranchTarget::Return
                 | BranchTarget::JumpTable { address: RelocationTarget::External, .. } => {
                     let next_addr = ins_addr + 4;
                     if next_addr < function_end {
                         possible_missed_branches.insert(ins_addr + 4, vm.clone_all());
                     }
                     Ok(ExecCbResult::EndBlock)
                 }
                 BranchTarget::Address(addr) => {

@@ -576,7 +580,7 @@ impl Tracker {
         let relocation_target = relocation_target_for(obj, from, None).ok().flatten();
         if !matches!(relocation_target, None | Some(RelocationTarget::External)) {
             // VM should have already handled this
-            panic!("Relocation already exists for {:#010X} (from {:#010X})", addr, from);
+            panic!("Relocation already exists for {addr:#010X} (from {from:#010X})");
         }
     }
     // Remainder of this function is for executable objects only

@@ -668,7 +672,7 @@ impl Tracker {
                 0
             };
             let new_name =
-                if module_id == 0 { name.to_string() } else { format!("{}:{}", name, module_id) };
+                if module_id == 0 { name.to_string() } else { format!("{name}:{module_id}") };
             log::debug!("Renaming {} to {}", section.name, new_name);
             section.name = new_name;
         }
@@ -127,16 +127,16 @@ fn extract(args: ExtractArgs) -> Result<()> {
         }
         std::fs::create_dir_all(&out_dir)?;
         if !args.quiet {
-            println!("Extracting {} to {}", path, out_dir);
+            println!("Extracting {path} to {out_dir}");
         }

         let mut file = open_file(path, false)?;
         let mut archive = ar::Archive::new(file.map()?);
         while let Some(entry) = archive.next_entry() {
-            let mut entry = entry.with_context(|| format!("Processing entry in {}", path))?;
+            let mut entry = entry.with_context(|| format!("Processing entry in {path}"))?;
             let file_name = std::str::from_utf8(entry.header().identifier())?;
             if !args.quiet && args.verbose {
-                println!("\t{}", file_name);
+                println!("\t{file_name}");
             }
             let mut file_path = out_dir.clone();
             for segment in file_name.split(&['/', '\\']) {

@@ -146,7 +146,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
                 std::fs::create_dir_all(parent)?;
             }
             let mut file = File::create(&file_path)
-                .with_context(|| format!("Failed to create file {}", file_path))?;
+                .with_context(|| format!("Failed to create file {file_path}"))?;
             std::io::copy(&mut entry, &mut file)?;
             file.flush()?;

@@ -154,7 +154,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
            }
        }
        if !args.quiet {
-           println!("Extracted {} files", num_files);
+           println!("Extracted {num_files} files");
        }
        Ok(())
    }
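
Many hunks across this compare are mechanical conversions to Rust's inlined format arguments (stable since Rust 1.58), where an identifier inside `{}` is captured from the surrounding scope. Both forms print the same thing; only the inlined one appears in the new code:

```rust
fn main() {
    let path = "lib.a";
    let num_files = 3;
    // Old style, as removed throughout this change set:
    println!("Extracting {} ({} files)", path, num_files);
    // Inlined format args, as added:
    println!("Extracting {path} ({num_files} files)");
}
```
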
@@ -47,6 +47,7 @@ use crate::{
         diff::{calc_diff_ranges, print_diff, process_code},
         dol::process_dol,
         elf::{process_elf, write_elf},
+        extab::clean_extab,
         file::{
             buf_copy_with_hash, buf_writer, check_hash_str, touch, verify_hash, FileIterator,
             FileReadInfo,

@@ -293,6 +294,9 @@ pub struct ModuleConfig {
     pub block_relocations: Vec<BlockRelocationConfig>,
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
     pub add_relocations: Vec<AddRelocationConfig>,
+    /// Process exception tables and zero out uninitialized data.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub clean_extab: Option<bool>,
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]

@@ -534,9 +538,11 @@ pub fn info(args: InfoArgs) -> Result<()> {
         apply_selfile(&mut obj, file.map()?)?;
     }

-    println!("{}:", obj.name);
+    if !obj.name.is_empty() {
+        println!("{}:", obj.name);
+    }
     if let Some(entry) = obj.entry {
-        println!("Entry point: {:#010X}", entry);
+        println!("Entry point: {entry:#010X}");
     }
     println!("\nSections:");
     println!("\t{: >10} | {: <10} | {: <10} | {: <10}", "Name", "Address", "Size", "File Off");

@@ -578,6 +584,7 @@ struct ModuleInfo<'a> {
     config: &'a ModuleConfig,
     symbols_cache: Option<FileReadInfo>,
     splits_cache: Option<FileReadInfo>,
+    dep: Vec<Utf8NativePathBuf>,
 }

 type ModuleMapByName<'a> = BTreeMap<String, ModuleInfo<'a>>;

@@ -817,17 +824,29 @@ struct AnalyzeResult {
     splits_cache: Option<FileReadInfo>,
 }

-fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
-    let object_path = object_base.join(&config.base.object);
+fn load_dol_module(
+    config: &ModuleConfig,
+    object_base: &ObjectBase,
+) -> Result<(ObjInfo, Utf8NativePathBuf)> {
+    let object_path = object_base.join(&config.object);
     log::debug!("Loading {}", object_path);
     let mut obj = {
-        let mut file = object_base.open(&config.base.object)?;
+        let mut file = object_base.open(&config.object)?;
         let data = file.map()?;
-        if let Some(hash_str) = &config.base.hash {
+        if let Some(hash_str) = &config.hash {
             verify_hash(data, hash_str)?;
         }
-        process_dol(data, config.base.name())?
+        process_dol(data, config.name())?
     };
+    if config.clean_extab.unwrap_or(false) {
+        log::debug!("Cleaning extab for {}", config.name());
+        clean_extab(&mut obj, std::iter::empty())?;
+    }
+    Ok((obj, object_path))
+}
+
+fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
+    let (mut obj, object_path) = load_dol_module(&config.base, object_base)?;
     let mut dep = vec![object_path];

     if let Some(comment_version) = config.mw_comment_version {

@@ -954,7 +973,7 @@ fn split_write_obj(
     DirBuilder::new()
         .recursive(true)
         .create(out_dir)
-        .with_context(|| format!("Failed to create out dir '{}'", out_dir))?;
+        .with_context(|| format!("Failed to create out dir '{out_dir}'"))?;
     let obj_dir = out_dir.join("obj");
     let entry = if module.obj.kind == ObjKind::Executable {
         module.obj.entry.and_then(|e| {

@@ -1055,9 +1074,10 @@ fn split_write_obj(
     // Generate ldscript.lcf
     let ldscript_template = if let Some(template_path) = &module.config.ldscript_template {
         let template_path = template_path.with_encoding();
-        Some(fs::read_to_string(&template_path).with_context(|| {
-            format!("Failed to read linker script template '{}'", template_path)
-        })?)
+        let template = fs::read_to_string(&template_path)
+            .with_context(|| format!("Failed to read linker script template '{template_path}'"))?;
+        module.dep.push(template_path);
+        Some(template)
     } else {
         None
     };

@@ -1073,8 +1093,7 @@ fn split_write_obj(
             let out_path = asm_dir.join(asm_path_for_unit(&unit.name));

             let mut w = buf_writer(&out_path)?;
-            write_asm(&mut w, split_obj)
-                .with_context(|| format!("Failed to write {}", out_path))?;
+            write_asm(&mut w, split_obj).with_context(|| format!("Failed to write {out_path}"))?;
             w.flush()?;
         }
     }

@@ -1091,7 +1110,7 @@ fn write_if_changed(path: &Utf8NativePath, contents: &[u8]) -> Result<()> {
             return Ok(());
         }
     }
-    fs::write(path, contents).with_context(|| format!("Failed to write file '{}'", path))?;
+    fs::write(path, contents).with_context(|| format!("Failed to write file '{path}'"))?;
     Ok(())
 }

@@ -1245,6 +1264,7 @@ fn split(args: SplitArgs) -> Result<()> {
             config: &config.base,
             symbols_cache: result.symbols_cache,
             splits_cache: result.splits_cache,
+            dep: Default::default(),
         }
     };
     let mut function_count = dol.obj.symbols.by_kind(ObjSymbolKind::Function).count();

@@ -1259,6 +1279,7 @@ fn split(args: SplitArgs) -> Result<()> {
                 config: &config.modules[idx],
                 symbols_cache: result.symbols_cache,
                 splits_cache: result.splits_cache,
+                dep: Default::default(),
             }),
             Entry::Occupied(_) => bail!("Duplicate module name {}", result.obj.name),
         };

@@ -1440,6 +1461,10 @@ fn split(args: SplitArgs) -> Result<()> {
     }

     // Write dep file
+    dep.extend(dol.dep);
+    for module in modules.into_values() {
+        dep.extend(module.dep);
+    }
     {
         let dep_path = args.out_dir.join("dep");
         let mut dep_file = buf_writer(&dep_path)?;

@@ -1651,15 +1676,7 @@ fn diff(args: DiffArgs) -> Result<()> {
     let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
     let object_base = find_object_base(&config)?;

-    log::info!("Loading {}", object_base.join(&config.base.object));
-    let mut obj = {
-        let mut file = object_base.open(&config.base.object)?;
-        let data = file.map()?;
-        if let Some(hash_str) = &config.base.hash {
-            verify_hash(data, hash_str)?;
-        }
-        process_dol(data, config.base.name())?
-    };
+    let (mut obj, _object_path) = load_dol_module(&config.base, &object_base)?;

     if let Some(symbols_path) = &config.base.symbols {
         apply_symbols_file(&symbols_path.with_encoding(), &mut obj)?;

@@ -1875,15 +1892,7 @@ fn apply(args: ApplyArgs) -> Result<()> {
     let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
     let object_base = find_object_base(&config)?;

-    log::info!("Loading {}", object_base.join(&config.base.object));
-    let mut obj = {
-        let mut file = object_base.open(&config.base.object)?;
-        let data = file.map()?;
-        if let Some(hash_str) = &config.base.hash {
-            verify_hash(data, hash_str)?;
-        }
-        process_dol(data, config.base.name())?
-    };
+    let (mut obj, _object_path) = load_dol_module(&config.base, &object_base)?;

     let Some(symbols_path) = &config.base.symbols else {
         bail!("No symbols file specified in config");

@@ -2157,7 +2166,7 @@ impl ObjectBase {
             }
             base.join(path.with_encoding())
         }
-        ObjectBase::Vfs(base, _) => Utf8NativePathBuf::from(format!("{}:{}", base, path)),
+        ObjectBase::Vfs(base, _) => Utf8NativePathBuf::from(format!("{base}:{path}")),
         }
     }

@@ -2174,7 +2183,7 @@ impl ObjectBase {
         }
         ObjectBase::Vfs(vfs_path, vfs) => {
             open_file_with_fs(vfs.clone(), &path.with_encoding(), true)
-                .with_context(|| format!("Using disc image {}", vfs_path))
+                .with_context(|| format!("Using disc image {vfs_path}"))
         }
     }
 }

@@ -2192,18 +2201,18 @@ pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
     if let Some(base) = &config.object_base {
         let base = base.with_encoding();
         // Search for disc images in the object base directory
-        for result in fs::read_dir(&base).with_context(|| format!("Reading directory {}", base))? {
-            let entry = result.with_context(|| format!("Reading entry in directory {}", base))?;
+        for result in fs::read_dir(&base).with_context(|| format!("Reading directory {base}"))? {
+            let entry = result.with_context(|| format!("Reading entry in directory {base}"))?;
             let Ok(path) = check_path_buf(entry.path()) else {
                 log::warn!("Path is not valid UTF-8: {:?}", entry.path());
                 continue;
             };
             let file_type =
-                entry.file_type().with_context(|| format!("Getting file type for {}", path))?;
+                entry.file_type().with_context(|| format!("Getting file type for {path}"))?;
             let is_file = if file_type.is_symlink() {
                 // Also traverse symlinks to files
                 fs::metadata(&path)
-                    .with_context(|| format!("Getting metadata for {}", path))?
+                    .with_context(|| format!("Getting metadata for {path}"))?
                     .is_file()
             } else {
                 file_type.is_file()

@@ -2211,7 +2220,7 @@ pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
             if is_file {
                 let mut file = open_file(&path, false)?;
                 let format = detect(file.as_mut())
-                    .with_context(|| format!("Detecting file type for {}", path))?;
+                    .with_context(|| format!("Detecting file type for {path}"))?;
                 match format {
                     FileFormat::Archive(ArchiveKind::Disc(format)) => {
                         let fs = open_fs(file, ArchiveKind::Disc(format))?;

@@ -2240,7 +2249,7 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
     {
         let target_path = extracted_path(&target_dir, &config.base.object);
         if !fs::exists(&target_path)
-            .with_context(|| format!("Failed to check path '{}'", target_path))?
+            .with_context(|| format!("Failed to check path '{target_path}'"))?
         {
             object_paths.push((&config.base.object, config.base.hash.as_deref(), target_path));
         }

@@ -2248,7 +2257,7 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
     if let Some(selfile) = &config.selfile {
         let target_path = extracted_path(&target_dir, selfile);
         if !fs::exists(&target_path)
-            .with_context(|| format!("Failed to check path '{}'", target_path))?
+            .with_context(|| format!("Failed to check path '{target_path}'"))?
         {
             object_paths.push((selfile, config.selfile_hash.as_deref(), target_path));
         }

@@ -2256,7 +2265,7 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
     for module_config in &config.modules {
         let target_path = extracted_path(&target_dir, &module_config.object);
         if !fs::exists(&target_path)
-            .with_context(|| format!("Failed to check path '{}'", target_path))?
+            .with_context(|| format!("Failed to check path '{target_path}'"))?
         {
             object_paths.push((&module_config.object, module_config.hash.as_deref(), target_path));
         }

@@ -2275,12 +2284,12 @@ fn extract_objects(config: &ProjectConfig, object_base: &ObjectBase) -> Result<U
         let mut file = object_base.open(source_path)?;
         if let Some(parent) = target_path.parent() {
             fs::create_dir_all(parent)
-                .with_context(|| format!("Failed to create directory '{}'", parent))?;
+                .with_context(|| format!("Failed to create directory '{parent}'"))?;
         }
         let mut out = fs::File::create(&target_path)
-            .with_context(|| format!("Failed to create file '{}'", target_path))?;
+            .with_context(|| format!("Failed to create file '{target_path}'"))?;
         let hash_bytes = buf_copy_with_hash(&mut file, &mut out)
-            .with_context(|| format!("Failed to extract file '{}'", target_path))?;
+            .with_context(|| format!("Failed to extract file '{target_path}'"))?;
         if let Some(hash) = hash {
             check_hash_str(hash_bytes, hash).with_context(|| {
                 format!("Source file failed verification: '{}'", object_base.join(source_path))
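
The new `clean_extab` module option is an `Option<bool>` that is omitted from serialized configs when unset and gated with `unwrap_or(false)` in `load_dol_module`. A self-contained sketch of that round trip, assuming `serde` (with the derive feature) and `serde_yaml` as dependencies; the struct here is a toy, not the real `ModuleConfig`:

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Default)]
struct ModuleConfigToy {
    /// Process exception tables and zero out uninitialized data.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    clean_extab: Option<bool>,
}

fn main() -> Result<(), serde_yaml::Error> {
    let cfg: ModuleConfigToy = serde_yaml::from_str("clean_extab: true")?;
    assert_eq!(cfg.clean_extab, Some(true));
    // Matches the gate in load_dol_module: an absent flag means "do not clean".
    assert!(cfg.clean_extab.unwrap_or(false));
    Ok(())
}
```
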
@@ -104,16 +104,16 @@ fn dump(args: DumpArgs) -> Result<()> {
             // TODO make a basename method
             let name = name.trim_start_matches("D:").replace('\\', "/");
             let name = name.rsplit_once('/').map(|(_, b)| b).unwrap_or(&name);
-            let file_path = out_path.join(format!("{}.txt", name));
+            let file_path = out_path.join(format!("{name}.txt"));
             let mut file = buf_writer(&file_path)?;
             dump_debug_section(&args, &mut file, &obj_file, debug_section)?;
             file.flush()?;
         } else if args.no_color {
-            println!("\n// File {}:", name);
+            println!("\n// File {name}:");
             dump_debug_section(&args, &mut stdout(), &obj_file, debug_section)?;
         } else {
             let mut writer = HighlightWriter::new(syntax_set.clone(), syntax.clone(), theme);
-            writeln!(writer, "\n// File {}:", name)?;
+            writeln!(writer, "\n// File {name}:")?;
             dump_debug_section(&args, &mut writer, &obj_file, debug_section)?;
         }
     }

@@ -209,26 +209,25 @@ where
     }
     writeln!(w, "\n/*\n Compile unit: {}", unit.name)?;
     if let Some(producer) = unit.producer {
-        writeln!(w, " Producer: {}", producer)?;
+        writeln!(w, " Producer: {producer}")?;
     }
     if let Some(comp_dir) = unit.comp_dir {
-        writeln!(w, " Compile directory: {}", comp_dir)?;
+        writeln!(w, " Compile directory: {comp_dir}")?;
     }
     if let Some(language) = unit.language {
-        writeln!(w, " Language: {}", language)?;
+        writeln!(w, " Language: {language}")?;
     }
     if let (Some(start), Some(end)) = (unit.start_address, unit.end_address) {
-        writeln!(w, " Code range: {:#010X} -> {:#010X}", start, end)?;
+        writeln!(w, " Code range: {start:#010X} -> {end:#010X}")?;
     }
     if let Some(gcc_srcfile_name_offset) = unit.gcc_srcfile_name_offset {
         writeln!(
             w,
-            " GCC Source File Name Offset: {:#010X}",
-            gcc_srcfile_name_offset
+            " GCC Source File Name Offset: {gcc_srcfile_name_offset:#010X}"
         )?;
     }
     if let Some(gcc_srcinfo_offset) = unit.gcc_srcinfo_offset {
-        writeln!(w, " GCC Source Info Offset: {:#010X}", gcc_srcinfo_offset)?;
+        writeln!(w, " GCC Source Info Offset: {gcc_srcinfo_offset:#010X}")?;
     }
     writeln!(w, "*/")?;

@@ -269,7 +268,7 @@ where
             continue;
         }
         match tag_type_string(&info, &typedefs, &tag_type, child.is_erased) {
-            Ok(s) => writeln!(w, "{}", s)?,
+            Ok(s) => writeln!(w, "{s}")?,
             Err(e) => {
                 log::error!(
                     "Failed to emit tag {:X} (unit {}): {}",
@@ -146,14 +146,14 @@ fn disasm(args: DisasmArgs) -> Result<()> {
             let mut files_out = buf_writer(&args.out.join("link_order.txt"))?;
             for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
                 let out_name = file_stem_from_unit(&unit.name);
-                let out_path = asm_dir.join(format!("{}.s", out_name));
+                let out_path = asm_dir.join(format!("{out_name}.s"));
                 log::info!("Writing {}", out_path);

                 let mut w = buf_writer(&out_path)?;
                 write_asm(&mut w, split_obj)?;
                 w.flush()?;

-                writeln!(files_out, "{}.o", out_name)?;
+                writeln!(files_out, "{out_name}.o")?;
             }
             files_out.flush()?;
         }

@@ -402,7 +402,7 @@ fn signatures(args: SignaturesArgs) -> Result<()> {
             Ok(Some(signature)) => signature,
             Ok(None) => continue,
             Err(e) => {
-                eprintln!("Failed: {:?}", e);
+                eprintln!("Failed: {e:?}");
                 continue;
             }
         };

@@ -545,13 +545,13 @@ fn info(args: InfoArgs) -> Result<()> {
         .context("While reading .note.split section")?;
     println!("\nSplit metadata (.note.split):");
     if let Some(generator) = &meta.generator {
-        println!("\tGenerator: {}", generator);
+        println!("\tGenerator: {generator}");
     }
     if let Some(module_name) = &meta.module_name {
-        println!("\tModule name: {}", module_name);
+        println!("\tModule name: {module_name}");
     }
     if let Some(module_id) = meta.module_id {
-        println!("\tModule ID: {}", module_id);
+        println!("\tModule ID: {module_id}");
     }
     if let Some(virtual_addresses) = &meta.virtual_addresses {
         println!("\tVirtual addresses:");
@@ -6,17 +6,20 @@ use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, Secti
 use typed_path::Utf8NativePathBuf;

 use crate::{
-    obj::ObjSectionKind,
-    util::{alf::ALF_MAGIC, dol::process_dol, file::buf_writer, path::native_path},
+    util::{
+        dol::{process_dol, write_dol},
+        file::buf_writer,
+        path::native_path,
+    },
     vfs::open_file,
 };

 #[derive(FromArgs, PartialEq, Eq, Debug)]
-/// Converts an ELF (or ALF) file to a DOL file.
+/// Converts an ELF, ALF, or BootStage file to a DOL file.
 #[argp(subcommand, name = "elf2dol")]
 pub struct Args {
     #[argp(positional, from_str_fn(native_path))]
-    /// path to input ELF or ALF file
+    /// path to input ELF, ALF or BootStage file
     elf_file: Utf8NativePathBuf,
     #[argp(positional, from_str_fn(native_path))]
     /// path to output DOL

@@ -50,8 +53,8 @@ const MAX_DATA_SECTIONS: usize = 11;
 pub fn run(args: Args) -> Result<()> {
     let mut file = open_file(&args.elf_file, true)?;
     let data = file.map()?;
-    if data.len() >= 4 && data[0..4] == ALF_MAGIC {
-        return convert_alf(args, data);
+    if data.len() >= 4 && data[0..4] != object::elf::ELFMAG {
+        return convert_dol_like(args, data);
     }

     let obj_file = object::read::File::parse(data)?;

@@ -159,86 +162,11 @@ pub fn run(args: Args) -> Result<()> {
     Ok(())
 }

-fn convert_alf(args: Args, data: &[u8]) -> Result<()> {
+/// Converts a DOL-like format (ALF or BootStage) to a DOL file.
+fn convert_dol_like(args: Args, data: &[u8]) -> Result<()> {
     let obj = process_dol(data, "")?;
-
-    let mut header = DolHeader { entry_point: obj.entry.unwrap() as u32, ..Default::default() };
-    let mut offset = 0x100u32;
     let mut out = buf_writer(&args.dol_file)?;
-    out.seek(SeekFrom::Start(offset as u64))?;
-
-    // Text sections
-    for (_, section) in obj.sections.iter().filter(|(_, s)| s.kind == ObjSectionKind::Code) {
-        log::debug!("Processing text section '{}'", section.name);
-        let address = section.address as u32;
-        let size = align32(section.size as u32);
-        *header.text_sections.get_mut(header.text_section_count).ok_or_else(|| {
-            anyhow!("Too many text sections (while processing '{}')", section.name)
-        })? = DolSection { offset, address, size };
-        header.text_section_count += 1;
-        write_aligned(&mut out, &section.data, size)?;
-        offset += size;
-    }
-
-    // Data sections
-    for (_, section) in obj
-        .sections
-        .iter()
-        .filter(|(_, s)| matches!(s.kind, ObjSectionKind::Data | ObjSectionKind::ReadOnlyData))
-    {
-        log::debug!("Processing data section '{}'", section.name);
-        let address = section.address as u32;
-        let size = align32(section.size as u32);
-        *header.data_sections.get_mut(header.data_section_count).ok_or_else(|| {
-            anyhow!("Too many data sections (while processing '{}')", section.name)
-        })? = DolSection { offset, address, size };
-        header.data_section_count += 1;
-        write_aligned(&mut out, &section.data, size)?;
-        offset += size;
-    }
-
-    // BSS sections
-    for (_, section) in obj.sections.iter().filter(|(_, s)| s.kind == ObjSectionKind::Bss) {
-        let address = section.address as u32;
-        let size = section.size as u32;
-        if header.bss_address == 0 {
-            header.bss_address = address;
-        }
-        header.bss_size = (address + size) - header.bss_address;
-    }
-
-    // Offsets
-    out.rewind()?;
-    for section in &header.text_sections {
-        out.write_all(&section.offset.to_be_bytes())?;
-    }
-    for section in &header.data_sections {
-        out.write_all(&section.offset.to_be_bytes())?;
-    }
-
-    // Addresses
-    for section in &header.text_sections {
-        out.write_all(&section.address.to_be_bytes())?;
-    }
-    for section in &header.data_sections {
-        out.write_all(&section.address.to_be_bytes())?;
-    }
-
-    // Sizes
-    for section in &header.text_sections {
-        out.write_all(&section.size.to_be_bytes())?;
-    }
-    for section in &header.data_sections {
-        out.write_all(&section.size.to_be_bytes())?;
-    }
-
-    // BSS + entry
-    out.write_all(&header.bss_address.to_be_bytes())?;
-    out.write_all(&header.bss_size.to_be_bytes())?;
-    out.write_all(&header.entry_point.to_be_bytes())?;
-
-    // Done!
-    out.flush()?;
+    write_dol(&obj, &mut out)?;
     Ok(())
 }
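
The dispatch in `run` flips from "input matches the ALF magic" to "input is not ELF", so any non-ELF, DOL-like input (ALF or BootStage) flows through `convert_dol_like` and the shared `write_dol` writer instead of the hand-rolled header code removed above. A minimal sketch of the magic check; `object::elf::ELFMAG` in the diff holds these same four bytes:

```rust
// The 4-byte ELF magic: 0x7F 'E' 'L' 'F'.
const ELFMAG: [u8; 4] = *b"\x7fELF";

fn is_elf(data: &[u8]) -> bool {
    data.len() >= 4 && data[0..4] == ELFMAG
}

fn main() {
    assert!(is_elf(b"\x7fELF\x01\x01\x01"));
    assert!(!is_elf(b"RSO\0data"));
}
```
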
src/cmd/extab.rs (new file, 78 lines)

@@ -0,0 +1,78 @@
+use std::io::Write;
+
+use anyhow::{Context, Result};
+use argp::FromArgs;
+use typed_path::Utf8NativePathBuf;
+
+use crate::{
+    util,
+    util::{
+        dol::{process_dol, write_dol},
+        elf::{is_elf_file, process_elf, write_elf},
+        file::buf_writer,
+        path::native_path,
+    },
+    vfs::open_file,
+};
+
+#[derive(FromArgs, PartialEq, Debug)]
+/// Commands for processing extab (exception table) data.
+#[argp(subcommand, name = "extab")]
+pub struct Args {
+    #[argp(subcommand)]
+    command: SubCommand,
+}
+
+#[derive(FromArgs, PartialEq, Debug)]
+#[argp(subcommand)]
+enum SubCommand {
+    Clean(CleanArgs),
+}
+
+#[derive(FromArgs, PartialEq, Eq, Debug)]
+/// Rewrites extab data in a DOL or ELF file, replacing any uninitialized padding bytes.
+#[argp(subcommand, name = "clean")]
+pub struct CleanArgs {
+    #[argp(positional, from_str_fn(native_path))]
+    /// Path to input file
+    input: Utf8NativePathBuf,
+    #[argp(positional, from_str_fn(native_path))]
+    /// Path to output file
+    output: Utf8NativePathBuf,
+    #[argp(option, short = 'p')]
+    /// Data to replace padding bytes with, encoded as a hexadecimal string. If not specified, padding bytes will be zeroed instead.
+    padding: Option<String>,
+}
+
+pub fn run(args: Args) -> Result<()> {
+    match args.command {
+        SubCommand::Clean(clean_args) => clean_extab(clean_args),
+    }
+}
+
+fn clean_extab(args: CleanArgs) -> Result<()> {
+    let is_elf = is_elf_file(&args.input)?;
+    let mut obj = if is_elf {
+        process_elf(&args.input)?
+    } else {
+        let mut file = open_file(&args.input, true)?;
+        let name = args.input.file_stem().unwrap_or_default();
+        process_dol(file.map()?, name)?
+    };
+    let padding: Vec<u8> = match args.padding {
+        None => Vec::new(),
+        Some(padding_str) => {
+            hex::decode(padding_str).context("Failed to decode padding bytes from hex")?
+        }
+    };
+    let num_cleaned = util::extab::clean_extab(&mut obj, padding.iter().copied())?;
+    tracing::debug!("Cleaned {num_cleaned} extab symbols");
+    let mut out = buf_writer(&args.output)?;
+    if is_elf {
+        let data = write_elf(&obj, false)?;
+        out.write_all(&data).context("Failed to write ELF")?;
+    } else {
+        write_dol(&obj, &mut out).context("Failed to write DOL")?;
+    }
+    Ok(())
+}
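
Based on the argument definitions above, the new subcommand would be invoked like this (file names are placeholders):

```shell
$ dtk extab clean input.dol output.dol
# or, replace padding bytes with a repeating hex pattern instead of zeroes
$ dtk extab clean input.dol output.dol -p DEADBEEF
```
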
@@ -175,7 +175,7 @@ fn symbol(args: SymbolArgs) -> Result<()> {
     if let Some(vec) = entries.unit_references.get_vec(&symbol_ref) {
         println!("\nGenerated in TUs:");
         for x in vec {
-            println!(">>> {}", x);
+            println!(">>> {x}");
         }
     }
     println!("\n");
@@ -6,6 +6,7 @@ pub mod dol;
 pub mod dwarf;
 pub mod elf;
 pub mod elf2dol;
+pub mod extab;
 pub mod map;
 pub mod nlzss;
 pub mod rarc;
@@ -59,7 +59,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }
@@ -316,7 +316,7 @@ fn make(args: MakeArgs) -> Result<()> {
                 .unwrap_or(idx as u32);
             load_obj(file.map()?)
                 .map(|o| LoadedModule { module_id, file: o, path: path.clone() })
-                .with_context(|| format!("Failed to load '{}'", path))
+                .with_context(|| format!("Failed to load '{path}'"))
         })
         .collect::<Result<Vec<_>>>()?;

@@ -395,7 +395,7 @@ fn make(args: MakeArgs) -> Result<()> {
         let rel_path = module_info.path.with_extension("rel");
         let mut w = buf_writer(&rel_path)?;
         write_rel(&mut w, &info, &module_info.file, relocations)
-            .with_context(|| format!("Failed to write '{}'", rel_path))?;
+            .with_context(|| format!("Failed to write '{rel_path}'"))?;
         w.flush()?;
     }
@@ -143,7 +143,7 @@ fn make_rso(

     let si = sym
         .section_index()
-        .with_context(|| format!("Failed to find symbol `{}` section index", name))?;
+        .with_context(|| format!("Failed to find symbol `{name}` section index"))?;
     let addr = sym.address();

     *index = si.0 as u8;
@@ -45,14 +45,13 @@ pub fn run(args: Args) -> Result<()> {
             check(&args, file.as_mut())?;
         }
         if let Some(out_path) = &args.output {
-            touch(out_path)
-                .with_context(|| format!("Failed to touch output file '{}'", out_path))?;
+            touch(out_path).with_context(|| format!("Failed to touch output file '{out_path}'"))?;
         }
     } else {
         let mut w: Box<dyn Write> = if let Some(out_path) = &args.output {
             Box::new(
                 buf_writer(out_path)
-                    .with_context(|| format!("Failed to open output file '{}'", out_path))?,
+                    .with_context(|| format!("Failed to open output file '{out_path}'"))?,
             )
         } else {
             Box::new(stdout())
@@ -85,7 +85,7 @@ fn file_info(
     metadata: &VfsMetadata,
 ) -> anyhow::Result<Columns<5>> {
     let format =
-        detect(file).with_context(|| format!("Failed to detect file format for {}", filename))?;
+        detect(file).with_context(|| format!("Failed to detect file format for {filename}"))?;
     let mut info: Columns<5> = [
         Size::from_bytes(metadata.len).to_string(),
         filename.to_string(),

@@ -97,9 +97,9 @@ fn file_info(
         let mut decompressed = decompress_file(file, kind)?;
         let metadata = decompressed
             .metadata()
-            .with_context(|| format!("Failed to fetch metadata for {}", filename))?;
+            .with_context(|| format!("Failed to fetch metadata for {filename}"))?;
         let format = detect(decompressed.as_mut())
-            .with_context(|| format!("Failed to detect file format for {}", filename))?;
+            .with_context(|| format!("Failed to detect file format for {filename}"))?;
         info[3] = format!("Decompressed: {}", Size::from_bytes(metadata.len));
         info[4] = format.to_string();
     }

@@ -112,11 +112,11 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
         OpenResult::File(mut file, path) => {
             let filename = path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
             if args.short {
-                println!("{}", filename);
+                println!("{filename}");
             } else {
                 let metadata = file
                     .metadata()
-                    .with_context(|| format!("Failed to fetch metadata for {}", path))?;
+                    .with_context(|| format!("Failed to fetch metadata for {path}"))?;
                 files.push(file_info(filename, file.as_mut(), &metadata)?);
             }
         }

@@ -131,10 +131,10 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
         for (i, column) in entry.iter().enumerate() {
             if widths[i] > 0 {
                 if written > 0 {
-                    print!("{}", SEPARATOR);
+                    print!("{SEPARATOR}");
                 }
                 written += 1;
-                print!("{}", column);
+                print!("{column}");
                 let remain = widths[i].saturating_sub(column.width_cjk());
                 if remain > 0 {
                     print!("{:width$}", "", width = remain);

@@ -161,25 +161,25 @@ fn ls_directory(
         let display_path = base_filename.join(&filename);
         let metadata = fs
             .metadata(&entry_path)
-            .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
+            .with_context(|| format!("Failed to fetch metadata for {entry_path}"))?;
         match metadata.file_type {
             VfsFileType::File => {
                 let mut file = fs
                     .open(&entry_path)
-                    .with_context(|| format!("Failed to open file {}", entry_path))?;
+                    .with_context(|| format!("Failed to open file {entry_path}"))?;
                 if args.short {
-                    println!("{}", display_path);
+                    println!("{display_path}");
                 } else {
                     files.push(file_info(display_path.as_str(), file.as_mut(), &metadata)?);
                 }
             }
             VfsFileType::Directory => {
                 if args.short {
-                    println!("{}/", display_path);
+                    println!("{display_path}/");
                 } else {
                     files.push([
                         " ".to_string(),
-                        format!("{}/", display_path),
+                        format!("{display_path}/"),
                         "Directory".to_string(),
                         String::new(),
                         String::new(),

@@ -206,7 +206,7 @@ pub fn cp(mut args: CpArgs) -> anyhow::Result<()> {
             OpenResult::File(file, path) => {
                 let dest = if dest_is_dir {
                     fs::create_dir_all(&dest)
-                        .with_context(|| format!("Failed to create directory {}", dest))?;
+                        .with_context(|| format!("Failed to create directory {dest}"))?;
                     let filename =
                         path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
                     dest.join(filename)

@@ -234,12 +234,12 @@ fn cp_file(
     if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
         if auto_decompress {
             file = decompress_file(file.as_mut(), kind)
-                .with_context(|| format!("Failed to decompress file {}", dest))?;
+                .with_context(|| format!("Failed to decompress file {dest}"))?;
             compression = Some(kind);
         }
     }
     let metadata =
-        file.metadata().with_context(|| format!("Failed to fetch metadata for {}", dest))?;
+        file.metadata().with_context(|| format!("Failed to fetch metadata for {dest}"))?;
     if !quiet {
         if let Some(kind) = compression {
             println!(

@@ -254,10 +254,10 @@ fn cp_file(
         }
     }
     let mut dest_file =
-        File::create(dest).with_context(|| format!("Failed to create file {}", dest))?;
+        File::create(dest).with_context(|| format!("Failed to create file {dest}"))?;
     buf_copy(file.as_mut(), &mut dest_file)
-        .with_context(|| format!("Failed to copy file {}", dest))?;
-    dest_file.flush().with_context(|| format!("Failed to flush file {}", dest))?;
+        .with_context(|| format!("Failed to copy file {dest}"))?;
+    dest_file.flush().with_context(|| format!("Failed to flush file {dest}"))?;
     Ok(())
 }

@@ -268,18 +268,18 @@ fn cp_recursive(
     auto_decompress: bool,
     quiet: bool,
 ) -> anyhow::Result<()> {
-    fs::create_dir_all(dest).with_context(|| format!("Failed to create directory {}", dest))?;
+    fs::create_dir_all(dest).with_context(|| format!("Failed to create directory {dest}"))?;
     let entries = fs.read_dir(path)?;
     for filename in entries {
         let entry_path = path.join(&filename);
         let metadata = fs
             .metadata(&entry_path)
-            .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
+            .with_context(|| format!("Failed to fetch metadata for {entry_path}"))?;
         match metadata.file_type {
             VfsFileType::File => {
                 let file = fs
                     .open(&entry_path)
-                    .with_context(|| format!("Failed to open file {}", entry_path))?;
+                    .with_context(|| format!("Failed to open file {entry_path}"))?;
                 cp_file(file, &entry_path, &dest.join(filename), auto_decompress, quiet)?;
             }
             VfsFileType::Directory => {
@@ -80,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }

@@ -92,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
         let data = {
             let mut file = open_file(&path, true)?;
             decompress_yay0(file.map()?)
-                .with_context(|| format!("Failed to decompress '{}' using Yay0", path))?
+                .with_context(|| format!("Failed to decompress '{path}' using Yay0"))?
         };
         let out_path = if let Some(output) = &args.output {
             if single_file {

@@ -104,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
             path.as_path().to_cow()
         };
         fs::write(out_path.as_ref(), data)
-            .with_context(|| format!("Failed to write '{}'", out_path))?;
+            .with_context(|| format!("Failed to write '{out_path}'"))?;
     }
     Ok(())
 }
@ -80,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
path.as_path().to_cow()
};
fs::write(out_path.as_ref(), data)
.with_context(|| format!("Failed to write '{}'", out_path))?;
.with_context(|| format!("Failed to write '{out_path}'"))?;
}
Ok(())
}

@ -92,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let data = {
let mut file = open_file(&path, false)?;
decompress_yaz0(file.map()?)
.with_context(|| format!("Failed to decompress '{}' using Yaz0", path))?
.with_context(|| format!("Failed to decompress '{path}' using Yaz0"))?
};
let out_path = if let Some(output) = &args.output {
if single_file {

@ -104,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
path.as_path().to_cow()
};
fs::write(out_path.as_ref(), data)
.with_context(|| format!("Failed to write '{}'", out_path))?;
.with_context(|| format!("Failed to write '{out_path}'"))?;
}
Ok(())
}
@ -96,6 +96,7 @@ enum SubCommand {
Dwarf(cmd::dwarf::Args),
Elf(cmd::elf::Args),
Elf2Dol(cmd::elf2dol::Args),
Extab(cmd::extab::Args),
Map(cmd::map::Args),
Nlzss(cmd::nlzss::Args),
Rarc(cmd::rarc::Args),

@ -172,6 +173,7 @@ fn main() {
SubCommand::Dwarf(c_args) => cmd::dwarf::run(c_args),
SubCommand::Elf(c_args) => cmd::elf::run(c_args),
SubCommand::Elf2Dol(c_args) => cmd::elf2dol::run(c_args),
SubCommand::Extab(c_args) => cmd::extab::run(c_args),
SubCommand::Map(c_args) => cmd::map::run(c_args),
SubCommand::Nlzss(c_args) => cmd::nlzss::run(c_args),
SubCommand::Rarc(c_args) => cmd::rarc::run(c_args),
@ -403,7 +403,7 @@ impl ObjSymbols {
pub fn iter_ordered(&self) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
self.symbols_by_section
.iter()
.flat_map(|v| v.iter().map(|(_, v)| v))
.flat_map(|v| v.values())
.flat_map(move |v| v.iter().map(move |u| (*u, &self.symbols[*u as usize])))
}

@ -450,7 +450,7 @@ impl ObjSymbols {
self.symbols_by_section
.get(section_idx as usize)
.into_iter()
.flat_map(|v| v.iter().map(|(_, v)| v))
.flat_map(|v| v.values())
.flat_map(move |v| v.iter().map(move |u| (*u, &self.symbols[*u as usize])))
}
@ -161,7 +161,7 @@ impl FromReader for AlfSymbolKind {
match u32::from_reader(reader, e)? {
0 => Ok(Self::Function),
1 => Ok(Self::Object),
v => Err(Error::new(ErrorKind::InvalidData, format!("invalid ALF symbol kind: {}", v))),
v => Err(Error::new(ErrorKind::InvalidData, format!("invalid ALF symbol kind: {v}"))),
}
}
}
@ -442,12 +442,12 @@ where
match parse_extab(symbols, entry, section) {
Ok(s) => {
for line in s.trim_end().lines() {
writeln!(w, " * {}", line)?;
writeln!(w, " * {line}")?;
}
}
Err(e) => {
log::warn!("Failed to decode extab entry {}: {}", symbol.name, e);
writeln!(w, " * Failed to decode extab entry: {}", e)?;
writeln!(w, " * Failed to decode extab entry: {e}")?;
}
}
writeln!(w, " */")?;

@ -505,7 +505,7 @@ where
}
current_symbol_kind = find_symbol_kind(current_symbol_kind, symbols, vec)?;
current_data_kind = find_data_kind(current_data_kind, symbols, vec)
.with_context(|| format!("At address {:#010X}", sym_addr))?;
.with_context(|| format!("At address {sym_addr:#010X}"))?;
entry = entry_iter.next();
} else if current_address > sym_addr {
let dbg_symbols = vec.iter().map(|e| &symbols[e.index as usize]).collect_vec();

@ -660,8 +660,8 @@ where W: Write + ?Sized {
'\x0D' => write!(w, "\\r")?,
'\\' => write!(w, "\\\\")?,
'"' => write!(w, "\\\"")?,
c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
_ => write!(w, "\\{:03o}", b)?,
c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{c}")?,
_ => write!(w, "\\{b:03o}")?,
}
}
writeln!(w, "\"")?;

@ -684,13 +684,13 @@ where W: Write + ?Sized {
for c in cow.chars() {
match c {
'#' => write!(w, "\\#")?,
_ => write!(w, "{}", c)?,
_ => write!(w, "{c}")?,
}
}

write!(w, "\n\t.byte ")?;
for (i, &b) in data.iter().enumerate() {
write!(w, "0x{:02X}", b)?;
write!(w, "0x{b:02X}")?;
if i + 1 != data.len() {
write!(w, ", ")?;
}

@ -721,7 +721,7 @@ where W: Write + ?Sized {
'\x0D' => write!(w, "\\r")?,
'\\' => write!(w, "\\\\")?,
'"' => write!(w, "\\\"")?,
c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{c}")?,
_ => write!(w, "\\{:#X}", c as u32)?,
}
}

@ -793,7 +793,7 @@ where W: Write + ?Sized {
};
for chunk in remain.chunks(chunk_size) {
if data_kind == ObjDataKind::Byte || matches!(chunk.len(), 1 | 3 | 5..=7) {
let bytes = chunk.iter().map(|c| format!("{:#04X}", c)).collect::<Vec<String>>();
let bytes = chunk.iter().map(|c| format!("{c:#04X}")).collect::<Vec<String>>();
writeln!(w, "\t.byte {}", bytes.join(", "))?;
} else {
match chunk.len() {
@ -95,7 +95,7 @@ fn bin2c_symbol(
} else {
output.push(' ');
}
output.push_str(&format!("0x{:02X},", byte));
output.push_str(&format!("0x{byte:02X},"));
}
output.push_str("\n};\n");
output

@ -111,7 +111,7 @@ fn bin2c_raw(data: &[u8]) -> String {
output.push(' ');
}
}
output.push_str(&format!("0x{:02X},", byte));
output.push_str(&format!("0x{byte:02X},"));
}
output.push('\n');
output
@ -58,7 +58,7 @@ impl FromReader for MWComment {
if magic != MAGIC {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Invalid .comment section magic: {:?}", magic),
format!("Invalid .comment section magic: {magic:?}"),
));
}
// 0xB

@ -78,7 +78,7 @@ impl FromReader for MWComment {
value => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Invalid value for pool_data: {}", value),
format!("Invalid value for pool_data: {value}"),
))
}
};

@ -93,7 +93,7 @@ impl FromReader for MWComment {
v => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Expected header size {:#X}, got {:#X}", HEADER_SIZE, v),
format!("Expected header size {HEADER_SIZE:#X}, got {v:#X}"),
))
}
}

@ -102,7 +102,7 @@ impl FromReader for MWComment {
if flags & !7 != 0 {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Unexpected flag value {:#X}", flags),
format!("Unexpected flag value {flags:#X}"),
));
}
if flags & 1 == 1 {

@ -221,14 +221,14 @@ impl FromReader for CommentSym {
if value != 0 {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Unexpected value after active_flags (1): {:#X}", value),
format!("Unexpected value after active_flags (1): {value:#X}"),
));
}
let value = u8::from_reader(reader, e)?;
if value != 0 {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Unexpected value after active_flags (2): {:#X}", value),
format!("Unexpected value after active_flags (2): {value:#X}"),
));
}
Ok(out)
@ -282,11 +282,11 @@ where W: Write + ?Sized {
write!(w, " data:{kind}")?;
}
if let Some(hash) = symbol.name_hash {
write!(w, " hash:{:#010X}", hash)?;
write!(w, " hash:{hash:#010X}")?;
}
if let Some(hash) = symbol.demangled_name_hash {
if symbol.name_hash != symbol.demangled_name_hash {
write!(w, " dhash:{:#010X}", hash)?;
write!(w, " dhash:{hash:#010X}")?;
}
}
if symbol.flags.is_hidden() {

@ -439,10 +439,10 @@ where W: Write + ?Sized {
for unit in obj.link_order.iter().filter(|unit| all || !unit.autogenerated) {
write!(w, "\n{}:", unit.name)?;
if let Some(comment_version) = unit.comment_version {
write!(w, " comment:{}", comment_version)?;
write!(w, " comment:{comment_version}")?;
}
if let Some(order) = unit.order {
write!(w, " order:{}", order)?;
write!(w, " order:{order}")?;
}
writeln!(w)?;
let mut split_iter = obj.sections.all_splits().peekable();

@ -458,14 +458,14 @@ where W: Write + ?Sized {
write!(w, "\t{:<11} start:{:#010X} end:{:#010X}", section.name, addr, end)?;
if let Some(align) = split.align {
if align != default_section_align(section) as u32 {
write!(w, " align:{}", align)?;
write!(w, " align:{align}")?;
}
}
if split.common {
write!(w, " common")?;
}
if let Some(name) = &split.rename {
write!(w, " rename:{}", name)?;
write!(w, " rename:{name}")?;
}
if split.skip {
write!(w, " skip")?;

@ -783,7 +783,7 @@ pub mod signed_hex_serde {
if *value < 0 {
serializer.serialize_str(&format!("-{:#X}", -value))
} else {
serializer.serialize_str(&format!("{:#X}", value))
serializer.serialize_str(&format!("{value:#X}"))
}
}
@ -209,7 +209,7 @@ fn print_line(ins_diff: &ObjInsDiff, base_addr: u64) -> Vec<Span> {
pad_to = 5;
}
DiffText::Address(addr) => {
label_text = format!("{:x}:", addr);
label_text = format!("{addr:x}:");
pad_to = 5;
}
DiffText::Opcode(mnemonic, _op) => {
src/util/dol.rs (837 lines changed)
File diff suppressed because it is too large
@ -358,6 +358,7 @@ pub struct Tag {
pub kind: TagKind,
pub is_erased: bool, // Tag was deleted but has been reconstructed
pub is_erased_root: bool, // Tag is erased and is the root of a tree of erased tags
pub data_endian: Endian, // Endianness of the tag data (could be different from the address endianness for erased tags)
pub attributes: Vec<Attribute>,
}

@ -554,6 +555,7 @@ where
kind: TagKind::Padding,
is_erased,
is_erased_root: false,
data_endian,
attributes: Vec::new(),
});
return Ok(tags);

@ -563,26 +565,42 @@ where
let tag = TagKind::try_from(tag_num).context("Unknown DWARF tag type")?;
if tag == TagKind::Padding {
if include_erased {
// Erased entries that have become padding are little-endian, and we
// have to guess the length and tag of the first entry. We assume
// the entry is either a variable or a function, and read until we
// find the high_pc attribute. Only MwGlobalRef will follow, and
// these are unlikely to be confused with the length of the next
// entry.
// Erased entries that have become padding could be either
// little-endian or big-endian, and we have to guess the length and
// tag of the first entry. We assume the entry is either a variable
// or a function, and read until we find the high_pc attribute. Only
// MwGlobalRef will follow, and these are unlikely to be confused
// with the length of the next entry.
let mut attributes = Vec::new();
let mut is_function = false;

// Guess endianness based on first attribute
let data_endian = if is_erased {
data_endian
} else {
// Peek next two bytes
let mut buf = [0u8; 2];
reader.read_exact(&mut buf)?;
let attr_tag = u16::from_reader(&mut Cursor::new(&buf), data_endian)?;
reader.seek(SeekFrom::Current(-2))?;
match AttributeKind::try_from(attr_tag) {
Ok(_) => data_endian,
Err(_) => data_endian.flip(),
}
};

while reader.stream_position()? < position + size as u64 {
// Peek next two bytes
let mut buf = [0u8; 2];
reader.read_exact(&mut buf)?;
let attr_tag = u16::from_reader(&mut Cursor::new(&buf), Endian::Little)?;
let attr_tag = u16::from_reader(&mut Cursor::new(&buf), data_endian)?;
reader.seek(SeekFrom::Current(-2))?;

if is_function && attr_tag != AttributeKind::MwGlobalRef as u16 {
break;
}

let attr = read_attribute(reader, Endian::Little, addr_endian)?;
let attr = read_attribute(reader, data_endian, addr_endian)?;
if attr.kind == AttributeKind::HighPc {
is_function = true;
}

@ -594,12 +612,13 @@ where
kind,
is_erased: true,
is_erased_root: true,
data_endian,
attributes,
});

// Read the rest of the tags
while reader.stream_position()? < position + size as u64 {
for tag in read_tags(reader, Endian::Little, addr_endian, include_erased, true)? {
for tag in read_tags(reader, data_endian, addr_endian, include_erased, true)? {
tags.push(tag);
}
}
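The rewritten comment above describes the new approach: rather than assuming erased entries are always little-endian, the reader peeks the first attribute tag in the current byte order and flips if it does not decode to a known AttributeKind. A standalone sketch of that flip-on-mismatch pattern, using a stand-in tag value rather than a real DWARF attribute id:

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Endian { Big, Little }

    impl Endian {
        // Mirrors the Endian::flip helper added in src/util/reader.rs below.
        fn flip(self) -> Self {
            match self {
                Endian::Big => Endian::Little,
                Endian::Little => Endian::Big,
            }
        }
    }

    fn read_u16(buf: [u8; 2], e: Endian) -> u16 {
        match e {
            Endian::Big => u16::from_be_bytes(buf),
            Endian::Little => u16::from_le_bytes(buf),
        }
    }

    fn main() {
        const KNOWN_TAG: u16 = 0x0012; // stand-in for a decodable attribute tag
        let buf = [0x12, 0x00]; // little-endian bytes for 0x0012
        let mut e = Endian::Big; // initial guess
        if read_u16(buf, e) != KNOWN_TAG {
            e = e.flip(); // first guess did not decode; try the other order
        }
        assert_eq!(read_u16(buf, e), KNOWN_TAG);
        println!("guessed byte order: {e:?}");
    }

The real implementation keys the check on AttributeKind::try_from succeeding, not on a single known value; the sketch only shows the guess-then-flip shape.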
@ -616,6 +635,7 @@ where
kind: tag,
is_erased,
is_erased_root: false,
data_endian,
attributes,
});
}

@ -1145,8 +1165,8 @@ fn structure_type_string(
struct_def_string(info, typedefs, t)?
} else if include_keyword {
match t.kind {
StructureKind::Struct => format!("struct {}", name),
StructureKind::Class => format!("class {}", name),
StructureKind::Struct => format!("struct {name}"),
StructureKind::Class => format!("class {name}"),
}
} else {
name.clone()

@ -1178,7 +1198,7 @@ fn enumeration_type_string(
if name.starts_with('@') {
enum_def_string(t)?
} else if include_keyword {
format!("enum {}", name)
format!("enum {name}")
} else {
name.clone()
}

@ -1203,7 +1223,7 @@ fn union_type_string(
if name.starts_with('@') {
union_def_string(info, typedefs, t)?
} else if include_keyword {
format!("union {}", name)
format!("union {name}")
} else {
name.clone()
}

@ -1306,7 +1326,7 @@ pub fn subroutine_type_string(
write!(parameters, "{}{}", ts.prefix, ts.suffix)?;
}
if let Some(location) = &parameter.location {
write!(parameters, " /* {} */", location)?;
write!(parameters, " /* {location} */")?;
}
}
if t.var_args {

@ -1322,7 +1342,7 @@ pub fn subroutine_type_string(
let base_name = tag
.string_attribute(AttributeKind::Name)
.ok_or_else(|| anyhow!("member_of tag {} has no name attribute", member_of))?;
out.member = format!("{}::", base_name);
out.member = format!("{base_name}::");
}
Ok(out)
}

@ -1337,7 +1357,7 @@ pub fn subroutine_def_string(
if is_erased {
out.push_str("// Erased\n");
} else if let (Some(start), Some(end)) = (t.start_address, t.end_address) {
writeln!(out, "// Range: {:#X} -> {:#X}", start, end)?;
writeln!(out, "// Range: {start:#X} -> {end:#X}")?;
}
let rt = type_string(info, typedefs, &t.return_type, true)?;
if t.local {

@ -1361,15 +1381,15 @@ pub fn subroutine_def_string(
let base_name = tag
.string_attribute(AttributeKind::Name)
.ok_or_else(|| anyhow!("member_of tag {} has no name attribute", member_of))?;
write!(out, "{}::", base_name)?;
write!(out, "{base_name}::")?;

// Handle constructors and destructors
if let Some(name) = t.name.as_ref() {
if name == "__dt" {
write!(out, "~{}", base_name)?;
write!(out, "~{base_name}")?;
name_written = true;
} else if name == "__ct" {
write!(out, "{}", base_name)?;
write!(out, "{base_name}")?;
name_written = true;
}
}

@ -1398,7 +1418,7 @@ pub fn subroutine_def_string(
write!(parameters, "{}{}", ts.prefix, ts.suffix)?;
}
if let Some(location) = &parameter.location {
write!(parameters, " /* {} */", location)?;
write!(parameters, " /* {location} */")?;
}
}
if t.var_args {

@ -1420,7 +1440,7 @@ pub fn subroutine_def_string(
ts.suffix
)?;
if let Some(location) = &variable.location {
write!(var_out, " // {}", location)?;
write!(var_out, " // {location}")?;
}
writeln!(var_out)?;
}

@ -1435,7 +1455,7 @@ pub fn subroutine_def_string(
.get(&reference)
.ok_or_else(|| anyhow!("Failed to locate reference tag {}", reference))?;
if tag.kind == TagKind::Padding {
writeln!(out, " // -> ??? ({})", reference)?;
writeln!(out, " // -> ??? ({reference})")?;
continue;
}
let variable = process_variable_tag(info, tag)?;

@ -1477,13 +1497,13 @@ fn subroutine_block_string(
) -> Result<String> {
let mut out = String::new();
if let Some(name) = &block.name {
write!(out, "{}: ", name)?;
write!(out, "{name}: ")?;
} else {
out.push_str("/* anonymous block */ ");
}
out.push_str("{\n");
if let (Some(start), Some(end)) = (block.start_address, block.end_address) {
writeln!(out, " // Range: {:#X} -> {:#X}", start, end)?;
writeln!(out, " // Range: {start:#X} -> {end:#X}")?;
}
let mut var_out = String::new();
for variable in &block.variables {

@ -1496,7 +1516,7 @@ fn subroutine_block_string(
ts.suffix
)?;
if let Some(location) = &variable.location {
write!(var_out, " // {}", location)?;
write!(var_out, " // {location}")?;
}
writeln!(var_out)?;
}

@ -1635,9 +1655,9 @@ pub fn struct_def_string(
};
if let Some(name) = t.name.as_ref() {
if name.starts_with('@') {
write!(out, " /* {} */", name)?;
write!(out, " /* {name} */")?;
} else {
write!(out, " {}", name)?;
write!(out, " {name}")?;
}
}
let mut wrote_base = false;

@ -1665,7 +1685,7 @@ pub fn struct_def_string(
}
out.push_str(" {\n");
if let Some(byte_size) = t.byte_size {
writeln!(out, " // total size: {:#X}", byte_size)?;
writeln!(out, " // total size: {byte_size:#X}")?;
}
let mut vis = match t.kind {
StructureKind::Struct => Visibility::Public,

@ -1751,9 +1771,9 @@ pub fn enum_def_string(t: &EnumerationType) -> Result<String> {
let mut out = match t.name.as_ref() {
Some(name) => {
if name.starts_with('@') {
format!("enum /* {} */ {{\n", name)
format!("enum /* {name} */ {{\n")
} else {
format!("enum {} {{\n", name)
format!("enum {name} {{\n")
}
}
None => "enum {\n".to_string(),

@ -1769,9 +1789,9 @@ pub fn union_def_string(info: &DwarfInfo, typedefs: &TypedefMap, t: &UnionType)
let mut out = match t.name.as_ref() {
Some(name) => {
if name.starts_with('@') {
format!("union /* {} */ {{\n", name)
format!("union /* {name} */ {{\n")
} else {
format!("union {} {{\n", name)
format!("union {name} {{\n")
}
}
None => "union {\n".to_string(),

@ -2028,9 +2048,9 @@ fn process_array_tag(info: &DwarfInfo, tag: &Tag) -> Result<ArrayType> {
(AttributeKind::Sibling, _) => {}
(AttributeKind::SubscrData, AttributeValue::Block(data)) => {
subscr_data =
Some(process_array_subscript_data(data, info.e, tag.is_erased).with_context(
|| format!("Failed to process SubscrData for tag: {:?}", tag),
)?)
Some(process_array_subscript_data(data, info.e).with_context(|| {
format!("Failed to process SubscrData for tag: {tag:?}")
})?)
}
(AttributeKind::Ordering, val) => match val {
AttributeValue::Data2(d2) => {

@ -2056,11 +2076,7 @@ fn process_array_tag(info: &DwarfInfo, tag: &Tag) -> Result<ArrayType> {
Ok(ArrayType { element_type: Box::from(element_type), dimensions })
}

fn process_array_subscript_data(
data: &[u8],
e: Endian,
is_erased: bool,
) -> Result<(Type, Vec<ArrayDimension>)> {
fn process_array_subscript_data(data: &[u8], e: Endian) -> Result<(Type, Vec<ArrayDimension>)> {
let mut element_type = None;
let mut dimensions = Vec::new();
let mut data = data;

@ -2101,8 +2117,7 @@ fn process_array_subscript_data(
SubscriptFormat::ElementType => {
let mut cursor = Cursor::new(data);
// TODO: is this the right endianness to use for erased tags?
let type_attr =
read_attribute(&mut cursor, if is_erased { Endian::Little } else { e }, e)?;
let type_attr = read_attribute(&mut cursor, e, e)?;
element_type = Some(process_type(&type_attr, e)?);
data = &data[cursor.position() as usize..];
}

@ -2456,10 +2471,7 @@ fn process_subroutine_parameter_tag(info: &DwarfInfo, tag: &Tag) -> Result<Subro
) => kind = Some(process_type(attr, info.e)?),
(AttributeKind::Location, AttributeValue::Block(block)) => {
if !block.is_empty() {
location = Some(process_variable_location(
block,
if tag.is_erased { Endian::Little } else { info.e },
)?);
location = Some(process_variable_location(block, tag.data_endian)?);
}
}
(AttributeKind::MwDwarf2Location, AttributeValue::Block(_block)) => {

@ -2514,10 +2526,7 @@ fn process_local_variable_tag(info: &DwarfInfo, tag: &Tag) -> Result<SubroutineV
) => kind = Some(process_type(attr, info.e)?),
(AttributeKind::Location, AttributeValue::Block(block)) => {
if !block.is_empty() {
location = Some(process_variable_location(
block,
if tag.is_erased { Endian::Little } else { info.e },
)?);
location = Some(process_variable_location(block, tag.data_endian)?);
}
}
(AttributeKind::MwDwarf2Location, AttributeValue::Block(_block)) => {

@ -2615,13 +2624,13 @@ pub fn process_type(attr: &Attribute, e: Endian) -> Result<Type> {
match (attr.kind, &attr.value) {
(AttributeKind::FundType, &AttributeValue::Data2(type_id)) => {
let fund_type = FundType::parse_int(type_id)
.with_context(|| format!("Invalid fundamental type ID '{:04X}'", type_id))?;
.with_context(|| format!("Invalid fundamental type ID '{type_id:04X}'"))?;
Ok(Type { kind: TypeKind::Fundamental(fund_type), modifiers: vec![] })
}
(AttributeKind::ModFundType, AttributeValue::Block(ops)) => {
let type_id = u16::from_bytes(ops[ops.len() - 2..].try_into()?, e);
let fund_type = FundType::parse_int(type_id)
.with_context(|| format!("Invalid fundamental type ID '{:04X}'", type_id))?;
.with_context(|| format!("Invalid fundamental type ID '{type_id:04X}'"))?;
let modifiers = process_modifiers(&ops[..ops.len() - 2])?;
Ok(Type { kind: TypeKind::Fundamental(fund_type), modifiers })
}

@ -2762,7 +2771,7 @@ pub fn tag_type_string(
match ud {
UserDefinedType::Structure(_)
| UserDefinedType::Enumeration(_)
| UserDefinedType::Union(_) => Ok(format!("{};", ud_str)),
| UserDefinedType::Union(_) => Ok(format!("{ud_str};")),
_ => Ok(ud_str),
}
}

@ -2789,9 +2798,9 @@ fn variable_string(
out.push(';');
if include_extra {
let size = variable.kind.size(info)?;
out.push_str(&format!(" // size: {:#X}", size));
out.push_str(&format!(" // size: {size:#X}"));
if let Some(addr) = variable.address {
out.push_str(&format!(", address: {:#X}", addr));
out.push_str(&format!(", address: {addr:#X}"));
}
}
Ok(out)
@ -20,7 +20,7 @@ use object::{
Architecture, Endianness, File, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation,
RelocationFlags, RelocationTarget, SectionKind, Symbol, SymbolKind, SymbolScope, SymbolSection,
};
use typed_path::Utf8NativePath;
use typed_path::{Utf8NativePath, Utf8NativePathBuf};

use crate::{
array_ref,

@ -164,7 +164,7 @@ pub fn process_elf(path: &Utf8NativePath) -> Result<ObjInfo> {
hash_map::Entry::Vacant(e) => e.insert(0),
};
*index += 1;
let new_name = format!("{}_{}", file_name, index);
let new_name = format!("{file_name}_{index}");
// log::info!("Renaming {} to {}", file_name, new_name);
file_name.clone_from(&new_name);
match section_starts.entry(new_name.clone()) {

@ -275,8 +275,8 @@ pub fn process_elf(path: &Utf8NativePath) -> Result<ObjInfo> {
continue;
}
symbol_indexes.push(Some(symbols.len() as ObjSymbolIndex));
let align = mw_comment.as_ref().map(|(_, vec)| vec[symbol.index().0].align);
symbols.push(to_obj_symbol(&obj_file, &symbol, &section_indexes, align)?);
let comment_sym = mw_comment.as_ref().map(|(_, vec)| &vec[symbol.index().0 - 1]);
symbols.push(to_obj_symbol(&obj_file, &symbol, &section_indexes, comment_sym)?);
}

let mut link_order = Vec::<ObjUnit>::new();

@ -374,6 +374,7 @@ fn load_comment(obj_file: &File) -> Result<Option<(MWComment, Vec<CommentSym>)>>
let mut reader = Cursor::new(&*data);
let header = MWComment::from_reader(&mut reader, Endian::Big)?;
log::debug!("Loaded .comment section header {:?}", header);
CommentSym::from_reader(&mut reader, Endian::Big)?; // Null symbol
let mut comment_syms = Vec::with_capacity(obj_file.symbols().count());
for symbol in obj_file.symbols() {
let comment_sym = CommentSym::from_reader(&mut reader, Endian::Big)?;

@ -861,7 +862,7 @@ fn to_obj_symbol(
obj_file: &object::File<'_>,
symbol: &Symbol<'_, '_>,
section_indexes: &[Option<usize>],
align: Option<u32>,
comment_sym: Option<&CommentSym>,
) -> Result<ObjSymbol> {
let section = match symbol.section_index() {
Some(idx) => Some(obj_file.section_by_index(idx)?),

@ -891,6 +892,9 @@ fn to_obj_symbol(
if symbol.scope() == SymbolScope::Linkage {
flags = ObjSymbolFlagSet(flags.0 | ObjSymbolFlags::Hidden);
}
if comment_sym.is_some_and(|c| c.active_flags & 0x8 != 0) {
flags = ObjSymbolFlagSet(flags.0 | ObjSymbolFlags::Exported);
}
let section_idx = section.as_ref().and_then(|section| section_indexes[section.index().0]);
Ok(ObjSymbol {
name: name.to_string(),

@ -907,7 +911,7 @@ fn to_obj_symbol(
SymbolKind::Section => ObjSymbolKind::Section,
_ => bail!("Unsupported symbol kind: {:?}", symbol),
},
align,
align: comment_sym.map(|c| c.align),
..Default::default()
})
}

@ -1005,3 +1009,10 @@ fn write_relocatable_section_data(w: &mut Writer, section: &ObjSection) -> Resul
w.write(&section.data[current_address..]);
Ok(())
}

pub fn is_elf_file(path: &Utf8NativePathBuf) -> Result<bool> {
let mut file = open_file(path, true)?;
let mut magic = [0; 4];
file.read_exact(&mut magic)?;
Ok(magic == elf::ELFMAG)
}
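The to_obj_symbol change above passes the whole CommentSym through instead of a pre-extracted alignment, which lets the new code derive both the alignment and an Exported flag from it. The export check reads bit 3 (0x8) of active_flags; a tiny standalone sketch of that test (the helper name is hypothetical, the mask comes from the diff):

    // Hypothetical helper mirroring the new check in to_obj_symbol.
    fn is_exported(active_flags: u8) -> bool {
        active_flags & 0x8 != 0
    }

    fn main() {
        assert!(is_exported(0x0C));  // bit 3 set: symbol gets ObjSymbolFlags::Exported
        assert!(!is_exported(0x04)); // bit 3 clear: flags left unchanged
    }

Note the symbol index also shifts by one (vec[symbol.index().0 - 1]) because load_comment now consumes the null symbol's entry before collecting the rest.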
src/util/extab.rs (new file, 57 lines)
@ -0,0 +1,57 @@
use anyhow::{Context, Result};
use itertools::Itertools;

use crate::obj::ObjInfo;

pub fn clean_extab(obj: &mut ObjInfo, mut padding: impl Iterator<Item = u8>) -> Result<usize> {
let (extab_section_index, extab_section) = obj
.sections
.iter_mut()
.find(|(_, s)| s.name == "extab")
.ok_or_else(|| anyhow::anyhow!("No extab section found"))?;
let mut num_cleaned = 0;
for (_symbol_index, symbol) in obj
.symbols
.for_section(extab_section_index)
.filter(|(_, s)| s.size > 0)
.sorted_by_key(|(_, s)| s.address)
{
let data = extab_section.symbol_data(symbol)?;
let decoded = cwextab::decode_extab(data).with_context(|| {
format!(
"Failed to decode {} (extab {:#010X}..{:#010X})",
symbol.name,
symbol.address,
symbol.address + symbol.size
)
})?;
let mut updated = false;
for action in &decoded.exception_actions {
// Check if the current action has padding
if let Some(padding_offset) = action.get_struct_padding_offset() {
let index = padding_offset as usize;
let section_offset = (symbol.address - extab_section.address) as usize
+ action.action_offset as usize;
let mut clean_data: Vec<u8> = action.get_exaction_bytes(false);
// Write the two padding bytes
clean_data[index] = padding.next().unwrap_or(0);
clean_data[index + 1] = padding.next().unwrap_or(0);

let orig_data =
&mut extab_section.data[section_offset..section_offset + clean_data.len()];
orig_data.copy_from_slice(&clean_data);
updated = true;
}
}
if updated {
tracing::debug!(
"Replaced uninitialized bytes in {} (extab {:#010X}..{:#010X})",
symbol.name,
symbol.address,
symbol.address + symbol.size
);
num_cleaned += 1;
}
}
Ok(num_cleaned)
}
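clean_extab accepts any Iterator<Item = u8> as its padding source, so a caller can fill the two uninitialized bytes per action with zeroes or with bytes recovered from the original binary. A standalone sketch of the two-byte patch it applies to each action buffer (all values here are made up):

    fn main() {
        let mut padding = std::iter::repeat(0xA5u8); // hypothetical fill source
        let mut clean_data = vec![0u8; 8];           // stand-in for action bytes
        let index = 2;                               // reported padding offset
        // Write the two padding bytes, as clean_extab does.
        clean_data[index] = padding.next().unwrap_or(0);
        clean_data[index + 1] = padding.next().unwrap_or(0);
        assert_eq!(&clean_data[..4], &[0x00u8, 0x00, 0xA5, 0xA5][..]);
    }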
@ -26,7 +26,7 @@ pub fn buf_writer(path: &Utf8NativePath) -> Result<BufWriter<File>> {
if let Some(parent) = path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
let file = File::create(path).with_context(|| format!("Failed to create file '{}'", path))?;
let file = File::create(path).with_context(|| format!("Failed to create file '{path}'"))?;
Ok(BufWriter::new(file))
}
@ -47,11 +47,11 @@ pub fn generate_ldscript(

let out = template
.unwrap_or(LCF_TEMPLATE)
.replace("$ORIGIN", &format!("{:#X}", origin))
.replace("$ORIGIN", &format!("{origin:#X}"))
.replace("$SECTIONS", &section_defs)
.replace("$LAST_SECTION_SYMBOL", &last_section_symbol)
.replace("$LAST_SECTION_NAME", &last_section_name)
.replace("$STACKSIZE", &format!("{:#X}", stack_size))
.replace("$STACKSIZE", &format!("{stack_size:#X}"))
.replace("$FORCEACTIVE", &force_active.join("\n "))
.replace("$ARENAHI", &format!("{:#X}", obj.arena_hi.unwrap_or(0x81700000)));
Ok(out)

@ -74,7 +74,7 @@ pub fn generate_ldscript_partial(
// Some RELs have no entry point (`.text` was stripped) so mwld requires at least an empty
// `.init` section to be present in the linker script, for some reason.
if obj.entry.is_none() {
section_defs = format!(".init :{{}}\n {}", section_defs);
section_defs = format!(".init :{{}}\n {section_defs}");
}

let mut force_files = Vec::with_capacity(obj.link_order.len());
@ -10,6 +10,7 @@ pub mod diff;
pub mod dol;
pub mod dwarf;
pub mod elf;
pub mod extab;
pub mod file;
pub mod lcf;
pub mod map;
@ -209,7 +209,7 @@ impl<'a> RarcView<'a> {
)
})?;
let c_string = CStr::from_bytes_until_nul(name_buf)
.map_err(|_| format!("RARC: name at offset {} not null-terminated", offset))?;
.map_err(|_| format!("RARC: name at offset {offset} not null-terminated"))?;
Ok(c_string.to_string_lossy())
}
@ -20,6 +20,15 @@ impl From<object::Endianness> for Endian {
}
}

impl Endian {
pub fn flip(self) -> Self {
match self {
Endian::Big => Endian::Little,
Endian::Little => Endian::Big,
}
}
}

pub const DYNAMIC_SIZE: usize = 0;

pub const fn struct_size<const N: usize>(fields: [usize; N]) -> usize {

@ -263,6 +272,18 @@ impl ToWriter for Vec<u8> {
fn write_size(&self) -> usize { self.len() }
}

impl<const N: usize> ToWriter for [u32; N] {
fn to_writer<W>(&self, writer: &mut W, e: Endian) -> io::Result<()>
where W: Write + ?Sized {
for &value in self {
value.to_writer(writer, e)?;
}
Ok(())
}

fn write_size(&self) -> usize { N * u32::STATIC_SIZE }
}

pub fn write_vec<T, W>(writer: &mut W, vec: &[T], e: Endian) -> io::Result<()>
where
T: ToWriter,
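The new ToWriter impl for [u32; N] writes each word in the requested byte order, making write_size exactly N * 4 bytes. A standalone mimic under local names, not the crate's actual types:

    use std::io::{self, Write};

    // Local stand-in for the [u32; N] ToWriter impl above.
    fn write_words<W: Write + ?Sized>(words: &[u32], w: &mut W, big_endian: bool) -> io::Result<()> {
        for &v in words {
            let bytes = if big_endian { v.to_be_bytes() } else { v.to_le_bytes() };
            w.write_all(&bytes)?;
        }
        Ok(())
    }

    fn main() -> io::Result<()> {
        let mut out = Vec::new();
        write_words(&[0x0102_0304, 5], &mut out, true)?;
        assert_eq!(out, [1, 2, 3, 4, 0, 0, 0, 5]);
        assert_eq!(out.len(), 2 * 4); // matches write_size: N * u32::STATIC_SIZE
        Ok(())
    }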
@ -364,7 +364,7 @@ where
reader.seek(SeekFrom::Start(header.section_info_offset as u64))?;
for idx in 0..header.num_sections {
let section = RelSectionHeader::from_reader(reader, Endian::Big)
.with_context(|| format!("Failed to read REL section header {}", idx))?;
.with_context(|| format!("Failed to read REL section header {idx}"))?;
sections.push(section);
}
Ok(sections)

@ -390,7 +390,7 @@ where R: Read + Seek + ?Sized {
reader.seek(SeekFrom::Start(offset as u64))?;
let mut data = vec![0u8; size as usize];
reader.read_exact(&mut data).with_context(|| {
format!("Failed to read REL section {} data with size {:#X}", idx, size)
format!("Failed to read REL section {idx} data with size {size:#X}")
})?;
reader.seek(SeekFrom::Start(position))?;
data

@ -405,7 +405,7 @@ where R: Read + Seek + ?Sized {
text_section = Some(idx as u8);
(".text".to_string(), ObjSectionKind::Code, true)
} else {
(format!(".section{}", idx), ObjSectionKind::Data, false)
(format!(".section{idx}"), ObjSectionKind::Data, false)
};
sections.push(ObjSection {
name,
@ -147,14 +147,14 @@ impl FromReader for RsoHeader {
if next != 0 {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Expected 'next' to be 0, got {:#X}", next),
format!("Expected 'next' to be 0, got {next:#X}"),
));
}
let prev = u32::from_reader(reader, e)?;
if prev != 0 {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Expected 'prev' to be 0, got {:#X}", prev),
format!("Expected 'prev' to be 0, got {prev:#X}"),
));
}
let num_sections = u32::from_reader(reader, e)?;

@ -170,7 +170,7 @@ impl FromReader for RsoHeader {
if bss_section != 0 {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Expected 'bssSection' to be 0, got {:#X}", bss_section),
format!("Expected 'bssSection' to be 0, got {bss_section:#X}"),
));
}
let prolog_offset = u32::from_reader(reader, e)?;

@ -440,7 +440,7 @@ where R: Read + Seek + ?Sized {
// println!("Section {} offset {:#X} size {:#X}", idx, offset, size);

sections.push(ObjSection {
name: format!(".section{}", idx),
name: format!(".section{idx}"),
kind: if offset == 0 {
ObjSectionKind::Bss
} else if section.exec() {
@ -26,7 +26,25 @@ fn split_ctors_dtors(obj: &mut ObjInfo, start: SectionAddress, end: SectionAddre
let mut current_address = start;
let mut referenced_symbols = vec![];

// ProDG ctor list can start with -1
if matches!(read_u32(ctors_section, current_address.address), Some(0xFFFFFFFF)) {
current_address += 4;
}

while current_address < end {
// ProDG hack when the end address is not known
if matches!(read_u32(ctors_section, current_address.address), Some(0)) {
while current_address < end {
ensure!(
matches!(read_u32(ctors_section, current_address.address), Some(0)),
"{} data detected at {:#010X} after null pointer",
ctors_section.name,
current_address,
);
current_address += 4;
}
break;
}
let function_addr = read_address(obj, ctors_section, current_address.address)?;
log::debug!("Found {} entry: {:#010X}", ctors_section.name, function_addr);
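The new ProDG handling tolerates two quirks in .ctors/.dtors: an optional leading -1 (0xFFFFFFFF) sentinel, and a null terminator followed only by zero padding when the section's end is not known. A standalone sketch of that scan over made-up addresses:

    fn main() {
        // Hypothetical .ctors words: sentinel, two entries, zero padding.
        let words: Vec<u32> = vec![0xFFFF_FFFF, 0x8000_1234, 0x8000_5678, 0, 0];
        let mut idx = 0;
        if words.first() == Some(&0xFFFF_FFFF) {
            idx += 1; // skip the ProDG sentinel
        }
        let mut ctors = Vec::new();
        while idx < words.len() && words[idx] != 0 {
            ctors.push(words[idx]);
            idx += 1;
        }
        // Everything after the first null pointer must also be null.
        assert!(words[idx..].iter().all(|&w| w == 0));
        assert_eq!(ctors, [0x8000_1234, 0x8000_5678]);
    }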
@ -644,7 +662,9 @@ fn add_padding_symbols(obj: &mut ObjInfo) -> Result<()> {

// Check if symbol is missing data between the end of the symbol and the next symbol
let symbol_end = (symbol.address + symbol.size) as u32;
if section.kind != ObjSectionKind::Code && next_address > symbol_end {
if !matches!(section.kind, ObjSectionKind::Code | ObjSectionKind::Bss)
&& next_address > symbol_end
{
let data = section.data_range(symbol_end, next_address)?;
if data.iter().any(|&x| x != 0) {
log::debug!(

@ -653,7 +673,7 @@ fn add_padding_symbols(obj: &mut ObjInfo) -> Result<()> {
next_address
);
let name = if obj.module_id == 0 {
format!("lbl_{:08X}", symbol_end)
format!("lbl_{symbol_end:08X}")
} else {
format!(
"lbl_{}_{}_{:X}",

@ -1463,7 +1483,7 @@ fn auto_unit_name(
if unit_exists(&unit_name, obj, new_splits) {
let mut i = 1;
loop {
let new_unit_name = format!("{}_{}", unit_name, i);
let new_unit_name = format!("{unit_name}_{i}");
if !unit_exists(&new_unit_name, obj, new_splits) {
unit_name = new_unit_name;
break;
@ -333,7 +333,7 @@ impl VfsFile for DiscFile {

pub fn nod_to_io_error(e: nod::Error) -> io::Error {
match e {
nod::Error::Io(msg, e) => io::Error::new(e.kind(), format!("{}: {}", msg, e)),
nod::Error::Io(msg, e) => io::Error::new(e.kind(), format!("{msg}: {e}")),
e => io::Error::new(io::ErrorKind::InvalidData, e),
}
}
@ -108,8 +108,8 @@ impl Display for VfsError {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
match self {
VfsError::NotFound => write!(f, "File or directory not found"),
VfsError::IoError(e) => write!(f, "{}", e),
VfsError::Other(e) => write!(f, "{}", e),
VfsError::IoError(e) => write!(f, "{e}"),
VfsError::Other(e) => write!(f, "{e}"),
VfsError::NotADirectory => write!(f, "Path is a file, not a directory"),
VfsError::IsADirectory => write!(f, "Path is a directory, not a file"),
}

@ -129,8 +129,8 @@ impl Display for FileFormat {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
match self {
FileFormat::Regular => write!(f, "File"),
FileFormat::Compressed(kind) => write!(f, "Compressed: {}", kind),
FileFormat::Archive(kind) => write!(f, "Archive: {}", kind),
FileFormat::Compressed(kind) => write!(f, "Compressed: {kind}"),
FileFormat::Archive(kind) => write!(f, "Archive: {kind}"),
}
}
}

@ -165,7 +165,7 @@ impl Display for ArchiveKind {
match self {
ArchiveKind::Rarc => write!(f, "RARC"),
ArchiveKind::U8 => write!(f, "U8"),
ArchiveKind::Disc(format) => write!(f, "Disc ({})", format),
ArchiveKind::Disc(format) => write!(f, "Disc ({format})"),
ArchiveKind::Wad => write!(f, "WAD"),
}
}

@ -228,13 +228,13 @@ pub fn open_path_with_fs(
let file_type = match fs.metadata(segment) {
Ok(metadata) => metadata.file_type,
Err(VfsError::NotFound) => return Err(anyhow!("{} not found", current_path)),
Err(e) => return Err(e).context(format!("Failed to open {}", current_path)),
Err(e) => return Err(e).context(format!("Failed to open {current_path}")),
};
match file_type {
VfsFileType::File => {
file = Some(
fs.open(segment)
.with_context(|| format!("Failed to open {}", current_path))?,
.with_context(|| format!("Failed to open {current_path}"))?,
);
}
VfsFileType::Directory => {

@ -248,7 +248,7 @@ pub fn open_path_with_fs(
}
let mut current_file = file.take().unwrap();
let format = detect(current_file.as_mut())
.with_context(|| format!("Failed to detect file type for {}", current_path))?;
.with_context(|| format!("Failed to detect file type for {current_path}"))?;
if let Some(&next) = split.peek() {
match next {
"nlzss" => {

@ -256,7 +256,7 @@ pub fn open_path_with_fs(
file = Some(
decompress_file(current_file.as_mut(), CompressionKind::Nlzss)
.with_context(|| {
format!("Failed to decompress {} with NLZSS", current_path)
format!("Failed to decompress {current_path} with NLZSS")
})?,
);
}

@ -265,7 +265,7 @@ pub fn open_path_with_fs(
file = Some(
decompress_file(current_file.as_mut(), CompressionKind::Yay0)
.with_context(|| {
format!("Failed to decompress {} with Yay0", current_path)
format!("Failed to decompress {current_path} with Yay0")
})?,
);
}

@ -274,7 +274,7 @@ pub fn open_path_with_fs(
file = Some(
decompress_file(current_file.as_mut(), CompressionKind::Yaz0)
.with_context(|| {
format!("Failed to decompress {} with Yaz0", current_path)
format!("Failed to decompress {current_path} with Yaz0")
})?,
);
}

@ -283,16 +283,15 @@ pub fn open_path_with_fs(
return Err(anyhow!("{} is not an archive", current_path))
}
FileFormat::Compressed(kind) => {
file =
Some(decompress_file(current_file.as_mut(), kind).with_context(
|| format!("Failed to decompress {}", current_path),
)?);
file = Some(
decompress_file(current_file.as_mut(), kind)
.with_context(|| format!("Failed to decompress {current_path}"))?,
);
// Continue the loop to detect the new format
}
FileFormat::Archive(kind) => {
fs = open_fs(current_file, kind).with_context(|| {
format!("Failed to open container {}", current_path)
})?;
fs = open_fs(current_file, kind)
.with_context(|| format!("Failed to open container {current_path}"))?;
// Continue the loop to open the next segment
}
},

@ -302,7 +301,7 @@ pub fn open_path_with_fs(
return match format {
FileFormat::Compressed(kind) if auto_decompress => Ok(OpenResult::File(
decompress_file(current_file.as_mut(), kind)
.with_context(|| format!("Failed to decompress {}", current_path))?,
.with_context(|| format!("Failed to decompress {current_path}"))?,
segment.to_path_buf(),
)),
_ => Ok(OpenResult::File(current_file, segment.to_path_buf())),
@ -118,11 +118,11 @@ impl Vfs for WadFs {
}
let title_id = hex::encode(self.wad.ticket().title_id);
let mut entries = Vec::new();
entries.push(format!("{}.tik", title_id));
entries.push(format!("{}.tmd", title_id));
entries.push(format!("{}.cert", title_id));
entries.push(format!("{title_id}.tik"));
entries.push(format!("{title_id}.tmd"));
entries.push(format!("{title_id}.cert"));
if self.wad.header.footer_size.get() > 0 {
entries.push(format!("{}.trailer", title_id));
entries.push(format!("{title_id}.trailer"));
}
for content in self.wad.contents() {
entries.push(format!("{:08x}.app", content.content_index.get()));