Mirror of https://github.com/encounter/decomp-toolkit.git, synced 2025-12-12 14:46:17 +00:00
Analyzer fixes galore
- Transparent NLZSS decompression (add `:nlzss` to path)
- Overhaul portions of the analyzer to support more games
- Reject some invalid data relocations automatically
- Jump table analysis fixes
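
The `:nlzss` path suffix is the opt-in marker for transparent decompression: the suffix is stripped when the input is opened and the payload is decompressed before hashing or parsing. A minimal sketch of the idea, assuming a helper of roughly this shape (the actual logic lives in util::file and is not part of this diff; the name below is illustrative):

fn split_nlzss_suffix(path: &str) -> (&str, bool) {
    // "files/stage.bin:nlzss" -> ("files/stage.bin", true): open the base
    // path, then NLZSS-decompress its contents before any further processing
    match path.strip_suffix(":nlzss") {
        Some(base) => (base, true),
        None => (path, false),
    }
}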

src/cmd/dol.rs (151 changed lines)
@@ -5,7 +5,7 @@ use std::{
     ffi::OsStr,
     fs,
     fs::DirBuilder,
-    io::Write,
+    io::{Cursor, Write},
     mem::take,
     path::{Path, PathBuf},
     time::Instant,
@@ -31,8 +31,9 @@ use crate::{
     },
     cmd::shasum::file_sha1_string,
     obj::{
-        ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSectionKind, ObjSymbol, ObjSymbolFlagSet,
-        ObjSymbolFlags, ObjSymbolKind, ObjSymbolScope, SymbolIndex,
+        best_match_for_reloc, ObjDataKind, ObjInfo, ObjKind, ObjReloc, ObjRelocKind,
+        ObjSectionKind, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind, ObjSymbolScope,
+        SymbolIndex,
     },
     util::{
         asm::write_asm,
@@ -44,10 +45,7 @@ use crate::{
         dep::DepFile,
         dol::process_dol,
         elf::{process_elf, write_elf},
-        file::{
-            buf_reader, buf_writer, decompress_if_needed, map_file, touch, verify_hash,
-            FileIterator, Reader,
-        },
+        file::{buf_reader, buf_writer, map_file, touch, verify_hash, FileIterator},
         lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
         map::apply_map_file,
         rel::{process_rel, process_rel_header},
@@ -272,6 +270,7 @@ pub struct OutputModule {
     pub module_id: u32,
     #[serde(with = "path_slash_serde")]
     pub ldscript: PathBuf,
+    pub entry: Option<String>,
     pub units: Vec<OutputUnit>,
 }

@@ -293,7 +292,7 @@ pub fn run(args: Args) -> Result<()> {
 }

 fn apply_selfile(obj: &mut ObjInfo, buf: &[u8]) -> Result<()> {
-    let rso = process_rso(&mut Reader::new(buf))?;
+    let rso = process_rso(&mut Cursor::new(buf))?;
     for symbol in rso.symbols.iter() {
         let dol_section_index = match symbol.section {
             Some(section) => section,
@@ -373,25 +372,26 @@ fn apply_selfile(obj: &mut ObjInfo, buf: &[u8]) -> Result<()> {
 fn info(args: InfoArgs) -> Result<()> {
     let mut obj = {
         let file = map_file(&args.dol_file)?;
-        let data = decompress_if_needed(file.as_slice())?;
-        process_dol(data.as_ref(), "")?
+        process_dol(file.as_slice(), "")?
     };
     apply_signatures(&mut obj)?;

     let mut state = AnalyzerState::default();
+    FindSaveRestSleds::execute(&mut state, &obj)?;
     state.detect_functions(&obj)?;
-    log::debug!("Discovered {} functions", state.function_slices.len());
+    log::debug!(
+        "Discovered {} functions",
+        state.functions.iter().filter(|(_, i)| i.end.is_some()).count()
+    );

     FindTRKInterruptVectorTable::execute(&mut state, &obj)?;
-    FindSaveRestSleds::execute(&mut state, &obj)?;
     state.apply(&mut obj)?;

     apply_signatures_post(&mut obj)?;

     if let Some(selfile) = &args.selfile {
         let file = map_file(selfile)?;
-        let data = decompress_if_needed(file.as_slice())?;
-        apply_selfile(&mut obj, data.as_ref())?;
+        apply_selfile(&mut obj, file.as_slice())?;
     }

     println!("{}:", obj.name);
@@ -450,19 +450,31 @@ fn update_symbols(obj: &mut ObjInfo, modules: &ModuleMap<'_>, create_symbols: bo
     {
         if source_module_id == obj.module_id {
             // Skip if already resolved
-            let (_, source_section) = obj
-                .sections
-                .get_elf_index(rel_reloc.section as usize)
-                .ok_or_else(|| anyhow!("Failed to locate REL section {}", rel_reloc.section))?;
+            let (_, source_section) =
+                obj.sections.get_elf_index(rel_reloc.section as usize).ok_or_else(|| {
+                    anyhow!(
+                        "Failed to locate REL section {} in module ID {}: source module {}, {:?}",
+                        rel_reloc.section,
+                        obj.module_id,
+                        source_module_id,
+                        rel_reloc
+                    )
+                })?;
            if source_section.relocations.contains(rel_reloc.address) {
                continue;
            }
        }

-        let (target_section_index, target_section) = obj
-            .sections
-            .get_elf_index(rel_reloc.target_section as usize)
-            .ok_or_else(|| anyhow!("Failed to locate REL section {}", rel_reloc.target_section))?;
+        let (target_section_index, target_section) =
+            obj.sections.get_elf_index(rel_reloc.target_section as usize).ok_or_else(|| {
+                anyhow!(
+                    "Failed to locate REL section {} in module ID {}: source module {}, {:?}",
+                    rel_reloc.target_section,
+                    obj.module_id,
+                    source_module_id,
+                    rel_reloc
+                )
+            })?;

        if let Some((symbol_index, symbol)) = obj.symbols.for_relocation(
            SectionAddress::new(target_section_index, rel_reloc.addend),
@@ -517,10 +529,15 @@ fn create_relocations(obj: &mut ObjInfo, modules: &ModuleMap<'_>, dol_obj: &ObjI
     // Resolve all relocations in this module
     for rel_reloc in take(&mut obj.unresolved_relocations) {
         // Skip if already resolved
-        let (_, source_section) = obj
-            .sections
-            .get_elf_index(rel_reloc.section as usize)
-            .ok_or_else(|| anyhow!("Failed to locate REL section {}", rel_reloc.section))?;
+        let (_, source_section) =
+            obj.sections.get_elf_index(rel_reloc.section as usize).ok_or_else(|| {
+                anyhow!(
+                    "Failed to locate REL section {} in module ID {}: {:?}",
+                    rel_reloc.section,
+                    obj.module_id,
+                    rel_reloc
+                )
+            })?;
         if source_section.relocations.contains(rel_reloc.address) {
             continue;
         }
@@ -575,10 +592,8 @@ fn create_relocations(obj: &mut ObjInfo, modules: &ModuleMap<'_>, dol_obj: &ObjI
                 Some(rel_reloc.module_id)
             },
         };
-        let (_, source_section) = obj
-            .sections
-            .get_elf_index_mut(rel_reloc.section as usize)
-            .ok_or_else(|| anyhow!("Failed to locate REL section {}", rel_reloc.section))?;
+        let (_, source_section) =
+            obj.sections.get_elf_index_mut(rel_reloc.section as usize).unwrap();
         source_section.relocations.insert(rel_reloc.address, reloc)?;
     }

@@ -653,11 +668,10 @@ fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
     log::debug!("Loading {}", config.base.object.display());
     let mut obj = {
         let file = map_file(&config.base.object)?;
-        let data = decompress_if_needed(file.as_slice())?;
         if let Some(hash_str) = &config.base.hash {
-            verify_hash(data.as_ref(), hash_str)?;
+            verify_hash(file.as_slice(), hash_str)?;
         }
-        process_dol(data.as_ref(), config.base.name().as_ref())?
+        process_dol(file.as_slice(), config.base.name().as_ref())?
     };
     let mut dep = vec![config.base.object.clone()];

@@ -688,10 +702,9 @@ fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
     if !config.quick_analysis {
         let mut state = AnalyzerState::default();
         debug!("Detecting function boundaries");
-        state.detect_functions(&obj)?;
-
-        FindTRKInterruptVectorTable::execute(&mut state, &obj)?;
         FindSaveRestSleds::execute(&mut state, &obj)?;
+        state.detect_functions(&obj)?;
+        FindTRKInterruptVectorTable::execute(&mut state, &obj)?;
         state.apply(&mut obj)?;
     }

@@ -701,11 +714,10 @@ fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
     if let Some(selfile) = &config.selfile {
         log::info!("Loading {}", selfile.display());
         let file = map_file(selfile)?;
-        let data = decompress_if_needed(file.as_slice())?;
         if let Some(hash) = &config.selfile_hash {
-            verify_hash(data.as_ref(), hash)?;
+            verify_hash(file.as_slice(), hash)?;
         }
-        apply_selfile(&mut obj, data.as_ref())?;
+        apply_selfile(&mut obj, file.as_slice())?;
         dep.push(selfile.clone());
     }

@@ -761,11 +773,21 @@ fn split_write_obj(

     debug!("Writing object files");
     let obj_dir = out_dir.join("obj");
+    let entry = if obj.kind == ObjKind::Executable {
+        obj.entry.and_then(|e| {
+            let (section_index, _) = obj.sections.at_address(e as u32).ok()?;
+            let symbols = obj.symbols.at_section_address(section_index, e as u32).collect_vec();
+            best_match_for_reloc(symbols, ObjRelocKind::PpcRel24).map(|(_, s)| s.name.clone())
+        })
+    } else {
+        obj.symbols.by_name("_prolog")?.map(|(_, s)| s.name.clone())
+    };
     let mut out_config = OutputModule {
         name: module_config.name().to_string(),
         module_id: obj.module_id,
         ldscript: out_dir.join("ldscript.lcf"),
         units: Vec::with_capacity(split_objs.len()),
+        entry,
     };
     for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
         let out_obj = write_elf(split_obj)?;
@@ -802,13 +824,12 @@

 fn load_analyze_rel(config: &ProjectConfig, module_config: &ModuleConfig) -> Result<AnalyzeResult> {
     debug!("Loading {}", module_config.object.display());
-    let map = map_file(&module_config.object)?;
-    let buf = decompress_if_needed(map.as_slice())?;
+    let file = map_file(&module_config.object)?;
     if let Some(hash_str) = &module_config.hash {
-        verify_hash(buf.as_ref(), hash_str)?;
+        verify_hash(file.as_slice(), hash_str)?;
     }
     let (_, mut module_obj) =
-        process_rel(&mut Reader::new(buf.as_ref()), module_config.name().as_ref())?;
+        process_rel(&mut Cursor::new(file.as_slice()), module_config.name().as_ref())?;

     if let Some(comment_version) = config.mw_comment_version {
         module_obj.mw_comment = Some(MWComment::new(comment_version)?);
@@ -863,11 +884,10 @@ fn split(args: SplitArgs) -> Result<()> {

     for module_config in config.modules.iter_mut() {
         let file = map_file(&module_config.object)?;
-        let buf = decompress_if_needed(file.as_slice())?;
         if let Some(hash_str) = &module_config.hash {
-            verify_hash(buf.as_ref(), hash_str)?;
+            verify_hash(file.as_slice(), hash_str)?;
         } else {
-            module_config.hash = Some(file_sha1_string(&mut Reader::new(buf.as_ref()))?);
+            module_config.hash = Some(file_sha1_string(&mut file.as_reader())?);
         }
     }

@@ -1080,21 +1100,26 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
         for (_symbol_idx, symbol) in real_obj.symbols.for_section(section_index) {
             let symbol_addr = SectionAddress::new(section_index, symbol.address as u32);
             real_functions.insert(symbol_addr, symbol.name.clone());
-            match state.function_bounds.get(&symbol_addr) {
-                Some(&Some(end)) => {
-                    if symbol.size > 0 && end != (symbol_addr + symbol.size as u32) {
-                        log::warn!(
-                            "Function {:#010X} ({}) ends at {:#010X}, expected {:#010X}",
-                            symbol.address,
-                            symbol.name,
-                            end,
-                            symbol.address + symbol.size
-                        );
-                    }
-                }
-                Some(_) => {
-                    log::warn!("Function {:#010X} ({}) has no end", symbol.address, symbol.name);
+            match state.functions.get(&symbol_addr) {
+                Some(info) => {
+                    if let Some(end) = info.end {
+                        if symbol.size > 0 && end != (symbol_addr + symbol.size as u32) {
+                            log::warn!(
+                                "Function {:#010X} ({}) ends at {:#010X}, expected {:#010X}",
+                                symbol.address,
+                                symbol.name,
+                                end,
+                                symbol.address + symbol.size
+                            );
+                        }
+                    } else {
+                        log::warn!(
+                            "Function {:#010X} ({}) has no end",
+                            symbol.address,
+                            symbol.name
+                        );
+                    }
                 }
                 None => {
                     log::warn!(
                         "Function {:#010X} ({}) not discovered!",
@@ -1105,8 +1130,8 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
             }
         }
     }
-    for (&start, &end) in &state.function_bounds {
-        let Some(end) = end else {
+    for (&start, info) in &state.functions {
+        let Some(end) = info.end else {
            continue;
        };
        if !real_functions.contains_key(&start) {
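
These validate changes reflect the analyzer overhaul mentioned in the commit message: the old `function_bounds: BTreeMap<SectionAddress, Option<SectionAddress>>` is replaced by a `functions` map holding a per-function info struct whose `end` is filled in once bounds analysis succeeds. That struct's definition is not part of this diff; a sketch consistent with the call sites here (`info.end`, `i.end.is_some()`, and merge's `i.is_function()` below):

// Hypothetical shape, inferred only from usage in this commit.
pub struct FunctionInfo {
    pub end: Option<SectionAddress>, // None until the function's end is known
    // ...analysis flags, discovered slices, etc.
}

impl FunctionInfo {
    pub fn is_function(&self) -> bool {
        self.end.is_some() // assumption: counted once bounds are resolved
    }
}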
@@ -1206,11 +1231,10 @@ fn diff(args: DiffArgs) -> Result<()> {
     log::info!("Loading {}", config.base.object.display());
     let mut obj = {
         let file = map_file(&config.base.object)?;
-        let data = decompress_if_needed(file.as_slice())?;
         if let Some(hash_str) = &config.base.hash {
-            verify_hash(data.as_ref(), hash_str)?;
+            verify_hash(file.as_slice(), hash_str)?;
         }
-        process_dol(data.as_ref(), config.base.name().as_ref())?
+        process_dol(file.as_slice(), config.base.name().as_ref())?
     };

     if let Some(symbols_path) = &config.base.symbols {
@@ -1353,11 +1377,10 @@ fn apply(args: ApplyArgs) -> Result<()> {
     log::info!("Loading {}", config.base.object.display());
     let mut obj = {
         let file = map_file(&config.base.object)?;
-        let data = decompress_if_needed(file.as_slice())?;
         if let Some(hash_str) = &config.base.hash {
-            verify_hash(data.as_ref(), hash_str)?;
+            verify_hash(file.as_slice(), hash_str)?;
        }
-        process_dol(data.as_ref(), config.base.name().as_ref())?
+        process_dol(file.as_slice(), config.base.name().as_ref())?
     };

     if let Some(symbols_path) = &config.base.symbols {

src/cmd/elf.rs

@@ -2,7 +2,7 @@ use std::{
     collections::{btree_map, hash_map, BTreeMap, HashMap},
     fs,
     fs::DirBuilder,
-    io::Write,
+    io::{Cursor, Write},
     path::PathBuf,
 };

@@ -22,7 +22,7 @@ use crate::{
         comment::{read_comment_sym, MWComment},
         config::{write_splits_file, write_symbols_file},
         elf::{process_elf, write_elf},
-        file::{buf_writer, process_rsp, Reader},
+        file::{buf_writer, process_rsp},
         signatures::{compare_signature, generate_signature, FunctionSignature},
         split::split_obj,
         IntoCow, ToCow,
@@ -544,7 +544,7 @@ fn info(args: InfoArgs) -> Result<()> {
     if let Some(comment_section) = in_file.section_by_name(".comment") {
         let data = comment_section.uncompressed_data()?;
         if !data.is_empty() {
-            let mut reader = Reader::new(&*data);
+            let mut reader = Cursor::new(&*data);
             let header =
                 MWComment::parse_header(&mut reader).context("While reading .comment section")?;
             println!("\nMetrowerks metadata (.comment):");
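
Throughout this commit the custom util::file::Reader is retired in favor of std::io::Cursor (or the new file.as_reader()), since a Cursor over a byte slice already provides the Read + Seek these parsers need. For reference, the std behavior being relied on:

use std::io::{Cursor, Read};

fn first_byte(data: &[u8]) -> std::io::Result<u8> {
    let mut reader = Cursor::new(data); // Cursor implements Read + Seek over &[u8]
    let mut buf = [0u8; 1];
    reader.read_exact(&mut buf)?;
    Ok(buf[0])
}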

src/cmd/mod.rs

@@ -6,6 +6,7 @@ pub mod elf;
 pub mod elf2dol;
 pub mod map;
 pub mod metroidbuildinfo;
+pub mod nlzss;
 pub mod rarc;
 pub mod rel;
 pub mod rso;

src/cmd/nlzss.rs (new file, 63 lines)
@@ -0,0 +1,63 @@
+use std::{fs, path::PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use argp::FromArgs;
+
+use crate::util::{
+    file::{open_file, process_rsp},
+    IntoCow, ToCow,
+};
+
+#[derive(FromArgs, PartialEq, Debug)]
+/// Commands for processing NLZSS-compressed files.
+#[argp(subcommand, name = "nlzss")]
+pub struct Args {
+    #[argp(subcommand)]
+    command: SubCommand,
+}
+
+#[derive(FromArgs, PartialEq, Debug)]
+#[argp(subcommand)]
+enum SubCommand {
+    Decompress(DecompressArgs),
+}
+
+#[derive(FromArgs, PartialEq, Eq, Debug)]
+/// Decompresses NLZSS-compressed files.
+#[argp(subcommand, name = "decompress")]
+pub struct DecompressArgs {
+    #[argp(positional)]
+    /// NLZSS-compressed file(s)
+    files: Vec<PathBuf>,
+    #[argp(option, short = 'o')]
+    /// Output file (or directory, if multiple files are specified).
+    /// If not specified, decompresses in-place.
+    output: Option<PathBuf>,
+}
+
+pub fn run(args: Args) -> Result<()> {
+    match args.command {
+        SubCommand::Decompress(args) => decompress(args),
+    }
+}
+
+fn decompress(args: DecompressArgs) -> Result<()> {
+    let files = process_rsp(&args.files)?;
+    let single_file = files.len() == 1;
+    for path in files {
+        let data = nintendo_lz::decompress(&mut open_file(&path)?)
+            .map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path.display(), e))?;
+        let out_path = if let Some(output) = &args.output {
+            if single_file {
+                output.as_path().to_cow()
+            } else {
+                output.join(path.file_name().unwrap()).into_cow()
+            }
+        } else {
+            path.as_path().to_cow()
+        };
+        fs::write(out_path.as_ref(), data)
+            .with_context(|| format!("Failed to write '{}'", out_path.display()))?;
+    }
+    Ok(())
+}
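
Example invocations of the new subcommand, assuming the usual dtk binary name: a single input may target an exact output file, multiple inputs target a directory, and omitting -o decompresses in place (overwriting the input):

dtk nlzss decompress compressed.bin -o decompressed.bin
dtk nlzss decompress files/*.lz -o out/
dtk nlzss decompress data.bin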

src/cmd/rel.rs (238 changed lines)
@@ -1,8 +1,7 @@
 use std::{
     collections::{btree_map, BTreeMap},
-    ffi::OsStr,
     fs,
-    io::Write,
+    io::{Write},
     path::PathBuf,
     time::Instant,
 };
@@ -10,7 +9,8 @@ use std::{
 use anyhow::{anyhow, bail, ensure, Context, Result};
 use argp::FromArgs;
 use object::{
-    Architecture, Endianness, Object, ObjectSection, ObjectSymbol, RelocationTarget, SymbolIndex,
+    Architecture, Endianness, File, Object, ObjectSection, ObjectSymbol, RelocationTarget,
+    SectionIndex, SymbolIndex,
 };
 use rayon::prelude::*;
 use rustc_hash::FxHashMap;
@@ -27,18 +27,18 @@ use crate::{
         tracker::Tracker,
     },
     array_ref_mut,
-    cmd::dol::ProjectConfig,
+    cmd::dol::{ModuleConfig, ProjectConfig},
     obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol},
     util::{
         config::is_auto_symbol,
         dol::process_dol,
         elf::{to_obj_reloc_kind, write_elf},
-        file::{
-            buf_reader, buf_writer, decompress_if_needed, map_file, process_rsp, verify_hash,
-            FileIterator, Reader,
-        },
+        file::{buf_reader, buf_writer, map_file, process_rsp, verify_hash, FileIterator},
         nested::NestedMap,
-        rel::{process_rel, process_rel_header, write_rel, RelHeader, RelReloc, RelWriteInfo},
+        rel::{
+            process_rel, process_rel_header, process_rel_sections, write_rel, RelHeader, RelReloc,
+            RelSectionHeader, RelWriteInfo, PERMITTED_SECTIONS,
+        },
         IntoCow, ToCow,
     },
 };
@@ -106,8 +106,8 @@ pub fn run(args: Args) -> Result<()> {
     }
 }

-fn load_obj(buf: &[u8]) -> Result<object::File> {
-    let obj = object::read::File::parse(buf)?;
+fn load_obj(buf: &[u8]) -> Result<File> {
+    let obj = File::parse(buf)?;
     match obj.architecture() {
         Architecture::PowerPc => {}
         arch => bail!("Unexpected architecture: {arch:?}"),
@@ -116,21 +116,134 @@ fn load_obj(buf: &[u8]) -> Result<object::File> {
     Ok(obj)
 }

+/// Attempt to match the section index from the ELF to the original REL.
+/// Our built ELFs may be missing sections that were present in the original RELs.
+fn match_section_index(
+    obj: &File,
+    section_index: SectionIndex,
+    rel_sections: &[RelSectionHeader],
+) -> Result<usize> {
+    let (_, _) = (obj, rel_sections);
+    Ok(section_index.0)
+    // TODO
+    // rel_sections
+    //     .iter()
+    //     .enumerate()
+    //     .filter(|(_, s)| s.size() > 0)
+    //     .zip(obj.sections().filter(|s| s.size() > 0))
+    //     .find_map(
+    //         |((rel_section_index, _), obj_section)| {
+    //             if obj_section.index() == section_index {
+    //                 Some(rel_section_index)
+    //             } else {
+    //                 None
+    //             }
+    //         },
+    //     )
+    //     .ok_or_else(|| {
+    //         anyhow!(
+    //             "Failed to find matching section index for {} ({}), REL section count: {}",
+    //             obj.section_by_index(section_index)
+    //                 .ok()
+    //                 .and_then(|s| s.name().ok().map(|s| s.to_string()))
+    //                 .unwrap_or("[invalid]".to_string()),
+    //             section_index.0,
+    //             rel_sections.len()
+    //         )
+    //     })
+}
+
+fn load_rel(module_config: &ModuleConfig) -> Result<(RelHeader, Vec<RelSectionHeader>)> {
+    let file = map_file(&module_config.object)?;
+    if let Some(hash_str) = &module_config.hash {
+        verify_hash(file.as_slice(), hash_str)?;
+    }
+    let mut reader = file.as_reader();
+    let header = process_rel_header(&mut reader)?;
+    let sections = process_rel_sections(&mut reader, &header)?;
+    Ok((header, sections))
+}
+
+fn resolve_relocations(
+    module: &File,
+    existing_headers: &BTreeMap<u32, (RelHeader, Vec<RelSectionHeader>)>,
+    module_id: usize,
+    symbol_map: &FxHashMap<&[u8], (usize, SymbolIndex)>,
+    modules: &[(File, PathBuf)],
+    relocations: &mut Vec<RelReloc>,
+) -> Result<usize> {
+    let mut resolved = 0usize;
+    for section in module.sections() {
+        if !matches!(section.name(), Ok(name) if PERMITTED_SECTIONS.contains(&name)) {
+            continue;
+        }
+        let section_index = if let Some((_, sections)) = existing_headers.get(&(module_id as u32)) {
+            match_section_index(module, section.index(), sections)?
+        } else {
+            section.index().0
+        } as u8;
+        for (address, reloc) in section.relocations() {
+            let reloc_target = match reloc.target() {
+                RelocationTarget::Symbol(idx) => {
+                    module.symbol_by_index(idx).with_context(|| {
+                        format!("Relocation against invalid symbol index {}", idx.0)
+                    })?
+                }
+                reloc_target => bail!("Unsupported relocation target: {reloc_target:?}"),
+            };
+            let (target_module_id, target_symbol) = if reloc_target.is_undefined() {
+                resolved += 1;
+                symbol_map
+                    .get(reloc_target.name_bytes()?)
+                    .map(|&(module_id, symbol_idx)| {
+                        (module_id, modules[module_id].0.symbol_by_index(symbol_idx).unwrap())
+                    })
+                    .ok_or_else(|| {
+                        anyhow!(
+                            "Failed to find symbol {} in any module",
+                            reloc_target.name().unwrap_or("[invalid]")
+                        )
+                    })?
+            } else {
+                (module_id, reloc_target)
+            };
+            let target_section_index = target_symbol.section_index().unwrap();
+            let target_section = if let Some((_, sections)) =
+                existing_headers.get(&(target_module_id as u32))
+            {
+                match_section_index(&modules[target_module_id].0, target_section_index, sections)?
+            } else {
+                target_section_index.0
+            } as u8;
+            relocations.push(RelReloc {
+                kind: to_obj_reloc_kind(reloc.kind())?,
+                section: section_index,
+                address: address as u32,
+                module_id: target_module_id as u32,
+                target_section,
+                addend: (target_symbol.address() as i64 + reloc.addend()) as u32,
+                // Extra
+                original_section: section.index().0 as u8,
+                original_target_section: target_section_index.0 as u8,
+            });
+        }
+    }
+    Ok(resolved)
+}
+
 fn make(args: MakeArgs) -> Result<()> {
     let total = Instant::now();

     // Load existing REL headers (if specified)
-    let mut existing_headers = BTreeMap::<u32, RelHeader>::new();
+    let mut existing_headers = BTreeMap::<u32, (RelHeader, Vec<RelSectionHeader>)>::new();
     if let Some(config_path) = &args.config {
         let config: ProjectConfig = serde_yaml::from_reader(&mut buf_reader(config_path)?)?;
         for module_config in &config.modules {
-            let map = map_file(&module_config.object)?;
-            let buf = decompress_if_needed(map.as_slice())?;
-            if let Some(hash_str) = &module_config.hash {
-                verify_hash(buf.as_ref(), hash_str)?;
-            }
-            let header = process_rel_header(&mut Reader::new(buf.as_ref()))?;
-            existing_headers.insert(header.module_id, header);
+            let _span = info_span!("module", name = %module_config.name()).entered();
+            let (header, sections) = load_rel(module_config).with_context(|| {
+                format!("While loading REL '{}'", module_config.object.display())
+            })?;
+            existing_headers.insert(header.module_id, (header, sections));
         }
     }

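
match_section_index is an identity mapping for now, with the intended implementation left as the commented-out TODO above: because a built ELF may omit sections that were empty in the original REL, non-empty REL section headers are zipped in order against non-empty ELF sections to recover the original index. A toy model of that zip, independent of the real object/REL types:

// Pair non-empty "REL" slots with non-empty "ELF" sections in order,
// recovering the original REL index for a given ELF section index.
// e.g. match_toy(&[0, 16, 32], &[16, 32], 0) == Some(1)
fn match_toy(rel_sizes: &[u32], elf_sizes: &[u32], elf_index: usize) -> Option<usize> {
    rel_sizes
        .iter()
        .enumerate()
        .filter(|&(_, &size)| size > 0)
        .zip(elf_sizes.iter().enumerate().filter(|&(_, &size)| size > 0))
        .find_map(|((rel_idx, _), (obj_idx, _))| (obj_idx == elf_index).then_some(rel_idx))
}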
@@ -144,6 +257,7 @@ fn make(args: MakeArgs) -> Result<()> {
         .zip(&paths)
         .map(|(file, path)| {
             load_obj(file.as_slice())
+                .map(|o| (o, path.clone()))
                 .with_context(|| format!("Failed to load '{}'", path.display()))
         })
         .collect::<Result<Vec<_>>>()?;
@@ -151,7 +265,8 @@ fn make(args: MakeArgs) -> Result<()> {
     // Create symbol map
     let start = Instant::now();
     let mut symbol_map = FxHashMap::<&[u8], (usize, SymbolIndex)>::default();
-    for (module_id, module) in modules.iter().enumerate() {
+    for (module_id, (module, path)) in modules.iter().enumerate() {
+        let _span = info_span!("file", path = %path.display()).entered();
         for symbol in module.symbols() {
             if symbol.is_definition() && symbol.scope() == object::SymbolScope::Dynamic {
                 symbol_map.entry(symbol.name_bytes()?).or_insert((module_id, symbol.index()));
@@ -163,45 +278,19 @@ fn make(args: MakeArgs) -> Result<()> {
     let mut resolved = 0usize;
     let mut relocations = Vec::<Vec<RelReloc>>::with_capacity(modules.len() - 1);
     relocations.resize_with(modules.len() - 1, Vec::new);
-    for ((module_id, module), relocations) in
+    for ((module_id, (module, path)), relocations) in
         modules.iter().enumerate().skip(1).zip(&mut relocations)
     {
-        for section in module.sections() {
-            for (address, reloc) in section.relocations() {
-                let reloc_target = match reloc.target() {
-                    RelocationTarget::Symbol(idx) => {
-                        module.symbol_by_index(idx).with_context(|| {
-                            format!("Relocation against invalid symbol index {}", idx.0)
-                        })?
-                    }
-                    reloc_target => bail!("Unsupported relocation target: {reloc_target:?}"),
-                };
-                let (target_module_id, target_symbol) = if reloc_target.is_undefined() {
-                    resolved += 1;
-                    symbol_map
-                        .get(reloc_target.name_bytes()?)
-                        .map(|&(module_id, symbol_idx)| {
-                            (module_id, modules[module_id].symbol_by_index(symbol_idx).unwrap())
-                        })
-                        .ok_or_else(|| {
-                            anyhow!(
-                                "Failed to find symbol {} in any module",
-                                reloc_target.name().unwrap_or("[invalid]")
-                            )
-                        })?
-                } else {
-                    (module_id, reloc_target)
-                };
-                relocations.push(RelReloc {
-                    kind: to_obj_reloc_kind(reloc.kind())?,
-                    section: section.index().0 as u8,
-                    address: address as u32,
-                    module_id: target_module_id as u32,
-                    target_section: target_symbol.section_index().unwrap().0 as u8,
-                    addend: (target_symbol.address() as i64 + reloc.addend()) as u32,
-                });
-            }
-        }
+        let _span = info_span!("file", path = %path.display()).entered();
+        resolved += resolve_relocations(
+            module,
+            &existing_headers,
+            module_id,
+            &symbol_map,
+            &modules,
+            relocations,
+        )
+        .with_context(|| format!("While resolving relocations in '{}'", path.display()))?;
     }

     let duration = start.elapsed();
@@ -214,12 +303,10 @@ fn make(args: MakeArgs) -> Result<()> {

     // Write RELs
     let start = Instant::now();
-    for (((module_id, module), path), relocations) in
-        modules.iter().enumerate().zip(&paths).skip(1).zip(relocations)
+    for ((module_id, (module, path)), relocations) in
+        modules.iter().enumerate().skip(1).zip(relocations)
     {
-        let name =
-            path.file_stem().unwrap_or(OsStr::new("[unknown]")).to_str().unwrap_or("[invalid]");
-        let _span = info_span!("module", name = %name).entered();
+        let _span = info_span!("file", path = %path.display()).entered();
         let mut info = RelWriteInfo {
             module_id: module_id as u32,
             version: 3,
@@ -230,13 +317,13 @@ fn make(args: MakeArgs) -> Result<()> {
             section_count: None,
             quiet: args.no_warn,
         };
-        if let Some(existing_module) = existing_headers.get(&(module_id as u32)) {
-            info.version = existing_module.version;
-            info.name_offset = Some(existing_module.name_offset);
-            info.name_size = Some(existing_module.name_size);
-            info.align = existing_module.align;
-            info.bss_align = existing_module.bss_align;
-            info.section_count = Some(existing_module.num_sections as usize);
+        if let Some((header, _)) = existing_headers.get(&(module_id as u32)) {
+            info.version = header.version;
+            info.name_offset = Some(header.name_offset);
+            info.name_size = Some(header.name_size);
+            info.align = header.align;
+            info.bss_align = header.bss_align;
+            info.section_count = Some(header.num_sections as usize);
         }
         let rel_path = path.with_extension("rel");
         let mut w = buf_writer(&rel_path)?;
@@ -254,8 +341,7 @@ fn make(args: MakeArgs) -> Result<()> {

 fn info(args: InfoArgs) -> Result<()> {
     let file = map_file(args.rel_file)?;
-    let buf = decompress_if_needed(file.as_slice())?;
-    let (header, mut module_obj) = process_rel(&mut Reader::new(buf.as_ref()), "")?;
+    let (header, mut module_obj) = process_rel(&mut file.as_reader(), "")?;

     let mut state = AnalyzerState::default();
     state.detect_functions(&module_obj)?;
@@ -319,9 +405,8 @@ fn merge(args: MergeArgs) -> Result<()> {
     log::info!("Loading {}", args.dol_file.display());
     let mut obj = {
         let file = map_file(&args.dol_file)?;
-        let buf = decompress_if_needed(file.as_slice())?;
         let name = args.dol_file.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
-        process_dol(buf.as_ref(), name.as_ref())?
+        process_dol(file.as_slice(), name.as_ref())?
     };

     log::info!("Performing signature analysis");
@@ -434,11 +519,14 @@ fn merge(args: MergeArgs) -> Result<()> {

     log::info!("Detecting function boundaries");
     let mut state = AnalyzerState::default();
+    FindSaveRestSleds::execute(&mut state, &obj)?;
     state.detect_functions(&obj)?;
-    log::info!("Discovered {} functions", state.function_slices.len());
+    log::info!(
+        "Discovered {} functions",
+        state.functions.iter().filter(|(_, i)| i.is_function()).count()
+    );

     FindTRKInterruptVectorTable::execute(&mut state, &obj)?;
-    FindSaveRestSleds::execute(&mut state, &obj)?;
     state.apply(&mut obj)?;

     apply_signatures_post(&mut obj)?;

src/cmd/rso.rs

@@ -1,12 +1,9 @@
 use std::path::PathBuf;

-use anyhow::{Context, Result};
+use anyhow::Result;
 use argp::FromArgs;

-use crate::util::{
-    file::{decompress_if_needed, map_file, Reader},
-    rso::process_rso,
-};
+use crate::util::{file::map_file, rso::process_rso};

 #[derive(FromArgs, PartialEq, Debug)]
 /// Commands for processing RSO files.
@@ -39,10 +36,10 @@ pub fn run(args: Args) -> Result<()> {

 fn info(args: InfoArgs) -> Result<()> {
     let rso = {
-        let file = map_file(&args.rso_file)?;
-        let data = decompress_if_needed(file.as_slice())
-            .with_context(|| format!("Failed to decompress '{}'", args.rso_file.display()))?;
-        process_rso(&mut Reader::new(data.as_ref()))?
+        let file = map_file(args.rso_file)?;
+        let obj = process_rso(&mut file.as_reader())?;
+        #[allow(clippy::let_and_return)]
+        obj
     };
     println!("Read RSO module {}", rso.name);
     Ok(())

src/cmd/yaz0.rs

@@ -30,7 +30,8 @@ pub struct DecompressArgs {
     /// YAZ0-compressed files
     files: Vec<PathBuf>,
     #[argp(option, short = 'o')]
-    /// Output directory. If not specified, decompresses in-place.
+    /// Output file (or directory, if multiple files are specified).
+    /// If not specified, decompresses in-place.
     output: Option<PathBuf>,
 }

@@ -41,10 +42,16 @@ pub fn run(args: Args) -> Result<()> {
 }

 fn decompress(args: DecompressArgs) -> Result<()> {
-    for path in process_rsp(&args.files)? {
+    let files = process_rsp(&args.files)?;
+    let single_file = files.len() == 1;
+    for path in files {
         let data = decompress_reader(&mut open_file(&path)?)?;
         let out_path = if let Some(output) = &args.output {
-            output.join(path.file_name().unwrap()).into_cow()
+            if single_file {
+                output.as_path().to_cow()
+            } else {
+                output.join(path.file_name().unwrap()).into_cow()
+            }
         } else {
             path.as_path().to_cow()
         };
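
With the single-file special case, yaz0 decompress gains the same -o semantics as the new nlzss command; example invocations (binary name assumed):

dtk yaz0 decompress file.szs -o file.arc
dtk yaz0 decompress assets/*.szs -o out/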