Mirror of https://github.com/encounter/decomp-toolkit.git (synced 2025-12-13 07:06:16 +00:00)
Working rel make & more
- Added `elf info`
- Improved `rel info`
- Colored output for `shasum`
- Fix section `rename` in RELs
- Added padding symbols to avoid linker issues
- Automatically set symbols to "active" in .comment output
src/cmd/dol.rs

@@ -12,7 +12,6 @@ use std::{
use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs;
use itertools::Itertools;
use memmap2::Mmap;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use tracing::{debug, info, info_span};
@@ -28,7 +27,6 @@ use crate::{
        signatures::{apply_signatures, apply_signatures_post},
        tracker::Tracker,
    },
    cmd::shasum::file_sha1,
    obj::{
        best_match_for_reloc, ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSectionKind,
        ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind, ObjSymbolScope, SymbolIndex,
@@ -43,13 +41,13 @@ use crate::{
        dep::DepFile,
        dol::process_dol,
        elf::{process_elf, write_elf},
        file::{buf_writer, map_file, map_reader, touch, Reader},
        file::{buf_writer, decompress_if_needed, map_file, touch, verify_hash, Reader},
        lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
        map::apply_map_file,
        rel::process_rel,
        rso::{process_rso, DOL_SECTION_ABS, DOL_SECTION_NAMES},
        split::{is_linker_generated_object, split_obj, update_splits},
        yaz0,
        IntoCow, ToCow,
    },
};

@@ -225,7 +223,7 @@ impl ModuleConfig {
    pub fn file_prefix(&self) -> Cow<'_, str> {
        match self.file_name() {
            Cow::Borrowed(s) => {
                Cow::Borrowed(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(&s))
                Cow::Borrowed(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(s))
            }
            Cow::Owned(s) => {
                Cow::Owned(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(&s).to_string())
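The `file_prefix` helper keeps everything before the first `.`, so names like `main.dol.yaz0` and `main.dol` share the prefix `main`; the change here only drops a redundant borrow. A quick illustration of the underlying `split_once` behavior:

```rust
// Everything before the first '.'; names without a '.' are returned unchanged.
fn file_prefix(name: &str) -> &str {
    name.split_once('.').map(|(prefix, _)| prefix).unwrap_or(name)
}

fn main() {
    assert_eq!(file_prefix("main.dol.yaz0"), "main");
    assert_eq!(file_prefix("module"), "module");
}
```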
@@ -379,40 +377,32 @@ fn info(args: InfoArgs) -> Result<()> {
        );
    }
    println!("\nDiscovered symbols:");
    println!("\t{: >23} | {: <10} | {: <10}", "Name", "Address", "Size");
    println!("\t{: >10} | {: <10} | {: <10} | {: <10}", "Section", "Address", "Size", "Name");
    for (_, symbol) in obj.symbols.iter_ordered().chain(obj.symbols.iter_abs()) {
        if symbol.name.starts_with('@') || is_auto_symbol(&symbol.name) {
        if symbol.name.starts_with('@') || is_auto_symbol(symbol) {
            continue;
        }
        if symbol.size_known {
            println!("\t{: >23} | {:#010X} | {: <#10X}", symbol.name, symbol.address, symbol.size);
        let section_str = if let Some(section) = symbol.section {
            obj.sections[section].name.as_str()
        } else {
            let size_str = if symbol.section.is_none() { "ABS" } else { "?" };
            println!("\t{: >23} | {:#010X} | {: <10}", symbol.name, symbol.address, size_str);
        }
            "ABS"
        };
        let size_str = if symbol.size_known {
            format!("{:#X}", symbol.size).into_cow()
        } else if symbol.section.is_none() {
            "ABS".to_cow()
        } else {
            "?".to_cow()
        };
        println!(
            "\t{: >10} | {: <#10X} | {: <10} | {: <10}",
            section_str, symbol.address, size_str, symbol.name
        );
    }
    println!("\n{} discovered functions from exception table", obj.known_functions.len());
    Ok(())
}

fn verify_hash<P: AsRef<Path>>(path: P, hash_str: &str) -> Result<()> {
    let mut hash_bytes = [0u8; 20];
    hex::decode_to_slice(hash_str, &mut hash_bytes)
        .with_context(|| format!("Invalid SHA-1 '{hash_str}'"))?;
    let file = File::open(path.as_ref())
        .with_context(|| format!("Failed to open file '{}'", path.as_ref().display()))?;
    let found_hash = file_sha1(file)?;
    if found_hash.as_ref() == hash_bytes {
        Ok(())
    } else {
        Err(anyhow!(
            "Hash mismatch: expected {}, but was {}",
            hex::encode(hash_bytes),
            hex::encode(found_hash)
        ))
    }
}

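`verify_hash` (moved into `util::file` by this commit) decodes the expected hex digest and compares it against the result of `file_sha1`. A minimal sketch of the streaming digest side, assuming the `sha1` crate; `sha1_of_reader` is an illustrative stand-in, not the project's `file_sha1`:

```rust
use std::io::{self, Read};

use sha1::{Digest, Sha1};

// Hash any reader in 64 KiB chunks; stand-in for the project's file_sha1 helper.
fn sha1_of_reader<R: Read>(mut reader: R) -> io::Result<Vec<u8>> {
    let mut hasher = Sha1::new();
    let mut buf = [0u8; 65536];
    loop {
        let n = reader.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }
    Ok(hasher.finalize().to_vec())
}
```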
type ModuleMap<'a> = BTreeMap<u32, (&'a ModuleConfig, ObjInfo)>;

fn update_symbols(obj: &mut ObjInfo, modules: &ModuleMap<'_>) -> Result<()> {
@@ -632,15 +622,9 @@ fn resolve_external_relocations(
    Ok(())
}

fn decompress_if_needed(map: &Mmap) -> Result<Cow<[u8]>> {
    Ok(if map.len() > 4 && map[0..4] == *b"Yaz0" {
        Cow::Owned(yaz0::decompress_file(&mut map_reader(map))?)
    } else {
        Cow::Borrowed(map)
    })
}
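This local `decompress_if_needed` (removed here in favor of the shared helper in `util::file`) only allocates when the `Yaz0` magic is present, otherwise it borrows the mapped bytes as-is. A standalone sketch of the same Cow-based dispatch; `decode_yaz0` is a placeholder for the real `yaz0::decompress_file`:

```rust
use std::borrow::Cow;

// Placeholder: a real implementation would decode the Yaz0 stream here.
fn decode_yaz0(data: &[u8]) -> Vec<u8> {
    data.to_vec()
}

// Borrow the input untouched unless it starts with the "Yaz0" magic bytes.
fn maybe_decompress(data: &[u8]) -> Cow<'_, [u8]> {
    if data.len() > 4 && &data[0..4] == b"Yaz0" {
        Cow::Owned(decode_yaz0(data))
    } else {
        Cow::Borrowed(data)
    }
}
```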
type AnalyzeResult = (ObjInfo, Vec<PathBuf>);

fn load_analyze_dol(config: &ProjectConfig) -> Result<(ObjInfo, Vec<PathBuf>)> {
fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
    // log::info!("Loading {}", config.object.display());
    if let Some(hash_str) = &config.base.hash {
        verify_hash(&config.base.object, hash_str)?;
@@ -697,7 +681,7 @@ fn split_write_obj(
    obj: &mut ObjInfo,
    config: &ProjectConfig,
    module_config: &ModuleConfig,
    out_dir: &PathBuf,
    out_dir: &Path,
    no_update: bool,
) -> Result<OutputModule> {
    debug!("Performing relocation analysis");
@@ -723,15 +707,15 @@ fn split_write_obj(
    if !no_update {
        debug!("Writing configuration");
        if let Some(symbols_path) = &module_config.symbols {
            write_symbols_file(symbols_path, &obj)?;
            write_symbols_file(symbols_path, obj)?;
        }
        if let Some(splits_path) = &module_config.splits {
            write_splits_file(splits_path, &obj, false)?;
            write_splits_file(splits_path, obj, false)?;
        }
    }

    debug!("Splitting {} objects", obj.link_order.len());
    let split_objs = split_obj(&obj)?;
    let split_objs = split_obj(obj)?;

    debug!("Writing object files");
    let obj_dir = out_dir.join("obj");
@@ -757,7 +741,7 @@ fn split_write_obj(
    }

    // Generate ldscript.lcf
    fs::write(&out_config.ldscript, generate_ldscript(&obj, config.auto_force_files)?)?;
    fs::write(&out_config.ldscript, generate_ldscript(obj, config.auto_force_files)?)?;

    debug!("Writing disassembly");
    let asm_dir = out_dir.join("asm");
@@ -772,17 +756,18 @@ fn split_write_obj(
    Ok(out_config)
}

fn load_analyze_rel(
    config: &ProjectConfig,
    module_config: &ModuleConfig,
) -> Result<(ObjInfo, Vec<PathBuf>)> {
fn load_analyze_rel(config: &ProjectConfig, module_config: &ModuleConfig) -> Result<AnalyzeResult> {
    debug!("Loading {}", module_config.object.display());
    if let Some(hash_str) = &module_config.hash {
        verify_hash(&module_config.object, hash_str)?;
    }
    let map = map_file(&module_config.object)?;
    let buf = decompress_if_needed(&map)?;
    let mut module_obj = process_rel(Reader::new(&buf))?;
    let (_, mut module_obj) = process_rel(&mut Reader::new(buf.as_ref()))?;

    if let Some(comment_version) = config.mw_comment_version {
        module_obj.mw_comment = Some(MWComment::new(comment_version)?);
    }

    let mut dep = vec![module_config.object.clone()];
    if let Some(map_path) = &module_config.map {
@@ -833,8 +818,8 @@ fn split(args: SplitArgs) -> Result<()> {
        module_count,
        rayon::current_num_threads()
    );
    let mut dol_result: Option<Result<(ObjInfo, Vec<PathBuf>)>> = None;
    let mut modules_result: Option<Result<Vec<(ObjInfo, Vec<PathBuf>)>>> = None;
    let mut dol_result: Option<Result<AnalyzeResult>> = None;
    let mut modules_result: Option<Result<Vec<AnalyzeResult>>> = None;
    let start = Instant::now();
    rayon::scope(|s| {
        // DOL
@@ -999,7 +984,7 @@ fn split(args: SplitArgs) -> Result<()> {
    // }

    let duration = command_start.elapsed();
    info!("Total duration: {}.{:03}s", duration.as_secs(), duration.subsec_millis());
    info!("Total time: {}.{:03}s", duration.as_secs(), duration.subsec_millis());
    Ok(())
}

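The split command runs the DOL analysis and the per-module REL analyses concurrently by spawning tasks on a rayon scope, each writing its result into an `Option`. A minimal sketch of that pattern, with placeholder work standing in for `load_analyze_dol` / `load_analyze_rel`:

```rust
// Run two independent analyses in parallel and collect their results.
fn analyze_in_parallel() -> (Option<anyhow::Result<u32>>, Option<anyhow::Result<Vec<u32>>>) {
    let mut dol_result: Option<anyhow::Result<u32>> = None;
    let mut modules_result: Option<anyhow::Result<Vec<u32>>> = None;
    rayon::scope(|s| {
        // Placeholder work; the real code calls load_analyze_dol here.
        s.spawn(|_| dol_result = Some(Ok(42)));
        // Placeholder work; the real code maps load_analyze_rel over every module.
        s.spawn(|_| modules_result = Some(Ok(vec![1, 2, 3])));
    });
    // rayon::scope joins all spawned tasks before returning.
    (dol_result, modules_result)
}
```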
@@ -1167,7 +1152,11 @@ fn diff(args: DiffArgs) -> Result<()> {
    log::info!("Loading {}", args.map_file.display());
    apply_map_file(&args.map_file, &mut linked_obj)?;

    for orig_sym in obj.symbols.iter().filter(|s| s.kind != ObjSymbolKind::Section) {
    for orig_sym in obj
        .symbols
        .iter()
        .filter(|s| !matches!(s.kind, ObjSymbolKind::Unknown | ObjSymbolKind::Section))
    {
        let Some(orig_section_index) = orig_sym.section else { continue };
        let orig_section = &obj.sections[orig_section_index];
        let (linked_section_index, linked_section) =
@@ -1244,7 +1233,9 @@ fn diff(args: DiffArgs) -> Result<()> {
    }

    // Data diff
    for orig_sym in obj.symbols.iter().filter(|s| s.kind != ObjSymbolKind::Section) {
    for orig_sym in obj.symbols.iter().filter(|s| {
        s.size > 0 && !matches!(s.kind, ObjSymbolKind::Unknown | ObjSymbolKind::Section)
    }) {
        let Some(orig_section_index) = orig_sym.section else { continue };
        let orig_section = &obj.sections[orig_section_index];
        let (linked_section_index, linked_section) =

src/cmd/elf.rs (132 changed lines)
@@ -19,11 +19,13 @@ use crate::{
    obj::ObjKind,
    util::{
        asm::write_asm,
        comment::{read_comment_sym, MWComment},
        config::{write_splits_file, write_symbols_file},
        elf::{process_elf, write_elf},
        file::{buf_writer, process_rsp},
        file::{buf_writer, process_rsp, Reader},
        signatures::{compare_signature, generate_signature, FunctionSignature},
        split::split_obj,
        IntoCow, ToCow,
    },
};

@@ -43,6 +45,7 @@ enum SubCommand {
    Fixup(FixupArgs),
    Signatures(SignaturesArgs),
    Split(SplitArgs),
    Info(InfoArgs),
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
@@ -108,6 +111,15 @@ pub struct SignaturesArgs {
    out_file: PathBuf,
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Prints information about an ELF file.
#[argp(subcommand, name = "info")]
pub struct InfoArgs {
    #[argp(positional)]
    /// input file
    input: PathBuf,
}

pub fn run(args: Args) -> Result<()> {
    match args.command {
        SubCommand::Config(c_args) => config(c_args),
@@ -115,6 +127,7 @@ pub fn run(args: Args) -> Result<()> {
        SubCommand::Fixup(c_args) => fixup(c_args),
        SubCommand::Split(c_args) => split(c_args),
        SubCommand::Signatures(c_args) => signatures(c_args),
        SubCommand::Info(c_args) => info(c_args),
    }
}

@@ -467,3 +480,120 @@ fn signatures(args: SignaturesArgs) -> Result<()> {
    out.flush()?;
    Ok(())
}

fn info(args: InfoArgs) -> Result<()> {
    let in_buf = fs::read(&args.input)
        .with_context(|| format!("Failed to open input file: '{}'", args.input.display()))?;
    let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?;

    println!("ELF type: {:?}", in_file.kind());
    println!("Section count: {}", in_file.sections().count());
    println!("Symbol count: {}", in_file.symbols().count());
    println!(
        "Relocation count: {}",
        in_file.sections().map(|s| s.relocations().count()).sum::<usize>()
    );

    println!("\nSections:");
    println!(
        "{: >15} | {: <10} | {: <10} | {: <10} | {: <10}",
        "Name", "Type", "Size", "File Off", "Index"
    );
    for section in in_file.sections().skip(1) {
        let kind_str = match section.kind() {
            SectionKind::Text => "code".to_cow(),
            SectionKind::Data => "data".to_cow(),
            SectionKind::ReadOnlyData => "rodata".to_cow(),
            SectionKind::UninitializedData => "bss".to_cow(),
            SectionKind::Metadata => continue, // "metadata".to_cow()
            SectionKind::Other => "other".to_cow(),
            _ => format!("unknown: {:?}", section.kind()).into_cow(),
        };
        println!(
            "{: >15} | {: <10} | {: <#10X} | {: <#10X} | {: <10}",
            section.name()?,
            kind_str,
            section.size(),
            section.file_range().unwrap_or_default().0,
            section.index().0
        );
    }

    println!("\nSymbols:");
    println!("{: >15} | {: <10} | {: <10} | {: <10}", "Section", "Address", "Size", "Name");
    for symbol in in_file.symbols().filter(|s| s.is_definition()) {
        let section_str = if let Some(section) = symbol.section_index() {
            in_file.section_by_index(section)?.name()?.to_string().into_cow()
        } else {
            "ABS".to_cow()
        };
        let size_str = if symbol.section_index().is_none() {
            "ABS".to_cow()
        } else {
            format!("{:#X}", symbol.size()).into_cow()
        };
        println!(
            "{: >15} | {: <#10X} | {: <10} | {: <10}",
            section_str,
            symbol.address(),
            size_str,
            symbol.name()?
        );
    }

    if let Some(comment_section) = in_file.section_by_name(".comment") {
        let data = comment_section.uncompressed_data()?;
        if !data.is_empty() {
            let mut reader = Reader::new(&*data);
            let header =
                MWComment::parse_header(&mut reader).context("While reading .comment section")?;
            println!("\nMetrowerks metadata (.comment):");
            println!("\tVersion: {}", header.version);
            println!(
                "\tCompiler version: {}.{}.{}.{}",
                header.compiler_version[0],
                header.compiler_version[1],
                header.compiler_version[2],
                header.compiler_version[3]
            );
            println!("\tPool data: {}", header.pool_data);
            println!("\tFloat: {:?}", header.float);
            println!(
                "\tProcessor: {}",
                if header.processor == 0x16 {
                    "Gekko".to_cow()
                } else {
                    format!("{:#X}", header.processor).into_cow()
                }
            );
            println!(
                "\tIncompatible return small structs: {}",
                header.incompatible_return_small_structs
            );
            println!(
                "\tIncompatible sfpe double params: {}",
                header.incompatible_sfpe_double_params
            );
            println!("\tUnsafe global reg vars: {}", header.unsafe_global_reg_vars);
            println!("\n{: >10} | {: <6} | {: <6} | {: <10}", "Align", "Vis", "Active", "Symbol");
            for symbol in in_file.symbols() {
                let comment_sym = read_comment_sym(&mut reader)?;
                if symbol.is_definition() {
                    println!(
                        "{: >10} | {: <#6X} | {: <#6X} | {: <10}",
                        comment_sym.align,
                        comment_sym.vis_flags,
                        comment_sym.active_flags,
                        symbol.name()?
                    );
                }
            }
            ensure!(
                data.len() - reader.position() as usize == 0,
                ".comment section data not fully read"
            );
        }
    }

    Ok(())
}

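The new `elf info` output above is built on the `object` crate's generic ELF reader. A minimal standalone sketch of the same section walk, without the project's Cow helpers:

```rust
use anyhow::{Context, Result};
use object::{Object, ObjectSection};

// Print name, kind, and size for every section of an ELF file.
fn list_sections(path: &str) -> Result<()> {
    let data = std::fs::read(path).with_context(|| format!("Failed to read '{path}'"))?;
    let file = object::read::File::parse(&*data).context("Failed to parse ELF")?;
    for section in file.sections() {
        println!("{:<20} {:?} {:#X}", section.name()?, section.kind(), section.size());
    }
    Ok(())
}
```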
src/cmd/rel.rs (247 changed lines)
@@ -1,27 +1,45 @@
use std::{
    collections::{btree_map, BTreeMap},
    ffi::OsStr,
    fs,
    io::Write,
    path::PathBuf,
    time::Instant,
};

use anyhow::{bail, ensure, Context, Result};
use anyhow::{anyhow, bail, ensure, Context, Result};
use argp::FromArgs;
use object::{
    Architecture, Endianness, Object, ObjectSection, ObjectSymbol, RelocationTarget, SymbolIndex,
};
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use tracing::{info, info_span};

use crate::{
    analysis::{
        cfa::{AnalyzerState, SectionAddress},
        pass::{AnalysisPass, FindSaveRestSleds, FindTRKInterruptVectorTable},
        pass::{
            AnalysisPass, FindRelCtorsDtors, FindRelRodataData, FindSaveRestSleds,
            FindTRKInterruptVectorTable,
        },
        signatures::{apply_signatures, apply_signatures_post},
        tracker::Tracker,
    },
    array_ref_mut,
    obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSymbol},
    cmd::dol::ProjectConfig,
    obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol},
    util::{
        config::is_auto_symbol,
        dol::process_dol,
        elf::write_elf,
        file::{map_file, map_reader, FileIterator},
        elf::{to_obj_reloc_kind, write_elf},
        file::{
            buf_reader, buf_writer, decompress_if_needed, map_file, process_rsp, verify_hash,
            FileIterator, Reader,
        },
        nested::NestedMap,
        rel::process_rel,
        rel::{process_rel, process_rel_header, write_rel, RelHeader, RelReloc, RelWriteInfo},
        IntoCow, ToCow,
    },
};

@@ -37,6 +55,7 @@ pub struct Args {
    #[argp(subcommand)]
enum SubCommand {
    Info(InfoArgs),
    Make(MakeArgs),
    Merge(MergeArgs),
}

@@ -64,17 +83,227 @@ pub struct MergeArgs {
    out_file: PathBuf,
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Creates RELs from an ELF + PLF(s).
#[argp(subcommand, name = "make")]
pub struct MakeArgs {
    #[argp(positional)]
    /// input file(s)
    files: Vec<PathBuf>,
    #[argp(option, short = 'c')]
    /// (optional) project configuration file
    config: Option<PathBuf>,
}

pub fn run(args: Args) -> Result<()> {
    match args.command {
        SubCommand::Info(c_args) => info(c_args),
        SubCommand::Merge(c_args) => merge(c_args),
        SubCommand::Make(c_args) => make(c_args),
    }
}

fn load_obj(buf: &[u8]) -> Result<object::File> {
    let obj = object::read::File::parse(buf)?;
    match obj.architecture() {
        Architecture::PowerPc => {}
        arch => bail!("Unexpected architecture: {arch:?}"),
    };
    ensure!(obj.endianness() == Endianness::Big, "Expected big endian");
    Ok(obj)
}

fn make(args: MakeArgs) -> Result<()> {
    let total = Instant::now();

    // Load existing REL headers (if specified)
    let mut existing_headers = BTreeMap::<u32, RelHeader>::new();
    if let Some(config_path) = &args.config {
        let config: ProjectConfig = serde_yaml::from_reader(&mut buf_reader(config_path)?)?;
        for module_config in &config.modules {
            if let Some(hash_str) = &module_config.hash {
                verify_hash(&module_config.object, hash_str)?;
            }
            let map = map_file(&module_config.object)?;
            let buf = decompress_if_needed(&map)?;
            let header = process_rel_header(&mut Reader::new(buf.as_ref()))?;
            existing_headers.insert(header.module_id, header);
        }
    }

    let files = process_rsp(&args.files)?;
    info!("Loading {} modules", files.len());

    // Load all modules
    let handles = files.iter().map(map_file).collect::<Result<Vec<_>>>()?;
    let modules = handles
        .par_iter()
        .zip(&files)
        .map(|(map, path)| {
            load_obj(map).with_context(|| format!("Failed to load '{}'", path.display()))
        })
        .collect::<Result<Vec<_>>>()?;

    // Create symbol map
    let start = Instant::now();
    let mut symbol_map = FxHashMap::<&[u8], (usize, SymbolIndex)>::default();
    for (module_id, module) in modules.iter().enumerate() {
        for symbol in module.symbols() {
            if symbol.is_definition() && symbol.scope() == object::SymbolScope::Dynamic {
                symbol_map.entry(symbol.name_bytes()?).or_insert((module_id, symbol.index()));
            }
        }
    }

    // Resolve relocations
    let mut resolved = 0usize;
    let mut relocations = Vec::<Vec<RelReloc>>::with_capacity(modules.len() - 1);
    relocations.resize_with(modules.len() - 1, Vec::new);
    for ((module_id, module), relocations) in
        modules.iter().enumerate().skip(1).zip(&mut relocations)
    {
        for section in module.sections() {
            for (address, reloc) in section.relocations() {
                let reloc_target = match reloc.target() {
                    RelocationTarget::Symbol(idx) => {
                        module.symbol_by_index(idx).with_context(|| {
                            format!("Relocation against invalid symbol index {}", idx.0)
                        })?
                    }
                    reloc_target => bail!("Unsupported relocation target: {reloc_target:?}"),
                };
                let (target_module_id, target_symbol) = if reloc_target.is_undefined() {
                    resolved += 1;
                    symbol_map
                        .get(reloc_target.name_bytes()?)
                        .map(|&(module_id, symbol_idx)| {
                            (module_id, modules[module_id].symbol_by_index(symbol_idx).unwrap())
                        })
                        .ok_or_else(|| {
                            anyhow!(
                                "Failed to find symbol {} in any module",
                                reloc_target.name().unwrap_or("[invalid]")
                            )
                        })?
                } else {
                    (module_id, reloc_target)
                };
                relocations.push(RelReloc {
                    kind: to_obj_reloc_kind(reloc.kind())?,
                    section: section.index().0 as u8,
                    address: address as u32,
                    module_id: target_module_id as u32,
                    target_section: target_symbol.section_index().unwrap().0 as u8,
                    addend: target_symbol.address() as u32,
                });
            }
        }
    }

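Each REL relocation ultimately targets a (module, section, addend) triple rather than a symbol name, so an undefined symbol in one module has to be resolved against the exports of every loaded module. A condensed sketch of that lookup with plain types, assuming a prebuilt export map like the `symbol_map` above:

```rust
use std::collections::HashMap;

// (defining module, section index, section-relative offset) for an exported symbol.
type Export = (u32, u8, u32);

// Locally defined targets stay in the current module; undefined names are
// looked up in the global export map built from every module's symbols.
fn resolve_target(
    exports: &HashMap<String, Export>,
    current_module: u32,
    local_def: Option<(u8, u32)>,
    name: &str,
) -> Option<Export> {
    match local_def {
        Some((section, offset)) => Some((current_module, section, offset)),
        None => exports.get(name).copied(),
    }
}
```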
    let duration = start.elapsed();
    info!(
        "Symbol resolution completed in {}.{:03}s (resolved {} symbols)",
        duration.as_secs(),
        duration.subsec_millis(),
        resolved
    );

    // Write RELs
    let start = Instant::now();
    for (((module_id, module), path), relocations) in
        modules.iter().enumerate().zip(&files).skip(1).zip(relocations)
    {
        let name =
            path.file_stem().unwrap_or(OsStr::new("[unknown]")).to_str().unwrap_or("[invalid]");
        let _span = info_span!("module", name = %name).entered();
        let mut info = RelWriteInfo {
            module_id: module_id as u32,
            version: 3,
            name_offset: None,
            name_size: None,
            align: None,
            bss_align: None,
            section_count: None,
        };
        if let Some(existing_module) = existing_headers.get(&(module_id as u32)) {
            info.version = existing_module.version;
            info.name_offset = Some(existing_module.name_offset);
            info.name_size = Some(existing_module.name_size);
            info.align = existing_module.align;
            info.bss_align = existing_module.bss_align;
            info.section_count = Some(existing_module.num_sections as usize);
        }
        let rel_path = path.with_extension("rel");
        let mut w = buf_writer(&rel_path)?;
        write_rel(&mut w, &info, module, relocations)
            .with_context(|| format!("Failed to write '{}'", rel_path.display()))?;
        w.flush()?;
    }
    let duration = start.elapsed();
    info!("RELs written in {}.{:03}s", duration.as_secs(), duration.subsec_millis());

    let duration = total.elapsed();
    info!("Total time: {}.{:03}s", duration.as_secs(), duration.subsec_millis());
    Ok(())
}

fn info(args: InfoArgs) -> Result<()> {
    let map = map_file(args.rel_file)?;
    let rel = process_rel(map_reader(&map))?;
    println!("Read REL module ID {}", rel.module_id);
    let buf = decompress_if_needed(&map)?;
    let (header, mut module_obj) = process_rel(&mut Reader::new(buf.as_ref()))?;

    let mut state = AnalyzerState::default();
    state.detect_functions(&module_obj)?;
    FindRelCtorsDtors::execute(&mut state, &module_obj)?;
    FindRelRodataData::execute(&mut state, &module_obj)?;
    state.apply(&mut module_obj)?;

    apply_signatures(&mut module_obj)?;
    apply_signatures_post(&mut module_obj)?;

    println!("REL module ID: {}", header.module_id);
    println!("REL version: {}", header.version);
    println!("Original section count: {}", header.num_sections);
    println!("\nSections:");
    println!(
        "{: >10} | {: <10} | {: <10} | {: <10} | {: <10}",
        "Name", "Type", "Size", "File Off", "Index"
    );
    for (_, section) in module_obj.sections.iter() {
        let kind_str = match section.kind {
            ObjSectionKind::Code => "code",
            ObjSectionKind::Data => "data",
            ObjSectionKind::ReadOnlyData => "rodata",
            ObjSectionKind::Bss => "bss",
        };
        println!(
            "{: >10} | {: <10} | {: <#10X} | {: <#10X} | {: <10}",
            section.name, kind_str, section.size, section.file_offset, section.elf_index
        );
    }
    println!("\nDiscovered symbols:");
    println!("{: >10} | {: <10} | {: <10} | {: <10}", "Section", "Address", "Size", "Name");
    for (_, symbol) in module_obj.symbols.iter_ordered() {
        if symbol.name.starts_with('@') || is_auto_symbol(symbol) {
            continue;
        }
        let section_str = if let Some(section) = symbol.section {
            module_obj.sections[section].name.as_str()
        } else {
            "ABS"
        };
        let size_str = if symbol.size_known {
            format!("{:#X}", symbol.size).into_cow()
        } else if symbol.section.is_none() {
            "ABS".to_cow()
        } else {
            "?".to_cow()
        };
        println!(
            "{: >10} | {: <#10X} | {: <10} | {: <10}",
            section_str, symbol.address, size_str, symbol.name
        );
    }
    Ok(())
}

@@ -94,7 +323,7 @@ fn merge(args: MergeArgs) -> Result<()> {
    for result in FileIterator::new(&args.rel_files)? {
        let (path, entry) = result?;
        log::info!("Loading {}", path.display());
        let obj = process_rel(entry.as_reader())?;
        let (_, obj) = process_rel(&mut entry.as_reader())?;
        match module_map.entry(obj.module_id) {
            btree_map::Entry::Vacant(e) => e.insert(obj),
            btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),

src/cmd/shasum.rs

@@ -6,6 +6,7 @@ use std::{

use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs;
use owo_colors::OwoColorize;
use sha1::{Digest, Sha1};

use crate::util::file::{process_rsp, touch};
@@ -66,14 +67,17 @@ fn check(file: File) -> Result<()> {
            File::open(file_name).with_context(|| format!("Failed to open file '{file_name}'"))?;
        let found_hash = file_sha1(file)?;
        if hash_bytes == found_hash.as_ref() {
            println!("{file_name}: OK");
            println!("{}: {}", file_name, "OK".green());
        } else {
            println!("{file_name}: FAILED");
            println!("{}: {}", file_name, "FAILED".red());
            mismatches += 1;
        }
    }
    if mismatches != 0 {
        eprintln!("WARNING: {mismatches} computed checksum did NOT match");
        eprintln!(
            "{}",
            format!("WARNING: {mismatches} computed checksum(s) did NOT match").yellow()
        );
        std::process::exit(1);
    }
    Ok(())

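The colored `shasum` output relies on the owo-colors `OwoColorize` extension trait imported above, which adds methods like `.green()`, `.red()`, and `.yellow()` to anything printable. A minimal sketch of the same OK/FAILED coloring, assuming owo-colors is available:

```rust
use owo_colors::OwoColorize;

// Print a shasum-style status line: green for a match, red for a mismatch.
fn print_status(file_name: &str, matched: bool) {
    if matched {
        println!("{}: {}", file_name, "OK".green());
    } else {
        println!("{}: {}", file_name, "FAILED".red());
    }
}
```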