Mirror of https://github.com/encounter/decomp-toolkit.git (synced 2025-12-11 22:44:15 +00:00)
Semi-working REL analysis & splitting
@@ -24,13 +24,13 @@ pub struct SectionAddress {
impl Debug for SectionAddress {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}:{:#010X}", self.section as isize, self.address)
write!(f, "{}:{:#X}", self.section as isize, self.address)
}
}

impl Display for SectionAddress {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}:{:#010X}", self.section as isize, self.address)
write!(f, "{}:{:#X}", self.section as isize, self.address)
}
}

@@ -131,9 +131,19 @@ impl AnalyzerState {
section.address,
section.address + section.size
);
let address_str = if obj.module_id == 0 {
format!("{:08X}", addr.address)
} else {
format!(
"{}_{}_{:X}",
obj.module_id,
section.name.trim_start_matches('.'),
addr.address
)
};
obj.add_symbol(
ObjSymbol {
name: format!("jumptable_{:08X}", addr.address),
name: format!("jumptable_{}", address_str),
demangled_name: None,
address: addr.address as u64,
section: Some(addr.section),

@@ -5,7 +5,7 @@ use crate::{
util::split::is_linker_generated_label,
};

pub fn detect_object_boundaries(obj: &mut ObjInfo) -> Result<()> {
pub fn detect_objects(obj: &mut ObjInfo) -> Result<()> {
for (section_index, section) in
obj.sections.iter_mut().filter(|(_, s)| s.kind != ObjSectionKind::Code)
{

@@ -151,8 +151,9 @@ impl AnalysisPass for FindRelCtorsDtors {
let possible_sections = obj
.sections
.iter()
.filter(|&(_, section)| {
.filter(|&(index, section)| {
if section.section_known
|| state.known_sections.contains_key(&index)
|| !matches!(section.kind, ObjSectionKind::Data | ObjSectionKind::ReadOnlyData)
|| section.size < 4
{

@@ -283,3 +284,40 @@ impl AnalysisPass for FindRelCtorsDtors {
Ok(())
}
}

pub struct FindRelRodataData {}

impl AnalysisPass for FindRelRodataData {
fn execute(state: &mut AnalyzerState, obj: &ObjInfo) -> Result<()> {
ensure!(obj.kind == ObjKind::Relocatable);

match (obj.sections.by_name(".rodata")?, obj.sections.by_name(".data")?) {
(None, None) => {}
_ => return Ok(()),
}

let possible_sections = obj
.sections
.iter()
.filter(|&(index, section)| {
!section.section_known
&& !state.known_sections.contains_key(&index)
&& matches!(section.kind, ObjSectionKind::Data | ObjSectionKind::ReadOnlyData)
})
.collect_vec();

if possible_sections.len() != 2 {
log::warn!("Failed to find .rodata and .data");
return Ok(());
}

log::debug!("Found .rodata and .data: {:?}", possible_sections);
let rodata_section_index = possible_sections[0].0;
state.known_sections.insert(rodata_section_index, ".rodata".to_string());

let data_section_index = possible_sections[1].0;
state.known_sections.insert(data_section_index, ".data".to_string());

Ok(())
}
}

@@ -309,8 +309,8 @@ impl FunctionSlices {
}
}
}
BranchTarget::JumpTable { .. } => {
bail!("Conditional jump table unsupported @ {:#010X}", ins_addr);
BranchTarget::JumpTable { address, size } => {
bail!("Conditional jump table unsupported @ {:#010X} -> {:#010X} size {:#X?}", ins_addr, address, size);
}
}
}

@@ -5,6 +5,8 @@ use std::{

use anyhow::{bail, Result};
use ppc750cl::Opcode;
use tracing::{debug_span, info_span};
use tracing_attributes::instrument;

use crate::{
analysis::{

@@ -88,8 +90,8 @@ impl Tracker {
}
}

#[instrument(name = "tracker", skip(self, obj))]
pub fn process(&mut self, obj: &ObjInfo) -> Result<()> {
log::debug!("Processing code sections");
self.process_code(obj)?;
for (section_index, section) in obj
.sections

@@ -151,6 +153,7 @@ impl Tracker {
) -> Result<ExecCbResult<()>> {
let ExecCbData { executor, vm, result, ins_addr, section: _, ins, block_start: _ } = data;
let is_function_addr = |addr: SectionAddress| addr >= function_start && addr < function_end;
let _span = debug_span!("ins", addr = %ins_addr, op = ?ins.op).entered();

match result {
StepResult::Continue => {

@@ -310,8 +313,20 @@ impl Tracker {
executor.push(addr, branch.vm, true);
}
}
BranchTarget::JumpTable { .. } => {
bail!("Conditional jump table unsupported @ {:#010X}", ins_addr)
BranchTarget::JumpTable { address, size } => {
let (entries, _) = uniq_jump_table_entries(
obj,
address,
size,
ins_addr,
function_start,
Some(function_end),
)?;
for target in entries {
if is_function_addr(target) {
executor.push(target, branch.vm.clone_all(), true);
}
}
}
}
}

@@ -326,6 +341,9 @@ impl Tracker {
};
let function_start = SectionAddress::new(section_index, symbol.address as u32);
let function_end = function_start + symbol.size as u32;
let _span =
info_span!("fn", name = %symbol.name, start = %function_start, end = %function_end)
.entered();

// The compiler can sometimes create impossible-to-reach branches,
// but we still want to track them.

@@ -461,6 +479,7 @@ impl Tracker {
.or_else(|| check_symbol(self.sda_base, "_SDA_BASE_"))
}

#[instrument(name = "apply", skip(self, obj))]
pub fn apply(&self, obj: &mut ObjInfo, replace: bool) -> Result<()> {
fn apply_section_name(section: &mut ObjSection, name: &str) {
let module_id = if let Some((_, b)) = section.name.split_once(':') {

@@ -126,8 +126,11 @@ pub fn section_address_for(
let (section_index, _) = obj.sections.at_address(target_addr).ok()?;
return Some(SectionAddress::new(section_index, target_addr));
}
// TODO: relative jumps within relocatable objects?
None
if obj.sections[ins_addr.section].contains(target_addr) {
Some(SectionAddress::new(ins_addr.section, target_addr))
} else {
None
}
}

impl VM {

@@ -180,11 +183,11 @@ impl VM {
pub fn clone_all(&self) -> Box<Self> { Box::new(self.clone()) }

pub fn step(&mut self, obj: &ObjInfo, ins_addr: SectionAddress, ins: &Ins) -> StepResult {
let relocation_target = relocation_target_for(obj, ins_addr, None).ok().flatten();
if let Some(_target) = relocation_target {
let _defs = ins.defs();
// TODO
}
// let relocation_target = relocation_target_for(obj, ins_addr, None).ok().flatten();
// if let Some(_target) = relocation_target {
// let _defs = ins.defs();
// // TODO
// }

match ins.op {
Opcode::Illegal => {

src/cmd/dol.rs (618 changed lines)
@@ -1,45 +1,55 @@
use std::{
borrow::Cow,
collections::{btree_map::Entry, hash_map, BTreeMap, HashMap},
fs,
fs::{DirBuilder, File},
io::Write,
mem::take,
path::{Path, PathBuf},
time::Instant,
};

use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs;
use itertools::Itertools;
use memmap2::Mmap;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use tracing::{debug, info, info_span};

use crate::{
analysis::{
cfa::{AnalyzerState, SectionAddress},
objects::{detect_object_boundaries, detect_strings},
pass::{AnalysisPass, FindRelCtorsDtors, FindSaveRestSleds, FindTRKInterruptVectorTable},
objects::{detect_objects, detect_strings},
pass::{
AnalysisPass, FindRelCtorsDtors, FindRelRodataData, FindSaveRestSleds,
FindTRKInterruptVectorTable,
},
signatures::{apply_signatures, apply_signatures_post},
tracker::Tracker,
},
cmd::shasum::file_sha1,
obj::{
ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSectionKind, ObjSymbol, ObjSymbolFlagSet,
ObjSymbolFlags, ObjSymbolKind, ObjSymbolScope, SymbolIndex,
best_match_for_reloc, ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSectionKind,
ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind, ObjSymbolScope, SymbolIndex,
},
util::{
asm::write_asm,
comment::MWComment,
config::{
apply_splits, apply_symbols_file, is_auto_symbol, write_splits_file, write_symbols_file,
apply_splits_file, apply_symbols_file, is_auto_symbol, write_splits_file,
write_symbols_file,
},
dep::DepFile,
dol::process_dol,
elf::{process_elf, write_elf},
file::{buf_writer, map_file, map_reader, touch},
file::{buf_writer, map_file, map_reader, touch, Reader},
lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
map::apply_map_file,
rel::process_rel,
rso::{process_rso, DOL_SECTION_ABS, DOL_SECTION_NAMES},
split::{is_linker_generated_object, split_obj, update_splits},
yaz0,
},
};

@@ -85,6 +95,9 @@ pub struct SplitArgs {
#[argp(switch)]
/// skip updating splits & symbol files (for build systems)
no_update: bool,
#[argp(option, short = 'j')]
/// number of threads to use (default: number of logical CPUs)
jobs: Option<usize>,
}

#[derive(FromArgs, PartialEq, Eq, Debug)]

@@ -120,12 +133,52 @@ pub struct ApplyArgs {
#[inline]
fn bool_true() -> bool { true }

mod path_slash_serde {
use std::path::PathBuf;

use path_slash::PathBufExt as _;
use serde::{self, Deserialize, Deserializer, Serializer};

pub fn serialize<S>(path: &PathBuf, s: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let path_str = path.to_slash().ok_or_else(|| serde::ser::Error::custom("Invalid path"))?;
s.serialize_str(path_str.as_ref())
}

pub fn deserialize<'de, D>(deserializer: D) -> Result<PathBuf, D::Error>
where D: Deserializer<'de> {
String::deserialize(deserializer).map(PathBuf::from_slash)
}
}

mod path_slash_serde_option {
use std::path::PathBuf;

use path_slash::PathBufExt as _;
use serde::{self, Deserialize, Deserializer, Serializer};

pub fn serialize<S>(path: &Option<PathBuf>, s: S) -> Result<S::Ok, S::Error>
where S: Serializer {
if let Some(path) = path {
let path_str =
path.to_slash().ok_or_else(|| serde::ser::Error::custom("Invalid path"))?;
s.serialize_str(path_str.as_ref())
} else {
s.serialize_none()
}
}

pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error>
where D: Deserializer<'de> {
Ok(Option::deserialize(deserializer)?.map(|s: String| PathBuf::from_slash(s)))
}
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ProjectConfig {
pub object: PathBuf,
pub hash: Option<String>,
pub splits: Option<PathBuf>,
pub symbols: Option<PathBuf>,
#[serde(flatten)]
pub base: ModuleConfig,
#[serde(with = "path_slash_serde_option", default)]
pub selfile: Option<PathBuf>,
pub selfile_hash: Option<String>,
/// Version of the MW `.comment` section format.

@@ -147,34 +200,63 @@ pub struct ProjectConfig {
/// Adds all objects to FORCEFILES in the linker script.
#[serde(default)]
pub auto_force_files: bool,
/// Specifies the start of the common BSS section.
pub common_start: Option<u32>,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ModuleConfig {
#[serde(with = "path_slash_serde")]
pub object: PathBuf,
pub hash: Option<String>,
#[serde(with = "path_slash_serde_option", default)]
pub splits: Option<PathBuf>,
#[serde(with = "path_slash_serde_option", default)]
pub symbols: Option<PathBuf>,
#[serde(with = "path_slash_serde_option", default)]
pub map: Option<PathBuf>,
}

impl ModuleConfig {
pub fn file_name(&self) -> Cow<'_, str> {
self.object.file_name().unwrap_or(self.object.as_os_str()).to_string_lossy()
}

pub fn file_prefix(&self) -> Cow<'_, str> {
match self.file_name() {
Cow::Borrowed(s) => {
Cow::Borrowed(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(&s))
}
Cow::Owned(s) => {
Cow::Owned(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(&s).to_string())
}
}
}

pub fn name(&self) -> Cow<'_, str> { self.file_prefix() }
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct OutputUnit {
#[serde(with = "path_slash_serde")]
pub object: PathBuf,
pub name: String,
pub autogenerated: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
pub struct OutputModule {
pub name: String,
pub module_id: u32,
#[serde(with = "path_slash_serde")]
pub ldscript: PathBuf,
pub units: Vec<OutputUnit>,
}

#[derive(Serialize, Deserialize, Debug, Clone, Default)]
pub struct OutputConfig {
pub ldscript: PathBuf,
pub units: Vec<OutputUnit>,
#[serde(flatten)]
pub base: OutputModule,
pub modules: Vec<OutputModule>,
}

@@ -331,7 +413,9 @@ fn verify_hash<P: AsRef<Path>>(path: P, hash_str: &str) -> Result<()> {
}
}

fn update_symbols(obj: &mut ObjInfo, modules: &BTreeMap<u32, ObjInfo>) -> Result<()> {
type ModuleMap<'a> = BTreeMap<u32, (&'a ModuleConfig, ObjInfo)>;

fn update_symbols(obj: &mut ObjInfo, modules: &ModuleMap<'_>) -> Result<()> {
log::debug!("Updating symbols for module {}", obj.module_id);

// Find all references to this module from other modules

@@ -339,11 +423,9 @@ fn update_symbols(obj: &mut ObjInfo, modules: &BTreeMap<u32, ObjInfo>) -> Result
.unresolved_relocations
.iter()
.map(|r| (obj.module_id, r))
.chain(
modules
.iter()
.flat_map(|(_, obj)| obj.unresolved_relocations.iter().map(|r| (obj.module_id, r))),
)
.chain(modules.iter().flat_map(|(_, (_, obj))| {
obj.unresolved_relocations.iter().map(|r| (obj.module_id, r))
}))
.filter(|(_, r)| r.module_id == obj.module_id)
{
if source_module_id == obj.module_id {

@@ -362,26 +444,12 @@ fn update_symbols(obj: &mut ObjInfo, modules: &BTreeMap<u32, ObjInfo>) -> Result
.get_elf_index(rel_reloc.target_section as usize)
.ok_or_else(|| anyhow!("Failed to locate REL section {}", rel_reloc.target_section))?;

let target_symbol = obj
let target_symbols = obj
.symbols
.at_section_address(target_section_index, rel_reloc.addend)
.filter(|(_, s)| s.referenced_by(rel_reloc.kind))
.at_most_one()
.map_err(|e| {
for (_, symbol) in e {
log::warn!(
"Multiple symbols found for {:#010X}: {}",
rel_reloc.addend,
symbol.name
);
}
anyhow!(
"Multiple symbols found for {:#010X} while checking reloc {} {:?}",
rel_reloc.addend,
source_module_id,
rel_reloc
)
})?;
.collect_vec();
let target_symbol = best_match_for_reloc(target_symbols, rel_reloc.kind);

if let Some((symbol_index, symbol)) = target_symbol {
// Update symbol

@@ -427,11 +495,7 @@ fn update_symbols(obj: &mut ObjInfo, modules: &BTreeMap<u32, ObjInfo>) -> Result
Ok(())
}

fn create_relocations(
obj: &mut ObjInfo,
modules: &BTreeMap<u32, ObjInfo>,
dol_obj: &ObjInfo,
) -> Result<()> {
fn create_relocations(obj: &mut ObjInfo, modules: &ModuleMap<'_>, dol_obj: &ObjInfo) -> Result<()> {
log::debug!("Creating relocations for module {}", obj.module_id);

// Resolve all relocations in this module

@@ -450,9 +514,10 @@ fn create_relocations(
} else if rel_reloc.module_id == obj.module_id {
&*obj
} else {
modules
&modules
.get(&rel_reloc.module_id)
.ok_or_else(|| anyhow!("Failed to locate module {}", rel_reloc.module_id))?
.1
};

let (target_section_index, _target_section) = if rel_reloc.module_id == 0 {

@@ -469,21 +534,12 @@ fn create_relocations(
)?
};

let Some((symbol_index, symbol)) = target_obj
let target_symbols = target_obj
.symbols
.at_section_address(target_section_index, rel_reloc.addend)
.filter(|(_, s)| s.referenced_by(rel_reloc.kind))
.at_most_one()
.map_err(|e| {
for (_, symbol) in e {
log::warn!(
"Multiple symbols found for {:#010X}: {}",
rel_reloc.addend,
symbol.name
);
}
anyhow!("Multiple symbols found for {:#010X}", rel_reloc.addend)
})?
.collect_vec();
let Some((symbol_index, symbol)) = best_match_for_reloc(target_symbols, rel_reloc.kind)
else {
bail!(
"Couldn't find module {} symbol in section {} at {:#010X}",

@@ -516,7 +572,7 @@ fn create_relocations(

fn resolve_external_relocations(
obj: &mut ObjInfo,
modules: &BTreeMap<u32, ObjInfo>,
modules: &ModuleMap<'_>,
dol_obj: Option<&ObjInfo>,
) -> Result<()> {
log::debug!("Resolving relocations for module {}", obj.module_id);

@@ -540,9 +596,12 @@ fn resolve_external_relocations(
} else if module_id == 0 {
dol_obj.unwrap()
} else {
modules.get(&module_id).ok_or_else(|| {
anyhow!("Failed to locate module {}", reloc.module.unwrap())
})?
&modules
.get(&module_id)
.ok_or_else(|| {
anyhow!("Failed to locate module {}", reloc.module.unwrap())
})?
.1
};

let target_symbol = &target_obj.symbols[reloc.target_symbol];

@@ -573,69 +632,49 @@ fn resolve_external_relocations(
Ok(())
}

fn split(args: SplitArgs) -> Result<()> {
log::info!("Loading {}", args.config.display());
let mut config_file = File::open(&args.config)
.with_context(|| format!("Failed to open config file '{}'", args.config.display()))?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;
fn decompress_if_needed(map: &Mmap) -> Result<Cow<[u8]>> {
Ok(if map.len() > 4 && map[0..4] == *b"Yaz0" {
Cow::Owned(yaz0::decompress_file(&mut map_reader(map))?)
} else {
Cow::Borrowed(map)
})
}

let out_config_path = args.out_dir.join("config.json");
let mut dep = DepFile::new(out_config_path.clone());

log::info!("Loading {}", config.object.display());
if let Some(hash_str) = &config.hash {
verify_hash(&config.object, hash_str)?;
fn load_analyze_dol(config: &ProjectConfig) -> Result<(ObjInfo, Vec<PathBuf>)> {
// log::info!("Loading {}", config.object.display());
if let Some(hash_str) = &config.base.hash {
verify_hash(&config.base.object, hash_str)?;
}
let mut obj = process_dol(&config.object)?;
dep.push(config.object.clone());
let mut obj = process_dol(&config.base.object)?;
let mut dep = vec![config.base.object.clone()];

if let Some(comment_version) = config.mw_comment_version {
obj.mw_comment = Some(MWComment::new(comment_version)?);
}

let mut modules = BTreeMap::<u32, ObjInfo>::new();
let mut module_ids = Vec::with_capacity(config.modules.len());
if !config.modules.is_empty() {
log::info!("Loading {} modules", config.modules.len());
}
for module_config in &config.modules {
log::debug!("Loading {}", module_config.object.display());
if let Some(hash_str) = &module_config.hash {
verify_hash(&module_config.object, hash_str)?;
}
let map = map_file(&module_config.object)?;
let rel_obj = process_rel(map_reader(&map))?;
module_ids.push(rel_obj.module_id);
match modules.entry(rel_obj.module_id) {
Entry::Vacant(e) => e.insert(rel_obj),
Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),
};
dep.push(module_config.object.clone());
if let Some(map_path) = &config.base.map {
apply_map_file(map_path, &mut obj)?;
dep.push(map_path.clone());
}

if let Some(splits_path) = &config.splits {
if let Some(splits_path) = &config.base.splits {
apply_splits_file(splits_path, &mut obj)?;
dep.push(splits_path.clone());
if splits_path.is_file() {
let map = map_file(splits_path)?;
apply_splits(map_reader(&map), &mut obj)?;
}
}

let mut state = AnalyzerState::default();

if let Some(symbols_path) = &config.symbols {
dep.push(symbols_path.clone());
if let Some(symbols_path) = &config.base.symbols {
apply_symbols_file(symbols_path, &mut obj)?;
dep.push(symbols_path.clone());
}

// TODO move before symbols?
log::info!("Performing signature analysis");
debug!("Performing signature analysis");
apply_signatures(&mut obj)?;

if !config.quick_analysis {
log::info!("Detecting function boundaries");
let mut state = AnalyzerState::default();
debug!("Detecting function boundaries");
state.detect_functions(&obj)?;
log::info!("Discovered {} functions", state.function_slices.len());

FindTRKInterruptVectorTable::execute(&mut state, &obj)?;
FindSaveRestSleds::execute(&mut state, &obj)?;

@@ -649,116 +688,59 @@ fn split(args: SplitArgs) -> Result<()> {
verify_hash(selfile, hash)?;
}
apply_selfile(&mut obj, selfile)?;
dep.push(selfile.clone());
}
Ok((obj, dep))
}

if !modules.is_empty() {
log::info!("Analyzing modules");
fn split_write_obj(
obj: &mut ObjInfo,
config: &ProjectConfig,
module_config: &ModuleConfig,
out_dir: &PathBuf,
no_update: bool,
) -> Result<OutputModule> {
debug!("Performing relocation analysis");
let mut tracker = Tracker::new(obj);
tracker.process(obj)?;

let mut function_count = 0;
for &module_id in &module_ids {
log::info!("Analyzing module {}", module_id);
let module_obj = modules.get_mut(&module_id).unwrap();
let mut state = AnalyzerState::default();
state.detect_functions(module_obj)?;
function_count += state.function_slices.len();
FindRelCtorsDtors::execute(&mut state, module_obj)?;
state.apply(module_obj)?;
apply_signatures(module_obj)?;
apply_signatures_post(module_obj)?;
}
log::info!("Discovered {} functions in modules", function_count);

// Step 1: For each module, create any missing symbols (referenced from other modules) and set FORCEACTIVE
update_symbols(&mut obj, &modules)?;
for &module_id in &module_ids {
let mut module_obj = modules.remove(&module_id).unwrap();
update_symbols(&mut module_obj, &modules)?;
modules.insert(module_id, module_obj);
}

// Step 2: For each module, create relocations to symbols in other modules
for &module_id in &module_ids {
let mut module_obj = modules.remove(&module_id).unwrap();
create_relocations(&mut module_obj, &modules, &obj)?;
modules.insert(module_id, module_obj);
}
}

log::info!("Performing relocation analysis");
let mut tracker = Tracker::new(&obj);
tracker.process(&obj)?;

log::info!("Applying relocations");
tracker.apply(&mut obj, false)?;
if !modules.is_empty() {
resolve_external_relocations(&mut obj, &modules, None)?;
for &module_id in &module_ids {
let mut module_obj = modules.remove(&module_id).unwrap();
resolve_external_relocations(&mut module_obj, &modules, Some(&obj))?;

let mut tracker = Tracker::new(&module_obj);
tracker.process(&module_obj)?;
tracker.apply(&mut module_obj, false)?;

modules.insert(module_id, module_obj);
}
}
debug!("Applying relocations");
tracker.apply(obj, false)?;

if config.detect_objects {
log::info!("Detecting object boundaries");
detect_object_boundaries(&mut obj)?;
for module_obj in modules.values_mut() {
detect_object_boundaries(module_obj)?;
}
debug!("Detecting object boundaries");
detect_objects(obj)?;
}

if config.detect_strings {
log::info!("Detecting strings");
detect_strings(&mut obj)?;
for module_obj in modules.values_mut() {
detect_strings(module_obj)?;
}
debug!("Detecting strings");
detect_strings(obj)?;
}

log::info!("Adjusting splits");
update_splits(&mut obj)?;
for module_obj in modules.values_mut() {
update_splits(module_obj)?;
}
debug!("Adjusting splits");
update_splits(obj, if obj.module_id == 0 { config.common_start } else { None })?;

if !args.no_update {
log::info!("Writing configuration");
if let Some(symbols_path) = &config.symbols {
if !no_update {
debug!("Writing configuration");
if let Some(symbols_path) = &module_config.symbols {
write_symbols_file(symbols_path, &obj)?;
}
if let Some(splits_path) = &config.splits {
if let Some(splits_path) = &module_config.splits {
write_splits_file(splits_path, &obj, false)?;
}

for (config, &module_id) in config.modules.iter().zip(&module_ids) {
let module_obj = modules.get(&module_id).unwrap();
if let Some(symbols_path) = &config.symbols {
write_symbols_file(symbols_path, module_obj)?;
}
if let Some(splits_path) = &config.splits {
write_splits_file(splits_path, module_obj, true)?;
}
}
}

log::info!("Splitting {} objects", obj.link_order.len());
debug!("Splitting {} objects", obj.link_order.len());
let split_objs = split_obj(&obj)?;

// Create out dirs
touch(&args.out_dir)?;
let asm_dir = args.out_dir.join("asm");
let include_dir = args.out_dir.join("include");
let obj_dir = args.out_dir.join("obj");
DirBuilder::new().recursive(true).create(&include_dir)?;
fs::write(include_dir.join("macros.inc"), include_str!("../../assets/macros.inc"))?;

log::info!("Writing object files");
let mut out_config = OutputConfig::default();
debug!("Writing object files");
let obj_dir = out_dir.join("obj");
let mut out_config = OutputModule {
name: module_config.name().to_string(),
module_id: obj.module_id,
ldscript: out_dir.join("ldscript.lcf"),
units: Vec::with_capacity(split_objs.len()),
};
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
let out_obj = write_elf(split_obj)?;
let out_path = obj_dir.join(obj_path_for_unit(&unit.name));

@@ -773,18 +755,12 @@ fn split(args: SplitArgs) -> Result<()> {
fs::write(&out_path, out_obj)
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
}
{
let mut out_file = buf_writer(&out_config_path)?;
serde_json::to_writer_pretty(&mut out_file, &out_config)?;
out_file.flush()?;
}

// Generate ldscript.lcf
let ldscript_path = args.out_dir.join("ldscript.lcf");
fs::write(&ldscript_path, generate_ldscript(&obj, config.auto_force_files)?)?;
out_config.ldscript = ldscript_path;
fs::write(&out_config.ldscript, generate_ldscript(&obj, config.auto_force_files)?)?;

log::info!("Writing disassembly");
debug!("Writing disassembly");
let asm_dir = out_dir.join("asm");
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
let out_path = asm_dir.join(asm_path_for_unit(&unit.name));

@@ -793,24 +769,220 @@ fn split(args: SplitArgs) -> Result<()> {
.with_context(|| format!("Failed to write {}", out_path.display()))?;
w.flush()?;
}
Ok(out_config)
}

// Split and write modules
for (config, &module_id) in config.modules.iter().zip(&module_ids) {
let obj = modules.get_mut(&module_id).unwrap();
fn load_analyze_rel(
config: &ProjectConfig,
module_config: &ModuleConfig,
) -> Result<(ObjInfo, Vec<PathBuf>)> {
debug!("Loading {}", module_config.object.display());
if let Some(hash_str) = &module_config.hash {
verify_hash(&module_config.object, hash_str)?;
}
let map = map_file(&module_config.object)?;
let buf = decompress_if_needed(&map)?;
let mut module_obj = process_rel(Reader::new(&buf))?;

let out_dir = args.out_dir.join(format!("module_{}", module_id));
let asm_dir = out_dir.join("asm");
// let obj_dir = out_dir.join("obj");
let mut dep = vec![module_config.object.clone()];
if let Some(map_path) = &module_config.map {
apply_map_file(map_path, &mut module_obj)?;
dep.push(map_path.clone());
}

log::info!("Processing module {}", module_id);
if let Some(splits_path) = &module_config.splits {
apply_splits_file(splits_path, &mut module_obj)?;
dep.push(splits_path.clone());
}

// log::info!("Writing disassembly");
let filename = config.object.file_name().unwrap().to_str().unwrap();
let out_path = asm_dir.join(asm_path_for_unit(filename));
let mut w = buf_writer(&out_path)?;
write_asm(&mut w, obj)
.with_context(|| format!("Failed to write {}", out_path.display()))?;
w.flush()?;
if let Some(symbols_path) = &module_config.symbols {
apply_symbols_file(symbols_path, &mut module_obj)?;
dep.push(symbols_path.clone());
}

debug!("Analyzing module {}", module_obj.module_id);
if !config.quick_analysis {
let mut state = AnalyzerState::default();
state.detect_functions(&module_obj)?;
FindRelCtorsDtors::execute(&mut state, &module_obj)?;
FindRelRodataData::execute(&mut state, &module_obj)?;
state.apply(&mut module_obj)?;
}
apply_signatures(&mut module_obj)?;
apply_signatures_post(&mut module_obj)?;
Ok((module_obj, dep))
}

fn split(args: SplitArgs) -> Result<()> {
if let Some(jobs) = args.jobs {
rayon::ThreadPoolBuilder::new().num_threads(jobs).build_global().unwrap();
}

let command_start = Instant::now();
info!("Loading {}", args.config.display());
let mut config_file = File::open(&args.config)
.with_context(|| format!("Failed to open config file '{}'", args.config.display()))?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;

let out_config_path = args.out_dir.join("config.json");
let mut dep = DepFile::new(out_config_path.clone());

let module_count = config.modules.len() + 1;
info!(
"Loading and analyzing {} modules (using {} threads)",
module_count,
rayon::current_num_threads()
);
let mut dol_result: Option<Result<(ObjInfo, Vec<PathBuf>)>> = None;
let mut modules_result: Option<Result<Vec<(ObjInfo, Vec<PathBuf>)>>> = None;
let start = Instant::now();
rayon::scope(|s| {
// DOL
s.spawn(|_| {
let _span = info_span!("module", name = %config.base.name()).entered();
dol_result =
Some(load_analyze_dol(&config).with_context(|| {
format!("While loading object '{}'", config.base.file_name())
}));
});
// Modules
s.spawn(|_| {
modules_result = Some(
config
.modules
.par_iter()
.map(|module_config| {
let _span = info_span!("module", name = %module_config.name()).entered();
load_analyze_rel(&config, module_config).with_context(|| {
format!("While loading object '{}'", module_config.file_name())
})
})
.collect(),
);
});
});
let duration = start.elapsed();
let (mut obj, dep_v) = dol_result.unwrap()?;
let mut function_count = obj.symbols.by_kind(ObjSymbolKind::Function).count();
dep.extend(dep_v);

let mut modules = BTreeMap::<u32, (&ModuleConfig, ObjInfo)>::new();
for (idx, (module_obj, dep_v)) in modules_result.unwrap()?.into_iter().enumerate() {
function_count += module_obj.symbols.by_kind(ObjSymbolKind::Function).count();
dep.extend(dep_v);
match modules.entry(module_obj.module_id) {
Entry::Vacant(e) => e.insert((&config.modules[idx], module_obj)),
Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),
};
}
info!(
"Initial analysis completed in {}.{:03}s (found {} functions)",
duration.as_secs(),
duration.subsec_millis(),
function_count
);

if !modules.is_empty() {
let module_ids = modules.keys().cloned().collect_vec();

// Create any missing symbols (referenced from other modules) and set FORCEACTIVE
update_symbols(&mut obj, &modules)?;
for &module_id in &module_ids {
let (module_config, mut module_obj) = modules.remove(&module_id).unwrap();
update_symbols(&mut module_obj, &modules)?;
modules.insert(module_id, (module_config, module_obj));
}

// Create relocations to symbols in other modules
for &module_id in &module_ids {
let (module_config, mut module_obj) = modules.remove(&module_id).unwrap();
create_relocations(&mut module_obj, &modules, &obj)?;
modules.insert(module_id, (module_config, module_obj));
}

// Replace external relocations with internal ones, creating extern symbols
resolve_external_relocations(&mut obj, &modules, None)?;
for &module_id in &module_ids {
let (module_config, mut module_obj) = modules.remove(&module_id).unwrap();
resolve_external_relocations(&mut module_obj, &modules, Some(&obj))?;
modules.insert(module_id, (module_config, module_obj));
}
}

// Create out dirs
DirBuilder::new().recursive(true).create(&args.out_dir)?;
touch(&args.out_dir)?;
let include_dir = args.out_dir.join("include");
DirBuilder::new().recursive(true).create(&include_dir)?;
fs::write(include_dir.join("macros.inc"), include_str!("../../assets/macros.inc"))?;

info!("Rebuilding relocations and splitting");
let mut dol_result: Option<Result<OutputModule>> = None;
let mut modules_result: Option<Result<Vec<OutputModule>>> = None;
let start = Instant::now();
rayon::scope(|s| {
// DOL
s.spawn(|_| {
let _span =
info_span!("module", name = %config.base.name(), id = obj.module_id).entered();
dol_result = Some(
split_write_obj(&mut obj, &config, &config.base, &args.out_dir, args.no_update)
.with_context(|| {
format!(
"While processing object '{}' (module ID {})",
config.base.file_name(),
obj.module_id
)
}),
);
});
// Modules
s.spawn(|_| {
modules_result = Some(
modules
.par_iter_mut()
.map(|(&module_id, (module_config, module_obj))| {
let _span =
info_span!("module", name = %module_config.name(), id = module_id)
.entered();
let out_dir = args.out_dir.join(module_config.name().as_ref());
split_write_obj(
module_obj,
&config,
module_config,
&out_dir,
args.no_update,
)
.with_context(|| {
format!(
"While processing object '{}' (module ID {})",
module_config.file_name(),
module_id
)
})
})
.collect(),
);
});
});
let duration = start.elapsed();
let out_config = OutputConfig { base: dol_result.unwrap()?, modules: modules_result.unwrap()? };
let mut object_count = out_config.base.units.len();
for module in &out_config.modules {
object_count += module.units.len();
}
info!(
"Splitting completed in {}.{:03}s (wrote {} objects)",
duration.as_secs(),
duration.subsec_millis(),
object_count
);

// Write output config
{
let mut out_file = buf_writer(&out_config_path)?;
serde_json::to_writer_pretty(&mut out_file, &out_config)?;
out_file.flush()?;
}

// Write dep file

@@ -826,6 +998,8 @@ fn split(args: SplitArgs) -> Result<()> {
// validate(&obj, file, &state)?;
// }

let duration = command_start.elapsed();
info!("Total duration: {}.{:03}s", duration.as_secs(), duration.subsec_millis());
Ok(())
}

@@ -980,10 +1154,10 @@ fn diff(args: DiffArgs) -> Result<()> {
.with_context(|| format!("Failed to open config file '{}'", args.config.display()))?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;

log::info!("Loading {}", config.object.display());
let mut obj = process_dol(&config.object)?;
log::info!("Loading {}", config.base.object.display());
let mut obj = process_dol(&config.base.object)?;

if let Some(symbols_path) = &config.symbols {
if let Some(symbols_path) = &config.base.symbols {
apply_symbols_file(symbols_path, &mut obj)?;
}

@@ -1116,10 +1290,10 @@ fn apply(args: ApplyArgs) -> Result<()> {
.with_context(|| format!("Failed to open config file '{}'", args.config.display()))?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;

log::info!("Loading {}", config.object.display());
let mut obj = process_dol(&config.object)?;
log::info!("Loading {}", config.base.object.display());
let mut obj = process_dol(&config.base.object)?;

if let Some(symbols_path) = &config.symbols {
if let Some(symbols_path) = &config.base.symbols {
if !apply_symbols_file(symbols_path, &mut obj)? {
bail!("Symbols file '{}' does not exist", symbols_path.display());
}

@@ -1260,7 +1434,7 @@ fn apply(args: ApplyArgs) -> Result<()> {
}
}

write_symbols_file(config.symbols.as_ref().unwrap(), &obj)?;
write_symbols_file(config.base.symbols.as_ref().unwrap(), &obj)?;

Ok(())
}

src/main.rs (12 changed lines)
@@ -1,4 +1,4 @@
use std::{ffi::OsStr, io::Write, path::PathBuf, str::FromStr};
use std::{ffi::OsStr, path::PathBuf, str::FromStr};

use argp::{FromArgValue, FromArgs};

@@ -86,13 +86,11 @@ enum SubCommand {
}

fn main() {
let args: TopLevel = argp_version::from_env();
env_logger::Builder::from_env(
env_logger::Env::default().default_filter_or(args.log_level.to_string()),
)
.format(|f, r| writeln!(f, "[{}] {}", r.level(), r.args()))
.init();
let format = tracing_subscriber::fmt::format().with_target(false).without_time();
tracing_subscriber::fmt().event_format(format).init();
// TODO reimplement log level selection

let args: TopLevel = argp_version::from_env();
let mut result = Ok(());
if let Some(dir) = &args.chdir {
result = std::env::set_current_dir(dir).map_err(|e| {

@@ -11,11 +11,11 @@ use std::{

use anyhow::{anyhow, bail, ensure, Result};
pub use relocations::{ObjReloc, ObjRelocKind, ObjRelocations};
pub use sections::{section_kind_for_section, ObjSection, ObjSectionKind, ObjSections};
pub use sections::{ObjSection, ObjSectionKind, ObjSections};
pub use splits::{ObjSplit, ObjSplits};
pub use symbols::{
ObjDataKind, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind, ObjSymbolScope,
ObjSymbols, SymbolIndex,
best_match_for_reloc, ObjDataKind, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
ObjSymbolScope, ObjSymbols, SymbolIndex,
};

use crate::util::{comment::MWComment, rel::RelReloc};

@@ -206,7 +206,7 @@ impl ObjSection {
}
}

pub fn section_kind_for_section(section_name: &str) -> Result<ObjSectionKind> {
fn section_kind_for_section(section_name: &str) -> Result<ObjSectionKind> {
Ok(match section_name {
".init" | ".text" | ".dbgtext" | ".vmtext" => ObjSectionKind::Code,
".ctors" | ".dtors" | ".rodata" | ".sdata2" | "extab" | "extabindex" => {

@@ -170,9 +170,7 @@ impl ObjSymbols {
let mut symbols_by_section: Vec<BTreeMap<u32, Vec<SymbolIndex>>> = vec![];
let mut symbols_by_name = HashMap::<String, Vec<SymbolIndex>>::new();
for (idx, symbol) in symbols.iter().enumerate() {
if obj_kind == ObjKind::Executable {
symbols_by_address.nested_push(symbol.address as u32, idx);
}
symbols_by_address.nested_push(symbol.address as u32, idx);
if let Some(section_idx) = symbol.section {
if section_idx >= symbols_by_section.len() {
symbols_by_section.resize_with(section_idx + 1, BTreeMap::new);

@@ -209,7 +207,8 @@ impl ObjSymbols {
let target_symbol_idx = if let Some((symbol_idx, existing)) = opt {
let size =
if existing.size_known && in_symbol.size_known && existing.size != in_symbol.size {
log::warn!(
// TODO fix and promote back to warning
log::debug!(
"Conflicting size for {}: was {:#X}, now {:#X}",
existing.name,
existing.size,

@@ -277,9 +276,7 @@ impl ObjSymbols {

pub fn add_direct(&mut self, in_symbol: ObjSymbol) -> Result<SymbolIndex> {
let symbol_idx = self.symbols.len();
if self.obj_kind == ObjKind::Executable {
self.symbols_by_address.nested_push(in_symbol.address as u32, symbol_idx);
}
self.symbols_by_address.nested_push(in_symbol.address as u32, symbol_idx);
if let Some(section_idx) = in_symbol.section {
if section_idx >= self.symbols_by_section.len() {
self.symbols_by_section.resize_with(section_idx + 1, BTreeMap::new);

@@ -446,7 +443,7 @@ impl ObjSymbols {
// ensure!(self.obj_kind == ObjKind::Executable);
let mut result = None;
for (_addr, symbol_idxs) in self.indexes_for_range(..=target_addr.address).rev() {
let mut symbols = symbol_idxs
let symbols = symbol_idxs
.iter()
.map(|&idx| (idx, &self.symbols[idx]))
.filter(|(_, sym)| {

@@ -454,42 +451,8 @@ impl ObjSymbols {
&& sym.referenced_by(reloc_kind)
})
.collect_vec();
let (symbol_idx, symbol) = if symbols.len() == 1 {
symbols.pop().unwrap()
} else {
symbols.sort_by_key(|&(_, symbol)| {
let mut rank = match symbol.kind {
ObjSymbolKind::Function | ObjSymbolKind::Object => match reloc_kind {
ObjRelocKind::PpcAddr16Hi
| ObjRelocKind::PpcAddr16Ha
| ObjRelocKind::PpcAddr16Lo => 1,
ObjRelocKind::Absolute
| ObjRelocKind::PpcRel24
| ObjRelocKind::PpcRel14
| ObjRelocKind::PpcEmbSda21 => 2,
},
// Label
ObjSymbolKind::Unknown => match reloc_kind {
ObjRelocKind::PpcAddr16Hi
| ObjRelocKind::PpcAddr16Ha
| ObjRelocKind::PpcAddr16Lo
if !symbol.name.starts_with("..") =>
{
3
}
_ => 1,
},
ObjSymbolKind::Section => -1,
};
if symbol.size > 0 {
rank += 1;
}
-rank
});
match symbols.first() {
Some(&v) => v,
None => continue,
}
let Some((symbol_idx, symbol)) = best_match_for_reloc(symbols, reloc_kind) else {
continue;
};
if symbol.address == target_addr.address as u64 {
result = Some((symbol_idx, symbol));

@@ -549,3 +512,42 @@ impl ObjSymbol {
}
}
}

pub fn best_match_for_reloc(
mut symbols: Vec<(SymbolIndex, &ObjSymbol)>,
reloc_kind: ObjRelocKind,
) -> Option<(SymbolIndex, &ObjSymbol)> {
if symbols.len() == 1 {
return symbols.into_iter().next();
}
symbols.sort_by_key(|&(_, symbol)| {
let mut rank = match symbol.kind {
ObjSymbolKind::Function | ObjSymbolKind::Object => match reloc_kind {
ObjRelocKind::PpcAddr16Hi
| ObjRelocKind::PpcAddr16Ha
| ObjRelocKind::PpcAddr16Lo => 1,
ObjRelocKind::Absolute
| ObjRelocKind::PpcRel24
| ObjRelocKind::PpcRel14
| ObjRelocKind::PpcEmbSda21 => 2,
},
// Label
ObjSymbolKind::Unknown => match reloc_kind {
ObjRelocKind::PpcAddr16Hi
| ObjRelocKind::PpcAddr16Ha
| ObjRelocKind::PpcAddr16Lo
if !symbol.name.starts_with("..") =>
{
3
}
_ => 1,
},
ObjSymbolKind::Section => -1,
};
if symbol.size > 0 {
rank += 1;
}
-rank
});
symbols.into_iter().next()
}

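For reference, a minimal sketch of how the new `best_match_for_reloc` helper is meant to be used, mirroring the call sites in `update_symbols`/`create_relocations` above; the bindings for the section index, addend, and relocation kind here are hypothetical.

// Hypothetical call site: several symbols may exist at the target address,
// so collect the candidates that can be referenced by this relocation kind
// and let best_match_for_reloc pick the most plausible one.
let candidates = obj
.symbols
.at_section_address(target_section_index, rel_reloc.addend)
.filter(|(_, s)| s.referenced_by(rel_reloc.kind))
.collect_vec();
if let Some((symbol_index, symbol)) = best_match_for_reloc(candidates, rel_reloc.kind) {
// Functions and objects outrank bare labels, section symbols rank last,
// and symbols with a known size win ties.
log::debug!("Best match for {:#010X}: {}", rel_reloc.addend, symbol.name);
}
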
@@ -12,8 +12,8 @@ use regex::{Captures, Regex};

use crate::{
obj::{
ObjDataKind, ObjInfo, ObjKind, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags,
ObjSymbolKind, ObjUnit,
ObjDataKind, ObjInfo, ObjKind, ObjSectionKind, ObjSplit, ObjSymbol, ObjSymbolFlagSet,
ObjSymbolFlags, ObjSymbolKind, ObjUnit,
},
util::file::{buf_writer, map_file, map_reader},
};

@@ -163,9 +163,6 @@ pub fn write_symbols<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
}

fn write_symbol<W: Write>(w: &mut W, obj: &ObjInfo, symbol: &ObjSymbol) -> Result<()> {
// if let Some(demangled_name) = &symbol.demangled_name {
// writeln!(w, "// {demangled_name}")?;
// }
write!(w, "{} = ", symbol.name)?;
let section = symbol.section.and_then(|idx| obj.sections.get(idx));
if let Some(section) = section {

@@ -173,16 +170,6 @@ fn write_symbol<W: Write>(w: &mut W, obj: &ObjInfo, symbol: &ObjSymbol) -> Resul
}
write!(w, "{:#010X}; //", symbol.address)?;
write!(w, " type:{}", symbol_kind_to_str(symbol.kind))?;
// if let Some(section) = section {
// match section.kind {
// ObjSectionKind::Code => {
// write!(w, " type:function")?;
// }
// ObjSectionKind::Data | ObjSectionKind::ReadOnlyData | ObjSectionKind::Bss => {
// write!(w, " type:object")?;
// }
// }
// }
if symbol.size_known && symbol.size > 0 {
write!(w, " size:{:#X}", symbol.size)?;
}

@@ -287,6 +274,27 @@ fn symbol_data_kind_from_str(s: &str) -> Option<ObjDataKind> {
}
}

#[inline]
fn section_kind_from_str(s: &str) -> Option<ObjSectionKind> {
match s {
"code" | "text" => Some(ObjSectionKind::Code),
"data" => Some(ObjSectionKind::Data),
"rodata" => Some(ObjSectionKind::ReadOnlyData),
"bss" => Some(ObjSectionKind::Bss),
_ => None,
}
}

#[inline]
fn section_kind_to_str(kind: ObjSectionKind) -> &'static str {
match kind {
ObjSectionKind::Code => "code",
ObjSectionKind::Data => "data",
ObjSectionKind::ReadOnlyData => "rodata",
ObjSectionKind::Bss => "bss",
}
}

#[inline]
pub fn write_splits_file<P: AsRef<Path>>(path: P, obj: &ObjInfo, all: bool) -> Result<()> {
let mut w = buf_writer(path)?;

@@ -298,7 +306,11 @@ pub fn write_splits_file<P: AsRef<Path>>(path: P, obj: &ObjInfo, all: bool) -> R
pub fn write_splits<W: Write>(w: &mut W, obj: &ObjInfo, all: bool) -> Result<()> {
writeln!(w, "Sections:")?;
for (_, section) in obj.sections.iter() {
write!(w, "\t{:<11} type:{:?} align:{:#X}", section.name, section.kind, section.align)?;
write!(w, "\t{:<11} type:{}", section.name, section_kind_to_str(section.kind))?;
if section.align > 0 {
write!(w, " align:{}", section.align)?;
}
writeln!(w)?;
}
for unit in obj.link_order.iter().filter(|unit| all || !unit.autogenerated) {
write!(w, "\n{}:", unit.name)?;

@@ -350,13 +362,21 @@ struct SplitUnit {
comment_version: Option<u8>,
}

struct SectionDef {
name: String,
kind: Option<ObjSectionKind>,
align: Option<u32>,
}

enum SplitLine {
Unit(SplitUnit),
Section(SplitSection),
UnitSection(SplitSection),
SectionsStart,
Section(SectionDef),
None,
}

fn parse_split_line(line: &str) -> Result<SplitLine> {
fn parse_split_line(line: &str, state: &SplitState) -> Result<SplitLine> {
static UNIT_LINE: Lazy<Regex> =
Lazy::new(|| Regex::new("^\\s*(?P<name>[^\\s:]+)\\s*:\\s*(?P<attrs>.*)$").unwrap());
static SECTION_LINE: Lazy<Regex> =

@@ -368,14 +388,19 @@ fn parse_split_line(line: &str) -> Result<SplitLine> {
} else if let Some(captures) = UNIT_LINE.captures(line) {
parse_unit_line(captures).with_context(|| format!("While parsing split line: '{line}'"))
} else if let Some(captures) = SECTION_LINE.captures(line) {
parse_section_line(captures).with_context(|| format!("While parsing split line: '{line}'"))
parse_section_line(captures, state)
.with_context(|| format!("While parsing split line: '{line}'"))
} else {
Err(anyhow!("Failed to parse split line: '{line}'"))
}
}

fn parse_unit_line(captures: Captures) -> Result<SplitLine> {
let mut unit = SplitUnit { name: captures["name"].to_string(), comment_version: None };
let name = &captures["name"];
if name == "Sections" {
return Ok(SplitLine::SectionsStart);
}
let mut unit = SplitUnit { name: name.to_string(), comment_version: None };

for attr in captures["attrs"].split(' ').filter(|&s| !s.is_empty()) {
if let Some((attr, value)) = attr.split_once(':') {

@@ -391,7 +416,33 @@ fn parse_unit_line(captures: Captures) -> Result<SplitLine> {
Ok(SplitLine::Unit(unit))
}

fn parse_section_line(captures: Captures) -> Result<SplitLine> {
fn parse_section_line(captures: Captures, state: &SplitState) -> Result<SplitLine> {
if matches!(state, SplitState::Sections(_)) {
let name = &captures["name"];
let mut section = SectionDef { name: name.to_string(), kind: None, align: None };

for attr in captures["attrs"].split(' ').filter(|&s| !s.is_empty()) {
if let Some((attr, value)) = attr.split_once(':') {
match attr {
"type" => {
section.kind = Some(
section_kind_from_str(value)
.ok_or_else(|| anyhow!("Unknown section type '{}'", value))?,
);
}
"align" => {
section.align = Some(u32::from_str(value)?);
}
_ => bail!("Unknown section attribute '{attr}'"),
}
} else {
bail!("Unknown section attribute '{attr}'");
}
}

return Ok(SplitLine::Section(section));
}

let mut section = SplitSection {
name: captures["name"].to_string(),
start: 0,

@@ -423,27 +474,39 @@ fn parse_section_line(captures: Captures) -> Result<SplitLine> {
}
}
if section.start > 0 && section.end > 0 {
Ok(SplitLine::Section(section))
Ok(SplitLine::UnitSection(section))
} else {
Err(anyhow!("Section '{}' missing start or end address", section.name))
}
}

enum SplitState {
None,
Sections(usize),
Unit(String),
}

pub fn apply_splits_file<P: AsRef<Path>>(path: P, obj: &mut ObjInfo) -> Result<bool> {
Ok(if path.as_ref().is_file() {
let map = map_file(path)?;
apply_splits(map_reader(&map), obj)?;
true
} else {
false
})
}

pub fn apply_splits<R: BufRead>(r: R, obj: &mut ObjInfo) -> Result<()> {
enum SplitState {
None,
Unit(String),
}
let mut state = SplitState::None;
for result in r.lines() {
let line = match result {
Ok(line) => line,
Err(e) => return Err(e.into()),
};
let split_line = parse_split_line(&line)?;
let split_line = parse_split_line(&line, &state)?;
match (&mut state, split_line) {
(
SplitState::None | SplitState::Unit(_),
SplitState::None | SplitState::Unit(_) | SplitState::Sections(_),
SplitLine::Unit(SplitUnit { name, comment_version }),
) => {
obj.link_order.push(ObjUnit {

@@ -453,12 +516,36 @@ pub fn apply_splits<R: BufRead>(r: R, obj: &mut ObjInfo) -> Result<()> {
});
state = SplitState::Unit(name);
}
(SplitState::None, SplitLine::Section(SplitSection { name, .. })) => {
(SplitState::None, SplitLine::UnitSection(SplitSection { name, .. })) => {
bail!("Section {} defined outside of unit", name);
}
(SplitState::None | SplitState::Unit(_), SplitLine::SectionsStart) => {
state = SplitState::Sections(0);
}
(SplitState::Sections(index), SplitLine::Section(SectionDef { name, kind, align })) => {
let Some(obj_section) = obj.sections.get_mut(*index) else {
bail!(
"Section out of bounds: {} (index {}), object has {} sections",
name,
index,
obj.sections.count()
);
};
if let Err(_) = obj_section.rename(name.clone()) {
// Manual section
obj_section.kind =
kind.ok_or_else(|| anyhow!("Section '{}' missing type", name))?;
obj_section.name = name;
obj_section.section_known = true;
}
if let Some(align) = align {
obj_section.align = align as u64;
}
*index += 1;
}
(
SplitState::Unit(unit),
SplitLine::Section(SplitSection { name, start, end, align, common, rename }),
SplitLine::UnitSection(SplitSection { name, start, end, align, common, rename }),
) => {
let (section_index, _) = match obj.sections.by_name(&name)? {
Some(v) => Ok(v),

@@ -1,5 +1,7 @@
|
||||
use std::{io::Write, path::PathBuf};
|
||||
|
||||
use path_slash::PathBufExt;
|
||||
|
||||
pub struct DepFile {
|
||||
pub name: PathBuf,
|
||||
pub dependencies: Vec<PathBuf>,
|
||||
@@ -10,10 +12,12 @@ impl DepFile {
|
||||
|
||||
pub fn push(&mut self, dependency: PathBuf) { self.dependencies.push(dependency); }
|
||||
|
||||
pub fn extend(&mut self, dependencies: Vec<PathBuf>) { self.dependencies.extend(dependencies); }
|
||||
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
write!(w, "{}:", self.name.display())?;
|
||||
write!(w, "{}:", self.name.to_slash_lossy())?;
|
||||
for dep in &self.dependencies {
|
||||
write!(w, " \\\n {}", dep.display())?;
|
||||
write!(w, " \\\n {}", dep.to_slash_lossy())?;
|
||||
}
|
||||
Ok(())
|
||||
}
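
A minimal usage sketch of the depfile writer above; the target and dependency paths are hypothetical, and the Make-style output shown in the trailing comment follows the write implementation in the diff:

// Hypothetical usage; DepFile::write emits a Make-style dependency list
// with forward slashes regardless of host OS (via to_slash_lossy).
let mut dep = DepFile { name: PathBuf::from("build/main.elf"), dependencies: vec![] };
dep.push(PathBuf::from("orig/main.dol"));
dep.extend(vec![PathBuf::from("config/splits.txt"), PathBuf::from("config/symbols.txt")]);
let mut out = Vec::new();
dep.write(&mut out).unwrap();
// Produces:
// build/main.elf: \
//  orig/main.dol \
//  config/splits.txt \
//  config/symbols.txt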

@@ -8,6 +8,7 @@ use anyhow::{anyhow, Context, Result};
use byteorder::ReadBytesExt;
use filetime::{set_file_mtime, FileTime};
use memmap2::{Mmap, MmapOptions};
use path_slash::PathBufExt;

use crate::util::{rarc, rarc::Node, yaz0};

@@ -80,7 +81,7 @@ pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> {
for result in reader.lines() {
let line = result?;
if !line.is_empty() {
out.push(PathBuf::from(line));
out.push(PathBuf::from_slash(line));
}
}
} else if path_str.contains('*') {

@@ -2,13 +2,18 @@ use std::path::PathBuf;

use anyhow::{bail, Result};
use itertools::Itertools;
use path_slash::PathBufExt;

use crate::obj::ObjInfo;
use crate::obj::{ObjInfo, ObjKind};

#[inline]
const fn align_up(value: u32, align: u32) -> u32 { (value + (align - 1)) & !(align - 1) }
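
A quick sanity check of the rounding helper above, assuming align is a power of two (the only case where the bit trick is valid):

// align_up rounds value up to the next multiple of a power-of-two alignment.
assert_eq!(align_up(0x0, 0x20), 0x0);
assert_eq!(align_up(0x1, 0x20), 0x20);
assert_eq!(align_up(0x123, 0x20), 0x140);
assert_eq!(align_up(0x140, 0x20), 0x140);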

pub fn generate_ldscript(obj: &ObjInfo, auto_force_files: bool) -> Result<String> {
if obj.kind == ObjKind::Relocatable {
return generate_ldscript_partial(obj, auto_force_files);
}

let origin = obj.sections.iter().map(|(_, s)| s.address).min().unwrap();
let stack_size = match (obj.stack_address, obj.stack_end) {
(Some(stack_address), Some(stack_end)) => stack_address - stack_end,
@@ -76,10 +81,38 @@ pub fn generate_ldscript(obj: &ObjInfo, auto_force_files: bool) -> Result<String
Ok(out)
}

pub fn generate_ldscript_partial(obj: &ObjInfo, auto_force_files: bool) -> Result<String> {
let section_defs =
obj.sections.iter().map(|(_, s)| format!("{} :{{}}", s.name)).join("\n ");

let mut force_files = Vec::with_capacity(obj.link_order.len());
for unit in &obj.link_order {
let obj_path = obj_path_for_unit(&unit.name);
force_files.push(obj_path.file_name().unwrap().to_str().unwrap().to_string());
}

let mut force_active = vec![];
for symbol in obj.symbols.iter() {
if symbol.flags.is_force_active() && symbol.flags.is_global() {
force_active.push(symbol.name.clone());
}
}

let mut out = include_str!("../../assets/ldscript_partial.lcf")
.replace("$SECTIONS", &section_defs)
.replace("$FORCEACTIVE", &force_active.join("\n "));
out = if auto_force_files {
out.replace("$FORCEFILES", &force_files.join("\n "))
} else {
out.replace("$FORCEFILES", "")
};
Ok(out)
}
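
The contents of assets/ldscript_partial.lcf are not shown in this diff, so here is a hypothetical miniature of the same $PLACEHOLDER substitution approach used by generate_ldscript_partial; the template text and symbol names are made up:

// Hypothetical stand-in for the real ldscript_partial.lcf template.
let template = "SECTIONS {\n    GROUP {\n        $SECTIONS\n    }\n}\nFORCEACTIVE {\n    $FORCEACTIVE\n}";
let section_defs = [".init", ".text", ".data"]
    .iter()
    .map(|name| format!("{} :{{}}", name))
    .collect::<Vec<_>>()
    .join("\n        ");
let out = template
    .replace("$SECTIONS", &section_defs)
    .replace("$FORCEACTIVE", "_prolog\n    _epilog");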

pub fn obj_path_for_unit(unit: &str) -> PathBuf {
PathBuf::from(unit).with_extension("").with_extension("o")
PathBuf::from_slash(unit).with_extension("").with_extension("o")
}

pub fn asm_path_for_unit(unit: &str) -> PathBuf {
PathBuf::from(unit).with_extension("").with_extension("s")
PathBuf::from_slash(unit).with_extension("").with_extension("s")
}
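
Illustrative behavior of the two helpers above with a made-up unit name; from_slash keeps unit names, which presumably always use forward slashes, portable on Windows:

// The unit name is interpreted as a forward-slash path on any host OS.
let obj = obj_path_for_unit("runtime/__mem.c");
let asm = asm_path_for_unit("runtime/__mem.c");
assert_eq!(obj, PathBuf::from_slash("runtime/__mem.o"));
assert_eq!(asm, PathBuf::from_slash("runtime/__mem.s"));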

284 src/util/map.rs
@@ -1,7 +1,7 @@
#![allow(dead_code)]
#![allow(unused_mut)]
use std::{
collections::{btree_map, BTreeMap, HashMap, HashSet},
collections::{btree_map, BTreeMap, HashMap},
hash::Hash,
io::BufRead,
mem::replace,
@@ -15,10 +15,7 @@ use once_cell::sync::Lazy;
use regex::{Captures, Regex};

use crate::{
obj::{
section_kind_for_section, ObjInfo, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags,
ObjSymbolKind,
},
obj::{ObjInfo, ObjKind, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind},
util::file::{map_file, map_reader},
};

@@ -65,86 +62,6 @@ struct SectionOrder {
#[inline]
fn is_code_section(section: &str) -> bool { matches!(section, ".text" | ".init") }

/// Iterate over the BTreeMap and generate an ordered list of symbols and TUs by address.
fn resolve_section_order(
_address_to_symbol: &BTreeMap<u32, SymbolRef>,
symbol_entries: &mut HashMap<SymbolRef, SymbolEntry>,
) -> Result<SectionOrder> {
let ordering = SectionOrder::default();

// let mut last_unit = String::new();
// let mut last_section = String::new();
// let mut section_unit_idx = 0usize;
// for symbol_ref in address_to_symbol.values() {
// if let Some(symbol) = symbol_entries.get_mut(symbol_ref) {
// if last_unit != symbol.unit {
// if last_section != symbol.section {
// ordering.unit_order.push((symbol.section.clone(), vec![]));
// section_unit_idx = ordering.unit_order.len() - 1;
// last_section = symbol.section.clone();
// }
// let unit_order = &mut ordering.unit_order[section_unit_idx];
// if unit_order.1.contains(&symbol.unit) {
// // With -common on, .bss is split into two parts. The TU order repeats
// // at the end with all globally-deduplicated BSS symbols. Once we detect
// // a duplicate inside of .bss, we create a new section and start again.
// // TODO the first entry in .comm *could* be a TU without regular .bss
// if symbol.section == ".bss" {
// log::debug!(".comm section detected, duplicate {}", symbol.unit);
// ordering.unit_order.push((".comm".to_string(), vec![symbol.unit.clone()]));
// section_unit_idx = ordering.unit_order.len() - 1;
// } else {
// bail!(
// "TU order conflict: {} exists multiple times in {}.",
// symbol.unit, symbol.section,
// );
// }
// } else {
// unit_order.1.push(symbol.unit.clone());
// }
// last_unit = symbol.unit.clone();
// }
// // For ASM-generated objects, notype,local symbols in .text
// // are usually local jump labels, and should be ignored.
// if is_code_section(&symbol.section)
// && symbol.size == 0
// && symbol.kind == SymbolKind::NoType
// && symbol.visibility == SymbolVisibility::Local
// {
// // Being named something other than lbl_* could indicate
// // that it's actually a local function, but let's just
// // make the user resolve that if necessary.
// if !symbol.name.starts_with("lbl_") {
// log::warn!("Skipping local text symbol {}", symbol.name);
// }
// continue;
// }
// // Guess the symbol type if necessary.
// if symbol.kind == SymbolKind::NoType {
// if is_code_section(&symbol.section) {
// symbol.kind = SymbolKind::Function;
// } else {
// symbol.kind = SymbolKind::Object;
// }
// }
// ordering.symbol_order.push(symbol_ref.clone());
// } else {
// bail!("Symbol has address but no entry: {symbol_ref:?}");
// }
// }

for iter in ordering.symbol_order.windows(2) {
let next_address = symbol_entries.get(&iter[1]).unwrap().address;
let symbol = symbol_entries.get_mut(&iter[0]).unwrap();
// For ASM-generated objects, we need to guess the symbol size.
if symbol.size == 0 {
symbol.size = next_address - symbol.address;
}
}

Ok(ordering)
}

macro_rules! static_regex {
($name:ident, $str:expr) => {
static $name: Lazy<Regex> = Lazy::new(|| Regex::new($str).unwrap());
@@ -171,7 +88,7 @@ static_regex!(LINK_MAP_EXTERN_SYMBOL, "^\\s*>>> SYMBOL NOT FOUND: (.*)$");
static_regex!(SECTION_LAYOUT_START, "^(?P<section>.*) section layout$");
static_regex!(
SECTION_LAYOUT_SYMBOL,
"^\\s*(?P<rom_addr>[0-9A-Fa-f]+|UNUSED)\\s+(?P<size>[0-9A-Fa-f]+)\\s+(?P<addr>[0-9A-Fa-f]{8}|\\.{8})\\s+(?P<offset>[0-9A-Fa-f]{8}|\\.{8})\\s+(?P<align>\\d+)?\\s*(?P<sym>.*?)(?:\\s+\\(entry of (?P<entry_of>.*?)\\))?\\s+(?P<tu>.*)$"
"^\\s*(?P<rom_addr>[0-9A-Fa-f]+|UNUSED)\\s+(?P<size>[0-9A-Fa-f]+)\\s+(?P<addr>[0-9A-Fa-f]{8}|\\.{8})(?:\\s+(?P<offset>[0-9A-Fa-f]{8}|\\.{8}))?\\s+(?P<align>\\d+)?\\s*(?P<sym>.*?)(?:\\s+\\(entry of (?P<entry_of>.*?)\\))?\\s+(?P<tu>.*)$"
);
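
For reference, the loosened SECTION_LAYOUT_SYMBOL regex now matches section-layout rows both with and without the file-offset column; roughly like the following (illustrative values, not taken from a real map):

  00000000 000060 80003100  4 __check_pad3  __start.o
  00000000 000060 80003100 000000c0  4 __check_pad3  __start.o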
static_regex!(
SECTION_LAYOUT_HEADER,
@@ -187,11 +104,12 @@ static_regex!(MEMORY_MAP_ENTRY, "^\\s*(?P<section>\\S+)\\s+(?P<addr>[0-9A-Fa-f]+
static_regex!(LINKER_SYMBOLS_START, "^\\s*Linker generated symbols:\\s*$");
static_regex!(LINKER_SYMBOL_ENTRY, "^\\s*(?P<name>\\S+)\\s+(?P<addr>[0-9A-Fa-f]+|\\.{0,8})\\s*$");

#[derive(Debug)]
pub struct SectionInfo {
name: String,
address: u32,
size: u32,
file_offset: u32,
pub name: String,
pub address: u32,
pub size: u32,
pub file_offset: u32,
}

#[derive(Default)]
@@ -200,11 +118,7 @@ pub struct MapInfo {
pub unit_entries: MultiMap<String, SymbolRef>,
pub entry_references: MultiMap<SymbolRef, SymbolRef>,
pub entry_referenced_from: MultiMap<SymbolRef, SymbolRef>,
// pub address_to_symbol: BTreeMap<u32, SymbolRef>,
// pub unit_section_ranges: HashMap<String, HashMap<String, Range<u32>>>,
// pub symbol_order: Vec<SymbolRef>,
// pub unit_order: Vec<(String, Vec<String>)>,
pub sections: BTreeMap<u32, SectionInfo>,
pub sections: Vec<SectionInfo>,
pub link_map_symbols: HashMap<SymbolRef, SymbolEntry>,
pub section_symbols: HashMap<String, BTreeMap<u32, Vec<SymbolEntry>>>,
pub section_units: HashMap<String, Vec<(u32, String)>>,
@@ -442,34 +356,34 @@ impl StateMachine {

fn end_section_layout(mut state: SectionLayoutState, entries: &mut MapInfo) -> Result<()> {
// Resolve duplicate TUs
let mut existing = HashSet::new();
for idx in 0..state.units.len() {
let (addr, unit) = &state.units[idx];
// FIXME
if
/*state.current_section == ".bss" ||*/
existing.contains(unit) {
if
/*state.current_section == ".bss" ||*/
&state.units[idx - 1].1 != unit {
let new_name = format!("{unit}_{}_{:010X}", state.current_section, addr);
log::info!("Renaming {unit} to {new_name}");
for idx2 in 0..idx {
let (addr, n_unit) = &state.units[idx2];
if unit == n_unit {
let new_name =
format!("{n_unit}_{}_{:010X}", state.current_section, addr);
log::info!("Renaming 2 {n_unit} to {new_name}");
state.units[idx2].1 = new_name;
break;
}
}
state.units[idx].1 = new_name;
}
} else {
existing.insert(unit.clone());
}
}
// let mut existing = HashSet::new();
// for idx in 0..state.units.len() {
// let (addr, unit) = &state.units[idx];
// // FIXME
// if
// /*state.current_section == ".bss" ||*/
// existing.contains(unit) {
// if
// /*state.current_section == ".bss" ||*/
// &state.units[idx - 1].1 != unit {
// let new_name = format!("{unit}_{}_{:010X}", state.current_section, addr);
// log::info!("Renaming {unit} to {new_name}");
// for idx2 in 0..idx {
// let (addr, n_unit) = &state.units[idx2];
// if unit == n_unit {
// let new_name =
// format!("{n_unit}_{}_{:010X}", state.current_section, addr);
// log::info!("Renaming 2 {n_unit} to {new_name}");
// state.units[idx2].1 = new_name;
// break;
// }
// }
// state.units[idx].1 = new_name;
// }
// } else {
// existing.insert(unit.clone());
// }
// }
if !state.symbols.is_empty() {
entries.section_symbols.insert(state.current_section.clone(), state.symbols);
}
@@ -590,7 +504,7 @@ impl StateMachine {
let size = u32::from_str_radix(&captures["size"], 16)?;
let file_offset = u32::from_str_radix(&captures["offset"], 16)?;
// log::info!("Memory map entry: {section} {address:#010X} {size:#010X} {file_offset:#010X}");
entries.sections.insert(address, SectionInfo {
entries.sections.push(SectionInfo {
name: section.to_string(),
address,
size,
@@ -640,12 +554,7 @@ pub fn process_map<R: BufRead>(reader: R) -> Result<MapInfo> {
}
let state = replace(&mut sm.state, ProcessMapState::None);
sm.end_state(state)?;

let entries = sm.result;
// let section_order = resolve_section_order(&entries.address_to_symbol, &mut entries.symbols)?;
// entries.symbol_order = section_order.symbol_order;
// entries.unit_order = section_order.unit_order;
Ok(entries)
Ok(sm.result)
}

pub fn apply_map_file<P: AsRef<Path>>(path: P, obj: &mut ObjInfo) -> Result<()> {
@@ -655,44 +564,32 @@ pub fn apply_map_file<P: AsRef<Path>>(path: P, obj: &mut ObjInfo) -> Result<()>
}

pub fn apply_map(result: &MapInfo, obj: &mut ObjInfo) -> Result<()> {
for (_section_index, section) in obj.sections.iter_mut() {
if let Some(info) = result.sections.get(&(section.address as u32)) {
let kind = section_kind_for_section(&info.name)?;
if section.section_known {
if section.name != info.name {
log::warn!("Section mismatch: was {}, map says {}", section.name, info.name);
}
if section.kind != kind {
log::warn!(
"Section type mismatch: {} was {:?}, map says {:?}",
info.name,
section.kind,
kind
);
}
for (section_index, section) in obj.sections.iter_mut() {
log::info!("Section {}: {} ({:?})", section_index, section.name, result.sections);
let opt = if obj.kind == ObjKind::Executable {
result.sections.iter().find(|s| s.address == section.address as u32)
} else {
result.sections.iter().filter(|s| s.size > 0).nth(section_index)
};
if let Some(info) = opt {
if section.section_known && section.name != info.name {
log::warn!("Section mismatch: was {}, map says {}", section.name, info.name);
}
// if section.size != info.size as u64 {
// log::warn!(
// "Section size mismatch: {} was {:#X}, map says {:#X}",
// info.name,
// section.size,
// info.size
// );
// }
// if section.file_offset != info.file_offset as u64 {
// log::warn!(
// "Section file offset mismatch: {} was {:#X}, map says {:#X}",
// info.name,
// section.file_offset,
// info.file_offset
// );
// }
section.name = info.name.clone();
section.kind = kind;
// section.size = info.size as u64;
// section.file_offset = info.file_offset as u64;
// section.original_address = info.address as u64;
section.section_known = true;
if section.address != info.address as u64 {
log::warn!(
"Section address mismatch: was {:#010X}, map says {:#010X}",
section.address,
info.address
);
}
if section.size != info.size as u64 {
log::warn!(
"Section size mismatch: was {:#X}, map says {:#X}",
section.size,
info.size
);
}
section.rename(info.name.clone())?;
} else {
log::warn!("Section {} @ {:#010X} not found in map", section.name, section.address);
}
@@ -708,33 +605,32 @@ pub fn apply_map(result: &MapInfo, obj: &mut ObjInfo) -> Result<()> {
}
}
// Add absolute symbols
for symbol_entry in result.link_map_symbols.values().filter(|s| s.unit.is_none()) {
add_symbol(obj, symbol_entry, None)?;
}
// Add splits
let mut section_order: Vec<(String, Vec<String>)> = Vec::new();
for (section, unit_order) in &result.section_units {
let mut units = Vec::new();
let mut existing = HashSet::new();
for (_addr, unit) in unit_order {
let unit = unit.clone();
if !existing.contains(&unit) {
units.push(unit.clone());
existing.insert(unit.clone());
}
// obj.splits.nested_push(*addr, ObjSplit {
// unit,
// end: 0, // TODO?
// align: None,
// common: false, // TODO?
// autogenerated: false,
// });
}
section_order.push((section.clone(), units));
}
// TODO
// log::info!("Section order: {:#?}", section_order);
// obj.link_order = resolve_link_order(&section_order)?;
// for symbol_entry in result.link_map_symbols.values().filter(|s| s.unit.is_none()) {
// add_symbol(obj, symbol_entry, None)?;
// }
// Add splits
for (section_name, unit_order) in &result.section_units {
let (_, section) = obj
.sections
.iter_mut()
.find(|(_, s)| s.name == *section_name)
.ok_or_else(|| anyhow!("Failed to locate section '{}'", section_name))?;
let mut iter = unit_order.iter().peekable();
while let Some((addr, unit)) = iter.next() {
let next = iter
.peek()
.map(|(addr, _)| *addr)
.unwrap_or_else(|| (section.address + section.size) as u32);
section.splits.push(*addr, ObjSplit {
unit: unit.clone(),
end: next,
align: None,
common: false,
autogenerated: false,
});
}
}
Ok(())
}

@@ -760,7 +656,7 @@ fn add_symbol(obj: &mut ObjInfo, symbol_entry: &SymbolEntry, section: Option<usi
SymbolKind::Section => ObjSymbolKind::Section,
SymbolKind::NoType => ObjSymbolKind::Unknown,
},
align: None,
align: symbol_entry.align,
data_kind: Default::default(),
},
true,

@@ -144,7 +144,7 @@ pub fn apply_symbol(
align: None,
data_kind: Default::default(),
},
true,
false,
)?;
Ok(target_symbol_idx)
}

@@ -6,6 +6,7 @@ use std::{
use anyhow::{anyhow, bail, ensure, Context, Result};
use itertools::Itertools;
use petgraph::{graph::NodeIndex, Graph};
use tracing_attributes::instrument;

use crate::{
analysis::{cfa::SectionAddress, read_address, read_u32},
@@ -400,13 +401,13 @@ fn create_gap_splits(obj: &mut ObjInfo) -> Result<()> {
}

/// Ensures that all .bss splits following a common split are also marked as common.
fn update_common_splits(obj: &mut ObjInfo) -> Result<()> {
fn update_common_splits(obj: &mut ObjInfo, common_start: Option<u32>) -> Result<()> {
let Some((bss_section_index, bss_section)) = obj.sections.by_name(".bss")? else {
return Ok(());
};
let Some(common_bss_start) =
let Some(common_bss_start) = common_start.or_else(|| {
bss_section.splits.iter().find(|(_, split)| split.common).map(|(addr, _)| addr)
else {
}) else {
return Ok(());
};
log::debug!("Found common BSS start at {:#010X}", common_bss_start);
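
In other words, once the start of common BSS is known (either passed in via common_start or detected from the first split already flagged as common), every .bss split from that point on is presumably treated as common. A small illustration with made-up addresses:

// Hypothetical .bss splits as (start address, common flag).
let common_bss_start = 0x8040_0000u32;
let mut splits = vec![(0x803F_F000u32, false), (0x8040_0000, true), (0x8040_0800, false)];
for (addr, common) in splits.iter_mut() {
    if *addr >= common_bss_start {
        *common = true;
    }
}
assert_eq!(splits, vec![(0x803F_F000, false), (0x8040_0000, true), (0x8040_0800, true)]);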
@@ -434,7 +435,7 @@ fn validate_splits(obj: &ObjInfo) -> Result<()> {
split.end
);
ensure!(
split.end > 0 && split.end > addr,
split.end > 0 && split.end >= addr,
"Invalid split end {} {} {:#010X}..{:#010X}",
split.unit,
section.name,
@@ -490,7 +491,8 @@ fn validate_splits(obj: &ObjInfo) -> Result<()> {
/// - Ensuring extab & extabindex entries are split with their associated function
/// - Creating splits for gaps between existing splits
/// - Resolving a new object link order
pub fn update_splits(obj: &mut ObjInfo) -> Result<()> {
#[instrument(level = "debug", skip(obj))]
pub fn update_splits(obj: &mut ObjInfo, common_start: Option<u32>) -> Result<()> {
// Create splits for extab and extabindex entries
if let Some((section_index, section)) = obj.sections.by_name("extabindex")? {
let start = SectionAddress::new(section_index, section.address as u32);
@@ -519,7 +521,7 @@ pub fn update_splits(obj: &mut ObjInfo) -> Result<()> {
create_gap_splits(obj)?;

// Update common BSS splits
update_common_splits(obj)?;
update_common_splits(obj, common_start)?;

// Ensure splits don't overlap symbols or each other
validate_splits(obj)?;
@@ -534,6 +536,7 @@ pub fn update_splits(obj: &mut ObjInfo) -> Result<()> {
/// We can use a topological sort to determine a valid global TU order.
/// There can be ambiguities, but any solution that satisfies the link order
/// constraints is considered valid.
#[instrument(level = "debug", skip(obj))]
fn resolve_link_order(obj: &ObjInfo) -> Result<Vec<ObjUnit>> {
#[allow(dead_code)]
#[derive(Debug, Copy, Clone)]
@@ -619,10 +622,9 @@ fn resolve_link_order(obj: &ObjInfo) -> Result<Vec<ObjUnit>> {
}
}

/// Split an executable object into relocatable objects.
/// Split an object into multiple relocatable objects.
#[instrument(level = "debug", skip(obj))]
pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
ensure!(obj.kind == ObjKind::Executable, "Expected executable object");

let mut objects: Vec<ObjInfo> = vec![];
let mut object_symbols: Vec<Vec<Option<usize>>> = vec![];
let mut name_to_obj: HashMap<String, usize> = HashMap::new();
@@ -834,7 +836,19 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
// If the symbol is local, we'll upgrade the scope to global
// and rename it to avoid conflicts
if target_sym.flags.is_local() {
let address_str = format!("{:08X}", target_sym.address);
let address_str = if obj.module_id == 0 {
format!("{:08X}", target_sym.address)
} else if let Some(section_index) = target_sym.section {
let target_section = &obj.sections[section_index];
format!(
"{}_{}_{:X}",
obj.module_id,
target_section.name.trim_start_matches('.'),
target_sym.address
)
} else {
bail!("Local symbol {} has no section", target_sym.name);
};
let new_name = if target_sym.name.ends_with(&address_str) {
target_sym.name.clone()
} else {