Load objects from disc image & vfs module

Revamps support for container paths and centralizes logic into a VFS (virtual file system) module.
The new VFS architecture supports disc images and any layer of nesting.

For example, the following command works:
`dtk dol info 'Interactive Multi-Game Demo Disc - July 2002 (USA).rvz:files/zz_StarFox051702_e3.tgc:files/default.dol'`
This opens a TGC file inside an RVZ disc image, then reads `default.dol` in the FST.

Another example:
`dtk rel info 'Legend of Zelda, The - The Wind Waker (USA).rvz:files/RELS.arc:mmem/f_pc_profile_lst.rel'`
This opens a RARC archive inside an RVZ disc image, loads the Yaz0-compressed REL and
decompresses it on the fly.

This all operates in memory with minimal overhead, with no need to extract temporary files.

Supported container formats:
- Disc images (ISO/GCM, WIA/RVZ, WBFS, CISO, NFS, GCZ, TGC)
- RARC/SZS and U8 (.arc)

Supported compression formats:
- Yaz0 (SZS)
- Yay0 (SZP)
- NLZSS (.lz)

Additionally, projects can utilize a new configuration key `object_base`:
```
object: orig/GZLE01/sys/main.dol
modules:
- object: orig/GZLE01/files/RELS.arc:mmem/f_pc_profile_lst.rel
```
becomes
```
object_base: orig/GZLE01
object: sys/main.dol
modules:
- object: files/RELS.arc:mmem/f_pc_profile_lst.rel
```
When loading the objects, decomp-toolkit will automatically check the `object_base`
directory for any disc images. (They can be named anything, but must be in the folder
root.) If one is found, all objects will be fetched from the disc image itself, rather
than having to extract the files manually.

While still a work in progress, two new `vfs` commands were added: `vfs ls` and `vfs cp`.
These commands are very barebones currently, but allow listing directory contents and
extracting files from decomp-toolkit's vfs representation:
```
❯ dtk vfs ls disc.rvz:
files
sys

❯ dtk vfs ls disc.rvz:sys
boot.bin
bi2.bin
apploader.bin
fst.bin
main.dol

❯ dtk vfs cp disc.rvz:sys/main.dol .
```
This commit is contained in:
2024-10-03 21:50:35 -06:00
parent 26f52f65b7
commit f91c2a1474
32 changed files with 1789 additions and 1053 deletions

View File

@@ -10,9 +10,10 @@ use crate::{
cmd,
util::{
alf::AlfFile,
file::{buf_writer, map_file},
file::buf_writer,
reader::{Endian, FromReader},
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -60,9 +61,8 @@ pub fn run(args: Args) -> Result<()> {
fn hashes(args: HashesArgs) -> Result<()> {
let alf_file = {
let file = map_file(&args.alf_file)?;
let mut reader = file.as_reader();
AlfFile::from_reader(&mut reader, Endian::Little)?
let mut file = open_path(&args.alf_file, true)?;
AlfFile::from_reader(file.as_mut(), Endian::Little)?
};
let mut w: Box<dyn Write> = if let Some(output) = args.output {
Box::new(buf_writer(output)?)

View File

@@ -9,7 +9,10 @@ use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs;
use object::{Object, ObjectSymbol, SymbolScope};
use crate::util::file::{buf_writer, map_file, map_file_basic, process_rsp};
use crate::{
util::file::{buf_writer, process_rsp},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
/// Commands for processing static libraries.
@@ -80,8 +83,8 @@ fn create(args: CreateArgs) -> Result<()> {
Entry::Vacant(e) => e.insert(Vec::new()),
Entry::Occupied(_) => bail!("Duplicate file name '{path_str}'"),
};
let file = map_file_basic(path)?;
let obj = object::File::parse(file.as_slice())?;
let mut file = open_path(path, false)?;
let obj = object::File::parse(file.map()?)?;
for symbol in obj.symbols() {
if symbol.scope() == SymbolScope::Dynamic {
entries.push(symbol.name_bytes()?.to_vec());
@@ -126,8 +129,8 @@ fn extract(args: ExtractArgs) -> Result<()> {
println!("Extracting {} to {}", path.display(), out_dir.display());
}
let file = map_file(path)?;
let mut archive = ar::Archive::new(file.as_slice());
let mut file = open_path(path, false)?;
let mut archive = ar::Archive::new(file.map()?);
while let Some(entry) = archive.next_entry() {
let mut entry =
entry.with_context(|| format!("Processing entry in {}", path.display()))?;

View File

@@ -5,7 +5,7 @@ use std::{
ffi::OsStr,
fs,
fs::DirBuilder,
io::{Cursor, Write},
io::{Cursor, Seek, Write},
mem::take,
path::{Path, PathBuf},
time::Instant,
@@ -48,10 +48,7 @@ use crate::{
diff::{calc_diff_ranges, print_diff, process_code},
dol::process_dol,
elf::{process_elf, write_elf},
file::{
buf_reader, buf_writer, map_file, map_file_basic, touch, verify_hash, FileIterator,
FileReadInfo,
},
file::{buf_writer, touch, verify_hash, FileIterator, FileReadInfo},
lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
map::apply_map_file,
rel::{process_rel, process_rel_header, update_rel_section_alignment},
@@ -59,6 +56,7 @@ use crate::{
split::{is_linker_generated_object, split_obj, update_splits},
IntoCow, ToCow,
},
vfs::{open_fs, open_path, open_path_fs, ArchiveKind, Vfs, VfsFile},
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -236,6 +234,9 @@ pub struct ProjectConfig {
/// Marks all emitted symbols as "exported" to prevent the linker from removing them.
#[serde(default = "bool_true", skip_serializing_if = "is_true")]
pub export_all: bool,
/// Optional base path for all object files.
#[serde(default, skip_serializing_if = "is_default")]
pub object_base: Option<PathBuf>,
}
impl Default for ProjectConfig {
@@ -254,6 +255,7 @@ impl Default for ProjectConfig {
symbols_known: false,
fill_gaps: true,
export_all: true,
object_base: None,
}
}
}
@@ -483,8 +485,8 @@ fn apply_selfile(obj: &mut ObjInfo, buf: &[u8]) -> Result<()> {
pub fn info(args: InfoArgs) -> Result<()> {
let mut obj = {
let file = map_file(&args.dol_file)?;
process_dol(file.as_slice(), "")?
let mut file = open_path(&args.dol_file, true)?;
process_dol(file.map()?, "")?
};
apply_signatures(&mut obj)?;
@@ -502,8 +504,8 @@ pub fn info(args: InfoArgs) -> Result<()> {
apply_signatures_post(&mut obj)?;
if let Some(selfile) = &args.selfile {
let file = map_file(selfile)?;
apply_selfile(&mut obj, file.as_slice())?;
let mut file = open_path(selfile, true)?;
apply_selfile(&mut obj, file.map()?)?;
}
println!("{}:", obj.name);
@@ -787,16 +789,18 @@ struct AnalyzeResult {
splits_cache: Option<FileReadInfo>,
}
fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
log::debug!("Loading {}", config.base.object.display());
fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
let object_path = object_base.join(&config.base.object);
log::debug!("Loading {}", object_path.display());
let mut obj = {
let file = map_file(&config.base.object)?;
let mut file = object_base.open(&config.base.object)?;
let data = file.map()?;
if let Some(hash_str) = &config.base.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
process_dol(file.as_slice(), config.base.name().as_ref())?
process_dol(data, config.base.name().as_ref())?
};
let mut dep = vec![config.base.object.clone()];
let mut dep = vec![object_path];
if let Some(comment_version) = config.mw_comment_version {
obj.mw_comment = Some(MWComment::new(comment_version)?);
@@ -843,11 +847,12 @@ fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
if let Some(selfile) = &config.selfile {
log::info!("Loading {}", selfile.display());
let file = map_file(selfile)?;
let mut file = open_path(selfile, true)?;
let data = file.map()?;
if let Some(hash) = &config.selfile_hash {
verify_hash(file.as_slice(), hash)?;
verify_hash(data, hash)?;
}
apply_selfile(&mut obj, file.as_slice())?;
apply_selfile(&mut obj, data)?;
dep.push(selfile.clone());
}
@@ -1004,12 +1009,11 @@ fn split_write_obj(
fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
if path.is_file() {
let old_file = map_file_basic(path)?;
let mut old_file = open_path(path, true)?;
let old_data = old_file.map()?;
// If the file is the same size, check if the contents are the same
// Avoid writing if unchanged, since it will update the file's mtime
if old_file.len() == contents.len() as u64
&& xxh3_64(old_file.as_slice()) == xxh3_64(contents)
{
if old_data.len() == contents.len() && xxh3_64(old_data) == xxh3_64(contents) {
return Ok(());
}
}
@@ -1018,20 +1022,26 @@ fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
Ok(())
}
fn load_analyze_rel(config: &ProjectConfig, module_config: &ModuleConfig) -> Result<AnalyzeResult> {
debug!("Loading {}", module_config.object.display());
let file = map_file(&module_config.object)?;
fn load_analyze_rel(
config: &ProjectConfig,
object_base: &ObjectBase,
module_config: &ModuleConfig,
) -> Result<AnalyzeResult> {
let object_path = object_base.join(&module_config.object);
debug!("Loading {}", object_path.display());
let mut file = object_base.open(&module_config.object)?;
let data = file.map()?;
if let Some(hash_str) = &module_config.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
let (header, mut module_obj) =
process_rel(&mut Cursor::new(file.as_slice()), module_config.name().as_ref())?;
process_rel(&mut Cursor::new(data), module_config.name().as_ref())?;
if let Some(comment_version) = config.mw_comment_version {
module_obj.mw_comment = Some(MWComment::new(comment_version)?);
}
let mut dep = vec![module_config.object.clone()];
let mut dep = vec![object_path];
if let Some(map_path) = &module_config.map {
apply_map_file(map_path, &mut module_obj, None, None)?;
dep.push(map_path.clone());
@@ -1082,22 +1092,24 @@ fn load_analyze_rel(config: &ProjectConfig, module_config: &ModuleConfig) -> Res
fn split(args: SplitArgs) -> Result<()> {
if let Some(jobs) = args.jobs {
rayon::ThreadPoolBuilder::new().num_threads(jobs).build_global().unwrap();
rayon::ThreadPoolBuilder::new().num_threads(jobs).build_global()?;
}
let command_start = Instant::now();
info!("Loading {}", args.config.display());
let mut config: ProjectConfig = {
let mut config_file = buf_reader(&args.config)?;
serde_yaml::from_reader(&mut config_file)?
let mut config_file = open_path(&args.config, true)?;
serde_yaml::from_reader(config_file.as_mut())?
};
let object_base = find_object_base(&config)?;
for module_config in config.modules.iter_mut() {
let file = map_file(&module_config.object)?;
let mut file = object_base.open(&module_config.object)?;
let mut data = file.map()?;
if let Some(hash_str) = &module_config.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
} else {
module_config.hash = Some(file_sha1_string(&mut file.as_reader())?);
module_config.hash = Some(file_sha1_string(&mut data)?);
}
}
@@ -1121,7 +1133,7 @@ fn split(args: SplitArgs) -> Result<()> {
s.spawn(|_| {
let _span = info_span!("module", name = %config.base.name()).entered();
dol_result =
Some(load_analyze_dol(&config).with_context(|| {
Some(load_analyze_dol(&config, &object_base).with_context(|| {
format!("While loading object '{}'", config.base.file_name())
}));
});
@@ -1133,7 +1145,7 @@ fn split(args: SplitArgs) -> Result<()> {
.par_iter()
.map(|module_config| {
let _span = info_span!("module", name = %module_config.name()).entered();
load_analyze_rel(&config, module_config).with_context(|| {
load_analyze_rel(&config, &object_base, module_config).with_context(|| {
format!("While loading object '{}'", module_config.file_name())
})
})
@@ -1538,16 +1550,18 @@ fn symbol_name_fuzzy_eq(a: &ObjSymbol, b: &ObjSymbol) -> bool {
fn diff(args: DiffArgs) -> Result<()> {
log::info!("Loading {}", args.config.display());
let mut config_file = buf_reader(&args.config)?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;
let mut config_file = open_path(&args.config, true)?;
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
let object_base = find_object_base(&config)?;
log::info!("Loading {}", config.base.object.display());
log::info!("Loading {}", object_base.join(&config.base.object).display());
let mut obj = {
let file = map_file(&config.base.object)?;
let mut file = object_base.open(&config.base.object)?;
let data = file.map()?;
if let Some(hash_str) = &config.base.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
process_dol(file.as_slice(), config.base.name().as_ref())?
process_dol(data, config.base.name().as_ref())?
};
if let Some(symbols_path) = &config.base.symbols {
@@ -1717,16 +1731,18 @@ fn diff(args: DiffArgs) -> Result<()> {
fn apply(args: ApplyArgs) -> Result<()> {
log::info!("Loading {}", args.config.display());
let mut config_file = buf_reader(&args.config)?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;
let mut config_file = open_path(&args.config, true)?;
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
let object_base = find_object_base(&config)?;
log::info!("Loading {}", config.base.object.display());
log::info!("Loading {}", object_base.join(&config.base.object).display());
let mut obj = {
let file = map_file(&config.base.object)?;
let mut file = object_base.open(&config.base.object)?;
let data = file.map()?;
if let Some(hash_str) = &config.base.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
process_dol(file.as_slice(), config.base.name().as_ref())?
process_dol(data, config.base.name().as_ref())?
};
let Some(symbols_path) = &config.base.symbols else {
@@ -1881,30 +1897,31 @@ fn config(args: ConfigArgs) -> Result<()> {
let mut config = ProjectConfig::default();
let mut modules = Vec::<(u32, ModuleConfig)>::new();
for result in FileIterator::new(&args.objects)? {
let (path, entry) = result?;
let (path, mut entry) = result?;
log::info!("Loading {}", path.display());
match path.extension() {
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("dol")) => {
config.base.object = path;
config.base.hash = Some(file_sha1_string(&mut entry.as_reader())?);
config.base.hash = Some(file_sha1_string(&mut entry)?);
}
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rel")) => {
let header = process_rel_header(&mut entry.as_reader())?;
let header = process_rel_header(&mut entry)?;
entry.rewind()?;
modules.push((header.module_id, ModuleConfig {
object: path,
hash: Some(file_sha1_string(&mut entry.as_reader())?),
hash: Some(file_sha1_string(&mut entry)?),
..Default::default()
}));
}
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("sel")) => {
config.selfile = Some(path);
config.selfile_hash = Some(file_sha1_string(&mut entry.as_reader())?);
config.selfile_hash = Some(file_sha1_string(&mut entry)?);
}
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rso")) => {
config.modules.push(ModuleConfig {
object: path,
hash: Some(file_sha1_string(&mut entry.as_reader())?),
hash: Some(file_sha1_string(&mut entry)?),
..Default::default()
});
}
@@ -1975,3 +1992,52 @@ fn apply_add_relocations(obj: &mut ObjInfo, relocations: &[AddRelocationConfig])
}
Ok(())
}
pub enum ObjectBase {
None,
Directory(PathBuf),
Vfs(PathBuf, Box<dyn Vfs + Send + Sync>),
}
impl ObjectBase {
pub fn join(&self, path: &Path) -> PathBuf {
match self {
ObjectBase::None => path.to_path_buf(),
ObjectBase::Directory(base) => base.join(path),
ObjectBase::Vfs(base, _) => {
PathBuf::from(format!("{}:{}", base.display(), path.display()))
}
}
}
pub fn open(&self, path: &Path) -> Result<Box<dyn VfsFile>> {
match self {
ObjectBase::None => open_path(path, true),
ObjectBase::Directory(base) => open_path(&base.join(path), true),
ObjectBase::Vfs(vfs_path, vfs) => open_path_fs(vfs.clone(), path, true)
.with_context(|| format!("Using disc image {}", vfs_path.display())),
}
}
}
pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
if let Some(base) = &config.object_base {
// Search for disc images in the object base directory
for result in base.read_dir()? {
let entry = result?;
if entry.file_type()?.is_file() {
let path = entry.path();
let mut file = open_path(&path, false)?;
let format = nodtool::nod::Disc::detect(file.as_mut())?;
if format.is_some() {
file.rewind()?;
log::info!("Using disc image {}", path.display());
let fs = open_fs(file, ArchiveKind::Disc)?;
return Ok(ObjectBase::Vfs(path, fs));
}
}
}
return Ok(ObjectBase::Directory(base.clone()));
}
Ok(ObjectBase::None)
}

View File

@@ -16,12 +16,15 @@ use syntect::{
parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet},
};
use crate::util::{
dwarf::{
process_compile_unit, process_cu_tag, process_overlay_branch, read_debug_section,
should_skip_tag, tag_type_string, AttributeKind, TagKind,
use crate::{
util::{
dwarf::{
process_compile_unit, process_cu_tag, process_overlay_branch, read_debug_section,
should_skip_tag, tag_type_string, AttributeKind, TagKind,
},
file::buf_writer,
},
file::{buf_writer, map_file},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -73,8 +76,8 @@ fn dump(args: DumpArgs) -> Result<()> {
let theme = theme_set.themes.get("Solarized (dark)").context("Failed to load theme")?;
let syntax = syntax_set.find_syntax_by_name("C++").context("Failed to find syntax")?.clone();
let file = map_file(&args.in_file)?;
let buf = file.as_slice();
let mut file = open_path(&args.in_file, true)?;
let buf = file.map()?;
if buf.starts_with(b"!<arch>\n") {
let mut archive = ar::Archive::new(buf);
while let Some(result) = archive.next_entry() {

View File

@@ -7,7 +7,7 @@ use anyhow::{anyhow, bail, ensure, Result};
use argp::FromArgs;
use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, SectionKind};
use crate::util::file::{buf_writer, map_file};
use crate::{util::file::buf_writer, vfs::open_path};
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Converts an ELF file to a DOL file.
@@ -46,8 +46,8 @@ const MAX_TEXT_SECTIONS: usize = 7;
const MAX_DATA_SECTIONS: usize = 11;
pub fn run(args: Args) -> Result<()> {
let file = map_file(&args.elf_file)?;
let obj_file = object::read::File::parse(file.as_slice())?;
let mut file = open_path(&args.elf_file, true)?;
let obj_file = object::read::File::parse(file.map()?)?;
match obj_file.architecture() {
Architecture::PowerPc => {}
arch => bail!("Unexpected architecture: {arch:?}"),

View File

@@ -5,11 +5,13 @@ use argp::FromArgs;
use cwdemangle::{demangle, DemangleOptions};
use tracing::error;
use crate::util::{
config::{write_splits_file, write_symbols_file},
file::map_file,
map::{create_obj, process_map, SymbolEntry, SymbolRef},
split::update_splits,
use crate::{
util::{
config::{write_splits_file, write_symbols_file},
map::{create_obj, process_map, SymbolEntry, SymbolRef},
split::update_splits,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -73,8 +75,8 @@ pub fn run(args: Args) -> Result<()> {
}
fn entries(args: EntriesArgs) -> Result<()> {
let file = map_file(&args.map_file)?;
let entries = process_map(&mut file.as_reader(), None, None)?;
let mut file = open_path(&args.map_file, true)?;
let entries = process_map(file.as_mut(), None, None)?;
match entries.unit_entries.get_vec(&args.unit) {
Some(vec) => {
println!("Entries for {}:", args.unit);
@@ -104,9 +106,9 @@ fn entries(args: EntriesArgs) -> Result<()> {
}
fn symbol(args: SymbolArgs) -> Result<()> {
let file = map_file(&args.map_file)?;
let mut file = open_path(&args.map_file, true)?;
log::info!("Processing map...");
let entries = process_map(&mut file.as_reader(), None, None)?;
let entries = process_map(file.as_mut(), None, None)?;
log::info!("Done!");
let mut opt_ref: Option<(String, SymbolEntry)> = None;
@@ -179,9 +181,9 @@ fn symbol(args: SymbolArgs) -> Result<()> {
}
fn config(args: ConfigArgs) -> Result<()> {
let file = map_file(&args.map_file)?;
let mut file = open_path(&args.map_file, true)?;
log::info!("Processing map...");
let entries = process_map(&mut file.as_reader(), None, None)?;
let entries = process_map(file.as_mut(), None, None)?;
let mut obj = create_obj(&entries)?;
if let Err(e) = update_splits(&mut obj, None, false) {
error!("Failed to update splits: {}", e)

View File

@@ -14,5 +14,6 @@ pub mod rel;
pub mod rso;
pub mod shasum;
pub mod u8_arc;
pub mod vfs;
pub mod yay0;
pub mod yaz0;

View File

@@ -3,9 +3,9 @@ use std::{fs, path::PathBuf};
use anyhow::{anyhow, Context, Result};
use argp::FromArgs;
use crate::util::{
file::{open_file, process_rsp},
IntoCow, ToCow,
use crate::{
util::{file::process_rsp, IntoCow, ToCow},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -45,7 +45,8 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let files = process_rsp(&args.files)?;
let single_file = files.len() == 1;
for path in files {
let data = nintendo_lz::decompress(&mut open_file(&path)?)
let mut file = open_path(&path, false)?;
let data = nintendo_lz::decompress(&mut file)
.map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path.display(), e))?;
let out_path = if let Some(output) = &args.output {
if single_file {

View File

@@ -1,11 +1,11 @@
use std::{fs, fs::DirBuilder, path::PathBuf};
use std::path::PathBuf;
use anyhow::{Context, Result};
use anyhow::{anyhow, Result};
use argp::FromArgs;
use crate::util::{
file::{decompress_if_needed, map_file},
rarc::{Node, RarcReader},
use crate::{
util::rarc::{RarcNodeKind, RarcView},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -55,71 +55,42 @@ pub fn run(args: Args) -> Result<()> {
}
fn list(args: ListArgs) -> Result<()> {
let file = map_file(&args.file)?;
let rarc = RarcReader::new(&mut file.as_reader())
.with_context(|| format!("Failed to process RARC file '{}'", args.file.display()))?;
let mut current_path = PathBuf::new();
for node in rarc.nodes() {
match node {
Node::DirectoryBegin { name } => {
current_path.push(name.name);
}
Node::DirectoryEnd { name: _ } => {
current_path.pop();
}
Node::File { name, offset, size } => {
let path = current_path.join(name.name);
println!("{}: {} bytes, offset {:#X}", path.display(), size, offset);
}
Node::CurrentDirectory => {}
Node::ParentDirectory => {}
}
}
let mut file = open_path(&args.file, true)?;
let view = RarcView::new(file.map()?).map_err(|e| anyhow!(e))?;
test(&view, "")?;
test(&view, "/")?;
test(&view, "//")?;
test(&view, "/rels")?;
test(&view, "/rels/")?;
test(&view, "/rels/amem")?;
test(&view, "/rels/amem/")?;
test(&view, "/rels/mmem")?;
test(&view, "/rels/mmem/../mmem")?;
test(&view, "/rels/amem/d_a_am.rel")?;
test(&view, "//amem/d_a_am.rel")?;
test(&view, "amem/d_a_am.rel")?;
test(&view, "amem/d_a_am.rel/")?;
test(&view, "mmem/d_a_obj_pirateship.rel")?;
test(&view, "mmem//d_a_obj_pirateship.rel")?;
test(&view, "mmem/da_obj_pirateship.rel")?;
Ok(())
}
fn extract(args: ExtractArgs) -> Result<()> {
let file = map_file(&args.file)?;
let rarc = RarcReader::new(&mut file.as_reader())
.with_context(|| format!("Failed to process RARC file '{}'", args.file.display()))?;
let mut current_path = PathBuf::new();
for node in rarc.nodes() {
match node {
Node::DirectoryBegin { name } => {
current_path.push(name.name);
}
Node::DirectoryEnd { name: _ } => {
current_path.pop();
}
Node::File { name, offset, size } => {
let file_data = decompress_if_needed(
&file.as_slice()[offset as usize..offset as usize + size as usize],
)?;
let file_path = current_path.join(&name.name);
let output_path = args
.output
.as_ref()
.map(|p| p.join(&file_path))
.unwrap_or_else(|| file_path.clone());
if !args.quiet {
println!(
"Extracting {} to {} ({} bytes)",
file_path.display(),
output_path.display(),
size
);
}
if let Some(parent) = output_path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
fs::write(&output_path, file_data)
.with_context(|| format!("Failed to write file '{}'", output_path.display()))?;
}
Node::CurrentDirectory => {}
Node::ParentDirectory => {}
}
}
fn test(view: &RarcView, path: &str) -> Result<()> {
let option = view.find(path);
let data = if let Some(RarcNodeKind::File(_, node)) = option {
view.get_data(node).map_err(|e| anyhow!(e))?
} else {
&[]
};
let vec = data.iter().cloned().take(4).collect::<Vec<_>>();
println!("{:?}: {:?} (len: {:?})", path, option, vec.as_slice());
// if let Some(RarcNodeKind::Directory(_, dir)) = option {
// for node in view.children(dir) {
// println!("Child: {:?} ({:?})", node, view.get_string(node.name_offset()));
// }
// }
Ok(())
}
fn extract(_args: ExtractArgs) -> Result<()> { todo!() }

View File

@@ -1,7 +1,7 @@
use std::{
collections::{btree_map, BTreeMap},
fs,
io::Write,
io::{Cursor, Write},
path::PathBuf,
time::Instant,
};
@@ -27,13 +27,13 @@ use crate::{
tracker::Tracker,
},
array_ref_mut,
cmd::dol::{ModuleConfig, ProjectConfig},
cmd::dol::{find_object_base, ModuleConfig, ObjectBase, ProjectConfig},
obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol},
util::{
config::{is_auto_symbol, read_splits_sections, SectionDef},
dol::process_dol,
elf::{to_obj_reloc_kind, write_elf},
file::{buf_reader, buf_writer, map_file, process_rsp, verify_hash, FileIterator},
file::{buf_writer, process_rsp, verify_hash, FileIterator},
nested::NestedMap,
rel::{
print_relocations, process_rel, process_rel_header, process_rel_sections, write_rel,
@@ -41,6 +41,7 @@ use crate::{
},
IntoCow, ToCow,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -162,12 +163,13 @@ fn match_section_index(
// })
}
fn load_rel(module_config: &ModuleConfig) -> Result<RelInfo> {
let file = map_file(&module_config.object)?;
fn load_rel(module_config: &ModuleConfig, object_base: &ObjectBase) -> Result<RelInfo> {
let mut file = object_base.open(&module_config.object)?;
let data = file.map()?;
if let Some(hash_str) = &module_config.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
let mut reader = file.as_reader();
let mut reader = Cursor::new(data);
let header = process_rel_header(&mut reader)?;
let sections = process_rel_sections(&mut reader, &header)?;
let section_defs = if let Some(splits_path) = &module_config.splits {
@@ -261,15 +263,19 @@ fn make(args: MakeArgs) -> Result<()> {
let mut existing_headers = BTreeMap::<u32, RelInfo>::new();
let mut name_to_module_id = FxHashMap::<String, u32>::default();
if let Some(config_path) = &args.config {
let config: ProjectConfig = serde_yaml::from_reader(&mut buf_reader(config_path)?)?;
let config: ProjectConfig = {
let mut file = open_path(config_path, true)?;
serde_yaml::from_reader(file.as_mut())?
};
let object_base = find_object_base(&config)?;
for module_config in &config.modules {
let module_name = module_config.name();
if !args.names.is_empty() && !args.names.iter().any(|n| n == &module_name) {
continue;
}
let _span = info_span!("module", name = %module_name).entered();
let info = load_rel(module_config).with_context(|| {
format!("While loading REL '{}'", module_config.object.display())
let info = load_rel(module_config, &object_base).with_context(|| {
format!("While loading REL '{}'", object_base.join(&module_config.object).display())
})?;
name_to_module_id.insert(module_name.to_string(), info.0.module_id);
match existing_headers.entry(info.0.module_id) {
@@ -287,9 +293,9 @@ fn make(args: MakeArgs) -> Result<()> {
}
// Load all modules
let files = paths.iter().map(map_file).collect::<Result<Vec<_>>>()?;
let mut files = paths.iter().map(|p| open_path(p, true)).collect::<Result<Vec<_>>>()?;
let modules = files
.par_iter()
.par_iter_mut()
.enumerate()
.zip(&paths)
.map(|((idx, file), path)| {
@@ -301,7 +307,7 @@ fn make(args: MakeArgs) -> Result<()> {
.and_then(|n| name_to_module_id.get(n))
.copied()
.unwrap_or(idx as u32);
load_obj(file.as_slice())
load_obj(file.map()?)
.map(|o| LoadedModule { module_id, file: o, path: path.clone() })
.with_context(|| format!("Failed to load '{}'", path.display()))
})
@@ -399,8 +405,8 @@ fn make(args: MakeArgs) -> Result<()> {
}
fn info(args: InfoArgs) -> Result<()> {
let file = map_file(args.rel_file)?;
let (header, mut module_obj) = process_rel(&mut file.as_reader(), "")?;
let mut file = open_path(&args.rel_file, true)?;
let (header, mut module_obj) = process_rel(file.as_mut(), "")?;
let mut state = AnalyzerState::default();
state.detect_functions(&module_obj)?;
@@ -458,7 +464,7 @@ fn info(args: InfoArgs) -> Result<()> {
if args.relocations {
println!("\nRelocations:");
println!(" [Source] section:address RelocType -> [Target] module:section:address");
print_relocations(&mut file.as_reader(), &header)?;
print_relocations(file.as_mut(), &header)?;
}
Ok(())
}
@@ -469,9 +475,9 @@ const fn align32(x: u32) -> u32 { (x + 31) & !31 }
fn merge(args: MergeArgs) -> Result<()> {
log::info!("Loading {}", args.dol_file.display());
let mut obj = {
let file = map_file(&args.dol_file)?;
let mut file = open_path(&args.dol_file, true)?;
let name = args.dol_file.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
process_dol(file.as_slice(), name.as_ref())?
process_dol(file.map()?, name.as_ref())?
};
log::info!("Performing signature analysis");
@@ -481,10 +487,10 @@ fn merge(args: MergeArgs) -> Result<()> {
let mut processed = 0;
let mut module_map = BTreeMap::<u32, ObjInfo>::new();
for result in FileIterator::new(&args.rel_files)? {
let (path, entry) = result?;
let (path, mut entry) = result?;
log::info!("Loading {}", path.display());
let name = path.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
let (_, obj) = process_rel(&mut entry.as_reader(), name.as_ref())?;
let (_, obj) = process_rel(&mut entry, name.as_ref())?;
match module_map.entry(obj.module_id) {
btree_map::Entry::Vacant(e) => e.insert(obj),
btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),

View File

@@ -11,13 +11,16 @@ use object::{
SymbolIndex, SymbolKind, SymbolSection,
};
use crate::util::{
file::{buf_reader, buf_writer, map_file},
reader::{Endian, ToWriter},
rso::{
process_rso, symbol_hash, RsoHeader, RsoRelocation, RsoSectionHeader, RsoSymbol,
RSO_SECTION_NAMES,
use crate::{
util::{
file::buf_writer,
reader::{Endian, ToWriter},
rso::{
process_rso, symbol_hash, RsoHeader, RsoRelocation, RsoSectionHeader, RsoSymbol,
RSO_SECTION_NAMES,
},
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -74,8 +77,8 @@ pub fn run(args: Args) -> Result<()> {
fn info(args: InfoArgs) -> Result<()> {
let rso = {
let file = map_file(args.rso_file)?;
let obj = process_rso(&mut file.as_reader())?;
let mut file = open_path(&args.rso_file, true)?;
let obj = process_rso(file.as_mut())?;
#[allow(clippy::let_and_return)]
obj
};
@@ -84,8 +87,8 @@ fn info(args: InfoArgs) -> Result<()> {
}
fn make(args: MakeArgs) -> Result<()> {
let file = map_file(&args.input)?;
let obj_file = object::read::File::parse(file.as_slice())?;
let mut file = open_path(&args.input, true)?;
let obj_file = object::read::File::parse(file.map()?)?;
match obj_file.architecture() {
Architecture::PowerPc => {}
arch => bail!("Unexpected architecture: {arch:?}"),
@@ -97,9 +100,9 @@ fn make(args: MakeArgs) -> Result<()> {
None => args.input.display().to_string(),
};
let symbols_to_export = match args.export {
let symbols_to_export = match &args.export {
Some(export_file_path) => {
let export_file_reader = buf_reader(export_file_path)?;
let export_file_reader = open_path(export_file_path, true)?;
export_file_reader.lines().map_while(Result::ok).collect()
}
None => vec![],

View File

@@ -1,6 +1,6 @@
use std::{
fs::File,
io::{stdout, BufRead, BufReader, Read, Write},
io::{stdout, BufRead, Read, Write},
path::{Path, PathBuf},
};
@@ -10,7 +10,10 @@ use owo_colors::{OwoColorize, Stream};
use path_slash::PathExt;
use sha1::{Digest, Sha1};
use crate::util::file::{buf_writer, open_file, process_rsp, touch};
use crate::{
util::file::{buf_writer, process_rsp, touch},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Print or check SHA1 (160-bit) checksums.
@@ -36,8 +39,8 @@ const DEFAULT_BUF_SIZE: usize = 8192;
pub fn run(args: Args) -> Result<()> {
if args.check {
for path in process_rsp(&args.files)? {
let file = open_file(&path)?;
check(&args, &mut BufReader::new(file))?;
let mut file = open_path(&path, false)?;
check(&args, file.as_mut())?;
}
if let Some(out_path) = &args.output {
touch(out_path)
@@ -53,8 +56,8 @@ pub fn run(args: Args) -> Result<()> {
Box::new(stdout())
};
for path in process_rsp(&args.files)? {
let mut file = open_file(&path)?;
hash(w.as_mut(), &mut file, &path)?
let mut file = open_path(&path, false)?;
hash(w.as_mut(), file.as_mut(), &path)?
}
}
Ok(())

View File

@@ -4,9 +4,12 @@ use anyhow::{anyhow, Context, Result};
use argp::FromArgs;
use itertools::Itertools;
use crate::util::{
file::{decompress_if_needed, map_file},
u8_arc::{U8Node, U8View},
use crate::{
util::{
file::decompress_if_needed,
u8_arc::{U8Node, U8View},
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -56,8 +59,8 @@ pub fn run(args: Args) -> Result<()> {
}
fn list(args: ListArgs) -> Result<()> {
let file = map_file(&args.file)?;
let view = U8View::new(file.as_slice())
let mut file = open_path(&args.file, true)?;
let view = U8View::new(file.map()?)
.map_err(|e| anyhow!("Failed to open U8 file '{}': {}", args.file.display(), e))?;
visit_files(&view, |_, node, path| {
println!("{}: {} bytes, offset {:#X}", path, node.length(), node.offset());
@@ -66,15 +69,15 @@ fn list(args: ListArgs) -> Result<()> {
}
fn extract(args: ExtractArgs) -> Result<()> {
let file = map_file(&args.file)?;
let view = U8View::new(file.as_slice())
let mut file = open_path(&args.file, true)?;
let data = file.map()?;
let view = U8View::new(data)
.map_err(|e| anyhow!("Failed to open U8 file '{}': {}", args.file.display(), e))?;
visit_files(&view, |_, node, path| {
let offset = node.offset();
let size = node.length();
let file_data = decompress_if_needed(
&file.as_slice()[offset as usize..offset as usize + size as usize],
)?;
let file_data =
decompress_if_needed(&data[offset as usize..offset as usize + size as usize])?;
let output_path = args
.output
.as_ref()
@@ -94,7 +97,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
fn visit_files(
view: &U8View,
mut visitor: impl FnMut(usize, &U8Node, String) -> Result<()>,
mut visitor: impl FnMut(usize, U8Node, String) -> Result<()>,
) -> Result<()> {
let mut path_segments = Vec::<(Cow<str>, usize)>::new();
for (idx, node, name) in view.iter() {

121
src/cmd/vfs.rs Normal file
View File

@@ -0,0 +1,121 @@
use std::{fs::File, io, io::Write, path::PathBuf};
use anyhow::{anyhow, bail};
use argp::FromArgs;
use nodtool::nod::ResultContext;
use crate::vfs::{decompress_file, detect, open_fs, FileFormat, StdFs, Vfs, VfsFileType};
#[derive(FromArgs, PartialEq, Debug)]
/// Commands for interacting with discs and containers.
#[argp(subcommand, name = "vfs")]
pub struct Args {
    // NOTE: `///` comments on argp items become user-visible help text,
    // so only `//` comments are added here.
    #[argp(subcommand)]
    command: SubCommand,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argp(subcommand)]
enum SubCommand {
    // One variant per `dtk vfs` subcommand; argp dispatches on the
    // `name = "..."` attribute of each variant's args struct.
    Ls(LsArgs),
    Cp(CpArgs),
}
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// List files in a directory or container.
#[argp(subcommand, name = "ls")]
pub struct LsArgs {
    #[argp(positional)]
    /// Path to the container.
    // May include `:`-separated segments to descend into nested containers,
    // e.g. `disc.rvz:files/RELS.arc:mmem` (see `find` in this module).
    path: PathBuf,
}
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Copy files from a container.
#[argp(subcommand, name = "cp")]
pub struct CpArgs {
    #[argp(positional)]
    /// Source path(s) and destination path.
    // The last element is the destination; all preceding elements are
    // sources, which may use `:`-separated container syntax.
    paths: Vec<PathBuf>,
}
pub fn run(args: Args) -> anyhow::Result<()> {
match args.command {
SubCommand::Ls(args) => ls(args),
SubCommand::Cp(args) => cp(args),
}
}
/// Resolves a `:`-separated container path, e.g. `disc.rvz:files/RELS.arc:mmem`.
///
/// The first segment is a path on the host filesystem; each subsequent segment
/// is a path inside the container named by the previous segment. Returns the
/// innermost filesystem along with the remaining path within it.
fn find(path: &str) -> anyhow::Result<(Box<dyn Vfs>, &str)> {
    let mut segments = path.split(':');
    // `split` always yields at least one item, so this cannot panic.
    let mut current = segments.next().unwrap();
    let mut fs: Box<dyn Vfs> = Box::new(StdFs);
    for segment in segments {
        let mut file = fs.open(current)?;
        if let FileFormat::Archive(kind) = detect(file.as_mut())? {
            // Descend into the container and continue with the next segment.
            fs = open_fs(file, kind)?;
            current = segment;
        } else {
            bail!("'{}' is not a container", current);
        }
    }
    Ok((fs, current))
}
/// Lists the entries of a directory or container root.
///
/// If the resolved path names a file, the file must itself be a container
/// (disc image or archive); in that case its root directory is listed.
fn ls(args: LsArgs) -> anyhow::Result<()> {
    let path_str = args.path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
    let (mut fs, mut path) = find(path_str)?;
    if fs.metadata(path)?.is_file() {
        // A trailing file segment must be a container; open it as a
        // filesystem and list from its root.
        let mut file = fs.open(path)?;
        match detect(file.as_mut())? {
            FileFormat::Archive(kind) => {
                fs = open_fs(file, kind)?;
                path = "";
            }
            _ => bail!("'{}' is not a directory", path),
        }
    }
    for entry in fs.read_dir(path)? {
        println!("{}", entry);
    }
    Ok(())
}
/// Copies one or more VFS files to a destination on the host filesystem.
///
/// Compressed sources (as reported by `detect`) are decompressed on the fly.
/// With multiple sources, or when the destination is an existing directory,
/// each file is written into the destination under its own name.
fn cp(mut args: CpArgs) -> anyhow::Result<()> {
    if args.paths.len() < 2 {
        bail!("Both source and destination paths must be provided");
    }
    // Length was checked above, so pop cannot fail.
    let dest = args.paths.pop().unwrap();
    let dest_is_dir = args.paths.len() > 1 || dest.metadata().ok().is_some_and(|m| m.is_dir());
    for source in args.paths {
        let source_str = source.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
        let (mut fs, vfs_path) = find(source_str)?;
        let metadata = fs.metadata(vfs_path)?;
        match metadata.file_type {
            VfsFileType::Directory => bail!("Cannot copy directory"),
            VfsFileType::File => {
                let mut file = fs.open(vfs_path)?;
                // Transparently decompress compressed sources.
                if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
                    file = decompress_file(file, kind)?;
                }
                let target = if dest_is_dir {
                    // Keep the source's file name inside the destination directory.
                    // `rsplit` always yields at least one item, so this cannot panic.
                    let name = vfs_path.rsplit('/').next().unwrap();
                    dest.join(name)
                } else {
                    dest.clone()
                };
                let mut out_file = File::create(&target)
                    .with_context(|| format!("Failed to create file {}", target.display()))?;
                io::copy(file.as_mut(), &mut out_file)
                    .with_context(|| format!("Failed to write file {}", target.display()))?;
                out_file
                    .flush()
                    .with_context(|| format!("Failed to flush file {}", target.display()))?;
            }
        }
    }
    Ok(())
}

View File

@@ -3,10 +3,13 @@ use std::{fs, path::PathBuf};
use anyhow::{Context, Result};
use argp::FromArgs;
use crate::util::{
file::{map_file_basic, process_rsp},
ncompress::{compress_yay0, decompress_yay0},
IntoCow, ToCow,
use crate::{
util::{
file::process_rsp,
ncompress::{compress_yay0, decompress_yay0},
IntoCow, ToCow,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -62,8 +65,8 @@ fn compress(args: CompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
compress_yay0(file.as_slice())
let mut file = open_path(&path, false)?;
compress_yay0(file.map()?)
};
let out_path = if let Some(output) = &args.output {
if single_file {
@@ -85,8 +88,8 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
decompress_yay0(file.as_slice())
let mut file = open_path(&path, true)?;
decompress_yay0(file.map()?)
.with_context(|| format!("Failed to decompress '{}' using Yay0", path.display()))?
};
let out_path = if let Some(output) = &args.output {

View File

@@ -3,10 +3,13 @@ use std::{fs, path::PathBuf};
use anyhow::{Context, Result};
use argp::FromArgs;
use crate::util::{
file::{map_file_basic, process_rsp},
ncompress::{compress_yaz0, decompress_yaz0},
IntoCow, ToCow,
use crate::{
util::{
file::process_rsp,
ncompress::{compress_yaz0, decompress_yaz0},
IntoCow, ToCow,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@@ -62,8 +65,8 @@ fn compress(args: CompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
compress_yaz0(file.as_slice())
let mut file = open_path(&path, false)?;
compress_yaz0(file.map()?)
};
let out_path = if let Some(output) = &args.output {
if single_file {
@@ -85,8 +88,8 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
decompress_yaz0(file.as_slice())
let mut file = open_path(&path, false)?;
decompress_yaz0(file.map()?)
.with_context(|| format!("Failed to decompress '{}' using Yaz0", path.display()))?
};
let out_path = if let Some(output) = &args.output {