Load objects from disc image & `vfs` module

Revamps support for container paths and centralizes the logic into a VFS (virtual filesystem) module.
The new VFS architecture supports disc images and arbitrary levels of nesting.

For example, the following command works:
`dtk dol info 'Interactive Multi-Game Demo Disc - July 2002 (USA).rvz:files/zz_StarFox051702_e3.tgc:files/default.dol'`
This opens a TGC file inside an RVZ disc image, then reads `default.dol` from its FST.

Another example:
`dtk rel info 'Legend of Zelda, The - The Wind Waker (USA).rvz:files/RELS.arc:mmem/f_pc_profile_lst.rel'`
This opens a RARC archive inside an RVZ disc image, loads the Yaz0-compressed REL, and
decompresses it on the fly.

All of this operates in memory with minimal overhead; no temporary files need to be extracted.
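
Under the hood, each `:` in a path descends one container level: the current segment is
opened in the current filesystem, detected as an archive or disc image, and mounted as the
filesystem for the next segment. A condensed sketch of this loop (adapted from the new
`find` helper in `src/cmd/vfs.rs` below; error handling trimmed):

```
use anyhow::bail;

use crate::vfs::{detect, open_fs, FileFormat, StdFs, Vfs};

fn find(path: &str) -> anyhow::Result<(Box<dyn Vfs>, &str)> {
    let mut split = path.split(':');
    // Resolution starts at the host filesystem...
    let mut fs: Box<dyn Vfs> = Box::new(StdFs);
    let mut path = split.next().unwrap();
    for next in split {
        let mut file = fs.open(path)?;
        match detect(file.as_mut())? {
            // ...and each `:` segment mounts a nested container as the new root.
            FileFormat::Archive(kind) => {
                fs = open_fs(file, kind)?;
                path = next;
            }
            _ => bail!("'{}' is not a container", path),
        }
    }
    Ok((fs, path))
}
```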

Supported container formats:
- Disc images (ISO/GCM, WIA/RVZ, WBFS, CISO, NFS, GCZ, TGC)
- RARC/SZS and U8 (.arc)

Supported compression formats:
- Yaz0 (SZS)
- Yay0 (SZP)
- NLZSS (.lz)
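
When a compressed file is opened through the VFS, `detect` identifies the format and
`decompress_file` wraps it so callers see the decompressed contents. A minimal sketch of
the pattern used by `vfs cp` (see `src/cmd/vfs.rs` below; the helper name
`open_decompressed` is hypothetical):

```
use crate::vfs::{decompress_file, detect, FileFormat, Vfs, VfsFile};

// Opens `path` within `fs`; if `detect` reports a compressed format
// (Yaz0/Yay0/NLZSS), unwrap it on the fly so callers see plain bytes.
fn open_decompressed(fs: &mut dyn Vfs, path: &str) -> anyhow::Result<Box<dyn VfsFile>> {
    let mut file = fs.open(path)?;
    if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
        file = decompress_file(file, kind)?;
    }
    Ok(file)
}
```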

Additionally, projects can use the new configuration key `object_base`:
```
object: orig/GZLE01/sys/main.dol
modules:
- object: orig/GZLE01/files/RELS.arc:mmem/f_pc_profile_lst.rel
```
becomes
```
object_base: orig/GZLE01
object: sys/main.dol
modules:
- object: files/RELS.arc:mmem/f_pc_profile_lst.rel
```
When loading objects, decomp-toolkit will automatically check the `object_base`
directory for disc images. (They can be named anything, but must be in the root of
the directory.) If one is found, all objects are fetched from the disc image itself,
rather than requiring the files to be extracted manually.
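
The detection itself leans on nod: each file in the `object_base` root is probed with
`Disc::detect`, and the first match is mounted as the object root. A sketch condensed
from the new `find_object_base` in `src/cmd/dol.rs` below (the wrapper function name is
illustrative):

```
use std::{io::Seek, path::Path};

use crate::{
    cmd::dol::ObjectBase,
    vfs::{open_fs, open_path, ArchiveKind},
};

// Hypothetical wrapper around the detection loop in `find_object_base`.
fn detect_disc_base(base: &Path) -> anyhow::Result<Option<ObjectBase>> {
    for entry in base.read_dir()? {
        let entry = entry?;
        if entry.file_type()?.is_file() {
            let path = entry.path();
            let mut file = open_path(&path, false)?;
            // nod probes the header for any supported disc image format
            if nodtool::nod::Disc::detect(file.as_mut())?.is_some() {
                file.rewind()?; // detection consumed header bytes; seek back
                let fs = open_fs(file, ArchiveKind::Disc)?;
                return Ok(Some(ObjectBase::Vfs(path, fs)));
            }
        }
    }
    Ok(None)
}
```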

Two new `vfs` commands were added: `vfs ls` and `vfs cp`. Both are still a work in
progress and fairly barebones, but they already allow listing directory contents and
extracting files from decomp-toolkit's VFS representation:
```
❯ dtk vfs ls disc.rvz:
files
sys

❯ dtk vfs ls disc.rvz:sys
boot.bin
bi2.bin
apploader.bin
fst.bin
main.dol

❯ dtk vfs cp disc.rvz:sys/main.dol .
```
Luke Street 2024-10-03 21:50:35 -06:00
parent 26f52f65b7
commit f91c2a1474
32 changed files with 1789 additions and 1053 deletions

Cargo.lock (generated)

@ -43,7 +43,7 @@ dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
"zerocopy 0.7.35",
]
[[package]]
@ -414,6 +414,7 @@ dependencies = [
"crossterm",
"cwdemangle",
"cwextab 1.0.2",
"dyn-clone",
"enable-ansi-support",
"filetime",
"fixedbitset 0.5.7",
@ -454,7 +455,7 @@ dependencies = [
"tracing-attributes",
"tracing-subscriber",
"xxhash-rust",
"zerocopy",
"zerocopy 0.7.35",
]
[[package]]
@ -919,9 +920,9 @@ dependencies = [
[[package]]
name = "nod"
version = "1.3.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69a1349ff4dfc0757d9b0537c6f7ef777ed414c183db59ae1e5faec4c998b8f1"
checksum = "75b9bd092c2ebed932654aa6de256e39d2156ce8c87ace31191f8086f6d22f02"
dependencies = [
"adler",
"aes",
@ -938,15 +939,15 @@ dependencies = [
"rayon",
"sha1",
"thiserror",
"zerocopy",
"zerocopy 0.8.0",
"zstd",
]
[[package]]
name = "nodtool"
version = "1.3.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f97653db7343722bc0a13001dfc9ec83491246dcf113ee5aa2fdefd0f37447"
checksum = "598b0c24bb98d0094d37e8dc8d83bb600d857c9e42a475806ac1667ec52afbb3"
dependencies = [
"argp",
"base16ct",
@ -969,7 +970,7 @@ dependencies = [
"tracing-attributes",
"tracing-subscriber",
"xxhash-rust",
"zerocopy",
"zerocopy 0.8.0",
"zstd",
]
@ -2160,7 +2161,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
dependencies = [
"byteorder",
"zerocopy-derive",
"zerocopy-derive 0.7.35",
]
[[package]]
name = "zerocopy"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df7885ffcb82507a0f213c593e77c5f13d12cb96588d4e835ad7e9423ba034db"
dependencies = [
"zerocopy-derive 0.8.0",
]
[[package]]
@ -2174,6 +2184,17 @@ dependencies = [
"syn 2.0.79",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "930ad75608219e8ffdb8962a5433cb2b30064c7ccb564d3b76c2963390b1e435"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.79",
]
[[package]]
name = "zstd"
version = "0.13.2"

Cargo.toml

@ -9,7 +9,7 @@ publish = false
repository = "https://github.com/encounter/decomp-toolkit"
readme = "README.md"
categories = ["command-line-utilities"]
rust-version = "1.73.0"
rust-version = "1.80.0"
[[bin]]
name = "dtk"
@ -33,6 +33,7 @@ base64 = "0.22"
crossterm = "0.28"
cwdemangle = "1.0"
cwextab = "1.0.2"
dyn-clone = "1.0"
enable-ansi-support = "0.2"
filetime = "0.2"
fixedbitset = "0.5"
@ -47,7 +48,7 @@ memchr = "2.7"
memmap2 = "0.9"
multimap = "0.10"
nintendo-lz = "0.1"
nodtool = "1.3"
nodtool = "1.4"
#nodtool = { path = "../nod-rs/nodtool" }
num_enum = "0.7"
objdiff-core = { version = "2.1", features = ["ppc"] }

src/cmd/alf.rs

@ -10,9 +10,10 @@ use crate::{
cmd,
util::{
alf::AlfFile,
file::{buf_writer, map_file},
file::buf_writer,
reader::{Endian, FromReader},
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -60,9 +61,8 @@ pub fn run(args: Args) -> Result<()> {
fn hashes(args: HashesArgs) -> Result<()> {
let alf_file = {
let file = map_file(&args.alf_file)?;
let mut reader = file.as_reader();
AlfFile::from_reader(&mut reader, Endian::Little)?
let mut file = open_path(&args.alf_file, true)?;
AlfFile::from_reader(file.as_mut(), Endian::Little)?
};
let mut w: Box<dyn Write> = if let Some(output) = args.output {
Box::new(buf_writer(output)?)

src/cmd/ar.rs

@ -9,7 +9,10 @@ use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs;
use object::{Object, ObjectSymbol, SymbolScope};
use crate::util::file::{buf_writer, map_file, map_file_basic, process_rsp};
use crate::{
util::file::{buf_writer, process_rsp},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
/// Commands for processing static libraries.
@ -80,8 +83,8 @@ fn create(args: CreateArgs) -> Result<()> {
Entry::Vacant(e) => e.insert(Vec::new()),
Entry::Occupied(_) => bail!("Duplicate file name '{path_str}'"),
};
let file = map_file_basic(path)?;
let obj = object::File::parse(file.as_slice())?;
let mut file = open_path(path, false)?;
let obj = object::File::parse(file.map()?)?;
for symbol in obj.symbols() {
if symbol.scope() == SymbolScope::Dynamic {
entries.push(symbol.name_bytes()?.to_vec());
@ -126,8 +129,8 @@ fn extract(args: ExtractArgs) -> Result<()> {
println!("Extracting {} to {}", path.display(), out_dir.display());
}
let file = map_file(path)?;
let mut archive = ar::Archive::new(file.as_slice());
let mut file = open_path(path, false)?;
let mut archive = ar::Archive::new(file.map()?);
while let Some(entry) = archive.next_entry() {
let mut entry =
entry.with_context(|| format!("Processing entry in {}", path.display()))?;

src/cmd/dol.rs

@ -5,7 +5,7 @@ use std::{
ffi::OsStr,
fs,
fs::DirBuilder,
io::{Cursor, Write},
io::{Cursor, Seek, Write},
mem::take,
path::{Path, PathBuf},
time::Instant,
@ -48,10 +48,7 @@ use crate::{
diff::{calc_diff_ranges, print_diff, process_code},
dol::process_dol,
elf::{process_elf, write_elf},
file::{
buf_reader, buf_writer, map_file, map_file_basic, touch, verify_hash, FileIterator,
FileReadInfo,
},
file::{buf_writer, touch, verify_hash, FileIterator, FileReadInfo},
lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
map::apply_map_file,
rel::{process_rel, process_rel_header, update_rel_section_alignment},
@ -59,6 +56,7 @@ use crate::{
split::{is_linker_generated_object, split_obj, update_splits},
IntoCow, ToCow,
},
vfs::{open_fs, open_path, open_path_fs, ArchiveKind, Vfs, VfsFile},
};
#[derive(FromArgs, PartialEq, Debug)]
@ -236,6 +234,9 @@ pub struct ProjectConfig {
/// Marks all emitted symbols as "exported" to prevent the linker from removing them.
#[serde(default = "bool_true", skip_serializing_if = "is_true")]
pub export_all: bool,
/// Optional base path for all object files.
#[serde(default, skip_serializing_if = "is_default")]
pub object_base: Option<PathBuf>,
}
impl Default for ProjectConfig {
@ -254,6 +255,7 @@ impl Default for ProjectConfig {
symbols_known: false,
fill_gaps: true,
export_all: true,
object_base: None,
}
}
}
@ -483,8 +485,8 @@ fn apply_selfile(obj: &mut ObjInfo, buf: &[u8]) -> Result<()> {
pub fn info(args: InfoArgs) -> Result<()> {
let mut obj = {
let file = map_file(&args.dol_file)?;
process_dol(file.as_slice(), "")?
let mut file = open_path(&args.dol_file, true)?;
process_dol(file.map()?, "")?
};
apply_signatures(&mut obj)?;
@ -502,8 +504,8 @@ pub fn info(args: InfoArgs) -> Result<()> {
apply_signatures_post(&mut obj)?;
if let Some(selfile) = &args.selfile {
let file = map_file(selfile)?;
apply_selfile(&mut obj, file.as_slice())?;
let mut file = open_path(selfile, true)?;
apply_selfile(&mut obj, file.map()?)?;
}
println!("{}:", obj.name);
@ -787,16 +789,18 @@ struct AnalyzeResult {
splits_cache: Option<FileReadInfo>,
}
fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
log::debug!("Loading {}", config.base.object.display());
fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
let object_path = object_base.join(&config.base.object);
log::debug!("Loading {}", object_path.display());
let mut obj = {
let file = map_file(&config.base.object)?;
let mut file = object_base.open(&config.base.object)?;
let data = file.map()?;
if let Some(hash_str) = &config.base.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
process_dol(file.as_slice(), config.base.name().as_ref())?
process_dol(data, config.base.name().as_ref())?
};
let mut dep = vec![config.base.object.clone()];
let mut dep = vec![object_path];
if let Some(comment_version) = config.mw_comment_version {
obj.mw_comment = Some(MWComment::new(comment_version)?);
@ -843,11 +847,12 @@ fn load_analyze_dol(config: &ProjectConfig) -> Result<AnalyzeResult> {
if let Some(selfile) = &config.selfile {
log::info!("Loading {}", selfile.display());
let file = map_file(selfile)?;
let mut file = open_path(selfile, true)?;
let data = file.map()?;
if let Some(hash) = &config.selfile_hash {
verify_hash(file.as_slice(), hash)?;
verify_hash(data, hash)?;
}
apply_selfile(&mut obj, file.as_slice())?;
apply_selfile(&mut obj, data)?;
dep.push(selfile.clone());
}
@ -1004,12 +1009,11 @@ fn split_write_obj(
fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
if path.is_file() {
let old_file = map_file_basic(path)?;
let mut old_file = open_path(path, true)?;
let old_data = old_file.map()?;
// If the file is the same size, check if the contents are the same
// Avoid writing if unchanged, since it will update the file's mtime
if old_file.len() == contents.len() as u64
&& xxh3_64(old_file.as_slice()) == xxh3_64(contents)
{
if old_data.len() == contents.len() && xxh3_64(old_data) == xxh3_64(contents) {
return Ok(());
}
}
@ -1018,20 +1022,26 @@ fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
Ok(())
}
fn load_analyze_rel(config: &ProjectConfig, module_config: &ModuleConfig) -> Result<AnalyzeResult> {
debug!("Loading {}", module_config.object.display());
let file = map_file(&module_config.object)?;
fn load_analyze_rel(
config: &ProjectConfig,
object_base: &ObjectBase,
module_config: &ModuleConfig,
) -> Result<AnalyzeResult> {
let object_path = object_base.join(&module_config.object);
debug!("Loading {}", object_path.display());
let mut file = object_base.open(&module_config.object)?;
let data = file.map()?;
if let Some(hash_str) = &module_config.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
let (header, mut module_obj) =
process_rel(&mut Cursor::new(file.as_slice()), module_config.name().as_ref())?;
process_rel(&mut Cursor::new(data), module_config.name().as_ref())?;
if let Some(comment_version) = config.mw_comment_version {
module_obj.mw_comment = Some(MWComment::new(comment_version)?);
}
let mut dep = vec![module_config.object.clone()];
let mut dep = vec![object_path];
if let Some(map_path) = &module_config.map {
apply_map_file(map_path, &mut module_obj, None, None)?;
dep.push(map_path.clone());
@ -1082,22 +1092,24 @@ fn load_analyze_rel(config: &ProjectConfig, module_config: &ModuleConfig) -> Res
fn split(args: SplitArgs) -> Result<()> {
if let Some(jobs) = args.jobs {
rayon::ThreadPoolBuilder::new().num_threads(jobs).build_global().unwrap();
rayon::ThreadPoolBuilder::new().num_threads(jobs).build_global()?;
}
let command_start = Instant::now();
info!("Loading {}", args.config.display());
let mut config: ProjectConfig = {
let mut config_file = buf_reader(&args.config)?;
serde_yaml::from_reader(&mut config_file)?
let mut config_file = open_path(&args.config, true)?;
serde_yaml::from_reader(config_file.as_mut())?
};
let object_base = find_object_base(&config)?;
for module_config in config.modules.iter_mut() {
let file = map_file(&module_config.object)?;
let mut file = object_base.open(&module_config.object)?;
let mut data = file.map()?;
if let Some(hash_str) = &module_config.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
} else {
module_config.hash = Some(file_sha1_string(&mut file.as_reader())?);
module_config.hash = Some(file_sha1_string(&mut data)?);
}
}
@ -1121,7 +1133,7 @@ fn split(args: SplitArgs) -> Result<()> {
s.spawn(|_| {
let _span = info_span!("module", name = %config.base.name()).entered();
dol_result =
Some(load_analyze_dol(&config).with_context(|| {
Some(load_analyze_dol(&config, &object_base).with_context(|| {
format!("While loading object '{}'", config.base.file_name())
}));
});
@ -1133,7 +1145,7 @@ fn split(args: SplitArgs) -> Result<()> {
.par_iter()
.map(|module_config| {
let _span = info_span!("module", name = %module_config.name()).entered();
load_analyze_rel(&config, module_config).with_context(|| {
load_analyze_rel(&config, &object_base, module_config).with_context(|| {
format!("While loading object '{}'", module_config.file_name())
})
})
@ -1538,16 +1550,18 @@ fn symbol_name_fuzzy_eq(a: &ObjSymbol, b: &ObjSymbol) -> bool {
fn diff(args: DiffArgs) -> Result<()> {
log::info!("Loading {}", args.config.display());
let mut config_file = buf_reader(&args.config)?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;
let mut config_file = open_path(&args.config, true)?;
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
let object_base = find_object_base(&config)?;
log::info!("Loading {}", config.base.object.display());
log::info!("Loading {}", object_base.join(&config.base.object).display());
let mut obj = {
let file = map_file(&config.base.object)?;
let mut file = object_base.open(&config.base.object)?;
let data = file.map()?;
if let Some(hash_str) = &config.base.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
process_dol(file.as_slice(), config.base.name().as_ref())?
process_dol(data, config.base.name().as_ref())?
};
if let Some(symbols_path) = &config.base.symbols {
@ -1717,16 +1731,18 @@ fn diff(args: DiffArgs) -> Result<()> {
fn apply(args: ApplyArgs) -> Result<()> {
log::info!("Loading {}", args.config.display());
let mut config_file = buf_reader(&args.config)?;
let config: ProjectConfig = serde_yaml::from_reader(&mut config_file)?;
let mut config_file = open_path(&args.config, true)?;
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
let object_base = find_object_base(&config)?;
log::info!("Loading {}", config.base.object.display());
log::info!("Loading {}", object_base.join(&config.base.object).display());
let mut obj = {
let file = map_file(&config.base.object)?;
let mut file = object_base.open(&config.base.object)?;
let data = file.map()?;
if let Some(hash_str) = &config.base.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
process_dol(file.as_slice(), config.base.name().as_ref())?
process_dol(data, config.base.name().as_ref())?
};
let Some(symbols_path) = &config.base.symbols else {
@ -1881,30 +1897,31 @@ fn config(args: ConfigArgs) -> Result<()> {
let mut config = ProjectConfig::default();
let mut modules = Vec::<(u32, ModuleConfig)>::new();
for result in FileIterator::new(&args.objects)? {
let (path, entry) = result?;
let (path, mut entry) = result?;
log::info!("Loading {}", path.display());
match path.extension() {
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("dol")) => {
config.base.object = path;
config.base.hash = Some(file_sha1_string(&mut entry.as_reader())?);
config.base.hash = Some(file_sha1_string(&mut entry)?);
}
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rel")) => {
let header = process_rel_header(&mut entry.as_reader())?;
let header = process_rel_header(&mut entry)?;
entry.rewind()?;
modules.push((header.module_id, ModuleConfig {
object: path,
hash: Some(file_sha1_string(&mut entry.as_reader())?),
hash: Some(file_sha1_string(&mut entry)?),
..Default::default()
}));
}
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("sel")) => {
config.selfile = Some(path);
config.selfile_hash = Some(file_sha1_string(&mut entry.as_reader())?);
config.selfile_hash = Some(file_sha1_string(&mut entry)?);
}
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rso")) => {
config.modules.push(ModuleConfig {
object: path,
hash: Some(file_sha1_string(&mut entry.as_reader())?),
hash: Some(file_sha1_string(&mut entry)?),
..Default::default()
});
}
@ -1975,3 +1992,52 @@ fn apply_add_relocations(obj: &mut ObjInfo, relocations: &[AddRelocationConfig])
}
Ok(())
}
pub enum ObjectBase {
None,
Directory(PathBuf),
Vfs(PathBuf, Box<dyn Vfs + Send + Sync>),
}
impl ObjectBase {
pub fn join(&self, path: &Path) -> PathBuf {
match self {
ObjectBase::None => path.to_path_buf(),
ObjectBase::Directory(base) => base.join(path),
ObjectBase::Vfs(base, _) => {
PathBuf::from(format!("{}:{}", base.display(), path.display()))
}
}
}
pub fn open(&self, path: &Path) -> Result<Box<dyn VfsFile>> {
match self {
ObjectBase::None => open_path(path, true),
ObjectBase::Directory(base) => open_path(&base.join(path), true),
ObjectBase::Vfs(vfs_path, vfs) => open_path_fs(vfs.clone(), path, true)
.with_context(|| format!("Using disc image {}", vfs_path.display())),
}
}
}
pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
if let Some(base) = &config.object_base {
// Search for disc images in the object base directory
for result in base.read_dir()? {
let entry = result?;
if entry.file_type()?.is_file() {
let path = entry.path();
let mut file = open_path(&path, false)?;
let format = nodtool::nod::Disc::detect(file.as_mut())?;
if format.is_some() {
file.rewind()?;
log::info!("Using disc image {}", path.display());
let fs = open_fs(file, ArchiveKind::Disc)?;
return Ok(ObjectBase::Vfs(path, fs));
}
}
}
return Ok(ObjectBase::Directory(base.clone()));
}
Ok(ObjectBase::None)
}

src/cmd/dwarf.rs

@ -16,12 +16,15 @@ use syntect::{
parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet},
};
use crate::util::{
use crate::{
util::{
dwarf::{
process_compile_unit, process_cu_tag, process_overlay_branch, read_debug_section,
should_skip_tag, tag_type_string, AttributeKind, TagKind,
},
file::{buf_writer, map_file},
file::buf_writer,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -73,8 +76,8 @@ fn dump(args: DumpArgs) -> Result<()> {
let theme = theme_set.themes.get("Solarized (dark)").context("Failed to load theme")?;
let syntax = syntax_set.find_syntax_by_name("C++").context("Failed to find syntax")?.clone();
let file = map_file(&args.in_file)?;
let buf = file.as_slice();
let mut file = open_path(&args.in_file, true)?;
let buf = file.map()?;
if buf.starts_with(b"!<arch>\n") {
let mut archive = ar::Archive::new(buf);
while let Some(result) = archive.next_entry() {

src/cmd/elf2dol.rs

@ -7,7 +7,7 @@ use anyhow::{anyhow, bail, ensure, Result};
use argp::FromArgs;
use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, SectionKind};
use crate::util::file::{buf_writer, map_file};
use crate::{util::file::buf_writer, vfs::open_path};
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Converts an ELF file to a DOL file.
@ -46,8 +46,8 @@ const MAX_TEXT_SECTIONS: usize = 7;
const MAX_DATA_SECTIONS: usize = 11;
pub fn run(args: Args) -> Result<()> {
let file = map_file(&args.elf_file)?;
let obj_file = object::read::File::parse(file.as_slice())?;
let mut file = open_path(&args.elf_file, true)?;
let obj_file = object::read::File::parse(file.map()?)?;
match obj_file.architecture() {
Architecture::PowerPc => {}
arch => bail!("Unexpected architecture: {arch:?}"),

src/cmd/map.rs

@ -5,11 +5,13 @@ use argp::FromArgs;
use cwdemangle::{demangle, DemangleOptions};
use tracing::error;
use crate::util::{
use crate::{
util::{
config::{write_splits_file, write_symbols_file},
file::map_file,
map::{create_obj, process_map, SymbolEntry, SymbolRef},
split::update_splits,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -73,8 +75,8 @@ pub fn run(args: Args) -> Result<()> {
}
fn entries(args: EntriesArgs) -> Result<()> {
let file = map_file(&args.map_file)?;
let entries = process_map(&mut file.as_reader(), None, None)?;
let mut file = open_path(&args.map_file, true)?;
let entries = process_map(file.as_mut(), None, None)?;
match entries.unit_entries.get_vec(&args.unit) {
Some(vec) => {
println!("Entries for {}:", args.unit);
@ -104,9 +106,9 @@ fn entries(args: EntriesArgs) -> Result<()> {
}
fn symbol(args: SymbolArgs) -> Result<()> {
let file = map_file(&args.map_file)?;
let mut file = open_path(&args.map_file, true)?;
log::info!("Processing map...");
let entries = process_map(&mut file.as_reader(), None, None)?;
let entries = process_map(file.as_mut(), None, None)?;
log::info!("Done!");
let mut opt_ref: Option<(String, SymbolEntry)> = None;
@ -179,9 +181,9 @@ fn symbol(args: SymbolArgs) -> Result<()> {
}
fn config(args: ConfigArgs) -> Result<()> {
let file = map_file(&args.map_file)?;
let mut file = open_path(&args.map_file, true)?;
log::info!("Processing map...");
let entries = process_map(&mut file.as_reader(), None, None)?;
let entries = process_map(file.as_mut(), None, None)?;
let mut obj = create_obj(&entries)?;
if let Err(e) = update_splits(&mut obj, None, false) {
error!("Failed to update splits: {}", e)

src/cmd/mod.rs

@ -14,5 +14,6 @@ pub mod rel;
pub mod rso;
pub mod shasum;
pub mod u8_arc;
pub mod vfs;
pub mod yay0;
pub mod yaz0;

src/cmd/nlzss.rs

@ -3,9 +3,9 @@ use std::{fs, path::PathBuf};
use anyhow::{anyhow, Context, Result};
use argp::FromArgs;
use crate::util::{
file::{open_file, process_rsp},
IntoCow, ToCow,
use crate::{
util::{file::process_rsp, IntoCow, ToCow},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -45,7 +45,8 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let files = process_rsp(&args.files)?;
let single_file = files.len() == 1;
for path in files {
let data = nintendo_lz::decompress(&mut open_file(&path)?)
let mut file = open_path(&path, false)?;
let data = nintendo_lz::decompress(&mut file)
.map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path.display(), e))?;
let out_path = if let Some(output) = &args.output {
if single_file {

src/cmd/rarc.rs

@ -1,11 +1,11 @@
use std::{fs, fs::DirBuilder, path::PathBuf};
use std::path::PathBuf;
use anyhow::{Context, Result};
use anyhow::{anyhow, Result};
use argp::FromArgs;
use crate::util::{
file::{decompress_if_needed, map_file},
rarc::{Node, RarcReader},
use crate::{
util::rarc::{RarcNodeKind, RarcView},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -55,71 +55,42 @@ pub fn run(args: Args) -> Result<()> {
}
fn list(args: ListArgs) -> Result<()> {
let file = map_file(&args.file)?;
let rarc = RarcReader::new(&mut file.as_reader())
.with_context(|| format!("Failed to process RARC file '{}'", args.file.display()))?;
let mut current_path = PathBuf::new();
for node in rarc.nodes() {
match node {
Node::DirectoryBegin { name } => {
current_path.push(name.name);
}
Node::DirectoryEnd { name: _ } => {
current_path.pop();
}
Node::File { name, offset, size } => {
let path = current_path.join(name.name);
println!("{}: {} bytes, offset {:#X}", path.display(), size, offset);
}
Node::CurrentDirectory => {}
Node::ParentDirectory => {}
}
}
let mut file = open_path(&args.file, true)?;
let view = RarcView::new(file.map()?).map_err(|e| anyhow!(e))?;
test(&view, "")?;
test(&view, "/")?;
test(&view, "//")?;
test(&view, "/rels")?;
test(&view, "/rels/")?;
test(&view, "/rels/amem")?;
test(&view, "/rels/amem/")?;
test(&view, "/rels/mmem")?;
test(&view, "/rels/mmem/../mmem")?;
test(&view, "/rels/amem/d_a_am.rel")?;
test(&view, "//amem/d_a_am.rel")?;
test(&view, "amem/d_a_am.rel")?;
test(&view, "amem/d_a_am.rel/")?;
test(&view, "mmem/d_a_obj_pirateship.rel")?;
test(&view, "mmem//d_a_obj_pirateship.rel")?;
test(&view, "mmem/da_obj_pirateship.rel")?;
Ok(())
}
fn extract(args: ExtractArgs) -> Result<()> {
let file = map_file(&args.file)?;
let rarc = RarcReader::new(&mut file.as_reader())
.with_context(|| format!("Failed to process RARC file '{}'", args.file.display()))?;
let mut current_path = PathBuf::new();
for node in rarc.nodes() {
match node {
Node::DirectoryBegin { name } => {
current_path.push(name.name);
}
Node::DirectoryEnd { name: _ } => {
current_path.pop();
}
Node::File { name, offset, size } => {
let file_data = decompress_if_needed(
&file.as_slice()[offset as usize..offset as usize + size as usize],
)?;
let file_path = current_path.join(&name.name);
let output_path = args
.output
.as_ref()
.map(|p| p.join(&file_path))
.unwrap_or_else(|| file_path.clone());
if !args.quiet {
println!(
"Extracting {} to {} ({} bytes)",
file_path.display(),
output_path.display(),
size
);
}
if let Some(parent) = output_path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
fs::write(&output_path, file_data)
.with_context(|| format!("Failed to write file '{}'", output_path.display()))?;
}
Node::CurrentDirectory => {}
Node::ParentDirectory => {}
}
}
fn test(view: &RarcView, path: &str) -> Result<()> {
let option = view.find(path);
let data = if let Some(RarcNodeKind::File(_, node)) = option {
view.get_data(node).map_err(|e| anyhow!(e))?
} else {
&[]
};
let vec = data.iter().cloned().take(4).collect::<Vec<_>>();
println!("{:?}: {:?} (len: {:?})", path, option, vec.as_slice());
// if let Some(RarcNodeKind::Directory(_, dir)) = option {
// for node in view.children(dir) {
// println!("Child: {:?} ({:?})", node, view.get_string(node.name_offset()));
// }
// }
Ok(())
}
fn extract(_args: ExtractArgs) -> Result<()> { todo!() }

src/cmd/rel.rs

@ -1,7 +1,7 @@
use std::{
collections::{btree_map, BTreeMap},
fs,
io::Write,
io::{Cursor, Write},
path::PathBuf,
time::Instant,
};
@ -27,13 +27,13 @@ use crate::{
tracker::Tracker,
},
array_ref_mut,
cmd::dol::{ModuleConfig, ProjectConfig},
cmd::dol::{find_object_base, ModuleConfig, ObjectBase, ProjectConfig},
obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol},
util::{
config::{is_auto_symbol, read_splits_sections, SectionDef},
dol::process_dol,
elf::{to_obj_reloc_kind, write_elf},
file::{buf_reader, buf_writer, map_file, process_rsp, verify_hash, FileIterator},
file::{buf_writer, process_rsp, verify_hash, FileIterator},
nested::NestedMap,
rel::{
print_relocations, process_rel, process_rel_header, process_rel_sections, write_rel,
@ -41,6 +41,7 @@ use crate::{
},
IntoCow, ToCow,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -162,12 +163,13 @@ fn match_section_index(
// })
}
fn load_rel(module_config: &ModuleConfig) -> Result<RelInfo> {
let file = map_file(&module_config.object)?;
fn load_rel(module_config: &ModuleConfig, object_base: &ObjectBase) -> Result<RelInfo> {
let mut file = object_base.open(&module_config.object)?;
let data = file.map()?;
if let Some(hash_str) = &module_config.hash {
verify_hash(file.as_slice(), hash_str)?;
verify_hash(data, hash_str)?;
}
let mut reader = file.as_reader();
let mut reader = Cursor::new(data);
let header = process_rel_header(&mut reader)?;
let sections = process_rel_sections(&mut reader, &header)?;
let section_defs = if let Some(splits_path) = &module_config.splits {
@ -261,15 +263,19 @@ fn make(args: MakeArgs) -> Result<()> {
let mut existing_headers = BTreeMap::<u32, RelInfo>::new();
let mut name_to_module_id = FxHashMap::<String, u32>::default();
if let Some(config_path) = &args.config {
let config: ProjectConfig = serde_yaml::from_reader(&mut buf_reader(config_path)?)?;
let config: ProjectConfig = {
let mut file = open_path(config_path, true)?;
serde_yaml::from_reader(file.as_mut())?
};
let object_base = find_object_base(&config)?;
for module_config in &config.modules {
let module_name = module_config.name();
if !args.names.is_empty() && !args.names.iter().any(|n| n == &module_name) {
continue;
}
let _span = info_span!("module", name = %module_name).entered();
let info = load_rel(module_config).with_context(|| {
format!("While loading REL '{}'", module_config.object.display())
let info = load_rel(module_config, &object_base).with_context(|| {
format!("While loading REL '{}'", object_base.join(&module_config.object).display())
})?;
name_to_module_id.insert(module_name.to_string(), info.0.module_id);
match existing_headers.entry(info.0.module_id) {
@ -287,9 +293,9 @@ fn make(args: MakeArgs) -> Result<()> {
}
// Load all modules
let files = paths.iter().map(map_file).collect::<Result<Vec<_>>>()?;
let mut files = paths.iter().map(|p| open_path(p, true)).collect::<Result<Vec<_>>>()?;
let modules = files
.par_iter()
.par_iter_mut()
.enumerate()
.zip(&paths)
.map(|((idx, file), path)| {
@ -301,7 +307,7 @@ fn make(args: MakeArgs) -> Result<()> {
.and_then(|n| name_to_module_id.get(n))
.copied()
.unwrap_or(idx as u32);
load_obj(file.as_slice())
load_obj(file.map()?)
.map(|o| LoadedModule { module_id, file: o, path: path.clone() })
.with_context(|| format!("Failed to load '{}'", path.display()))
})
@ -399,8 +405,8 @@ fn make(args: MakeArgs) -> Result<()> {
}
fn info(args: InfoArgs) -> Result<()> {
let file = map_file(args.rel_file)?;
let (header, mut module_obj) = process_rel(&mut file.as_reader(), "")?;
let mut file = open_path(&args.rel_file, true)?;
let (header, mut module_obj) = process_rel(file.as_mut(), "")?;
let mut state = AnalyzerState::default();
state.detect_functions(&module_obj)?;
@ -458,7 +464,7 @@ fn info(args: InfoArgs) -> Result<()> {
if args.relocations {
println!("\nRelocations:");
println!(" [Source] section:address RelocType -> [Target] module:section:address");
print_relocations(&mut file.as_reader(), &header)?;
print_relocations(file.as_mut(), &header)?;
}
Ok(())
}
@ -469,9 +475,9 @@ const fn align32(x: u32) -> u32 { (x + 31) & !31 }
fn merge(args: MergeArgs) -> Result<()> {
log::info!("Loading {}", args.dol_file.display());
let mut obj = {
let file = map_file(&args.dol_file)?;
let mut file = open_path(&args.dol_file, true)?;
let name = args.dol_file.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
process_dol(file.as_slice(), name.as_ref())?
process_dol(file.map()?, name.as_ref())?
};
log::info!("Performing signature analysis");
@ -481,10 +487,10 @@ fn merge(args: MergeArgs) -> Result<()> {
let mut processed = 0;
let mut module_map = BTreeMap::<u32, ObjInfo>::new();
for result in FileIterator::new(&args.rel_files)? {
let (path, entry) = result?;
let (path, mut entry) = result?;
log::info!("Loading {}", path.display());
let name = path.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
let (_, obj) = process_rel(&mut entry.as_reader(), name.as_ref())?;
let (_, obj) = process_rel(&mut entry, name.as_ref())?;
match module_map.entry(obj.module_id) {
btree_map::Entry::Vacant(e) => e.insert(obj),
btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),

src/cmd/rso.rs

@ -11,13 +11,16 @@ use object::{
SymbolIndex, SymbolKind, SymbolSection,
};
use crate::util::{
file::{buf_reader, buf_writer, map_file},
use crate::{
util::{
file::buf_writer,
reader::{Endian, ToWriter},
rso::{
process_rso, symbol_hash, RsoHeader, RsoRelocation, RsoSectionHeader, RsoSymbol,
RSO_SECTION_NAMES,
},
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -74,8 +77,8 @@ pub fn run(args: Args) -> Result<()> {
fn info(args: InfoArgs) -> Result<()> {
let rso = {
let file = map_file(args.rso_file)?;
let obj = process_rso(&mut file.as_reader())?;
let mut file = open_path(&args.rso_file, true)?;
let obj = process_rso(file.as_mut())?;
#[allow(clippy::let_and_return)]
obj
};
@ -84,8 +87,8 @@ fn info(args: InfoArgs) -> Result<()> {
}
fn make(args: MakeArgs) -> Result<()> {
let file = map_file(&args.input)?;
let obj_file = object::read::File::parse(file.as_slice())?;
let mut file = open_path(&args.input, true)?;
let obj_file = object::read::File::parse(file.map()?)?;
match obj_file.architecture() {
Architecture::PowerPc => {}
arch => bail!("Unexpected architecture: {arch:?}"),
@ -97,9 +100,9 @@ fn make(args: MakeArgs) -> Result<()> {
None => args.input.display().to_string(),
};
let symbols_to_export = match args.export {
let symbols_to_export = match &args.export {
Some(export_file_path) => {
let export_file_reader = buf_reader(export_file_path)?;
let export_file_reader = open_path(export_file_path, true)?;
export_file_reader.lines().map_while(Result::ok).collect()
}
None => vec![],

src/cmd/shasum.rs

@ -1,6 +1,6 @@
use std::{
fs::File,
io::{stdout, BufRead, BufReader, Read, Write},
io::{stdout, BufRead, Read, Write},
path::{Path, PathBuf},
};
@ -10,7 +10,10 @@ use owo_colors::{OwoColorize, Stream};
use path_slash::PathExt;
use sha1::{Digest, Sha1};
use crate::util::file::{buf_writer, open_file, process_rsp, touch};
use crate::{
util::file::{buf_writer, process_rsp, touch},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Print or check SHA1 (160-bit) checksums.
@ -36,8 +39,8 @@ const DEFAULT_BUF_SIZE: usize = 8192;
pub fn run(args: Args) -> Result<()> {
if args.check {
for path in process_rsp(&args.files)? {
let file = open_file(&path)?;
check(&args, &mut BufReader::new(file))?;
let mut file = open_path(&path, false)?;
check(&args, file.as_mut())?;
}
if let Some(out_path) = &args.output {
touch(out_path)
@ -53,8 +56,8 @@ pub fn run(args: Args) -> Result<()> {
Box::new(stdout())
};
for path in process_rsp(&args.files)? {
let mut file = open_file(&path)?;
hash(w.as_mut(), &mut file, &path)?
let mut file = open_path(&path, false)?;
hash(w.as_mut(), file.as_mut(), &path)?
}
}
Ok(())

src/cmd/u8_arc.rs

@ -4,9 +4,12 @@ use anyhow::{anyhow, Context, Result};
use argp::FromArgs;
use itertools::Itertools;
use crate::util::{
file::{decompress_if_needed, map_file},
use crate::{
util::{
file::decompress_if_needed,
u8_arc::{U8Node, U8View},
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -56,8 +59,8 @@ pub fn run(args: Args) -> Result<()> {
}
fn list(args: ListArgs) -> Result<()> {
let file = map_file(&args.file)?;
let view = U8View::new(file.as_slice())
let mut file = open_path(&args.file, true)?;
let view = U8View::new(file.map()?)
.map_err(|e| anyhow!("Failed to open U8 file '{}': {}", args.file.display(), e))?;
visit_files(&view, |_, node, path| {
println!("{}: {} bytes, offset {:#X}", path, node.length(), node.offset());
@ -66,15 +69,15 @@ fn list(args: ListArgs) -> Result<()> {
}
fn extract(args: ExtractArgs) -> Result<()> {
let file = map_file(&args.file)?;
let view = U8View::new(file.as_slice())
let mut file = open_path(&args.file, true)?;
let data = file.map()?;
let view = U8View::new(data)
.map_err(|e| anyhow!("Failed to open U8 file '{}': {}", args.file.display(), e))?;
visit_files(&view, |_, node, path| {
let offset = node.offset();
let size = node.length();
let file_data = decompress_if_needed(
&file.as_slice()[offset as usize..offset as usize + size as usize],
)?;
let file_data =
decompress_if_needed(&data[offset as usize..offset as usize + size as usize])?;
let output_path = args
.output
.as_ref()
@ -94,7 +97,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
fn visit_files(
view: &U8View,
mut visitor: impl FnMut(usize, &U8Node, String) -> Result<()>,
mut visitor: impl FnMut(usize, U8Node, String) -> Result<()>,
) -> Result<()> {
let mut path_segments = Vec::<(Cow<str>, usize)>::new();
for (idx, node, name) in view.iter() {

src/cmd/vfs.rs (new file)

@ -0,0 +1,121 @@
use std::{fs::File, io, io::Write, path::PathBuf};
use anyhow::{anyhow, bail};
use argp::FromArgs;
use nodtool::nod::ResultContext;
use crate::vfs::{decompress_file, detect, open_fs, FileFormat, StdFs, Vfs, VfsFileType};
#[derive(FromArgs, PartialEq, Debug)]
/// Commands for interacting with discs and containers.
#[argp(subcommand, name = "vfs")]
pub struct Args {
#[argp(subcommand)]
command: SubCommand,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argp(subcommand)]
enum SubCommand {
Ls(LsArgs),
Cp(CpArgs),
}
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// List files in a directory or container.
#[argp(subcommand, name = "ls")]
pub struct LsArgs {
#[argp(positional)]
/// Path to the container.
path: PathBuf,
}
#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Copy files from a container.
#[argp(subcommand, name = "cp")]
pub struct CpArgs {
#[argp(positional)]
/// Source path(s) and destination path.
paths: Vec<PathBuf>,
}
pub fn run(args: Args) -> anyhow::Result<()> {
match args.command {
SubCommand::Ls(args) => ls(args),
SubCommand::Cp(args) => cp(args),
}
}
fn find(path: &str) -> anyhow::Result<(Box<dyn Vfs>, &str)> {
let mut split = path.split(':');
let mut fs: Box<dyn Vfs> = Box::new(StdFs);
let mut path = split.next().unwrap();
for next in split {
let mut file = fs.open(path)?;
match detect(file.as_mut())? {
FileFormat::Archive(kind) => {
fs = open_fs(file, kind)?;
path = next;
}
_ => bail!("'{}' is not a container", path),
}
}
Ok((fs, path))
}
fn ls(args: LsArgs) -> anyhow::Result<()> {
let str = args.path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
let (mut fs, mut path) = find(str)?;
let metadata = fs.metadata(path)?;
if metadata.is_file() {
let mut file = fs.open(path)?;
match detect(file.as_mut())? {
FileFormat::Archive(kind) => {
fs = open_fs(file, kind)?;
path = "";
}
_ => bail!("'{}' is not a directory", path),
}
}
let entries = fs.read_dir(path)?;
for entry in entries {
println!("{}", entry);
}
Ok(())
}
fn cp(mut args: CpArgs) -> anyhow::Result<()> {
if args.paths.len() < 2 {
bail!("Both source and destination paths must be provided");
}
let dest = args.paths.pop().unwrap();
let dest_is_dir = args.paths.len() > 1 || dest.metadata().ok().is_some_and(|m| m.is_dir());
for path in args.paths {
let str = path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
let (mut fs, path) = find(str)?;
let metadata = fs.metadata(path)?;
match metadata.file_type {
VfsFileType::File => {
let mut file = fs.open(path)?;
if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
file = decompress_file(file, kind)?;
}
let dest = if dest_is_dir {
let name = path.rsplit('/').next().unwrap();
dest.join(name)
} else {
dest.clone()
};
let mut dest_file = File::create(&dest)
.with_context(|| format!("Failed to create file {}", dest.display()))?;
io::copy(file.as_mut(), &mut dest_file)
.with_context(|| format!("Failed to write file {}", dest.display()))?;
dest_file
.flush()
.with_context(|| format!("Failed to flush file {}", dest.display()))?;
}
VfsFileType::Directory => bail!("Cannot copy directory"),
}
}
Ok(())
}

src/cmd/yay0.rs

@ -3,10 +3,13 @@ use std::{fs, path::PathBuf};
use anyhow::{Context, Result};
use argp::FromArgs;
use crate::util::{
file::{map_file_basic, process_rsp},
use crate::{
util::{
file::process_rsp,
ncompress::{compress_yay0, decompress_yay0},
IntoCow, ToCow,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -62,8 +65,8 @@ fn compress(args: CompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
compress_yay0(file.as_slice())
let mut file = open_path(&path, false)?;
compress_yay0(file.map()?)
};
let out_path = if let Some(output) = &args.output {
if single_file {
@ -85,8 +88,8 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
decompress_yay0(file.as_slice())
let mut file = open_path(&path, true)?;
decompress_yay0(file.map()?)
.with_context(|| format!("Failed to decompress '{}' using Yay0", path.display()))?
};
let out_path = if let Some(output) = &args.output {

src/cmd/yaz0.rs

@ -3,10 +3,13 @@ use std::{fs, path::PathBuf};
use anyhow::{Context, Result};
use argp::FromArgs;
use crate::util::{
file::{map_file_basic, process_rsp},
use crate::{
util::{
file::process_rsp,
ncompress::{compress_yaz0, decompress_yaz0},
IntoCow, ToCow,
},
vfs::open_path,
};
#[derive(FromArgs, PartialEq, Debug)]
@ -62,8 +65,8 @@ fn compress(args: CompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
compress_yaz0(file.as_slice())
let mut file = open_path(&path, false)?;
compress_yaz0(file.map()?)
};
let out_path = if let Some(output) = &args.output {
if single_file {
@ -85,8 +88,8 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let single_file = files.len() == 1;
for path in files {
let data = {
let file = map_file_basic(&path)?;
decompress_yaz0(file.as_slice())
let mut file = open_path(&path, false)?;
decompress_yaz0(file.map()?)
.with_context(|| format!("Failed to decompress '{}' using Yaz0", path.display()))?
};
let out_path = if let Some(output) = &args.output {

src/main.rs

@ -12,6 +12,7 @@ pub mod argp_version;
pub mod cmd;
pub mod obj;
pub mod util;
pub mod vfs;
// musl's allocator is very slow, so use mimalloc when targeting musl.
// Otherwise, use the system allocator to avoid extra code size.
@ -102,6 +103,7 @@ enum SubCommand {
Rso(cmd::rso::Args),
Shasum(cmd::shasum::Args),
U8(cmd::u8_arc::Args),
Vfs(cmd::vfs::Args),
Yay0(cmd::yay0::Args),
Yaz0(cmd::yaz0::Args),
}
@ -177,6 +179,7 @@ fn main() {
SubCommand::Rso(c_args) => cmd::rso::run(c_args),
SubCommand::Shasum(c_args) => cmd::shasum::run(c_args),
SubCommand::U8(c_args) => cmd::u8_arc::run(c_args),
SubCommand::Vfs(c_args) => cmd::vfs::run(c_args),
SubCommand::Yay0(c_args) => cmd::yay0::run(c_args),
SubCommand::Yaz0(c_args) => cmd::yaz0::run(c_args),
});

src/util/config.rs

@ -21,9 +21,10 @@ use crate::{
ObjSymbolFlags, ObjSymbolKind, ObjUnit,
},
util::{
file::{buf_writer, map_file, FileReadInfo},
file::{buf_writer, FileReadInfo},
split::default_section_align,
},
vfs::open_path,
};
pub fn parse_u32(s: &str) -> Result<u32, ParseIntError> {
@ -46,10 +47,11 @@ pub fn parse_i32(s: &str) -> Result<i32, ParseIntError> {
pub fn apply_symbols_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>>
where P: AsRef<Path> {
Ok(if path.as_ref().is_file() {
let file = map_file(path)?;
let cached = FileReadInfo::new(&file)?;
for result in file.as_reader().lines() {
let path = path.as_ref();
Ok(if path.is_file() {
let mut file = open_path(path, true)?;
let cached = FileReadInfo::new(file.as_mut())?;
for result in file.lines() {
let line = match result {
Ok(line) => line,
Err(e) => bail!("Failed to process symbols file: {e:?}"),
@ -206,8 +208,8 @@ where
// Check file mtime
let path = path.as_ref();
let new_mtime = fs::metadata(path).ok().map(|m| FileTime::from_last_modification_time(&m));
if let Some(new_mtime) = new_mtime {
if new_mtime != cached_file.mtime {
if let (Some(new_mtime), Some(old_mtime)) = (new_mtime, cached_file.mtime) {
if new_mtime != old_mtime {
// File changed, don't write
warn!(path = %path.display(), "File changed since read, not updating");
return Ok(());
@ -625,10 +627,11 @@ enum SplitState {
pub fn apply_splits_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>>
where P: AsRef<Path> {
Ok(if path.as_ref().is_file() {
let file = map_file(path)?;
let cached = FileReadInfo::new(&file)?;
apply_splits(&mut file.as_reader(), obj)?;
let path = path.as_ref();
Ok(if path.is_file() {
let mut file = open_path(path, true)?;
let cached = FileReadInfo::new(file.as_mut())?;
apply_splits(file.as_mut(), obj)?;
Some(cached)
} else {
None
@ -737,14 +740,14 @@ where R: BufRead + ?Sized {
pub fn read_splits_sections<P>(path: P) -> Result<Option<Vec<SectionDef>>>
where P: AsRef<Path> {
if !path.as_ref().is_file() {
let path = path.as_ref();
if !path.is_file() {
return Ok(None);
}
let file = map_file(path)?;
let r = file.as_reader();
let file = open_path(path, true)?;
let mut sections = Vec::new();
let mut state = SplitState::None;
for result in r.lines() {
for result in file.lines() {
let line = match result {
Ok(line) => line,
Err(e) => return Err(e.into()),

src/util/dep.rs

@ -4,37 +4,38 @@ use std::{
};
use itertools::Itertools;
use path_slash::PathBufExt;
use crate::util::file::split_path;
pub struct DepFile {
pub name: PathBuf,
pub dependencies: Vec<PathBuf>,
pub name: String,
pub dependencies: Vec<String>,
}
fn normalize_path(path: &Path) -> String {
let path = path.to_string_lossy().replace('\\', "/");
path.split_once(':').map(|(p, _)| p.to_string()).unwrap_or(path)
}
impl DepFile {
pub fn new(name: PathBuf) -> Self { Self { name, dependencies: vec![] } }
pub fn new(name: PathBuf) -> Self {
Self { name: name.to_string_lossy().into_owned(), dependencies: vec![] }
}
pub fn push<P>(&mut self, dependency: P)
where P: AsRef<Path> {
let path = split_path(dependency.as_ref())
.map(|(p, _)| p)
.unwrap_or_else(|_| dependency.as_ref().to_path_buf());
let path = dependency.as_ref().to_string_lossy().replace('\\', "/");
let path = path.split_once(':').map(|(p, _)| p.to_string()).unwrap_or(path);
self.dependencies.push(path);
}
pub fn extend(&mut self, dependencies: Vec<PathBuf>) {
self.dependencies.extend(dependencies.iter().map(|dependency| {
split_path(dependency).map(|(p, _)| p).unwrap_or_else(|_| dependency.clone())
}));
self.dependencies.extend(dependencies.iter().map(|dependency| normalize_path(dependency)));
}
pub fn write<W>(&self, w: &mut W) -> std::io::Result<()>
where W: Write + ?Sized {
write!(w, "{}:", self.name.to_slash_lossy())?;
write!(w, "{}:", self.name)?;
for dep in self.dependencies.iter().unique() {
write!(w, " \\\n {}", dep.to_slash_lossy().replace(' ', "\\ "))?;
write!(w, " \\\n {}", dep.replace(' ', "\\ "))?;
}
Ok(())
}

src/util/elf.rs

@ -29,9 +29,9 @@ use crate::{
},
util::{
comment::{CommentSym, MWComment},
file::map_file,
reader::{Endian, FromReader, ToWriter},
},
vfs::open_path,
};
pub const SHT_MWCATS: u32 = SHT_LOUSER + 0x4A2A82C2;
@ -47,8 +47,9 @@ enum BoundaryState {
pub fn process_elf<P>(path: P) -> Result<ObjInfo>
where P: AsRef<Path> {
let file = map_file(path)?;
let obj_file = object::read::File::parse(file.as_slice())?;
let path = path.as_ref();
let mut file = open_path(path, true)?;
let obj_file = object::read::File::parse(file.map()?)?;
let architecture = match obj_file.architecture() {
Architecture::PowerPc => ObjArchitecture::PowerPc,
arch => bail!("Unexpected architecture: {arch:?}"),

src/util/file.rs

@ -1,15 +1,13 @@
use std::{
ffi::OsStr,
fs::{DirBuilder, File, OpenOptions},
io::{BufRead, BufReader, BufWriter, Cursor, Read, Seek, SeekFrom},
path::{Component, Path, PathBuf},
io,
io::{BufRead, BufWriter, Read, Seek, SeekFrom},
path::{Path, PathBuf},
};
use anyhow::{anyhow, bail, Context, Result};
use anyhow::{anyhow, Context, Result};
use filetime::{set_file_mtime, FileTime};
use memmap2::{Mmap, MmapOptions};
use path_slash::PathBufExt;
use rarc::RarcReader;
use sha1::{Digest, Sha1};
use xxhash_rust::xxh3::xxh3_64;
@ -17,194 +15,11 @@ use crate::{
array_ref,
util::{
ncompress::{decompress_yay0, decompress_yaz0, YAY0_MAGIC, YAZ0_MAGIC},
rarc,
rarc::{Node, RARC_MAGIC},
take_seek::{TakeSeek, TakeSeekExt},
u8_arc::{U8View, U8_MAGIC},
Bytes,
},
vfs::{open_path, VfsFile},
};
pub struct MappedFile {
mmap: Mmap,
mtime: FileTime,
offset: u64,
len: u64,
}
impl MappedFile {
pub fn as_reader(&self) -> Cursor<&[u8]> { Cursor::new(self.as_slice()) }
pub fn as_slice(&self) -> &[u8] {
&self.mmap[self.offset as usize..self.offset as usize + self.len as usize]
}
pub fn len(&self) -> u64 { self.len }
pub fn is_empty(&self) -> bool { self.len == 0 }
pub fn into_inner(self) -> Mmap { self.mmap }
}
pub fn split_path<P>(path: P) -> Result<(PathBuf, Option<PathBuf>)>
where P: AsRef<Path> {
let mut base_path = PathBuf::new();
let mut sub_path: Option<PathBuf> = None;
for component in path.as_ref().components() {
if let Component::Normal(str) = component {
let str = str.to_str().ok_or(anyhow!("Path is not valid UTF-8"))?;
if let Some((a, b)) = str.split_once(':') {
base_path.push(a);
sub_path = Some(PathBuf::from(b));
continue;
}
}
if let Some(sub_path) = &mut sub_path {
sub_path.push(component);
} else {
base_path.push(component);
}
}
Ok((base_path, sub_path))
}
/// Opens a memory mapped file, and decompresses it if needed.
pub fn map_file<P>(path: P) -> Result<FileEntry>
where P: AsRef<Path> {
let (base_path, sub_path) = split_path(path.as_ref())?;
let file = File::open(&base_path)
.with_context(|| format!("Failed to open file '{}'", base_path.display()))?;
let mtime = FileTime::from_last_modification_time(&file.metadata()?);
let mmap = unsafe { MmapOptions::new().map(&file) }
.with_context(|| format!("Failed to mmap file: '{}'", base_path.display()))?;
let (offset, len) = if let Some(sub_path) = sub_path {
if sub_path.as_os_str() == OsStr::new("nlzss") {
return Ok(FileEntry::Buffer(
nintendo_lz::decompress(&mut mmap.as_ref())
.map_err(|e| {
anyhow!(
"Failed to decompress '{}' with NLZSS: {}",
path.as_ref().display(),
e
)
})?
.into_boxed_slice(),
mtime,
));
} else if sub_path.as_os_str() == OsStr::new("yaz0") {
return Ok(FileEntry::Buffer(
decompress_yaz0(mmap.as_ref()).with_context(|| {
format!("Failed to decompress '{}' with Yaz0", path.as_ref().display())
})?,
mtime,
));
} else if sub_path.as_os_str() == OsStr::new("yay0") {
return Ok(FileEntry::Buffer(
decompress_yay0(mmap.as_ref()).with_context(|| {
format!("Failed to decompress '{}' with Yay0", path.as_ref().display())
})?,
mtime,
));
}
let buf = mmap.as_ref();
match *array_ref!(buf, 0, 4) {
RARC_MAGIC => {
let rarc = RarcReader::new(&mut Cursor::new(mmap.as_ref())).with_context(|| {
format!("Failed to open '{}' as RARC archive", base_path.display())
})?;
let (offset, size) = rarc.find_file(&sub_path)?.ok_or_else(|| {
anyhow!("File '{}' not found in '{}'", sub_path.display(), base_path.display())
})?;
(offset, size as u64)
}
U8_MAGIC => {
let arc = U8View::new(buf).map_err(|e| {
anyhow!("Failed to open '{}' as U8 archive: {}", base_path.display(), e)
})?;
let (_, node) = arc.find(sub_path.to_slash_lossy().as_ref()).ok_or_else(|| {
anyhow!("File '{}' not found in '{}'", sub_path.display(), base_path.display())
})?;
(node.offset() as u64, node.length() as u64)
}
_ => bail!("Couldn't detect archive type for '{}'", path.as_ref().display()),
}
} else {
(0, mmap.len() as u64)
};
let map = MappedFile { mmap, mtime, offset, len };
let buf = map.as_slice();
// Auto-detect compression if there's a magic number.
if buf.len() > 4 {
match *array_ref!(buf, 0, 4) {
YAZ0_MAGIC => {
return Ok(FileEntry::Buffer(
decompress_yaz0(buf).with_context(|| {
format!("Failed to decompress '{}' with Yaz0", path.as_ref().display())
})?,
mtime,
));
}
YAY0_MAGIC => {
return Ok(FileEntry::Buffer(
decompress_yay0(buf).with_context(|| {
format!("Failed to decompress '{}' with Yay0", path.as_ref().display())
})?,
mtime,
));
}
_ => {}
}
}
Ok(FileEntry::MappedFile(map))
}
/// Opens a memory mapped file without decompression or archive handling.
pub fn map_file_basic<P>(path: P) -> Result<FileEntry>
where P: AsRef<Path> {
let path = path.as_ref();
let file =
File::open(path).with_context(|| format!("Failed to open file '{}'", path.display()))?;
let mtime = FileTime::from_last_modification_time(&file.metadata()?);
let mmap = unsafe { MmapOptions::new().map(&file) }
.with_context(|| format!("Failed to mmap file: '{}'", path.display()))?;
let len = mmap.len() as u64;
Ok(FileEntry::MappedFile(MappedFile { mmap, mtime, offset: 0, len }))
}
pub type OpenedFile = TakeSeek<File>;
/// Opens a file (not memory mapped). No decompression is performed.
pub fn open_file<P>(path: P) -> Result<OpenedFile>
where P: AsRef<Path> {
let (base_path, sub_path) = split_path(path)?;
let mut file = File::open(&base_path)
.with_context(|| format!("Failed to open file '{}'", base_path.display()))?;
let (offset, size) = if let Some(sub_path) = sub_path {
let rarc = RarcReader::new(&mut BufReader::new(&file))
.with_context(|| format!("Failed to read RARC '{}'", base_path.display()))?;
rarc.find_file(&sub_path)?.map(|(o, s)| (o, s as u64)).ok_or_else(|| {
anyhow!("File '{}' not found in '{}'", sub_path.display(), base_path.display())
})?
} else {
(0, file.seek(SeekFrom::End(0))?)
};
file.seek(SeekFrom::Start(offset))?;
Ok(file.take_seek(size))
}
pub trait Reader: BufRead + Seek {}
impl Reader for Cursor<&[u8]> {}
/// Creates a buffered reader around a file (not memory mapped).
pub fn buf_reader<P>(path: P) -> Result<BufReader<File>>
where P: AsRef<Path> {
let file = File::open(&path)
.with_context(|| format!("Failed to open file '{}'", path.as_ref().display()))?;
Ok(BufReader::new(file))
}
/// Creates a buffered writer around a file (not memory mapped).
pub fn buf_writer<P>(path: P) -> Result<BufWriter<File>>
where P: AsRef<Path> {
@ -217,18 +32,18 @@ where P: AsRef<Path> {
}
/// Reads a string with known size at the specified offset.
pub fn read_string<R>(reader: &mut R, off: u64, size: usize) -> Result<String>
pub fn read_string<R>(reader: &mut R, off: u64, size: usize) -> io::Result<String>
where R: Read + Seek + ?Sized {
let mut data = vec![0u8; size];
let pos = reader.stream_position()?;
reader.seek(SeekFrom::Start(off))?;
reader.read_exact(&mut data)?;
reader.seek(SeekFrom::Start(pos))?;
Ok(String::from_utf8(data)?)
String::from_utf8(data).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
/// Reads a zero-terminated string at the specified offset.
pub fn read_c_string<R>(reader: &mut R, off: u64) -> Result<String>
pub fn read_c_string<R>(reader: &mut R, off: u64) -> io::Result<String>
where R: Read + Seek + ?Sized {
let pos = reader.stream_position()?;
reader.seek(SeekFrom::Start(off))?;
@ -252,8 +67,9 @@ pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> {
let path_str =
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
if let Some(rsp_file) = path_str.strip_prefix('@') {
let reader = buf_reader(rsp_file)?;
for result in reader.lines() {
let rsp_path = Path::new(rsp_file);
let file = open_path(rsp_path, true)?;
for result in file.lines() {
let line = result?;
if !line.is_empty() {
out.push(PathBuf::from_slash(line));
@ -270,120 +86,20 @@ pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> {
Ok(out)
}
/// Iterator over files in a RARC archive.
struct RarcIterator {
file: MappedFile,
base_path: PathBuf,
paths: Vec<(PathBuf, u64, u32)>,
index: usize,
}
impl RarcIterator {
pub fn new(file: MappedFile, base_path: &Path) -> Result<Self> {
let reader = RarcReader::new(&mut file.as_reader())?;
let paths = Self::collect_paths(&reader, base_path);
Ok(Self { file, base_path: base_path.to_owned(), paths, index: 0 })
}
fn collect_paths(reader: &RarcReader, base_path: &Path) -> Vec<(PathBuf, u64, u32)> {
let mut current_path = PathBuf::new();
let mut paths = vec![];
for node in reader.nodes() {
match node {
Node::DirectoryBegin { name } => {
current_path.push(name.name);
}
Node::DirectoryEnd { name: _ } => {
current_path.pop();
}
Node::File { name, offset, size } => {
let path = base_path.join(&current_path).join(name.name);
paths.push((path, offset, size));
}
Node::CurrentDirectory => {}
Node::ParentDirectory => {}
}
}
paths
}
}
impl Iterator for RarcIterator {
type Item = Result<(PathBuf, Box<[u8]>)>;
fn next(&mut self) -> Option<Self::Item> {
if self.index >= self.paths.len() {
return None;
}
let (path, off, size) = self.paths[self.index].clone();
self.index += 1;
let slice = &self.file.as_slice()[off as usize..off as usize + size as usize];
match decompress_if_needed(slice) {
Ok(buf) => Some(Ok((path, buf.into_owned()))),
Err(e) => Some(Err(e)),
}
}
}
/// A file entry, either a memory mapped file or an owned buffer.
pub enum FileEntry {
MappedFile(MappedFile),
Buffer(Box<[u8]>, FileTime),
}
impl FileEntry {
/// Creates a reader for the file.
pub fn as_reader(&self) -> Cursor<&[u8]> {
match self {
Self::MappedFile(file) => file.as_reader(),
Self::Buffer(slice, _) => Cursor::new(slice),
}
}
pub fn as_slice(&self) -> &[u8] {
match self {
Self::MappedFile(file) => file.as_slice(),
Self::Buffer(slice, _) => slice,
}
}
pub fn len(&self) -> u64 {
match self {
Self::MappedFile(file) => file.len(),
Self::Buffer(slice, _) => slice.len() as u64,
}
}
pub fn is_empty(&self) -> bool {
match self {
Self::MappedFile(file) => file.is_empty(),
Self::Buffer(slice, _) => slice.is_empty(),
}
}
pub fn mtime(&self) -> FileTime {
match self {
Self::MappedFile(file) => file.mtime,
Self::Buffer(_, mtime) => *mtime,
}
}
}
/// Information about a file when it was read.
/// Used to determine if a file has changed since it was read (mtime)
/// and if it needs to be written (hash).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct FileReadInfo {
pub mtime: Option<FileTime>,
pub hash: u64,
}
impl FileReadInfo {
pub fn new(entry: &mut dyn VfsFile) -> Result<Self> {
let hash = xxh3_64(entry.map()?);
let metadata = entry.metadata()?;
Ok(Self { mtime: metadata.mtime, hash })
}
}
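A sketch of how `FileReadInfo` might be used to detect staleness, assuming a hypothetical output path:
```rust
use std::path::Path;

fn main() -> anyhow::Result<()> {
    // Hypothetical: hash an output now, compare metadata later to
    // decide whether it must be rewritten.
    let mut file = open_path(Path::new("build/main.dol"), true)?;
    let info = FileReadInfo::new(file.as_mut())?;
    if info.mtime == file.metadata()?.mtime {
        println!("unchanged since last read (hash {:016x})", info.hash);
    }
    Ok(())
}
```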
@ -393,104 +109,34 @@ impl FileReadInfo {
pub struct FileIterator {
paths: Vec<PathBuf>,
index: usize,
rarc: Option<RarcIterator>,
}
impl FileIterator {
pub fn new(paths: &[PathBuf]) -> Result<Self> {
Ok(Self { paths: process_rsp(paths)?, index: 0 })
}
fn next_rarc(&mut self) -> Option<Result<(PathBuf, FileEntry)>> {
if let Some(rarc) = &mut self.rarc {
match rarc.next() {
Some(Ok((path, buf))) => {
let mut path_str = rarc.base_path.as_os_str().to_os_string();
path_str.push(OsStr::new(":"));
path_str.push(path.as_os_str());
return Some(Ok((path, FileEntry::Buffer(buf, rarc.file.mtime))));
}
Some(Err(err)) => return Some(Err(err)),
None => self.rarc = None,
}
}
None
}
fn next_path(&mut self) -> Option<Result<(PathBuf, Box<dyn VfsFile>)>> {
if self.index >= self.paths.len() {
return None;
}
let path = self.paths[self.index].clone();
self.index += 1;
match open_path(&path, true) {
Ok(file) => Some(Ok((path, file))),
Err(e) => Some(Err(e)),
}
}
fn handle_file(
&mut self,
file: MappedFile,
path: PathBuf,
) -> Option<Result<(PathBuf, FileEntry)>> {
let buf = file.as_slice();
if buf.len() <= 4 {
return Some(Ok((path, FileEntry::MappedFile(file))));
}
match *array_ref!(buf, 0, 4) {
YAZ0_MAGIC => self.handle_yaz0(file, path),
YAY0_MAGIC => self.handle_yay0(file, path),
RARC_MAGIC => self.handle_rarc(file, path),
_ => Some(Ok((path, FileEntry::MappedFile(file)))),
}
}
fn handle_yaz0(
&mut self,
file: MappedFile,
path: PathBuf,
) -> Option<Result<(PathBuf, FileEntry)>> {
Some(match decompress_yaz0(file.as_slice()) {
Ok(buf) => Ok((path, FileEntry::Buffer(buf, file.mtime))),
Err(e) => Err(e),
})
}
fn handle_yay0(
&mut self,
file: MappedFile,
path: PathBuf,
) -> Option<Result<(PathBuf, FileEntry)>> {
Some(match decompress_yay0(file.as_slice()) {
Ok(buf) => Ok((path, FileEntry::Buffer(buf, file.mtime))),
Err(e) => Err(e),
})
}
fn handle_rarc(
&mut self,
file: MappedFile,
path: PathBuf,
) -> Option<Result<(PathBuf, FileEntry)>> {
self.rarc = match RarcIterator::new(file, &path) {
Ok(iter) => Some(iter),
Err(e) => return Some(Err(e)),
};
self.next()
}
}
impl Iterator for FileIterator {
type Item = Result<(PathBuf, Box<dyn VfsFile>)>;
fn next(&mut self) -> Option<Self::Item> { self.next_path() }
}
pub fn touch<P>(path: P) -> io::Result<()>
where P: AsRef<Path> {
if path.as_ref().exists() {
set_file_mtime(path, FileTime::now())


@ -23,9 +23,9 @@ use crate::{
ObjSections, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
ObjSymbols, ObjUnit,
},
util::nested::NestedVec,
vfs::open_path,
};
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum SymbolKind {
Function,
@ -722,8 +722,8 @@ pub fn apply_map_file<P>(
where
P: AsRef<Path>,
{
let mut file = open_path(path.as_ref(), true)?;
let info = process_map(file.as_mut(), common_bss_start, mw_comment_version)?;
apply_map(info, obj)
}


@ -1,425 +1,292 @@
// Source: https://github.com/Julgodis/picori/blob/650da9f4fe6050b39b80d5360416591c748058d5/src/rarc.rs
// License: MIT
// Modified to use `std::io::Cursor<&[u8]>` and project's FromReader trait
use std::{
collections::HashMap,
fmt::Display,
hash::{Hash, Hasher},
io,
io::{Read, Seek, SeekFrom},
path::{Component, Path, PathBuf},
};
use std::{borrow::Cow, ffi::CStr};
use anyhow::{anyhow, bail, ensure, Result};
use zerocopy::{big_endian::*, AsBytes, FromBytes, FromZeroes};
use crate::util::{
file::read_c_string,
reader::{struct_size, Endian, FromReader},
};
#[derive(Debug, Clone)]
pub struct NamedHash {
pub name: String,
pub hash: u16,
}
impl Display for NamedHash {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.name)
}
}
impl Hash for NamedHash {
fn hash<H>(&self, state: &mut H)
where H: Hasher {
self.hash.hash(state);
}
}
impl PartialEq for NamedHash {
fn eq(&self, other: &Self) -> bool {
if self.hash == other.hash {
self.name == other.name
} else {
false
}
}
}
impl Eq for NamedHash {}
#[derive(Debug, Clone)]
enum RarcDirectory {
File {
/// Name of the file.
name: NamedHash,
/// Offset of the file in the RARC file. This offset is relative to the start of the RARC file.
offset: u64,
/// Size of the file.
size: u32,
},
Folder {
/// Name of the folder.
name: NamedHash,
},
CurrentFolder,
ParentFolder,
}
#[derive(Debug, Clone)]
struct RarcNode {
/// Index of first directory.
pub index: u32,
/// Number of directories.
pub count: u32,
}
pub struct RarcReader {
directories: Vec<RarcDirectory>,
nodes: HashMap<NamedHash, RarcNode>,
root_node: NamedHash,
}
use crate::static_assert;
pub const RARC_MAGIC: [u8; 4] = *b"RARC";
#[derive(Copy, Clone, Debug, PartialEq, FromBytes, FromZeroes, AsBytes)]
#[repr(C, align(4))]
pub struct RarcHeader {
/// Magic identifier. (Always "RARC")
magic: [u8; 4],
/// Length of the RARC file.
file_len: U32,
/// Length of the header. (Always 32)
header_len: U32,
/// Start of the file data, relative to the end of the file header.
data_offset: U32,
/// Length of the file data.
data_len: U32,
_unk1: U32,
_unk2: U32,
_unk3: U32,
}
static_assert!(size_of::<RarcHeader>() == 0x20);
impl RarcHeader {
/// Length of the RARC file.
pub fn file_len(&self) -> u32 { self.file_len.get() }
/// Length of the header.
pub fn header_len(&self) -> u32 { self.header_len.get() }
/// Start of the file data, relative to the end of the file header.
pub fn data_offset(&self) -> u32 { self.data_offset.get() }
/// Length of the file data.
pub fn data_len(&self) -> u32 { self.data_len.get() }
}
#[derive(Copy, Clone, Debug, PartialEq, FromBytes, FromZeroes, AsBytes)]
#[repr(C, align(4))]
struct RarcInfo {
/// Number of directories in the directory table.
directory_count: U32,
/// Offset to the start of the directory table, relative to the end of the file header.
directory_offset: U32,
/// Number of nodes in the node table.
node_count: U32,
/// Offset to the start of the node table, relative to the end of the file header.
node_offset: U32,
/// Length of the string table.
string_table_len: U32,
/// Offset to the start of the string table, relative to the end of the file header.
string_table_offset: U32,
/// Number of files in the node table.
_file_count: U16,
_unk4: U16,
_unk5: U32,
}
static_assert!(size_of::<RarcInfo>() == 0x20);
#[derive(Copy, Clone, Debug, PartialEq, FromBytes, FromZeroes, AsBytes)]
#[repr(C, align(4))]
pub struct RarcNode {
/// Index of the node. (0xFFFF for directories)
index: U16,
/// Hash of the node name.
name_hash: U16,
/// Unknown. (0x200 for folders, 0x1100 for files)
_unk0: U16,
/// Offset in the string table to the node name.
name_offset: U16,
/// Files: Offset in the data to the file data.
/// Directories: Index of the directory in the directory table.
data_offset: U32,
/// Files: Length of the data.
/// Directories: Unknown. Always 16.
data_length: U32,
_unk1: U32,
}
static_assert!(size_of::<RarcNode>() == 0x14);
impl RarcNode {
/// Whether the node is a file.
pub fn is_file(&self) -> bool { self.index.get() != 0xFFFF }
/// Whether the node is a directory.
pub fn is_dir(&self) -> bool { self.index.get() == 0xFFFF }
/// Offset in the string table to the node name.
pub fn name_offset(&self) -> u32 { self.name_offset.get() as u32 }
/// Files: Offset in the data to the file data.
/// Directories: Index of the directory in the directory table.
pub fn data_offset(&self) -> u32 { self.data_offset.get() }
/// Files: Length of the data.
/// Directories: Unknown. Always 16.
pub fn data_length(&self) -> u32 { self.data_length.get() }
}
#[derive(Copy, Clone, Debug, PartialEq, FromBytes, FromZeroes, AsBytes)]
#[repr(C, align(4))]
pub struct RarcDirectory {
/// Identifier of the directory.
identifier: [u8; 4],
/// Offset in the string table to the directory name.
name_offset: U32,
/// Hash of the directory name.
name_hash: U16,
/// Number of nodes in the directory.
count: U16,
/// Index of the first node in the directory.
index: U32,
}
static_assert!(size_of::<RarcDirectory>() == 0x10);
impl RarcDirectory {
/// Offset in the string table to the directory name.
pub fn name_offset(&self) -> u32 { self.name_offset.get() }
/// Index of the first node in the directory.
pub fn node_index(&self) -> u32 { self.index.get() }
/// Number of nodes in the directory.
pub fn node_count(&self) -> u16 { self.count.get() }
}
/// A view into a RARC archive.
pub struct RarcView<'a> {
/// The RARC archive header.
pub header: &'a RarcHeader,
/// The directories in the RARC archive.
pub directories: &'a [RarcDirectory],
/// The nodes in the RARC archive.
pub nodes: &'a [RarcNode],
/// The string table containing all file and directory names.
pub string_table: &'a [u8],
/// The file data.
pub data: &'a [u8],
}
impl<'a> RarcView<'a> {
/// Create a new RARC view from a buffer.
pub fn new(buf: &'a [u8]) -> Result<Self, &'static str> {
let Some(header) = RarcHeader::ref_from_prefix(buf) else {
return Err("Buffer not large enough for RARC header");
};
if header.magic != RARC_MAGIC {
return Err("RARC magic mismatch");
}
if header.header_len.get() as usize != size_of::<RarcHeader>() {
return Err("RARC header size mismatch");
}
// All offsets are relative to the _end_ of the header, so we can
// just trim the header from the buffer and use the offsets as is.
let buf = &buf[size_of::<RarcHeader>()..];
let Some(info) = RarcInfo::ref_from_prefix(buf) else {
return Err("Buffer not large enough for RARC info");
};
let directory_table_offset = info.directory_offset.get() as usize;
let directory_table_size = info.directory_count.get() as usize * size_of::<RarcDirectory>();
let directories_buf = buf
.get(directory_table_offset..directory_table_offset + directory_table_size)
.ok_or("RARC directory table out of bounds")?;
let directories =
RarcDirectory::slice_from(directories_buf).ok_or("RARC directory table not aligned")?;
if directories.is_empty() || directories[0].identifier != *b"ROOT" {
return Err("RARC root directory not found");
}
let node_table_offset = info.node_offset.get() as usize;
let node_table_size = info.node_count.get() as usize * size_of::<RarcNode>();
let nodes_buf = buf
.get(node_table_offset..node_table_offset + node_table_size)
.ok_or("RARC node table out of bounds")?;
let nodes = RarcNode::slice_from(nodes_buf).ok_or("RARC node table not aligned")?;
let string_table_offset = info.string_table_offset.get() as usize;
let string_table_size = info.string_table_len.get() as usize;
let string_table = buf
.get(string_table_offset..string_table_offset + string_table_size)
.ok_or("RARC string table out of bounds")?;
let data_offset = header.data_offset.get() as usize;
let data_size = header.data_len.get() as usize;
let data =
buf.get(data_offset..data_offset + data_size).ok_or("RARC file data out of bounds")?;
Ok(Self { header, directories, nodes, string_table, data })
}
/// Get a string from the string table at the given offset.
pub fn get_string(&self, offset: u32) -> Result<Cow<str>, String> {
let name_buf = self.string_table.get(offset as usize..).ok_or_else(|| {
format!(
"RARC: name offset {} out of bounds (string table size: {})",
offset,
self.string_table.len()
)
})?;
let c_string = CStr::from_bytes_until_nul(name_buf)
.map_err(|_| format!("RARC: name at offset {} not null-terminated", offset))?;
Ok(c_string.to_string_lossy())
}
/// Get the data for a file node.
pub fn get_data(&self, node: RarcNode) -> Result<&[u8], &'static str> {
if node.is_dir() {
return Err("Cannot get data for a directory node");
}
let offset = node.data_offset.get() as usize;
let size = node.data_length.get() as usize;
self.data.get(offset..offset + size).ok_or("RARC file data out of bounds")
}
/// Finds a particular file or directory by path.
pub fn find(&self, path: &str) -> Option<RarcNodeKind> {
let mut split = path.split('/');
let mut current = next_non_empty(&mut split);
let mut dir_idx = 0;
let mut dir = self.directories[dir_idx];
// Allow matching the root directory by name optionally
if let Ok(root_name) = self.get_string(dir.name_offset()) {
if root_name.eq_ignore_ascii_case(current) {
current = next_non_empty(&mut split);
}
}
if current.is_empty() {
return Some(RarcNodeKind::Directory(dir_idx, dir));
}
let mut idx = dir.index.get() as usize;
while idx < dir.index.get() as usize + dir.count.get() as usize {
let node = self.nodes.get(idx).copied()?;
let Ok(name) = self.get_string(node.name_offset()) else {
idx += 1;
continue;
};
if name.eq_ignore_ascii_case(current) {
current = next_non_empty(&mut split);
if node.is_dir() {
dir_idx = node.data_offset.get() as usize;
dir = self.directories.get(dir_idx).cloned()?;
idx = dir.index.get() as usize;
if current.is_empty() {
return Some(RarcNodeKind::Directory(dir_idx, dir));
} else {
continue;
}
} else {
return Some(RarcNodeKind::File(idx, node));
}
}
idx += 1;
}
None
}
/// Get the children of a directory.
pub fn children(&self, dir: RarcDirectory) -> &[RarcNode] {
let start = dir.node_index() as usize;
let end = start + dir.node_count() as usize;
self.nodes.get(start..end).unwrap_or(&[])
}
}
#[derive(Debug)]
pub enum RarcNodeKind {
File(usize, RarcNode),
Directory(usize, RarcDirectory),
}
fn next_non_empty<'a>(iter: &mut impl Iterator<Item = &'a str>) -> &'a str {
loop {
match iter.next() {
Some("") => continue,
Some(next) => break next,
None => break "",
}
}
}
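To illustrate the new zerocopy-based API, a hedged sketch of parsing a RARC archive with `RarcView` (the archive name and inner path are hypothetical):
```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical archive and inner path.
    let buf = std::fs::read("RELS.arc")?;
    let view = RarcView::new(&buf)?;
    if let Some(RarcNodeKind::File(_, node)) = view.find("mmem/f_pc_profile_lst.rel") {
        let data = view.get_data(node)?;
        println!("{} bytes", data.len());
    }
    Ok(())
}
```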


@ -32,7 +32,7 @@ pub enum U8NodeKind {
}
/// An individual file system node.
#[derive(Copy, Clone, Debug, PartialEq, FromBytes, FromZeroes, AsBytes)]
#[repr(C, align(4))]
pub struct U8Node {
kind: u8,
@ -121,7 +121,7 @@ impl<'a> U8View<'a> {
pub fn iter(&self) -> U8Iter { U8Iter { inner: self, idx: 1 } }
/// Get the name of a node.
pub fn get_name(&self, node: U8Node) -> Result<Cow<str>, String> {
let name_buf = self.string_table.get(node.name_offset() as usize..).ok_or_else(|| {
format!(
"U8: name offset {} out of bounds (string table size: {})",
@ -136,22 +136,29 @@ impl<'a> U8View<'a> {
}
/// Finds a particular file or directory by path.
pub fn find(&self, path: &str) -> Option<(usize, U8Node)> {
let mut split = path.split('/');
let mut current = next_non_empty(&mut split);
if current.is_empty() {
return Some((0, self.nodes[0]));
}
let mut idx = 1;
let mut stop_at = None;
while let Some(node) = self.nodes.get(idx).copied() {
if self.get_name(node).map_or(false, |name| name.eq_ignore_ascii_case(current)) {
current = next_non_empty(&mut split);
if current.is_empty() {
return Some((idx, node));
}
if node.is_dir() {
// Descend into directory
idx += 1;
stop_at = Some(node.length() as usize + idx);
} else {
// Not a directory
break;
}
} else if node.is_dir() {
// Skip directory
idx = node.length() as usize;
@ -169,6 +176,16 @@ impl<'a> U8View<'a> {
}
}
fn next_non_empty<'a>(iter: &mut impl Iterator<Item = &'a str>) -> &'a str {
loop {
match iter.next() {
Some("") => continue,
Some(next) => break next,
None => break "",
}
}
}
/// Iterator over the nodes in a U8 archive.
pub struct U8Iter<'a> {
inner: &'a U8View<'a>,
@ -176,11 +193,11 @@ pub struct U8Iter<'a> {
}
impl<'a> Iterator for U8Iter<'a> {
type Item = (usize, U8Node, Result<Cow<'a, str>, String>);
fn next(&mut self) -> Option<Self::Item> {
let idx = self.idx;
let node = self.inner.nodes.get(idx).copied()?;
let name = self.inner.get_name(node);
self.idx += 1;
Some((idx, node, name))
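A short sketch of iterating a U8 archive with the by-value `U8Node` API (the archive name is hypothetical):
```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical archive: print every node name in FST order.
    let buf = std::fs::read("files.arc")?;
    let view = U8View::new(&buf)?;
    for (idx, node, name) in view.iter() {
        let suffix = if node.is_dir() { "/" } else { "" };
        println!("{:4} {}{}", idx, name?, suffix);
    }
    Ok(())
}
```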

src/vfs/common.rs Normal file

@ -0,0 +1,130 @@
use std::{
io,
io::{BufRead, Cursor, Read, Seek, SeekFrom},
sync::Arc,
};
use filetime::FileTime;
use super::{DiscStream, VfsFileType, VfsMetadata};
use crate::vfs::VfsFile;
#[derive(Clone)]
pub struct StaticFile {
inner: Cursor<Arc<[u8]>>,
mtime: Option<FileTime>,
}
impl StaticFile {
pub fn new(data: Arc<[u8]>, mtime: Option<FileTime>) -> Self {
Self { inner: Cursor::new(data), mtime }
}
}
impl BufRead for StaticFile {
fn fill_buf(&mut self) -> io::Result<&[u8]> { self.inner.fill_buf() }
fn consume(&mut self, amt: usize) { self.inner.consume(amt) }
}
impl Read for StaticFile {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.inner.read(buf) }
}
impl Seek for StaticFile {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { self.inner.seek(pos) }
}
impl VfsFile for StaticFile {
fn map(&mut self) -> io::Result<&[u8]> { Ok(self.inner.get_ref()) }
fn metadata(&mut self) -> io::Result<VfsMetadata> {
Ok(VfsMetadata {
file_type: VfsFileType::File,
len: self.inner.get_ref().len() as u64,
mtime: self.mtime,
})
}
fn into_disc_stream(self: Box<Self>) -> Box<dyn DiscStream> { self }
}
#[derive(Clone)]
pub struct WindowedFile {
base: Box<dyn VfsFile>,
pos: u64,
begin: u64,
end: u64,
}
impl WindowedFile {
pub fn new(mut base: Box<dyn VfsFile>, offset: u64, size: u64) -> io::Result<Self> {
base.seek(SeekFrom::Start(offset))?;
Ok(Self { base, pos: offset, begin: offset, end: offset + size })
}
#[inline]
pub fn len(&self) -> u64 { self.end - self.begin }
}
impl BufRead for WindowedFile {
fn fill_buf(&mut self) -> io::Result<&[u8]> {
let buf = self.base.fill_buf()?;
let remaining = self.end.saturating_sub(self.pos);
Ok(&buf[..buf.len().min(remaining as usize)])
}
fn consume(&mut self, amt: usize) {
let remaining = self.end.saturating_sub(self.pos);
let amt = amt.min(remaining as usize);
self.base.consume(amt);
self.pos += amt as u64;
}
}
impl Read for WindowedFile {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let remaining = self.end.saturating_sub(self.pos);
if remaining == 0 {
return Ok(0);
}
let len = buf.len().min(remaining as usize);
self.base.read(&mut buf[..len])
}
}
impl Seek for WindowedFile {
#[inline]
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
let mut pos = match pos {
SeekFrom::Start(p) => self.begin + p,
SeekFrom::End(p) => self.end.saturating_add_signed(p),
SeekFrom::Current(p) => self.pos.saturating_add_signed(p),
};
if pos < self.begin {
pos = self.begin;
} else if pos > self.end {
pos = self.end;
}
let result = self.base.seek(SeekFrom::Start(pos))?;
self.pos = result;
Ok(result - self.begin)
}
#[inline]
fn stream_position(&mut self) -> io::Result<u64> { Ok(self.pos) }
}
impl VfsFile for WindowedFile {
fn map(&mut self) -> io::Result<&[u8]> {
let buf = self.base.map()?;
Ok(&buf[self.begin as usize..self.end as usize])
}
fn metadata(&mut self) -> io::Result<VfsMetadata> {
let metadata = self.base.metadata()?;
Ok(VfsMetadata { file_type: VfsFileType::File, len: self.len(), mtime: metadata.mtime })
}
fn into_disc_stream(self: Box<Self>) -> Box<dyn DiscStream> { self }
}
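A sketch of how `WindowedFile` might be constructed inside the vfs module to expose a byte range of a parent file as a standalone read-only file (the path and offsets are illustrative):
```rust
use std::path::Path;

fn main() -> anyhow::Result<()> {
    // Hypothetical: view bytes 0x20..0x120 of an archive as a file.
    let base = open_path(Path::new("files.arc"), false)?;
    let window = WindowedFile::new(base, 0x20, 0x100)?;
    assert_eq!(window.len(), 0x100);
    Ok(())
}
```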

src/vfs/disc.rs Normal file

@ -0,0 +1,297 @@
use std::{
io,
io::{BufRead, Cursor, Read, Seek, SeekFrom},
sync::Arc,
};
use filetime::FileTime;
use nodtool::{
nod,
nod::{DiscStream, Fst, NodeKind, OwnedFileStream, PartitionBase, PartitionMeta},
};
use super::{StaticFile, Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult};
#[derive(Clone)]
pub struct DiscFs {
base: Box<dyn PartitionBase>,
meta: Box<PartitionMeta>,
mtime: Option<FileTime>,
}
enum DiscNode<'a> {
None,
Root,
Sys,
Node(Fst<'a>, usize, nod::Node),
Static(&'a [u8]),
}
impl DiscFs {
pub fn new(mut base: Box<dyn PartitionBase>, mtime: Option<FileTime>) -> io::Result<Self> {
let meta = base.meta().map_err(nod_to_io_error)?;
Ok(Self { base, meta, mtime })
}
fn find(&self, path: &str) -> VfsResult<DiscNode> {
let path = path.trim_matches('/');
let mut split = path.split('/');
let Some(segment) = split.next() else {
return Ok(DiscNode::Root);
};
if segment.is_empty() {
return Ok(DiscNode::Root);
}
if segment.eq_ignore_ascii_case("files") {
let fst = Fst::new(&self.meta.raw_fst)?;
if split.next().is_none() {
let root = fst.nodes[0];
return Ok(DiscNode::Node(fst, 0, root));
}
let remainder = &path[segment.len() + 1..];
match fst.find(remainder) {
Some((idx, node)) => Ok(DiscNode::Node(fst, idx, node)),
None => Ok(DiscNode::None),
}
} else if segment.eq_ignore_ascii_case("sys") {
let Some(segment) = split.next() else {
return Ok(DiscNode::Sys);
};
// No directories in sys
if split.next().is_some() {
return Ok(DiscNode::None);
}
match segment.to_ascii_lowercase().as_str() {
"" => Ok(DiscNode::Sys),
"boot.bin" => Ok(DiscNode::Static(self.meta.raw_boot.as_slice())),
"bi2.bin" => Ok(DiscNode::Static(self.meta.raw_bi2.as_slice())),
"apploader.bin" => Ok(DiscNode::Static(self.meta.raw_apploader.as_ref())),
"fst.bin" => Ok(DiscNode::Static(self.meta.raw_fst.as_ref())),
"main.dol" => Ok(DiscNode::Static(self.meta.raw_dol.as_ref())),
"ticket.bin" => {
Ok(DiscNode::Static(self.meta.raw_ticket.as_deref().ok_or(VfsError::NotFound)?))
}
"tmd.bin" => {
Ok(DiscNode::Static(self.meta.raw_tmd.as_deref().ok_or(VfsError::NotFound)?))
}
"cert.bin" => Ok(DiscNode::Static(
self.meta.raw_cert_chain.as_deref().ok_or(VfsError::NotFound)?,
)),
"h3.bin" => Ok(DiscNode::Static(
self.meta.raw_h3_table.as_deref().ok_or(VfsError::NotFound)?,
)),
_ => Ok(DiscNode::None),
}
} else {
return Ok(DiscNode::None);
}
}
}
impl Vfs for DiscFs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
match self.find(path)? {
DiscNode::None => Err(VfsError::NotFound),
DiscNode::Root => Err(VfsError::DirectoryExists),
DiscNode::Sys => Err(VfsError::DirectoryExists),
DiscNode::Node(_, _, node) => match node.kind() {
NodeKind::File => {
if node.length() > 2048 {
let file = self.base.clone().into_open_file(node)?;
Ok(Box::new(DiscFile::new(file, self.mtime)))
} else {
let len = node.length() as usize;
let mut file = self.base.open_file(node)?;
let mut data = vec![0u8; len];
file.read_exact(&mut data)?;
Ok(Box::new(StaticFile::new(Arc::from(data.as_slice()), self.mtime)))
}
}
NodeKind::Directory => Err(VfsError::FileExists),
NodeKind::Invalid => Err(VfsError::from("FST: Invalid node kind")),
},
DiscNode::Static(data) => Ok(Box::new(StaticFile::new(Arc::from(data), self.mtime))),
}
}
fn exists(&mut self, path: &str) -> VfsResult<bool> {
Ok(!matches!(self.find(path)?, DiscNode::None))
}
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
match self.find(path)? {
DiscNode::None => Err(VfsError::NotFound),
DiscNode::Root => Ok(vec!["files".to_string(), "sys".to_string()]),
DiscNode::Sys => {
let mut sys = vec![
"boot.bin".to_string(),
"bi2.bin".to_string(),
"apploader.bin".to_string(),
"fst.bin".to_string(),
"main.dol".to_string(),
];
if self.meta.raw_ticket.is_some() {
sys.push("ticket.bin".to_string());
}
if self.meta.raw_tmd.is_some() {
sys.push("tmd.bin".to_string());
}
if self.meta.raw_cert_chain.is_some() {
sys.push("cert.bin".to_string());
}
if self.meta.raw_h3_table.is_some() {
sys.push("h3.bin".to_string());
}
Ok(sys)
}
DiscNode::Node(fst, idx, node) => {
match node.kind() {
NodeKind::File => return Err(VfsError::FileExists),
NodeKind::Directory => {}
NodeKind::Invalid => return Err(VfsError::from("FST: Invalid node kind")),
}
let mut entries = Vec::new();
let mut idx = idx + 1;
let end = node.length() as usize;
while idx < end {
let child = fst
.nodes
.get(idx)
.copied()
.ok_or_else(|| VfsError::from("FST: Node index out of bounds"))?;
entries.push(fst.get_name(child)?.to_string());
if child.is_dir() {
idx = child.length() as usize;
} else {
idx += 1;
}
}
Ok(entries)
}
DiscNode::Static(_) => Err(VfsError::FileExists),
}
}
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
match self.find(path)? {
DiscNode::None => Err(VfsError::NotFound),
DiscNode::Root | DiscNode::Sys => {
Ok(VfsMetadata { file_type: VfsFileType::Directory, len: 0, mtime: self.mtime })
}
DiscNode::Node(_, _, node) => {
let (file_type, len) = match node.kind() {
NodeKind::File => (VfsFileType::File, node.length()),
NodeKind::Directory => (VfsFileType::Directory, 0),
NodeKind::Invalid => return Err(VfsError::from("FST: Invalid node kind")),
};
Ok(VfsMetadata { file_type, len, mtime: self.mtime })
}
DiscNode::Static(data) => Ok(VfsMetadata {
file_type: VfsFileType::File,
len: data.len() as u64,
mtime: self.mtime,
}),
}
}
}
#[derive(Clone)]
enum DiscFileInner {
Stream(OwnedFileStream),
Mapped(Cursor<Arc<[u8]>>),
}
#[derive(Clone)]
struct DiscFile {
inner: DiscFileInner,
mtime: Option<FileTime>,
}
impl DiscFile {
pub fn new(file: OwnedFileStream, mtime: Option<FileTime>) -> Self {
Self { inner: DiscFileInner::Stream(file), mtime }
}
fn convert_to_mapped(&mut self) {
match &mut self.inner {
DiscFileInner::Stream(stream) => {
let pos = stream.stream_position().unwrap();
stream.seek(SeekFrom::Start(0)).unwrap();
let mut data = vec![0u8; stream.len() as usize];
stream.read_exact(&mut data).unwrap();
let mut cursor = Cursor::new(Arc::from(data.as_slice()));
cursor.set_position(pos);
self.inner = DiscFileInner::Mapped(cursor);
}
DiscFileInner::Mapped(_) => {}
};
}
}
impl BufRead for DiscFile {
fn fill_buf(&mut self) -> io::Result<&[u8]> {
match &mut self.inner {
DiscFileInner::Stream(stream) => stream.fill_buf(),
DiscFileInner::Mapped(data) => data.fill_buf(),
}
}
fn consume(&mut self, amt: usize) {
match &mut self.inner {
DiscFileInner::Stream(stream) => stream.consume(amt),
DiscFileInner::Mapped(data) => data.consume(amt),
}
}
}
impl Read for DiscFile {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match &mut self.inner {
DiscFileInner::Stream(stream) => stream.read(buf),
DiscFileInner::Mapped(data) => data.read(buf),
}
}
}
impl Seek for DiscFile {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
match &mut self.inner {
DiscFileInner::Stream(stream) => stream.seek(pos),
DiscFileInner::Mapped(data) => data.seek(pos),
}
}
}
impl VfsFile for DiscFile {
fn map(&mut self) -> io::Result<&[u8]> {
self.convert_to_mapped();
match &mut self.inner {
DiscFileInner::Stream(_) => unreachable!(),
DiscFileInner::Mapped(data) => Ok(data.get_ref()),
}
}
fn metadata(&mut self) -> io::Result<VfsMetadata> {
match &mut self.inner {
DiscFileInner::Stream(stream) => Ok(VfsMetadata {
file_type: VfsFileType::File,
len: stream.len(),
mtime: self.mtime,
}),
DiscFileInner::Mapped(data) => Ok(VfsMetadata {
file_type: VfsFileType::File,
len: data.get_ref().len() as u64,
mtime: self.mtime,
}),
}
}
fn into_disc_stream(self: Box<Self>) -> Box<dyn DiscStream> { self }
}
pub fn nod_to_io_error(e: nod::Error) -> io::Error {
match e {
nod::Error::Io(msg, e) => io::Error::new(e.kind(), format!("{}: {}", msg, e)),
e => io::Error::new(io::ErrorKind::InvalidData, e),
}
}

src/vfs/mod.rs Normal file

@ -0,0 +1,289 @@
mod common;
mod disc;
mod rarc;
mod std_fs;
mod u8_arc;
use std::{
error::Error,
fmt::{Debug, Display, Formatter},
io,
io::{BufRead, Read, Seek, SeekFrom},
path::Path,
sync::Arc,
};
use anyhow::{anyhow, Context};
use common::{StaticFile, WindowedFile};
use disc::{nod_to_io_error, DiscFs};
use dyn_clone::DynClone;
use filetime::FileTime;
use nodtool::{nod, nod::DiscStream};
use rarc::RarcFs;
pub use std_fs::StdFs;
use u8_arc::U8Fs;
use crate::util::{
ncompress::{YAY0_MAGIC, YAZ0_MAGIC},
rarc::RARC_MAGIC,
u8_arc::U8_MAGIC,
};
pub trait Vfs: DynClone + Send + Sync {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>>;
fn exists(&mut self, path: &str) -> VfsResult<bool>;
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>>;
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata>;
}
dyn_clone::clone_trait_object!(Vfs);
pub trait VfsFile: DiscStream + BufRead {
fn map(&mut self) -> io::Result<&[u8]>;
fn metadata(&mut self) -> io::Result<VfsMetadata>;
fn into_disc_stream(self: Box<Self>) -> Box<dyn DiscStream>;
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum VfsFileType {
File,
Directory,
}
pub struct VfsMetadata {
pub file_type: VfsFileType,
pub len: u64,
pub mtime: Option<FileTime>,
}
impl VfsMetadata {
pub fn is_file(&self) -> bool { self.file_type == VfsFileType::File }
pub fn is_dir(&self) -> bool { self.file_type == VfsFileType::Directory }
}
dyn_clone::clone_trait_object!(VfsFile);
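Since `Vfs` is object-safe, helpers can be written against `dyn Vfs` regardless of the backing container; a minimal sketch (the helper itself is hypothetical):
```rust
// Hypothetical helper: list a directory through any Vfs backend.
fn list(fs: &mut dyn Vfs, path: &str) -> VfsResult<()> {
    for name in fs.read_dir(path)? {
        let meta = fs.metadata(&format!("{}/{}", path, name))?;
        println!("{}{}", name, if meta.is_dir() { "/" } else { "" });
    }
    Ok(())
}
```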
#[derive(Debug)]
pub enum VfsError {
NotFound,
IoError(io::Error),
Other(String),
FileExists,
DirectoryExists,
}
pub type VfsResult<T, E = VfsError> = Result<T, E>;
impl From<io::Error> for VfsError {
fn from(e: io::Error) -> Self { VfsError::IoError(e) }
}
impl From<String> for VfsError {
fn from(e: String) -> Self { VfsError::Other(e) }
}
impl From<&str> for VfsError {
fn from(e: &str) -> Self { VfsError::Other(e.to_string()) }
}
impl Display for VfsError {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
match self {
VfsError::NotFound => write!(f, "File or directory not found"),
VfsError::IoError(e) => write!(f, "{}", e),
VfsError::Other(e) => write!(f, "{}", e),
VfsError::FileExists => write!(f, "File already exists"),
VfsError::DirectoryExists => write!(f, "Directory already exists"),
}
}
}
impl Error for VfsError {}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum FileFormat {
Regular,
Compressed(CompressionKind),
Archive(ArchiveKind),
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum CompressionKind {
Yay0,
Yaz0,
Nlzss,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ArchiveKind {
Rarc,
U8,
Disc,
}
pub fn detect<R>(file: &mut R) -> io::Result<FileFormat>
where R: Read + Seek + ?Sized {
file.seek(SeekFrom::Start(0))?;
let mut magic = [0u8; 4];
match file.read_exact(&mut magic) {
Ok(_) => {}
Err(e) if e.kind() == io::ErrorKind::UnexpectedEof => return Ok(FileFormat::Regular),
Err(e) => return Err(e),
}
file.seek_relative(-4)?;
match magic {
YAY0_MAGIC => Ok(FileFormat::Compressed(CompressionKind::Yay0)),
YAZ0_MAGIC => Ok(FileFormat::Compressed(CompressionKind::Yaz0)),
RARC_MAGIC => Ok(FileFormat::Archive(ArchiveKind::Rarc)),
U8_MAGIC => Ok(FileFormat::Archive(ArchiveKind::U8)),
_ => {
let format = nod::Disc::detect(file)?;
file.seek(SeekFrom::Start(0))?;
match format {
Some(_) => Ok(FileFormat::Archive(ArchiveKind::Disc)),
None => Ok(FileFormat::Regular),
}
}
}
}
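A hedged usage sketch of `detect` (the input file name is hypothetical):
```rust
use std::path::Path;

fn main() -> anyhow::Result<()> {
    // Hypothetical input: sniff its container/compression format.
    let mut file = open_path(Path::new("input.bin"), false)?;
    match detect(file.as_mut())? {
        FileFormat::Regular => println!("regular file"),
        FileFormat::Compressed(kind) => println!("compressed ({:?})", kind),
        FileFormat::Archive(kind) => println!("archive ({:?})", kind),
    }
    Ok(())
}
```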
pub fn open_path(path: &Path, auto_decompress: bool) -> anyhow::Result<Box<dyn VfsFile>> {
open_path_fs(Box::new(StdFs), path, auto_decompress)
}
pub fn open_path_fs(
mut fs: Box<dyn Vfs>,
path: &Path,
auto_decompress: bool,
) -> anyhow::Result<Box<dyn VfsFile>> {
let str = path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
let mut split = str.split(':').peekable();
let mut within = String::new();
loop {
let path = split.next().unwrap();
let mut file = fs
.open(path)
.with_context(|| format!("Failed to open {}", format_path(path, &within)))?;
match detect(file.as_mut()).with_context(|| {
format!("Failed to detect file type for {}", format_path(path, &within))
})? {
FileFormat::Regular => {
return match split.next() {
None => Ok(file),
Some(segment) => {
if split.next().is_some() {
return Err(anyhow!(
"{} is not an archive",
format_path(path, &within)
));
}
match segment {
"nlzss" => Ok(decompress_file(file, CompressionKind::Nlzss)
.with_context(|| {
format!(
"Failed to decompress {} with NLZSS",
format_path(path, &within)
)
})?),
"yay0" => Ok(decompress_file(file, CompressionKind::Yay0)
.with_context(|| {
format!(
"Failed to decompress {} with Yay0",
format_path(path, &within)
)
})?),
"yaz0" => Ok(decompress_file(file, CompressionKind::Yaz0)
.with_context(|| {
format!(
"Failed to decompress {} with Yaz0",
format_path(path, &within)
)
})?),
_ => Err(anyhow!("{} is not an archive", format_path(path, &within))),
}
}
}
}
FileFormat::Compressed(kind) => {
return if split.peek().is_none() {
if auto_decompress {
Ok(decompress_file(file, kind).with_context(|| {
format!("Failed to decompress {}", format_path(path, &within))
})?)
} else {
Ok(file)
}
} else {
Err(anyhow!("{} is not an archive", format_path(path, &within)))
};
}
FileFormat::Archive(kind) => {
if split.peek().is_none() {
return Ok(file);
} else {
fs = open_fs(file, kind).with_context(|| {
format!("Failed to open container {}", format_path(path, &within))
})?;
if !within.is_empty() {
within.push(':');
}
within.push_str(path);
}
}
}
}
}
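A sketch of the nested-path syntax handled by `open_path_fs`, assuming a hypothetical disc image:
```rust
use std::path::Path;

fn main() -> anyhow::Result<()> {
    // Hypothetical disc image; ':' separates nesting levels, and the
    // Yaz0-compressed REL is decompressed transparently.
    let mut file = open_path(
        Path::new("disc.rvz:files/RELS.arc:mmem/f_pc_profile_lst.rel"),
        true,
    )?;
    println!("{} bytes", file.map()?.len());
    Ok(())
}
```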
pub fn open_fs(mut file: Box<dyn VfsFile>, kind: ArchiveKind) -> io::Result<Box<dyn Vfs>> {
let metadata = file.metadata()?;
match kind {
ArchiveKind::Rarc => Ok(Box::new(RarcFs::new(file)?)),
ArchiveKind::U8 => Ok(Box::new(U8Fs::new(file)?)),
ArchiveKind::Disc => {
let disc = nod::Disc::new_stream(file.into_disc_stream()).map_err(nod_to_io_error)?;
let partition =
disc.open_partition_kind(nod::PartitionKind::Data).map_err(nod_to_io_error)?;
Ok(Box::new(DiscFs::new(partition, metadata.mtime)?))
}
}
}
pub fn decompress_file(
mut file: Box<dyn VfsFile>,
kind: CompressionKind,
) -> io::Result<Box<dyn VfsFile>> {
let metadata = file.metadata()?;
match kind {
CompressionKind::Yay0 => {
let data = file.map()?;
let result = orthrus_ncompress::yay0::Yay0::decompress_from(data)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
Ok(Box::new(StaticFile::new(Arc::from(result), metadata.mtime)))
}
CompressionKind::Yaz0 => {
let data = file.map()?;
let result = orthrus_ncompress::yaz0::Yaz0::decompress_from(data)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
Ok(Box::new(StaticFile::new(Arc::from(result), metadata.mtime)))
}
CompressionKind::Nlzss => {
let result = nintendo_lz::decompress(&mut file)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
Ok(Box::new(StaticFile::new(Arc::from(result.as_slice()), metadata.mtime)))
}
}
}
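A sketch of forcing a specific codec with `decompress_file` rather than relying on `detect` (the file name is hypothetical):
```rust
use std::path::Path;

fn main() -> anyhow::Result<()> {
    // Hypothetical: force Yaz0 instead of relying on detection.
    let file = open_path(Path::new("model.szs"), false)?;
    let mut out = decompress_file(file, CompressionKind::Yaz0)?;
    println!("{} bytes decompressed", out.metadata()?.len);
    Ok(())
}
```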
fn format_path(path: &str, within: &str) -> String {
if within.is_empty() {
format!("'{}'", path)
} else {
format!("'{}' (within '{}')", path, within)
}
}

src/vfs/rarc.rs Normal file

@ -0,0 +1,76 @@
use std::io;
use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile};
use crate::util::rarc::{RarcNodeKind, RarcView};
#[derive(Clone)]
pub struct RarcFs {
file: Box<dyn VfsFile>,
}
impl RarcFs {
pub fn new(file: Box<dyn VfsFile>) -> io::Result<Self> { Ok(Self { file }) }
fn view(&mut self) -> io::Result<RarcView> {
let data = self.file.map()?;
RarcView::new(data).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
}
impl Vfs for RarcFs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
let view = self.view()?;
match view.find(path) {
Some(RarcNodeKind::File(_, node)) => {
let offset = view.header.header_len() as u64
+ view.header.data_offset() as u64
+ node.data_offset() as u64;
let len = node.data_length() as u64;
let file = WindowedFile::new(self.file.clone(), offset, len)?;
Ok(Box::new(file))
}
Some(RarcNodeKind::Directory(_, _)) => Err(VfsError::DirectoryExists),
None => Err(VfsError::NotFound),
}
}
fn exists(&mut self, path: &str) -> VfsResult<bool> {
let view = self.view()?;
Ok(view.find(path).is_some())
}
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
let view = self.view()?;
match view.find(path) {
Some(RarcNodeKind::Directory(_, dir)) => {
let mut entries = Vec::new();
for node in view.children(dir) {
let name = view.get_string(node.name_offset())?;
if name == "." || name == ".." {
continue;
}
entries.push(name.to_string());
}
Ok(entries)
}
Some(RarcNodeKind::File(_, _)) => Err(VfsError::FileExists),
None => Err(VfsError::NotFound),
}
}
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
let metadata = self.file.metadata()?;
let view = self.view()?;
match view.find(path) {
Some(RarcNodeKind::File(_, node)) => Ok(VfsMetadata {
file_type: VfsFileType::File,
len: node.data_length() as u64,
mtime: metadata.mtime,
}),
Some(RarcNodeKind::Directory(_, _)) => {
Ok(VfsMetadata { file_type: VfsFileType::Directory, len: 0, mtime: metadata.mtime })
}
None => Err(VfsError::NotFound),
}
}
}

src/vfs/std_fs.rs Normal file

@ -0,0 +1,106 @@
use std::{
io,
io::{BufRead, BufReader, Read, Seek, SeekFrom},
path::{Path, PathBuf},
};
use filetime::FileTime;
use super::{DiscStream, Vfs, VfsFile, VfsFileType, VfsMetadata, VfsResult};
#[derive(Clone)]
pub struct StdFs;
impl Vfs for StdFs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
let mut file = StdFile::new(PathBuf::from(path));
file.file()?; // Open the file now to check for errors
Ok(Box::new(file))
}
fn exists(&mut self, path: &str) -> VfsResult<bool> { Ok(Path::new(path).exists()) }
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
let entries = std::fs::read_dir(path)?
.map(|entry| entry.map(|e| e.file_name().to_string_lossy().into_owned()))
.collect::<Result<Vec<_>, _>>()?;
Ok(entries)
}
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
let metadata = std::fs::metadata(path)?;
Ok(VfsMetadata {
file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File },
len: metadata.len(),
mtime: Some(FileTime::from_last_modification_time(&metadata)),
})
}
}
pub struct StdFile {
path: PathBuf,
file: Option<BufReader<std::fs::File>>,
mmap: Option<memmap2::Mmap>,
}
impl Clone for StdFile {
#[inline]
fn clone(&self) -> Self { Self { path: self.path.clone(), file: None, mmap: None } }
}
impl StdFile {
#[inline]
pub fn new(path: PathBuf) -> Self { StdFile { path, file: None, mmap: None } }
pub fn file(&mut self) -> io::Result<&mut BufReader<std::fs::File>> {
if self.file.is_none() {
self.file = Some(BufReader::new(std::fs::File::open(&self.path)?));
}
Ok(self.file.as_mut().unwrap())
}
}
impl BufRead for StdFile {
#[inline]
fn fill_buf(&mut self) -> io::Result<&[u8]> { self.file()?.fill_buf() }
#[inline]
fn consume(&mut self, amt: usize) {
if let Ok(file) = self.file() {
file.consume(amt);
}
}
}
impl Read for StdFile {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.file()?.read(buf) }
}
impl Seek for StdFile {
#[inline]
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { self.file()?.seek(pos) }
}
impl VfsFile for StdFile {
fn map(&mut self) -> io::Result<&[u8]> {
if self.file.is_none() {
self.file = Some(BufReader::new(std::fs::File::open(&self.path)?));
}
if self.mmap.is_none() {
self.mmap = Some(unsafe { memmap2::Mmap::map(self.file.as_ref().unwrap().get_ref())? });
}
Ok(self.mmap.as_ref().unwrap())
}
fn metadata(&mut self) -> io::Result<VfsMetadata> {
let metadata = std::fs::metadata(&self.path)?;
Ok(VfsMetadata {
file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File },
len: metadata.len(),
mtime: Some(FileTime::from_last_modification_time(&metadata)),
})
}
fn into_disc_stream(self: Box<Self>) -> Box<dyn DiscStream> { self }
}
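A sketch of `StdFile`'s lazy behavior: no handle is opened until first access, and `map` memory-maps on demand (the path is hypothetical):
```rust
use std::path::PathBuf;

fn main() -> std::io::Result<()> {
    // Hypothetical path; no handle is opened by new().
    let mut file = StdFile::new(PathBuf::from("orig/GZLE01/sys/main.dol"));
    let len = file.metadata()?.len; // stats the path directly
    let data = file.map()?; // opens and memory-maps on demand
    assert_eq!(data.len() as u64, len);
    Ok(())
}
```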

src/vfs/u8_arc.rs Normal file

@ -0,0 +1,89 @@
use std::io;
use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile};
use crate::util::u8_arc::{U8NodeKind, U8View};
#[derive(Clone)]
pub struct U8Fs {
file: Box<dyn VfsFile>,
}
impl U8Fs {
pub fn new(file: Box<dyn VfsFile>) -> io::Result<Self> { Ok(Self { file }) }
fn view(&mut self) -> io::Result<U8View> {
let data = self.file.map()?;
U8View::new(data).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
}
impl Vfs for U8Fs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
let view = self.view()?;
match view.find(path) {
Some((_, node)) => match node.kind() {
U8NodeKind::File => {
let offset = node.offset() as u64;
let len = node.length() as u64;
let file = WindowedFile::new(self.file.clone(), offset, len)?;
Ok(Box::new(file))
}
U8NodeKind::Directory => Err(VfsError::DirectoryExists),
U8NodeKind::Invalid => Err(VfsError::from("U8: Invalid node kind")),
},
None => Err(VfsError::NotFound),
}
}
fn exists(&mut self, path: &str) -> VfsResult<bool> {
let view = self.view()?;
Ok(view.find(path).is_some())
}
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
let view = self.view()?;
match view.find(path) {
Some((idx, node)) => match node.kind() {
U8NodeKind::File => Err(VfsError::FileExists),
U8NodeKind::Directory => {
let mut entries = Vec::new();
let mut idx = idx + 1;
let end = node.length() as usize;
while idx < end {
let child = view.nodes.get(idx).copied().ok_or(VfsError::NotFound)?;
entries.push(view.get_name(child)?.to_string());
if child.is_dir() {
idx = child.length() as usize;
} else {
idx += 1;
}
}
Ok(entries)
}
U8NodeKind::Invalid => Err(VfsError::from("U8: Invalid node kind")),
},
None => Err(VfsError::NotFound),
}
}
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
let metadata = self.file.metadata()?;
let view = self.view()?;
match view.find(path) {
Some((_, node)) => match node.kind() {
U8NodeKind::File => Ok(VfsMetadata {
file_type: VfsFileType::File,
len: node.length() as u64,
mtime: metadata.mtime,
}),
U8NodeKind::Directory => Ok(VfsMetadata {
file_type: VfsFileType::Directory,
len: 0,
mtime: metadata.mtime,
}),
U8NodeKind::Invalid => Err(VfsError::from("U8: Invalid node kind")),
},
None => Err(VfsError::NotFound),
}
}
}