VFS fixes and improvements, update README.md
- `vfs ls`: Now displays size, detected file format, and decompressed size (if applicable).
  `-r`/`--recursive` lists files recursively. `-s`/`--short` prints only filenames.
- `vfs cp`: Copies files recursively when the source is a directory.
  `--no-decompress` disables automatic decompression for Yay0/Yaz0. `-q` disables printing copied files.
- `rarc` and `u8` commands are now thin wrappers over `vfs ls` and `vfs cp`. For example,
  `rarc list file.arc` is now equivalent to `vfs ls file.arc:`, and `rarc extract file.arc -o out`
  is equivalent to `vfs cp file.arc: out`.
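A quick usage sketch of the behavior described above (the archive and output names are placeholders taken from the message itself):

```shell
# Listing: `rarc list file.arc` is now equivalent to:
$ dtk vfs ls 'file.arc:'
$ dtk vfs ls -r -s 'file.arc:'                    # recursive, filenames only
# Copying: `rarc extract file.arc -o out` is now equivalent to:
$ dtk vfs cp 'file.arc:' out
$ dtk vfs cp --no-decompress -q 'file.arc:' out   # keep Yay0/Yaz0 compressed, suppress per-file output
```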
commit ef7e0db095 (parent 281b0f7104)
@@ -440,6 +440,7 @@ dependencies = [
  "serde_repr",
  "serde_yaml",
  "sha-1",
+ "size",
  "supports-color 3.0.1",
  "syntect",
  "tracing",

@@ -917,9 +918,9 @@ dependencies = [
 
 [[package]]
 name = "nod"
-version = "1.4.0"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75b9bd092c2ebed932654aa6de256e39d2156ce8c87ace31191f8086f6d22f02"
+checksum = "8fc8822a8dac202a8589b1c49c7916bf8dec0adb8e8048f4f5cb979f9f4bc5e9"
 dependencies = [
  "adler",
  "aes",

@@ -942,9 +943,9 @@ dependencies = [
 
 [[package]]
 name = "nodtool"
-version = "1.4.0"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "598b0c24bb98d0094d37e8dc8d83bb600d857c9e42a475806ac1667ec52afbb3"
+checksum = "7f838d5c364ae3f04fbec424baa64facadb401b8114a6b1f96ea35b2d99f5e13"
 dependencies = [
  "argp",
  "base16ct",

@@ -69,6 +69,7 @@ serde_json = "1.0"
 serde_repr = "0.1"
 serde_yaml = "0.9"
 sha-1 = "0.10"
+size = "0.4"
 supports-color = "3.0"
 syntect = { version = "5.2", features = ["parsing", "regex-fancy", "dump-load"], default-features = false }
 tracing = "0.1"
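The new `size = "0.4"` dependency added above is what later formats byte counts in the `vfs ls`/`vfs cp` output (see the `Size::from_bytes` calls in the `src/cmd/vfs.rs` diff below). A minimal sketch of that API, assuming the crate's default formatting:

```rust
use size::Size;

fn main() {
    // Formats a raw byte count for display; the exact string (e.g. "1.5 MiB")
    // depends on the crate's default formatting options.
    let pretty = Size::from_bytes(1_572_864).to_string();
    println!("{pretty}");
}
```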
README.md (106 lines changed)

@@ -46,6 +46,8 @@ project structure and build system that uses decomp-toolkit under the hood.
 - [rarc extract](#rarc-extract)
 - [u8 list](#u8-list)
 - [u8 extract](#u8-extract)
+- [vfs ls](#vfs-ls)
+- [vfs cp](#vfs-cp)
 - [yay0 decompress](#yay0-decompress)
 - [yay0 compress](#yay0-compress)
 - [yaz0 decompress](#yaz0-decompress)

@@ -89,8 +91,6 @@ binary that is byte-for-byte identical to the original, then we know that the de
 decomp-toolkit provides tooling for analyzing and splitting the original binary into relocatable objects, as well
 as generating the linker script and other files needed to link the decompiled code.
 
-
-
 ## Analyzer features
 
 **Function boundary analysis**

@@ -126,11 +126,6 @@ If desired, optionally writes GNU assembler-compatible files alongside the objects
 **Linker script generation**
 Generates `ldscript.lcf` for `mwldeppc.exe`.
 
-**Future work**
-
-- Support RSO files
-- Add more signatures
-
 ## Commands
 
 ### ar create

@@ -181,6 +176,8 @@ and its `nodtool` command line tool._
 
 Displays information about disc images.
 
+To list the contents of a disc image, use [vfs ls](#vfs-ls).
+
 Supported disc image formats:
 
 - ISO (GCM)

@@ -201,6 +198,8 @@ Extracts the contents of disc images to a directory.
 
 See [disc info](#disc-info) for supported formats.
 
+See [vfs cp](#vfs-cp) for a more powerful and flexible extraction tool that supports disc images.
+
 ```shell
 $ dtk disc extract /path/to/game.iso [outdir]
 ```

@@ -235,21 +234,23 @@ $ dtk disc verify /path/to/game.iso
 
 Analyzes a DOL file and outputs information section and symbol information.
 
+See [vfs ls](#vfs-ls) for information on the VFS abstraction.
+
 ```shell
 $ dtk dol info input.dol
+# or, directly from a disc image
+$ dtk dol info 'disc.rvz:sys/main.dol'
 ```
 
 ### dol split
 
-> [!NOTE]
-> This command is a work-in-progress.
+> [!IMPORTANT]
+> **This command is intended to be used as part of a decompilation project's build system.**
+> For an example project structure and for documentation on the configuration, see
+> [dtk-template](https://github.com/encounter/dtk-template).
 
 Analyzes and splits a DOL file into relocatable objects based on user configuration.
 
-**This command is intended to be used as part of a decompilation project's build system.**
-For an example project structure and for documentation on the configuration, see
-[dtk-template](https://github.com/encounter/dtk-template).
-
 ```shell
 $ dtk dol split config.yml target
 ```

@@ -348,8 +349,12 @@ $ dtk map symbol Game.MAP 'Function__5ClassFv'
 
 Prints information about a REL file.
 
+See [vfs ls](#vfs-ls) for information on the VFS abstraction.
+
 ```shell
 $ dtk rel info input.rel
+# or, directly from a disc image
+$ dtk rel info 'disc.rvz:files/RELS.arc:amem/d_a_tag_so.rel'
 ```
 
 ### rel merge

@@ -412,6 +417,10 @@ $ dtk nlzss decompress rels/*.lz -o rels
 
 ### rarc list
 
+> [!NOTE]
+> [vfs ls](#vfs-ls) is more powerful and supports RARC archives.
+> This command is now equivalent to `dtk vfs ls input.arc:`
+
 Lists the contents of an RARC (older .arc) archive.
 
 ```shell

@@ -420,6 +429,10 @@ $ dtk rarc list input.arc
 
 ### rarc extract
 
+> [!NOTE]
+> [vfs cp](#vfs-cp) is more powerful and supports RARC archives.
+> This command is now equivalent to `dtk vfs cp input.arc: output_dir`
+
 Extracts the contents of an RARC (older .arc) archive.
 
 ```shell

@@ -428,6 +441,10 @@ $ dtk rarc extract input.arc -o output_dir
 
 ### u8 list
 
+> [!NOTE]
+> [vfs ls](#vfs-ls) is more powerful and supports U8 archives.
+> This command is now equivalent to `dtk vfs ls input.arc:`
+
 Extracts the contents of a U8 (newer .arc) archive.
 
 ```shell

@@ -436,12 +453,75 @@ $ dtk u8 list input.arc
 
 ### u8 extract
 
+> [!NOTE]
+> [vfs cp](#vfs-cp) is more powerful and supports U8 archives.
+> This command is now equivalent to `dtk vfs cp input.arc: output_dir`
+
 Extracts the contents of a U8 (newer .arc) archive.
 
 ```shell
 $ dtk u8 extract input.arc -o output_dir
 ```
 
+### vfs ls
+
+decomp-toolkit has a powerful virtual filesystem (VFS) abstraction that allows you to work with a
+variety of containers. All operations happen in memory with minimal overhead and no temporary files.
+
+Supported containers:
+
+- Disc images (see [disc info](#disc-info) for supported formats)
+- RARC archives (older .arc)
+- U8 archives (newer .arc)
+
+Supported compression formats are handled transparently:
+- Yay0 (SZP) / Yaz0 (SZS)
+- NLZSS (.lz) (Use `:nlzss` in the path)
+
+`vfs ls` lists the contents of a container or directory.
+
+Options:
+
+- `-r`, `--recursive`: Recursively list contents.
+- `-s`, `--short`: Only list file names.
+
+Examples:
+
+```shell
+# List the contents of the `amem` directory inside `RELS.arc` in a disc image
+$ dtk vfs ls 'disc.rvz:files/RELS.arc:amem'
+# List the contents of `RELS.arc` recursively
+$ dtk vfs ls -r 'disc.rvz:files/RELS.arc:'
+
+# All commands that accept a file path can also accept a VFS path
+$ dtk rel info 'disc.rvz:files/RELS.arc:amem/d_a_tag_so.rel'
+# Example disc image within a disc image
+$ dtk dol info 'disc.rvz:files/zz_demo.tgc:sys/main.dol'
+```
+
+### vfs cp
+
+See [vfs ls](#vfs-ls) for information on the VFS abstraction.
+
+`vfs cp` copies files and directories recursively to the host filesystem.
+
+Options:
+
+- `--no-decompress`: Do not decompress files when copying.
+- `-q`, `--quiet`: Suppresses all output except errors.
+
+Examples:
+
+```shell
+# Extract a file from a nested path in a disc image to the current directory
+$ dtk vfs cp 'disc.rvz:files/RELS.arc:amem/d_a_tag_so.rel' .
+
+# Directories are copied recursively, making it easy to extract entire archives
+$ dtk vfs cp 'disc.rvz:files/RELS.arc:' rels
+# Or, to disable automatic decompression
+$ dtk vfs cp --no-decompress 'disc.rvz:files/RELS.arc:' rels
+```
+
 ### yay0 decompress
 
 Decompresses Yay0-compressed files.
@@ -13,7 +13,7 @@ use crate::{
         file::buf_writer,
         reader::{Endian, FromReader},
     },
-    vfs::open_path,
+    vfs::open_file,
 };
 
 #[derive(FromArgs, PartialEq, Debug)]

@@ -61,7 +61,7 @@ pub fn run(args: Args) -> Result<()> {
 
 fn hashes(args: HashesArgs) -> Result<()> {
     let alf_file = {
-        let mut file = open_path(&args.alf_file, true)?;
+        let mut file = open_file(&args.alf_file, true)?;
         AlfFile::from_reader(file.as_mut(), Endian::Little)?
     };
     let mut w: Box<dyn Write> = if let Some(output) = args.output {

@@ -11,7 +11,7 @@ use object::{Object, ObjectSymbol, SymbolScope};
 
 use crate::{
     util::file::{buf_writer, process_rsp},
-    vfs::open_path,
+    vfs::open_file,
 };
 
 #[derive(FromArgs, PartialEq, Debug)]

@@ -83,7 +83,7 @@ fn create(args: CreateArgs) -> Result<()> {
             Entry::Vacant(e) => e.insert(Vec::new()),
             Entry::Occupied(_) => bail!("Duplicate file name '{path_str}'"),
         };
-        let mut file = open_path(path, false)?;
+        let mut file = open_file(path, false)?;
         let obj = object::File::parse(file.map()?)?;
         for symbol in obj.symbols() {
             if symbol.scope() == SymbolScope::Dynamic {

@@ -129,7 +129,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
             println!("Extracting {} to {}", path.display(), out_dir.display());
         }
 
-        let mut file = open_path(path, false)?;
+        let mut file = open_file(path, false)?;
         let mut archive = ar::Archive::new(file.map()?);
         while let Some(entry) = archive.next_entry() {
             let mut entry =
|
||||||
split::{is_linker_generated_object, split_obj, update_splits},
|
split::{is_linker_generated_object, split_obj, update_splits},
|
||||||
IntoCow, ToCow,
|
IntoCow, ToCow,
|
||||||
},
|
},
|
||||||
vfs::{open_fs, open_path, open_path_fs, ArchiveKind, Vfs, VfsFile},
|
vfs::{open_file, open_file_with_fs, open_fs, ArchiveKind, Vfs, VfsFile},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
|
@ -485,7 +485,7 @@ fn apply_selfile(obj: &mut ObjInfo, buf: &[u8]) -> Result<()> {
|
||||||
|
|
||||||
pub fn info(args: InfoArgs) -> Result<()> {
|
pub fn info(args: InfoArgs) -> Result<()> {
|
||||||
let mut obj = {
|
let mut obj = {
|
||||||
let mut file = open_path(&args.dol_file, true)?;
|
let mut file = open_file(&args.dol_file, true)?;
|
||||||
process_dol(file.map()?, "")?
|
process_dol(file.map()?, "")?
|
||||||
};
|
};
|
||||||
apply_signatures(&mut obj)?;
|
apply_signatures(&mut obj)?;
|
||||||
|
@ -504,7 +504,7 @@ pub fn info(args: InfoArgs) -> Result<()> {
|
||||||
apply_signatures_post(&mut obj)?;
|
apply_signatures_post(&mut obj)?;
|
||||||
|
|
||||||
if let Some(selfile) = &args.selfile {
|
if let Some(selfile) = &args.selfile {
|
||||||
let mut file = open_path(selfile, true)?;
|
let mut file = open_file(selfile, true)?;
|
||||||
apply_selfile(&mut obj, file.map()?)?;
|
apply_selfile(&mut obj, file.map()?)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -847,7 +847,7 @@ fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<
|
||||||
|
|
||||||
if let Some(selfile) = &config.selfile {
|
if let Some(selfile) = &config.selfile {
|
||||||
log::info!("Loading {}", selfile.display());
|
log::info!("Loading {}", selfile.display());
|
||||||
let mut file = open_path(selfile, true)?;
|
let mut file = open_file(selfile, true)?;
|
||||||
let data = file.map()?;
|
let data = file.map()?;
|
||||||
if let Some(hash) = &config.selfile_hash {
|
if let Some(hash) = &config.selfile_hash {
|
||||||
verify_hash(data, hash)?;
|
verify_hash(data, hash)?;
|
||||||
|
@ -1009,7 +1009,7 @@ fn split_write_obj(
|
||||||
|
|
||||||
fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
|
fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
|
||||||
if path.is_file() {
|
if path.is_file() {
|
||||||
let mut old_file = open_path(path, true)?;
|
let mut old_file = open_file(path, true)?;
|
||||||
let old_data = old_file.map()?;
|
let old_data = old_file.map()?;
|
||||||
// If the file is the same size, check if the contents are the same
|
// If the file is the same size, check if the contents are the same
|
||||||
// Avoid writing if unchanged, since it will update the file's mtime
|
// Avoid writing if unchanged, since it will update the file's mtime
|
||||||
|
@ -1098,7 +1098,7 @@ fn split(args: SplitArgs) -> Result<()> {
|
||||||
let command_start = Instant::now();
|
let command_start = Instant::now();
|
||||||
info!("Loading {}", args.config.display());
|
info!("Loading {}", args.config.display());
|
||||||
let mut config: ProjectConfig = {
|
let mut config: ProjectConfig = {
|
||||||
let mut config_file = open_path(&args.config, true)?;
|
let mut config_file = open_file(&args.config, true)?;
|
||||||
serde_yaml::from_reader(config_file.as_mut())?
|
serde_yaml::from_reader(config_file.as_mut())?
|
||||||
};
|
};
|
||||||
let object_base = find_object_base(&config)?;
|
let object_base = find_object_base(&config)?;
|
||||||
|
@ -1550,7 +1550,7 @@ fn symbol_name_fuzzy_eq(a: &ObjSymbol, b: &ObjSymbol) -> bool {
|
||||||
|
|
||||||
fn diff(args: DiffArgs) -> Result<()> {
|
fn diff(args: DiffArgs) -> Result<()> {
|
||||||
log::info!("Loading {}", args.config.display());
|
log::info!("Loading {}", args.config.display());
|
||||||
let mut config_file = open_path(&args.config, true)?;
|
let mut config_file = open_file(&args.config, true)?;
|
||||||
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
|
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
|
||||||
let object_base = find_object_base(&config)?;
|
let object_base = find_object_base(&config)?;
|
||||||
|
|
||||||
|
@ -1731,7 +1731,7 @@ fn diff(args: DiffArgs) -> Result<()> {
|
||||||
|
|
||||||
fn apply(args: ApplyArgs) -> Result<()> {
|
fn apply(args: ApplyArgs) -> Result<()> {
|
||||||
log::info!("Loading {}", args.config.display());
|
log::info!("Loading {}", args.config.display());
|
||||||
let mut config_file = open_path(&args.config, true)?;
|
let mut config_file = open_file(&args.config, true)?;
|
||||||
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
|
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
|
||||||
let object_base = find_object_base(&config)?;
|
let object_base = find_object_base(&config)?;
|
||||||
|
|
||||||
|
@ -2012,9 +2012,9 @@ impl ObjectBase {
|
||||||
|
|
||||||
pub fn open(&self, path: &Path) -> Result<Box<dyn VfsFile>> {
|
pub fn open(&self, path: &Path) -> Result<Box<dyn VfsFile>> {
|
||||||
match self {
|
match self {
|
||||||
ObjectBase::None => open_path(path, true),
|
ObjectBase::None => open_file(path, true),
|
||||||
ObjectBase::Directory(base) => open_path(&base.join(path), true),
|
ObjectBase::Directory(base) => open_file(&base.join(path), true),
|
||||||
ObjectBase::Vfs(vfs_path, vfs) => open_path_fs(vfs.clone(), path, true)
|
ObjectBase::Vfs(vfs_path, vfs) => open_file_with_fs(vfs.clone(), path, true)
|
||||||
.with_context(|| format!("Using disc image {}", vfs_path.display())),
|
.with_context(|| format!("Using disc image {}", vfs_path.display())),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2027,12 +2027,12 @@ pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
|
||||||
let entry = result?;
|
let entry = result?;
|
||||||
if entry.file_type()?.is_file() {
|
if entry.file_type()?.is_file() {
|
||||||
let path = entry.path();
|
let path = entry.path();
|
||||||
let mut file = open_path(&path, false)?;
|
let mut file = open_file(&path, false)?;
|
||||||
let format = nodtool::nod::Disc::detect(file.as_mut())?;
|
let format = nodtool::nod::Disc::detect(file.as_mut())?;
|
||||||
if format.is_some() {
|
if let Some(format) = format {
|
||||||
file.rewind()?;
|
file.rewind()?;
|
||||||
log::info!("Using disc image {}", path.display());
|
log::info!("Using disc image {}", path.display());
|
||||||
let fs = open_fs(file, ArchiveKind::Disc)?;
|
let fs = open_fs(file, ArchiveKind::Disc(format))?;
|
||||||
return Ok(ObjectBase::Vfs(path, fs));
|
return Ok(ObjectBase::Vfs(path, fs));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -24,7 +24,7 @@ use crate::{
|
||||||
},
|
},
|
||||||
file::buf_writer,
|
file::buf_writer,
|
||||||
},
|
},
|
||||||
vfs::open_path,
|
vfs::open_file,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
|
@ -76,7 +76,7 @@ fn dump(args: DumpArgs) -> Result<()> {
|
||||||
let theme = theme_set.themes.get("Solarized (dark)").context("Failed to load theme")?;
|
let theme = theme_set.themes.get("Solarized (dark)").context("Failed to load theme")?;
|
||||||
let syntax = syntax_set.find_syntax_by_name("C++").context("Failed to find syntax")?.clone();
|
let syntax = syntax_set.find_syntax_by_name("C++").context("Failed to find syntax")?.clone();
|
||||||
|
|
||||||
let mut file = open_path(&args.in_file, true)?;
|
let mut file = open_file(&args.in_file, true)?;
|
||||||
let buf = file.map()?;
|
let buf = file.map()?;
|
||||||
if buf.starts_with(b"!<arch>\n") {
|
if buf.starts_with(b"!<arch>\n") {
|
||||||
let mut archive = ar::Archive::new(buf);
|
let mut archive = ar::Archive::new(buf);
|
||||||
|
|
|
@ -7,7 +7,7 @@ use anyhow::{anyhow, bail, ensure, Result};
|
||||||
use argp::FromArgs;
|
use argp::FromArgs;
|
||||||
use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, SectionKind};
|
use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, SectionKind};
|
||||||
|
|
||||||
use crate::{util::file::buf_writer, vfs::open_path};
|
use crate::{util::file::buf_writer, vfs::open_file};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||||
/// Converts an ELF file to a DOL file.
|
/// Converts an ELF file to a DOL file.
|
||||||
|
@ -46,7 +46,7 @@ const MAX_TEXT_SECTIONS: usize = 7;
|
||||||
const MAX_DATA_SECTIONS: usize = 11;
|
const MAX_DATA_SECTIONS: usize = 11;
|
||||||
|
|
||||||
pub fn run(args: Args) -> Result<()> {
|
pub fn run(args: Args) -> Result<()> {
|
||||||
let mut file = open_path(&args.elf_file, true)?;
|
let mut file = open_file(&args.elf_file, true)?;
|
||||||
let obj_file = object::read::File::parse(file.map()?)?;
|
let obj_file = object::read::File::parse(file.map()?)?;
|
||||||
match obj_file.architecture() {
|
match obj_file.architecture() {
|
||||||
Architecture::PowerPc => {}
|
Architecture::PowerPc => {}
|
||||||
|
|
|
@ -11,7 +11,7 @@ use crate::{
|
||||||
map::{create_obj, process_map, SymbolEntry, SymbolRef},
|
map::{create_obj, process_map, SymbolEntry, SymbolRef},
|
||||||
split::update_splits,
|
split::update_splits,
|
||||||
},
|
},
|
||||||
vfs::open_path,
|
vfs::open_file,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
|
@ -75,7 +75,7 @@ pub fn run(args: Args) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn entries(args: EntriesArgs) -> Result<()> {
|
fn entries(args: EntriesArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.map_file, true)?;
|
let mut file = open_file(&args.map_file, true)?;
|
||||||
let entries = process_map(file.as_mut(), None, None)?;
|
let entries = process_map(file.as_mut(), None, None)?;
|
||||||
match entries.unit_entries.get_vec(&args.unit) {
|
match entries.unit_entries.get_vec(&args.unit) {
|
||||||
Some(vec) => {
|
Some(vec) => {
|
||||||
|
@ -106,7 +106,7 @@ fn entries(args: EntriesArgs) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn symbol(args: SymbolArgs) -> Result<()> {
|
fn symbol(args: SymbolArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.map_file, true)?;
|
let mut file = open_file(&args.map_file, true)?;
|
||||||
log::info!("Processing map...");
|
log::info!("Processing map...");
|
||||||
let entries = process_map(file.as_mut(), None, None)?;
|
let entries = process_map(file.as_mut(), None, None)?;
|
||||||
log::info!("Done!");
|
log::info!("Done!");
|
||||||
|
@ -181,7 +181,7 @@ fn symbol(args: SymbolArgs) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn config(args: ConfigArgs) -> Result<()> {
|
fn config(args: ConfigArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.map_file, true)?;
|
let mut file = open_file(&args.map_file, true)?;
|
||||||
log::info!("Processing map...");
|
log::info!("Processing map...");
|
||||||
let entries = process_map(file.as_mut(), None, None)?;
|
let entries = process_map(file.as_mut(), None, None)?;
|
||||||
let mut obj = create_obj(&entries)?;
|
let mut obj = create_obj(&entries)?;
|
||||||
|
|
|
@ -5,7 +5,7 @@ use argp::FromArgs;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
util::{file::process_rsp, IntoCow, ToCow},
|
util::{file::process_rsp, IntoCow, ToCow},
|
||||||
vfs::open_path,
|
vfs::open_file,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
|
@ -45,7 +45,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
|
||||||
let files = process_rsp(&args.files)?;
|
let files = process_rsp(&args.files)?;
|
||||||
let single_file = files.len() == 1;
|
let single_file = files.len() == 1;
|
||||||
for path in files {
|
for path in files {
|
||||||
let mut file = open_path(&path, false)?;
|
let mut file = open_file(&path, false)?;
|
||||||
let data = nintendo_lz::decompress(&mut file)
|
let data = nintendo_lz::decompress(&mut file)
|
||||||
.map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path.display(), e))?;
|
.map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path.display(), e))?;
|
||||||
let out_path = if let Some(output) = &args.output {
|
let out_path = if let Some(output) = &args.output {
|
||||||
|
|
|
@ -1,12 +1,9 @@
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::Result;
|
||||||
use argp::FromArgs;
|
use argp::FromArgs;
|
||||||
|
|
||||||
use crate::{
|
use super::vfs;
|
||||||
util::rarc::{RarcNodeKind, RarcView},
|
|
||||||
vfs::open_path,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
/// Commands for processing RSO files.
|
/// Commands for processing RSO files.
|
||||||
|
@ -30,6 +27,9 @@ pub struct ListArgs {
|
||||||
#[argp(positional)]
|
#[argp(positional)]
|
||||||
/// RARC file
|
/// RARC file
|
||||||
file: PathBuf,
|
file: PathBuf,
|
||||||
|
#[argp(switch, short = 's')]
|
||||||
|
/// Only print filenames.
|
||||||
|
short: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||||
|
@ -42,8 +42,11 @@ pub struct ExtractArgs {
|
||||||
#[argp(option, short = 'o')]
|
#[argp(option, short = 'o')]
|
||||||
/// output directory
|
/// output directory
|
||||||
output: Option<PathBuf>,
|
output: Option<PathBuf>,
|
||||||
|
#[argp(switch)]
|
||||||
|
/// Do not decompress files when copying.
|
||||||
|
no_decompress: bool,
|
||||||
#[argp(switch, short = 'q')]
|
#[argp(switch, short = 'q')]
|
||||||
/// quiet output
|
/// Quiet output. Don't print anything except errors.
|
||||||
quiet: bool,
|
quiet: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -55,42 +58,16 @@ pub fn run(args: Args) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn list(args: ListArgs) -> Result<()> {
|
fn list(args: ListArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.file, true)?;
|
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||||
let view = RarcView::new(file.map()?).map_err(|e| anyhow!(e))?;
|
vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true })
|
||||||
test(&view, "")?;
|
|
||||||
test(&view, "/")?;
|
|
||||||
test(&view, "//")?;
|
|
||||||
test(&view, "/rels")?;
|
|
||||||
test(&view, "/rels/")?;
|
|
||||||
test(&view, "/rels/amem")?;
|
|
||||||
test(&view, "/rels/amem/")?;
|
|
||||||
test(&view, "/rels/mmem")?;
|
|
||||||
test(&view, "/rels/mmem/../mmem")?;
|
|
||||||
test(&view, "/rels/amem/d_a_am.rel")?;
|
|
||||||
test(&view, "//amem/d_a_am.rel")?;
|
|
||||||
test(&view, "amem/d_a_am.rel")?;
|
|
||||||
test(&view, "amem/d_a_am.rel/")?;
|
|
||||||
test(&view, "mmem/d_a_obj_pirateship.rel")?;
|
|
||||||
test(&view, "mmem//d_a_obj_pirateship.rel")?;
|
|
||||||
test(&view, "mmem/da_obj_pirateship.rel")?;
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test(view: &RarcView, path: &str) -> Result<()> {
|
fn extract(args: ExtractArgs) -> Result<()> {
|
||||||
let option = view.find(path);
|
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||||
let data = if let Some(RarcNodeKind::File(_, node)) = option {
|
let output = args.output.unwrap_or_else(|| PathBuf::from("."));
|
||||||
view.get_data(node).map_err(|e| anyhow!(e))?
|
vfs::cp(vfs::CpArgs {
|
||||||
} else {
|
paths: vec![path, output],
|
||||||
&[]
|
no_decompress: args.no_decompress,
|
||||||
};
|
quiet: args.quiet,
|
||||||
let vec = data.iter().cloned().take(4).collect::<Vec<_>>();
|
})
|
||||||
println!("{:?}: {:?} (len: {:?})", path, option, vec.as_slice());
|
|
||||||
// if let Some(RarcNodeKind::Directory(_, dir)) = option {
|
|
||||||
// for node in view.children(dir) {
|
|
||||||
// println!("Child: {:?} ({:?})", node, view.get_string(node.name_offset()));
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract(_args: ExtractArgs) -> Result<()> { todo!() }
|
|
||||||
|
|
|
@ -41,7 +41,7 @@ use crate::{
|
||||||
},
|
},
|
||||||
IntoCow, ToCow,
|
IntoCow, ToCow,
|
||||||
},
|
},
|
||||||
vfs::open_path,
|
vfs::open_file,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
|
@ -264,7 +264,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
||||||
let mut name_to_module_id = FxHashMap::<String, u32>::default();
|
let mut name_to_module_id = FxHashMap::<String, u32>::default();
|
||||||
if let Some(config_path) = &args.config {
|
if let Some(config_path) = &args.config {
|
||||||
let config: ProjectConfig = {
|
let config: ProjectConfig = {
|
||||||
let mut file = open_path(config_path, true)?;
|
let mut file = open_file(config_path, true)?;
|
||||||
serde_yaml::from_reader(file.as_mut())?
|
serde_yaml::from_reader(file.as_mut())?
|
||||||
};
|
};
|
||||||
let object_base = find_object_base(&config)?;
|
let object_base = find_object_base(&config)?;
|
||||||
|
@ -293,7 +293,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Load all modules
|
// Load all modules
|
||||||
let mut files = paths.iter().map(|p| open_path(p, true)).collect::<Result<Vec<_>>>()?;
|
let mut files = paths.iter().map(|p| open_file(p, true)).collect::<Result<Vec<_>>>()?;
|
||||||
let modules = files
|
let modules = files
|
||||||
.par_iter_mut()
|
.par_iter_mut()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
|
@ -405,7 +405,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn info(args: InfoArgs) -> Result<()> {
|
fn info(args: InfoArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.rel_file, true)?;
|
let mut file = open_file(&args.rel_file, true)?;
|
||||||
let (header, mut module_obj) = process_rel(file.as_mut(), "")?;
|
let (header, mut module_obj) = process_rel(file.as_mut(), "")?;
|
||||||
|
|
||||||
let mut state = AnalyzerState::default();
|
let mut state = AnalyzerState::default();
|
||||||
|
@ -475,7 +475,7 @@ const fn align32(x: u32) -> u32 { (x + 31) & !31 }
|
||||||
fn merge(args: MergeArgs) -> Result<()> {
|
fn merge(args: MergeArgs) -> Result<()> {
|
||||||
log::info!("Loading {}", args.dol_file.display());
|
log::info!("Loading {}", args.dol_file.display());
|
||||||
let mut obj = {
|
let mut obj = {
|
||||||
let mut file = open_path(&args.dol_file, true)?;
|
let mut file = open_file(&args.dol_file, true)?;
|
||||||
let name = args.dol_file.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
|
let name = args.dol_file.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
|
||||||
process_dol(file.map()?, name.as_ref())?
|
process_dol(file.map()?, name.as_ref())?
|
||||||
};
|
};
|
||||||
|
|
|
@ -20,7 +20,7 @@ use crate::{
|
||||||
RSO_SECTION_NAMES,
|
RSO_SECTION_NAMES,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
vfs::open_path,
|
vfs::open_file,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
|
@ -77,7 +77,7 @@ pub fn run(args: Args) -> Result<()> {
|
||||||
|
|
||||||
fn info(args: InfoArgs) -> Result<()> {
|
fn info(args: InfoArgs) -> Result<()> {
|
||||||
let rso = {
|
let rso = {
|
||||||
let mut file = open_path(&args.rso_file, true)?;
|
let mut file = open_file(&args.rso_file, true)?;
|
||||||
let obj = process_rso(file.as_mut())?;
|
let obj = process_rso(file.as_mut())?;
|
||||||
#[allow(clippy::let_and_return)]
|
#[allow(clippy::let_and_return)]
|
||||||
obj
|
obj
|
||||||
|
@ -87,7 +87,7 @@ fn info(args: InfoArgs) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make(args: MakeArgs) -> Result<()> {
|
fn make(args: MakeArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.input, true)?;
|
let mut file = open_file(&args.input, true)?;
|
||||||
let obj_file = object::read::File::parse(file.map()?)?;
|
let obj_file = object::read::File::parse(file.map()?)?;
|
||||||
match obj_file.architecture() {
|
match obj_file.architecture() {
|
||||||
Architecture::PowerPc => {}
|
Architecture::PowerPc => {}
|
||||||
|
@ -102,7 +102,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
||||||
|
|
||||||
let symbols_to_export = match &args.export {
|
let symbols_to_export = match &args.export {
|
||||||
Some(export_file_path) => {
|
Some(export_file_path) => {
|
||||||
let export_file_reader = open_path(export_file_path, true)?;
|
let export_file_reader = open_file(export_file_path, true)?;
|
||||||
export_file_reader.lines().map_while(Result::ok).collect()
|
export_file_reader.lines().map_while(Result::ok).collect()
|
||||||
}
|
}
|
||||||
None => vec![],
|
None => vec![],
|
||||||
|
|
|
@ -12,7 +12,7 @@ use sha1::{Digest, Sha1};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
util::file::{buf_writer, process_rsp, touch},
|
util::file::{buf_writer, process_rsp, touch},
|
||||||
vfs::open_path,
|
vfs::open_file,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||||
|
@ -39,7 +39,7 @@ const DEFAULT_BUF_SIZE: usize = 8192;
|
||||||
pub fn run(args: Args) -> Result<()> {
|
pub fn run(args: Args) -> Result<()> {
|
||||||
if args.check {
|
if args.check {
|
||||||
for path in process_rsp(&args.files)? {
|
for path in process_rsp(&args.files)? {
|
||||||
let mut file = open_path(&path, false)?;
|
let mut file = open_file(&path, false)?;
|
||||||
check(&args, file.as_mut())?;
|
check(&args, file.as_mut())?;
|
||||||
}
|
}
|
||||||
if let Some(out_path) = &args.output {
|
if let Some(out_path) = &args.output {
|
||||||
|
@ -56,7 +56,7 @@ pub fn run(args: Args) -> Result<()> {
|
||||||
Box::new(stdout())
|
Box::new(stdout())
|
||||||
};
|
};
|
||||||
for path in process_rsp(&args.files)? {
|
for path in process_rsp(&args.files)? {
|
||||||
let mut file = open_path(&path, false)?;
|
let mut file = open_file(&path, false)?;
|
||||||
hash(w.as_mut(), file.as_mut(), &path)?
|
hash(w.as_mut(), file.as_mut(), &path)?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,16 +1,9 @@
|
||||||
use std::{borrow::Cow, fs, fs::DirBuilder, path::PathBuf};
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::Result;
|
||||||
use argp::FromArgs;
|
use argp::FromArgs;
|
||||||
use itertools::Itertools;
|
|
||||||
|
|
||||||
use crate::{
|
use super::vfs;
|
||||||
util::{
|
|
||||||
file::decompress_if_needed,
|
|
||||||
u8_arc::{U8Node, U8View},
|
|
||||||
},
|
|
||||||
vfs::open_path,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Debug)]
|
#[derive(FromArgs, PartialEq, Debug)]
|
||||||
/// Commands for processing U8 (arc) files.
|
/// Commands for processing U8 (arc) files.
|
||||||
|
@ -34,6 +27,9 @@ pub struct ListArgs {
|
||||||
#[argp(positional)]
|
#[argp(positional)]
|
||||||
/// U8 (arc) file
|
/// U8 (arc) file
|
||||||
file: PathBuf,
|
file: PathBuf,
|
||||||
|
#[argp(switch, short = 's')]
|
||||||
|
/// Only print filenames.
|
||||||
|
short: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||||
|
@ -46,8 +42,11 @@ pub struct ExtractArgs {
|
||||||
#[argp(option, short = 'o')]
|
#[argp(option, short = 'o')]
|
||||||
/// output directory
|
/// output directory
|
||||||
output: Option<PathBuf>,
|
output: Option<PathBuf>,
|
||||||
|
#[argp(switch)]
|
||||||
|
/// Do not decompress files when copying.
|
||||||
|
no_decompress: bool,
|
||||||
#[argp(switch, short = 'q')]
|
#[argp(switch, short = 'q')]
|
||||||
/// quiet output
|
/// Quiet output. Don't print anything except errors.
|
||||||
quiet: bool,
|
quiet: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -59,66 +58,16 @@ pub fn run(args: Args) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn list(args: ListArgs) -> Result<()> {
|
fn list(args: ListArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.file, true)?;
|
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||||
let view = U8View::new(file.map()?)
|
vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true })
|
||||||
.map_err(|e| anyhow!("Failed to open U8 file '{}': {}", args.file.display(), e))?;
|
|
||||||
visit_files(&view, |_, node, path| {
|
|
||||||
println!("{}: {} bytes, offset {:#X}", path, node.length(), node.offset());
|
|
||||||
Ok(())
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract(args: ExtractArgs) -> Result<()> {
|
fn extract(args: ExtractArgs) -> Result<()> {
|
||||||
let mut file = open_path(&args.file, true)?;
|
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||||
let data = file.map()?;
|
let output = args.output.unwrap_or_else(|| PathBuf::from("."));
|
||||||
let view = U8View::new(data)
|
vfs::cp(vfs::CpArgs {
|
||||||
.map_err(|e| anyhow!("Failed to open U8 file '{}': {}", args.file.display(), e))?;
|
paths: vec![path, output],
|
||||||
visit_files(&view, |_, node, path| {
|
no_decompress: args.no_decompress,
|
||||||
let offset = node.offset();
|
quiet: args.quiet,
|
||||||
let size = node.length();
|
|
||||||
let file_data =
|
|
||||||
decompress_if_needed(&data[offset as usize..offset as usize + size as usize])?;
|
|
||||||
let output_path = args
|
|
||||||
.output
|
|
||||||
.as_ref()
|
|
||||||
.map(|p| p.join(&path))
|
|
||||||
.unwrap_or_else(|| PathBuf::from(path.clone()));
|
|
||||||
if !args.quiet {
|
|
||||||
println!("Extracting {} to {} ({} bytes)", path, output_path.display(), size);
|
|
||||||
}
|
|
||||||
if let Some(parent) = output_path.parent() {
|
|
||||||
DirBuilder::new().recursive(true).create(parent)?;
|
|
||||||
}
|
|
||||||
fs::write(&output_path, file_data)
|
|
||||||
.with_context(|| format!("Failed to write file '{}'", output_path.display()))?;
|
|
||||||
Ok(())
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_files(
|
|
||||||
view: &U8View,
|
|
||||||
mut visitor: impl FnMut(usize, U8Node, String) -> Result<()>,
|
|
||||||
) -> Result<()> {
|
|
||||||
let mut path_segments = Vec::<(Cow<str>, usize)>::new();
|
|
||||||
for (idx, node, name) in view.iter() {
|
|
||||||
// Remove ended path segments
|
|
||||||
let mut new_size = 0;
|
|
||||||
for (_, end) in path_segments.iter() {
|
|
||||||
if *end == idx {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
new_size += 1;
|
|
||||||
}
|
|
||||||
path_segments.truncate(new_size);
|
|
||||||
|
|
||||||
// Add the new path segment
|
|
||||||
let end = if node.is_dir() { node.length() as usize } else { idx + 1 };
|
|
||||||
path_segments.push((name.map_err(|e| anyhow!("{}", e))?, end));
|
|
||||||
|
|
||||||
let path = path_segments.iter().map(|(name, _)| name.as_ref()).join("/");
|
|
||||||
if !node.is_dir() {
|
|
||||||
visitor(idx, node, path)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
src/cmd/vfs.rs (302 lines changed)

@@ -1,10 +1,19 @@
-use std::{fs::File, io, io::Write, path::PathBuf};
+use std::{
+    fs,
+    fs::File,
+    io,
+    io::{BufRead, Write},
+    path::{Path, PathBuf},
+};
 
-use anyhow::{anyhow, bail};
+use anyhow::{anyhow, bail, Context};
 use argp::FromArgs;
-use nodtool::nod::ResultContext;
+use size::Size;
 
-use crate::vfs::{decompress_file, detect, open_fs, FileFormat, StdFs, Vfs, VfsFileType};
+use crate::vfs::{
+    decompress_file, detect, open_path, FileFormat, OpenResult, Vfs, VfsFile, VfsFileType,
+    VfsMetadata,
+};
 
 #[derive(FromArgs, PartialEq, Debug)]
 /// Commands for interacting with discs and containers.

@@ -26,8 +35,14 @@ enum SubCommand {
 #[argp(subcommand, name = "ls")]
 pub struct LsArgs {
     #[argp(positional)]
-    /// Path to the container.
-    path: PathBuf,
+    /// Directory or container path.
+    pub path: PathBuf,
+    #[argp(switch, short = 's')]
+    /// Only print filenames.
+    pub short: bool,
+    #[argp(switch, short = 'r')]
+    /// Recursively list files in directories.
+    pub recursive: bool,
 }
 
 #[derive(FromArgs, PartialEq, Eq, Debug)]

@@ -36,7 +51,13 @@ pub struct LsArgs {
 pub struct CpArgs {
     #[argp(positional)]
     /// Source path(s) and destination path.
-    paths: Vec<PathBuf>,
+    pub paths: Vec<PathBuf>,
+    #[argp(switch)]
+    /// Do not decompress files when copying.
+    pub no_decompress: bool,
+    #[argp(switch, short = 'q')]
+    /// Quiet output. Don't print anything except errors.
+    pub quiet: bool,
 }
 
 pub fn run(args: Args) -> anyhow::Result<()> {

@@ -46,75 +67,248 @@ pub fn run(args: Args) -> anyhow::Result<()> {
     }
 }
 
-fn find(path: &str) -> anyhow::Result<(Box<dyn Vfs>, &str)> {
-    let mut split = path.split(':');
-    let mut fs: Box<dyn Vfs> = Box::new(StdFs);
-    let mut path = split.next().unwrap();
-    for next in split {
-        let mut file = fs.open(path)?;
-        match detect(file.as_mut())? {
-            FileFormat::Archive(kind) => {
-                fs = open_fs(file, kind)?;
-                path = next;
-            }
-            _ => bail!("'{}' is not a container", path),
-        }
-    }
-    Ok((fs, path))
-}
-
-fn ls(args: LsArgs) -> anyhow::Result<()> {
-    let str = args.path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
-    let (mut fs, mut path) = find(str)?;
-    let metadata = fs.metadata(path)?;
-    if metadata.is_file() {
-        let mut file = fs.open(path)?;
-        match detect(file.as_mut())? {
-            FileFormat::Archive(kind) => {
-                fs = open_fs(file, kind)?;
-                path = "";
-            }
-            _ => bail!("'{}' is not a directory", path),
-        }
-    }
-    let entries = fs.read_dir(path)?;
-    for entry in entries {
-        println!("{}", entry);
-    }
-    Ok(())
-}
-
-fn cp(mut args: CpArgs) -> anyhow::Result<()> {
-    if args.paths.len() < 2 {
-        bail!("Both source and destination paths must be provided");
-    }
-    let dest = args.paths.pop().unwrap();
-    let dest_is_dir = args.paths.len() > 1 || dest.metadata().ok().is_some_and(|m| m.is_dir());
-    for path in args.paths {
-        let str = path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
-        let (mut fs, path) = find(str)?;
-        let metadata = fs.metadata(path)?;
-        match metadata.file_type {
-            VfsFileType::File => {
-                let mut file = fs.open(path)?;
-                if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
-                    file = decompress_file(file, kind)?;
-                }
-                let dest = if dest_is_dir {
-                    let name = path.rsplit('/').next().unwrap();
-                    dest.join(name)
-                } else {
-                    dest.clone()
-                };
-                let mut dest_file = File::create(&dest)
-                    .with_context(|| format!("Failed to create file {}", dest.display()))?;
-                io::copy(file.as_mut(), &mut dest_file)
-                    .with_context(|| format!("Failed to write file {}", dest.display()))?;
-                dest_file
-                    .flush()
-                    .with_context(|| format!("Failed to flush file {}", dest.display()))?;
-            }
-            VfsFileType::Directory => bail!("Cannot copy directory"),
-        }
-    }
-    Ok(())
-}
+const SEPARATOR: &str = " | ";
+type Columns<const N: usize> = [String; N];
+
+fn column_widths<const N: usize>(entries: &[Columns<N>]) -> [usize; N] {
+    let mut widths = [0usize; N];
+    for text in entries {
+        for (i, column) in text.iter().enumerate() {
+            widths[i] = widths[i].max(column.len());
+        }
+    }
+    widths
+}
+
+fn file_info(
+    filename: &str,
+    file: &mut dyn VfsFile,
+    metadata: &VfsMetadata,
+) -> anyhow::Result<Columns<5>> {
+    let format =
+        detect(file).with_context(|| format!("Failed to detect file format for {}", filename))?;
+    let mut info: Columns<5> = [
+        Size::from_bytes(metadata.len).to_string(),
+        filename.to_string(),
+        format.to_string(),
+        String::new(),
+        String::new(),
+    ];
+    if let FileFormat::Compressed(kind) = format {
+        let mut decompressed = decompress_file(file, kind)?;
+        let metadata = decompressed
+            .metadata()
+            .with_context(|| format!("Failed to fetch metadata for {}", filename))?;
+        let format = detect(decompressed.as_mut())
+            .with_context(|| format!("Failed to detect file format for {}", filename))?;
+        info[3] = format!("Decompressed: {}", Size::from_bytes(metadata.len));
+        info[4] = format.to_string();
+    }
+    Ok(info)
+}
+
+pub fn ls(args: LsArgs) -> anyhow::Result<()> {
+    let mut files = Vec::new();
+    match open_path(&args.path, false)? {
+        OpenResult::File(mut file, path) => {
+            let filename = Path::new(path)
+                .file_name()
+                .ok_or_else(|| anyhow!("Path has no filename"))?
+                .to_string_lossy();
+            if args.short {
+                println!("{}", filename);
+            } else {
+                let metadata = file
+                    .metadata()
+                    .with_context(|| format!("Failed to fetch metadata for {}", path))?;
+                files.push(file_info(&filename, file.as_mut(), &metadata)?);
+            }
+        }
+        OpenResult::Directory(mut fs, path) => {
+            ls_directory(fs.as_mut(), path, "", &args, &mut files)?;
+        }
+    }
+    if !args.short {
+        let widths = column_widths(&files);
+        for entry in files {
+            let mut written = 0;
+            for (i, column) in entry.iter().enumerate() {
+                if widths[i] > 0 {
+                    if written > 0 {
+                        print!("{}", SEPARATOR);
+                    }
+                    written += 1;
+                    print!("{:width$}", column, width = widths[i]);
+                }
+            }
+            println!();
+        }
+    }
+    Ok(())
+}
+
+fn ls_directory(
+    fs: &mut dyn Vfs,
+    path: &str,
+    base_filename: &str,
+    args: &LsArgs,
+    files: &mut Vec<Columns<5>>,
+) -> anyhow::Result<()> {
+    let entries = fs.read_dir(path)?;
+    files.reserve(entries.len());
+    for filename in entries {
+        let entry_path = format!("{}/{}", path, filename);
+        let display_filename = format!("{}{}", base_filename, filename);
+        let metadata = fs
+            .metadata(&entry_path)
+            .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
+        match metadata.file_type {
+            VfsFileType::File => {
+                let mut file = fs
+                    .open(&entry_path)
+                    .with_context(|| format!("Failed to open file {}", entry_path))?;
+                if args.short {
+                    println!("{}", display_filename);
+                } else {
+                    files.push(file_info(&display_filename, file.as_mut(), &metadata)?);
+                }
+            }
+            VfsFileType::Directory => {
+                if args.short {
+                    println!("{}/", display_filename);
+                } else {
+                    files.push([
+                        " ".to_string(),
+                        format!("{}/", display_filename),
+                        "Directory".to_string(),
+                        String::new(),
+                        String::new(),
+                    ]);
+                }
+                if args.recursive {
+                    let base_filename = format!("{}/", display_filename);
+                    ls_directory(fs, &entry_path, &base_filename, args, files)?;
+                }
+            }
+        }
+    }
+    Ok(())
+}
+
+pub fn cp(mut args: CpArgs) -> anyhow::Result<()> {
+    if args.paths.len() < 2 {
+        bail!("Both source and destination paths must be provided");
+    }
+    let dest = args.paths.pop().unwrap();
+    let dest_is_dir = args.paths.len() > 1 || dest.metadata().ok().is_some_and(|m| m.is_dir());
+    let auto_decompress = !args.no_decompress;
+    for path in args.paths {
+        match open_path(&path, auto_decompress)? {
+            OpenResult::File(file, path) => {
+                let dest = if dest_is_dir {
+                    fs::create_dir_all(&dest).with_context(|| {
+                        format!("Failed to create directory {}", dest.display())
+                    })?;
+                    let filename = Path::new(path)
+                        .file_name()
+                        .ok_or_else(|| anyhow!("Path has no filename"))?;
+                    dest.join(filename)
+                } else {
+                    dest.clone()
+                };
+                cp_file(file, path, &dest, auto_decompress, args.quiet)?;
+            }
+            OpenResult::Directory(mut fs, path) => {
+                cp_recursive(fs.as_mut(), path, &dest, auto_decompress, args.quiet)?;
+            }
+        }
+    }
+    Ok(())
+}
+
+fn cp_file(
+    mut file: Box<dyn VfsFile>,
+    path: &str,
+    dest: &Path,
+    auto_decompress: bool,
+    quiet: bool,
+) -> anyhow::Result<()> {
+    let mut compression = None;
+    if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
+        if auto_decompress {
+            file = decompress_file(file.as_mut(), kind)
+                .with_context(|| format!("Failed to decompress file {}", dest.display()))?;
+            compression = Some(kind);
+        }
+    }
+    let metadata = file
+        .metadata()
+        .with_context(|| format!("Failed to fetch metadata for {}", dest.display()))?;
+    if !quiet {
+        if let Some(kind) = compression {
+            println!(
+                "{} -> {} ({}) [Decompressed {}]",
+                path,
+                dest.display(),
+                Size::from_bytes(metadata.len),
+                kind
+            );
+        } else {
+            println!("{} -> {} ({})", path, dest.display(), Size::from_bytes(metadata.len));
+        }
+    }
+    let mut dest_file =
+        File::create(dest).with_context(|| format!("Failed to create file {}", dest.display()))?;
+    buf_copy(file.as_mut(), &mut dest_file)
+        .with_context(|| format!("Failed to copy file {}", dest.display()))?;
+    dest_file.flush().with_context(|| format!("Failed to flush file {}", dest.display()))?;
+    Ok(())
+}
+
+fn buf_copy<R, W>(reader: &mut R, writer: &mut W) -> io::Result<u64>
+where
+    R: BufRead + ?Sized,
+    W: Write + ?Sized,
+{
+    let mut copied = 0;
+    loop {
+        let buf = reader.fill_buf()?;
+        let len = buf.len();
+        if len == 0 {
+            break;
+        }
+        writer.write_all(buf)?;
+        reader.consume(len);
+        copied += len as u64;
+    }
+    Ok(copied)
+}
+
+fn cp_recursive(
+    fs: &mut dyn Vfs,
+    path: &str,
+    dest: &Path,
+    auto_decompress: bool,
+    quiet: bool,
+) -> anyhow::Result<()> {
+    fs::create_dir_all(dest)
+        .with_context(|| format!("Failed to create directory {}", dest.display()))?;
+    let entries = fs.read_dir(path)?;
+    for filename in entries {
+        let entry_path = format!("{}/{}", path, filename);
+        let metadata = fs
+            .metadata(&entry_path)
+            .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
+        match metadata.file_type {
+            VfsFileType::File => {
+                let file = fs
+                    .open(&entry_path)
+                    .with_context(|| format!("Failed to open file {}", entry_path))?;
+                cp_file(file, &entry_path, &dest.join(filename), auto_decompress, quiet)?;
+            }
+            VfsFileType::Directory => {
+                cp_recursive(fs, &entry_path, &dest.join(filename), auto_decompress, quiet)?;
+            }
+        }
+    }
+    Ok(())
+}
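For reference, a hypothetical run of the new copy path: the line format comes from `cp_file` above, but the exact printed path, size, and compression values shown here are made up.

```shell
$ dtk vfs cp 'disc.rvz:files/RELS.arc:amem/d_a_tag_so.rel' .
disc.rvz:files/RELS.arc:amem/d_a_tag_so.rel -> ./d_a_tag_so.rel (41.3 KiB) [Decompressed Yaz0]
```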
@@ -9,7 +9,7 @@ use crate::{
         ncompress::{compress_yay0, decompress_yay0},
         IntoCow, ToCow,
     },
-    vfs::open_path,
+    vfs::open_file,
 };
 
 #[derive(FromArgs, PartialEq, Debug)]

@@ -65,7 +65,7 @@ fn compress(args: CompressArgs) -> Result<()> {
     let single_file = files.len() == 1;
     for path in files {
         let data = {
-            let mut file = open_path(&path, false)?;
+            let mut file = open_file(&path, false)?;
             compress_yay0(file.map()?)
         };
         let out_path = if let Some(output) = &args.output {

@@ -88,7 +88,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
     let single_file = files.len() == 1;
     for path in files {
         let data = {
-            let mut file = open_path(&path, true)?;
+            let mut file = open_file(&path, true)?;
             decompress_yay0(file.map()?)
                 .with_context(|| format!("Failed to decompress '{}' using Yay0", path.display()))?
         };
@@ -9,7 +9,7 @@ use crate::{
         ncompress::{compress_yaz0, decompress_yaz0},
         IntoCow, ToCow,
     },
-    vfs::open_path,
+    vfs::open_file,
 };
 
 #[derive(FromArgs, PartialEq, Debug)]

@@ -65,7 +65,7 @@ fn compress(args: CompressArgs) -> Result<()> {
     let single_file = files.len() == 1;
     for path in files {
         let data = {
-            let mut file = open_path(&path, false)?;
+            let mut file = open_file(&path, false)?;
             compress_yaz0(file.map()?)
         };
         let out_path = if let Some(output) = &args.output {

@@ -88,7 +88,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
     let single_file = files.len() == 1;
     for path in files {
         let data = {
-            let mut file = open_path(&path, false)?;
+            let mut file = open_file(&path, false)?;
             decompress_yaz0(file.map()?)
                 .with_context(|| format!("Failed to decompress '{}' using Yaz0", path.display()))?
         };
@@ -24,7 +24,7 @@ use crate::{
         file::{buf_writer, FileReadInfo},
         split::default_section_align,
     },
-    vfs::open_path,
+    vfs::open_file,
 };
 
 pub fn parse_u32(s: &str) -> Result<u32, ParseIntError> {
@@ -49,7 +49,7 @@ pub fn apply_symbols_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileRe
 where P: AsRef<Path> {
     let path = path.as_ref();
     Ok(if path.is_file() {
-        let mut file = open_path(path, true)?;
+        let mut file = open_file(path, true)?;
         let cached = FileReadInfo::new(file.as_mut())?;
         for result in file.lines() {
             let line = match result {
@@ -629,7 +629,7 @@ pub fn apply_splits_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileRea
 where P: AsRef<Path> {
     let path = path.as_ref();
     Ok(if path.is_file() {
-        let mut file = open_path(path, true)?;
+        let mut file = open_file(path, true)?;
         let cached = FileReadInfo::new(file.as_mut())?;
         apply_splits(file.as_mut(), obj)?;
         Some(cached)
@@ -744,7 +744,7 @@ where P: AsRef<Path> {
     if !path.is_file() {
         return Ok(None);
     }
-    let file = open_path(path, true)?;
+    let file = open_file(path, true)?;
     let mut sections = Vec::new();
     let mut state = SplitState::None;
     for result in file.lines() {
@@ -31,7 +31,7 @@ use crate::{
         comment::{CommentSym, MWComment},
         reader::{Endian, FromReader, ToWriter},
     },
-    vfs::open_path,
+    vfs::open_file,
 };
 
 pub const SHT_MWCATS: u32 = SHT_LOUSER + 0x4A2A82C2;
@@ -48,7 +48,7 @@ enum BoundaryState {
 pub fn process_elf<P>(path: P) -> Result<ObjInfo>
 where P: AsRef<Path> {
     let path = path.as_ref();
-    let mut file = open_path(path, true)?;
+    let mut file = open_file(path, true)?;
     let obj_file = object::read::File::parse(file.map()?)?;
     let architecture = match obj_file.architecture() {
         Architecture::PowerPc => ObjArchitecture::PowerPc,
@@ -17,7 +17,7 @@ use crate::{
         ncompress::{decompress_yay0, decompress_yaz0, YAY0_MAGIC, YAZ0_MAGIC},
         Bytes,
     },
-    vfs::{open_path, VfsFile},
+    vfs::{open_file, VfsFile},
 };
 
 /// Creates a buffered writer around a file (not memory mapped).
@@ -67,8 +67,7 @@ pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> {
         let path_str =
             path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
         if let Some(rsp_file) = path_str.strip_prefix('@') {
-            let rsp_path = Path::new(rsp_file);
-            let file = open_path(rsp_path, true)?;
+            let file = open_file(Path::new(rsp_file), true)?;
             for result in file.lines() {
                 let line = result?;
                 if !line.is_empty() {
@@ -123,7 +122,7 @@ impl FileIterator {
 
         let path = self.paths[self.index].clone();
         self.index += 1;
-        match open_path(&path, true) {
+        match open_file(&path, true) {
             Ok(file) => Some(Ok((path, file))),
             Err(e) => Some(Err(e)),
         }
@@ -24,7 +24,7 @@ use crate::{
         ObjSymbols, ObjUnit,
     },
     util::nested::NestedVec,
-    vfs::open_path,
+    vfs::open_file,
 };
 #[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub enum SymbolKind {
@@ -722,7 +722,7 @@ pub fn apply_map_file<P>(
 where
     P: AsRef<Path>,
 {
-    let mut file = open_path(path.as_ref(), true)?;
+    let mut file = open_file(path.as_ref(), true)?;
     let info = process_map(file.as_mut(), common_bss_start, mw_comment_version)?;
     apply_map(info, obj)
 }
@@ -2,7 +2,7 @@ use std::{borrow::Cow, ffi::CStr};
 
 use zerocopy::{big_endian::*, FromBytes, Immutable, IntoBytes, KnownLayout};
 
-use crate::static_assert;
+use crate::{static_assert, vfs::next_non_empty};
 
 pub const RARC_MAGIC: [u8; 4] = *b"RARC";
 
@@ -280,13 +280,3 @@ pub enum RarcNodeKind {
     File(usize, RarcNode),
     Directory(usize, RarcDirectory),
 }
-
-fn next_non_empty<'a>(iter: &mut impl Iterator<Item = &'a str>) -> &'a str {
-    loop {
-        match iter.next() {
-            Some("") => continue,
-            Some(next) => break next,
-            None => break "",
-        }
-    }
-}
@@ -3,7 +3,7 @@ use std::{borrow::Cow, ffi::CStr, mem::size_of};
 use anyhow::Result;
 use zerocopy::{big_endian::U32, FromBytes, Immutable, IntoBytes, KnownLayout};
 
-use crate::static_assert;
+use crate::{static_assert, vfs::next_non_empty};
 
 pub const U8_MAGIC: [u8; 4] = [0x55, 0xAA, 0x38, 0x2D];
 
@@ -178,16 +178,6 @@ impl<'a> U8View<'a> {
     }
 }
-
-fn next_non_empty<'a>(iter: &mut impl Iterator<Item = &'a str>) -> &'a str {
-    loop {
-        match iter.next() {
-            Some("") => continue,
-            Some(next) => break next,
-            None => break "",
-        }
-    }
-}
 
 /// Iterator over the nodes in a U8 archive.
 pub struct U8Iter<'a> {
     inner: &'a U8View<'a>,
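Both the RARC and U8 parsers previously carried identical private copies of `next_non_empty`; the helper now lives in the `vfs` module and is shared. Its job is to skip the empty segments produced by leading or doubled separators when walking archive paths. A standalone illustration of its behavior (the example path is made up):

```rust
/// Returns the next non-empty segment, or "" once the iterator is exhausted.
fn next_non_empty<'a>(iter: &mut impl Iterator<Item = &'a str>) -> &'a str {
    loop {
        match iter.next() {
            Some("") => continue, // skip empty segments from "//" or a leading "/"
            Some(next) => break next,
            None => break "",
        }
    }
}

fn main() {
    let mut it = "//files//rels/main.rel".split('/');
    assert_eq!(next_non_empty(&mut it), "files");
    assert_eq!(next_non_empty(&mut it), "rels");
    assert_eq!(next_non_empty(&mut it), "main.rel");
    assert_eq!(next_non_empty(&mut it), ""); // exhausted
}
```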
@@ -10,7 +10,9 @@ use nodtool::{
     nod::{DiscStream, Fst, NodeKind, OwnedFileStream, PartitionBase, PartitionMeta},
 };
 
-use super::{StaticFile, Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult};
+use super::{
+    next_non_empty, StaticFile, Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult,
+};
 
 #[derive(Clone)]
 pub struct DiscFs {
@@ -36,15 +38,13 @@ impl DiscFs {
     fn find(&self, path: &str) -> VfsResult<DiscNode> {
         let path = path.trim_matches('/');
         let mut split = path.split('/');
-        let Some(segment) = split.next() else {
-            return Ok(DiscNode::Root);
-        };
+        let mut segment = next_non_empty(&mut split);
         if segment.is_empty() {
             return Ok(DiscNode::Root);
         }
         if segment.eq_ignore_ascii_case("files") {
             let fst = Fst::new(&self.meta.raw_fst)?;
-            if split.next().is_none() {
+            if next_non_empty(&mut split).is_empty() {
                 let root = fst.nodes[0];
                 return Ok(DiscNode::Node(fst, 0, root));
             }
@@ -54,9 +54,7 @@ impl DiscFs {
                 None => Ok(DiscNode::None),
             }
         } else if segment.eq_ignore_ascii_case("sys") {
-            let Some(segment) = split.next() else {
-                return Ok(DiscNode::Sys);
-            };
+            segment = next_non_empty(&mut split);
             // No directories in sys
             if split.next().is_some() {
                 return Ok(DiscNode::None);
@@ -92,8 +90,8 @@ impl Vfs for DiscFs {
     fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
         match self.find(path)? {
             DiscNode::None => Err(VfsError::NotFound),
-            DiscNode::Root => Err(VfsError::DirectoryExists),
-            DiscNode::Sys => Err(VfsError::DirectoryExists),
+            DiscNode::Root => Err(VfsError::IsADirectory),
+            DiscNode::Sys => Err(VfsError::IsADirectory),
             DiscNode::Node(_, _, node) => match node.kind() {
                 NodeKind::File => {
                     if node.length() > 2048 {
@@ -107,7 +105,7 @@ impl Vfs for DiscFs {
                         Ok(Box::new(StaticFile::new(Arc::from(data.as_slice()), self.mtime)))
                     }
                 }
-                NodeKind::Directory => Err(VfsError::FileExists),
+                NodeKind::Directory => Err(VfsError::IsADirectory),
                 NodeKind::Invalid => Err(VfsError::from("FST: Invalid node kind")),
             },
             DiscNode::Static(data) => Ok(Box::new(StaticFile::new(Arc::from(data), self.mtime))),
@@ -146,7 +144,7 @@ impl Vfs for DiscFs {
             }
             DiscNode::Node(fst, idx, node) => {
                 match node.kind() {
-                    NodeKind::File => return Err(VfsError::FileExists),
+                    NodeKind::File => return Err(VfsError::NotADirectory),
                     NodeKind::Directory => {}
                    NodeKind::Invalid => return Err(VfsError::from("FST: Invalid node kind")),
                 }
@@ -168,7 +166,7 @@ impl Vfs for DiscFs {
                 }
                 Ok(entries)
             }
-            DiscNode::Static(_) => Err(VfsError::FileExists),
+            DiscNode::Static(_) => Err(VfsError::NotADirectory),
         }
     }
 
src/vfs/mod.rs
@@ -72,16 +72,24 @@ dyn_clone::clone_trait_object!(VfsFile);
 #[derive(Debug)]
 pub enum VfsError {
     NotFound,
+    NotADirectory,
+    IsADirectory,
     IoError(io::Error),
     Other(String),
-    FileExists,
-    DirectoryExists,
 }
 
 pub type VfsResult<T, E = VfsError> = Result<T, E>;
 
 impl From<io::Error> for VfsError {
-    fn from(e: io::Error) -> Self { VfsError::IoError(e) }
+    fn from(e: io::Error) -> Self {
+        match e.kind() {
+            io::ErrorKind::NotFound => VfsError::NotFound,
+            // TODO: stabilized in Rust 1.83
+            // io::ErrorKind::NotADirectory => VfsError::NotADirectory,
+            // io::ErrorKind::IsADirectory => VfsError::IsADirectory,
+            _ => VfsError::IoError(e),
+        }
+    }
 }
 
 impl From<String> for VfsError {
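`From<io::Error>` now folds recognizable `io::ErrorKind` values into dedicated `VfsError` variants instead of wrapping everything in `IoError`; the `NotADirectory`/`IsADirectory` mappings stay commented out until the corresponding kinds are stable. A simplified standalone sketch of the same mapping pattern (toy error enum, not the crate's full type):

```rust
use std::io;

#[derive(Debug)]
enum VfsError {
    NotFound,
    IoError(io::Error),
}

impl From<io::Error> for VfsError {
    fn from(e: io::Error) -> Self {
        match e.kind() {
            // Map well-known kinds to dedicated variants...
            io::ErrorKind::NotFound => VfsError::NotFound,
            // ...and keep everything else as a generic I/O error.
            _ => VfsError::IoError(e),
        }
    }
}

fn main() {
    let err: VfsError = io::Error::new(io::ErrorKind::NotFound, "missing").into();
    assert!(matches!(err, VfsError::NotFound));
}
```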
@@ -98,33 +106,63 @@ impl Display for VfsError {
             VfsError::NotFound => write!(f, "File or directory not found"),
             VfsError::IoError(e) => write!(f, "{}", e),
             VfsError::Other(e) => write!(f, "{}", e),
-            VfsError::FileExists => write!(f, "File already exists"),
-            VfsError::DirectoryExists => write!(f, "Directory already exists"),
+            VfsError::NotADirectory => write!(f, "Path is a file, not a directory"),
+            VfsError::IsADirectory => write!(f, "Path is a directory, not a file"),
         }
     }
 }
 
 impl Error for VfsError {}
 
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub enum FileFormat {
     Regular,
     Compressed(CompressionKind),
     Archive(ArchiveKind),
 }
 
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
+impl Display for FileFormat {
+    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
+        match self {
+            FileFormat::Regular => write!(f, "File"),
+            FileFormat::Compressed(kind) => write!(f, "Compressed: {}", kind),
+            FileFormat::Archive(kind) => write!(f, "Archive: {}", kind),
+        }
+    }
+}
+
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub enum CompressionKind {
     Yay0,
     Yaz0,
     Nlzss,
 }
 
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
+impl Display for CompressionKind {
+    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
+        match self {
+            CompressionKind::Yay0 => write!(f, "Yay0"),
+            CompressionKind::Yaz0 => write!(f, "Yaz0"),
+            CompressionKind::Nlzss => write!(f, "NLZSS"),
+        }
+    }
+}
+
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub enum ArchiveKind {
     Rarc,
     U8,
-    Disc,
+    Disc(nod::Format),
+}
+
+impl Display for ArchiveKind {
+    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
+        match self {
+            ArchiveKind::Rarc => write!(f, "RARC"),
+            ArchiveKind::U8 => write!(f, "U8"),
+            ArchiveKind::Disc(format) => write!(f, "Disc ({})", format),
+        }
+    }
 }
 
 pub fn detect<R>(file: &mut R) -> io::Result<FileFormat>
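The `detect` function continued in the next hunk classifies a file by sniffing its header bytes and rewinding the stream. A simplified standalone sketch of magic-byte sniffing in that spirit (toy enum and logic only; the crate's detector also recognizes NLZSS, RARC, U8, and disc images):

```rust
use std::io::{self, Read, Seek, SeekFrom};

#[derive(Debug, PartialEq)]
enum Sniffed {
    Yay0,
    Yaz0,
    Unknown,
}

/// Reads the first four bytes, classifies them, and rewinds the stream so the
/// caller can still read the file from the start.
fn sniff<R: Read + Seek>(file: &mut R) -> io::Result<Sniffed> {
    let mut magic = [0u8; 4];
    let n = file.read(&mut magic)?;
    file.seek(SeekFrom::Start(0))?;
    Ok(if n == 4 && &magic == b"Yay0" {
        Sniffed::Yay0
    } else if n == 4 && &magic == b"Yaz0" {
        Sniffed::Yaz0
    } else {
        Sniffed::Unknown
    })
}

fn main() -> io::Result<()> {
    let mut cursor = io::Cursor::new(b"Yaz0\x00\x00\x10\x00".to_vec());
    assert_eq!(sniff(&mut cursor)?, Sniffed::Yaz0);
    Ok(())
}
```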
@@ -146,97 +184,138 @@ where R: Read + Seek + ?Sized {
             let format = nod::Disc::detect(file)?;
             file.seek(SeekFrom::Start(0))?;
             match format {
-                Some(_) => Ok(FileFormat::Archive(ArchiveKind::Disc)),
+                Some(format) => Ok(FileFormat::Archive(ArchiveKind::Disc(format))),
                 None => Ok(FileFormat::Regular),
             }
         }
     }
 }
 
-pub fn open_path(path: &Path, auto_decompress: bool) -> anyhow::Result<Box<dyn VfsFile>> {
-    open_path_fs(Box::new(StdFs), path, auto_decompress)
+pub enum OpenResult<'a> {
+    File(Box<dyn VfsFile>, &'a str),
+    Directory(Box<dyn Vfs>, &'a str),
 }
 
-pub fn open_path_fs(
+pub fn open_path(path: &Path, auto_decompress: bool) -> anyhow::Result<OpenResult> {
+    open_path_with_fs(Box::new(StdFs), path, auto_decompress)
+}
+
+pub fn open_path_with_fs(
     mut fs: Box<dyn Vfs>,
     path: &Path,
     auto_decompress: bool,
-) -> anyhow::Result<Box<dyn VfsFile>> {
+) -> anyhow::Result<OpenResult> {
     let str = path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
     let mut split = str.split(':').peekable();
-    let mut within = String::new();
+    let mut current_path = String::new();
+    let mut file: Option<Box<dyn VfsFile>> = None;
+    let mut segment = "";
     loop {
-        let path = split.next().unwrap();
-        let mut file = fs
-            .open(path)
-            .with_context(|| format!("Failed to open {}", format_path(path, &within)))?;
-        match detect(file.as_mut()).with_context(|| {
-            format!("Failed to detect file type for {}", format_path(path, &within))
-        })? {
+        // Open the next segment if necessary
+        if file.is_none() {
+            segment = split.next().unwrap();
+            if !current_path.is_empty() {
+                current_path.push(':');
+            }
+            current_path.push_str(segment);
+            let file_type = match fs.metadata(segment) {
+                Ok(metadata) => metadata.file_type,
+                Err(VfsError::NotFound) => return Err(anyhow!("{} not found", current_path)),
+                Err(e) => return Err(e).context(format!("Failed to open {}", current_path)),
+            };
+            match file_type {
+                VfsFileType::File => {
+                    file = Some(
+                        fs.open(segment)
+                            .with_context(|| format!("Failed to open {}", current_path))?,
+                    );
+                }
+                VfsFileType::Directory => {
+                    return if split.peek().is_some() {
+                        Err(anyhow!("{} is not a file", current_path))
+                    } else {
+                        Ok(OpenResult::Directory(fs, segment))
+                    }
+                }
+            }
+        }
+        let mut current_file = file.take().unwrap();
+        let format = detect(current_file.as_mut())
+            .with_context(|| format!("Failed to detect file type for {}", current_path))?;
+        if let Some(&next) = split.peek() {
+            match next {
+                "nlzss" => {
+                    split.next();
+                    file = Some(
+                        decompress_file(current_file.as_mut(), CompressionKind::Nlzss)
+                            .with_context(|| {
+                                format!("Failed to decompress {} with NLZSS", current_path)
+                            })?,
+                    );
+                }
+                "yay0" => {
+                    split.next();
+                    file = Some(
+                        decompress_file(current_file.as_mut(), CompressionKind::Yay0)
+                            .with_context(|| {
+                                format!("Failed to decompress {} with Yay0", current_path)
+                            })?,
+                    );
+                }
+                "yaz0" => {
+                    split.next();
+                    file = Some(
+                        decompress_file(current_file.as_mut(), CompressionKind::Yaz0)
+                            .with_context(|| {
+                                format!("Failed to decompress {} with Yaz0", current_path)
+                            })?,
+                    );
+                }
+                _ => match format {
                     FileFormat::Regular => {
-                return match split.next() {
-                    None => Ok(file),
-                    Some(segment) => {
-                        if split.next().is_some() {
-                            return Err(anyhow!(
-                                "{} is not an archive",
-                                format_path(path, &within)
-                            ));
-                        }
-                        match segment {
-                            "nlzss" => Ok(decompress_file(file, CompressionKind::Nlzss)
-                                .with_context(|| {
-                                    format!(
-                                        "Failed to decompress {} with NLZSS",
-                                        format_path(path, &within)
-                                    )
-                                })?),
-                            "yay0" => Ok(decompress_file(file, CompressionKind::Yay0)
-                                .with_context(|| {
-                                    format!(
-                                        "Failed to decompress {} with Yay0",
-                                        format_path(path, &within)
-                                    )
-                                })?),
-                            "yaz0" => Ok(decompress_file(file, CompressionKind::Yaz0)
-                                .with_context(|| {
-                                    format!(
-                                        "Failed to decompress {} with Yaz0",
-                                        format_path(path, &within)
-                                    )
-                                })?),
-                            _ => Err(anyhow!("{} is not an archive", format_path(path, &within))),
-                        }
-                    }
-                }
+                        return Err(anyhow!("{} is not an archive", current_path))
                     }
                     FileFormat::Compressed(kind) => {
-                return if split.peek().is_none() {
-                    if auto_decompress {
-                        Ok(decompress_file(file, kind).with_context(|| {
-                            format!("Failed to decompress {}", format_path(path, &within))
-                        })?)
-                    } else {
-                        Ok(file)
-                    }
-                } else {
-                    Err(anyhow!("{} is not an archive", format_path(path, &within)))
-                };
+                        file =
+                            Some(decompress_file(current_file.as_mut(), kind).with_context(
+                                || format!("Failed to decompress {}", current_path),
+                            )?);
+                        // Continue the loop to detect the new format
                     }
                     FileFormat::Archive(kind) => {
-                if split.peek().is_none() {
-                    return Ok(file);
-                } else {
-                    fs = open_fs(file, kind).with_context(|| {
-                        format!("Failed to open container {}", format_path(path, &within))
+                        fs = open_fs(current_file, kind).with_context(|| {
+                            format!("Failed to open container {}", current_path)
                         })?;
-                    if !within.is_empty() {
-                        within.push(':');
-                    }
-                    within.push_str(path);
-                }
+                        // Continue the loop to open the next segment
                     }
+                },
+            }
+        } else {
+            // No more segments, return as-is
+            return match format {
+                FileFormat::Compressed(kind) if auto_decompress => Ok(OpenResult::File(
+                    decompress_file(current_file.as_mut(), kind)
+                        .with_context(|| format!("Failed to decompress {}", current_path))?,
+                    segment,
+                )),
+                _ => Ok(OpenResult::File(current_file, segment)),
+            };
         }
     }
+}
+
+pub fn open_file(path: &Path, auto_decompress: bool) -> anyhow::Result<Box<dyn VfsFile>> {
+    open_file_with_fs(Box::new(StdFs), path, auto_decompress)
+}
+
+pub fn open_file_with_fs(
+    fs: Box<dyn Vfs>,
+    path: &Path,
+    auto_decompress: bool,
+) -> anyhow::Result<Box<dyn VfsFile>> {
+    match open_path_with_fs(fs, path, auto_decompress)? {
+        OpenResult::File(file, _) => Ok(file),
+        OpenResult::Directory(_, _) => Err(VfsError::IsADirectory.into()),
    }
 }
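The reworked `open_path_with_fs` walks `:`-separated path segments in a loop: each segment is looked up in the current filesystem, an archive segment becomes the `fs` for the next iteration, compressed segments are transparently decompressed, and the final segment is returned as either a file or a directory via `OpenResult`. A heavily stripped-down standalone sketch of that segment-walking idea (toy types only, nothing from the crate's API):

```rust
// Toy model of walking a colon-separated VFS path: each segment is either a
// container to descend into or a plain file. The real resolver additionally
// threads decompression ("yay0", "yaz0", "nlzss" suffix segments) through the
// same loop.
#[derive(Debug)]
enum Node {
    File(String),
    Container(Vec<(String, Node)>),
}

fn resolve<'a>(mut node: &'a Node, path: &str) -> Option<&'a Node> {
    for segment in path.split(':').filter(|s| !s.is_empty()) {
        match node {
            Node::Container(children) => {
                // Descend into the child named by this segment.
                node = children.iter().find(|(name, _)| name == segment).map(|(_, n)| n)?;
            }
            // Trailing segments after a plain file cannot be resolved.
            Node::File(_) => return None,
        }
    }
    Some(node)
}

fn main() {
    let tree = Node::Container(vec![(
        "file.arc".to_string(),
        Node::Container(vec![("a.bin".to_string(), Node::File("data".to_string()))]),
    )]);
    assert!(matches!(resolve(&tree, "file.arc:a.bin"), Some(Node::File(_))));
    assert!(resolve(&tree, "file.arc:missing.bin").is_none());
}
```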
@@ -245,7 +324,7 @@ pub fn open_fs(mut file: Box<dyn VfsFile>, kind: ArchiveKind) -> io::Result<Box<
     match kind {
         ArchiveKind::Rarc => Ok(Box::new(RarcFs::new(file)?)),
         ArchiveKind::U8 => Ok(Box::new(U8Fs::new(file)?)),
-        ArchiveKind::Disc => {
+        ArchiveKind::Disc(_) => {
             let disc = nod::Disc::new_stream(file.into_disc_stream()).map_err(nod_to_io_error)?;
             let partition =
                 disc.open_partition_kind(nod::PartitionKind::Data).map_err(nod_to_io_error)?;
@@ -255,35 +334,37 @@ pub fn open_fs(mut file: Box<dyn VfsFile>, kind: ArchiveKind) -> io::Result<Box<
 }
 
 pub fn decompress_file(
-    mut file: Box<dyn VfsFile>,
+    file: &mut dyn VfsFile,
     kind: CompressionKind,
 ) -> io::Result<Box<dyn VfsFile>> {
     let metadata = file.metadata()?;
+    let data = file.map()?;
     match kind {
         CompressionKind::Yay0 => {
-            let data = file.map()?;
             let result = orthrus_ncompress::yay0::Yay0::decompress_from(data)
                 .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
             Ok(Box::new(StaticFile::new(Arc::from(result), metadata.mtime)))
         }
         CompressionKind::Yaz0 => {
-            let data = file.map()?;
             let result = orthrus_ncompress::yaz0::Yaz0::decompress_from(data)
                 .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
             Ok(Box::new(StaticFile::new(Arc::from(result), metadata.mtime)))
         }
         CompressionKind::Nlzss => {
-            let result = nintendo_lz::decompress(&mut file)
+            let result = nintendo_lz::decompress_arr(data)
                 .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
             Ok(Box::new(StaticFile::new(Arc::from(result.as_slice()), metadata.mtime)))
         }
     }
 }
 
-fn format_path(path: &str, within: &str) -> String {
-    if within.is_empty() {
-        format!("'{}'", path)
-    } else {
-        format!("'{}' (within '{}')", path, within)
+#[inline]
+pub fn next_non_empty<'a>(iter: &mut impl Iterator<Item = &'a str>) -> &'a str {
+    loop {
+        match iter.next() {
+            Some("") => continue,
+            Some(next) => break next,
+            None => break "",
+        }
     }
 }
@@ -29,7 +29,7 @@ impl Vfs for RarcFs {
                 let file = WindowedFile::new(self.file.clone(), offset, len)?;
                 Ok(Box::new(file))
             }
-            Some(RarcNodeKind::Directory(_, _)) => Err(VfsError::DirectoryExists),
+            Some(RarcNodeKind::Directory(_, _)) => Err(VfsError::IsADirectory),
             None => Err(VfsError::NotFound),
         }
     }
@@ -53,7 +53,7 @@ impl Vfs for RarcFs {
                 }
                 Ok(entries)
             }
-            Some(RarcNodeKind::File(_, _)) => Err(VfsError::FileExists),
+            Some(RarcNodeKind::File(_, _)) => Err(VfsError::NotADirectory),
             None => Err(VfsError::NotFound),
         }
     }
@@ -28,7 +28,7 @@ impl Vfs for U8Fs {
                     let file = WindowedFile::new(self.file.clone(), offset, len)?;
                     Ok(Box::new(file))
                 }
-                U8NodeKind::Directory => Err(VfsError::DirectoryExists),
+                U8NodeKind::Directory => Err(VfsError::IsADirectory),
                 U8NodeKind::Invalid => Err(VfsError::from("U8: Invalid node kind")),
             },
             None => Err(VfsError::NotFound),
@@ -44,7 +44,7 @@ impl Vfs for U8Fs {
         let view = self.view()?;
         match view.find(path) {
             Some((idx, node)) => match node.kind() {
-                U8NodeKind::File => Err(VfsError::FileExists),
+                U8NodeKind::File => Err(VfsError::NotADirectory),
                 U8NodeKind::Directory => {
                     let mut entries = Vec::new();
                     let mut idx = idx + 1;