Add Yay0/Yaz0 compression & decompression
Uses orthrus-ncompress. Fixes #6
parent e46c6a72bc
commit 989293a477
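The new `dtk yay0`/`dtk yaz0` subcommands wrap orthrus-ncompress instead of the hand-rolled Yaz0 decompressor removed at the end of this diff. Below is a minimal round-trip sketch using orthrus-ncompress 0.2 directly, mirroring the helpers added in `src/util/ncompress.rs`; the sample buffer is made up, and `anyhow` is assumed only because it is already a dependency of the repo.

```rust
use orthrus_ncompress::yaz0::Yaz0;

fn main() -> anyhow::Result<()> {
    let input = vec![0xAAu8; 256];

    // Compress into a worst-case-sized buffer, then trim to the bytes actually written,
    // the same pattern compress_yaz0 in src/util/ncompress.rs uses.
    let mut compressed = vec![0u8; Yaz0::worst_possible_size(input.len())];
    let written = Yaz0::compress_n64(&input, compressed.as_mut_slice());
    compressed.truncate(written);

    // decompress_from expects a Yaz0 header followed by compressed data.
    let decompressed = Yaz0::decompress_from(&compressed).map_err(|e| anyhow::anyhow!(e))?;
    assert_eq!(&*decompressed, input.as_slice());
    Ok(())
}
```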
@@ -297,6 +297,7 @@ dependencies = [
 "objdiff-core",
 "object 0.34.0",
 "once_cell",
+ "orthrus-ncompress",
 "owo-colors",
 "path-slash",
 "petgraph",
@@ -473,6 +474,12 @@ dependencies = [
 "ahash",
 ]

+[[package]]
+name = "heck"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+
 [[package]]
 name = "hermit-abi"
 version = "0.1.19"
@@ -702,6 +709,25 @@ version = "1.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"

+[[package]]
+name = "orthrus-core"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad3db74dec173db0794aadee37558a5ca1f944ed0bd0c340bbad7103af0dc06a"
+dependencies = [
+ "snafu",
+]
+
+[[package]]
+name = "orthrus-ncompress"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "770b9c12ef43e3204d9c5e4e77ed5fcd48a08a104b6ba17a3a10a0dc975deb07"
+dependencies = [
+ "orthrus-core",
+ "snafu",
+]
+
 [[package]]
 name = "overload"
 version = "0.1.1"
@@ -986,6 +1012,27 @@ version = "1.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"

+[[package]]
+name = "snafu"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75976f4748ab44f6e5332102be424e7c2dc18daeaf7e725f2040c3ebb133512e"
+dependencies = [
+ "snafu-derive",
+]
+
+[[package]]
+name = "snafu-derive"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4b19911debfb8c2fb1107bc6cb2d61868aaf53a988449213959bb1b5b1ed95f"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.52",
+]
+
 [[package]]
 name = "strsim"
 version = "0.8.0"

@@ -51,6 +51,7 @@ objdiff-core = { git = "https://github.com/encounter/objdiff", rev = "a5668b484b
 #objdiff-core = { path = "../objdiff/objdiff-core", features = ["ppc"] }
 object = { version = "0.34.0", features = ["read_core", "std", "elf", "write_std"], default-features = false }
 once_cell = "1.19.0"
+orthrus-ncompress = "0.2.0"
 owo-colors = { version = "4.0.0", features = ["supports-colors"] }
 path-slash = "0.2.1"
 petgraph = { version = "0.6.4", default-features = false }

README.md | 33

@@ -39,7 +39,10 @@ project structure and build system that uses decomp-toolkit under the hood.
 - [nlzss decompress](#nlzss-decompress)
 - [rarc list](#rarc-list)
 - [rarc extract](#rarc-extract)
+- [yay0 decompress](#yay0-decompress)
+- [yay0 compress](#yay0-compress)
 - [yaz0 decompress](#yaz0-decompress)
+- [yaz0 compress](#yaz0-compress)

 ## Goals

@@ -341,6 +344,26 @@ Extracts the contents of an RARC archive.
 $ dtk rarc extract input.arc -o output_dir
 ```

+### yay0 decompress
+
+Decompresses Yay0-compressed files.
+
+```shell
+$ dtk yay0 decompress input.bin.yay0 -o output.bin
+# or, for batch processing
+$ dtk yay0 decompress rels/*.yay0 -o rels
+```
+
+### yay0 compress
+
+Compresses files using Yay0 compression.
+
+```shell
+$ dtk yay0 compress input.bin -o output.bin.yay0
+# or, for batch processing
+$ dtk yay0 compress rels/* -o rels
+```
+
 ### yaz0 decompress

 Decompresses Yaz0-compressed files.
@@ -350,3 +373,13 @@ $ dtk yaz0 decompress input.bin.yaz0 -o output.bin
 # or, for batch processing
 $ dtk yaz0 decompress rels/*.yaz0 -o rels
 ```
+
+### yaz0 compress
+
+Compresses files using Yaz0 compression.
+
+```shell
+$ dtk yaz0 compress input.bin -o output.bin.yaz0
+# or, for batch processing
+$ dtk yaz0 compress rels/* -o rels
+```

@@ -9,7 +9,7 @@ use anyhow::{anyhow, bail, Context, Result};
 use argp::FromArgs;
 use object::{Object, ObjectSymbol, SymbolScope};

-use crate::util::file::{buf_writer, map_file, process_rsp};
+use crate::util::file::{buf_writer, map_file, map_file_basic, process_rsp};

 #[derive(FromArgs, PartialEq, Debug)]
 /// Commands for processing static libraries.
@@ -80,7 +80,7 @@ fn create(args: CreateArgs) -> Result<()> {
             Entry::Vacant(e) => e.insert(Vec::new()),
             Entry::Occupied(_) => bail!("Duplicate file name '{path_str}'"),
         };
-        let file = map_file(path)?;
+        let file = map_file_basic(path)?;
        let obj = object::File::parse(file.as_slice())?;
        for symbol in obj.symbols() {
            if symbol.scope() == SymbolScope::Dynamic {

@@ -46,7 +46,10 @@ use crate::{
     dep::DepFile,
     dol::process_dol,
     elf::{process_elf, write_elf},
-    file::{buf_reader, buf_writer, map_file, touch, verify_hash, FileIterator, FileReadInfo},
+    file::{
+        buf_reader, buf_writer, map_file, map_file_basic, touch, verify_hash, FileIterator,
+        FileReadInfo,
+    },
     lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
     map::apply_map_file,
     rel::{process_rel, process_rel_header, update_rel_section_alignment},
@@ -156,7 +159,7 @@ mod path_slash_serde {
     use std::path::PathBuf;

     use path_slash::PathBufExt as _;
-    use serde::{self, Deserialize, Deserializer, Serializer};
+    use serde::{Deserialize, Deserializer, Serializer};

     pub fn serialize<S>(path: &PathBuf, s: S) -> Result<S::Ok, S::Error>
     where S: Serializer {
@@ -174,7 +177,7 @@ mod path_slash_serde_option {
     use std::path::PathBuf;

     use path_slash::PathBufExt as _;
-    use serde::{self, Deserialize, Deserializer, Serializer};
+    use serde::{Deserialize, Deserializer, Serializer};

     pub fn serialize<S>(path: &Option<PathBuf>, s: S) -> Result<S::Ok, S::Error>
     where S: Serializer {
@@ -938,7 +941,7 @@ fn split_write_obj(

 fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
     if path.is_file() {
-        let old_file = map_file(path)?;
+        let old_file = map_file_basic(path)?;
        // If the file is the same size, check if the contents are the same
        // Avoid writing if unchanged, since it will update the file's mtime
        if old_file.len() == contents.len() as u64
@@ -1639,7 +1642,7 @@ fn apply(args: ApplyArgs) -> Result<()> {
                    orig_sym.kind,
                    linked_sym.name
                );
-                updated_sym.name = linked_sym.name.clone();
+                updated_sym.name.clone_from(&linked_sym.name);
            }
            if linked_sym.size != orig_sym.size {
                log::info!(

@@ -12,4 +12,5 @@ pub mod rarc;
 pub mod rel;
 pub mod rso;
 pub mod shasum;
+pub mod yay0;
 pub mod yaz0;

@@ -0,0 +1,105 @@
+use std::{fs, path::PathBuf};
+
+use anyhow::{Context, Result};
+use argp::FromArgs;
+
+use crate::util::{
+    file::{map_file_basic, process_rsp},
+    ncompress::{compress_yay0, decompress_yay0},
+    IntoCow, ToCow,
+};
+
+#[derive(FromArgs, PartialEq, Debug)]
+/// Commands for processing YAY0-compressed files.
+#[argp(subcommand, name = "yay0")]
+pub struct Args {
+    #[argp(subcommand)]
+    command: SubCommand,
+}
+
+#[derive(FromArgs, PartialEq, Debug)]
+#[argp(subcommand)]
+enum SubCommand {
+    Compress(CompressArgs),
+    Decompress(DecompressArgs),
+}
+
+#[derive(FromArgs, PartialEq, Eq, Debug)]
+/// Compresses files using YAY0.
+#[argp(subcommand, name = "compress")]
+pub struct CompressArgs {
+    #[argp(positional)]
+    /// Files to compress
+    files: Vec<PathBuf>,
+    #[argp(option, short = 'o')]
+    /// Output file (or directory, if multiple files are specified).
+    /// If not specified, compresses in-place.
+    output: Option<PathBuf>,
+}
+
+#[derive(FromArgs, PartialEq, Eq, Debug)]
+/// Decompresses YAY0-compressed files.
+#[argp(subcommand, name = "decompress")]
+pub struct DecompressArgs {
+    #[argp(positional)]
+    /// YAY0-compressed files
+    files: Vec<PathBuf>,
+    #[argp(option, short = 'o')]
+    /// Output file (or directory, if multiple files are specified).
+    /// If not specified, decompresses in-place.
+    output: Option<PathBuf>,
+}
+
+pub fn run(args: Args) -> Result<()> {
+    match args.command {
+        SubCommand::Compress(args) => compress(args),
+        SubCommand::Decompress(args) => decompress(args),
+    }
+}
+
+fn compress(args: CompressArgs) -> Result<()> {
+    let files = process_rsp(&args.files)?;
+    let single_file = files.len() == 1;
+    for path in files {
+        let data = {
+            let file = map_file_basic(&path)?;
+            compress_yay0(file.as_slice())
+        };
+        let out_path = if let Some(output) = &args.output {
+            if single_file {
+                output.as_path().to_cow()
+            } else {
+                output.join(path.file_name().unwrap()).into_cow()
+            }
+        } else {
+            path.as_path().to_cow()
+        };
+        fs::write(out_path.as_ref(), data)
+            .with_context(|| format!("Failed to write '{}'", out_path.display()))?;
+    }
+    Ok(())
+}
+
+fn decompress(args: DecompressArgs) -> Result<()> {
+    let files = process_rsp(&args.files)?;
+    let single_file = files.len() == 1;
+    for path in files {
+        let data = {
+            let file = map_file_basic(&path)?;
+            decompress_yay0(file.as_slice())
+                .with_context(|| format!("Failed to decompress '{}' using Yay0", path.display()))?
+        };
+        let out_path = if let Some(output) = &args.output {
+            if single_file {
+                output.as_path().to_cow()
+            } else {
+                output.join(path.file_name().unwrap()).into_cow()
+            }
+        } else {
+            path.as_path().to_cow()
+        };
+        fs::write(out_path.as_ref(), data)
+            .with_context(|| format!("Failed to write '{}'", out_path.display()))?;
+    }
+    Ok(())
+}

@@ -4,7 +4,8 @@ use anyhow::{Context, Result};
 use argp::FromArgs;

 use crate::util::{
-    file::{decompress_reader, open_file, process_rsp},
+    file::{map_file_basic, process_rsp},
+    ncompress::{compress_yaz0, decompress_yaz0},
     IntoCow, ToCow,
 };

@@ -19,9 +20,23 @@ pub struct Args {
 #[derive(FromArgs, PartialEq, Debug)]
 #[argp(subcommand)]
 enum SubCommand {
+    Compress(CompressArgs),
     Decompress(DecompressArgs),
 }

+#[derive(FromArgs, PartialEq, Eq, Debug)]
+/// Compresses files using YAZ0.
+#[argp(subcommand, name = "compress")]
+pub struct CompressArgs {
+    #[argp(positional)]
+    /// Files to compress
+    files: Vec<PathBuf>,
+    #[argp(option, short = 'o')]
+    /// Output file (or directory, if multiple files are specified).
+    /// If not specified, compresses in-place.
+    output: Option<PathBuf>,
+}
+
 #[derive(FromArgs, PartialEq, Eq, Debug)]
 /// Decompresses YAZ0-compressed files.
 #[argp(subcommand, name = "decompress")]
@@ -37,15 +52,43 @@ pub struct DecompressArgs {

 pub fn run(args: Args) -> Result<()> {
     match args.command {
+        SubCommand::Compress(args) => compress(args),
         SubCommand::Decompress(args) => decompress(args),
     }
 }

-fn decompress(args: DecompressArgs) -> Result<()> {
+fn compress(args: CompressArgs) -> Result<()> {
     let files = process_rsp(&args.files)?;
     let single_file = files.len() == 1;
     for path in files {
-        let data = decompress_reader(&mut open_file(&path)?)?;
+        let data = {
+            let file = map_file_basic(&path)?;
+            compress_yaz0(file.as_slice())
+        };
+        let out_path = if let Some(output) = &args.output {
+            if single_file {
+                output.as_path().to_cow()
+            } else {
+                output.join(path.file_name().unwrap()).into_cow()
+            }
+        } else {
+            path.as_path().to_cow()
+        };
+        fs::write(out_path.as_ref(), data)
+            .with_context(|| format!("Failed to write '{}'", out_path.display()))?;
+    }
+    Ok(())
+}
+
+fn decompress(args: DecompressArgs) -> Result<()> {
+    let files = process_rsp(&args.files)?;
+    let single_file = files.len() == 1;
+    for path in files {
+        let data = {
+            let file = map_file_basic(&path)?;
+            decompress_yaz0(file.as_slice())
+                .with_context(|| format!("Failed to decompress '{}' using Yaz0", path.display()))?
+        };
         let out_path = if let Some(output) = &args.output {
             if single_file {
                 output.as_path().to_cow()

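Both subcommands resolve their output path the same way. Here is a standalone sketch of that rule; `resolve_out_path` is a local, illustrative helper, not part of the crate.

```rust
use std::path::{Path, PathBuf};

// With one input, -o names the output file; with several, -o is treated as a directory;
// without -o, the file is rewritten in place. Mirrors compress()/decompress() above.
fn resolve_out_path(input: &Path, output: Option<&Path>, single_file: bool) -> PathBuf {
    match output {
        Some(out) if single_file => out.to_path_buf(),
        Some(out) => out.join(input.file_name().expect("input has a file name")),
        None => input.to_path_buf(),
    }
}

fn main() {
    assert_eq!(
        resolve_out_path(Path::new("input.bin"), Some(Path::new("output.bin.yaz0")), true),
        PathBuf::from("output.bin.yaz0")
    );
    assert_eq!(
        resolve_out_path(Path::new("rels/a.yaz0"), Some(Path::new("rels")), false),
        PathBuf::from("rels/a.yaz0")
    );
    assert_eq!(resolve_out_path(Path::new("input.bin"), None, true), PathBuf::from("input.bin"));
}
```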
src/main.rs | 13

@@ -1,4 +1,4 @@
-use std::{env, ffi::OsStr, path::PathBuf, process::exit, str::FromStr};
+use std::{env, ffi::OsStr, fmt::Display, path::PathBuf, process::exit, str::FromStr};

 use anyhow::Error;
 use argp::{FromArgValue, FromArgs};
@@ -37,16 +37,15 @@ impl FromStr for LogLevel {
     }
 }

-impl ToString for LogLevel {
-    fn to_string(&self) -> String {
-        match self {
+impl Display for LogLevel {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_str(match self {
             LogLevel::Error => "error",
             LogLevel::Warn => "warn",
             LogLevel::Info => "info",
             LogLevel::Debug => "debug",
             LogLevel::Trace => "trace",
-        }
-        .to_string()
+        })
     }
 }

@@ -94,6 +93,7 @@ enum SubCommand {
     Rel(cmd::rel::Args),
     Rso(cmd::rso::Args),
     Shasum(cmd::shasum::Args),
+    Yay0(cmd::yay0::Args),
     Yaz0(cmd::yaz0::Args),
 }

@@ -166,6 +166,7 @@ fn main() {
        SubCommand::Rel(c_args) => cmd::rel::run(c_args),
        SubCommand::Rso(c_args) => cmd::rso::run(c_args),
        SubCommand::Shasum(c_args) => cmd::shasum::run(c_args),
+        SubCommand::Yay0(c_args) => cmd::yay0::run(c_args),
        SubCommand::Yaz0(c_args) => cmd::yaz0::run(c_args),
    });
    if let Err(e) = result {

@@ -270,7 +270,7 @@ impl ObjInfo {
                    existing.end,
                    split.unit
                );
-                existing.unit = split.unit.clone();
+                existing.unit.clone_from(&split.unit);
            }
        }
        self.add_split(section_index, new_start, ObjSplit {

@@ -1,7 +1,6 @@
 use std::{
     cmp::max,
     collections::BTreeMap,
-    convert::TryFrom,
     fmt::{Display, Formatter, Write},
     io::{BufRead, Cursor, Seek, SeekFrom},
     num::NonZeroU32,

@@ -186,7 +186,7 @@ where P: AsRef<Path> {
            continue;
        }
        if kind == ObjKind::Relocatable {
-            obj_name = file_name.clone();
+            obj_name.clone_from(&file_name);
        }
        let sections = match section_starts.entry(file_name.clone()) {
            indexmap::map::Entry::Occupied(_) => {
@@ -197,7 +197,7 @@ where P: AsRef<Path> {
                *index += 1;
                let new_name = format!("{}_{}", file_name, index);
                // log::info!("Renaming {} to {}", file_name, new_name);
-                file_name = new_name.clone();
+                file_name.clone_from(&new_name);
                match section_starts.entry(new_name.clone()) {
                    indexmap::map::Entry::Occupied(_) => {
                        bail!("Duplicate filename '{}'", new_name)

src/util/file.rs | 124

@@ -1,5 +1,4 @@
 use std::{
-    borrow::Cow,
     ffi::OsStr,
     fs::{DirBuilder, File, OpenOptions},
     io::{BufRead, BufReader, BufWriter, Cursor, Read, Seek, SeekFrom},
@@ -16,12 +15,11 @@ use xxhash_rust::xxh3::xxh3_64;
 use crate::{
     array_ref,
     util::{
+        ncompress::{decompress_yay0, decompress_yaz0, YAY0_MAGIC, YAZ0_MAGIC},
         rarc,
         rarc::{Node, RARC_MAGIC},
         take_seek::{TakeSeek, TakeSeekExt},
-        yaz0,
-        yaz0::YAZ0_MAGIC,
-        IntoCow, ToCow,
+        Bytes,
     },
 };

@@ -78,24 +76,36 @@ where P: AsRef<Path> {
     let mmap = unsafe { MmapOptions::new().map(&file) }
         .with_context(|| format!("Failed to mmap file: '{}'", base_path.display()))?;
     let (offset, len) = if let Some(sub_path) = sub_path {
-        let mut reader = Cursor::new(&*mmap);
         if sub_path.as_os_str() == OsStr::new("nlzss") {
             return Ok(FileEntry::Buffer(
-                nintendo_lz::decompress(&mut reader).map_err(|e| {
-                    anyhow!("Failed to decompress '{}' with NLZSS: {}", path.as_ref().display(), e)
-                })?,
+                nintendo_lz::decompress(&mut mmap.as_ref())
+                    .map_err(|e| {
+                        anyhow!(
+                            "Failed to decompress '{}' with NLZSS: {}",
+                            path.as_ref().display(),
+                            e
+                        )
+                    })?
+                    .into_boxed_slice(),
                 mtime,
             ));
         } else if sub_path.as_os_str() == OsStr::new("yaz0") {
             return Ok(FileEntry::Buffer(
-                yaz0::decompress_file(&mut reader).with_context(|| {
+                decompress_yaz0(mmap.as_ref()).with_context(|| {
                     format!("Failed to decompress '{}' with Yaz0", path.as_ref().display())
                 })?,
                 mtime,
             ));
+        } else if sub_path.as_os_str() == OsStr::new("yay0") {
+            return Ok(FileEntry::Buffer(
+                decompress_yay0(mmap.as_ref()).with_context(|| {
+                    format!("Failed to decompress '{}' with Yay0", path.as_ref().display())
+                })?,
+                mtime,
+            ));
         }

-        let rarc = rarc::RarcReader::new(&mut reader)
+        let rarc = rarc::RarcReader::new(&mut Cursor::new(mmap.as_ref()))
             .with_context(|| format!("Failed to open '{}' as RARC archive", base_path.display()))?;
         rarc.find_file(&sub_path)?.map(|(o, s)| (o, s as u64)).ok_or_else(|| {
             anyhow!("File '{}' not found in '{}'", sub_path.display(), base_path.display())
@@ -106,17 +116,43 @@ where P: AsRef<Path> {
     let map = MappedFile { mmap, mtime, offset, len };
     let buf = map.as_slice();
     // Auto-detect compression if there's a magic number.
-    if buf.len() > 4 && buf[0..4] == YAZ0_MAGIC {
-        return Ok(FileEntry::Buffer(
-            yaz0::decompress_file(&mut map.as_reader()).with_context(|| {
-                format!("Failed to decompress '{}' with Yaz0", path.as_ref().display())
-            })?,
-            mtime,
-        ));
+    if buf.len() > 4 {
+        match *array_ref!(buf, 0, 4) {
+            YAZ0_MAGIC => {
+                return Ok(FileEntry::Buffer(
+                    decompress_yaz0(buf).with_context(|| {
+                        format!("Failed to decompress '{}' with Yaz0", path.as_ref().display())
+                    })?,
+                    mtime,
+                ));
+            }
+            YAY0_MAGIC => {
+                return Ok(FileEntry::Buffer(
+                    decompress_yay0(buf).with_context(|| {
+                        format!("Failed to decompress '{}' with Yay0", path.as_ref().display())
+                    })?,
+                    mtime,
+                ));
+            }
+            _ => {}
+        }
     }
     Ok(FileEntry::MappedFile(map))
 }

+/// Opens a memory mapped file without decompression or archive handling.
+pub fn map_file_basic<P>(path: P) -> Result<FileEntry>
+where P: AsRef<Path> {
+    let path = path.as_ref();
+    let file =
+        File::open(path).with_context(|| format!("Failed to open file '{}'", path.display()))?;
+    let mtime = FileTime::from_last_modification_time(&file.metadata()?);
+    let mmap = unsafe { MmapOptions::new().map(&file) }
+        .with_context(|| format!("Failed to mmap file: '{}'", path.display()))?;
+    let len = mmap.len() as u64;
+    Ok(FileEntry::MappedFile(MappedFile { mmap, mtime, offset: 0, len }))
+}
+
 pub type OpenedFile = TakeSeek<File>;

 /// Opens a file (not memory mapped). No decompression is performed.
@@ -254,7 +290,7 @@ impl RarcIterator {
 }

 impl Iterator for RarcIterator {
-    type Item = Result<(PathBuf, Vec<u8>)>;
+    type Item = Result<(PathBuf, Box<[u8]>)>;

     fn next(&mut self) -> Option<Self::Item> {
         if self.index >= self.paths.len() {
@@ -275,7 +311,7 @@ impl Iterator for RarcIterator {
 /// A file entry, either a memory mapped file or an owned buffer.
 pub enum FileEntry {
     MappedFile(MappedFile),
-    Buffer(Vec<u8>, FileTime),
+    Buffer(Box<[u8]>, FileTime),
 }

 impl FileEntry {
@@ -283,14 +319,14 @@ impl FileEntry {
     pub fn as_reader(&self) -> Cursor<&[u8]> {
         match self {
             Self::MappedFile(file) => file.as_reader(),
-            Self::Buffer(slice, _) => Cursor::new(slice.as_slice()),
+            Self::Buffer(slice, _) => Cursor::new(slice),
         }
     }

     pub fn as_slice(&self) -> &[u8] {
         match self {
             Self::MappedFile(file) => file.as_slice(),
-            Self::Buffer(slice, _) => slice.as_slice(),
+            Self::Buffer(slice, _) => slice,
         }
     }

@@ -388,6 +424,7 @@ impl FileIterator {

         match *array_ref!(buf, 0, 4) {
             YAZ0_MAGIC => self.handle_yaz0(file, path),
+            YAY0_MAGIC => self.handle_yay0(file, path),
             RARC_MAGIC => self.handle_rarc(file, path),
             _ => Some(Ok((path, FileEntry::MappedFile(file)))),
         }
@@ -398,7 +435,18 @@ impl FileIterator {
         file: MappedFile,
         path: PathBuf,
     ) -> Option<Result<(PathBuf, FileEntry)>> {
-        Some(match yaz0::decompress_file(&mut file.as_reader()) {
+        Some(match decompress_yaz0(file.as_slice()) {
+            Ok(buf) => Ok((path, FileEntry::Buffer(buf, file.mtime))),
+            Err(e) => Err(e),
+        })
+    }
+
+    fn handle_yay0(
+        &mut self,
+        file: MappedFile,
+        path: PathBuf,
+    ) -> Option<Result<(PathBuf, FileEntry)>> {
+        Some(match decompress_yay0(file.as_slice()) {
             Ok(buf) => Ok((path, FileEntry::Buffer(buf, file.mtime))),
             Err(e) => Err(e),
         })
@@ -435,31 +483,15 @@ where P: AsRef<Path> {
     }
 }

-pub fn decompress_if_needed(buf: &[u8]) -> Result<Cow<[u8]>> {
-    Ok(if buf.len() > 4 && buf[0..4] == YAZ0_MAGIC {
-        yaz0::decompress_file(&mut Cursor::new(buf))?.into_cow()
-    } else {
-        buf.to_cow()
-    })
-}
-
-pub fn decompress_reader<R>(reader: &mut R) -> Result<Vec<u8>>
-where R: Read + Seek + ?Sized {
-    let mut magic = [0u8; 4];
-    if reader.read_exact(&mut magic).is_err() {
-        reader.seek(SeekFrom::Start(0))?;
-        let mut buf = vec![];
-        reader.read_to_end(&mut buf)?;
-        return Ok(buf);
-    }
-    Ok(if magic == YAZ0_MAGIC {
-        reader.seek(SeekFrom::Start(0))?;
-        yaz0::decompress_file(reader)?
-    } else {
-        let mut buf = magic.to_vec();
-        reader.read_to_end(&mut buf)?;
-        buf
-    })
+pub fn decompress_if_needed(buf: &[u8]) -> Result<Bytes> {
+    if buf.len() > 4 {
+        match *array_ref!(buf, 0, 4) {
+            YAZ0_MAGIC => return decompress_yaz0(buf).map(Bytes::Owned),
+            YAY0_MAGIC => return decompress_yay0(buf).map(Bytes::Owned),
+            _ => {}
+        }
+    }
+    Ok(Bytes::Borrowed(buf))
 }

 pub fn verify_hash(buf: &[u8], expected_str: &str) -> Result<()> {

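File loading now auto-detects both magics on the first four bytes before falling back to a plain memory mapping. A self-contained sketch of that dispatch is below; the `Detected` enum and `detect` function are illustrative only, while the magic constants mirror `src/util/ncompress.rs`.

```rust
const YAZ0_MAGIC: [u8; 4] = *b"Yaz0";
const YAY0_MAGIC: [u8; 4] = *b"Yay0";

enum Detected {
    Yaz0,
    Yay0,
    Raw,
}

// Inspect the first four bytes, as map_file does, and fall through to Raw otherwise.
fn detect(buf: &[u8]) -> Detected {
    if buf.len() > 4 {
        let magic: [u8; 4] = buf[0..4].try_into().unwrap();
        match magic {
            YAZ0_MAGIC => return Detected::Yaz0,
            YAY0_MAGIC => return Detected::Yay0,
            _ => {}
        }
    }
    Detected::Raw
}

fn main() {
    assert!(matches!(detect(b"Yaz0\0\0\0\0"), Detected::Yaz0));
    assert!(matches!(detect(b"Yay0\0\0\0\0"), Detected::Yay0));
    assert!(matches!(detect(b"ELF..."), Detected::Raw));
}
```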
@@ -478,7 +478,7 @@ impl StateMachine {
                return false;
            }
            if !e.unused {
-                last_unit = e.unit.clone();
+                last_unit.clone_from(&e.unit);
            }
            true
        });

@@ -12,6 +12,7 @@ pub mod elf;
 pub mod file;
 pub mod lcf;
 pub mod map;
+pub mod ncompress;
 pub mod nested;
 pub mod rarc;
 pub mod reader;
@@ -20,7 +21,6 @@ pub mod rso;
 pub mod signatures;
 pub mod split;
 pub mod take_seek;
-pub mod yaz0;

 #[inline]
 pub const fn align_up(value: u32, align: u32) -> u32 { (value + (align - 1)) & !(align - 1) }
@@ -74,3 +74,26 @@ where B: ToOwned + ?Sized
 {
     fn to_cow(&'a self) -> Cow<'a, B> { Cow::Borrowed(self) }
 }
+
+pub enum Bytes<'a> {
+    Borrowed(&'a [u8]),
+    Owned(Box<[u8]>),
+}
+
+impl<'a> Bytes<'a> {
+    pub fn into_owned(self) -> Box<[u8]> {
+        match self {
+            Bytes::Borrowed(s) => Box::from(s),
+            Bytes::Owned(b) => b,
+        }
+    }
+}
+
+impl<'a> AsRef<[u8]> for Bytes<'a> {
+    fn as_ref(&self) -> &[u8] {
+        match self {
+            Bytes::Borrowed(s) => s,
+            Bytes::Owned(b) => b,
+        }
+    }
+}

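`decompress_if_needed` in `src/util/file.rs` now returns this `Bytes` type so callers only allocate when the input was actually compressed. A standalone sketch of the borrow-or-own pattern follows; the enum here is a local copy for illustration, and `maybe_decompress` is hypothetical and only fakes the decompression step.

```rust
// Local stand-in for the Bytes type added to src/util in this commit.
enum Bytes<'a> {
    Borrowed(&'a [u8]),
    Owned(Box<[u8]>),
}

impl<'a> AsRef<[u8]> for Bytes<'a> {
    fn as_ref(&self) -> &[u8] {
        match self {
            Bytes::Borrowed(s) => s,
            Bytes::Owned(b) => b,
        }
    }
}

// Hypothetical caller: only allocates when the input is actually compressed.
fn maybe_decompress(buf: &[u8]) -> Bytes<'_> {
    if buf.starts_with(b"Yaz0") || buf.starts_with(b"Yay0") {
        // Real code would call decompress_yaz0/decompress_yay0 here.
        Bytes::Owned(buf.to_vec().into_boxed_slice())
    } else {
        Bytes::Borrowed(buf)
    }
}

fn main() {
    let plain = b"not compressed";
    let out = maybe_decompress(plain);
    assert_eq!(out.as_ref(), plain);
    println!("passed through {} bytes without copying", out.as_ref().len());
}
```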
@@ -0,0 +1,33 @@
+use anyhow::{anyhow, Result};
+use orthrus_ncompress::{yay0::Yay0, yaz0::Yaz0};
+
+pub const YAZ0_MAGIC: [u8; 4] = *b"Yaz0";
+pub const YAY0_MAGIC: [u8; 4] = *b"Yay0";
+
+/// Compresses the data into a new allocated buffer using Yaz0 compression.
+pub fn compress_yaz0(input: &[u8]) -> Box<[u8]> {
+    let mut output = vec![0u8; Yaz0::worst_possible_size(input.len())];
+    let size = Yaz0::compress_n64(input, output.as_mut_slice());
+    output.truncate(size);
+    output.into_boxed_slice()
+}
+
+/// Decompresses the data into a new allocated buffer. Assumes a Yaz0 header followed by
+/// compressed data.
+pub fn decompress_yaz0(input: &[u8]) -> Result<Box<[u8]>> {
+    Yaz0::decompress_from(input).map_err(|e| anyhow!(e))
+}
+
+/// Compresses the data into a new allocated buffer using Yay0 compression.
+pub fn compress_yay0(input: &[u8]) -> Box<[u8]> {
+    let mut output = vec![0u8; Yay0::worst_possible_size(input.len())];
+    let size = Yay0::compress_n64(input, output.as_mut_slice());
+    output.truncate(size);
+    output.into_boxed_slice()
+}
+
+/// Decompresses the data into a new allocated buffer. Assumes a Yay0 header followed by
+/// compressed data.
+pub fn decompress_yay0(input: &[u8]) -> Result<Box<[u8]>> {
+    Yay0::decompress_from(input).map_err(|e| anyhow!(e))
+}

@@ -902,7 +902,7 @@ pub fn split_obj(obj: &ObjInfo, module_name: Option<&str>) -> Result<Vec<ObjInfo
            split_obj.mw_comment = Some(MWComment::new(comment_version)?);
        }
    } else {
-        split_obj.mw_comment = obj.mw_comment.clone();
+        split_obj.mw_comment.clone_from(&obj.mw_comment);
    }
    split_obj.split_meta = Some(SplitMeta {
        generator: Some(format!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"))),
@@ -1187,7 +1187,7 @@ pub fn split_obj(obj: &ObjInfo, module_name: Option<&str>) -> Result<Vec<ObjInfo
    for (globalize_idx, new_name) in &globalize_symbols {
        if let Some(symbol_idx) = symbol_map[*globalize_idx] {
            let mut symbol = obj.symbols[symbol_idx].clone();
-            symbol.name = new_name.clone();
+            symbol.name.clone_from(new_name);
            if symbol.flags.is_local() {
                log::debug!("Globalizing {} in {}", symbol.name, obj.name);
                symbol.flags.set_scope(ObjSymbolScope::Global);

src/util/yaz0.rs | 101

@@ -1,101 +0,0 @@
-// Source: https://github.com/Julgodis/picori/blob/650da9f4fe6050b39b80d5360416591c748058d5/src/yaz0.rs
-// License: MIT
-// Modified to use `std::io::Read`/`Seek` and project's FromReader trait.
-use std::io::{Read, Seek};
-
-use anyhow::{ensure, Result};
-
-use crate::util::reader::{skip_bytes, struct_size, Endian, FromReader};
-
-pub const YAZ0_MAGIC: [u8; 4] = *b"Yaz0";
-
-/// Yaz0 header.
-pub struct Header {
-    /// Size of decompressed data.
-    pub decompressed_size: u32,
-}
-
-impl FromReader for Header {
-    type Args = ();
-
-    const STATIC_SIZE: usize = struct_size([
-        u32::STATIC_SIZE, // magic
-        u32::STATIC_SIZE, // decompressed_size
-        u32::STATIC_SIZE, // reserved0
-        u32::STATIC_SIZE, // reserved1
-    ]);
-
-    fn from_reader_args<R>(reader: &mut R, e: Endian, _args: Self::Args) -> std::io::Result<Self>
-    where R: Read + Seek + ?Sized {
-        let magic = <[u8; 4]>::from_reader(reader, e)?;
-        if magic != YAZ0_MAGIC {
-            return Err(std::io::Error::new(
-                std::io::ErrorKind::InvalidData,
-                format!("Invalid Yaz0 magic: {:?}", magic),
-            ));
-        }
-        let decompressed_size = u32::from_reader(reader, e)?;
-        skip_bytes::<8, _>(reader)?;
-        Ok(Self { decompressed_size })
-    }
-}
-
-/// Decompresses the data into a new allocated [`Vec`]. Assumes a Yaz0 header followed by
-/// compressed data.
-pub fn decompress_file<R>(input: &mut R) -> Result<Vec<u8>>
-where R: Read + Seek + ?Sized {
-    let header = Header::from_reader(input, Endian::Big)?;
-    decompress(input, header.decompressed_size as usize)
-}
-
-/// Decompresses the data into a new allocated [`Vec`]. `decompressed_size` can be determined
-/// by looking at the Yaz0 header [`Header`].
-pub fn decompress<R>(input: &mut R, decompressed_size: usize) -> Result<Vec<u8>>
-where R: Read + Seek + ?Sized {
-    let mut output = vec![0; decompressed_size];
-    decompress_into(input, output.as_mut_slice())?;
-    Ok(output)
-}
-
-/// Decompresses the data into the given buffer. The buffer must be large
-/// enough to hold the decompressed data.
-pub fn decompress_into<R>(input: &mut R, destination: &mut [u8]) -> Result<()>
-where R: Read + Seek + ?Sized {
-    let decompressed_size = destination.len();
-    let mut dest = 0;
-    let mut code = 0;
-    let mut code_bits = 0;
-
-    while dest < decompressed_size {
-        if code_bits == 0 {
-            code = u8::from_reader(input, Endian::Big)? as u32;
-            code_bits = 8;
-        }
-
-        if code & 0x80 != 0 {
-            destination[dest] = u8::from_reader(input, Endian::Big)?;
-            dest += 1;
-        } else {
-            let bytes = <[u8; 2]>::from_reader(input, Endian::Big)?;
-            let a = (bytes[0] & 0xf) as usize;
-            let b = (bytes[0] >> 4) as usize;
-            let offset = (a << 8) | (bytes[1] as usize);
-            let length = match b {
-                0 => (u8::from_reader(input, Endian::Big)? as usize) + 0x12,
-                length => length + 2,
-            };
-
-            ensure!(offset < dest, "Unexpected EOF");
-            let base = dest - (offset + 1);
-            for n in 0..length {
-                destination[dest] = destination[base + n];
-                dest += 1;
-            }
-        }
-
-        code <<= 1;
-        code_bits -= 1;
-    }
-
-    Ok(())
-}