Add conversion support & large refactor

This commit is contained in:
2024-11-22 00:01:26 -07:00
parent 374c6950b2
commit 3848edfe7b
55 changed files with 9110 additions and 3066 deletions

View File

@@ -16,31 +16,30 @@ categories = ["command-line-utilities", "parser-implementations"]
build = "build.rs"
[features]
asm = ["md-5/asm", "nod/asm", "sha1/asm"]
nightly = ["crc32fast/nightly"]
openssl = ["nod/openssl"]
openssl-vendored = ["nod/openssl-vendored"]
tracy = ["dep:tracing-tracy"]
[dependencies]
argp = "0.3"
base16ct = "0.2"
crc32fast = "1.4"
digest = "0.10"
digest = { workspace = true }
enable-ansi-support = "0.2"
hex = { version = "0.4", features = ["serde"] }
indicatif = "0.17"
itertools = "0.13"
log = "0.4"
md-5 = "0.10"
md-5 = { workspace = true }
nod = { version = "2.0.0-alpha", path = "../nod" }
quick-xml = { version = "0.36", features = ["serialize"] }
num_cpus = "1.16"
quick-xml = { version = "0.37", features = ["serialize"] }
serde = { version = "1.0", features = ["derive"] }
sha1 = "0.10"
sha1 = { workspace = true }
size = "0.4"
supports-color = "3.0"
tracing = "0.1"
tracing = { workspace = true }
tracing-attributes = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
xxhash-rust = { version = "0.8", features = ["xxh64"] }
zerocopy = { version = "0.8", features = ["alloc", "derive"] }
tracing-tracy = { version = "0.11", features = ["flush-on-exit"], optional = true }
zerocopy = { workspace = true }
zstd = "0.13"
[target.'cfg(target_env = "musl")'.dependencies]
@@ -48,7 +47,7 @@ mimalloc = "0.1"
[build-dependencies]
hex = { version = "0.4", features = ["serde"] }
quick-xml = { version = "0.36", features = ["serialize"] }
quick-xml = { version = "0.37", features = ["serialize"] }
serde = { version = "1.0", features = ["derive"] }
zerocopy = { version = "0.8", features = ["alloc", "derive"] }
zstd = "0.13"

View File

@@ -1,9 +1,13 @@
use std::path::PathBuf;
use std::{ffi::OsStr, path::PathBuf};
use argp::FromArgs;
use nod::OpenOptions;
use nod::{
common::Format,
read::{DiscOptions, PartitionEncryption},
write::FormatOptions,
};
use crate::util::{redump, shared::convert_and_verify};
use crate::util::{path_display, redump, shared::convert_and_verify};
#[derive(FromArgs, Debug)]
/// Converts a disc image to ISO.
@@ -27,6 +31,9 @@ pub struct Args {
#[argp(switch)]
/// encrypt Wii partition data
encrypt: bool,
#[argp(option, short = 'c')]
/// compression format and level (e.g. "zstd:19")
compress: Option<String>,
}
pub fn run(args: Args) -> nod::Result<()> {
@@ -34,15 +41,46 @@ pub fn run(args: Args) -> nod::Result<()> {
println!("Loading dat files...");
redump::load_dats(args.dat.iter().map(PathBuf::as_ref))?;
}
let options = OpenOptions {
let options = DiscOptions {
partition_encryption: match (args.decrypt, args.encrypt) {
(true, false) => nod::PartitionEncryptionMode::ForceDecrypted,
(false, true) => nod::PartitionEncryptionMode::ForceEncrypted,
(false, false) => nod::PartitionEncryptionMode::Original,
(true, false) => PartitionEncryption::ForceDecrypted,
(false, true) => PartitionEncryption::ForceEncrypted,
(false, false) => PartitionEncryption::Original,
(true, true) => {
return Err(nod::Error::Other("Both --decrypt and --encrypt specified".to_string()))
}
},
preloader_threads: 4,
};
convert_and_verify(&args.file, Some(&args.out), args.md5, &options)
let format = match args.out.extension() {
Some(ext)
if ext.eq_ignore_ascii_case(OsStr::new("iso"))
|| ext.eq_ignore_ascii_case(OsStr::new("gcm")) =>
{
Format::Iso
}
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("ciso")) => Format::Ciso,
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("gcz")) => Format::Gcz,
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("nfs")) => Format::Nfs,
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rvz")) => Format::Rvz,
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("wbfs")) => Format::Wbfs,
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("wia")) => Format::Wia,
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("tgc")) => Format::Tgc,
Some(_) => {
return Err(nod::Error::Other(format!(
"Unknown file extension: {}",
path_display(&args.out)
)))
}
None => Format::Iso,
};
let mut compression = if let Some(compress) = args.compress {
compress.parse()?
} else {
format.default_compression()
};
compression.validate_level()?;
let format_options =
FormatOptions { format, compression, block_size: format.default_block_size() };
convert_and_verify(&args.file, Some(&args.out), args.md5, &options, &format_options)
}

View File

@@ -1,24 +1,19 @@
use std::{
cmp::min,
collections::BTreeMap,
fmt,
io::Read,
path::{Path, PathBuf},
sync::{mpsc::sync_channel, Arc},
thread,
};
use argp::FromArgs;
use indicatif::{ProgressBar, ProgressState, ProgressStyle};
use nod::{Disc, OpenOptions, PartitionEncryptionMode, Result, ResultContext};
use zerocopy::FromZeros;
use crate::util::{
digest::{digest_thread, DigestResult},
redump,
redump::GameResult,
use nod::{
read::{DiscOptions, DiscReader, PartitionEncryption},
write::{DiscWriter, FormatOptions, ProcessOptions},
Result, ResultContext,
};
use crate::util::{redump, redump::GameResult};
#[derive(FromArgs, Debug)]
/// Commands related to DAT files.
#[argp(subcommand, name = "dat")]
@@ -165,9 +160,9 @@ struct DiscHashes {
}
fn load_disc(path: &Path, name: &str, full_verify: bool) -> Result<DiscHashes> {
let options = OpenOptions { partition_encryption: PartitionEncryptionMode::Original };
let mut disc = Disc::new_with_options(path, &options)?;
let disc_size = disc.disc_size();
let options =
DiscOptions { partition_encryption: PartitionEncryption::Original, preloader_threads: 4 };
let disc = DiscReader::new(path, &options)?;
if !full_verify {
let meta = disc.meta();
if let (Some(crc32), Some(sha1)) = (meta.crc32, meta.sha1) {
@@ -175,7 +170,8 @@ fn load_disc(path: &Path, name: &str, full_verify: bool) -> Result<DiscHashes> {
}
}
let pb = ProgressBar::new(disc_size).with_message(format!("{}:", name));
let disc_writer = DiscWriter::new(disc, &FormatOptions::default())?;
let pb = ProgressBar::new(disc_writer.progress_bound()).with_message(format!("{}:", name));
pb.set_style(ProgressStyle::with_template("{msg} {spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})")
.unwrap()
.with_key("eta", |state: &ProgressState, w: &mut dyn fmt::Write| {
@@ -183,47 +179,22 @@ fn load_disc(path: &Path, name: &str, full_verify: bool) -> Result<DiscHashes> {
})
.progress_chars("#>-"));
const BUFFER_SIZE: usize = 1015808; // LCM(0x8000, 0x7C00)
let digest_threads = [digest_thread::<crc32fast::Hasher>(), digest_thread::<sha1::Sha1>()];
let (w_tx, w_rx) = sync_channel::<Arc<[u8]>>(1);
let w_thread = thread::spawn(move || {
let mut total_written = 0u64;
while let Ok(data) = w_rx.recv() {
let mut total_written = 0u64;
let finalization = disc_writer.process(
|data, pos, _| {
total_written += data.len() as u64;
pb.set_position(total_written);
}
pb.finish_and_clear();
});
pb.set_position(pos);
Ok(())
},
&ProcessOptions {
processor_threads: 12, // TODO
digest_crc32: true,
digest_md5: false,
digest_sha1: true,
digest_xxh64: false,
},
)?;
pb.finish();
let mut total_read = 0u64;
let mut buf = <[u8]>::new_box_zeroed_with_elems(BUFFER_SIZE)?;
while total_read < disc_size {
let read = min(BUFFER_SIZE as u64, disc_size - total_read) as usize;
disc.read_exact(&mut buf[..read]).with_context(|| {
format!("Reading {} bytes at disc offset {}", BUFFER_SIZE, total_read)
})?;
let arc = Arc::<[u8]>::from(&buf[..read]);
for (tx, _) in &digest_threads {
tx.send(arc.clone()).map_err(|_| "Sending data to hash thread")?;
}
w_tx.send(arc).map_err(|_| "Sending data to write thread")?;
total_read += read as u64;
}
drop(w_tx); // Close channel
w_thread.join().unwrap();
let mut crc32 = None;
let mut sha1 = None;
for (tx, handle) in digest_threads {
drop(tx); // Close channel
match handle.join().unwrap() {
DigestResult::Crc32(v) => crc32 = Some(v),
DigestResult::Sha1(v) => sha1 = Some(v),
_ => {}
}
}
Ok(DiscHashes { crc32: crc32.unwrap(), sha1: sha1.unwrap() })
Ok(DiscHashes { crc32: finalization.crc32.unwrap(), sha1: finalization.sha1.unwrap() })
}

View File

@@ -1,5 +1,4 @@
use std::{
borrow::Cow,
fs,
fs::File,
io::{BufRead, Write},
@@ -7,15 +6,16 @@ use std::{
};
use argp::FromArgs;
use itertools::Itertools;
use nod::{
Disc, Fst, Node, OpenOptions, PartitionBase, PartitionKind, PartitionMeta, PartitionOptions,
common::PartitionKind,
disc::fst::{Fst, Node},
read::{DiscOptions, DiscReader, PartitionMeta, PartitionOptions, PartitionReader},
ResultContext,
};
use size::{Base, Size};
use zerocopy::IntoBytes;
use crate::util::{display, has_extension};
use crate::util::{has_extension, path_display};
#[derive(FromArgs, Debug)]
/// Extracts a disc image.
@@ -53,77 +53,57 @@ pub fn run(args: Args) -> nod::Result<()> {
} else {
output_dir = args.file.with_extension("");
}
let disc = Disc::new_with_options(&args.file, &OpenOptions::default())?;
let disc =
DiscReader::new(&args.file, &DiscOptions { preloader_threads: 4, ..Default::default() })?;
let header = disc.header();
let is_wii = header.is_wii();
let partition_options = PartitionOptions { validate_hashes: args.validate };
let options = PartitionOptions { validate_hashes: args.validate };
if let Some(partition) = args.partition {
if partition.eq_ignore_ascii_case("all") {
for info in disc.partitions() {
let mut out_dir = output_dir.clone();
out_dir.push(info.kind.dir_name().as_ref());
let mut partition =
disc.open_partition_with_options(info.index, &partition_options)?;
let mut partition = disc.open_partition(info.index, &options)?;
extract_partition(&disc, partition.as_mut(), &out_dir, is_wii, args.quiet)?;
}
} else if partition.eq_ignore_ascii_case("data") {
let mut partition =
disc.open_partition_kind_with_options(PartitionKind::Data, &partition_options)?;
let mut partition = disc.open_partition_kind(PartitionKind::Data, &options)?;
extract_partition(&disc, partition.as_mut(), &output_dir, is_wii, args.quiet)?;
} else if partition.eq_ignore_ascii_case("update") {
let mut partition =
disc.open_partition_kind_with_options(PartitionKind::Update, &partition_options)?;
let mut partition = disc.open_partition_kind(PartitionKind::Update, &options)?;
extract_partition(&disc, partition.as_mut(), &output_dir, is_wii, args.quiet)?;
} else if partition.eq_ignore_ascii_case("channel") {
let mut partition =
disc.open_partition_kind_with_options(PartitionKind::Channel, &partition_options)?;
let mut partition = disc.open_partition_kind(PartitionKind::Channel, &options)?;
extract_partition(&disc, partition.as_mut(), &output_dir, is_wii, args.quiet)?;
} else {
let idx = partition.parse::<usize>().map_err(|_| "Invalid partition index")?;
let mut partition = disc.open_partition_with_options(idx, &partition_options)?;
let mut partition = disc.open_partition(idx, &options)?;
extract_partition(&disc, partition.as_mut(), &output_dir, is_wii, args.quiet)?;
}
} else {
let mut partition =
disc.open_partition_kind_with_options(PartitionKind::Data, &partition_options)?;
let mut partition = disc.open_partition_kind(PartitionKind::Data, &options)?;
extract_partition(&disc, partition.as_mut(), &output_dir, is_wii, args.quiet)?;
}
Ok(())
}
fn extract_partition(
disc: &Disc,
partition: &mut dyn PartitionBase,
disc: &DiscReader,
partition: &mut dyn PartitionReader,
out_dir: &Path,
is_wii: bool,
quiet: bool,
) -> nod::Result<()> {
let meta = partition.meta()?;
extract_sys_files(disc, meta.as_ref(), out_dir, quiet)?;
extract_sys_files(disc, &meta, out_dir, quiet)?;
// Extract FST
let files_dir = out_dir.join("files");
fs::create_dir_all(&files_dir)
.with_context(|| format!("Creating directory {}", display(&files_dir)))?;
.with_context(|| format!("Creating directory {}", path_display(&files_dir)))?;
let fst = Fst::new(&meta.raw_fst)?;
let mut path_segments = Vec::<(Cow<str>, usize)>::new();
for (idx, node, name) in fst.iter() {
// Remove ended path segments
let mut new_size = 0;
for (_, end) in path_segments.iter() {
if *end == idx {
break;
}
new_size += 1;
}
path_segments.truncate(new_size);
// Add the new path segment
let end = if node.is_dir() { node.length() as usize } else { idx + 1 };
path_segments.push((name?, end));
let path = path_segments.iter().map(|(name, _)| name.as_ref()).join("/");
for (_, node, path) in fst.iter() {
if node.is_dir() {
fs::create_dir_all(files_dir.join(&path))
.with_context(|| format!("Creating directory {}", path))?;
@@ -135,14 +115,14 @@ fn extract_partition(
}
fn extract_sys_files(
disc: &Disc,
disc: &DiscReader,
data: &PartitionMeta,
out_dir: &Path,
quiet: bool,
) -> nod::Result<()> {
let sys_dir = out_dir.join("sys");
fs::create_dir_all(&sys_dir)
.with_context(|| format!("Creating directory {}", display(&sys_dir)))?;
.with_context(|| format!("Creating directory {}", path_display(&sys_dir)))?;
extract_file(data.raw_boot.as_ref(), &sys_dir.join("boot.bin"), quiet)?;
extract_file(data.raw_bi2.as_ref(), &sys_dir.join("bi2.bin"), quiet)?;
extract_file(data.raw_apploader.as_ref(), &sys_dir.join("apploader.img"), quiet)?;
@@ -154,7 +134,7 @@ fn extract_sys_files(
if disc_header.is_wii() {
let disc_dir = out_dir.join("disc");
fs::create_dir_all(&disc_dir)
.with_context(|| format!("Creating directory {}", display(&disc_dir)))?;
.with_context(|| format!("Creating directory {}", path_display(&disc_dir)))?;
extract_file(&disc_header.as_bytes()[..0x100], &disc_dir.join("header.bin"), quiet)?;
if let Some(region) = disc.region() {
extract_file(region, &disc_dir.join("region.bin"), quiet)?;
@@ -179,17 +159,18 @@ fn extract_file(bytes: &[u8], out_path: &Path, quiet: bool) -> nod::Result<()> {
if !quiet {
println!(
"Extracting {} (size: {})",
display(out_path),
path_display(out_path),
Size::from_bytes(bytes.len()).format().with_base(Base::Base10)
);
}
fs::write(out_path, bytes).with_context(|| format!("Writing file {}", display(out_path)))?;
fs::write(out_path, bytes)
.with_context(|| format!("Writing file {}", path_display(out_path)))?;
Ok(())
}
fn extract_node(
node: Node,
partition: &mut dyn PartitionBase,
partition: &mut dyn PartitionReader,
base_path: &Path,
name: &str,
is_wii: bool,
@@ -199,12 +180,12 @@ fn extract_node(
if !quiet {
println!(
"Extracting {} (size: {})",
display(&file_path),
path_display(&file_path),
Size::from_bytes(node.length()).format().with_base(Base::Base10)
);
}
let mut file = File::create(&file_path)
.with_context(|| format!("Creating file {}", display(&file_path)))?;
.with_context(|| format!("Creating file {}", path_display(&file_path)))?;
let mut r = partition.open_file(node).with_context(|| {
format!(
"Opening file {} on disc for reading (offset {}, size {})",
@@ -214,15 +195,17 @@ fn extract_node(
)
})?;
loop {
let buf =
r.fill_buf().with_context(|| format!("Extracting file {}", display(&file_path)))?;
let buf = r
.fill_buf()
.with_context(|| format!("Extracting file {}", path_display(&file_path)))?;
let len = buf.len();
if len == 0 {
break;
}
file.write_all(buf).with_context(|| format!("Writing file {}", display(&file_path)))?;
file.write_all(buf)
.with_context(|| format!("Writing file {}", path_display(&file_path)))?;
r.consume(len);
}
file.flush().with_context(|| format!("Flushing file {}", display(&file_path)))?;
file.flush().with_context(|| format!("Flushing file {}", path_display(&file_path)))?;
Ok(())
}

771
nodtool/src/cmd/gen.rs Normal file
View File

@@ -0,0 +1,771 @@
use std::{
fs,
fs::File,
io,
io::{BufRead, Read, Seek, SeekFrom, Write},
path::{Path, PathBuf},
str::from_utf8,
time::Instant,
};
use argp::FromArgs;
use nod::{
build::gc::{FileCallback, FileInfo, GCPartitionBuilder, PartitionOverrides},
common::PartitionKind,
disc::{
fst::Fst, DiscHeader, PartitionHeader, BI2_SIZE, BOOT_SIZE, MINI_DVD_SIZE, SECTOR_SIZE,
},
read::{
DiscOptions, DiscReader, PartitionEncryption, PartitionMeta, PartitionOptions,
PartitionReader,
},
util::lfg::LaggedFibonacci,
write::{DiscWriter, FormatOptions, ProcessOptions},
ResultContext,
};
use tracing::{debug, error, info, warn};
use zerocopy::{FromBytes, FromZeros};
use crate::util::{array_ref, redump, shared::convert_and_verify};
#[derive(FromArgs, Debug)]
/// Generates a disc image.
#[argp(subcommand, name = "gen")]
pub struct Args {
#[argp(positional)]
/// Path to extracted disc image
dir: PathBuf,
#[argp(positional)]
/// Output ISO file
out: PathBuf,
}
#[derive(FromArgs, Debug)]
/// Test disc image generation.
#[argp(subcommand, name = "gentest")]
pub struct TestArgs {
#[argp(positional)]
/// Path to original disc images
inputs: Vec<PathBuf>,
#[argp(option, short = 'o')]
/// Output ISO file
output: Option<PathBuf>,
#[argp(option, short = 't')]
/// Output original ISO for comparison
test_output: Option<PathBuf>,
}
fn read_fixed<const N: usize>(path: &Path) -> nod::Result<Box<[u8; N]>> {
let mut buf = <[u8; N]>::new_box_zeroed()?;
File::open(path)
.with_context(|| format!("Failed to open {}", path.display()))?
.read_exact(buf.as_mut())
.with_context(|| format!("Failed to read {}", path.display()))?;
Ok(buf)
}
fn read_all(path: &Path) -> nod::Result<Box<[u8]>> {
let mut buf = Vec::new();
File::open(path)
.with_context(|| format!("Failed to open {}", path.display()))?
.read_to_end(&mut buf)
.with_context(|| format!("Failed to read {}", path.display()))?;
Ok(buf.into_boxed_slice())
}
/// Placement of a single file within the generated disc image.
struct FileWriteInfo {
    // Identifier passed back to the write callback to locate the file's
    // contents (e.g. "sys/boot.bin" or an FST file path).
    name: String,
    // Absolute byte offset within the disc image where the file begins.
    offset: u64,
    // File length in bytes.
    length: u64,
}
/// Returns the size in bytes of the file at `path`.
fn file_size(path: &Path) -> nod::Result<u64> {
    let metadata = fs::metadata(path)
        .with_context(|| format!("Failed to get metadata for {}", path.display()))?;
    Ok(metadata.len())
}
fn check_file_size(path: &Path, expected: u64) -> nod::Result<()> {
let actual = file_size(path)?;
if actual != expected {
return Err(nod::Error::DiscFormat(format!(
"File {} has size {}, expected {}",
path.display(),
actual,
expected
)));
}
Ok(())
}
/// Generates a GameCube disc image from an extracted directory tree.
///
/// Expects the layout produced by extraction: `sys/boot.bin`, `sys/bi2.bin`,
/// `sys/apploader.img`, `sys/main.dol`, `sys/fst.bin` and a `files/` tree
/// matching the FST. Junk data between files is regenerated via
/// `write_files`, and the resulting CRC32 is looked up in the redump database.
pub fn run(args: Args) -> nod::Result<()> {
    let start = Instant::now();
    // Validate file sizes: boot.bin and bi2.bin are fixed-size; the apploader
    // and DOL sizes are recorded for the layout metadata below.
    let boot_path = args.dir.join("sys/boot.bin");
    check_file_size(&boot_path, BOOT_SIZE as u64)?;
    let bi2_path = args.dir.join("sys/bi2.bin");
    check_file_size(&bi2_path, BI2_SIZE as u64)?;
    let apploader_path = args.dir.join("sys/apploader.img");
    let apploader_size = file_size(&apploader_path)?;
    let dol_path = args.dir.join("sys/main.dol");
    let dol_size = file_size(&dol_path)?;
    // Build metadata: parse the disc header and partition header out of
    // boot.bin (they are laid out back to back), then parse the FST.
    let mut file_infos = Vec::new();
    let boot_data: Box<[u8; BOOT_SIZE]> = read_fixed(&boot_path)?;
    let header = DiscHeader::ref_from_bytes(&boot_data[..size_of::<DiscHeader>()])
        .expect("Failed to read disc header");
    // Junk ID may be overridden for discs whose junk data doesn't match the game ID.
    let junk_id = get_junk_id(header);
    let partition_header = PartitionHeader::ref_from_bytes(&boot_data[size_of::<DiscHeader>()..])
        .expect("Failed to read partition header");
    let fst_path = args.dir.join("sys/fst.bin");
    let fst_data = read_all(&fst_path)?;
    let fst = Fst::new(&fst_data).expect("Failed to parse FST");
    // System files occupy the start of the image in a fixed order.
    file_infos.push(FileWriteInfo {
        name: "sys/boot.bin".to_string(),
        offset: 0,
        length: BOOT_SIZE as u64,
    });
    file_infos.push(FileWriteInfo {
        name: "sys/bi2.bin".to_string(),
        offset: BOOT_SIZE as u64,
        length: BI2_SIZE as u64,
    });
    file_infos.push(FileWriteInfo {
        name: "sys/apploader.img".to_string(),
        offset: BOOT_SIZE as u64 + BI2_SIZE as u64,
        length: apploader_size,
    });
    let fst_offset = partition_header.fst_offset(false);
    let dol_offset = partition_header.dol_offset(false);
    if dol_offset < fst_offset {
        // DOL lives before the FST: write it as a standalone system file.
        file_infos.push(FileWriteInfo {
            name: "sys/main.dol".to_string(),
            offset: dol_offset,
            length: dol_size,
        });
    } else {
        // Otherwise the DOL must be one of the FST entries; verify it exists.
        let mut found = false;
        for (_, node, path) in fst.iter() {
            if !node.is_file() {
                continue;
            }
            let offset = node.offset(false);
            if offset == dol_offset {
                info!("Using DOL from FST: {}", path);
                found = true;
            }
        }
        if !found {
            return Err(nod::Error::DiscFormat("DOL not found in FST".to_string()));
        }
    }
    let fst_size = partition_header.fst_size(false);
    file_infos.push(FileWriteInfo {
        name: "sys/fst.bin".to_string(),
        offset: fst_offset,
        length: fst_size,
    });
    // Collect files: every FST file entry must exist on disk with the exact
    // size recorded in the FST; missing files are warned about and skipped.
    for (_, node, path) in fst.iter() {
        let length = node.length() as u64;
        if node.is_dir() {
            continue;
        }
        let mut file_path = args.dir.join("files");
        file_path.extend(path.split('/'));
        let metadata = match fs::metadata(&file_path) {
            Ok(meta) => meta,
            Err(e) if e.kind() == io::ErrorKind::NotFound => {
                warn!("File not found: {}", file_path.display());
                continue;
            }
            Err(e) => {
                return Err(e)
                    .context(format!("Failed to get metadata for {}", file_path.display()))
            }
        };
        if metadata.is_dir() {
            return Err(nod::Error::Other(format!("Path {} is a directory", file_path.display())));
        }
        if metadata.len() != length {
            return Err(nod::Error::Other(format!(
                "File {} has size {}, expected {}",
                file_path.display(),
                metadata.len(),
                length
            )));
        }
        let offset = node.offset(false);
        // NOTE(review): panics on non-UTF-8 paths; also this stores the full
        // `args.dir`-prefixed path, which the write callback below joins with
        // `args.dir` again — confirm this is intended for relative `args.dir`.
        file_infos.push(FileWriteInfo {
            name: file_path.into_os_string().into_string().unwrap(),
            offset,
            length,
        });
    }
    // Order by offset and reject overlapping layouts before writing.
    sort_files(&mut file_infos)?;
    // Write files: the callback supplies each file's bytes; boot.bin and
    // fst.bin come from the already-parsed buffers, everything else (including
    // apploader/DOL) is streamed from disk.
    let mut out = File::create(&args.out)
        .with_context(|| format!("Failed to create {}", args.out.display()))?;
    info!("Writing disc image to {} ({} files)", args.out.display(), file_infos.len());
    let crc = write_files(
        &mut out,
        &file_infos,
        header,
        partition_header,
        junk_id,
        |out, name| match name {
            "sys/boot.bin" => out.write_all(boot_data.as_ref()),
            "sys/fst.bin" => out.write_all(fst_data.as_ref()),
            path => {
                let mut in_file = File::open(args.dir.join(path))?;
                io::copy(&mut in_file, out).map(|_| ())
            }
        },
    )?;
    out.flush().context("Failed to flush output file")?;
    info!("Generated disc image in {:?} (CRC32: {:08X})", start.elapsed(), crc);
    // Report whether the generated image matches a known redump entry.
    let redump_entry = redump::find_by_crc32(crc);
    if let Some(entry) = &redump_entry {
        println!("Redump: {}", entry.name);
    } else {
        println!("Redump: Not found ❌");
    }
    Ok(())
}
/// Rounds `n` up to the next multiple of `N`. `N` must be a power of two for
/// the bit-mask trick to be correct.
#[inline]
fn align_up<const N: u64>(n: u64) -> u64 {
    let mask = N - 1;
    (n + mask) & !mask
}
/// GCM junk alignment: pad by 28 bytes, then round down to a 4-byte boundary.
/// Equivalent to `(n + 31) & !3`.
#[inline]
fn gcm_align(n: u64) -> u64 {
    (n + 31) / 4 * 4
}
/// Files can be located on the inner rim of the disc (closer to the center) or
/// the outer rim (closer to the edge). The inner rim is slower to read, so
/// developers often placed certain files on the outer rim. This scans for a
/// gap (larger than one sector) in the file offsets past the end of the FST,
/// which marks the inner/outer rim boundary needed to recreate junk data.
fn find_file_gap(file_infos: &[FileWriteInfo], fst_end: u64) -> Option<u64> {
    let mut prev_end = 0;
    for file in file_infos {
        // Only consider gaps after the FST, and only gaps wider than a sector.
        if prev_end > fst_end && file.offset > prev_end + SECTOR_SIZE as u64 {
            debug!("Found file gap at {:X} -> {:X}", prev_end, file.offset);
            return Some(prev_end);
        }
        prev_end = file.offset + file.length;
    }
    None
}
/// Writes all files (and regenerated junk data between them) to `w`, returning
/// the CRC32 of everything written.
///
/// `file_infos` must be sorted by offset and non-overlapping (see
/// `sort_files`). `callback` is invoked once per file to produce its bytes;
/// the output is wrapped in a `HashStream` so the CRC is accumulated as data
/// is written. When `junk_id` is `Some`, gaps between files past the end of
/// the FST are filled with Lagged-Fibonacci junk data, matching retail discs.
fn write_files<W>(
    w: &mut W,
    file_infos: &[FileWriteInfo],
    header: &DiscHeader,
    partition_header: &PartitionHeader,
    junk_id: Option<[u8; 4]>,
    mut callback: impl FnMut(&mut HashStream<&mut W>, &str) -> io::Result<()>,
) -> nod::Result<u32>
where
    W: Write + ?Sized,
{
    let fst_end = partition_header.fst_offset(false) + partition_header.fst_size(false);
    // Inner/outer rim boundary, used to pick junk alignment below.
    let file_gap = find_file_gap(file_infos, fst_end);
    let mut lfg = LaggedFibonacci::default();
    // NOTE(review): `W` is only `Write`, yet we seek below — HashStream
    // presumably emulates forward seeks; confirm against its definition.
    let mut out = HashStream::new(w);
    let mut last_end = 0;
    for info in file_infos {
        if let Some(junk_id) = junk_id {
            let aligned_end = gcm_align(last_end);
            if info.offset > aligned_end && last_end >= fst_end {
                // Junk data is aligned to 4 bytes with a 28 byte padding (aka `(n + 31) & !3`)
                // but a few cases don't have the 28 byte padding. Namely, the junk data after the
                // FST, and the junk data in between the inner and outer rim files. This attempts to
                // determine the correct alignment, but is not 100% accurate.
                let junk_start =
                    if file_gap == Some(last_end) { align_up::<4>(last_end) } else { aligned_end };
                debug!("Writing junk data at {:X} -> {:X}", junk_start, info.offset);
                write_junk_data(
                    &mut lfg,
                    &mut out,
                    junk_id,
                    header.disc_num,
                    junk_start,
                    info.offset,
                )?;
            }
        }
        debug!(
            "Writing file {} at {:X} -> {:X}",
            info.name,
            info.offset,
            info.offset + info.length
        );
        out.seek(SeekFrom::Start(info.offset))
            .with_context(|| format!("Seeking to offset {}", info.offset))?;
        if info.length > 0 {
            callback(&mut out, &info.name)
                .with_context(|| format!("Failed to write file {}", info.name))?;
            // Guard against callbacks that write more or less than declared.
            let cur = out.stream_position().context("Getting current position")?;
            if cur != info.offset + info.length {
                return Err(nod::Error::Other(format!(
                    "Wrote {} bytes, expected {}",
                    cur - info.offset,
                    info.length
                )));
            }
        }
        last_end = info.offset + info.length;
    }
    // Fill trailing junk data up to the mini-DVD size, if applicable.
    if let Some(junk_id) = junk_id {
        let aligned_end = gcm_align(last_end);
        if aligned_end < MINI_DVD_SIZE && aligned_end >= fst_end {
            debug!("Writing junk data at {:X} -> {:X}", aligned_end, MINI_DVD_SIZE);
            write_junk_data(
                &mut lfg,
                &mut out,
                junk_id,
                header.disc_num,
                aligned_end,
                MINI_DVD_SIZE,
            )?;
            last_end = MINI_DVD_SIZE;
        }
    }
    // Zero-pad whatever remains, then return the accumulated CRC32.
    out.write_zeroes(MINI_DVD_SIZE - last_end).context("Writing end of file")?;
    out.flush().context("Flushing output")?;
    Ok(out.finish())
}
fn write_junk_data<W>(
lfg: &mut LaggedFibonacci,
out: &mut W,
junk_id: [u8; 4],
disc_num: u8,
pos: u64,
end: u64,
) -> nod::Result<()>
where
W: Write + Seek + ?Sized,
{
out.seek(SeekFrom::Start(pos)).with_context(|| format!("Seeking to offset {}", pos))?;
lfg.write_sector_chunked(out, end - pos, junk_id, disc_num, pos)
.with_context(|| format!("Failed to write junk data at offset {}", pos))?;
Ok(())
}
/// Runs the in-memory generation test for each input image, reporting all
/// failures at the end and exiting with status 1 if any occurred.
pub fn run_test(args: TestArgs) -> nod::Result<()> {
    let mut failures = vec![];
    for input in args.inputs {
        if let Err(e) = in_memory_test(&input, args.output.as_deref(), args.test_output.as_deref())
        {
            error!("Failed to generate disc image: {:?}", e);
            failures.push((input, e));
        }
    }
    if failures.is_empty() {
        return Ok(());
    }
    error!("Failed to generate disc images:");
    for (input, e) in &failures {
        error!(" {}: {:?}", input.display(), e);
    }
    std::process::exit(1);
}
/// Some games (mainly beta and sample discs) have junk data that doesn't match
/// the game ID. Returns the game ID to use for junk generation when an
/// override is needed, keyed on (game ID, disc number, disc version).
fn get_override_junk_id(header: &DiscHeader) -> Option<[u8; 4]> {
    match (&header.game_id, header.disc_num, header.disc_version) {
        // Dairantou Smash Brothers DX (Japan) (Taikenban)
        (b"DALJ01", 0, 0) => Some(*b"DPIJ"),
        // 2002 FIFA World Cup (Japan) (Jitsuen-you Sample)
        (b"DFIJ13", 0, 0) => Some(*b"GFIJ"),
        // Disney's Magical Park (Japan) (Jitsuen-you Sample)
        (b"DMTJ18", 0, 0) => Some(*b"GMTJ"),
        // Star Wars - Rogue Squadron II (Japan) (Jitsuen-you Sample)
        (b"DSWJ13", 0, 0) => Some(*b"GSWJ"),
        // Homeland (Japan) (Rev 1) [T-En by DOL-Translations v20230606] [i]
        (b"GHEE91", 0, 1) => Some(*b"GHEJ"),
        // Kururin Squash! (Japan) [T-En by DOL-Translations v2.0.0]
        (b"GKQE01", 0, 0) => Some(*b"GKQJ"),
        // Lupin III - Lost Treasure Under the Sea (Japan) (Disc 1) [T-En by DOL-Translations v0.5.0] [i] [n]
        (b"GL3EE8", 0, 0) => Some(*b"GL3J"),
        // Lupin III - Lost Treasure Under the Sea (Japan) (Disc 2) [T-En by DOL-Translations v0.5.0] [i] [n]
        (b"GL3EE8", 1, 0) => Some(*b"GL3J"),
        // Taxi 3 - The Game (France) [T-En by DOL-Translations v20230801] [n]
        (b"GXQP41", 0, 0) => Some(*b"GXQF"),
        // Donkey Konga 3 - Tabehoudai! Haru Mogitate 50-kyoku (Japan) [T-En by DOL-Translations v0.1.1] [i]
        (b"GY3E01", 0, 0) => Some(*b"GY3J"),
        // Need for Speed - Underground (Europe) (Alt)
        (b"PZHP69", 0, 0) => Some(*b"GNDP"),
        _ => None,
    }
}
/// Returns the 4-byte ID used to seed junk data generation: the override ID
/// when one exists for this disc, otherwise the first four bytes of the
/// game ID.
fn get_junk_id(header: &DiscHeader) -> Option<[u8; 4]> {
    let id = if let Some(id) = get_override_junk_id(header) {
        info!("Using override junk ID: {:X?}", from_utf8(&id).unwrap());
        id
    } else {
        *array_ref!(header.game_id, 0, 4)
    };
    Some(id)
}
/// Sorts `files` by (offset, length) and verifies no two adjacent entries
/// overlap, returning an error describing the first overlapping pair.
fn sort_files(files: &mut [FileWriteInfo]) -> nod::Result<()> {
    files.sort_unstable_by_key(|info| (info.offset, info.length));
    for pair in files.windows(2) {
        let (prev, cur) = (&pair[0], &pair[1]);
        if cur.offset < prev.offset + prev.length {
            return Err(nod::Error::Other(format!(
                "File {} ({:#X}-{:#X}) overlaps with {} ({:#X}-{:#X})",
                cur.name,
                cur.offset,
                cur.offset + cur.length,
                prev.name,
                prev.offset,
                prev.offset + prev.length
            )));
        }
    }
    Ok(())
}
/// Round-trip test: reads a disc image, rebuilds its data partition from the
/// parsed FST and system files entirely in memory, and verifies the rebuilt
/// image's CRC32 matches the original's redump CRC32.
///
/// If `output` is set, the rebuilt image is also written there; if the CRCs
/// mismatch and `test_output` is set, the original is re-converted there for
/// comparison.
fn in_memory_test(
    path: &Path,
    output: Option<&Path>,
    test_output: Option<&Path>,
) -> nod::Result<()> {
    let start = Instant::now();
    info!("Opening disc image '{}'", path.display());
    let disc = DiscReader::new(path, &DiscOptions::default())?;
    info!(
        "Opened disc image '{}' (Disc {}, Revision {})",
        disc.header().game_title_str(),
        disc.header().disc_num + 1,
        disc.header().disc_version
    );
    // The original CRC32 is required as the comparison baseline.
    let Some(orig_crc32) = disc.meta().crc32 else {
        return Err(nod::Error::Other("CRC32 not found in disc metadata".to_string()));
    };
    let mut partition =
        disc.open_partition_kind(PartitionKind::Data, &PartitionOptions::default())?;
    let meta = partition.meta()?;
    // Build metadata: same system-file layout as `run`, but sourced from the
    // already-open partition's metadata instead of extracted files.
    let mut file_infos = Vec::new();
    let header = meta.header();
    let junk_id = get_junk_id(header);
    let partition_header = meta.partition_header();
    let fst = meta.fst()?;
    file_infos.push(FileWriteInfo {
        name: "sys/boot.bin".to_string(),
        offset: 0,
        length: BOOT_SIZE as u64,
    });
    file_infos.push(FileWriteInfo {
        name: "sys/bi2.bin".to_string(),
        offset: BOOT_SIZE as u64,
        length: BI2_SIZE as u64,
    });
    file_infos.push(FileWriteInfo {
        name: "sys/apploader.img".to_string(),
        offset: BOOT_SIZE as u64 + BI2_SIZE as u64,
        length: meta.raw_apploader.len() as u64,
    });
    let fst_offset = partition_header.fst_offset(false);
    let dol_offset = partition_header.dol_offset(false);
    if dol_offset < fst_offset {
        // DOL precedes the FST: treat it as a standalone system file.
        file_infos.push(FileWriteInfo {
            name: "sys/main.dol".to_string(),
            offset: dol_offset,
            length: meta.raw_dol.len() as u64,
        });
    } else {
        // Otherwise the DOL must appear among the FST file entries.
        let mut found = false;
        for (_, node, name) in fst.iter() {
            if !node.is_file() {
                continue;
            }
            let offset = node.offset(false);
            if offset == dol_offset {
                info!("Using DOL from FST: {}", name);
                found = true;
            }
        }
        if !found {
            return Err(nod::Error::Other("DOL not found in FST".to_string()));
        }
    }
    let fst_size = partition_header.fst_size(false);
    file_infos.push(FileWriteInfo {
        name: "sys/fst.bin".to_string(),
        offset: fst_offset,
        length: fst_size,
    });
    // Collect files from the FST into the partition builder.
    let mut builder = GCPartitionBuilder::new(false, PartitionOverrides::default());
    for (idx, node, path) in fst.iter() {
        let offset = node.offset(false);
        let length = node.length() as u64;
        if node.is_dir() {
            // For directories, length holds the FST end index; a directory
            // whose end is its own index + 1 contains no entries.
            if length as usize == idx + 1 {
                println!("Empty directory: {}", path);
            }
            continue;
        }
        if let Some(junk_id) = junk_id {
            // Some games have junk data in place of files that were removed from the disc layout.
            // This is a naive check to skip these files in our disc layout so that the junk data
            // alignment is correct. This misses some cases where the junk data starts in the middle
            // of a file, but handling those cases would require a more complex solution.
            if length > 4
                && check_junk_data(partition.as_mut(), offset, length, junk_id, header.disc_num)?
            {
                warn!("Skipping junk data file: {} (size {})", path, length);
                builder.add_junk_file(path);
                continue;
            }
        }
        builder.add_file(FileInfo {
            name: path,
            size: length,
            offset: Some(offset),
            alignment: None,
        })?;
    }
    // Write files: register the system files with the builder as well.
    // NOTE(review): the logged count covers only system files, not FST files.
    info!("Writing disc image with {} files", file_infos.len());
    for file in &file_infos {
        builder.add_file(FileInfo {
            name: file.name.clone(),
            size: file.length,
            offset: Some(file.offset),
            alignment: None,
        })?;
    }
    // Build the partition; the callback serves file contents from the parsed
    // metadata buffers or by streaming from the open partition.
    let writer = builder.build(|out: &mut dyn Write, name: &str| match name {
        "sys/boot.bin" => out.write_all(meta.raw_boot.as_ref()),
        "sys/bi2.bin" => out.write_all(meta.raw_bi2.as_ref()),
        "sys/fst.bin" => out.write_all(meta.raw_fst.as_ref()),
        "sys/apploader.img" => out.write_all(meta.raw_apploader.as_ref()),
        "sys/main.dol" => out.write_all(meta.raw_dol.as_ref()),
        path => {
            let Some((_, node)) = fst.find(path) else {
                return Err(io::Error::new(
                    io::ErrorKind::NotFound,
                    format!("File not found: {}", path),
                ));
            };
            let mut file = partition.open_file(node)?;
            buf_copy(&mut file, out)?;
            Ok(())
        }
    })?;
    // Re-read the rebuilt partition through the normal reader/writer pipeline
    // and accumulate its CRC32.
    let disc_stream = writer.into_stream(PartitionFileReader { partition, meta })?;
    let disc_reader = DiscReader::new_stream(disc_stream, &DiscOptions::default())?;
    let disc_writer = DiscWriter::new(disc_reader, &FormatOptions::default())?;
    let process_options = ProcessOptions { digest_crc32: true, ..Default::default() };
    let finalization = if let Some(output) = output {
        let mut out = File::create(output)
            .with_context(|| format!("Failed to create {}", output.display()))?;
        let finalization =
            disc_writer.process(|data, _, _| out.write_all(data.as_ref()), &process_options)?;
        out.flush().context("Failed to flush output file")?;
        finalization
    } else {
        // No output requested: process purely for the digest.
        disc_writer.process(|_, _, _| Ok(()), &process_options)?
    };
    let crc = finalization.crc32.unwrap();
    info!("Generated disc image in {:?} (CRC32: {:08X})", start.elapsed(), crc);
    if crc != orig_crc32 {
        // On mismatch, optionally dump the original for offline comparison.
        if let Some(test_output) = test_output {
            let open_options = DiscOptions {
                partition_encryption: PartitionEncryption::Original,
                preloader_threads: 4,
            };
            convert_and_verify(
                path,
                Some(test_output),
                false,
                &open_options,
                &FormatOptions::default(),
            )?;
        }
        return Err(nod::Error::Other(format!(
            "CRC32 mismatch: {:08X} != {:08X}",
            crc, orig_crc32
        )));
    }
    Ok(())
}
/// File-content provider used while rebuilding a disc image: system files are
/// served from the cached partition metadata, while regular files are looked
/// up in the FST and streamed back out of the source partition on demand.
#[derive(Clone)]
struct PartitionFileReader {
    // Reader for the source partition; used to open and read FST file nodes.
    partition: Box<dyn PartitionReader>,
    // Cached partition metadata holding the raw system file images
    // (sys/boot.bin, sys/bi2.bin, sys/fst.bin, sys/apploader.img, sys/main.dol).
    meta: PartitionMeta,
}
impl FileCallback for PartitionFileReader {
    /// Fills `out` with up to `out.len()` bytes of file `name` starting at
    /// `offset`.
    ///
    /// System files are copied from the cached partition metadata; any other
    /// path is resolved through the FST and read from the source partition.
    ///
    /// # Errors
    /// Returns `NotFound` if the path is not in the FST, `InvalidInput` if
    /// `offset` does not fit in `usize`, plus any underlying I/O error.
    fn read_file(&mut self, out: &mut [u8], name: &str, offset: u64) -> io::Result<()> {
        let data: &[u8] = match name {
            "sys/boot.bin" => self.meta.raw_boot.as_ref(),
            "sys/bi2.bin" => self.meta.raw_bi2.as_ref(),
            "sys/fst.bin" => self.meta.raw_fst.as_ref(),
            "sys/apploader.img" => self.meta.raw_apploader.as_ref(),
            "sys/main.dol" => self.meta.raw_dol.as_ref(),
            path => {
                let fst = self.meta.fst().map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
                let Some((_, node)) = fst.find(path) else {
                    return Err(io::Error::new(
                        io::ErrorKind::NotFound,
                        format!("File not found: {}", path),
                    ));
                };
                let mut file = self.partition.open_file(node)?;
                file.seek(SeekFrom::Start(offset))?;
                file.read_exact(out)?;
                return Ok(());
            }
        };
        // `offset as usize` could silently truncate on 32-bit targets; check it.
        let offset = usize::try_from(offset).map_err(|_| {
            io::Error::new(io::ErrorKind::InvalidInput, "offset exceeds usize range")
        })?;
        // Previously `data.len() - offset` could underflow (and panic) when the
        // requested offset was past the end of the cached data; a saturating
        // subtraction turns a past-EOF read into a zero-length copy instead.
        let len = out.len().min(data.len().saturating_sub(offset));
        out[..len].copy_from_slice(&data[offset..offset + len]);
        Ok(())
    }
}
/// Some disc files still exist in the FST, but were removed from the disc layout. These files had
/// junk data written in their place, since the disc creator did not know about them. To match the
/// original disc, we need to check for these files and remove them from our disc layout as well.
/// This ensures that the junk data alignment is correct.
fn check_junk_data(
partition: &mut dyn PartitionReader,
offset: u64,
len: u64,
junk_id: [u8; 4],
disc_num: u8,
) -> nod::Result<bool> {
if len == 0 {
return Ok(false);
}
partition
.seek(SeekFrom::Start(offset))
.with_context(|| format!("Seeking to offset {}", offset))?;
let mut lfg = LaggedFibonacci::default();
let mut pos = offset;
let mut remaining = len;
while remaining > 0 {
let file_buf = partition
.fill_buf()
.with_context(|| format!("Failed to read disc file at offset {}", offset))?;
let read_len = (file_buf.len() as u64).min(remaining) as usize;
if !lfg.check_sector_chunked(&file_buf[..read_len], junk_id, disc_num, pos) {
return Ok(false);
}
pos += read_len as u64;
remaining -= read_len as u64;
partition.consume(read_len);
}
Ok(true)
}
/// Write adapter that folds every byte into a CRC32 hasher before handing it
/// to the inner writer, while tracking the current stream position so that
/// forward seeks can be emulated by writing zeroes.
pub struct HashStream<W> {
    // Underlying writer that receives all written data.
    inner: W,
    // Running CRC32 over everything written so far.
    hasher: crc32fast::Hasher,
    // Number of bytes written; also serves as the seek position.
    position: u64,
}
impl<W> HashStream<W> {
pub fn new(inner: W) -> Self { Self { inner, hasher: Default::default(), position: 0 } }
pub fn finish(self) -> u32 { self.hasher.finalize() }
}
impl<W> HashStream<W>
where W: Write
{
    /// Writes `len` zero bytes through the stream (hashing them as well),
    /// one sector-sized chunk at a time to avoid a large allocation.
    pub fn write_zeroes(&mut self, len: u64) -> io::Result<()> {
        let mut remaining = len;
        while remaining > 0 {
            let chunk = remaining.min(SECTOR_SIZE as u64) as usize;
            self.write_all(&ZERO_SECTOR[..chunk])?;
            remaining -= chunk as u64;
        }
        Ok(())
    }
}
impl<W> Write for HashStream<W>
where W: Write
{
    /// Forwards `buf` to the inner writer, hashing only the bytes it actually
    /// accepted.
    ///
    /// The previous version hashed all of `buf` and advanced `position` by
    /// `buf.len()` before calling `inner.write`, so a short write left the
    /// CRC32 and position out of sync with the real output (and `write_all`
    /// retries would hash the tail bytes twice).
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let written = self.inner.write(buf)?;
        self.hasher.update(&buf[..written]);
        self.position += written as u64;
        Ok(written)
    }

    fn flush(&mut self) -> io::Result<()> { self.inner.flush() }
}
/// One sector of zero bytes; source buffer for `HashStream::write_zeroes`.
const ZERO_SECTOR: [u8; SECTOR_SIZE] = [0; SECTOR_SIZE];
impl<W> Seek for HashStream<W>
where W: Write
{
    /// Emulates seeking on a write-only stream: seeking forward writes zeroes
    /// up to the target, seeking backwards is an error, and `SeekFrom::End`
    /// is unsupported because the final length is unknown.
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        let target = match pos {
            SeekFrom::Start(offset) => offset,
            SeekFrom::Current(delta) => self.position.saturating_add_signed(delta),
            SeekFrom::End(_) => {
                return Err(io::Error::new(
                    io::ErrorKind::Unsupported,
                    "HashStream: SeekFrom::End is not supported".to_string(),
                ));
            }
        };
        // A target before the current position cannot be represented; anything
        // at or past it is reached by zero-filling the gap.
        match target.checked_sub(self.position) {
            Some(gap) => {
                self.write_zeroes(gap)?;
                Ok(target)
            }
            None => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "HashStream: Cannot seek backwards".to_string(),
            )),
        }
    }

    fn stream_position(&mut self) -> io::Result<u64> { Ok(self.position) }
}
/// Streams the entire contents of `reader` into `writer` using the reader's
/// own internal buffer (no intermediate allocation), returning the total
/// number of bytes transferred.
fn buf_copy<R, W>(reader: &mut R, writer: &mut W) -> io::Result<u64>
where
    R: BufRead + ?Sized,
    W: Write + ?Sized,
{
    let mut total = 0u64;
    loop {
        let chunk = reader.fill_buf()?;
        if chunk.is_empty() {
            // EOF: fill_buf yielded no data.
            return Ok(total);
        }
        writer.write_all(chunk)?;
        let n = chunk.len();
        reader.consume(n);
        total += n as u64;
    }
}

View File

@@ -1,10 +1,14 @@
use std::path::{Path, PathBuf};
use argp::FromArgs;
use nod::{Disc, SECTOR_SIZE};
use nod::{
disc::SECTOR_SIZE,
read::{DiscOptions, DiscReader, PartitionOptions},
};
use size::Size;
use tracing::info;
use crate::util::{display, shared::print_header};
use crate::util::{path_display, shared::print_header};
#[derive(FromArgs, Debug)]
/// Displays information about disc images.
@@ -23,15 +27,15 @@ pub fn run(args: Args) -> nod::Result<()> {
}
fn info_file(path: &Path) -> nod::Result<()> {
log::info!("Loading {}", display(path));
let disc = Disc::new(path)?;
info!("Loading {}", path_display(path));
let disc = DiscReader::new(path, &DiscOptions::default())?;
let header = disc.header();
let meta = disc.meta();
print_header(header, &meta);
if header.is_wii() {
for (idx, info) in disc.partitions().iter().enumerate() {
let mut partition = disc.open_partition(idx)?;
let mut partition = disc.open_partition(idx, &PartitionOptions::default())?;
let meta = partition.meta()?;
println!();

View File

@@ -1,5 +1,6 @@
pub mod convert;
pub mod dat;
pub mod extract;
pub mod gen;
pub mod info;
pub mod verify;

View File

@@ -1,7 +1,10 @@
use std::path::PathBuf;
use argp::FromArgs;
use nod::{OpenOptions, PartitionEncryptionMode};
use nod::{
read::{DiscOptions, PartitionEncryption},
write::FormatOptions,
};
use crate::util::{redump, shared::convert_and_verify};
@@ -18,6 +21,12 @@ pub struct Args {
#[argp(option, short = 'd')]
/// path to DAT file(s) for verification (optional)
dat: Vec<PathBuf>,
#[argp(switch)]
/// decrypt Wii partition data
decrypt: bool,
#[argp(switch)]
/// encrypt Wii partition data
encrypt: bool,
}
pub fn run(args: Args) -> nod::Result<()> {
@@ -25,9 +34,21 @@ pub fn run(args: Args) -> nod::Result<()> {
println!("Loading dat files...");
redump::load_dats(args.dat.iter().map(PathBuf::as_ref))?;
}
let options = OpenOptions { partition_encryption: PartitionEncryptionMode::Original };
let cpus = num_cpus::get();
let options = DiscOptions {
partition_encryption: match (args.decrypt, args.encrypt) {
(true, false) => PartitionEncryption::ForceDecrypted,
(false, true) => PartitionEncryption::ForceEncrypted,
(false, false) => PartitionEncryption::Original,
(true, true) => {
return Err(nod::Error::Other("Both --decrypt and --encrypt specified".to_string()))
}
},
preloader_threads: 4.min(cpus),
};
let format_options = FormatOptions::default();
for file in &args.file {
convert_and_verify(file, None, args.md5, &options)?;
convert_and_verify(file, None, args.md5, &options, &format_options)?;
println!();
}
Ok(())

View File

@@ -9,19 +9,23 @@ pub use nod;
#[derive(FromArgs, Debug)]
#[argp(subcommand)]
pub enum SubCommand {
Dat(cmd::dat::Args),
Info(cmd::info::Args),
Extract(cmd::extract::Args),
Convert(cmd::convert::Args),
Dat(cmd::dat::Args),
Extract(cmd::extract::Args),
// Gen(cmd::gen::Args),
GenTest(cmd::gen::TestArgs),
Info(cmd::info::Args),
Verify(cmd::verify::Args),
}
pub fn run(command: SubCommand) -> nod::Result<()> {
match command {
SubCommand::Dat(c_args) => cmd::dat::run(c_args),
SubCommand::Info(c_args) => cmd::info::run(c_args),
SubCommand::Convert(c_args) => cmd::convert::run(c_args),
SubCommand::Dat(c_args) => cmd::dat::run(c_args),
SubCommand::Extract(c_args) => cmd::extract::run(c_args),
// SubCommand::Gen(c_args) => cmd::gen::run(c_args),
SubCommand::GenTest(c_args) => cmd::gen::run_test(c_args),
SubCommand::Info(c_args) => cmd::info::run(c_args),
SubCommand::Verify(c_args) => cmd::verify::run(c_args),
}
}

View File

@@ -12,8 +12,6 @@ use argp::{FromArgValue, FromArgs};
use enable_ansi_support::enable_ansi_support;
use nodtool::{run, SubCommand};
use supports_color::Stream;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::EnvFilter;
#[derive(FromArgs, Debug)]
/// Tool for reading GameCube and Wii disc images.
@@ -99,27 +97,43 @@ fn main() {
supports_color::on(Stream::Stdout).is_some_and(|c| c.has_basic)
};
let format =
tracing_subscriber::fmt::format().with_ansi(use_colors).with_target(false).without_time();
let builder = tracing_subscriber::fmt().event_format(format);
if let Some(level) = args.log_level {
builder
.with_max_level(match level {
LogLevel::Error => LevelFilter::ERROR,
LogLevel::Warn => LevelFilter::WARN,
LogLevel::Info => LevelFilter::INFO,
LogLevel::Debug => LevelFilter::DEBUG,
LogLevel::Trace => LevelFilter::TRACE,
})
.init();
} else {
builder
.with_env_filter(
EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy(),
)
.init();
#[cfg(feature = "tracy")]
{
use tracing_subscriber::layer::SubscriberExt;
tracing::subscriber::set_global_default(
tracing_subscriber::registry().with(tracing_tracy::TracyLayer::default()),
)
.expect("setup tracy layer");
}
#[cfg(not(feature = "tracy"))]
{
use tracing::level_filters::LevelFilter;
use tracing_subscriber::EnvFilter;
let format = tracing_subscriber::fmt::format()
.with_ansi(use_colors)
.with_target(false)
.without_time();
let builder = tracing_subscriber::fmt().event_format(format);
if let Some(level) = args.log_level {
builder
.with_max_level(match level {
LogLevel::Error => LevelFilter::ERROR,
LogLevel::Warn => LevelFilter::WARN,
LogLevel::Info => LevelFilter::INFO,
LogLevel::Debug => LevelFilter::DEBUG,
LogLevel::Trace => LevelFilter::TRACE,
})
.init();
} else {
builder
.with_env_filter(
EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy(),
)
.init();
}
}
let mut result = Ok(());

View File

@@ -1,29 +1,4 @@
use std::{
fmt,
sync::{
mpsc::{sync_channel, SyncSender},
Arc,
},
thread,
thread::JoinHandle,
};
use digest::{Digest, Output};
pub type DigestThread = (SyncSender<Arc<[u8]>>, JoinHandle<DigestResult>);
pub fn digest_thread<H>() -> DigestThread
where H: Hasher + Send + 'static {
let (tx, rx) = sync_channel::<Arc<[u8]>>(1);
let handle = thread::spawn(move || {
let mut hasher = H::new();
while let Ok(data) = rx.recv() {
hasher.update(data.as_ref());
}
hasher.finalize()
});
(tx, handle)
}
use std::fmt;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DigestResult {
@@ -48,49 +23,9 @@ impl fmt::Display for DigestResult {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
DigestResult::Crc32(crc) => write!(f, "{:08x}", crc),
DigestResult::Md5(md5) => write!(f, "{:032x}", <Output<md5::Md5>>::from(*md5)),
DigestResult::Sha1(sha1) => write!(f, "{:040x}", <Output<sha1::Sha1>>::from(*sha1)),
DigestResult::Md5(md5) => write!(f, "{}", hex::encode(md5)),
DigestResult::Sha1(sha1) => write!(f, "{}", hex::encode(sha1)),
DigestResult::Xxh64(xxh64) => write!(f, "{:016x}", xxh64),
}
}
}
pub trait Hasher {
fn new() -> Self;
fn finalize(self) -> DigestResult;
fn update(&mut self, data: &[u8]);
}
impl Hasher for md5::Md5 {
fn new() -> Self { Digest::new() }
fn finalize(self) -> DigestResult { DigestResult::Md5(Digest::finalize(self).into()) }
fn update(&mut self, data: &[u8]) { Digest::update(self, data) }
}
impl Hasher for sha1::Sha1 {
fn new() -> Self { Digest::new() }
fn finalize(self) -> DigestResult { DigestResult::Sha1(Digest::finalize(self).into()) }
fn update(&mut self, data: &[u8]) { Digest::update(self, data) }
}
impl Hasher for crc32fast::Hasher {
fn new() -> Self { crc32fast::Hasher::new() }
fn finalize(self) -> DigestResult { DigestResult::Crc32(crc32fast::Hasher::finalize(self)) }
fn update(&mut self, data: &[u8]) { crc32fast::Hasher::update(self, data) }
}
impl Hasher for xxhash_rust::xxh64::Xxh64 {
fn new() -> Self { xxhash_rust::xxh64::Xxh64::new(0) }
fn finalize(self) -> DigestResult {
DigestResult::Xxh64(xxhash_rust::xxh64::Xxh64::digest(&self))
}
fn update(&mut self, data: &[u8]) { xxhash_rust::xxh64::Xxh64::update(self, data) }
}

View File

@@ -8,7 +8,7 @@ use std::{
path::{Path, MAIN_SEPARATOR},
};
pub fn display(path: &Path) -> PathDisplay { PathDisplay { path } }
pub fn path_display(path: &Path) -> PathDisplay { PathDisplay { path } }
pub struct PathDisplay<'a> {
path: &'a Path,
@@ -19,7 +19,7 @@ impl fmt::Display for PathDisplay<'_> {
let mut first = true;
for segment in self.path.iter() {
let segment_str = segment.to_string_lossy();
if segment_str == "." {
if segment_str == "/" || segment_str == "." {
continue;
}
if first {
@@ -39,3 +39,15 @@ pub fn has_extension(filename: &Path, extension: &str) -> bool {
None => false,
}
}
/// Creates a fixed-size array reference from a slice.
macro_rules! array_ref {
($slice:expr, $offset:expr, $size:expr) => {{
#[inline(always)]
fn to_array<T>(slice: &[T]) -> &[T; $size] {
unsafe { &*(slice.as_ptr() as *const [_; $size]) }
}
to_array(&$slice[$offset..$offset + $size])
}};
}
pub(crate) use array_ref;

View File

@@ -8,10 +8,12 @@ use std::{
};
use hex::deserialize as deserialize_hex;
use nod::{array_ref, Result};
use nod::Result;
use serde::Deserialize;
use zerocopy::{FromBytes, FromZeros, Immutable, IntoBytes, KnownLayout};
use crate::util::array_ref;
#[derive(Clone, Debug)]
pub struct GameResult<'a> {
pub name: &'a str,

View File

@@ -1,22 +1,21 @@
use std::{
cmp::min,
fmt,
fs::File,
io::{Read, Write},
io::{Seek, SeekFrom, Write},
path::Path,
sync::{mpsc::sync_channel, Arc},
thread,
};
use indicatif::{ProgressBar, ProgressState, ProgressStyle};
use nod::{Compression, Disc, DiscHeader, DiscMeta, OpenOptions, Result, ResultContext};
use size::Size;
use zerocopy::FromZeros;
use crate::util::{
digest::{digest_thread, DigestResult},
display, redump,
use nod::{
common::Compression,
disc::DiscHeader,
read::{DiscMeta, DiscOptions, DiscReader, PartitionEncryption},
write::{DiscWriter, DiscWriterWeight, FormatOptions, ProcessOptions},
Result, ResultContext,
};
use size::Size;
use crate::util::{digest::DigestResult, path_display, redump};
pub fn print_header(header: &DiscHeader, meta: &DiscMeta) {
println!("Format: {}", meta.format);
@@ -29,52 +28,71 @@ pub fn print_header(header: &DiscHeader, meta: &DiscMeta) {
println!("Lossless: {}", meta.lossless);
println!(
"Verification data: {}",
meta.crc32.is_some()
|| meta.md5.is_some()
|| meta.sha1.is_some()
|| meta.xxhash64.is_some()
meta.crc32.is_some() || meta.md5.is_some() || meta.sha1.is_some() || meta.xxh64.is_some()
);
println!();
println!("Title: {}", header.game_title_str());
println!("Game ID: {}", header.game_id_str());
println!("Disc {}, Revision {}", header.disc_num + 1, header.disc_version);
if !header.has_partition_hashes() {
println!("[!] Disc has no hashes");
}
if !header.has_partition_encryption() {
println!("[!] Disc is not encrypted");
}
if !header.has_partition_hashes() {
println!("[!] Disc has no hashes");
}
}
pub fn convert_and_verify(
in_file: &Path,
out_file: Option<&Path>,
md5: bool,
options: &OpenOptions,
options: &DiscOptions,
format_options: &FormatOptions,
) -> Result<()> {
println!("Loading {}", display(in_file));
let mut disc = Disc::new_with_options(in_file, options)?;
println!("Loading {}", path_display(in_file));
let disc = DiscReader::new(in_file, options)?;
let header = disc.header();
let meta = disc.meta();
print_header(header, &meta);
let disc_size = disc.disc_size();
let mut file = if let Some(out_file) = out_file {
Some(
File::create(out_file)
.with_context(|| format!("Creating file {}", display(out_file)))?,
.with_context(|| format!("Creating file {}", path_display(out_file)))?,
)
} else {
None
};
if out_file.is_some() {
println!("\nConverting...");
match options.partition_encryption {
PartitionEncryption::ForceEncrypted => {
println!("\nConverting to {} (encrypted)...", format_options.format)
}
PartitionEncryption::ForceDecrypted => {
println!("\nConverting to {} (decrypted)...", format_options.format)
}
_ => println!("\nConverting to {}...", format_options.format),
}
if format_options.compression != Compression::None {
println!("Compression: {}", format_options.compression);
}
if format_options.block_size > 0 {
println!("Block size: {}", Size::from_bytes(format_options.block_size));
}
} else {
println!("\nVerifying...");
match options.partition_encryption {
PartitionEncryption::ForceEncrypted => {
println!("\nVerifying (encrypted)...")
}
PartitionEncryption::ForceDecrypted => {
println!("\nVerifying (decrypted)...")
}
_ => println!("\nVerifying..."),
}
}
let pb = ProgressBar::new(disc_size);
let disc_writer = DiscWriter::new(disc, format_options)?;
let pb = ProgressBar::new(disc_writer.progress_bound());
pb.set_style(ProgressStyle::with_template("{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})")
.unwrap()
.with_key("eta", |state: &ProgressState, w: &mut dyn fmt::Write| {
@@ -82,85 +100,71 @@ pub fn convert_and_verify(
})
.progress_chars("#>-"));
const BUFFER_SIZE: usize = 1015808; // LCM(0x8000, 0x7C00)
let digest_threads = if md5 {
vec![
digest_thread::<crc32fast::Hasher>(),
digest_thread::<md5::Md5>(),
digest_thread::<sha1::Sha1>(),
digest_thread::<xxhash_rust::xxh64::Xxh64>(),
]
} else {
vec![
digest_thread::<crc32fast::Hasher>(),
digest_thread::<sha1::Sha1>(),
digest_thread::<xxhash_rust::xxh64::Xxh64>(),
]
let cpus = num_cpus::get();
let processor_threads = match disc_writer.weight() {
DiscWriterWeight::Light => 0,
DiscWriterWeight::Medium => cpus / 2,
DiscWriterWeight::Heavy => cpus,
};
let (w_tx, w_rx) = sync_channel::<Arc<[u8]>>(1);
let w_thread = thread::spawn(move || {
let mut total_written = 0u64;
while let Ok(data) = w_rx.recv() {
let mut total_written = 0u64;
let finalization = disc_writer.process(
|data, pos, _| {
if let Some(file) = &mut file {
file.write_all(data.as_ref())
.with_context(|| {
format!("Writing {} bytes at offset {}", data.len(), total_written)
})
.unwrap();
file.write_all(data.as_ref())?;
}
total_written += data.len() as u64;
pb.set_position(total_written);
}
if let Some(mut file) = file {
file.flush().context("Flushing output file").unwrap();
}
pb.finish();
});
pb.set_position(pos);
Ok(())
},
&ProcessOptions {
processor_threads,
digest_crc32: true,
digest_md5: md5,
digest_sha1: true,
digest_xxh64: true,
},
)?;
pb.finish();
let mut total_read = 0u64;
let mut buf = <[u8]>::new_box_zeroed_with_elems(BUFFER_SIZE)?;
while total_read < disc_size {
let read = min(BUFFER_SIZE as u64, disc_size - total_read) as usize;
disc.read_exact(&mut buf[..read]).with_context(|| {
format!("Reading {} bytes at disc offset {}", BUFFER_SIZE, total_read)
})?;
let arc = Arc::<[u8]>::from(&buf[..read]);
for (tx, _) in &digest_threads {
tx.send(arc.clone()).map_err(|_| "Sending data to hash thread")?;
// Finalize disc writer
if !finalization.header.is_empty() {
if let Some(file) = &mut file {
file.seek(SeekFrom::Start(0)).context("Seeking to start of output file")?;
file.write_all(finalization.header.as_ref()).context("Writing header")?;
} else {
return Err(nod::Error::Other("No output file, but requires finalization".to_string()));
}
w_tx.send(arc).map_err(|_| "Sending data to write thread")?;
total_read += read as u64;
}
drop(w_tx); // Close channel
w_thread.join().unwrap();
if let Some(mut file) = file {
file.flush().context("Flushing output file")?;
}
println!();
if let Some(path) = out_file {
println!("Wrote {} to {}", Size::from_bytes(total_read), display(path));
println!("Wrote {} to {}", Size::from_bytes(total_written), path_display(path));
}
println!();
let mut crc32 = None;
let mut md5 = None;
let mut sha1 = None;
let mut xxh64 = None;
for (tx, handle) in digest_threads {
drop(tx); // Close channel
match handle.join().unwrap() {
DigestResult::Crc32(v) => crc32 = Some(v),
DigestResult::Md5(v) => md5 = Some(v),
DigestResult::Sha1(v) => sha1 = Some(v),
DigestResult::Xxh64(v) => xxh64 = Some(v),
}
}
let redump_entry = crc32.and_then(redump::find_by_crc32);
let expected_crc32 = meta.crc32.or(redump_entry.as_ref().map(|e| e.crc32));
let expected_md5 = meta.md5.or(redump_entry.as_ref().map(|e| e.md5));
let expected_sha1 = meta.sha1.or(redump_entry.as_ref().map(|e| e.sha1));
let expected_xxh64 = meta.xxhash64;
let mut redump_entry = None;
let mut expected_crc32 = None;
let mut expected_md5 = None;
let mut expected_sha1 = None;
let mut expected_xxh64 = None;
if options.partition_encryption == PartitionEncryption::Original {
// Use verification data in disc and check redump
redump_entry = finalization.crc32.and_then(redump::find_by_crc32);
expected_crc32 = meta.crc32.or(redump_entry.as_ref().map(|e| e.crc32));
expected_md5 = meta.md5.or(redump_entry.as_ref().map(|e| e.md5));
expected_sha1 = meta.sha1.or(redump_entry.as_ref().map(|e| e.sha1));
expected_xxh64 = meta.xxh64;
} else if options.partition_encryption == PartitionEncryption::ForceEncrypted {
// Ignore verification data in disc, but still check redump
redump_entry = finalization.crc32.and_then(redump::find_by_crc32);
expected_crc32 = redump_entry.as_ref().map(|e| e.crc32);
expected_md5 = redump_entry.as_ref().map(|e| e.md5);
expected_sha1 = redump_entry.as_ref().map(|e| e.sha1);
}
fn print_digest(value: DigestResult, expected: Option<DigestResult>) {
print!("{:<6}: ", value.name());
@@ -176,36 +180,36 @@ pub fn convert_and_verify(
println!();
}
if let Some(entry) = &redump_entry {
let mut full_match = true;
if let Some(md5) = md5 {
if entry.md5 != md5 {
full_match = false;
if let Some(crc32) = finalization.crc32 {
if let Some(entry) = &redump_entry {
let mut full_match = true;
if let Some(md5) = finalization.md5 {
if entry.md5 != md5 {
full_match = false;
}
}
}
if let Some(sha1) = sha1 {
if entry.sha1 != sha1 {
full_match = false;
if let Some(sha1) = finalization.sha1 {
if entry.sha1 != sha1 {
full_match = false;
}
}
if full_match {
println!("Redump: {}", entry.name);
} else {
println!("Redump: {} ❓ (partial match)", entry.name);
}
}
if full_match {
println!("Redump: {}", entry.name);
} else {
println!("Redump: {} ❓ (partial match)", entry.name);
println!("Redump: Not found ❌");
}
} else {
println!("Redump: Not found ❌");
}
if let Some(crc32) = crc32 {
print_digest(DigestResult::Crc32(crc32), expected_crc32.map(DigestResult::Crc32));
}
if let Some(md5) = md5 {
if let Some(md5) = finalization.md5 {
print_digest(DigestResult::Md5(md5), expected_md5.map(DigestResult::Md5));
}
if let Some(sha1) = sha1 {
if let Some(sha1) = finalization.sha1 {
print_digest(DigestResult::Sha1(sha1), expected_sha1.map(DigestResult::Sha1));
}
if let Some(xxh64) = xxh64 {
if let Some(xxh64) = finalization.xxh64 {
print_digest(DigestResult::Xxh64(xxh64), expected_xxh64.map(DigestResult::Xxh64));
}
Ok(())