Use typed-path in place of std Path/PathBuf

This allows handling path conversions in a more structured way,
as well as avoiding needless UTF-8 checks. All argument inputs
use `Utf8NativePathBuf`, while all config entries use
`Utf8UnixPathBuf`, ensuring that we deserialize/serialize using
forward slashes. We can omit `.display()` and lossy UTF-8
conversions since all paths are known to be valid UTF-8.
This commit is contained in:
Luke Street 2024-10-04 23:38:15 -06:00
parent 64d0491256
commit 2e524e6806
35 changed files with 624 additions and 600 deletions

14
Cargo.lock generated
View File

@ -381,7 +381,6 @@ dependencies = [
"once_cell", "once_cell",
"orthrus-ncompress", "orthrus-ncompress",
"owo-colors", "owo-colors",
"path-slash",
"petgraph", "petgraph",
"ppc750cl", "ppc750cl",
"rayon", "rayon",
@ -399,6 +398,7 @@ dependencies = [
"tracing", "tracing",
"tracing-attributes", "tracing-attributes",
"tracing-subscriber", "tracing-subscriber",
"typed-path",
"xxhash-rust", "xxhash-rust",
"zerocopy", "zerocopy",
] ]
@ -1057,12 +1057,6 @@ dependencies = [
"windows-targets", "windows-targets",
] ]
[[package]]
name = "path-slash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42"
[[package]] [[package]]
name = "pbjson-build" name = "pbjson-build"
version = "0.7.0" version = "0.7.0"
@ -1764,6 +1758,12 @@ dependencies = [
"syn 2.0.79", "syn 2.0.79",
] ]
[[package]]
name = "typed-path"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50c0c7479c430935701ff2532e3091e6680ec03f2f89ffcd9988b08e885b90a5"
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.17.0" version = "1.17.0"

View File

@ -9,7 +9,7 @@ publish = false
repository = "https://github.com/encounter/decomp-toolkit" repository = "https://github.com/encounter/decomp-toolkit"
readme = "README.md" readme = "README.md"
categories = ["command-line-utilities"] categories = ["command-line-utilities"]
rust-version = "1.80.0" rust-version = "1.81"
[[bin]] [[bin]]
name = "dtk" name = "dtk"
@ -31,6 +31,7 @@ argp = "0.3"
base16ct = "0.2" base16ct = "0.2"
base64 = "0.22" base64 = "0.22"
byteorder = "1.5" byteorder = "1.5"
typed-path = "0.9"
crossterm = "0.28" crossterm = "0.28"
cwdemangle = "1.0" cwdemangle = "1.0"
cwextab = "1.0" cwextab = "1.0"
@ -57,7 +58,6 @@ object = { version = "0.36", features = ["read_core", "std", "elf", "write_std"]
once_cell = "1.20" once_cell = "1.20"
orthrus-ncompress = "0.2" orthrus-ncompress = "0.2"
owo-colors = { version = "4.1", features = ["supports-colors"] } owo-colors = { version = "4.1", features = ["supports-colors"] }
path-slash = "0.2"
petgraph = { version = "0.6", default-features = false } petgraph = { version = "0.6", default-features = false }
ppc750cl = "0.3" ppc750cl = "0.3"
rayon = "1.10" rayon = "1.10"

View File

@ -1,16 +1,15 @@
use std::{ use std::io::{stdout, Write};
io::{stdout, Write},
path::PathBuf,
};
use anyhow::Result; use anyhow::Result;
use argp::FromArgs; use argp::FromArgs;
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
cmd, cmd,
util::{ util::{
alf::AlfFile, alf::AlfFile,
file::buf_writer, file::buf_writer,
path::native_path,
reader::{Endian, FromReader}, reader::{Endian, FromReader},
}, },
vfs::open_file, vfs::open_file,
@ -35,21 +34,21 @@ enum SubCommand {
/// Prints information about an alf file. (Same as `dol info`) /// Prints information about an alf file. (Same as `dol info`)
#[argp(subcommand, name = "info")] #[argp(subcommand, name = "info")]
pub struct InfoArgs { pub struct InfoArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// alf file /// alf file
file: PathBuf, file: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Debug)] #[derive(FromArgs, PartialEq, Debug)]
/// Extracts symbol hashes from an alf file. /// Extracts symbol hashes from an alf file.
#[argp(subcommand, name = "hashes")] #[argp(subcommand, name = "hashes")]
pub struct HashesArgs { pub struct HashesArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// alf file /// alf file
alf_file: PathBuf, alf_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output file /// output file
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
} }
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
@ -64,7 +63,7 @@ fn hashes(args: HashesArgs) -> Result<()> {
let mut file = open_file(&args.alf_file, true)?; let mut file = open_file(&args.alf_file, true)?;
AlfFile::from_reader(file.as_mut(), Endian::Little)? AlfFile::from_reader(file.as_mut(), Endian::Little)?
}; };
let mut w: Box<dyn Write> = if let Some(output) = args.output { let mut w: Box<dyn Write> = if let Some(output) = &args.output {
Box::new(buf_writer(output)?) Box::new(buf_writer(output)?)
} else { } else {
Box::new(stdout()) Box::new(stdout())

View File

@ -2,15 +2,18 @@ use std::{
collections::{btree_map::Entry, BTreeMap}, collections::{btree_map::Entry, BTreeMap},
fs::File, fs::File,
io::Write, io::Write,
path::PathBuf,
}; };
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs; use argp::FromArgs;
use object::{Object, ObjectSymbol, SymbolScope}; use object::{Object, ObjectSymbol, SymbolScope};
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
util::file::{buf_writer, process_rsp}, util::{
file::{buf_writer, process_rsp},
path::native_path,
},
vfs::open_file, vfs::open_file,
}; };
@ -33,24 +36,24 @@ enum SubCommand {
/// Creates a static library. /// Creates a static library.
#[argp(subcommand, name = "create")] #[argp(subcommand, name = "create")]
pub struct CreateArgs { pub struct CreateArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output file /// output file
out: PathBuf, out: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input files /// input files
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Extracts a static library. /// Extracts a static library.
#[argp(subcommand, name = "extract")] #[argp(subcommand, name = "extract")]
pub struct ExtractArgs { pub struct ExtractArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input files /// input files
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// output directory /// output directory
out: Option<PathBuf>, out: Option<Utf8NativePathBuf>,
#[argp(switch, short = 'q')] #[argp(switch, short = 'q')]
/// quiet output /// quiet output
quiet: bool, quiet: bool,
@ -74,14 +77,13 @@ fn create(args: CreateArgs) -> Result<()> {
let mut identifiers = Vec::with_capacity(files.len()); let mut identifiers = Vec::with_capacity(files.len());
let mut symbol_table = BTreeMap::new(); let mut symbol_table = BTreeMap::new();
for path in &files { for path in &files {
let path_str = let unix_path = path.with_unix_encoding();
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?; let identifier = unix_path.as_str().as_bytes().to_vec();
let identifier = path_str.as_bytes().to_vec();
identifiers.push(identifier.clone()); identifiers.push(identifier.clone());
let entries = match symbol_table.entry(identifier) { let entries = match symbol_table.entry(identifier) {
Entry::Vacant(e) => e.insert(Vec::new()), Entry::Vacant(e) => e.insert(Vec::new()),
Entry::Occupied(_) => bail!("Duplicate file name '{path_str}'"), Entry::Occupied(_) => bail!("Duplicate file name '{unix_path}'"),
}; };
let mut file = open_file(path, false)?; let mut file = open_file(path, false)?;
let obj = object::File::parse(file.map()?)?; let obj = object::File::parse(file.map()?)?;
@ -102,10 +104,8 @@ fn create(args: CreateArgs) -> Result<()> {
symbol_table, symbol_table,
)?; )?;
for path in files { for path in files {
let path_str =
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
let mut file = File::open(&path)?; let mut file = File::open(&path)?;
builder.append_file(path_str.as_bytes(), &mut file)?; builder.append_file(path.as_str().as_bytes(), &mut file)?;
} }
builder.into_inner()?.flush()?; builder.into_inner()?.flush()?;
Ok(()) Ok(())
@ -118,7 +118,8 @@ fn extract(args: ExtractArgs) -> Result<()> {
// Extract files // Extract files
let mut num_files = 0; let mut num_files = 0;
for path in &files { for path in &files {
let mut out_dir = if let Some(out) = &args.out { out.clone() } else { PathBuf::new() }; let mut out_dir =
if let Some(out) = &args.out { out.clone() } else { Utf8NativePathBuf::new() };
// If there are multiple files, extract to separate directories // If there are multiple files, extract to separate directories
if files.len() > 1 { if files.len() > 1 {
out_dir out_dir
@ -126,14 +127,13 @@ fn extract(args: ExtractArgs) -> Result<()> {
} }
std::fs::create_dir_all(&out_dir)?; std::fs::create_dir_all(&out_dir)?;
if !args.quiet { if !args.quiet {
println!("Extracting {} to {}", path.display(), out_dir.display()); println!("Extracting {} to {}", path, out_dir);
} }
let mut file = open_file(path, false)?; let mut file = open_file(path, false)?;
let mut archive = ar::Archive::new(file.map()?); let mut archive = ar::Archive::new(file.map()?);
while let Some(entry) = archive.next_entry() { while let Some(entry) = archive.next_entry() {
let mut entry = let mut entry = entry.with_context(|| format!("Processing entry in {}", path))?;
entry.with_context(|| format!("Processing entry in {}", path.display()))?;
let file_name = std::str::from_utf8(entry.header().identifier())?; let file_name = std::str::from_utf8(entry.header().identifier())?;
if !args.quiet && args.verbose { if !args.quiet && args.verbose {
println!("\t{}", file_name); println!("\t{}", file_name);
@ -146,7 +146,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
std::fs::create_dir_all(parent)?; std::fs::create_dir_all(parent)?;
} }
let mut file = File::create(&file_path) let mut file = File::create(&file_path)
.with_context(|| format!("Failed to create file {}", file_path.display()))?; .with_context(|| format!("Failed to create file {}", file_path))?;
std::io::copy(&mut entry, &mut file)?; std::io::copy(&mut entry, &mut file)?;
file.flush()?; file.flush()?;

View File

@ -1,13 +1,10 @@
use std::{ use std::{
borrow::Cow,
cmp::min, cmp::min,
collections::{btree_map::Entry, hash_map, BTreeMap, HashMap}, collections::{btree_map::Entry, hash_map, BTreeMap, HashMap},
ffi::OsStr,
fs, fs,
fs::DirBuilder, fs::DirBuilder,
io::{Cursor, Seek, Write}, io::{Cursor, Seek, Write},
mem::take, mem::take,
path::{Path, PathBuf},
time::Instant, time::Instant,
}; };
@ -18,6 +15,7 @@ use itertools::Itertools;
use rayon::prelude::*; use rayon::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::{debug, info, info_span}; use tracing::{debug, info, info_span};
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPath, Utf8UnixPathBuf};
use xxhash_rust::xxh3::xxh3_64; use xxhash_rust::xxh3::xxh3_64;
use crate::{ use crate::{
@ -51,6 +49,7 @@ use crate::{
file::{buf_writer, touch, verify_hash, FileIterator, FileReadInfo}, file::{buf_writer, touch, verify_hash, FileIterator, FileReadInfo},
lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit}, lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
map::apply_map_file, map::apply_map_file,
path::{check_path_buf, native_path},
rel::{process_rel, process_rel_header, update_rel_section_alignment}, rel::{process_rel, process_rel_header, update_rel_section_alignment},
rso::{process_rso, DOL_SECTION_ABS, DOL_SECTION_ETI, DOL_SECTION_NAMES}, rso::{process_rso, DOL_SECTION_ABS, DOL_SECTION_ETI, DOL_SECTION_NAMES},
split::{is_linker_generated_object, split_obj, update_splits}, split::{is_linker_generated_object, split_obj, update_splits},
@ -81,24 +80,24 @@ enum SubCommand {
/// Views DOL file information. /// Views DOL file information.
#[argp(subcommand, name = "info")] #[argp(subcommand, name = "info")]
pub struct InfoArgs { pub struct InfoArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// DOL file /// DOL file
pub dol_file: PathBuf, pub dol_file: Utf8NativePathBuf,
#[argp(option, short = 's')] #[argp(option, short = 's', from_str_fn(native_path))]
/// optional path to selfile.sel /// optional path to selfile.sel
pub selfile: Option<PathBuf>, pub selfile: Option<Utf8NativePathBuf>,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Splits a DOL into relocatable objects. /// Splits a DOL into relocatable objects.
#[argp(subcommand, name = "split")] #[argp(subcommand, name = "split")]
pub struct SplitArgs { pub struct SplitArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input configuration file /// input configuration file
config: PathBuf, config: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output directory /// output directory
out_dir: PathBuf, out_dir: Utf8NativePathBuf,
#[argp(switch)] #[argp(switch)]
/// skip updating splits & symbol files (for build systems) /// skip updating splits & symbol files (for build systems)
no_update: bool, no_update: bool,
@ -111,36 +110,36 @@ pub struct SplitArgs {
/// Diffs symbols in a linked ELF. /// Diffs symbols in a linked ELF.
#[argp(subcommand, name = "diff")] #[argp(subcommand, name = "diff")]
pub struct DiffArgs { pub struct DiffArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input configuration file /// input configuration file
config: PathBuf, config: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// linked ELF /// linked ELF
elf_file: PathBuf, elf_file: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Applies updated symbols from a linked ELF to the project configuration. /// Applies updated symbols from a linked ELF to the project configuration.
#[argp(subcommand, name = "apply")] #[argp(subcommand, name = "apply")]
pub struct ApplyArgs { pub struct ApplyArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input configuration file /// input configuration file
config: PathBuf, config: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// linked ELF /// linked ELF
elf_file: PathBuf, elf_file: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Generates a project configuration file from a DOL (& RELs). /// Generates a project configuration file from a DOL (& RELs).
#[argp(subcommand, name = "config")] #[argp(subcommand, name = "config")]
pub struct ConfigArgs { pub struct ConfigArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// object files /// object files
objects: Vec<PathBuf>, objects: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// output config YAML file /// output config YAML file
out_file: PathBuf, out_file: Utf8NativePathBuf,
} }
#[inline] #[inline]
@ -155,44 +154,37 @@ where T: Default + PartialEq {
t == &T::default() t == &T::default()
} }
mod path_slash_serde { mod unix_path_serde {
use std::path::PathBuf;
use path_slash::PathBufExt as _;
use serde::{Deserialize, Deserializer, Serializer}; use serde::{Deserialize, Deserializer, Serializer};
use typed_path::Utf8UnixPathBuf;
pub fn serialize<S>(path: &PathBuf, s: S) -> Result<S::Ok, S::Error> pub fn serialize<S>(path: &Utf8UnixPathBuf, s: S) -> Result<S::Ok, S::Error>
where S: Serializer { where S: Serializer {
let path_str = path.to_slash().ok_or_else(|| serde::ser::Error::custom("Invalid path"))?; s.serialize_str(path.as_str())
s.serialize_str(path_str.as_ref())
} }
pub fn deserialize<'de, D>(deserializer: D) -> Result<PathBuf, D::Error> pub fn deserialize<'de, D>(deserializer: D) -> Result<Utf8UnixPathBuf, D::Error>
where D: Deserializer<'de> { where D: Deserializer<'de> {
String::deserialize(deserializer).map(PathBuf::from_slash) String::deserialize(deserializer).map(Utf8UnixPathBuf::from)
} }
} }
mod path_slash_serde_option { mod unix_path_serde_option {
use std::path::PathBuf;
use path_slash::PathBufExt as _;
use serde::{Deserialize, Deserializer, Serializer}; use serde::{Deserialize, Deserializer, Serializer};
use typed_path::Utf8UnixPathBuf;
pub fn serialize<S>(path: &Option<PathBuf>, s: S) -> Result<S::Ok, S::Error> pub fn serialize<S>(path: &Option<Utf8UnixPathBuf>, s: S) -> Result<S::Ok, S::Error>
where S: Serializer { where S: Serializer {
if let Some(path) = path { if let Some(path) = path {
let path_str = s.serialize_str(path.as_str())
path.to_slash().ok_or_else(|| serde::ser::Error::custom("Invalid path"))?;
s.serialize_str(path_str.as_ref())
} else { } else {
s.serialize_none() s.serialize_none()
} }
} }
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error> pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Utf8UnixPathBuf>, D::Error>
where D: Deserializer<'de> { where D: Deserializer<'de> {
Ok(Option::deserialize(deserializer)?.map(PathBuf::from_slash::<String>)) Ok(Option::<String>::deserialize(deserializer)?.map(Utf8UnixPathBuf::from))
} }
} }
@ -200,8 +192,8 @@ mod path_slash_serde_option {
pub struct ProjectConfig { pub struct ProjectConfig {
#[serde(flatten)] #[serde(flatten)]
pub base: ModuleConfig, pub base: ModuleConfig,
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
pub selfile: Option<PathBuf>, pub selfile: Option<Utf8UnixPathBuf>,
#[serde(skip_serializing_if = "is_default")] #[serde(skip_serializing_if = "is_default")]
pub selfile_hash: Option<String>, pub selfile_hash: Option<String>,
/// Version of the MW `.comment` section format. /// Version of the MW `.comment` section format.
@ -235,8 +227,8 @@ pub struct ProjectConfig {
#[serde(default = "bool_true", skip_serializing_if = "is_true")] #[serde(default = "bool_true", skip_serializing_if = "is_true")]
pub export_all: bool, pub export_all: bool,
/// Optional base path for all object files. /// Optional base path for all object files.
#[serde(default, skip_serializing_if = "is_default")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
pub object_base: Option<PathBuf>, pub object_base: Option<Utf8UnixPathBuf>,
} }
impl Default for ProjectConfig { impl Default for ProjectConfig {
@ -265,21 +257,21 @@ pub struct ModuleConfig {
/// Object name. If not specified, the file name without extension will be used. /// Object name. If not specified, the file name without extension will be used.
#[serde(skip_serializing_if = "is_default")] #[serde(skip_serializing_if = "is_default")]
pub name: Option<String>, pub name: Option<String>,
#[serde(with = "path_slash_serde")] #[serde(with = "unix_path_serde")]
pub object: PathBuf, pub object: Utf8UnixPathBuf,
#[serde(skip_serializing_if = "is_default")] #[serde(skip_serializing_if = "is_default")]
pub hash: Option<String>, pub hash: Option<String>,
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
pub splits: Option<PathBuf>, pub splits: Option<Utf8UnixPathBuf>,
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
pub symbols: Option<PathBuf>, pub symbols: Option<Utf8UnixPathBuf>,
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
pub map: Option<PathBuf>, pub map: Option<Utf8UnixPathBuf>,
/// Forces the given symbols to be active (exported) in the linker script. /// Forces the given symbols to be active (exported) in the linker script.
#[serde(default, skip_serializing_if = "is_default")] #[serde(default, skip_serializing_if = "is_default")]
pub force_active: Vec<String>, pub force_active: Vec<String>,
#[serde(skip_serializing_if = "is_default")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
pub ldscript_template: Option<PathBuf>, pub ldscript_template: Option<Utf8UnixPathBuf>,
/// Overrides links to other modules. /// Overrides links to other modules.
#[serde(skip_serializing_if = "is_default")] #[serde(skip_serializing_if = "is_default")]
pub links: Option<Vec<String>>, pub links: Option<Vec<String>>,
@ -297,12 +289,12 @@ pub struct ExtractConfig {
pub symbol: String, pub symbol: String,
/// If specified, the symbol's data will be extracted to the given file. /// If specified, the symbol's data will be extracted to the given file.
/// Path is relative to `out_dir/bin`. /// Path is relative to `out_dir/bin`.
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "Option::is_none")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "Option::is_none")]
pub binary: Option<PathBuf>, pub binary: Option<Utf8UnixPathBuf>,
/// If specified, the symbol's data will be extracted to the given file as a C array. /// If specified, the symbol's data will be extracted to the given file as a C array.
/// Path is relative to `out_dir/include`. /// Path is relative to `out_dir/include`.
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "Option::is_none")] #[serde(with = "unix_path_serde_option", default, skip_serializing_if = "Option::is_none")]
pub header: Option<PathBuf>, pub header: Option<Utf8UnixPathBuf>,
} }
/// A relocation that should be blocked. /// A relocation that should be blocked.
@ -336,30 +328,20 @@ pub struct AddRelocationConfig {
} }
impl ModuleConfig { impl ModuleConfig {
pub fn file_name(&self) -> Cow<'_, str> { pub fn file_name(&self) -> &str { self.object.file_name().unwrap_or(self.object.as_str()) }
self.object.file_name().unwrap_or(self.object.as_os_str()).to_string_lossy()
pub fn file_prefix(&self) -> &str {
let file_name = self.file_name();
file_name.split_once('.').map(|(prefix, _)| prefix).unwrap_or(file_name)
} }
pub fn file_prefix(&self) -> Cow<'_, str> { pub fn name(&self) -> &str { self.name.as_deref().unwrap_or_else(|| self.file_prefix()) }
match self.file_name() {
Cow::Borrowed(s) => {
Cow::Borrowed(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(s))
}
Cow::Owned(s) => {
Cow::Owned(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(&s).to_string())
}
}
}
pub fn name(&self) -> Cow<'_, str> {
self.name.as_ref().map(|n| n.as_str().to_cow()).unwrap_or_else(|| self.file_prefix())
}
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct OutputUnit { pub struct OutputUnit {
#[serde(with = "path_slash_serde")] #[serde(with = "unix_path_serde")]
pub object: PathBuf, pub object: Utf8UnixPathBuf,
pub name: String, pub name: String,
pub autogenerated: bool, pub autogenerated: bool,
pub code_size: u32, pub code_size: u32,
@ -370,8 +352,8 @@ pub struct OutputUnit {
pub struct OutputModule { pub struct OutputModule {
pub name: String, pub name: String,
pub module_id: u32, pub module_id: u32,
#[serde(with = "path_slash_serde")] #[serde(with = "unix_path_serde")]
pub ldscript: PathBuf, pub ldscript: Utf8UnixPathBuf,
pub entry: Option<String>, pub entry: Option<String>,
pub units: Vec<OutputUnit>, pub units: Vec<OutputUnit>,
} }
@ -788,21 +770,21 @@ fn resolve_external_relocations(
struct AnalyzeResult { struct AnalyzeResult {
obj: ObjInfo, obj: ObjInfo,
dep: Vec<PathBuf>, dep: Vec<Utf8NativePathBuf>,
symbols_cache: Option<FileReadInfo>, symbols_cache: Option<FileReadInfo>,
splits_cache: Option<FileReadInfo>, splits_cache: Option<FileReadInfo>,
} }
fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> { fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
let object_path = object_base.join(&config.base.object); let object_path = object_base.join(&config.base.object);
log::debug!("Loading {}", object_path.display()); log::debug!("Loading {}", object_path);
let mut obj = { let mut obj = {
let mut file = object_base.open(&config.base.object)?; let mut file = object_base.open(&config.base.object)?;
let data = file.map()?; let data = file.map()?;
if let Some(hash_str) = &config.base.hash { if let Some(hash_str) = &config.base.hash {
verify_hash(data, hash_str)?; verify_hash(data, hash_str)?;
} }
process_dol(data, config.base.name().as_ref())? process_dol(data, config.base.name())?
}; };
let mut dep = vec![object_path]; let mut dep = vec![object_path];
@ -811,20 +793,25 @@ fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<
} }
if let Some(map_path) = &config.base.map { if let Some(map_path) = &config.base.map {
apply_map_file(map_path, &mut obj, config.common_start, config.mw_comment_version)?; let map_path = map_path.with_encoding();
dep.push(map_path.clone()); apply_map_file(&map_path, &mut obj, config.common_start, config.mw_comment_version)?;
dep.push(map_path);
} }
let splits_cache = if let Some(splits_path) = &config.base.splits { let splits_cache = if let Some(splits_path) = &config.base.splits {
dep.push(splits_path.clone()); let splits_path = splits_path.with_encoding();
apply_splits_file(splits_path, &mut obj)? let cache = apply_splits_file(&splits_path, &mut obj)?;
dep.push(splits_path);
cache
} else { } else {
None None
}; };
let symbols_cache = if let Some(symbols_path) = &config.base.symbols { let symbols_cache = if let Some(symbols_path) = &config.base.symbols {
dep.push(symbols_path.clone()); let symbols_path = symbols_path.with_encoding();
apply_symbols_file(symbols_path, &mut obj)? let cache = apply_symbols_file(&symbols_path, &mut obj)?;
dep.push(symbols_path);
cache
} else { } else {
None None
}; };
@ -850,8 +837,9 @@ fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<
} }
if let Some(selfile) = &config.selfile { if let Some(selfile) = &config.selfile {
log::info!("Loading {}", selfile.display()); let selfile: Utf8NativePathBuf = selfile.with_encoding();
let mut file = open_file(selfile, true)?; log::info!("Loading {}", selfile);
let mut file = open_file(&selfile, true)?;
let data = file.map()?; let data = file.map()?;
if let Some(hash) = &config.selfile_hash { if let Some(hash) = &config.selfile_hash {
verify_hash(data, hash)?; verify_hash(data, hash)?;
@ -872,8 +860,8 @@ fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<
fn split_write_obj( fn split_write_obj(
module: &mut ModuleInfo, module: &mut ModuleInfo,
config: &ProjectConfig, config: &ProjectConfig,
base_dir: &Path, base_dir: &Utf8NativePath,
out_dir: &Path, out_dir: &Utf8NativePath,
no_update: bool, no_update: bool,
) -> Result<OutputModule> { ) -> Result<OutputModule> {
debug!("Performing relocation analysis"); debug!("Performing relocation analysis");
@ -904,10 +892,15 @@ fn split_write_obj(
if !no_update { if !no_update {
debug!("Writing configuration"); debug!("Writing configuration");
if let Some(symbols_path) = &module.config.symbols { if let Some(symbols_path) = &module.config.symbols {
write_symbols_file(symbols_path, &module.obj, module.symbols_cache)?; write_symbols_file(&symbols_path.with_encoding(), &module.obj, module.symbols_cache)?;
} }
if let Some(splits_path) = &module.config.splits { if let Some(splits_path) = &module.config.splits {
write_splits_file(splits_path, &module.obj, false, module.splits_cache)?; write_splits_file(
&splits_path.with_encoding(),
&module.obj,
false,
module.splits_cache,
)?;
} }
} }
@ -919,7 +912,7 @@ fn split_write_obj(
DirBuilder::new() DirBuilder::new()
.recursive(true) .recursive(true)
.create(out_dir) .create(out_dir)
.with_context(|| format!("Failed to create out dir '{}'", out_dir.display()))?; .with_context(|| format!("Failed to create out dir '{}'", out_dir))?;
let obj_dir = out_dir.join("obj"); let obj_dir = out_dir.join("obj");
let entry = if module.obj.kind == ObjKind::Executable { let entry = if module.obj.kind == ObjKind::Executable {
module.obj.entry.and_then(|e| { module.obj.entry.and_then(|e| {
@ -934,7 +927,7 @@ fn split_write_obj(
let mut out_config = OutputModule { let mut out_config = OutputModule {
name: module_name, name: module_name,
module_id, module_id,
ldscript: out_dir.join("ldscript.lcf"), ldscript: out_dir.join("ldscript.lcf").with_unix_encoding(),
units: Vec::with_capacity(split_objs.len()), units: Vec::with_capacity(split_objs.len()),
entry, entry,
}; };
@ -942,7 +935,7 @@ fn split_write_obj(
let out_obj = write_elf(split_obj, config.export_all)?; let out_obj = write_elf(split_obj, config.export_all)?;
let out_path = obj_dir.join(obj_path_for_unit(&unit.name)); let out_path = obj_dir.join(obj_path_for_unit(&unit.name));
out_config.units.push(OutputUnit { out_config.units.push(OutputUnit {
object: out_path.clone(), object: out_path.with_unix_encoding(),
name: unit.name.clone(), name: unit.name.clone(),
autogenerated: unit.autogenerated, autogenerated: unit.autogenerated,
code_size: split_obj.code_size(), code_size: split_obj.code_size(),
@ -967,7 +960,7 @@ fn split_write_obj(
let data = section.symbol_data(symbol)?; let data = section.symbol_data(symbol)?;
if let Some(binary) = &extract.binary { if let Some(binary) = &extract.binary {
let out_path = base_dir.join("bin").join(binary); let out_path = base_dir.join("bin").join(binary.with_encoding());
if let Some(parent) = out_path.parent() { if let Some(parent) = out_path.parent() {
DirBuilder::new().recursive(true).create(parent)?; DirBuilder::new().recursive(true).create(parent)?;
} }
@ -976,7 +969,7 @@ fn split_write_obj(
if let Some(header) = &extract.header { if let Some(header) = &extract.header {
let header_string = bin2c(symbol, section, data); let header_string = bin2c(symbol, section, data);
let out_path = base_dir.join("include").join(header); let out_path = base_dir.join("include").join(header.with_encoding());
if let Some(parent) = out_path.parent() { if let Some(parent) = out_path.parent() {
DirBuilder::new().recursive(true).create(parent)?; DirBuilder::new().recursive(true).create(parent)?;
} }
@ -985,16 +978,18 @@ fn split_write_obj(
} }
// Generate ldscript.lcf // Generate ldscript.lcf
let ldscript_template = if let Some(template) = &module.config.ldscript_template { let ldscript_template = if let Some(template_path) = &module.config.ldscript_template {
Some(fs::read_to_string(template).with_context(|| { let template_path = template_path.with_encoding();
format!("Failed to read linker script template '{}'", template.display()) Some(fs::read_to_string(&template_path).with_context(|| {
format!("Failed to read linker script template '{}'", template_path)
})?) })?)
} else { } else {
None None
}; };
let ldscript_string = let ldscript_string =
generate_ldscript(&module.obj, ldscript_template.as_deref(), &module.config.force_active)?; generate_ldscript(&module.obj, ldscript_template.as_deref(), &module.config.force_active)?;
write_if_changed(&out_config.ldscript, ldscript_string.as_bytes())?; let ldscript_path = out_config.ldscript.with_encoding();
write_if_changed(&ldscript_path, ldscript_string.as_bytes())?;
if config.write_asm { if config.write_asm {
debug!("Writing disassembly"); debug!("Writing disassembly");
@ -1004,15 +999,15 @@ fn split_write_obj(
let mut w = buf_writer(&out_path)?; let mut w = buf_writer(&out_path)?;
write_asm(&mut w, split_obj) write_asm(&mut w, split_obj)
.with_context(|| format!("Failed to write {}", out_path.display()))?; .with_context(|| format!("Failed to write {}", out_path))?;
w.flush()?; w.flush()?;
} }
} }
Ok(out_config) Ok(out_config)
} }
fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> { fn write_if_changed(path: &Utf8NativePath, contents: &[u8]) -> Result<()> {
if path.is_file() { if fs::metadata(path).is_ok_and(|m| m.is_file()) {
let mut old_file = open_file(path, true)?; let mut old_file = open_file(path, true)?;
let old_data = old_file.map()?; let old_data = old_file.map()?;
// If the file is the same size, check if the contents are the same // If the file is the same size, check if the contents are the same
@ -1021,8 +1016,7 @@ fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
return Ok(()); return Ok(());
} }
} }
fs::write(path, contents) fs::write(path, contents).with_context(|| format!("Failed to write file '{}'", path))?;
.with_context(|| format!("Failed to write file '{}'", path.display()))?;
Ok(()) Ok(())
} }
@ -1032,14 +1026,13 @@ fn load_analyze_rel(
module_config: &ModuleConfig, module_config: &ModuleConfig,
) -> Result<AnalyzeResult> { ) -> Result<AnalyzeResult> {
let object_path = object_base.join(&module_config.object); let object_path = object_base.join(&module_config.object);
debug!("Loading {}", object_path.display()); debug!("Loading {}", object_path);
let mut file = object_base.open(&module_config.object)?; let mut file = object_base.open(&module_config.object)?;
let data = file.map()?; let data = file.map()?;
if let Some(hash_str) = &module_config.hash { if let Some(hash_str) = &module_config.hash {
verify_hash(data, hash_str)?; verify_hash(data, hash_str)?;
} }
let (header, mut module_obj) = let (header, mut module_obj) = process_rel(&mut Cursor::new(data), module_config.name())?;
process_rel(&mut Cursor::new(data), module_config.name().as_ref())?;
if let Some(comment_version) = config.mw_comment_version { if let Some(comment_version) = config.mw_comment_version {
module_obj.mw_comment = Some(MWComment::new(comment_version)?); module_obj.mw_comment = Some(MWComment::new(comment_version)?);
@ -1047,20 +1040,25 @@ fn load_analyze_rel(
let mut dep = vec![object_path]; let mut dep = vec![object_path];
if let Some(map_path) = &module_config.map { if let Some(map_path) = &module_config.map {
apply_map_file(map_path, &mut module_obj, None, None)?; let map_path = map_path.with_encoding();
dep.push(map_path.clone()); apply_map_file(&map_path, &mut module_obj, None, None)?;
dep.push(map_path);
} }
let splits_cache = if let Some(splits_path) = &module_config.splits { let splits_cache = if let Some(splits_path) = &module_config.splits {
dep.push(splits_path.clone()); let splits_path = splits_path.with_encoding();
apply_splits_file(splits_path, &mut module_obj)? let cache = apply_splits_file(&splits_path, &mut module_obj)?;
dep.push(splits_path);
cache
} else { } else {
None None
}; };
let symbols_cache = if let Some(symbols_path) = &module_config.symbols { let symbols_cache = if let Some(symbols_path) = &module_config.symbols {
dep.push(symbols_path.clone()); let symbols_path = symbols_path.with_encoding();
apply_symbols_file(symbols_path, &mut module_obj)? let cache = apply_symbols_file(&symbols_path, &mut module_obj)?;
dep.push(symbols_path);
cache
} else { } else {
None None
}; };
@ -1100,7 +1098,7 @@ fn split(args: SplitArgs) -> Result<()> {
} }
let command_start = Instant::now(); let command_start = Instant::now();
info!("Loading {}", args.config.display()); info!("Loading {}", args.config);
let mut config: ProjectConfig = { let mut config: ProjectConfig = {
let mut config_file = open_file(&args.config, true)?; let mut config_file = open_file(&args.config, true)?;
serde_yaml::from_reader(config_file.as_mut())? serde_yaml::from_reader(config_file.as_mut())?
@ -1302,7 +1300,7 @@ fn split(args: SplitArgs) -> Result<()> {
let _span = let _span =
info_span!("module", name = %module.config.name(), id = module.obj.module_id) info_span!("module", name = %module.config.name(), id = module.obj.module_id)
.entered(); .entered();
let out_dir = args.out_dir.join(module.config.name().as_ref()); let out_dir = args.out_dir.join(module.config.name());
split_write_obj(module, &config, &args.out_dir, &out_dir, args.no_update).with_context( split_write_obj(module, &config, &args.out_dir, &out_dir, args.no_update).with_context(
|| { || {
format!( format!(
@ -1363,7 +1361,7 @@ fn split(args: SplitArgs) -> Result<()> {
// Write dep file // Write dep file
{ {
let dep_path = args.out_dir.join("dep"); let dep_path = args.out_dir.join("dep");
let mut dep_file = buf_writer(dep_path)?; let mut dep_file = buf_writer(&dep_path)?;
dep.write(&mut dep_file)?; dep.write(&mut dep_file)?;
dep_file.flush()?; dep_file.flush()?;
} }
@ -1379,8 +1377,7 @@ fn split(args: SplitArgs) -> Result<()> {
} }
#[allow(dead_code)] #[allow(dead_code)]
fn validate<P>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -> Result<()> fn validate(obj: &ObjInfo, elf_file: &Utf8NativePath, state: &AnalyzerState) -> Result<()> {
where P: AsRef<Path> {
let real_obj = process_elf(elf_file)?; let real_obj = process_elf(elf_file)?;
for (section_index, real_section) in real_obj.sections.iter() { for (section_index, real_section) in real_obj.sections.iter() {
let obj_section = match obj.sections.get(section_index) { let obj_section = match obj.sections.get(section_index) {
@ -1553,26 +1550,26 @@ fn symbol_name_fuzzy_eq(a: &ObjSymbol, b: &ObjSymbol) -> bool {
} }
fn diff(args: DiffArgs) -> Result<()> { fn diff(args: DiffArgs) -> Result<()> {
log::info!("Loading {}", args.config.display()); log::info!("Loading {}", args.config);
let mut config_file = open_file(&args.config, true)?; let mut config_file = open_file(&args.config, true)?;
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?; let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
let object_base = find_object_base(&config)?; let object_base = find_object_base(&config)?;
log::info!("Loading {}", object_base.join(&config.base.object).display()); log::info!("Loading {}", object_base.join(&config.base.object));
let mut obj = { let mut obj = {
let mut file = object_base.open(&config.base.object)?; let mut file = object_base.open(&config.base.object)?;
let data = file.map()?; let data = file.map()?;
if let Some(hash_str) = &config.base.hash { if let Some(hash_str) = &config.base.hash {
verify_hash(data, hash_str)?; verify_hash(data, hash_str)?;
} }
process_dol(data, config.base.name().as_ref())? process_dol(data, config.base.name())?
}; };
if let Some(symbols_path) = &config.base.symbols { if let Some(symbols_path) = &config.base.symbols {
apply_symbols_file(symbols_path, &mut obj)?; apply_symbols_file(&symbols_path.with_encoding(), &mut obj)?;
} }
log::info!("Loading {}", args.elf_file.display()); log::info!("Loading {}", args.elf_file);
let linked_obj = process_elf(&args.elf_file)?; let linked_obj = process_elf(&args.elf_file)?;
let common_bss = obj.sections.common_bss_start(); let common_bss = obj.sections.common_bss_start();
@ -1734,29 +1731,30 @@ fn diff(args: DiffArgs) -> Result<()> {
} }
fn apply(args: ApplyArgs) -> Result<()> { fn apply(args: ApplyArgs) -> Result<()> {
log::info!("Loading {}", args.config.display()); log::info!("Loading {}", args.config);
let mut config_file = open_file(&args.config, true)?; let mut config_file = open_file(&args.config, true)?;
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?; let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
let object_base = find_object_base(&config)?; let object_base = find_object_base(&config)?;
log::info!("Loading {}", object_base.join(&config.base.object).display()); log::info!("Loading {}", object_base.join(&config.base.object));
let mut obj = { let mut obj = {
let mut file = object_base.open(&config.base.object)?; let mut file = object_base.open(&config.base.object)?;
let data = file.map()?; let data = file.map()?;
if let Some(hash_str) = &config.base.hash { if let Some(hash_str) = &config.base.hash {
verify_hash(data, hash_str)?; verify_hash(data, hash_str)?;
} }
process_dol(data, config.base.name().as_ref())? process_dol(data, config.base.name())?
}; };
let Some(symbols_path) = &config.base.symbols else { let Some(symbols_path) = &config.base.symbols else {
bail!("No symbols file specified in config"); bail!("No symbols file specified in config");
}; };
let Some(symbols_cache) = apply_symbols_file(symbols_path, &mut obj)? else { let symbols_path = symbols_path.with_encoding();
bail!("Symbols file '{}' does not exist", symbols_path.display()); let Some(symbols_cache) = apply_symbols_file(&symbols_path, &mut obj)? else {
bail!("Symbols file '{}' does not exist", symbols_path);
}; };
log::info!("Loading {}", args.elf_file.display()); log::info!("Loading {}", args.elf_file);
let linked_obj = process_elf(&args.elf_file)?; let linked_obj = process_elf(&args.elf_file)?;
let mut replacements: Vec<(SymbolIndex, Option<ObjSymbol>)> = vec![]; let mut replacements: Vec<(SymbolIndex, Option<ObjSymbol>)> = vec![];
@ -1892,7 +1890,8 @@ fn apply(args: ApplyArgs) -> Result<()> {
} }
} }
write_symbols_file(config.base.symbols.as_ref().unwrap(), &obj, Some(symbols_cache))?; let symbols_path = config.base.symbols.as_ref().unwrap();
write_symbols_file(&symbols_path.with_encoding(), &obj, Some(symbols_cache))?;
Ok(()) Ok(())
} }
@ -1902,39 +1901,41 @@ fn config(args: ConfigArgs) -> Result<()> {
let mut modules = Vec::<(u32, ModuleConfig)>::new(); let mut modules = Vec::<(u32, ModuleConfig)>::new();
for result in FileIterator::new(&args.objects)? { for result in FileIterator::new(&args.objects)? {
let (path, mut entry) = result?; let (path, mut entry) = result?;
log::info!("Loading {}", path.display()); log::info!("Loading {}", path);
let Some(ext) = path.extension() else {
match path.extension() { bail!("No file extension for {}", path);
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("dol")) => { };
config.base.object = path; match ext.to_ascii_lowercase().as_str() {
"dol" => {
config.base.object = path.with_unix_encoding();
config.base.hash = Some(file_sha1_string(&mut entry)?); config.base.hash = Some(file_sha1_string(&mut entry)?);
} }
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rel")) => { "rel" => {
let header = process_rel_header(&mut entry)?; let header = process_rel_header(&mut entry)?;
entry.rewind()?; entry.rewind()?;
modules.push((header.module_id, ModuleConfig { modules.push((header.module_id, ModuleConfig {
object: path, object: path.with_unix_encoding(),
hash: Some(file_sha1_string(&mut entry)?), hash: Some(file_sha1_string(&mut entry)?),
..Default::default() ..Default::default()
})); }));
} }
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("sel")) => { "sel" => {
config.selfile = Some(path); config.selfile = Some(path.with_unix_encoding());
config.selfile_hash = Some(file_sha1_string(&mut entry)?); config.selfile_hash = Some(file_sha1_string(&mut entry)?);
} }
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rso")) => { "rso" => {
config.modules.push(ModuleConfig { config.modules.push(ModuleConfig {
object: path, object: path.with_unix_encoding(),
hash: Some(file_sha1_string(&mut entry)?), hash: Some(file_sha1_string(&mut entry)?),
..Default::default() ..Default::default()
}); });
} }
_ => bail!("Unknown file extension: '{}'", path.display()), _ => bail!("Unknown file extension: '{}'", ext),
} }
} }
modules.sort_by(|(a_id, a_config), (b_id, b_config)| { modules.sort_by(|(a_id, a_config), (b_id, b_config)| {
// Sort by module ID, then by name // Sort by module ID, then by name
a_id.cmp(b_id).then(a_config.name().cmp(&b_config.name())) a_id.cmp(b_id).then(a_config.name().cmp(b_config.name()))
}); });
config.modules.extend(modules.into_iter().map(|(_, m)| m)); config.modules.extend(modules.into_iter().map(|(_, m)| m));
@ -1999,49 +2000,50 @@ fn apply_add_relocations(obj: &mut ObjInfo, relocations: &[AddRelocationConfig])
pub enum ObjectBase { pub enum ObjectBase {
None, None,
Directory(PathBuf), Directory(Utf8NativePathBuf),
Vfs(PathBuf, Box<dyn Vfs + Send + Sync>), Vfs(Utf8NativePathBuf, Box<dyn Vfs + Send + Sync>),
} }
impl ObjectBase { impl ObjectBase {
pub fn join(&self, path: &Path) -> PathBuf { pub fn join(&self, path: &Utf8UnixPath) -> Utf8NativePathBuf {
match self { match self {
ObjectBase::None => path.to_path_buf(), ObjectBase::None => path.with_encoding(),
ObjectBase::Directory(base) => base.join(path), ObjectBase::Directory(base) => base.join(path.with_encoding()),
ObjectBase::Vfs(base, _) => { ObjectBase::Vfs(base, _) => Utf8NativePathBuf::from(format!("{}:{}", base, path)),
PathBuf::from(format!("{}:{}", base.display(), path.display()))
}
} }
} }
pub fn open(&self, path: &Path) -> Result<Box<dyn VfsFile>> { pub fn open(&self, path: &Utf8UnixPath) -> Result<Box<dyn VfsFile>> {
match self { match self {
ObjectBase::None => open_file(path, true), ObjectBase::None => open_file(&path.with_encoding(), true),
ObjectBase::Directory(base) => open_file(&base.join(path), true), ObjectBase::Directory(base) => open_file(&base.join(path.with_encoding()), true),
ObjectBase::Vfs(vfs_path, vfs) => open_file_with_fs(vfs.clone(), path, true) ObjectBase::Vfs(vfs_path, vfs) => {
.with_context(|| format!("Using disc image {}", vfs_path.display())), open_file_with_fs(vfs.clone(), &path.with_encoding(), true)
.with_context(|| format!("Using disc image {}", vfs_path))
}
} }
} }
} }
pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> { pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
if let Some(base) = &config.object_base { if let Some(base) = &config.object_base {
let base = base.with_encoding();
// Search for disc images in the object base directory // Search for disc images in the object base directory
for result in base.read_dir()? { for result in fs::read_dir(&base)? {
let entry = result?; let entry = result?;
if entry.file_type()?.is_file() { if entry.file_type()?.is_file() {
let path = entry.path(); let path = check_path_buf(entry.path())?;
let mut file = open_file(&path, false)?; let mut file = open_file(&path, false)?;
let format = nodtool::nod::Disc::detect(file.as_mut())?; let format = nodtool::nod::Disc::detect(file.as_mut())?;
if let Some(format) = format { if let Some(format) = format {
file.rewind()?; file.rewind()?;
log::info!("Using disc image {}", path.display()); log::info!("Using disc image {}", path);
let fs = open_fs(file, ArchiveKind::Disc(format))?; let fs = open_fs(file, ArchiveKind::Disc(format))?;
return Ok(ObjectBase::Vfs(path, fs)); return Ok(ObjectBase::Vfs(path, fs));
} }
} }
} }
return Ok(ObjectBase::Directory(base.clone())); return Ok(ObjectBase::Directory(base));
} }
Ok(ObjectBase::None) Ok(ObjectBase::None)
} }

View File

@ -2,7 +2,6 @@ use std::{
collections::{btree_map, BTreeMap}, collections::{btree_map, BTreeMap},
io::{stdout, Cursor, Read, Write}, io::{stdout, Cursor, Read, Write},
ops::Bound::{Excluded, Unbounded}, ops::Bound::{Excluded, Unbounded},
path::PathBuf,
str::from_utf8, str::from_utf8,
}; };
@ -15,6 +14,7 @@ use syntect::{
highlighting::{Color, HighlightIterator, HighlightState, Highlighter, Theme, ThemeSet}, highlighting::{Color, HighlightIterator, HighlightState, Highlighter, Theme, ThemeSet},
parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet}, parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet},
}; };
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
util::{ util::{
@ -23,6 +23,7 @@ use crate::{
should_skip_tag, tag_type_string, AttributeKind, TagKind, should_skip_tag, tag_type_string, AttributeKind, TagKind,
}, },
file::buf_writer, file::buf_writer,
path::native_path,
}, },
vfs::open_file, vfs::open_file,
}; };
@ -45,12 +46,12 @@ enum SubCommand {
/// Dumps DWARF 1.1 info from an object or archive. /// Dumps DWARF 1.1 info from an object or archive.
#[argp(subcommand, name = "dump")] #[argp(subcommand, name = "dump")]
pub struct DumpArgs { pub struct DumpArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// Input object. (ELF or archive) /// Input object. (ELF or archive)
in_file: PathBuf, in_file: Utf8NativePathBuf,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// Output file. (Or directory, for archive) /// Output file. (Or directory, for archive)
out: Option<PathBuf>, out: Option<Utf8NativePathBuf>,
#[argp(switch)] #[argp(switch)]
/// Disable color output. /// Disable color output.
no_color: bool, no_color: bool,
@ -104,7 +105,7 @@ fn dump(args: DumpArgs) -> Result<()> {
let name = name.trim_start_matches("D:").replace('\\', "/"); let name = name.trim_start_matches("D:").replace('\\', "/");
let name = name.rsplit_once('/').map(|(_, b)| b).unwrap_or(&name); let name = name.rsplit_once('/').map(|(_, b)| b).unwrap_or(&name);
let file_path = out_path.join(format!("{}.txt", name)); let file_path = out_path.join(format!("{}.txt", name));
let mut file = buf_writer(file_path)?; let mut file = buf_writer(&file_path)?;
dump_debug_section(&args, &mut file, &obj_file, debug_section)?; dump_debug_section(&args, &mut file, &obj_file, debug_section)?;
file.flush()?; file.flush()?;
} else if args.no_color { } else if args.no_color {

View File

@ -3,7 +3,6 @@ use std::{
fs, fs,
fs::DirBuilder, fs::DirBuilder,
io::{Cursor, Write}, io::{Cursor, Write},
path::PathBuf,
}; };
use anyhow::{anyhow, bail, ensure, Context, Result}; use anyhow::{anyhow, bail, ensure, Context, Result};
@ -15,6 +14,7 @@ use object::{
FileFlags, Object, ObjectSection, ObjectSymbol, RelocationTarget, SectionFlags, SectionIndex, FileFlags, Object, ObjectSection, ObjectSymbol, RelocationTarget, SectionFlags, SectionIndex,
SectionKind, SymbolFlags, SymbolIndex, SymbolKind, SymbolScope, SymbolSection, SectionKind, SymbolFlags, SymbolIndex, SymbolKind, SymbolScope, SymbolSection,
}; };
use typed_path::{Utf8NativePath, Utf8NativePathBuf};
use crate::{ use crate::{
obj::ObjKind, obj::ObjKind,
@ -24,6 +24,7 @@ use crate::{
config::{write_splits_file, write_symbols_file}, config::{write_splits_file, write_symbols_file},
elf::{process_elf, write_elf}, elf::{process_elf, write_elf},
file::{buf_writer, process_rsp}, file::{buf_writer, process_rsp},
path::native_path,
reader::{Endian, FromReader}, reader::{Endian, FromReader},
signatures::{compare_signature, generate_signature, FunctionSignature}, signatures::{compare_signature, generate_signature, FunctionSignature},
split::split_obj, split::split_obj,
@ -54,72 +55,72 @@ enum SubCommand {
/// Disassembles an ELF file. /// Disassembles an ELF file.
#[argp(subcommand, name = "disasm")] #[argp(subcommand, name = "disasm")]
pub struct DisasmArgs { pub struct DisasmArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input file /// input file
elf_file: PathBuf, elf_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output file (.o) or directory (.elf) /// output file (.o) or directory (.elf)
out: PathBuf, out: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Fixes issues with GNU assembler built object files. /// Fixes issues with GNU assembler built object files.
#[argp(subcommand, name = "fixup")] #[argp(subcommand, name = "fixup")]
pub struct FixupArgs { pub struct FixupArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input file /// input file
in_file: PathBuf, in_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output file /// output file
out_file: PathBuf, out_file: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Splits an executable ELF into relocatable objects. /// Splits an executable ELF into relocatable objects.
#[argp(subcommand, name = "split")] #[argp(subcommand, name = "split")]
pub struct SplitArgs { pub struct SplitArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input file /// input file
in_file: PathBuf, in_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output directory /// output directory
out_dir: PathBuf, out_dir: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Generates configuration files from an executable ELF. /// Generates configuration files from an executable ELF.
#[argp(subcommand, name = "config")] #[argp(subcommand, name = "config")]
pub struct ConfigArgs { pub struct ConfigArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input file /// input file
in_file: PathBuf, in_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output directory /// output directory
out_dir: PathBuf, out_dir: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Builds function signatures from an ELF file. /// Builds function signatures from an ELF file.
#[argp(subcommand, name = "sigs")] #[argp(subcommand, name = "sigs")]
pub struct SignaturesArgs { pub struct SignaturesArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input file(s) /// input file(s)
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 's')] #[argp(option, short = 's')]
/// symbol name /// symbol name
symbol: String, symbol: String,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// output yml /// output yml
out_file: PathBuf, out_file: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Prints information about an ELF file. /// Prints information about an ELF file.
#[argp(subcommand, name = "info")] #[argp(subcommand, name = "info")]
pub struct InfoArgs { pub struct InfoArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input file /// input file
input: PathBuf, input: Utf8NativePathBuf,
} }
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
@ -134,17 +135,17 @@ pub fn run(args: Args) -> Result<()> {
} }
fn config(args: ConfigArgs) -> Result<()> { fn config(args: ConfigArgs) -> Result<()> {
log::info!("Loading {}", args.in_file.display()); log::info!("Loading {}", args.in_file);
let obj = process_elf(&args.in_file)?; let obj = process_elf(&args.in_file)?;
DirBuilder::new().recursive(true).create(&args.out_dir)?; DirBuilder::new().recursive(true).create(&args.out_dir)?;
write_symbols_file(args.out_dir.join("symbols.txt"), &obj, None)?; write_symbols_file(&args.out_dir.join("symbols.txt"), &obj, None)?;
write_splits_file(args.out_dir.join("splits.txt"), &obj, false, None)?; write_splits_file(&args.out_dir.join("splits.txt"), &obj, false, None)?;
Ok(()) Ok(())
} }
fn disasm(args: DisasmArgs) -> Result<()> { fn disasm(args: DisasmArgs) -> Result<()> {
log::info!("Loading {}", args.elf_file.display()); log::info!("Loading {}", args.elf_file);
let obj = process_elf(&args.elf_file)?; let obj = process_elf(&args.elf_file)?;
match obj.kind { match obj.kind {
ObjKind::Executable => { ObjKind::Executable => {
@ -156,12 +157,12 @@ fn disasm(args: DisasmArgs) -> Result<()> {
DirBuilder::new().recursive(true).create(&include_dir)?; DirBuilder::new().recursive(true).create(&include_dir)?;
fs::write(include_dir.join("macros.inc"), include_bytes!("../../assets/macros.inc"))?; fs::write(include_dir.join("macros.inc"), include_bytes!("../../assets/macros.inc"))?;
let mut files_out = buf_writer(args.out.join("link_order.txt"))?; let mut files_out = buf_writer(&args.out.join("link_order.txt"))?;
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) { for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
let out_path = asm_dir.join(file_name_from_unit(&unit.name, ".s")); let out_path = asm_dir.join(file_name_from_unit(&unit.name, ".s"));
log::info!("Writing {}", out_path.display()); log::info!("Writing {}", out_path);
let mut w = buf_writer(out_path)?; let mut w = buf_writer(&out_path)?;
write_asm(&mut w, split_obj)?; write_asm(&mut w, split_obj)?;
w.flush()?; w.flush()?;
@ -170,7 +171,7 @@ fn disasm(args: DisasmArgs) -> Result<()> {
files_out.flush()?; files_out.flush()?;
} }
ObjKind::Relocatable => { ObjKind::Relocatable => {
let mut w = buf_writer(args.out)?; let mut w = buf_writer(&args.out)?;
write_asm(&mut w, &obj)?; write_asm(&mut w, &obj)?;
w.flush()?; w.flush()?;
} }
@ -193,18 +194,17 @@ fn split(args: SplitArgs) -> Result<()> {
}; };
} }
let mut rsp_file = buf_writer("rsp")?; let mut rsp_file = buf_writer(Utf8NativePath::new("rsp"))?;
for unit in &obj.link_order { for unit in &obj.link_order {
let object = file_map let object = file_map
.get(&unit.name) .get(&unit.name)
.ok_or_else(|| anyhow!("Failed to find object file for unit '{}'", unit.name))?; .ok_or_else(|| anyhow!("Failed to find object file for unit '{}'", unit.name))?;
let out_path = args.out_dir.join(file_name_from_unit(&unit.name, ".o")); let out_path = args.out_dir.join(file_name_from_unit(&unit.name, ".o"));
writeln!(rsp_file, "{}", out_path.display())?; writeln!(rsp_file, "{}", out_path)?;
if let Some(parent) = out_path.parent() { if let Some(parent) = out_path.parent() {
DirBuilder::new().recursive(true).create(parent)?; DirBuilder::new().recursive(true).create(parent)?;
} }
fs::write(&out_path, object) fs::write(&out_path, object).with_context(|| format!("Failed to write '{}'", out_path))?;
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
} }
rsp_file.flush()?; rsp_file.flush()?;
Ok(()) Ok(())
@ -237,7 +237,7 @@ const ASM_SUFFIX: &str = " (asm)";
fn fixup(args: FixupArgs) -> Result<()> { fn fixup(args: FixupArgs) -> Result<()> {
let in_buf = fs::read(&args.in_file) let in_buf = fs::read(&args.in_file)
.with_context(|| format!("Failed to open input file: '{}'", args.in_file.display()))?; .with_context(|| format!("Failed to open input file: '{}'", args.in_file))?;
let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?; let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?;
let mut out_file = let mut out_file =
object::write::Object::new(in_file.format(), in_file.architecture(), in_file.endianness()); object::write::Object::new(in_file.format(), in_file.architecture(), in_file.endianness());
@ -262,10 +262,7 @@ fn fixup(args: FixupArgs) -> Result<()> {
let file_name = args let file_name = args
.in_file .in_file
.file_name() .file_name()
.ok_or_else(|| anyhow!("'{}' is not a file path", args.in_file.display()))?; .ok_or_else(|| anyhow!("'{}' is not a file path", args.in_file))?;
let file_name = file_name
.to_str()
.ok_or_else(|| anyhow!("'{}' is not valid UTF-8", file_name.to_string_lossy()))?;
let mut name_bytes = file_name.as_bytes().to_vec(); let mut name_bytes = file_name.as_bytes().to_vec();
name_bytes.append(&mut ASM_SUFFIX.as_bytes().to_vec()); name_bytes.append(&mut ASM_SUFFIX.as_bytes().to_vec());
out_file.add_symbol(object::write::Symbol { out_file.add_symbol(object::write::Symbol {
@ -445,7 +442,7 @@ fn signatures(args: SignaturesArgs) -> Result<()> {
let mut signatures: HashMap<String, FunctionSignature> = HashMap::new(); let mut signatures: HashMap<String, FunctionSignature> = HashMap::new();
for path in files { for path in files {
log::info!("Processing {}", path.display()); log::info!("Processing {}", path);
let signature = match generate_signature(&path, &args.symbol) { let signature = match generate_signature(&path, &args.symbol) {
Ok(Some(signature)) => signature, Ok(Some(signature)) => signature,
Ok(None) => continue, Ok(None) => continue,
@ -472,7 +469,7 @@ fn signatures(args: SignaturesArgs) -> Result<()> {
fn info(args: InfoArgs) -> Result<()> { fn info(args: InfoArgs) -> Result<()> {
let in_buf = fs::read(&args.input) let in_buf = fs::read(&args.input)
.with_context(|| format!("Failed to open input file: '{}'", args.input.display()))?; .with_context(|| format!("Failed to open input file: '{}'", args.input))?;
let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?; let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?;
println!("ELF type: {:?}", in_file.kind()); println!("ELF type: {:?}", in_file.kind());

View File

@ -1,24 +1,25 @@
use std::{ use std::io::{Seek, SeekFrom, Write};
io::{Seek, SeekFrom, Write},
path::PathBuf,
};
use anyhow::{anyhow, bail, ensure, Result}; use anyhow::{anyhow, bail, ensure, Result};
use argp::FromArgs; use argp::FromArgs;
use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, SectionKind}; use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, SectionKind};
use typed_path::Utf8NativePathBuf;
use crate::{util::file::buf_writer, vfs::open_file}; use crate::{
util::{file::buf_writer, path::native_path},
vfs::open_file,
};
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Converts an ELF file to a DOL file. /// Converts an ELF file to a DOL file.
#[argp(subcommand, name = "elf2dol")] #[argp(subcommand, name = "elf2dol")]
pub struct Args { pub struct Args {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// path to input ELF /// path to input ELF
elf_file: PathBuf, elf_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// path to output DOL /// path to output DOL
dol_file: PathBuf, dol_file: Utf8NativePathBuf,
/// sections (by name) to ignore /// sections (by name) to ignore
#[argp(option, long = "ignore")] #[argp(option, long = "ignore")]
deny_sections: Vec<String>, deny_sections: Vec<String>,

View File

@ -1,14 +1,16 @@
use std::{fs::DirBuilder, path::PathBuf}; use std::fs::DirBuilder;
use anyhow::{bail, ensure, Result}; use anyhow::{bail, ensure, Result};
use argp::FromArgs; use argp::FromArgs;
use cwdemangle::{demangle, DemangleOptions}; use cwdemangle::{demangle, DemangleOptions};
use tracing::error; use tracing::error;
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
util::{ util::{
config::{write_splits_file, write_symbols_file}, config::{write_splits_file, write_symbols_file},
map::{create_obj, process_map, SymbolEntry, SymbolRef}, map::{create_obj, process_map, SymbolEntry, SymbolRef},
path::native_path,
split::update_splits, split::update_splits,
}, },
vfs::open_file, vfs::open_file,
@ -34,9 +36,9 @@ enum SubCommand {
/// Displays all entries for a particular TU. /// Displays all entries for a particular TU.
#[argp(subcommand, name = "entries")] #[argp(subcommand, name = "entries")]
pub struct EntriesArgs { pub struct EntriesArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// path to input map /// path to input map
map_file: PathBuf, map_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional)]
/// TU to display entries for /// TU to display entries for
unit: String, unit: String,
@ -46,9 +48,9 @@ pub struct EntriesArgs {
/// Displays all references to a symbol. /// Displays all references to a symbol.
#[argp(subcommand, name = "symbol")] #[argp(subcommand, name = "symbol")]
pub struct SymbolArgs { pub struct SymbolArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// path to input map /// path to input map
map_file: PathBuf, map_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional)]
/// symbol to display references for /// symbol to display references for
symbol: String, symbol: String,
@ -58,12 +60,12 @@ pub struct SymbolArgs {
/// Generates project configuration files from a map. (symbols.txt, splits.txt) /// Generates project configuration files from a map. (symbols.txt, splits.txt)
#[argp(subcommand, name = "config")] #[argp(subcommand, name = "config")]
pub struct ConfigArgs { pub struct ConfigArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// path to input map /// path to input map
map_file: PathBuf, map_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// output directory for symbols.txt and splits.txt /// output directory for symbols.txt and splits.txt
out_dir: PathBuf, out_dir: Utf8NativePathBuf,
} }
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
@ -189,8 +191,8 @@ fn config(args: ConfigArgs) -> Result<()> {
error!("Failed to update splits: {}", e) error!("Failed to update splits: {}", e)
} }
DirBuilder::new().recursive(true).create(&args.out_dir)?; DirBuilder::new().recursive(true).create(&args.out_dir)?;
write_symbols_file(args.out_dir.join("symbols.txt"), &obj, None)?; write_symbols_file(&args.out_dir.join("symbols.txt"), &obj, None)?;
write_splits_file(args.out_dir.join("splits.txt"), &obj, false, None)?; write_splits_file(&args.out_dir.join("splits.txt"), &obj, false, None)?;
log::info!("Done!"); log::info!("Done!");
Ok(()) Ok(())
} }

View File

@ -1,10 +1,11 @@
use std::{fs, path::PathBuf}; use std::fs;
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use argp::FromArgs; use argp::FromArgs;
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
util::{file::process_rsp, nlzss, IntoCow, ToCow}, util::{file::process_rsp, nlzss, path::native_path, IntoCow, ToCow},
vfs::open_file, vfs::open_file,
}; };
@ -26,13 +27,13 @@ enum SubCommand {
/// Decompresses NLZSS-compressed files. /// Decompresses NLZSS-compressed files.
#[argp(subcommand, name = "decompress")] #[argp(subcommand, name = "decompress")]
pub struct DecompressArgs { pub struct DecompressArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// NLZSS-compressed file(s) /// NLZSS-compressed file(s)
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// Output file (or directory, if multiple files are specified). /// Output file (or directory, if multiple files are specified).
/// If not specified, decompresses in-place. /// If not specified, decompresses in-place.
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
} }
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
@ -47,7 +48,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
for path in files { for path in files {
let mut file = open_file(&path, false)?; let mut file = open_file(&path, false)?;
let data = nlzss::decompress(file.as_mut()) let data = nlzss::decompress(file.as_mut())
.map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path.display(), e))?; .map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path, e))?;
let out_path = if let Some(output) = &args.output { let out_path = if let Some(output) = &args.output {
if single_file { if single_file {
output.as_path().to_cow() output.as_path().to_cow()
@ -58,7 +59,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
path.as_path().to_cow() path.as_path().to_cow()
}; };
fs::write(out_path.as_ref(), data) fs::write(out_path.as_ref(), data)
.with_context(|| format!("Failed to write '{}'", out_path.display()))?; .with_context(|| format!("Failed to write '{}'", out_path))?;
} }
Ok(()) Ok(())
} }

View File

@ -1,9 +1,9 @@
use std::path::PathBuf;
use anyhow::Result; use anyhow::Result;
use argp::FromArgs; use argp::FromArgs;
use typed_path::Utf8NativePathBuf;
use super::vfs; use super::vfs;
use crate::util::path::native_path;
#[derive(FromArgs, PartialEq, Debug)] #[derive(FromArgs, PartialEq, Debug)]
/// Commands for processing RSO files. /// Commands for processing RSO files.
@ -24,9 +24,9 @@ enum SubCommand {
/// Views RARC file information. /// Views RARC file information.
#[argp(subcommand, name = "list")] #[argp(subcommand, name = "list")]
pub struct ListArgs { pub struct ListArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// RARC file /// RARC file
file: PathBuf, file: Utf8NativePathBuf,
#[argp(switch, short = 's')] #[argp(switch, short = 's')]
/// Only print filenames. /// Only print filenames.
short: bool, short: bool,
@ -36,12 +36,12 @@ pub struct ListArgs {
/// Extracts RARC file contents. /// Extracts RARC file contents.
#[argp(subcommand, name = "extract")] #[argp(subcommand, name = "extract")]
pub struct ExtractArgs { pub struct ExtractArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// RARC file /// RARC file
file: PathBuf, file: Utf8NativePathBuf,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// output directory /// output directory
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
#[argp(switch)] #[argp(switch)]
/// Do not decompress files when copying. /// Do not decompress files when copying.
no_decompress: bool, no_decompress: bool,
@ -58,13 +58,13 @@ pub fn run(args: Args) -> Result<()> {
} }
fn list(args: ListArgs) -> Result<()> { fn list(args: ListArgs) -> Result<()> {
let path = PathBuf::from(format!("{}:", args.file.display())); let path = Utf8NativePathBuf::from(format!("{}:", args.file));
vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true }) vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true })
} }
fn extract(args: ExtractArgs) -> Result<()> { fn extract(args: ExtractArgs) -> Result<()> {
let path = PathBuf::from(format!("{}:", args.file.display())); let path = Utf8NativePathBuf::from(format!("{}:", args.file));
let output = args.output.unwrap_or_else(|| PathBuf::from(".")); let output = args.output.unwrap_or_else(|| Utf8NativePathBuf::from("."));
vfs::cp(vfs::CpArgs { vfs::cp(vfs::CpArgs {
paths: vec![path, output], paths: vec![path, output],
no_decompress: args.no_decompress, no_decompress: args.no_decompress,

View File

@ -2,7 +2,6 @@ use std::{
collections::{btree_map, BTreeMap}, collections::{btree_map, BTreeMap},
fs, fs,
io::{Cursor, Write}, io::{Cursor, Write},
path::PathBuf,
time::Instant, time::Instant,
}; };
@ -15,6 +14,7 @@ use object::{
use rayon::prelude::*; use rayon::prelude::*;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use tracing::{info, info_span}; use tracing::{info, info_span};
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
analysis::{ analysis::{
@ -38,6 +38,7 @@ use crate::{
elf::{to_obj_reloc_kind, write_elf}, elf::{to_obj_reloc_kind, write_elf},
file::{buf_writer, process_rsp, verify_hash, FileIterator}, file::{buf_writer, process_rsp, verify_hash, FileIterator},
nested::NestedMap, nested::NestedMap,
path::native_path,
rel::{ rel::{
print_relocations, process_rel, process_rel_header, process_rel_sections, write_rel, print_relocations, process_rel, process_rel_header, process_rel_sections, write_rel,
RelHeader, RelReloc, RelSectionHeader, RelWriteInfo, PERMITTED_SECTIONS, RelHeader, RelReloc, RelSectionHeader, RelWriteInfo, PERMITTED_SECTIONS,
@ -67,9 +68,9 @@ enum SubCommand {
/// Views REL file information. /// Views REL file information.
#[argp(subcommand, name = "info")] #[argp(subcommand, name = "info")]
pub struct InfoArgs { pub struct InfoArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// REL file /// REL file
rel_file: PathBuf, rel_file: Utf8NativePathBuf,
#[argp(switch, short = 'r')] #[argp(switch, short = 'r')]
/// print relocations /// print relocations
relocations: bool, relocations: bool,
@ -79,27 +80,27 @@ pub struct InfoArgs {
/// Merges a DOL + REL(s) into an ELF. /// Merges a DOL + REL(s) into an ELF.
#[argp(subcommand, name = "merge")] #[argp(subcommand, name = "merge")]
pub struct MergeArgs { pub struct MergeArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// DOL file /// DOL file
dol_file: PathBuf, dol_file: Utf8NativePathBuf,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// REL file(s) /// REL file(s)
rel_files: Vec<PathBuf>, rel_files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// output ELF /// output ELF
out_file: PathBuf, out_file: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Creates RELs from an ELF + PLF(s). /// Creates RELs from an ELF + PLF(s).
#[argp(subcommand, name = "make")] #[argp(subcommand, name = "make")]
pub struct MakeArgs { pub struct MakeArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// input file(s) /// input file(s)
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'c')] #[argp(option, short = 'c', from_str_fn(native_path))]
/// (optional) project configuration file /// (optional) project configuration file
config: Option<PathBuf>, config: Option<Utf8NativePathBuf>,
#[argp(option, short = 'n')] #[argp(option, short = 'n')]
/// (optional) module names /// (optional) module names
names: Vec<String>, names: Vec<String>,
@ -176,7 +177,7 @@ fn load_rel(module_config: &ModuleConfig, object_base: &ObjectBase) -> Result<Re
let header = process_rel_header(&mut reader)?; let header = process_rel_header(&mut reader)?;
let sections = process_rel_sections(&mut reader, &header)?; let sections = process_rel_sections(&mut reader, &header)?;
let section_defs = if let Some(splits_path) = &module_config.splits { let section_defs = if let Some(splits_path) = &module_config.splits {
read_splits_sections(splits_path)? read_splits_sections(&splits_path.with_encoding())?
} else { } else {
None None
}; };
@ -186,7 +187,7 @@ fn load_rel(module_config: &ModuleConfig, object_base: &ObjectBase) -> Result<Re
struct LoadedModule<'a> { struct LoadedModule<'a> {
module_id: u32, module_id: u32,
file: File<'a>, file: File<'a>,
path: PathBuf, path: Utf8NativePathBuf,
} }
fn resolve_relocations( fn resolve_relocations(
@ -273,12 +274,12 @@ fn make(args: MakeArgs) -> Result<()> {
let object_base = find_object_base(&config)?; let object_base = find_object_base(&config)?;
for module_config in &config.modules { for module_config in &config.modules {
let module_name = module_config.name(); let module_name = module_config.name();
if !args.names.is_empty() && !args.names.iter().any(|n| n == &module_name) { if !args.names.is_empty() && !args.names.iter().any(|n| n == module_name) {
continue; continue;
} }
let _span = info_span!("module", name = %module_name).entered(); let _span = info_span!("module", name = %module_name).entered();
let info = load_rel(module_config, &object_base).with_context(|| { let info = load_rel(module_config, &object_base).with_context(|| {
format!("While loading REL '{}'", object_base.join(&module_config.object).display()) format!("While loading REL '{}'", object_base.join(&module_config.object))
})?; })?;
name_to_module_id.insert(module_name.to_string(), info.0.module_id); name_to_module_id.insert(module_name.to_string(), info.0.module_id);
match existing_headers.entry(info.0.module_id) { match existing_headers.entry(info.0.module_id) {
@ -312,7 +313,7 @@ fn make(args: MakeArgs) -> Result<()> {
.unwrap_or(idx as u32); .unwrap_or(idx as u32);
load_obj(file.map()?) load_obj(file.map()?)
.map(|o| LoadedModule { module_id, file: o, path: path.clone() }) .map(|o| LoadedModule { module_id, file: o, path: path.clone() })
.with_context(|| format!("Failed to load '{}'", path.display())) .with_context(|| format!("Failed to load '{}'", path))
}) })
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
@ -320,7 +321,7 @@ fn make(args: MakeArgs) -> Result<()> {
let start = Instant::now(); let start = Instant::now();
let mut symbol_map = FxHashMap::<&[u8], (u32, SymbolIndex)>::default(); let mut symbol_map = FxHashMap::<&[u8], (u32, SymbolIndex)>::default();
for module_info in modules.iter() { for module_info in modules.iter() {
let _span = info_span!("file", path = %module_info.path.display()).entered(); let _span = info_span!("file", path = %module_info.path).entered();
for symbol in module_info.file.symbols() { for symbol in module_info.file.symbols() {
if symbol.scope() == object::SymbolScope::Dynamic { if symbol.scope() == object::SymbolScope::Dynamic {
symbol_map symbol_map
@ -335,7 +336,7 @@ fn make(args: MakeArgs) -> Result<()> {
let mut relocations = Vec::<Vec<RelReloc>>::with_capacity(modules.len() - 1); let mut relocations = Vec::<Vec<RelReloc>>::with_capacity(modules.len() - 1);
relocations.resize_with(modules.len() - 1, Vec::new); relocations.resize_with(modules.len() - 1, Vec::new);
for (module_info, relocations) in modules.iter().skip(1).zip(&mut relocations) { for (module_info, relocations) in modules.iter().skip(1).zip(&mut relocations) {
let _span = info_span!("file", path = %module_info.path.display()).entered(); let _span = info_span!("file", path = %module_info.path).entered();
resolved += resolve_relocations( resolved += resolve_relocations(
&module_info.file, &module_info.file,
&existing_headers, &existing_headers,
@ -344,9 +345,7 @@ fn make(args: MakeArgs) -> Result<()> {
&modules, &modules,
relocations, relocations,
) )
.with_context(|| { .with_context(|| format!("While resolving relocations in '{}'", module_info.path))?;
format!("While resolving relocations in '{}'", module_info.path.display())
})?;
} }
if !args.quiet { if !args.quiet {
@ -362,7 +361,7 @@ fn make(args: MakeArgs) -> Result<()> {
// Write RELs // Write RELs
let start = Instant::now(); let start = Instant::now();
for (module_info, relocations) in modules.iter().skip(1).zip(relocations) { for (module_info, relocations) in modules.iter().skip(1).zip(relocations) {
let _span = info_span!("file", path = %module_info.path.display()).entered(); let _span = info_span!("file", path = %module_info.path).entered();
let mut info = RelWriteInfo { let mut info = RelWriteInfo {
module_id: module_info.module_id, module_id: module_info.module_id,
version: 3, version: 3,
@ -393,7 +392,7 @@ fn make(args: MakeArgs) -> Result<()> {
let rel_path = module_info.path.with_extension("rel"); let rel_path = module_info.path.with_extension("rel");
let mut w = buf_writer(&rel_path)?; let mut w = buf_writer(&rel_path)?;
write_rel(&mut w, &info, &module_info.file, relocations) write_rel(&mut w, &info, &module_info.file, relocations)
.with_context(|| format!("Failed to write '{}'", rel_path.display()))?; .with_context(|| format!("Failed to write '{}'", rel_path))?;
w.flush()?; w.flush()?;
} }
@ -476,11 +475,11 @@ fn info(args: InfoArgs) -> Result<()> {
const fn align32(x: u32) -> u32 { (x + 31) & !31 } const fn align32(x: u32) -> u32 { (x + 31) & !31 }
fn merge(args: MergeArgs) -> Result<()> { fn merge(args: MergeArgs) -> Result<()> {
log::info!("Loading {}", args.dol_file.display()); log::info!("Loading {}", args.dol_file);
let mut obj = { let mut obj = {
let mut file = open_file(&args.dol_file, true)?; let mut file = open_file(&args.dol_file, true)?;
let name = args.dol_file.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default(); let name = args.dol_file.file_stem().unwrap_or_default();
process_dol(file.map()?, name.as_ref())? process_dol(file.map()?, name)?
}; };
log::info!("Performing signature analysis"); log::info!("Performing signature analysis");
@ -491,9 +490,9 @@ fn merge(args: MergeArgs) -> Result<()> {
let mut module_map = BTreeMap::<u32, ObjInfo>::new(); let mut module_map = BTreeMap::<u32, ObjInfo>::new();
for result in FileIterator::new(&args.rel_files)? { for result in FileIterator::new(&args.rel_files)? {
let (path, mut entry) = result?; let (path, mut entry) = result?;
log::info!("Loading {}", path.display()); log::info!("Loading {}", path);
let name = path.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default(); let name = path.file_stem().unwrap_or_default();
let (_, obj) = process_rel(&mut entry, name.as_ref())?; let (_, obj) = process_rel(&mut entry, name)?;
match module_map.entry(obj.module_id) { match module_map.entry(obj.module_id) {
btree_map::Entry::Vacant(e) => e.insert(obj), btree_map::Entry::Vacant(e) => e.insert(obj),
btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id), btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),
@ -610,7 +609,7 @@ fn merge(args: MergeArgs) -> Result<()> {
tracker.apply(&mut obj, false)?; tracker.apply(&mut obj, false)?;
// Write ELF // Write ELF
log::info!("Writing {}", args.out_file.display()); log::info!("Writing {}", args.out_file);
fs::write(&args.out_file, write_elf(&obj, false)?)?; fs::write(&args.out_file, write_elf(&obj, false)?)?;
Ok(()) Ok(())
} }

View File

@ -1,7 +1,4 @@
use std::{ use std::io::{BufRead, Seek, Write};
io::{BufRead, Seek, Write},
path::{Path, PathBuf},
};
use anyhow::{bail, ensure, Context, Result}; use anyhow::{bail, ensure, Context, Result};
use argp::FromArgs; use argp::FromArgs;
@ -10,10 +7,12 @@ use object::{
Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, SectionKind, Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, SectionKind,
SymbolIndex, SymbolKind, SymbolSection, SymbolIndex, SymbolKind, SymbolSection,
}; };
use typed_path::{Utf8NativePath, Utf8NativePathBuf};
use crate::{ use crate::{
util::{ util::{
file::buf_writer, file::buf_writer,
path::native_path,
reader::{Endian, ToWriter}, reader::{Endian, ToWriter},
rso::{ rso::{
process_rso, symbol_hash, RsoHeader, RsoRelocation, RsoSectionHeader, RsoSymbol, process_rso, symbol_hash, RsoHeader, RsoRelocation, RsoSectionHeader, RsoSymbol,
@ -42,30 +41,30 @@ enum SubCommand {
/// Views RSO file information. /// Views RSO file information.
#[argp(subcommand, name = "info")] #[argp(subcommand, name = "info")]
pub struct InfoArgs { pub struct InfoArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// RSO file /// RSO file
rso_file: PathBuf, rso_file: Utf8NativePathBuf,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Creates an RSO from an ELF. /// Creates an RSO from an ELF.
#[argp(subcommand, name = "make")] #[argp(subcommand, name = "make")]
pub struct MakeArgs { pub struct MakeArgs {
#[argp(positional, arg_name = "ELF File")] #[argp(positional, arg_name = "ELF File", from_str_fn(native_path))]
/// elf file /// elf file
input: PathBuf, input: Utf8NativePathBuf,
#[argp(option, short = 'o', arg_name = "File")] #[argp(option, short = 'o', arg_name = "File", from_str_fn(native_path))]
/// output file path /// output file path
output: PathBuf, output: Utf8NativePathBuf,
#[argp(option, short = 'm', arg_name = "Name")] #[argp(option, short = 'm', arg_name = "Name")]
/// module name (or path). Default: input name /// module name (or path). Default: input name
module_name: Option<String>, module_name: Option<String>,
#[argp(option, short = 'e', arg_name = "File")] #[argp(option, short = 'e', arg_name = "File", from_str_fn(native_path))]
/// file containing exported symbol names (newline separated) /// file containing exported symbol names (newline separated)
export: Option<PathBuf>, export: Option<Utf8NativePathBuf>,
} }
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
@ -78,9 +77,7 @@ pub fn run(args: Args) -> Result<()> {
fn info(args: InfoArgs) -> Result<()> { fn info(args: InfoArgs) -> Result<()> {
let rso = { let rso = {
let mut file = open_file(&args.rso_file, true)?; let mut file = open_file(&args.rso_file, true)?;
let obj = process_rso(file.as_mut())?; process_rso(file.as_mut())?
#[allow(clippy::let_and_return)]
obj
}; };
println!("Read RSO module {}", rso.name); println!("Read RSO module {}", rso.name);
Ok(()) Ok(())
@ -97,7 +94,7 @@ fn make(args: MakeArgs) -> Result<()> {
let module_name = match args.module_name { let module_name = match args.module_name {
Some(n) => n, Some(n) => n,
None => args.input.display().to_string(), None => args.input.to_string(),
}; };
let symbols_to_export = match &args.export { let symbols_to_export = match &args.export {
@ -121,18 +118,18 @@ fn make(args: MakeArgs) -> Result<()> {
Ok(()) Ok(())
} }
fn make_sel<P: AsRef<Path>>( fn make_sel(
_file: object::File, _file: object::File,
_output: P, _output: &Utf8NativePath,
_module_name: &str, _module_name: &str,
_symbols_to_export: Vec<String>, _symbols_to_export: Vec<String>,
) -> Result<()> { ) -> Result<()> {
bail!("Creating SEL files is not supported yet."); bail!("Creating SEL files is not supported yet.");
} }
fn make_rso<P: AsRef<Path>>( fn make_rso(
file: object::File, file: object::File,
output: P, output: &Utf8NativePath,
module_name: &str, module_name: &str,
symbols_to_export: Vec<String>, symbols_to_export: Vec<String>,
) -> Result<()> { ) -> Result<()> {

View File

@ -1,17 +1,19 @@
use std::{ use std::{
fs::File, fs::File,
io::{stdout, BufRead, Read, Write}, io::{stdout, BufRead, Read, Write},
path::{Path, PathBuf},
}; };
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs; use argp::FromArgs;
use owo_colors::{OwoColorize, Stream}; use owo_colors::{OwoColorize, Stream};
use path_slash::PathExt;
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use typed_path::{Utf8NativePath, Utf8NativePathBuf};
use crate::{ use crate::{
util::file::{buf_writer, process_rsp, touch}, util::{
file::{buf_writer, process_rsp, touch},
path::native_path,
},
vfs::open_file, vfs::open_file,
}; };
@ -22,13 +24,13 @@ pub struct Args {
#[argp(switch, short = 'c')] #[argp(switch, short = 'c')]
/// check SHA sums against given list /// check SHA sums against given list
check: bool, check: bool,
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// path to input file(s) /// path to input file(s)
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// (check) touch output file on successful check /// (check) touch output file on successful check
/// (hash) write hash(es) to output file /// (hash) write hash(es) to output file
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
#[argp(switch, short = 'q')] #[argp(switch, short = 'q')]
/// only print failures and a summary /// only print failures and a summary
quiet: bool, quiet: bool,
@ -44,17 +46,17 @@ pub fn run(args: Args) -> Result<()> {
} }
if let Some(out_path) = &args.output { if let Some(out_path) = &args.output {
touch(out_path) touch(out_path)
.with_context(|| format!("Failed to touch output file '{}'", out_path.display()))?; .with_context(|| format!("Failed to touch output file '{}'", out_path))?;
} }
} else { } else {
let mut w: Box<dyn Write> = let mut w: Box<dyn Write> = if let Some(out_path) = &args.output {
if let Some(out_path) = &args.output { Box::new(
Box::new(buf_writer(out_path).with_context(|| { buf_writer(out_path)
format!("Failed to open output file '{}'", out_path.display()) .with_context(|| format!("Failed to open output file '{}'", out_path))?,
})?) )
} else { } else {
Box::new(stdout()) Box::new(stdout())
}; };
for path in process_rsp(&args.files)? { for path in process_rsp(&args.files)? {
let mut file = open_file(&path, false)?; let mut file = open_file(&path, false)?;
hash(w.as_mut(), file.as_mut(), &path)? hash(w.as_mut(), file.as_mut(), &path)?
@ -114,7 +116,7 @@ where R: BufRead + ?Sized {
Ok(()) Ok(())
} }
fn hash<R, W>(w: &mut W, reader: &mut R, path: &Path) -> Result<()> fn hash<R, W>(w: &mut W, reader: &mut R, path: &Utf8NativePath) -> Result<()>
where where
R: Read + ?Sized, R: Read + ?Sized,
W: Write + ?Sized, W: Write + ?Sized,
@ -123,7 +125,7 @@ where
let mut hash_buf = [0u8; 40]; let mut hash_buf = [0u8; 40];
let hash_str = base16ct::lower::encode_str(&hash, &mut hash_buf) let hash_str = base16ct::lower::encode_str(&hash, &mut hash_buf)
.map_err(|e| anyhow!("Failed to encode hash: {e}"))?; .map_err(|e| anyhow!("Failed to encode hash: {e}"))?;
writeln!(w, "{} {}", hash_str, path.to_slash_lossy())?; writeln!(w, "{} {}", hash_str, path.with_unix_encoding())?;
Ok(()) Ok(())
} }

View File

@ -1,9 +1,9 @@
use std::path::PathBuf;
use anyhow::Result; use anyhow::Result;
use argp::FromArgs; use argp::FromArgs;
use typed_path::Utf8NativePathBuf;
use super::vfs; use super::vfs;
use crate::util::path::native_path;
#[derive(FromArgs, PartialEq, Debug)] #[derive(FromArgs, PartialEq, Debug)]
/// Commands for processing U8 (arc) files. /// Commands for processing U8 (arc) files.
@ -24,9 +24,9 @@ enum SubCommand {
/// Views U8 (arc) file information. /// Views U8 (arc) file information.
#[argp(subcommand, name = "list")] #[argp(subcommand, name = "list")]
pub struct ListArgs { pub struct ListArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// U8 (arc) file /// U8 (arc) file
file: PathBuf, file: Utf8NativePathBuf,
#[argp(switch, short = 's')] #[argp(switch, short = 's')]
/// Only print filenames. /// Only print filenames.
short: bool, short: bool,
@ -36,12 +36,12 @@ pub struct ListArgs {
/// Extracts U8 (arc) file contents. /// Extracts U8 (arc) file contents.
#[argp(subcommand, name = "extract")] #[argp(subcommand, name = "extract")]
pub struct ExtractArgs { pub struct ExtractArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// U8 (arc) file /// U8 (arc) file
file: PathBuf, file: Utf8NativePathBuf,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// output directory /// output directory
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
#[argp(switch)] #[argp(switch)]
/// Do not decompress files when copying. /// Do not decompress files when copying.
no_decompress: bool, no_decompress: bool,
@ -58,13 +58,13 @@ pub fn run(args: Args) -> Result<()> {
} }
fn list(args: ListArgs) -> Result<()> { fn list(args: ListArgs) -> Result<()> {
let path = PathBuf::from(format!("{}:", args.file.display())); let path = Utf8NativePathBuf::from(format!("{}:", args.file));
vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true }) vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true })
} }
fn extract(args: ExtractArgs) -> Result<()> { fn extract(args: ExtractArgs) -> Result<()> {
let path = PathBuf::from(format!("{}:", args.file.display())); let path = Utf8NativePathBuf::from(format!("{}:", args.file));
let output = args.output.unwrap_or_else(|| PathBuf::from(".")); let output = args.output.unwrap_or_else(|| Utf8NativePathBuf::from("."));
vfs::cp(vfs::CpArgs { vfs::cp(vfs::CpArgs {
paths: vec![path, output], paths: vec![path, output],
no_decompress: args.no_decompress, no_decompress: args.no_decompress,

View File

@ -3,16 +3,19 @@ use std::{
fs::File, fs::File,
io, io,
io::{BufRead, Write}, io::{BufRead, Write},
path::{Path, PathBuf},
}; };
use anyhow::{anyhow, bail, Context}; use anyhow::{anyhow, bail, Context};
use argp::FromArgs; use argp::FromArgs;
use size::Size; use size::Size;
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPath};
use crate::vfs::{ use crate::{
decompress_file, detect, open_path, FileFormat, OpenResult, Vfs, VfsFile, VfsFileType, util::path::native_path,
VfsMetadata, vfs::{
decompress_file, detect, open_path, FileFormat, OpenResult, Vfs, VfsFile, VfsFileType,
VfsMetadata,
},
}; };
#[derive(FromArgs, PartialEq, Debug)] #[derive(FromArgs, PartialEq, Debug)]
@ -34,9 +37,9 @@ enum SubCommand {
/// List files in a directory or container. /// List files in a directory or container.
#[argp(subcommand, name = "ls")] #[argp(subcommand, name = "ls")]
pub struct LsArgs { pub struct LsArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// Directory or container path. /// Directory or container path.
pub path: PathBuf, pub path: Utf8NativePathBuf,
#[argp(switch, short = 's')] #[argp(switch, short = 's')]
/// Only print filenames. /// Only print filenames.
pub short: bool, pub short: bool,
@ -49,9 +52,9 @@ pub struct LsArgs {
/// Copy files from a container. /// Copy files from a container.
#[argp(subcommand, name = "cp")] #[argp(subcommand, name = "cp")]
pub struct CpArgs { pub struct CpArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// Source path(s) and destination path. /// Source path(s) and destination path.
pub paths: Vec<PathBuf>, pub paths: Vec<Utf8NativePathBuf>,
#[argp(switch)] #[argp(switch)]
/// Do not decompress files when copying. /// Do not decompress files when copying.
pub no_decompress: bool, pub no_decompress: bool,
@ -111,21 +114,18 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
let mut files = Vec::new(); let mut files = Vec::new();
match open_path(&args.path, false)? { match open_path(&args.path, false)? {
OpenResult::File(mut file, path) => { OpenResult::File(mut file, path) => {
let filename = Path::new(path) let filename = path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
.file_name()
.ok_or_else(|| anyhow!("Path has no filename"))?
.to_string_lossy();
if args.short { if args.short {
println!("{}", filename); println!("{}", filename);
} else { } else {
let metadata = file let metadata = file
.metadata() .metadata()
.with_context(|| format!("Failed to fetch metadata for {}", path))?; .with_context(|| format!("Failed to fetch metadata for {}", path))?;
files.push(file_info(&filename, file.as_mut(), &metadata)?); files.push(file_info(filename, file.as_mut(), &metadata)?);
} }
} }
OpenResult::Directory(mut fs, path) => { OpenResult::Directory(mut fs, path) => {
ls_directory(fs.as_mut(), path, "", &args, &mut files)?; ls_directory(fs.as_mut(), &path, Utf8UnixPath::new(""), &args, &mut files)?;
} }
} }
if !args.short { if !args.short {
@ -149,16 +149,16 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
fn ls_directory( fn ls_directory(
fs: &mut dyn Vfs, fs: &mut dyn Vfs,
path: &str, path: &Utf8UnixPath,
base_filename: &str, base_filename: &Utf8UnixPath,
args: &LsArgs, args: &LsArgs,
files: &mut Vec<Columns<5>>, files: &mut Vec<Columns<5>>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let entries = fs.read_dir(path)?; let entries = fs.read_dir(path)?;
files.reserve(entries.len()); files.reserve(entries.len());
for filename in entries { for filename in entries {
let entry_path = format!("{}/{}", path, filename); let entry_path = path.join(&filename);
let display_filename = format!("{}{}", base_filename, filename); let display_path = base_filename.join(&filename);
let metadata = fs let metadata = fs
.metadata(&entry_path) .metadata(&entry_path)
.with_context(|| format!("Failed to fetch metadata for {}", entry_path))?; .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
@ -168,26 +168,25 @@ fn ls_directory(
.open(&entry_path) .open(&entry_path)
.with_context(|| format!("Failed to open file {}", entry_path))?; .with_context(|| format!("Failed to open file {}", entry_path))?;
if args.short { if args.short {
println!("{}", display_filename); println!("{}", display_path);
} else { } else {
files.push(file_info(&display_filename, file.as_mut(), &metadata)?); files.push(file_info(display_path.as_str(), file.as_mut(), &metadata)?);
} }
} }
VfsFileType::Directory => { VfsFileType::Directory => {
if args.short { if args.short {
println!("{}/", display_filename); println!("{}/", display_path);
} else { } else {
files.push([ files.push([
" ".to_string(), " ".to_string(),
format!("{}/", display_filename), format!("{}/", display_path),
"Directory".to_string(), "Directory".to_string(),
String::new(), String::new(),
String::new(), String::new(),
]); ]);
} }
if args.recursive { if args.recursive {
let base_filename = format!("{}/", display_filename); ls_directory(fs, &entry_path, &display_path, args, files)?;
ls_directory(fs, &entry_path, &base_filename, args, files)?;
} }
} }
} }
@ -200,26 +199,24 @@ pub fn cp(mut args: CpArgs) -> anyhow::Result<()> {
bail!("Both source and destination paths must be provided"); bail!("Both source and destination paths must be provided");
} }
let dest = args.paths.pop().unwrap(); let dest = args.paths.pop().unwrap();
let dest_is_dir = args.paths.len() > 1 || dest.metadata().ok().is_some_and(|m| m.is_dir()); let dest_is_dir = args.paths.len() > 1 || fs::metadata(&dest).ok().is_some_and(|m| m.is_dir());
let auto_decompress = !args.no_decompress; let auto_decompress = !args.no_decompress;
for path in args.paths { for path in args.paths {
match open_path(&path, auto_decompress)? { match open_path(&path, auto_decompress)? {
OpenResult::File(file, path) => { OpenResult::File(file, path) => {
let dest = if dest_is_dir { let dest = if dest_is_dir {
fs::create_dir_all(&dest).with_context(|| { fs::create_dir_all(&dest)
format!("Failed to create directory {}", dest.display()) .with_context(|| format!("Failed to create directory {}", dest))?;
})?; let filename =
let filename = Path::new(path) path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
.file_name()
.ok_or_else(|| anyhow!("Path has no filename"))?;
dest.join(filename) dest.join(filename)
} else { } else {
dest.clone() dest.clone()
}; };
cp_file(file, path, &dest, auto_decompress, args.quiet)?; cp_file(file, &path, &dest, auto_decompress, args.quiet)?;
} }
OpenResult::Directory(mut fs, path) => { OpenResult::Directory(mut fs, path) => {
cp_recursive(fs.as_mut(), path, &dest, auto_decompress, args.quiet)?; cp_recursive(fs.as_mut(), &path, &dest, auto_decompress, args.quiet)?;
} }
} }
} }
@ -228,8 +225,8 @@ pub fn cp(mut args: CpArgs) -> anyhow::Result<()> {
fn cp_file( fn cp_file(
mut file: Box<dyn VfsFile>, mut file: Box<dyn VfsFile>,
path: &str, path: &Utf8UnixPath,
dest: &Path, dest: &Utf8NativePath,
auto_decompress: bool, auto_decompress: bool,
quiet: bool, quiet: bool,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
@ -237,31 +234,30 @@ fn cp_file(
if let FileFormat::Compressed(kind) = detect(file.as_mut())? { if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
if auto_decompress { if auto_decompress {
file = decompress_file(file.as_mut(), kind) file = decompress_file(file.as_mut(), kind)
.with_context(|| format!("Failed to decompress file {}", dest.display()))?; .with_context(|| format!("Failed to decompress file {}", dest))?;
compression = Some(kind); compression = Some(kind);
} }
} }
let metadata = file let metadata =
.metadata() file.metadata().with_context(|| format!("Failed to fetch metadata for {}", dest))?;
.with_context(|| format!("Failed to fetch metadata for {}", dest.display()))?;
if !quiet { if !quiet {
if let Some(kind) = compression { if let Some(kind) = compression {
println!( println!(
"{} -> {} ({}) [Decompressed {}]", "{} -> {} ({}) [Decompressed {}]",
path, path,
dest.display(), dest,
Size::from_bytes(metadata.len), Size::from_bytes(metadata.len),
kind kind
); );
} else { } else {
println!("{} -> {} ({})", path, dest.display(), Size::from_bytes(metadata.len)); println!("{} -> {} ({})", path, dest, Size::from_bytes(metadata.len));
} }
} }
let mut dest_file = let mut dest_file =
File::create(dest).with_context(|| format!("Failed to create file {}", dest.display()))?; File::create(dest).with_context(|| format!("Failed to create file {}", dest))?;
buf_copy(file.as_mut(), &mut dest_file) buf_copy(file.as_mut(), &mut dest_file)
.with_context(|| format!("Failed to copy file {}", dest.display()))?; .with_context(|| format!("Failed to copy file {}", dest))?;
dest_file.flush().with_context(|| format!("Failed to flush file {}", dest.display()))?; dest_file.flush().with_context(|| format!("Failed to flush file {}", dest))?;
Ok(()) Ok(())
} }
@ -286,16 +282,15 @@ where
fn cp_recursive( fn cp_recursive(
fs: &mut dyn Vfs, fs: &mut dyn Vfs,
path: &str, path: &Utf8UnixPath,
dest: &Path, dest: &Utf8NativePath,
auto_decompress: bool, auto_decompress: bool,
quiet: bool, quiet: bool,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
fs::create_dir_all(dest) fs::create_dir_all(dest).with_context(|| format!("Failed to create directory {}", dest))?;
.with_context(|| format!("Failed to create directory {}", dest.display()))?;
let entries = fs.read_dir(path)?; let entries = fs.read_dir(path)?;
for filename in entries { for filename in entries {
let entry_path = format!("{}/{}", path, filename); let entry_path = path.join(&filename);
let metadata = fs let metadata = fs
.metadata(&entry_path) .metadata(&entry_path)
.with_context(|| format!("Failed to fetch metadata for {}", entry_path))?; .with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;

View File

@ -1,12 +1,14 @@
use std::{fs, path::PathBuf}; use std::fs;
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use argp::FromArgs; use argp::FromArgs;
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
util::{ util::{
file::process_rsp, file::process_rsp,
ncompress::{compress_yay0, decompress_yay0}, ncompress::{compress_yay0, decompress_yay0},
path::native_path,
IntoCow, ToCow, IntoCow, ToCow,
}, },
vfs::open_file, vfs::open_file,
@ -31,26 +33,26 @@ enum SubCommand {
/// Compresses files using YAY0. /// Compresses files using YAY0.
#[argp(subcommand, name = "compress")] #[argp(subcommand, name = "compress")]
pub struct CompressArgs { pub struct CompressArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// Files to compress /// Files to compress
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// Output file (or directory, if multiple files are specified). /// Output file (or directory, if multiple files are specified).
/// If not specified, compresses in-place. /// If not specified, compresses in-place.
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Decompresses YAY0-compressed files. /// Decompresses YAY0-compressed files.
#[argp(subcommand, name = "decompress")] #[argp(subcommand, name = "decompress")]
pub struct DecompressArgs { pub struct DecompressArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// YAY0-compressed files /// YAY0-compressed files
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// Output file (or directory, if multiple files are specified). /// Output file (or directory, if multiple files are specified).
/// If not specified, decompresses in-place. /// If not specified, decompresses in-place.
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
} }
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
@ -78,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
path.as_path().to_cow() path.as_path().to_cow()
}; };
fs::write(out_path.as_ref(), data) fs::write(out_path.as_ref(), data)
.with_context(|| format!("Failed to write '{}'", out_path.display()))?; .with_context(|| format!("Failed to write '{}'", out_path))?;
} }
Ok(()) Ok(())
} }
@ -90,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let data = { let data = {
let mut file = open_file(&path, true)?; let mut file = open_file(&path, true)?;
decompress_yay0(file.map()?) decompress_yay0(file.map()?)
.with_context(|| format!("Failed to decompress '{}' using Yay0", path.display()))? .with_context(|| format!("Failed to decompress '{}' using Yay0", path))?
}; };
let out_path = if let Some(output) = &args.output { let out_path = if let Some(output) = &args.output {
if single_file { if single_file {
@ -102,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
path.as_path().to_cow() path.as_path().to_cow()
}; };
fs::write(out_path.as_ref(), data) fs::write(out_path.as_ref(), data)
.with_context(|| format!("Failed to write '{}'", out_path.display()))?; .with_context(|| format!("Failed to write '{}'", out_path))?;
} }
Ok(()) Ok(())
} }

View File

@ -1,12 +1,14 @@
use std::{fs, path::PathBuf}; use std::fs;
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use argp::FromArgs; use argp::FromArgs;
use typed_path::Utf8NativePathBuf;
use crate::{ use crate::{
util::{ util::{
file::process_rsp, file::process_rsp,
ncompress::{compress_yaz0, decompress_yaz0}, ncompress::{compress_yaz0, decompress_yaz0},
path::native_path,
IntoCow, ToCow, IntoCow, ToCow,
}, },
vfs::open_file, vfs::open_file,
@ -31,26 +33,26 @@ enum SubCommand {
/// Compresses files using YAZ0. /// Compresses files using YAZ0.
#[argp(subcommand, name = "compress")] #[argp(subcommand, name = "compress")]
pub struct CompressArgs { pub struct CompressArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// Files to compress /// Files to compress
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// Output file (or directory, if multiple files are specified). /// Output file (or directory, if multiple files are specified).
/// If not specified, compresses in-place. /// If not specified, compresses in-place.
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Decompresses YAZ0-compressed files. /// Decompresses YAZ0-compressed files.
#[argp(subcommand, name = "decompress")] #[argp(subcommand, name = "decompress")]
pub struct DecompressArgs { pub struct DecompressArgs {
#[argp(positional)] #[argp(positional, from_str_fn(native_path))]
/// YAZ0-compressed files /// YAZ0-compressed files
files: Vec<PathBuf>, files: Vec<Utf8NativePathBuf>,
#[argp(option, short = 'o')] #[argp(option, short = 'o', from_str_fn(native_path))]
/// Output file (or directory, if multiple files are specified). /// Output file (or directory, if multiple files are specified).
/// If not specified, decompresses in-place. /// If not specified, decompresses in-place.
output: Option<PathBuf>, output: Option<Utf8NativePathBuf>,
} }
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
@ -78,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
path.as_path().to_cow() path.as_path().to_cow()
}; };
fs::write(out_path.as_ref(), data) fs::write(out_path.as_ref(), data)
.with_context(|| format!("Failed to write '{}'", out_path.display()))?; .with_context(|| format!("Failed to write '{}'", out_path))?;
} }
Ok(()) Ok(())
} }
@ -90,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
let data = { let data = {
let mut file = open_file(&path, false)?; let mut file = open_file(&path, false)?;
decompress_yaz0(file.map()?) decompress_yaz0(file.map()?)
.with_context(|| format!("Failed to decompress '{}' using Yaz0", path.display()))? .with_context(|| format!("Failed to decompress '{}' using Yaz0", path))?
}; };
let out_path = if let Some(output) = &args.output { let out_path = if let Some(output) = &args.output {
if single_file { if single_file {
@ -102,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
path.as_path().to_cow() path.as_path().to_cow()
}; };
fs::write(out_path.as_ref(), data) fs::write(out_path.as_ref(), data)
.with_context(|| format!("Failed to write '{}'", out_path.display()))?; .with_context(|| format!("Failed to write '{}'", out_path))?;
} }
Ok(()) Ok(())
} }

View File

@ -1,3 +1,4 @@
#![deny(unused_crate_dependencies)]
use std::{env, ffi::OsStr, fmt::Display, path::PathBuf, process::exit, str::FromStr}; use std::{env, ffi::OsStr, fmt::Display, path::PathBuf, process::exit, str::FromStr};
use anyhow::Error; use anyhow::Error;

View File

@ -2,7 +2,6 @@ use std::{
fs, fs,
io::{BufRead, Write}, io::{BufRead, Write},
num::ParseIntError, num::ParseIntError,
path::Path,
str::FromStr, str::FromStr,
}; };
@ -12,6 +11,7 @@ use filetime::FileTime;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::{Captures, Regex}; use regex::{Captures, Regex};
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use typed_path::Utf8NativePath;
use xxhash_rust::xxh3::xxh3_64; use xxhash_rust::xxh3::xxh3_64;
use crate::{ use crate::{
@ -45,10 +45,11 @@ pub fn parse_i32(s: &str) -> Result<i32, ParseIntError> {
} }
} }
pub fn apply_symbols_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>> pub fn apply_symbols_file(
where P: AsRef<Path> { path: &Utf8NativePath,
let path = path.as_ref(); obj: &mut ObjInfo,
Ok(if path.is_file() { ) -> Result<Option<FileReadInfo>> {
Ok(if fs::metadata(path).is_ok_and(|m| m.is_file()) {
let mut file = open_file(path, true)?; let mut file = open_file(path, true)?;
let cached = FileReadInfo::new(file.as_mut())?; let cached = FileReadInfo::new(file.as_mut())?;
for result in file.lines() { for result in file.lines() {
@ -199,19 +200,21 @@ pub fn is_auto_label(symbol: &ObjSymbol) -> bool { symbol.name.starts_with("lbl_
pub fn is_auto_jump_table(symbol: &ObjSymbol) -> bool { symbol.name.starts_with("jumptable_") } pub fn is_auto_jump_table(symbol: &ObjSymbol) -> bool { symbol.name.starts_with("jumptable_") }
fn write_if_unchanged<P, Cb>(path: P, cb: Cb, cached_file: Option<FileReadInfo>) -> Result<()> fn write_if_unchanged<Cb>(
path: &Utf8NativePath,
cb: Cb,
cached_file: Option<FileReadInfo>,
) -> Result<()>
where where
P: AsRef<Path>,
Cb: FnOnce(&mut dyn Write) -> Result<()>, Cb: FnOnce(&mut dyn Write) -> Result<()>,
{ {
if let Some(cached_file) = cached_file { if let Some(cached_file) = cached_file {
// Check file mtime // Check file mtime
let path = path.as_ref();
let new_mtime = fs::metadata(path).ok().map(|m| FileTime::from_last_modification_time(&m)); let new_mtime = fs::metadata(path).ok().map(|m| FileTime::from_last_modification_time(&m));
if let (Some(new_mtime), Some(old_mtime)) = (new_mtime, cached_file.mtime) { if let (Some(new_mtime), Some(old_mtime)) = (new_mtime, cached_file.mtime) {
if new_mtime != old_mtime { if new_mtime != old_mtime {
// File changed, don't write // File changed, don't write
warn!(path = %path.display(), "File changed since read, not updating"); warn!(path = %path, "File changed since read, not updating");
return Ok(()); return Ok(());
} }
} }
@ -221,12 +224,12 @@ where
cb(&mut buf)?; cb(&mut buf)?;
if xxh3_64(&buf) == cached_file.hash { if xxh3_64(&buf) == cached_file.hash {
// No changes // No changes
debug!(path = %path.display(), "File unchanged"); debug!(path = %path, "File unchanged");
return Ok(()); return Ok(());
} }
// Write to file // Write to file
info!("Writing updated {}", path.display()); info!("Writing updated {}", path);
fs::write(path, &buf)?; fs::write(path, &buf)?;
} else { } else {
// Write directly // Write directly
@ -238,14 +241,11 @@ where
} }
#[inline] #[inline]
pub fn write_symbols_file<P>( pub fn write_symbols_file(
path: P, path: &Utf8NativePath,
obj: &ObjInfo, obj: &ObjInfo,
cached_file: Option<FileReadInfo>, cached_file: Option<FileReadInfo>,
) -> Result<()> ) -> Result<()> {
where
P: AsRef<Path>,
{
write_if_unchanged(path, |w| write_symbols(w, obj), cached_file) write_if_unchanged(path, |w| write_symbols(w, obj), cached_file)
} }
@ -413,15 +413,12 @@ fn section_kind_to_str(kind: ObjSectionKind) -> &'static str {
} }
#[inline] #[inline]
pub fn write_splits_file<P>( pub fn write_splits_file(
path: P, path: &Utf8NativePath,
obj: &ObjInfo, obj: &ObjInfo,
all: bool, all: bool,
cached_file: Option<FileReadInfo>, cached_file: Option<FileReadInfo>,
) -> Result<()> ) -> Result<()> {
where
P: AsRef<Path>,
{
write_if_unchanged(path, |w| write_splits(w, obj, all), cached_file) write_if_unchanged(path, |w| write_splits(w, obj, all), cached_file)
} }
@ -625,10 +622,8 @@ enum SplitState {
Unit(String), Unit(String),
} }
pub fn apply_splits_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>> pub fn apply_splits_file(path: &Utf8NativePath, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>> {
where P: AsRef<Path> { Ok(if fs::metadata(path).is_ok_and(|m| m.is_file()) {
let path = path.as_ref();
Ok(if path.is_file() {
let mut file = open_file(path, true)?; let mut file = open_file(path, true)?;
let cached = FileReadInfo::new(file.as_mut())?; let cached = FileReadInfo::new(file.as_mut())?;
apply_splits(file.as_mut(), obj)?; apply_splits(file.as_mut(), obj)?;
@ -738,10 +733,8 @@ where R: BufRead + ?Sized {
Ok(()) Ok(())
} }
pub fn read_splits_sections<P>(path: P) -> Result<Option<Vec<SectionDef>>> pub fn read_splits_sections(path: &Utf8NativePath) -> Result<Option<Vec<SectionDef>>> {
where P: AsRef<Path> { if !fs::metadata(path).is_ok_and(|m| m.is_file()) {
let path = path.as_ref();
if !path.is_file() {
return Ok(None); return Ok(None);
} }
let file = open_file(path, true)?; let file = open_file(path, true)?;

View File

@ -1,41 +1,39 @@
use std::{ use std::io::Write;
io::Write,
path::{Path, PathBuf},
};
use itertools::Itertools; use itertools::Itertools;
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPathBuf};
pub struct DepFile { pub struct DepFile {
pub name: String, pub name: Utf8UnixPathBuf,
pub dependencies: Vec<String>, pub dependencies: Vec<Utf8UnixPathBuf>,
} }
fn normalize_path(path: &Path) -> String { fn normalize_path(path: Utf8NativePathBuf) -> Utf8UnixPathBuf {
let path = path.to_string_lossy().replace('\\', "/"); if let Some((a, _)) = path.as_str().split_once(':') {
path.split_once(':').map(|(p, _)| p.to_string()).unwrap_or(path) Utf8NativePath::new(a).with_unix_encoding()
} else {
path.with_unix_encoding()
}
} }
impl DepFile { impl DepFile {
pub fn new(name: PathBuf) -> Self { pub fn new(name: Utf8NativePathBuf) -> Self {
Self { name: name.to_string_lossy().into_owned(), dependencies: vec![] } Self { name: name.with_unix_encoding(), dependencies: vec![] }
} }
pub fn push<P>(&mut self, dependency: P) pub fn push(&mut self, dependency: Utf8NativePathBuf) {
where P: AsRef<Path> { self.dependencies.push(normalize_path(dependency));
let path = dependency.as_ref().to_string_lossy().replace('\\', "/");
let path = path.split_once(':').map(|(p, _)| p.to_string()).unwrap_or(path);
self.dependencies.push(path);
} }
pub fn extend(&mut self, dependencies: Vec<PathBuf>) { pub fn extend(&mut self, dependencies: Vec<Utf8NativePathBuf>) {
self.dependencies.extend(dependencies.iter().map(|dependency| normalize_path(dependency))); self.dependencies.extend(dependencies.into_iter().map(normalize_path));
} }
pub fn write<W>(&self, w: &mut W) -> std::io::Result<()> pub fn write<W>(&self, w: &mut W) -> std::io::Result<()>
where W: Write + ?Sized { where W: Write + ?Sized {
write!(w, "{}:", self.name)?; write!(w, "{}:", self.name)?;
for dep in self.dependencies.iter().unique() { for dep in self.dependencies.iter().unique() {
write!(w, " \\\n {}", dep.replace(' ', "\\ "))?; write!(w, " \\\n {}", dep.as_str().replace(' ', "\\ "))?;
} }
Ok(()) Ok(())
} }

View File

@ -20,6 +20,7 @@ use object::{
Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation, Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation,
RelocationFlags, RelocationTarget, SectionKind, Symbol, SymbolKind, SymbolScope, SymbolSection, RelocationFlags, RelocationTarget, SectionKind, Symbol, SymbolKind, SymbolScope, SymbolSection,
}; };
use typed_path::Utf8NativePath;
use crate::{ use crate::{
array_ref, array_ref,
@ -46,9 +47,7 @@ enum BoundaryState {
FilesEnded, FilesEnded,
} }
pub fn process_elf<P>(path: P) -> Result<ObjInfo> pub fn process_elf(path: &Utf8NativePath) -> Result<ObjInfo> {
where P: AsRef<Path> {
let path = path.as_ref();
let mut file = open_file(path, true)?; let mut file = open_file(path, true)?;
let obj_file = object::read::File::parse(file.map()?)?; let obj_file = object::read::File::parse(file.map()?)?;
let architecture = match obj_file.architecture() { let architecture = match obj_file.architecture() {

View File

@ -1,33 +1,32 @@
use std::{ use std::{
fs,
fs::{DirBuilder, File, OpenOptions}, fs::{DirBuilder, File, OpenOptions},
io, io,
io::{BufRead, BufWriter, Read, Seek, SeekFrom}, io::{BufRead, BufWriter, Read, Seek, SeekFrom},
path::{Path, PathBuf},
}; };
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use filetime::{set_file_mtime, FileTime}; use filetime::{set_file_mtime, FileTime};
use path_slash::PathBufExt;
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPathBuf};
use xxhash_rust::xxh3::xxh3_64; use xxhash_rust::xxh3::xxh3_64;
use crate::{ use crate::{
array_ref, array_ref,
util::{ util::{
ncompress::{decompress_yay0, decompress_yaz0, YAY0_MAGIC, YAZ0_MAGIC}, ncompress::{decompress_yay0, decompress_yaz0, YAY0_MAGIC, YAZ0_MAGIC},
path::check_path_buf,
Bytes, Bytes,
}, },
vfs::{open_file, VfsFile}, vfs::{open_file, VfsFile},
}; };
/// Creates a buffered writer around a file (not memory mapped). /// Creates a buffered writer around a file (not memory mapped).
pub fn buf_writer<P>(path: P) -> Result<BufWriter<File>> pub fn buf_writer(path: &Utf8NativePath) -> Result<BufWriter<File>> {
where P: AsRef<Path> { if let Some(parent) = path.parent() {
if let Some(parent) = path.as_ref().parent() {
DirBuilder::new().recursive(true).create(parent)?; DirBuilder::new().recursive(true).create(parent)?;
} }
let file = File::create(&path) let file = File::create(path).with_context(|| format!("Failed to create file '{}'", path))?;
.with_context(|| format!("Failed to create file '{}'", path.as_ref().display()))?;
Ok(BufWriter::new(file)) Ok(BufWriter::new(file))
} }
@ -61,22 +60,21 @@ where R: Read + Seek + ?Sized {
} }
/// Process response files (starting with '@') and glob patterns (*). /// Process response files (starting with '@') and glob patterns (*).
pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> { pub fn process_rsp(files: &[Utf8NativePathBuf]) -> Result<Vec<Utf8NativePathBuf>> {
let mut out = Vec::with_capacity(files.len()); let mut out = Vec::<Utf8NativePathBuf>::with_capacity(files.len());
for path in files { for path in files {
let path_str = if let Some(rsp_file) = path.as_str().strip_prefix('@') {
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?; let file = open_file(Utf8NativePath::new(rsp_file), true)?;
if let Some(rsp_file) = path_str.strip_prefix('@') {
let file = open_file(Path::new(rsp_file), true)?;
for result in file.lines() { for result in file.lines() {
let line = result?; let line = result?;
if !line.is_empty() { if !line.is_empty() {
out.push(PathBuf::from_slash(line)); out.push(Utf8UnixPathBuf::from(line).with_encoding());
} }
} }
} else if path_str.contains('*') { } else if path.as_str().contains('*') {
for entry in glob::glob(path_str)? { for entry in glob::glob(path.as_str())? {
out.push(entry?); let path = check_path_buf(entry?)?;
out.push(path.with_encoding());
} }
} else { } else {
out.push(path.clone()); out.push(path.clone());
@ -106,16 +104,16 @@ impl FileReadInfo {
/// If a file is a RARC archive, iterate over its contents. /// If a file is a RARC archive, iterate over its contents.
/// If a file is a Yaz0 compressed file, decompress it. /// If a file is a Yaz0 compressed file, decompress it.
pub struct FileIterator { pub struct FileIterator {
paths: Vec<PathBuf>, paths: Vec<Utf8NativePathBuf>,
index: usize, index: usize,
} }
impl FileIterator { impl FileIterator {
pub fn new(paths: &[PathBuf]) -> Result<Self> { pub fn new(paths: &[Utf8NativePathBuf]) -> Result<Self> {
Ok(Self { paths: process_rsp(paths)?, index: 0 }) Ok(Self { paths: process_rsp(paths)?, index: 0 })
} }
fn next_path(&mut self) -> Option<Result<(PathBuf, Box<dyn VfsFile>)>> { fn next_path(&mut self) -> Option<Result<(Utf8NativePathBuf, Box<dyn VfsFile>)>> {
if self.index >= self.paths.len() { if self.index >= self.paths.len() {
return None; return None;
} }
@ -130,14 +128,13 @@ impl FileIterator {
} }
impl Iterator for FileIterator { impl Iterator for FileIterator {
type Item = Result<(PathBuf, Box<dyn VfsFile>)>; type Item = Result<(Utf8NativePathBuf, Box<dyn VfsFile>)>;
fn next(&mut self) -> Option<Self::Item> { self.next_path() } fn next(&mut self) -> Option<Self::Item> { self.next_path() }
} }
pub fn touch<P>(path: P) -> io::Result<()> pub fn touch(path: &Utf8NativePath) -> io::Result<()> {
where P: AsRef<Path> { if fs::exists(path)? {
if path.as_ref().exists() {
set_file_mtime(path, FileTime::now()) set_file_mtime(path, FileTime::now())
} else { } else {
match OpenOptions::new().create(true).truncate(true).write(true).open(path) { match OpenOptions::new().create(true).truncate(true).write(true).open(path) {

View File

@ -1,8 +1,6 @@
use std::path::PathBuf;
use anyhow::Result; use anyhow::Result;
use itertools::Itertools; use itertools::Itertools;
use path_slash::PathBufExt; use typed_path::{Utf8NativePathBuf, Utf8UnixPath};
use crate::obj::{ObjInfo, ObjKind}; use crate::obj::{ObjInfo, ObjKind};
@ -33,7 +31,7 @@ pub fn generate_ldscript(
let mut force_files = Vec::with_capacity(obj.link_order.len()); let mut force_files = Vec::with_capacity(obj.link_order.len());
for unit in &obj.link_order { for unit in &obj.link_order {
let obj_path = obj_path_for_unit(&unit.name); let obj_path = obj_path_for_unit(&unit.name);
force_files.push(obj_path.file_name().unwrap().to_str().unwrap().to_string()); force_files.push(obj_path.file_name().unwrap().to_string());
} }
let mut force_active = force_active.to_vec(); let mut force_active = force_active.to_vec();
@ -82,7 +80,7 @@ pub fn generate_ldscript_partial(
let mut force_files = Vec::with_capacity(obj.link_order.len()); let mut force_files = Vec::with_capacity(obj.link_order.len());
for unit in &obj.link_order { for unit in &obj.link_order {
let obj_path = obj_path_for_unit(&unit.name); let obj_path = obj_path_for_unit(&unit.name);
force_files.push(obj_path.file_name().unwrap().to_str().unwrap().to_string()); force_files.push(obj_path.file_name().unwrap().to_string());
} }
let mut force_active = force_active.to_vec(); let mut force_active = force_active.to_vec();
@ -99,6 +97,10 @@ pub fn generate_ldscript_partial(
Ok(out) Ok(out)
} }
pub fn obj_path_for_unit(unit: &str) -> PathBuf { PathBuf::from_slash(unit).with_extension("o") } pub fn obj_path_for_unit(unit: &str) -> Utf8NativePathBuf {
Utf8UnixPath::new(unit).with_encoding().with_extension("o")
}
pub fn asm_path_for_unit(unit: &str) -> PathBuf { PathBuf::from_slash(unit).with_extension("s") } pub fn asm_path_for_unit(unit: &str) -> Utf8NativePathBuf {
Utf8UnixPath::new(unit).with_encoding().with_extension("s")
}

View File

@ -5,7 +5,6 @@ use std::{
hash::Hash, hash::Hash,
io::BufRead, io::BufRead,
mem::{replace, take}, mem::{replace, take},
path::Path,
}; };
use anyhow::{anyhow, bail, Error, Result}; use anyhow::{anyhow, bail, Error, Result};
@ -16,6 +15,7 @@ use itertools::Itertools;
use multimap::MultiMap; use multimap::MultiMap;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::{Captures, Regex}; use regex::{Captures, Regex};
use typed_path::Utf8NativePath;
use crate::{ use crate::{
obj::{ obj::{
@ -26,6 +26,7 @@ use crate::{
util::nested::NestedVec, util::nested::NestedVec,
vfs::open_file, vfs::open_file,
}; };
#[derive(Debug, Copy, Clone, Eq, PartialEq)] #[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum SymbolKind { pub enum SymbolKind {
Function, Function,
@ -713,16 +714,13 @@ where
Ok(sm.result) Ok(sm.result)
} }
pub fn apply_map_file<P>( pub fn apply_map_file(
path: P, path: &Utf8NativePath,
obj: &mut ObjInfo, obj: &mut ObjInfo,
common_bss_start: Option<u32>, common_bss_start: Option<u32>,
mw_comment_version: Option<u8>, mw_comment_version: Option<u8>,
) -> Result<()> ) -> Result<()> {
where let mut file = open_file(path, true)?;
P: AsRef<Path>,
{
let mut file = open_file(path.as_ref(), true)?;
let info = process_map(file.as_mut(), common_bss_start, mw_comment_version)?; let info = process_map(file.as_mut(), common_bss_start, mw_comment_version)?;
apply_map(info, obj) apply_map(info, obj)
} }

View File

@ -16,6 +16,7 @@ pub mod map;
pub mod ncompress; pub mod ncompress;
pub mod nested; pub mod nested;
pub mod nlzss; pub mod nlzss;
pub mod path;
pub mod rarc; pub mod rarc;
pub mod reader; pub mod reader;
pub mod rel; pub mod rel;

26
src/util/path.rs Normal file
View File

@ -0,0 +1,26 @@
use std::{
path::{Path, PathBuf},
str::Utf8Error,
string::FromUtf8Error,
};
use typed_path::{NativePath, NativePathBuf, Utf8NativePath, Utf8NativePathBuf};
// For argp::FromArgs
pub fn native_path(value: &str) -> Result<Utf8NativePathBuf, String> {
Ok(Utf8NativePathBuf::from(value))
}
/// Checks if the path is valid UTF-8 and returns it as a [`Utf8NativePath`].
#[inline]
pub fn check_path(path: &Path) -> Result<&Utf8NativePath, Utf8Error> {
Utf8NativePath::from_bytes_path(NativePath::new(path.as_os_str().as_encoded_bytes()))
}
/// Checks if the path is valid UTF-8 and returns it as a [`Utf8NativePathBuf`].
#[inline]
pub fn check_path_buf(path: PathBuf) -> Result<Utf8NativePathBuf, FromUtf8Error> {
Utf8NativePathBuf::from_bytes_path_buf(NativePathBuf::from(
path.into_os_string().into_encoded_bytes(),
))
}

View File

@ -1,5 +1,6 @@
use std::{borrow::Cow, ffi::CStr}; use std::{borrow::Cow, ffi::CStr};
use typed_path::Utf8UnixPath;
use zerocopy::{big_endian::*, FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{big_endian::*, FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::{static_assert, vfs::next_non_empty}; use crate::{static_assert, vfs::next_non_empty};
@ -223,8 +224,8 @@ impl<'a> RarcView<'a> {
} }
/// Finds a particular file or directory by path. /// Finds a particular file or directory by path.
pub fn find(&self, path: &str) -> Option<RarcNodeKind> { pub fn find(&self, path: &Utf8UnixPath) -> Option<RarcNodeKind> {
let mut split = path.split('/'); let mut split = path.as_str().split('/');
let mut current = next_non_empty(&mut split); let mut current = next_non_empty(&mut split);
let mut dir_idx = 0; let mut dir_idx = 0;

View File

@ -1,13 +1,11 @@
use std::{ use std::collections::{btree_map, BTreeMap};
collections::{btree_map, BTreeMap},
path::Path,
};
use anyhow::{anyhow, bail, ensure, Result}; use anyhow::{anyhow, bail, ensure, Result};
use base64::{engine::general_purpose::STANDARD, Engine}; use base64::{engine::general_purpose::STANDARD, Engine};
use cwdemangle::{demangle, DemangleOptions}; use cwdemangle::{demangle, DemangleOptions};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use typed_path::Utf8NativePath;
use crate::{ use crate::{
analysis::{ analysis::{
@ -246,8 +244,10 @@ pub fn compare_signature(existing: &mut FunctionSignature, new: &FunctionSignatu
Ok(()) Ok(())
} }
pub fn generate_signature<P>(path: P, symbol_name: &str) -> Result<Option<FunctionSignature>> pub fn generate_signature(
where P: AsRef<Path> { path: &Utf8NativePath,
symbol_name: &str,
) -> Result<Option<FunctionSignature>> {
let mut out_symbols: Vec<OutSymbol> = Vec::new(); let mut out_symbols: Vec<OutSymbol> = Vec::new();
let mut out_relocs: Vec<OutReloc> = Vec::new(); let mut out_relocs: Vec<OutReloc> = Vec::new();
let mut symbol_map: BTreeMap<SymbolIndex, u32> = BTreeMap::new(); let mut symbol_map: BTreeMap<SymbolIndex, u32> = BTreeMap::new();

View File

@ -1,6 +1,7 @@
use std::{borrow::Cow, ffi::CStr, mem::size_of}; use std::{borrow::Cow, ffi::CStr, mem::size_of};
use anyhow::Result; use anyhow::Result;
use typed_path::Utf8UnixPath;
use zerocopy::{big_endian::U32, FromBytes, Immutable, IntoBytes, KnownLayout}; use zerocopy::{big_endian::U32, FromBytes, Immutable, IntoBytes, KnownLayout};
use crate::{static_assert, vfs::next_non_empty}; use crate::{static_assert, vfs::next_non_empty};
@ -138,8 +139,8 @@ impl<'a> U8View<'a> {
} }
/// Finds a particular file or directory by path. /// Finds a particular file or directory by path.
pub fn find(&self, path: &str) -> Option<(usize, U8Node)> { pub fn find(&self, path: &Utf8UnixPath) -> Option<(usize, U8Node)> {
let mut split = path.split('/'); let mut split = path.as_str().split('/');
let mut current = next_non_empty(&mut split); let mut current = next_non_empty(&mut split);
if current.is_empty() { if current.is_empty() {
return Some((0, self.nodes[0])); return Some((0, self.nodes[0]));

View File

@ -9,6 +9,7 @@ use nodtool::{
nod, nod,
nod::{Disc, DiscStream, Fst, NodeKind, OwnedFileStream, PartitionBase, PartitionMeta}, nod::{Disc, DiscStream, Fst, NodeKind, OwnedFileStream, PartitionBase, PartitionMeta},
}; };
use typed_path::Utf8UnixPath;
use zerocopy::IntoBytes; use zerocopy::IntoBytes;
use super::{ use super::{
@ -46,8 +47,8 @@ impl DiscFs {
Ok(Self { disc, base, meta, mtime }) Ok(Self { disc, base, meta, mtime })
} }
fn find(&self, path: &str) -> VfsResult<DiscNode> { fn find(&self, path: &Utf8UnixPath) -> VfsResult<DiscNode> {
let path = path.trim_matches('/'); let path = path.as_str().trim_matches('/');
let mut split = path.split('/'); let mut split = path.split('/');
let mut segment = next_non_empty(&mut split); let mut segment = next_non_empty(&mut split);
match segment.to_ascii_lowercase().as_str() { match segment.to_ascii_lowercase().as_str() {
@ -116,7 +117,7 @@ impl DiscFs {
} }
impl Vfs for DiscFs { impl Vfs for DiscFs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> { fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
match self.find(path)? { match self.find(path)? {
DiscNode::None => Err(VfsError::NotFound), DiscNode::None => Err(VfsError::NotFound),
DiscNode::Special(_) => Err(VfsError::IsADirectory), DiscNode::Special(_) => Err(VfsError::IsADirectory),
@ -140,11 +141,11 @@ impl Vfs for DiscFs {
} }
} }
fn exists(&mut self, path: &str) -> VfsResult<bool> { fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
Ok(!matches!(self.find(path)?, DiscNode::None)) Ok(!matches!(self.find(path)?, DiscNode::None))
} }
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> { fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
match self.find(path)? { match self.find(path)? {
DiscNode::None => Err(VfsError::NotFound), DiscNode::None => Err(VfsError::NotFound),
DiscNode::Special(SpecialDir::Root) => { DiscNode::Special(SpecialDir::Root) => {
@ -211,7 +212,7 @@ impl Vfs for DiscFs {
} }
} }
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> { fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
match self.find(path)? { match self.find(path)? {
DiscNode::None => Err(VfsError::NotFound), DiscNode::None => Err(VfsError::NotFound),
DiscNode::Special(_) => { DiscNode::Special(_) => {

View File

@ -9,7 +9,6 @@ use std::{
fmt::{Debug, Display, Formatter}, fmt::{Debug, Display, Formatter},
io, io,
io::{BufRead, Read, Seek, SeekFrom}, io::{BufRead, Read, Seek, SeekFrom},
path::Path,
sync::Arc, sync::Arc,
}; };
@ -21,6 +20,7 @@ use filetime::FileTime;
use nodtool::{nod, nod::DiscStream}; use nodtool::{nod, nod::DiscStream};
use rarc::RarcFs; use rarc::RarcFs;
pub use std_fs::StdFs; pub use std_fs::StdFs;
use typed_path::{Utf8NativePath, Utf8UnixPath, Utf8UnixPathBuf};
use u8_arc::U8Fs; use u8_arc::U8Fs;
use crate::util::{ use crate::util::{
@ -31,13 +31,13 @@ use crate::util::{
}; };
pub trait Vfs: DynClone + Send + Sync { pub trait Vfs: DynClone + Send + Sync {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>>; fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>>;
fn exists(&mut self, path: &str) -> VfsResult<bool>; fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool>;
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>>; fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>>;
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata>; fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata>;
} }
dyn_clone::clone_trait_object!(Vfs); dyn_clone::clone_trait_object!(Vfs);
@ -192,33 +192,33 @@ where R: Read + Seek + ?Sized {
} }
} }
pub enum OpenResult<'a> { pub enum OpenResult {
File(Box<dyn VfsFile>, &'a str), File(Box<dyn VfsFile>, Utf8UnixPathBuf),
Directory(Box<dyn Vfs>, &'a str), Directory(Box<dyn Vfs>, Utf8UnixPathBuf),
} }
pub fn open_path(path: &Path, auto_decompress: bool) -> anyhow::Result<OpenResult> { pub fn open_path(path: &Utf8NativePath, auto_decompress: bool) -> anyhow::Result<OpenResult> {
open_path_with_fs(Box::new(StdFs), path, auto_decompress) open_path_with_fs(Box::new(StdFs), path, auto_decompress)
} }
pub fn open_path_with_fs( pub fn open_path_with_fs(
mut fs: Box<dyn Vfs>, mut fs: Box<dyn Vfs>,
path: &Path, path: &Utf8NativePath,
auto_decompress: bool, auto_decompress: bool,
) -> anyhow::Result<OpenResult> { ) -> anyhow::Result<OpenResult> {
let str = path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?; let path = path.with_unix_encoding();
let mut split = str.split(':').peekable(); let mut split = path.as_str().split(':').peekable();
let mut current_path = String::new(); let mut current_path = String::new();
let mut file: Option<Box<dyn VfsFile>> = None; let mut file: Option<Box<dyn VfsFile>> = None;
let mut segment = ""; let mut segment = Utf8UnixPath::new("");
loop { loop {
// Open the next segment if necessary // Open the next segment if necessary
if file.is_none() { if file.is_none() {
segment = split.next().unwrap(); segment = Utf8UnixPath::new(split.next().unwrap());
if !current_path.is_empty() { if !current_path.is_empty() {
current_path.push(':'); current_path.push(':');
} }
current_path.push_str(segment); current_path.push_str(segment.as_str());
let file_type = match fs.metadata(segment) { let file_type = match fs.metadata(segment) {
Ok(metadata) => metadata.file_type, Ok(metadata) => metadata.file_type,
Err(VfsError::NotFound) => return Err(anyhow!("{} not found", current_path)), Err(VfsError::NotFound) => return Err(anyhow!("{} not found", current_path)),
@ -235,7 +235,7 @@ pub fn open_path_with_fs(
return if split.peek().is_some() { return if split.peek().is_some() {
Err(anyhow!("{} is not a file", current_path)) Err(anyhow!("{} is not a file", current_path))
} else { } else {
Ok(OpenResult::Directory(fs, segment)) Ok(OpenResult::Directory(fs, segment.to_path_buf()))
} }
} }
} }
@ -297,21 +297,21 @@ pub fn open_path_with_fs(
FileFormat::Compressed(kind) if auto_decompress => Ok(OpenResult::File( FileFormat::Compressed(kind) if auto_decompress => Ok(OpenResult::File(
decompress_file(current_file.as_mut(), kind) decompress_file(current_file.as_mut(), kind)
.with_context(|| format!("Failed to decompress {}", current_path))?, .with_context(|| format!("Failed to decompress {}", current_path))?,
segment, segment.to_path_buf(),
)), )),
_ => Ok(OpenResult::File(current_file, segment)), _ => Ok(OpenResult::File(current_file, segment.to_path_buf())),
}; };
} }
} }
} }
pub fn open_file(path: &Path, auto_decompress: bool) -> anyhow::Result<Box<dyn VfsFile>> { pub fn open_file(path: &Utf8NativePath, auto_decompress: bool) -> anyhow::Result<Box<dyn VfsFile>> {
open_file_with_fs(Box::new(StdFs), path, auto_decompress) open_file_with_fs(Box::new(StdFs), path, auto_decompress)
} }
pub fn open_file_with_fs( pub fn open_file_with_fs(
fs: Box<dyn Vfs>, fs: Box<dyn Vfs>,
path: &Path, path: &Utf8NativePath,
auto_decompress: bool, auto_decompress: bool,
) -> anyhow::Result<Box<dyn VfsFile>> { ) -> anyhow::Result<Box<dyn VfsFile>> {
match open_path_with_fs(fs, path, auto_decompress)? { match open_path_with_fs(fs, path, auto_decompress)? {

View File

@ -1,5 +1,7 @@
use std::io; use std::io;
use typed_path::Utf8UnixPath;
use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile}; use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile};
use crate::util::rarc::{RarcNodeKind, RarcView}; use crate::util::rarc::{RarcNodeKind, RarcView};
@ -18,7 +20,7 @@ impl RarcFs {
} }
impl Vfs for RarcFs { impl Vfs for RarcFs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> { fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
let view = self.view()?; let view = self.view()?;
match view.find(path) { match view.find(path) {
Some(RarcNodeKind::File(_, node)) => { Some(RarcNodeKind::File(_, node)) => {
@ -34,12 +36,12 @@ impl Vfs for RarcFs {
} }
} }
fn exists(&mut self, path: &str) -> VfsResult<bool> { fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
let view = self.view()?; let view = self.view()?;
Ok(view.find(path).is_some()) Ok(view.find(path).is_some())
} }
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> { fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
let view = self.view()?; let view = self.view()?;
match view.find(path) { match view.find(path) {
Some(RarcNodeKind::Directory(_, dir)) => { Some(RarcNodeKind::Directory(_, dir)) => {
@ -58,7 +60,7 @@ impl Vfs for RarcFs {
} }
} }
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> { fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
let metadata = self.file.metadata()?; let metadata = self.file.metadata()?;
let view = self.view()?; let view = self.view()?;
match view.find(path) { match view.find(path) {

View File

@ -1,10 +1,10 @@
use std::{ use std::{
io, fs, io,
io::{BufRead, BufReader, Read, Seek, SeekFrom}, io::{BufRead, BufReader, Read, Seek, SeekFrom},
path::{Path, PathBuf},
}; };
use filetime::FileTime; use filetime::FileTime;
use typed_path::{Utf8NativePathBuf, Utf8UnixPath};
use super::{DiscStream, Vfs, VfsFile, VfsFileType, VfsMetadata, VfsResult}; use super::{DiscStream, Vfs, VfsFile, VfsFileType, VfsMetadata, VfsResult};
@ -12,23 +12,25 @@ use super::{DiscStream, Vfs, VfsFile, VfsFileType, VfsMetadata, VfsResult};
pub struct StdFs; pub struct StdFs;
impl Vfs for StdFs { impl Vfs for StdFs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> { fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
let mut file = StdFile::new(PathBuf::from(path)); let mut file = StdFile::new(path.with_encoding());
file.file()?; // Open the file now to check for errors file.file()?; // Open the file now to check for errors
Ok(Box::new(file)) Ok(Box::new(file))
} }
fn exists(&mut self, path: &str) -> VfsResult<bool> { Ok(Path::new(path).exists()) } fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
Ok(fs::exists(path.with_encoding())?)
}
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> { fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
let entries = std::fs::read_dir(path)? let entries = fs::read_dir(path.with_encoding())?
.map(|entry| entry.map(|e| e.file_name().to_string_lossy().into_owned())) .map(|entry| entry.map(|e| e.file_name().to_string_lossy().into_owned()))
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
Ok(entries) Ok(entries)
} }
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> { fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
let metadata = std::fs::metadata(path)?; let metadata = fs::metadata(path.with_encoding())?;
Ok(VfsMetadata { Ok(VfsMetadata {
file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File }, file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File },
len: metadata.len(), len: metadata.len(),
@ -38,8 +40,8 @@ impl Vfs for StdFs {
} }
pub struct StdFile { pub struct StdFile {
path: PathBuf, path: Utf8NativePathBuf,
file: Option<BufReader<std::fs::File>>, file: Option<BufReader<fs::File>>,
mmap: Option<memmap2::Mmap>, mmap: Option<memmap2::Mmap>,
} }
@ -50,11 +52,11 @@ impl Clone for StdFile {
impl StdFile { impl StdFile {
#[inline] #[inline]
pub fn new(path: PathBuf) -> Self { StdFile { path, file: None, mmap: None } } pub fn new(path: Utf8NativePathBuf) -> Self { StdFile { path, file: None, mmap: None } }
pub fn file(&mut self) -> io::Result<&mut BufReader<std::fs::File>> { pub fn file(&mut self) -> io::Result<&mut BufReader<fs::File>> {
if self.file.is_none() { if self.file.is_none() {
self.file = Some(BufReader::new(std::fs::File::open(&self.path)?)); self.file = Some(BufReader::new(fs::File::open(&self.path)?));
} }
Ok(self.file.as_mut().unwrap()) Ok(self.file.as_mut().unwrap())
} }
@ -85,7 +87,7 @@ impl Seek for StdFile {
impl VfsFile for StdFile { impl VfsFile for StdFile {
fn map(&mut self) -> io::Result<&[u8]> { fn map(&mut self) -> io::Result<&[u8]> {
if self.file.is_none() { if self.file.is_none() {
self.file = Some(BufReader::new(std::fs::File::open(&self.path)?)); self.file = Some(BufReader::new(fs::File::open(&self.path)?));
} }
if self.mmap.is_none() { if self.mmap.is_none() {
self.mmap = Some(unsafe { memmap2::Mmap::map(self.file.as_ref().unwrap().get_ref())? }); self.mmap = Some(unsafe { memmap2::Mmap::map(self.file.as_ref().unwrap().get_ref())? });
@ -94,7 +96,7 @@ impl VfsFile for StdFile {
} }
fn metadata(&mut self) -> io::Result<VfsMetadata> { fn metadata(&mut self) -> io::Result<VfsMetadata> {
let metadata = std::fs::metadata(&self.path)?; let metadata = fs::metadata(&self.path)?;
Ok(VfsMetadata { Ok(VfsMetadata {
file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File }, file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File },
len: metadata.len(), len: metadata.len(),

View File

@ -1,5 +1,7 @@
use std::io; use std::io;
use typed_path::Utf8UnixPath;
use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile}; use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile};
use crate::util::u8_arc::{U8NodeKind, U8View}; use crate::util::u8_arc::{U8NodeKind, U8View};
@ -18,7 +20,7 @@ impl U8Fs {
} }
impl Vfs for U8Fs { impl Vfs for U8Fs {
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> { fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
let view = self.view()?; let view = self.view()?;
match view.find(path) { match view.find(path) {
Some((_, node)) => match node.kind() { Some((_, node)) => match node.kind() {
@ -35,12 +37,12 @@ impl Vfs for U8Fs {
} }
} }
fn exists(&mut self, path: &str) -> VfsResult<bool> { fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
let view = self.view()?; let view = self.view()?;
Ok(view.find(path).is_some()) Ok(view.find(path).is_some())
} }
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> { fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
let view = self.view()?; let view = self.view()?;
match view.find(path) { match view.find(path) {
Some((idx, node)) => match node.kind() { Some((idx, node)) => match node.kind() {
@ -66,7 +68,7 @@ impl Vfs for U8Fs {
} }
} }
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> { fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
let metdata = self.file.metadata()?; let metdata = self.file.metadata()?;
let view = self.view()?; let view = self.view()?;
match view.find(path) { match view.find(path) {