Use typed-path in place of std Path/PathBuf
This makes path handling and conversions more structured and avoids needless UTF-8 checks. All argument inputs use `Utf8NativePathBuf`, while all config entries use `Utf8UnixPathBuf`, ensuring that paths are serialized and deserialized with forward slashes. Since every path is known to be valid UTF-8, we can drop `.display()` calls and lossy UTF-8 conversions.
parent 64d0491256
commit 2e524e6806
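The change hinges on two small helpers. The serde adapter below mirrors the `unix_path_serde` module defined in `src/cmd/dol.rs` in the diff; the `native_path` converter lives in the project's `util::path` module (presumably `src/util/path.rs`, not part of this excerpt), so its body here is a minimal sketch reconstructed from how the hunks call it, not the exact implementation.

```rust
// Sketch only: `native_path`'s real body (src/util/path.rs) is not shown in this
// diff; the serde module matches the one added to src/cmd/dol.rs.
use serde::{Deserialize, Deserializer, Serializer};
use typed_path::{Utf8NativePathBuf, Utf8UnixPathBuf};

/// argp `from_str_fn` converter: CLI arguments become native-flavored UTF-8
/// paths up front, so no later `OsStr`/UTF-8 validation is needed.
pub fn native_path(value: &str) -> Result<Utf8NativePathBuf, String> {
    Ok(Utf8NativePathBuf::from(value))
}

/// serde adapter for config fields: paths are always written and read with
/// forward slashes, regardless of host OS.
pub mod unix_path_serde {
    use super::*;

    pub fn serialize<S>(path: &Utf8UnixPathBuf, s: S) -> Result<S::Ok, S::Error>
    where S: Serializer {
        s.serialize_str(path.as_str())
    }

    pub fn deserialize<'de, D>(deserializer: D) -> Result<Utf8UnixPathBuf, D::Error>
    where D: Deserializer<'de> {
        String::deserialize(deserializer).map(Utf8UnixPathBuf::from)
    }
}
```

Argument structs then opt in with `#[argp(positional, from_str_fn(native_path))]` and config structs with `#[serde(with = "unix_path_serde")]`, as the hunks below show; converting between the two flavors is a `with_encoding()` / `with_unix_encoding()` call instead of a lossy `to_str()`/`display()` round trip.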
Cargo.lock

@@ -381,7 +381,6 @@ dependencies = [
"once_cell",
"orthrus-ncompress",
"owo-colors",
"path-slash",
"petgraph",
"ppc750cl",
"rayon",
@@ -399,6 +398,7 @@ dependencies = [
"tracing",
"tracing-attributes",
"tracing-subscriber",
"typed-path",
"xxhash-rust",
"zerocopy",
]
@@ -1057,12 +1057,6 @@ dependencies = [
"windows-targets",
]

[[package]]
name = "path-slash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42"

[[package]]
name = "pbjson-build"
version = "0.7.0"
@@ -1764,6 +1758,12 @@ dependencies = [
"syn 2.0.79",
]

[[package]]
name = "typed-path"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50c0c7479c430935701ff2532e3091e6680ec03f2f89ffcd9988b08e885b90a5"

[[package]]
name = "typenum"
version = "1.17.0"
Cargo.toml

@@ -9,7 +9,7 @@ publish = false
repository = "https://github.com/encounter/decomp-toolkit"
readme = "README.md"
categories = ["command-line-utilities"]
rust-version = "1.80.0"
rust-version = "1.81"

[[bin]]
name = "dtk"
@@ -31,6 +31,7 @@ argp = "0.3"
base16ct = "0.2"
base64 = "0.22"
byteorder = "1.5"
typed-path = "0.9"
crossterm = "0.28"
cwdemangle = "1.0"
cwextab = "1.0"
@@ -57,7 +58,6 @@ object = { version = "0.36", features = ["read_core", "std", "elf", "write_std"]
once_cell = "1.20"
orthrus-ncompress = "0.2"
owo-colors = { version = "4.1", features = ["supports-colors"] }
path-slash = "0.2"
petgraph = { version = "0.6", default-features = false }
ppc750cl = "0.3"
rayon = "1.10"
src/cmd/alf.rs

@@ -1,16 +1,15 @@
use std::{
    io::{stdout, Write},
    path::PathBuf,
};
use std::io::{stdout, Write};

use anyhow::Result;
use argp::FromArgs;
use typed_path::Utf8NativePathBuf;

use crate::{
    cmd,
    util::{
        alf::AlfFile,
        file::buf_writer,
        path::native_path,
        reader::{Endian, FromReader},
    },
    vfs::open_file,
@@ -35,21 +34,21 @@ enum SubCommand {
/// Prints information about an alf file. (Same as `dol info`)
#[argp(subcommand, name = "info")]
pub struct InfoArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// alf file
    file: PathBuf,
    file: Utf8NativePathBuf,
}

#[derive(FromArgs, PartialEq, Debug)]
/// Extracts symbol hashes from an alf file.
#[argp(subcommand, name = "hashes")]
pub struct HashesArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// alf file
    alf_file: PathBuf,
    #[argp(positional)]
    alf_file: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// output file
    output: Option<PathBuf>,
    output: Option<Utf8NativePathBuf>,
}

pub fn run(args: Args) -> Result<()> {
@@ -64,7 +63,7 @@ fn hashes(args: HashesArgs) -> Result<()> {
        let mut file = open_file(&args.alf_file, true)?;
        AlfFile::from_reader(file.as_mut(), Endian::Little)?
    };
    let mut w: Box<dyn Write> = if let Some(output) = args.output {
    let mut w: Box<dyn Write> = if let Some(output) = &args.output {
        Box::new(buf_writer(output)?)
    } else {
        Box::new(stdout())
src/cmd/ar.rs

@@ -2,15 +2,18 @@ use std::{
    collections::{btree_map::Entry, BTreeMap},
    fs::File,
    io::Write,
    path::PathBuf,
};

use anyhow::{anyhow, bail, Context, Result};
use argp::FromArgs;
use object::{Object, ObjectSymbol, SymbolScope};
use typed_path::Utf8NativePathBuf;

use crate::{
    util::file::{buf_writer, process_rsp},
    util::{
        file::{buf_writer, process_rsp},
        path::native_path,
    },
    vfs::open_file,
};

@@ -33,24 +36,24 @@ enum SubCommand {
/// Creates a static library.
#[argp(subcommand, name = "create")]
pub struct CreateArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// output file
    out: PathBuf,
    #[argp(positional)]
    out: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// input files
    files: Vec<PathBuf>,
    files: Vec<Utf8NativePathBuf>,
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Extracts a static library.
#[argp(subcommand, name = "extract")]
pub struct ExtractArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// input files
    files: Vec<PathBuf>,
    #[argp(option, short = 'o')]
    files: Vec<Utf8NativePathBuf>,
    #[argp(option, short = 'o', from_str_fn(native_path))]
    /// output directory
    out: Option<PathBuf>,
    out: Option<Utf8NativePathBuf>,
    #[argp(switch, short = 'q')]
    /// quiet output
    quiet: bool,
@@ -74,14 +77,13 @@ fn create(args: CreateArgs) -> Result<()> {
    let mut identifiers = Vec::with_capacity(files.len());
    let mut symbol_table = BTreeMap::new();
    for path in &files {
        let path_str =
            path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
        let identifier = path_str.as_bytes().to_vec();
        let unix_path = path.with_unix_encoding();
        let identifier = unix_path.as_str().as_bytes().to_vec();
        identifiers.push(identifier.clone());

        let entries = match symbol_table.entry(identifier) {
            Entry::Vacant(e) => e.insert(Vec::new()),
            Entry::Occupied(_) => bail!("Duplicate file name '{path_str}'"),
            Entry::Occupied(_) => bail!("Duplicate file name '{unix_path}'"),
        };
        let mut file = open_file(path, false)?;
        let obj = object::File::parse(file.map()?)?;
@@ -102,10 +104,8 @@ fn create(args: CreateArgs) -> Result<()> {
        symbol_table,
    )?;
    for path in files {
        let path_str =
            path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
        let mut file = File::open(&path)?;
        builder.append_file(path_str.as_bytes(), &mut file)?;
        builder.append_file(path.as_str().as_bytes(), &mut file)?;
    }
    builder.into_inner()?.flush()?;
    Ok(())
@@ -118,7 +118,8 @@ fn extract(args: ExtractArgs) -> Result<()> {
    // Extract files
    let mut num_files = 0;
    for path in &files {
        let mut out_dir = if let Some(out) = &args.out { out.clone() } else { PathBuf::new() };
        let mut out_dir =
            if let Some(out) = &args.out { out.clone() } else { Utf8NativePathBuf::new() };
        // If there are multiple files, extract to separate directories
        if files.len() > 1 {
            out_dir
@@ -126,14 +127,13 @@ fn extract(args: ExtractArgs) -> Result<()> {
        }
        std::fs::create_dir_all(&out_dir)?;
        if !args.quiet {
            println!("Extracting {} to {}", path.display(), out_dir.display());
            println!("Extracting {} to {}", path, out_dir);
        }

        let mut file = open_file(path, false)?;
        let mut archive = ar::Archive::new(file.map()?);
        while let Some(entry) = archive.next_entry() {
            let mut entry =
                entry.with_context(|| format!("Processing entry in {}", path.display()))?;
            let mut entry = entry.with_context(|| format!("Processing entry in {}", path))?;
            let file_name = std::str::from_utf8(entry.header().identifier())?;
            if !args.quiet && args.verbose {
                println!("\t{}", file_name);
@@ -146,7 +146,7 @@ fn extract(args: ExtractArgs) -> Result<()> {
                std::fs::create_dir_all(parent)?;
            }
            let mut file = File::create(&file_path)
                .with_context(|| format!("Failed to create file {}", file_path.display()))?;
                .with_context(|| format!("Failed to create file {}", file_path))?;
            std::io::copy(&mut entry, &mut file)?;
            file.flush()?;
src/cmd/dol.rs
@@ -1,13 +1,10 @@
use std::{
    borrow::Cow,
    cmp::min,
    collections::{btree_map::Entry, hash_map, BTreeMap, HashMap},
    ffi::OsStr,
    fs,
    fs::DirBuilder,
    io::{Cursor, Seek, Write},
    mem::take,
    path::{Path, PathBuf},
    time::Instant,
};

@@ -18,6 +15,7 @@ use itertools::Itertools;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use tracing::{debug, info, info_span};
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPath, Utf8UnixPathBuf};
use xxhash_rust::xxh3::xxh3_64;

use crate::{
@@ -51,6 +49,7 @@ use crate::{
        file::{buf_writer, touch, verify_hash, FileIterator, FileReadInfo},
        lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
        map::apply_map_file,
        path::{check_path_buf, native_path},
        rel::{process_rel, process_rel_header, update_rel_section_alignment},
        rso::{process_rso, DOL_SECTION_ABS, DOL_SECTION_ETI, DOL_SECTION_NAMES},
        split::{is_linker_generated_object, split_obj, update_splits},
@@ -81,24 +80,24 @@ enum SubCommand {
/// Views DOL file information.
#[argp(subcommand, name = "info")]
pub struct InfoArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// DOL file
    pub dol_file: PathBuf,
    #[argp(option, short = 's')]
    pub dol_file: Utf8NativePathBuf,
    #[argp(option, short = 's', from_str_fn(native_path))]
    /// optional path to selfile.sel
    pub selfile: Option<PathBuf>,
    pub selfile: Option<Utf8NativePathBuf>,
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Splits a DOL into relocatable objects.
#[argp(subcommand, name = "split")]
pub struct SplitArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// input configuration file
    config: PathBuf,
    #[argp(positional)]
    config: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// output directory
    out_dir: PathBuf,
    out_dir: Utf8NativePathBuf,
    #[argp(switch)]
    /// skip updating splits & symbol files (for build systems)
    no_update: bool,
@@ -111,36 +110,36 @@ pub struct SplitArgs {
/// Diffs symbols in a linked ELF.
#[argp(subcommand, name = "diff")]
pub struct DiffArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// input configuration file
    config: PathBuf,
    #[argp(positional)]
    config: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// linked ELF
    elf_file: PathBuf,
    elf_file: Utf8NativePathBuf,
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Applies updated symbols from a linked ELF to the project configuration.
#[argp(subcommand, name = "apply")]
pub struct ApplyArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// input configuration file
    config: PathBuf,
    #[argp(positional)]
    config: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// linked ELF
    elf_file: PathBuf,
    elf_file: Utf8NativePathBuf,
}

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Generates a project configuration file from a DOL (& RELs).
#[argp(subcommand, name = "config")]
pub struct ConfigArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// object files
    objects: Vec<PathBuf>,
    #[argp(option, short = 'o')]
    objects: Vec<Utf8NativePathBuf>,
    #[argp(option, short = 'o', from_str_fn(native_path))]
    /// output config YAML file
    out_file: PathBuf,
    out_file: Utf8NativePathBuf,
}

#[inline]
@@ -155,44 +154,37 @@ where T: Default + PartialEq {
    t == &T::default()
}

mod path_slash_serde {
    use std::path::PathBuf;

    use path_slash::PathBufExt as _;
mod unix_path_serde {
    use serde::{Deserialize, Deserializer, Serializer};
    use typed_path::Utf8UnixPathBuf;

    pub fn serialize<S>(path: &PathBuf, s: S) -> Result<S::Ok, S::Error>
    pub fn serialize<S>(path: &Utf8UnixPathBuf, s: S) -> Result<S::Ok, S::Error>
    where S: Serializer {
        let path_str = path.to_slash().ok_or_else(|| serde::ser::Error::custom("Invalid path"))?;
        s.serialize_str(path_str.as_ref())
        s.serialize_str(path.as_str())
    }

    pub fn deserialize<'de, D>(deserializer: D) -> Result<PathBuf, D::Error>
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Utf8UnixPathBuf, D::Error>
    where D: Deserializer<'de> {
        String::deserialize(deserializer).map(PathBuf::from_slash)
        String::deserialize(deserializer).map(Utf8UnixPathBuf::from)
    }
}

mod path_slash_serde_option {
    use std::path::PathBuf;

    use path_slash::PathBufExt as _;
mod unix_path_serde_option {
    use serde::{Deserialize, Deserializer, Serializer};
    use typed_path::Utf8UnixPathBuf;

    pub fn serialize<S>(path: &Option<PathBuf>, s: S) -> Result<S::Ok, S::Error>
    pub fn serialize<S>(path: &Option<Utf8UnixPathBuf>, s: S) -> Result<S::Ok, S::Error>
    where S: Serializer {
        if let Some(path) = path {
            let path_str =
                path.to_slash().ok_or_else(|| serde::ser::Error::custom("Invalid path"))?;
            s.serialize_str(path_str.as_ref())
            s.serialize_str(path.as_str())
        } else {
            s.serialize_none()
        }
    }

    pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error>
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Utf8UnixPathBuf>, D::Error>
    where D: Deserializer<'de> {
        Ok(Option::deserialize(deserializer)?.map(PathBuf::from_slash::<String>))
        Ok(Option::<String>::deserialize(deserializer)?.map(Utf8UnixPathBuf::from))
    }
}
|
||||
|
@ -200,8 +192,8 @@ mod path_slash_serde_option {
|
|||
pub struct ProjectConfig {
|
||||
#[serde(flatten)]
|
||||
pub base: ModuleConfig,
|
||||
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub selfile: Option<PathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub selfile: Option<Utf8UnixPathBuf>,
|
||||
#[serde(skip_serializing_if = "is_default")]
|
||||
pub selfile_hash: Option<String>,
|
||||
/// Version of the MW `.comment` section format.
|
||||
|
@ -235,8 +227,8 @@ pub struct ProjectConfig {
|
|||
#[serde(default = "bool_true", skip_serializing_if = "is_true")]
|
||||
pub export_all: bool,
|
||||
/// Optional base path for all object files.
|
||||
#[serde(default, skip_serializing_if = "is_default")]
|
||||
pub object_base: Option<PathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub object_base: Option<Utf8UnixPathBuf>,
|
||||
}
|
||||
|
||||
impl Default for ProjectConfig {
|
||||
|
@ -265,21 +257,21 @@ pub struct ModuleConfig {
|
|||
/// Object name. If not specified, the file name without extension will be used.
|
||||
#[serde(skip_serializing_if = "is_default")]
|
||||
pub name: Option<String>,
|
||||
#[serde(with = "path_slash_serde")]
|
||||
pub object: PathBuf,
|
||||
#[serde(with = "unix_path_serde")]
|
||||
pub object: Utf8UnixPathBuf,
|
||||
#[serde(skip_serializing_if = "is_default")]
|
||||
pub hash: Option<String>,
|
||||
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub splits: Option<PathBuf>,
|
||||
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub symbols: Option<PathBuf>,
|
||||
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub map: Option<PathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub splits: Option<Utf8UnixPathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub symbols: Option<Utf8UnixPathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub map: Option<Utf8UnixPathBuf>,
|
||||
/// Forces the given symbols to be active (exported) in the linker script.
|
||||
#[serde(default, skip_serializing_if = "is_default")]
|
||||
pub force_active: Vec<String>,
|
||||
#[serde(skip_serializing_if = "is_default")]
|
||||
pub ldscript_template: Option<PathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "is_default")]
|
||||
pub ldscript_template: Option<Utf8UnixPathBuf>,
|
||||
/// Overrides links to other modules.
|
||||
#[serde(skip_serializing_if = "is_default")]
|
||||
pub links: Option<Vec<String>>,
|
||||
|
@ -297,12 +289,12 @@ pub struct ExtractConfig {
|
|||
pub symbol: String,
|
||||
/// If specified, the symbol's data will be extracted to the given file.
|
||||
/// Path is relative to `out_dir/bin`.
|
||||
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "Option::is_none")]
|
||||
pub binary: Option<PathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "Option::is_none")]
|
||||
pub binary: Option<Utf8UnixPathBuf>,
|
||||
/// If specified, the symbol's data will be extracted to the given file as a C array.
|
||||
/// Path is relative to `out_dir/include`.
|
||||
#[serde(with = "path_slash_serde_option", default, skip_serializing_if = "Option::is_none")]
|
||||
pub header: Option<PathBuf>,
|
||||
#[serde(with = "unix_path_serde_option", default, skip_serializing_if = "Option::is_none")]
|
||||
pub header: Option<Utf8UnixPathBuf>,
|
||||
}
|
||||
|
||||
/// A relocation that should be blocked.
|
||||
|
@ -336,30 +328,20 @@ pub struct AddRelocationConfig {
|
|||
}
|
||||
|
||||
impl ModuleConfig {
|
||||
pub fn file_name(&self) -> Cow<'_, str> {
|
||||
self.object.file_name().unwrap_or(self.object.as_os_str()).to_string_lossy()
|
||||
pub fn file_name(&self) -> &str { self.object.file_name().unwrap_or(self.object.as_str()) }
|
||||
|
||||
pub fn file_prefix(&self) -> &str {
|
||||
let file_name = self.file_name();
|
||||
file_name.split_once('.').map(|(prefix, _)| prefix).unwrap_or(file_name)
|
||||
}
|
||||
|
||||
pub fn file_prefix(&self) -> Cow<'_, str> {
|
||||
match self.file_name() {
|
||||
Cow::Borrowed(s) => {
|
||||
Cow::Borrowed(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(s))
|
||||
}
|
||||
Cow::Owned(s) => {
|
||||
Cow::Owned(s.split_once('.').map(|(prefix, _)| prefix).unwrap_or(&s).to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn name(&self) -> Cow<'_, str> {
|
||||
self.name.as_ref().map(|n| n.as_str().to_cow()).unwrap_or_else(|| self.file_prefix())
|
||||
}
|
||||
pub fn name(&self) -> &str { self.name.as_deref().unwrap_or_else(|| self.file_prefix()) }
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct OutputUnit {
|
||||
#[serde(with = "path_slash_serde")]
|
||||
pub object: PathBuf,
|
||||
#[serde(with = "unix_path_serde")]
|
||||
pub object: Utf8UnixPathBuf,
|
||||
pub name: String,
|
||||
pub autogenerated: bool,
|
||||
pub code_size: u32,
|
||||
|
@ -370,8 +352,8 @@ pub struct OutputUnit {
|
|||
pub struct OutputModule {
|
||||
pub name: String,
|
||||
pub module_id: u32,
|
||||
#[serde(with = "path_slash_serde")]
|
||||
pub ldscript: PathBuf,
|
||||
#[serde(with = "unix_path_serde")]
|
||||
pub ldscript: Utf8UnixPathBuf,
|
||||
pub entry: Option<String>,
|
||||
pub units: Vec<OutputUnit>,
|
||||
}
|
||||
|
@ -788,21 +770,21 @@ fn resolve_external_relocations(
|
|||
|
||||
struct AnalyzeResult {
|
||||
obj: ObjInfo,
|
||||
dep: Vec<PathBuf>,
|
||||
dep: Vec<Utf8NativePathBuf>,
|
||||
symbols_cache: Option<FileReadInfo>,
|
||||
splits_cache: Option<FileReadInfo>,
|
||||
}
|
||||
|
||||
fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<AnalyzeResult> {
|
||||
let object_path = object_base.join(&config.base.object);
|
||||
log::debug!("Loading {}", object_path.display());
|
||||
log::debug!("Loading {}", object_path);
|
||||
let mut obj = {
|
||||
let mut file = object_base.open(&config.base.object)?;
|
||||
let data = file.map()?;
|
||||
if let Some(hash_str) = &config.base.hash {
|
||||
verify_hash(data, hash_str)?;
|
||||
}
|
||||
process_dol(data, config.base.name().as_ref())?
|
||||
process_dol(data, config.base.name())?
|
||||
};
|
||||
let mut dep = vec![object_path];
|
||||
|
||||
|
@ -811,20 +793,25 @@ fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<
|
|||
}
|
||||
|
||||
if let Some(map_path) = &config.base.map {
|
||||
apply_map_file(map_path, &mut obj, config.common_start, config.mw_comment_version)?;
|
||||
dep.push(map_path.clone());
|
||||
let map_path = map_path.with_encoding();
|
||||
apply_map_file(&map_path, &mut obj, config.common_start, config.mw_comment_version)?;
|
||||
dep.push(map_path);
|
||||
}
|
||||
|
||||
let splits_cache = if let Some(splits_path) = &config.base.splits {
|
||||
dep.push(splits_path.clone());
|
||||
apply_splits_file(splits_path, &mut obj)?
|
||||
let splits_path = splits_path.with_encoding();
|
||||
let cache = apply_splits_file(&splits_path, &mut obj)?;
|
||||
dep.push(splits_path);
|
||||
cache
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let symbols_cache = if let Some(symbols_path) = &config.base.symbols {
|
||||
dep.push(symbols_path.clone());
|
||||
apply_symbols_file(symbols_path, &mut obj)?
|
||||
let symbols_path = symbols_path.with_encoding();
|
||||
let cache = apply_symbols_file(&symbols_path, &mut obj)?;
|
||||
dep.push(symbols_path);
|
||||
cache
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
@ -850,8 +837,9 @@ fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<
|
|||
}
|
||||
|
||||
if let Some(selfile) = &config.selfile {
|
||||
log::info!("Loading {}", selfile.display());
|
||||
let mut file = open_file(selfile, true)?;
|
||||
let selfile: Utf8NativePathBuf = selfile.with_encoding();
|
||||
log::info!("Loading {}", selfile);
|
||||
let mut file = open_file(&selfile, true)?;
|
||||
let data = file.map()?;
|
||||
if let Some(hash) = &config.selfile_hash {
|
||||
verify_hash(data, hash)?;
|
||||
|
@ -872,8 +860,8 @@ fn load_analyze_dol(config: &ProjectConfig, object_base: &ObjectBase) -> Result<
|
|||
fn split_write_obj(
|
||||
module: &mut ModuleInfo,
|
||||
config: &ProjectConfig,
|
||||
base_dir: &Path,
|
||||
out_dir: &Path,
|
||||
base_dir: &Utf8NativePath,
|
||||
out_dir: &Utf8NativePath,
|
||||
no_update: bool,
|
||||
) -> Result<OutputModule> {
|
||||
debug!("Performing relocation analysis");
|
||||
|
@ -904,10 +892,15 @@ fn split_write_obj(
|
|||
if !no_update {
|
||||
debug!("Writing configuration");
|
||||
if let Some(symbols_path) = &module.config.symbols {
|
||||
write_symbols_file(symbols_path, &module.obj, module.symbols_cache)?;
|
||||
write_symbols_file(&symbols_path.with_encoding(), &module.obj, module.symbols_cache)?;
|
||||
}
|
||||
if let Some(splits_path) = &module.config.splits {
|
||||
write_splits_file(splits_path, &module.obj, false, module.splits_cache)?;
|
||||
write_splits_file(
|
||||
&splits_path.with_encoding(),
|
||||
&module.obj,
|
||||
false,
|
||||
module.splits_cache,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -919,7 +912,7 @@ fn split_write_obj(
|
|||
DirBuilder::new()
|
||||
.recursive(true)
|
||||
.create(out_dir)
|
||||
.with_context(|| format!("Failed to create out dir '{}'", out_dir.display()))?;
|
||||
.with_context(|| format!("Failed to create out dir '{}'", out_dir))?;
|
||||
let obj_dir = out_dir.join("obj");
|
||||
let entry = if module.obj.kind == ObjKind::Executable {
|
||||
module.obj.entry.and_then(|e| {
|
||||
|
@ -934,7 +927,7 @@ fn split_write_obj(
|
|||
let mut out_config = OutputModule {
|
||||
name: module_name,
|
||||
module_id,
|
||||
ldscript: out_dir.join("ldscript.lcf"),
|
||||
ldscript: out_dir.join("ldscript.lcf").with_unix_encoding(),
|
||||
units: Vec::with_capacity(split_objs.len()),
|
||||
entry,
|
||||
};
|
||||
|
@ -942,7 +935,7 @@ fn split_write_obj(
|
|||
let out_obj = write_elf(split_obj, config.export_all)?;
|
||||
let out_path = obj_dir.join(obj_path_for_unit(&unit.name));
|
||||
out_config.units.push(OutputUnit {
|
||||
object: out_path.clone(),
|
||||
object: out_path.with_unix_encoding(),
|
||||
name: unit.name.clone(),
|
||||
autogenerated: unit.autogenerated,
|
||||
code_size: split_obj.code_size(),
|
||||
|
@ -967,7 +960,7 @@ fn split_write_obj(
|
|||
let data = section.symbol_data(symbol)?;
|
||||
|
||||
if let Some(binary) = &extract.binary {
|
||||
let out_path = base_dir.join("bin").join(binary);
|
||||
let out_path = base_dir.join("bin").join(binary.with_encoding());
|
||||
if let Some(parent) = out_path.parent() {
|
||||
DirBuilder::new().recursive(true).create(parent)?;
|
||||
}
|
||||
|
@ -976,7 +969,7 @@ fn split_write_obj(
|
|||
|
||||
if let Some(header) = &extract.header {
|
||||
let header_string = bin2c(symbol, section, data);
|
||||
let out_path = base_dir.join("include").join(header);
|
||||
let out_path = base_dir.join("include").join(header.with_encoding());
|
||||
if let Some(parent) = out_path.parent() {
|
||||
DirBuilder::new().recursive(true).create(parent)?;
|
||||
}
|
||||
|
@ -985,16 +978,18 @@ fn split_write_obj(
|
|||
}
|
||||
|
||||
// Generate ldscript.lcf
|
||||
let ldscript_template = if let Some(template) = &module.config.ldscript_template {
|
||||
Some(fs::read_to_string(template).with_context(|| {
|
||||
format!("Failed to read linker script template '{}'", template.display())
|
||||
let ldscript_template = if let Some(template_path) = &module.config.ldscript_template {
|
||||
let template_path = template_path.with_encoding();
|
||||
Some(fs::read_to_string(&template_path).with_context(|| {
|
||||
format!("Failed to read linker script template '{}'", template_path)
|
||||
})?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let ldscript_string =
|
||||
generate_ldscript(&module.obj, ldscript_template.as_deref(), &module.config.force_active)?;
|
||||
write_if_changed(&out_config.ldscript, ldscript_string.as_bytes())?;
|
||||
let ldscript_path = out_config.ldscript.with_encoding();
|
||||
write_if_changed(&ldscript_path, ldscript_string.as_bytes())?;
|
||||
|
||||
if config.write_asm {
|
||||
debug!("Writing disassembly");
|
||||
|
@ -1004,15 +999,15 @@ fn split_write_obj(
|
|||
|
||||
let mut w = buf_writer(&out_path)?;
|
||||
write_asm(&mut w, split_obj)
|
||||
.with_context(|| format!("Failed to write {}", out_path.display()))?;
|
||||
.with_context(|| format!("Failed to write {}", out_path))?;
|
||||
w.flush()?;
|
||||
}
|
||||
}
|
||||
Ok(out_config)
|
||||
}
|
||||
|
||||
fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
|
||||
if path.is_file() {
|
||||
fn write_if_changed(path: &Utf8NativePath, contents: &[u8]) -> Result<()> {
|
||||
if fs::metadata(path).is_ok_and(|m| m.is_file()) {
|
||||
let mut old_file = open_file(path, true)?;
|
||||
let old_data = old_file.map()?;
|
||||
// If the file is the same size, check if the contents are the same
|
||||
|
@ -1021,8 +1016,7 @@ fn write_if_changed(path: &Path, contents: &[u8]) -> Result<()> {
|
|||
return Ok(());
|
||||
}
|
||||
}
|
||||
fs::write(path, contents)
|
||||
.with_context(|| format!("Failed to write file '{}'", path.display()))?;
|
||||
fs::write(path, contents).with_context(|| format!("Failed to write file '{}'", path))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -1032,14 +1026,13 @@ fn load_analyze_rel(
|
|||
module_config: &ModuleConfig,
|
||||
) -> Result<AnalyzeResult> {
|
||||
let object_path = object_base.join(&module_config.object);
|
||||
debug!("Loading {}", object_path.display());
|
||||
debug!("Loading {}", object_path);
|
||||
let mut file = object_base.open(&module_config.object)?;
|
||||
let data = file.map()?;
|
||||
if let Some(hash_str) = &module_config.hash {
|
||||
verify_hash(data, hash_str)?;
|
||||
}
|
||||
let (header, mut module_obj) =
|
||||
process_rel(&mut Cursor::new(data), module_config.name().as_ref())?;
|
||||
let (header, mut module_obj) = process_rel(&mut Cursor::new(data), module_config.name())?;
|
||||
|
||||
if let Some(comment_version) = config.mw_comment_version {
|
||||
module_obj.mw_comment = Some(MWComment::new(comment_version)?);
|
||||
|
@ -1047,20 +1040,25 @@ fn load_analyze_rel(
|
|||
|
||||
let mut dep = vec![object_path];
|
||||
if let Some(map_path) = &module_config.map {
|
||||
apply_map_file(map_path, &mut module_obj, None, None)?;
|
||||
dep.push(map_path.clone());
|
||||
let map_path = map_path.with_encoding();
|
||||
apply_map_file(&map_path, &mut module_obj, None, None)?;
|
||||
dep.push(map_path);
|
||||
}
|
||||
|
||||
let splits_cache = if let Some(splits_path) = &module_config.splits {
|
||||
dep.push(splits_path.clone());
|
||||
apply_splits_file(splits_path, &mut module_obj)?
|
||||
let splits_path = splits_path.with_encoding();
|
||||
let cache = apply_splits_file(&splits_path, &mut module_obj)?;
|
||||
dep.push(splits_path);
|
||||
cache
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let symbols_cache = if let Some(symbols_path) = &module_config.symbols {
|
||||
dep.push(symbols_path.clone());
|
||||
apply_symbols_file(symbols_path, &mut module_obj)?
|
||||
let symbols_path = symbols_path.with_encoding();
|
||||
let cache = apply_symbols_file(&symbols_path, &mut module_obj)?;
|
||||
dep.push(symbols_path);
|
||||
cache
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
@ -1100,7 +1098,7 @@ fn split(args: SplitArgs) -> Result<()> {
|
|||
}
|
||||
|
||||
let command_start = Instant::now();
|
||||
info!("Loading {}", args.config.display());
|
||||
info!("Loading {}", args.config);
|
||||
let mut config: ProjectConfig = {
|
||||
let mut config_file = open_file(&args.config, true)?;
|
||||
serde_yaml::from_reader(config_file.as_mut())?
|
||||
|
@ -1302,7 +1300,7 @@ fn split(args: SplitArgs) -> Result<()> {
|
|||
let _span =
|
||||
info_span!("module", name = %module.config.name(), id = module.obj.module_id)
|
||||
.entered();
|
||||
let out_dir = args.out_dir.join(module.config.name().as_ref());
|
||||
let out_dir = args.out_dir.join(module.config.name());
|
||||
split_write_obj(module, &config, &args.out_dir, &out_dir, args.no_update).with_context(
|
||||
|| {
|
||||
format!(
|
||||
|
@ -1363,7 +1361,7 @@ fn split(args: SplitArgs) -> Result<()> {
|
|||
// Write dep file
|
||||
{
|
||||
let dep_path = args.out_dir.join("dep");
|
||||
let mut dep_file = buf_writer(dep_path)?;
|
||||
let mut dep_file = buf_writer(&dep_path)?;
|
||||
dep.write(&mut dep_file)?;
|
||||
dep_file.flush()?;
|
||||
}
|
||||
|
@ -1379,8 +1377,7 @@ fn split(args: SplitArgs) -> Result<()> {
|
|||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn validate<P>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -> Result<()>
|
||||
where P: AsRef<Path> {
|
||||
fn validate(obj: &ObjInfo, elf_file: &Utf8NativePath, state: &AnalyzerState) -> Result<()> {
|
||||
let real_obj = process_elf(elf_file)?;
|
||||
for (section_index, real_section) in real_obj.sections.iter() {
|
||||
let obj_section = match obj.sections.get(section_index) {
|
||||
|
@ -1553,26 +1550,26 @@ fn symbol_name_fuzzy_eq(a: &ObjSymbol, b: &ObjSymbol) -> bool {
|
|||
}
|
||||
|
||||
fn diff(args: DiffArgs) -> Result<()> {
|
||||
log::info!("Loading {}", args.config.display());
|
||||
log::info!("Loading {}", args.config);
|
||||
let mut config_file = open_file(&args.config, true)?;
|
||||
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
|
||||
let object_base = find_object_base(&config)?;
|
||||
|
||||
log::info!("Loading {}", object_base.join(&config.base.object).display());
|
||||
log::info!("Loading {}", object_base.join(&config.base.object));
|
||||
let mut obj = {
|
||||
let mut file = object_base.open(&config.base.object)?;
|
||||
let data = file.map()?;
|
||||
if let Some(hash_str) = &config.base.hash {
|
||||
verify_hash(data, hash_str)?;
|
||||
}
|
||||
process_dol(data, config.base.name().as_ref())?
|
||||
process_dol(data, config.base.name())?
|
||||
};
|
||||
|
||||
if let Some(symbols_path) = &config.base.symbols {
|
||||
apply_symbols_file(symbols_path, &mut obj)?;
|
||||
apply_symbols_file(&symbols_path.with_encoding(), &mut obj)?;
|
||||
}
|
||||
|
||||
log::info!("Loading {}", args.elf_file.display());
|
||||
log::info!("Loading {}", args.elf_file);
|
||||
let linked_obj = process_elf(&args.elf_file)?;
|
||||
|
||||
let common_bss = obj.sections.common_bss_start();
|
||||
|
@ -1734,29 +1731,30 @@ fn diff(args: DiffArgs) -> Result<()> {
|
|||
}
|
||||
|
||||
fn apply(args: ApplyArgs) -> Result<()> {
|
||||
log::info!("Loading {}", args.config.display());
|
||||
log::info!("Loading {}", args.config);
|
||||
let mut config_file = open_file(&args.config, true)?;
|
||||
let config: ProjectConfig = serde_yaml::from_reader(config_file.as_mut())?;
|
||||
let object_base = find_object_base(&config)?;
|
||||
|
||||
log::info!("Loading {}", object_base.join(&config.base.object).display());
|
||||
log::info!("Loading {}", object_base.join(&config.base.object));
|
||||
let mut obj = {
|
||||
let mut file = object_base.open(&config.base.object)?;
|
||||
let data = file.map()?;
|
||||
if let Some(hash_str) = &config.base.hash {
|
||||
verify_hash(data, hash_str)?;
|
||||
}
|
||||
process_dol(data, config.base.name().as_ref())?
|
||||
process_dol(data, config.base.name())?
|
||||
};
|
||||
|
||||
let Some(symbols_path) = &config.base.symbols else {
|
||||
bail!("No symbols file specified in config");
|
||||
};
|
||||
let Some(symbols_cache) = apply_symbols_file(symbols_path, &mut obj)? else {
|
||||
bail!("Symbols file '{}' does not exist", symbols_path.display());
|
||||
let symbols_path = symbols_path.with_encoding();
|
||||
let Some(symbols_cache) = apply_symbols_file(&symbols_path, &mut obj)? else {
|
||||
bail!("Symbols file '{}' does not exist", symbols_path);
|
||||
};
|
||||
|
||||
log::info!("Loading {}", args.elf_file.display());
|
||||
log::info!("Loading {}", args.elf_file);
|
||||
let linked_obj = process_elf(&args.elf_file)?;
|
||||
|
||||
let mut replacements: Vec<(SymbolIndex, Option<ObjSymbol>)> = vec![];
|
||||
|
@ -1892,7 +1890,8 @@ fn apply(args: ApplyArgs) -> Result<()> {
|
|||
}
|
||||
}
|
||||
|
||||
write_symbols_file(config.base.symbols.as_ref().unwrap(), &obj, Some(symbols_cache))?;
|
||||
let symbols_path = config.base.symbols.as_ref().unwrap();
|
||||
write_symbols_file(&symbols_path.with_encoding(), &obj, Some(symbols_cache))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1902,39 +1901,41 @@ fn config(args: ConfigArgs) -> Result<()> {
|
|||
let mut modules = Vec::<(u32, ModuleConfig)>::new();
|
||||
for result in FileIterator::new(&args.objects)? {
|
||||
let (path, mut entry) = result?;
|
||||
log::info!("Loading {}", path.display());
|
||||
|
||||
match path.extension() {
|
||||
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("dol")) => {
|
||||
config.base.object = path;
|
||||
log::info!("Loading {}", path);
|
||||
let Some(ext) = path.extension() else {
|
||||
bail!("No file extension for {}", path);
|
||||
};
|
||||
match ext.to_ascii_lowercase().as_str() {
|
||||
"dol" => {
|
||||
config.base.object = path.with_unix_encoding();
|
||||
config.base.hash = Some(file_sha1_string(&mut entry)?);
|
||||
}
|
||||
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rel")) => {
|
||||
"rel" => {
|
||||
let header = process_rel_header(&mut entry)?;
|
||||
entry.rewind()?;
|
||||
modules.push((header.module_id, ModuleConfig {
|
||||
object: path,
|
||||
object: path.with_unix_encoding(),
|
||||
hash: Some(file_sha1_string(&mut entry)?),
|
||||
..Default::default()
|
||||
}));
|
||||
}
|
||||
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("sel")) => {
|
||||
config.selfile = Some(path);
|
||||
"sel" => {
|
||||
config.selfile = Some(path.with_unix_encoding());
|
||||
config.selfile_hash = Some(file_sha1_string(&mut entry)?);
|
||||
}
|
||||
Some(ext) if ext.eq_ignore_ascii_case(OsStr::new("rso")) => {
|
||||
"rso" => {
|
||||
config.modules.push(ModuleConfig {
|
||||
object: path,
|
||||
object: path.with_unix_encoding(),
|
||||
hash: Some(file_sha1_string(&mut entry)?),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
_ => bail!("Unknown file extension: '{}'", path.display()),
|
||||
_ => bail!("Unknown file extension: '{}'", ext),
|
||||
}
|
||||
}
|
||||
modules.sort_by(|(a_id, a_config), (b_id, b_config)| {
|
||||
// Sort by module ID, then by name
|
||||
a_id.cmp(b_id).then(a_config.name().cmp(&b_config.name()))
|
||||
a_id.cmp(b_id).then(a_config.name().cmp(b_config.name()))
|
||||
});
|
||||
config.modules.extend(modules.into_iter().map(|(_, m)| m));
|
||||
|
||||
|
@ -1999,49 +2000,50 @@ fn apply_add_relocations(obj: &mut ObjInfo, relocations: &[AddRelocationConfig])
|
|||
|
||||
pub enum ObjectBase {
|
||||
None,
|
||||
Directory(PathBuf),
|
||||
Vfs(PathBuf, Box<dyn Vfs + Send + Sync>),
|
||||
Directory(Utf8NativePathBuf),
|
||||
Vfs(Utf8NativePathBuf, Box<dyn Vfs + Send + Sync>),
|
||||
}
|
||||
|
||||
impl ObjectBase {
|
||||
pub fn join(&self, path: &Path) -> PathBuf {
|
||||
pub fn join(&self, path: &Utf8UnixPath) -> Utf8NativePathBuf {
|
||||
match self {
|
||||
ObjectBase::None => path.to_path_buf(),
|
||||
ObjectBase::Directory(base) => base.join(path),
|
||||
ObjectBase::Vfs(base, _) => {
|
||||
PathBuf::from(format!("{}:{}", base.display(), path.display()))
|
||||
}
|
||||
ObjectBase::None => path.with_encoding(),
|
||||
ObjectBase::Directory(base) => base.join(path.with_encoding()),
|
||||
ObjectBase::Vfs(base, _) => Utf8NativePathBuf::from(format!("{}:{}", base, path)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn open(&self, path: &Path) -> Result<Box<dyn VfsFile>> {
|
||||
pub fn open(&self, path: &Utf8UnixPath) -> Result<Box<dyn VfsFile>> {
|
||||
match self {
|
||||
ObjectBase::None => open_file(path, true),
|
||||
ObjectBase::Directory(base) => open_file(&base.join(path), true),
|
||||
ObjectBase::Vfs(vfs_path, vfs) => open_file_with_fs(vfs.clone(), path, true)
|
||||
.with_context(|| format!("Using disc image {}", vfs_path.display())),
|
||||
ObjectBase::None => open_file(&path.with_encoding(), true),
|
||||
ObjectBase::Directory(base) => open_file(&base.join(path.with_encoding()), true),
|
||||
ObjectBase::Vfs(vfs_path, vfs) => {
|
||||
open_file_with_fs(vfs.clone(), &path.with_encoding(), true)
|
||||
.with_context(|| format!("Using disc image {}", vfs_path))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_object_base(config: &ProjectConfig) -> Result<ObjectBase> {
|
||||
if let Some(base) = &config.object_base {
|
||||
let base = base.with_encoding();
|
||||
// Search for disc images in the object base directory
|
||||
for result in base.read_dir()? {
|
||||
for result in fs::read_dir(&base)? {
|
||||
let entry = result?;
|
||||
if entry.file_type()?.is_file() {
|
||||
let path = entry.path();
|
||||
let path = check_path_buf(entry.path())?;
|
||||
let mut file = open_file(&path, false)?;
|
||||
let format = nodtool::nod::Disc::detect(file.as_mut())?;
|
||||
if let Some(format) = format {
|
||||
file.rewind()?;
|
||||
log::info!("Using disc image {}", path.display());
|
||||
log::info!("Using disc image {}", path);
|
||||
let fs = open_fs(file, ArchiveKind::Disc(format))?;
|
||||
return Ok(ObjectBase::Vfs(path, fs));
|
||||
}
|
||||
}
|
||||
}
|
||||
return Ok(ObjectBase::Directory(base.clone()));
|
||||
return Ok(ObjectBase::Directory(base));
|
||||
}
|
||||
Ok(ObjectBase::None)
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@ use std::{
|
|||
collections::{btree_map, BTreeMap},
|
||||
io::{stdout, Cursor, Read, Write},
|
||||
ops::Bound::{Excluded, Unbounded},
|
||||
path::PathBuf,
|
||||
str::from_utf8,
|
||||
};
|
||||
|
||||
|
@ -15,6 +14,7 @@ use syntect::{
|
|||
highlighting::{Color, HighlightIterator, HighlightState, Highlighter, Theme, ThemeSet},
|
||||
parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet},
|
||||
};
|
||||
use typed_path::Utf8NativePathBuf;
|
||||
|
||||
use crate::{
|
||||
util::{
|
||||
|
@ -23,6 +23,7 @@ use crate::{
|
|||
should_skip_tag, tag_type_string, AttributeKind, TagKind,
|
||||
},
|
||||
file::buf_writer,
|
||||
path::native_path,
|
||||
},
|
||||
vfs::open_file,
|
||||
};
|
||||
|
@ -45,12 +46,12 @@ enum SubCommand {
|
|||
/// Dumps DWARF 1.1 info from an object or archive.
|
||||
#[argp(subcommand, name = "dump")]
|
||||
pub struct DumpArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// Input object. (ELF or archive)
|
||||
in_file: PathBuf,
|
||||
#[argp(option, short = 'o')]
|
||||
in_file: Utf8NativePathBuf,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// Output file. (Or directory, for archive)
|
||||
out: Option<PathBuf>,
|
||||
out: Option<Utf8NativePathBuf>,
|
||||
#[argp(switch)]
|
||||
/// Disable color output.
|
||||
no_color: bool,
|
||||
|
@ -104,7 +105,7 @@ fn dump(args: DumpArgs) -> Result<()> {
|
|||
let name = name.trim_start_matches("D:").replace('\\', "/");
|
||||
let name = name.rsplit_once('/').map(|(_, b)| b).unwrap_or(&name);
|
||||
let file_path = out_path.join(format!("{}.txt", name));
|
||||
let mut file = buf_writer(file_path)?;
|
||||
let mut file = buf_writer(&file_path)?;
|
||||
dump_debug_section(&args, &mut file, &obj_file, debug_section)?;
|
||||
file.flush()?;
|
||||
} else if args.no_color {
|
||||
|
|
|
@ -3,7 +3,6 @@ use std::{
|
|||
fs,
|
||||
fs::DirBuilder,
|
||||
io::{Cursor, Write},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, bail, ensure, Context, Result};
|
||||
|
@ -15,6 +14,7 @@ use object::{
|
|||
FileFlags, Object, ObjectSection, ObjectSymbol, RelocationTarget, SectionFlags, SectionIndex,
|
||||
SectionKind, SymbolFlags, SymbolIndex, SymbolKind, SymbolScope, SymbolSection,
|
||||
};
|
||||
use typed_path::{Utf8NativePath, Utf8NativePathBuf};
|
||||
|
||||
use crate::{
|
||||
obj::ObjKind,
|
||||
|
@ -24,6 +24,7 @@ use crate::{
|
|||
config::{write_splits_file, write_symbols_file},
|
||||
elf::{process_elf, write_elf},
|
||||
file::{buf_writer, process_rsp},
|
||||
path::native_path,
|
||||
reader::{Endian, FromReader},
|
||||
signatures::{compare_signature, generate_signature, FunctionSignature},
|
||||
split::split_obj,
|
||||
|
@ -54,72 +55,72 @@ enum SubCommand {
|
|||
/// Disassembles an ELF file.
|
||||
#[argp(subcommand, name = "disasm")]
|
||||
pub struct DisasmArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// input file
|
||||
elf_file: PathBuf,
|
||||
#[argp(positional)]
|
||||
elf_file: Utf8NativePathBuf,
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// output file (.o) or directory (.elf)
|
||||
out: PathBuf,
|
||||
out: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Fixes issues with GNU assembler built object files.
|
||||
#[argp(subcommand, name = "fixup")]
|
||||
pub struct FixupArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// input file
|
||||
in_file: PathBuf,
|
||||
#[argp(positional)]
|
||||
in_file: Utf8NativePathBuf,
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// output file
|
||||
out_file: PathBuf,
|
||||
out_file: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Splits an executable ELF into relocatable objects.
|
||||
#[argp(subcommand, name = "split")]
|
||||
pub struct SplitArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// input file
|
||||
in_file: PathBuf,
|
||||
#[argp(positional)]
|
||||
in_file: Utf8NativePathBuf,
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// output directory
|
||||
out_dir: PathBuf,
|
||||
out_dir: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Generates configuration files from an executable ELF.
|
||||
#[argp(subcommand, name = "config")]
|
||||
pub struct ConfigArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// input file
|
||||
in_file: PathBuf,
|
||||
#[argp(positional)]
|
||||
in_file: Utf8NativePathBuf,
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// output directory
|
||||
out_dir: PathBuf,
|
||||
out_dir: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Builds function signatures from an ELF file.
|
||||
#[argp(subcommand, name = "sigs")]
|
||||
pub struct SignaturesArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// input file(s)
|
||||
files: Vec<PathBuf>,
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 's')]
|
||||
/// symbol name
|
||||
symbol: String,
|
||||
#[argp(option, short = 'o')]
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// output yml
|
||||
out_file: PathBuf,
|
||||
out_file: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Prints information about an ELF file.
|
||||
#[argp(subcommand, name = "info")]
|
||||
pub struct InfoArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// input file
|
||||
input: PathBuf,
|
||||
input: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
pub fn run(args: Args) -> Result<()> {
|
||||
|
@ -134,17 +135,17 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
|
||||
fn config(args: ConfigArgs) -> Result<()> {
|
||||
log::info!("Loading {}", args.in_file.display());
|
||||
log::info!("Loading {}", args.in_file);
|
||||
let obj = process_elf(&args.in_file)?;
|
||||
|
||||
DirBuilder::new().recursive(true).create(&args.out_dir)?;
|
||||
write_symbols_file(args.out_dir.join("symbols.txt"), &obj, None)?;
|
||||
write_splits_file(args.out_dir.join("splits.txt"), &obj, false, None)?;
|
||||
write_symbols_file(&args.out_dir.join("symbols.txt"), &obj, None)?;
|
||||
write_splits_file(&args.out_dir.join("splits.txt"), &obj, false, None)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn disasm(args: DisasmArgs) -> Result<()> {
|
||||
log::info!("Loading {}", args.elf_file.display());
|
||||
log::info!("Loading {}", args.elf_file);
|
||||
let obj = process_elf(&args.elf_file)?;
|
||||
match obj.kind {
|
||||
ObjKind::Executable => {
|
||||
|
@ -156,12 +157,12 @@ fn disasm(args: DisasmArgs) -> Result<()> {
|
|||
DirBuilder::new().recursive(true).create(&include_dir)?;
|
||||
fs::write(include_dir.join("macros.inc"), include_bytes!("../../assets/macros.inc"))?;
|
||||
|
||||
let mut files_out = buf_writer(args.out.join("link_order.txt"))?;
|
||||
let mut files_out = buf_writer(&args.out.join("link_order.txt"))?;
|
||||
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
|
||||
let out_path = asm_dir.join(file_name_from_unit(&unit.name, ".s"));
|
||||
log::info!("Writing {}", out_path.display());
|
||||
log::info!("Writing {}", out_path);
|
||||
|
||||
let mut w = buf_writer(out_path)?;
|
||||
let mut w = buf_writer(&out_path)?;
|
||||
write_asm(&mut w, split_obj)?;
|
||||
w.flush()?;
|
||||
|
||||
|
@ -170,7 +171,7 @@ fn disasm(args: DisasmArgs) -> Result<()> {
|
|||
files_out.flush()?;
|
||||
}
|
||||
ObjKind::Relocatable => {
|
||||
let mut w = buf_writer(args.out)?;
|
||||
let mut w = buf_writer(&args.out)?;
|
||||
write_asm(&mut w, &obj)?;
|
||||
w.flush()?;
|
||||
}
|
||||
|
@ -193,18 +194,17 @@ fn split(args: SplitArgs) -> Result<()> {
|
|||
};
|
||||
}
|
||||
|
||||
let mut rsp_file = buf_writer("rsp")?;
|
||||
let mut rsp_file = buf_writer(Utf8NativePath::new("rsp"))?;
|
||||
for unit in &obj.link_order {
|
||||
let object = file_map
|
||||
.get(&unit.name)
|
||||
.ok_or_else(|| anyhow!("Failed to find object file for unit '{}'", unit.name))?;
|
||||
let out_path = args.out_dir.join(file_name_from_unit(&unit.name, ".o"));
|
||||
writeln!(rsp_file, "{}", out_path.display())?;
|
||||
writeln!(rsp_file, "{}", out_path)?;
|
||||
if let Some(parent) = out_path.parent() {
|
||||
DirBuilder::new().recursive(true).create(parent)?;
|
||||
}
|
||||
fs::write(&out_path, object)
|
||||
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
|
||||
fs::write(&out_path, object).with_context(|| format!("Failed to write '{}'", out_path))?;
|
||||
}
|
||||
rsp_file.flush()?;
|
||||
Ok(())
|
||||
|
@ -237,7 +237,7 @@ const ASM_SUFFIX: &str = " (asm)";
|
|||
|
||||
fn fixup(args: FixupArgs) -> Result<()> {
|
||||
let in_buf = fs::read(&args.in_file)
|
||||
.with_context(|| format!("Failed to open input file: '{}'", args.in_file.display()))?;
|
||||
.with_context(|| format!("Failed to open input file: '{}'", args.in_file))?;
|
||||
let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?;
|
||||
let mut out_file =
|
||||
object::write::Object::new(in_file.format(), in_file.architecture(), in_file.endianness());
|
||||
|
@ -262,10 +262,7 @@ fn fixup(args: FixupArgs) -> Result<()> {
|
|||
let file_name = args
|
||||
.in_file
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("'{}' is not a file path", args.in_file.display()))?;
|
||||
let file_name = file_name
|
||||
.to_str()
|
||||
.ok_or_else(|| anyhow!("'{}' is not valid UTF-8", file_name.to_string_lossy()))?;
|
||||
.ok_or_else(|| anyhow!("'{}' is not a file path", args.in_file))?;
|
||||
let mut name_bytes = file_name.as_bytes().to_vec();
|
||||
name_bytes.append(&mut ASM_SUFFIX.as_bytes().to_vec());
|
||||
out_file.add_symbol(object::write::Symbol {
|
||||
|
@ -445,7 +442,7 @@ fn signatures(args: SignaturesArgs) -> Result<()> {
|
|||
|
||||
let mut signatures: HashMap<String, FunctionSignature> = HashMap::new();
|
||||
for path in files {
|
||||
log::info!("Processing {}", path.display());
|
||||
log::info!("Processing {}", path);
|
||||
let signature = match generate_signature(&path, &args.symbol) {
|
||||
Ok(Some(signature)) => signature,
|
||||
Ok(None) => continue,
|
||||
|
@ -472,7 +469,7 @@ fn signatures(args: SignaturesArgs) -> Result<()> {
|
|||
|
||||
fn info(args: InfoArgs) -> Result<()> {
|
||||
let in_buf = fs::read(&args.input)
|
||||
.with_context(|| format!("Failed to open input file: '{}'", args.input.display()))?;
|
||||
.with_context(|| format!("Failed to open input file: '{}'", args.input))?;
|
||||
let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?;
|
||||
|
||||
println!("ELF type: {:?}", in_file.kind());
|
||||
|
|
|
src/cmd/elf2dol.rs

@@ -1,24 +1,25 @@
use std::{
    io::{Seek, SeekFrom, Write},
    path::PathBuf,
};
use std::io::{Seek, SeekFrom, Write};

use anyhow::{anyhow, bail, ensure, Result};
use argp::FromArgs;
use object::{Architecture, Endianness, Object, ObjectKind, ObjectSection, SectionKind};
use typed_path::Utf8NativePathBuf;

use crate::{util::file::buf_writer, vfs::open_file};
use crate::{
    util::{file::buf_writer, path::native_path},
    vfs::open_file,
};

#[derive(FromArgs, PartialEq, Eq, Debug)]
/// Converts an ELF file to a DOL file.
#[argp(subcommand, name = "elf2dol")]
pub struct Args {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// path to input ELF
    elf_file: PathBuf,
    #[argp(positional)]
    elf_file: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// path to output DOL
    dol_file: PathBuf,
    dol_file: Utf8NativePathBuf,
    /// sections (by name) to ignore
    #[argp(option, long = "ignore")]
    deny_sections: Vec<String>,
src/cmd/map.rs

@@ -1,14 +1,16 @@
use std::{fs::DirBuilder, path::PathBuf};
use std::fs::DirBuilder;

use anyhow::{bail, ensure, Result};
use argp::FromArgs;
use cwdemangle::{demangle, DemangleOptions};
use tracing::error;
use typed_path::Utf8NativePathBuf;

use crate::{
    util::{
        config::{write_splits_file, write_symbols_file},
        map::{create_obj, process_map, SymbolEntry, SymbolRef},
        path::native_path,
        split::update_splits,
    },
    vfs::open_file,
@@ -34,9 +36,9 @@ enum SubCommand {
/// Displays all entries for a particular TU.
#[argp(subcommand, name = "entries")]
pub struct EntriesArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// path to input map
    map_file: PathBuf,
    map_file: Utf8NativePathBuf,
    #[argp(positional)]
    /// TU to display entries for
    unit: String,
@@ -46,9 +48,9 @@ pub struct EntriesArgs {
/// Displays all references to a symbol.
#[argp(subcommand, name = "symbol")]
pub struct SymbolArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// path to input map
    map_file: PathBuf,
    map_file: Utf8NativePathBuf,
    #[argp(positional)]
    /// symbol to display references for
    symbol: String,
@@ -58,12 +60,12 @@ pub struct SymbolArgs {
/// Generates project configuration files from a map. (symbols.txt, splits.txt)
#[argp(subcommand, name = "config")]
pub struct ConfigArgs {
    #[argp(positional)]
    #[argp(positional, from_str_fn(native_path))]
    /// path to input map
    map_file: PathBuf,
    #[argp(positional)]
    map_file: Utf8NativePathBuf,
    #[argp(positional, from_str_fn(native_path))]
    /// output directory for symbols.txt and splits.txt
    out_dir: PathBuf,
    out_dir: Utf8NativePathBuf,
}

pub fn run(args: Args) -> Result<()> {
@@ -189,8 +191,8 @@ fn config(args: ConfigArgs) -> Result<()> {
        error!("Failed to update splits: {}", e)
    }
    DirBuilder::new().recursive(true).create(&args.out_dir)?;
    write_symbols_file(args.out_dir.join("symbols.txt"), &obj, None)?;
    write_splits_file(args.out_dir.join("splits.txt"), &obj, false, None)?;
    write_symbols_file(&args.out_dir.join("symbols.txt"), &obj, None)?;
    write_splits_file(&args.out_dir.join("splits.txt"), &obj, false, None)?;
    log::info!("Done!");
    Ok(())
}
@ -1,10 +1,11 @@
|
|||
use std::{fs, path::PathBuf};
|
||||
use std::fs;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use argp::FromArgs;
|
||||
use typed_path::Utf8NativePathBuf;
|
||||
|
||||
use crate::{
|
||||
util::{file::process_rsp, nlzss, IntoCow, ToCow},
|
||||
util::{file::process_rsp, nlzss, path::native_path, IntoCow, ToCow},
|
||||
vfs::open_file,
|
||||
};
|
||||
|
||||
|
@ -26,13 +27,13 @@ enum SubCommand {
|
|||
/// Decompresses NLZSS-compressed files.
|
||||
#[argp(subcommand, name = "decompress")]
|
||||
pub struct DecompressArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// NLZSS-compressed file(s)
|
||||
files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'o')]
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// Output file (or directory, if multiple files are specified).
|
||||
/// If not specified, decompresses in-place.
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
}
|
||||
|
||||
pub fn run(args: Args) -> Result<()> {
|
||||
|
@ -47,7 +48,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
|
|||
for path in files {
|
||||
let mut file = open_file(&path, false)?;
|
||||
let data = nlzss::decompress(file.as_mut())
|
||||
.map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path.display(), e))?;
|
||||
.map_err(|e| anyhow!("Failed to decompress '{}' with NLZSS: {}", path, e))?;
|
||||
let out_path = if let Some(output) = &args.output {
|
||||
if single_file {
|
||||
output.as_path().to_cow()
|
||||
|
@ -58,7 +59,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
|
|||
path.as_path().to_cow()
|
||||
};
|
||||
fs::write(out_path.as_ref(), data)
|
||||
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
|
||||
.with_context(|| format!("Failed to write '{}'", out_path))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
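The same output-path handling recurs in the nlzss, yay0, and yaz0 commands. A rough standalone sketch of that logic under a made-up name (`resolve_out_path`); the real code goes through the crate's `IntoCow`/`ToCow` helpers instead.

```rust
use std::borrow::Cow;

use typed_path::Utf8NativePath;

// Single input + `-o`: write to `output` directly. Multiple inputs + `-o`:
// treat `output` as a directory and keep each input's file name. No `-o`:
// overwrite the input in place.
fn resolve_out_path<'a>(
    input: &'a Utf8NativePath,
    output: Option<&'a Utf8NativePath>,
    single_file: bool,
) -> Cow<'a, Utf8NativePath> {
    match output {
        Some(out) if single_file => Cow::Borrowed(out),
        // "out.bin" is only a placeholder for inputs with no file name
        Some(out) => Cow::Owned(out.join(input.file_name().unwrap_or("out.bin"))),
        None => Cow::Borrowed(input),
    }
}
```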
@ -1,9 +1,9 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use argp::FromArgs;
|
||||
use typed_path::Utf8NativePathBuf;
|
||||
|
||||
use super::vfs;
|
||||
use crate::util::path::native_path;
|
||||
|
||||
#[derive(FromArgs, PartialEq, Debug)]
|
||||
/// Commands for processing RSO files.
|
||||
|
@ -24,9 +24,9 @@ enum SubCommand {
|
|||
/// Views RARC file information.
|
||||
#[argp(subcommand, name = "list")]
|
||||
pub struct ListArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// RARC file
|
||||
file: PathBuf,
|
||||
file: Utf8NativePathBuf,
|
||||
#[argp(switch, short = 's')]
|
||||
/// Only print filenames.
|
||||
short: bool,
|
||||
|
@ -36,12 +36,12 @@ pub struct ListArgs {
|
|||
/// Extracts RARC file contents.
|
||||
#[argp(subcommand, name = "extract")]
|
||||
pub struct ExtractArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// RARC file
|
||||
file: PathBuf,
|
||||
#[argp(option, short = 'o')]
|
||||
file: Utf8NativePathBuf,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// output directory
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
#[argp(switch)]
|
||||
/// Do not decompress files when copying.
|
||||
no_decompress: bool,
|
||||
|
@ -58,13 +58,13 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
|
||||
fn list(args: ListArgs) -> Result<()> {
|
||||
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||
let path = Utf8NativePathBuf::from(format!("{}:", args.file));
|
||||
vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true })
|
||||
}
|
||||
|
||||
fn extract(args: ExtractArgs) -> Result<()> {
|
||||
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||
let output = args.output.unwrap_or_else(|| PathBuf::from("."));
|
||||
let path = Utf8NativePathBuf::from(format!("{}:", args.file));
|
||||
let output = args.output.unwrap_or_else(|| Utf8NativePathBuf::from("."));
|
||||
vfs::cp(vfs::CpArgs {
|
||||
paths: vec![path, output],
|
||||
no_decompress: args.no_decompress,
|
||||
|
|
|
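Worth noting in the rarc (and u8) wrappers: appending a trailing `:` turns the archive path into a VFS path whose container root is the archive itself, which is what lets these commands delegate to `vfs::ls`/`vfs::cp`. A tiny sketch of that convention (the function name is illustrative):

```rust
use typed_path::Utf8NativePathBuf;

// "files/stage.arc" -> "files/stage.arc:" (list or extract the whole archive)
fn container_root(file: &Utf8NativePathBuf) -> Utf8NativePathBuf {
    Utf8NativePathBuf::from(format!("{}:", file))
}
```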
@ -2,7 +2,6 @@ use std::{
|
|||
collections::{btree_map, BTreeMap},
|
||||
fs,
|
||||
io::{Cursor, Write},
|
||||
path::PathBuf,
|
||||
time::Instant,
|
||||
};
|
||||
|
||||
|
@ -15,6 +14,7 @@ use object::{
|
|||
use rayon::prelude::*;
|
||||
use rustc_hash::FxHashMap;
|
||||
use tracing::{info, info_span};
|
||||
use typed_path::Utf8NativePathBuf;
|
||||
|
||||
use crate::{
|
||||
analysis::{
|
||||
|
@ -38,6 +38,7 @@ use crate::{
|
|||
elf::{to_obj_reloc_kind, write_elf},
|
||||
file::{buf_writer, process_rsp, verify_hash, FileIterator},
|
||||
nested::NestedMap,
|
||||
path::native_path,
|
||||
rel::{
|
||||
print_relocations, process_rel, process_rel_header, process_rel_sections, write_rel,
|
||||
RelHeader, RelReloc, RelSectionHeader, RelWriteInfo, PERMITTED_SECTIONS,
|
||||
|
@ -67,9 +68,9 @@ enum SubCommand {
|
|||
/// Views REL file information.
|
||||
#[argp(subcommand, name = "info")]
|
||||
pub struct InfoArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// REL file
|
||||
rel_file: PathBuf,
|
||||
rel_file: Utf8NativePathBuf,
|
||||
#[argp(switch, short = 'r')]
|
||||
/// print relocations
|
||||
relocations: bool,
|
||||
|
@ -79,27 +80,27 @@ pub struct InfoArgs {
|
|||
/// Merges a DOL + REL(s) into an ELF.
|
||||
#[argp(subcommand, name = "merge")]
|
||||
pub struct MergeArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// DOL file
|
||||
dol_file: PathBuf,
|
||||
#[argp(positional)]
|
||||
dol_file: Utf8NativePathBuf,
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// REL file(s)
|
||||
rel_files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'o')]
|
||||
rel_files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// output ELF
|
||||
out_file: PathBuf,
|
||||
out_file: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Creates RELs from an ELF + PLF(s).
|
||||
#[argp(subcommand, name = "make")]
|
||||
pub struct MakeArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// input file(s)
|
||||
files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'c')]
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'c', from_str_fn(native_path))]
|
||||
/// (optional) project configuration file
|
||||
config: Option<PathBuf>,
|
||||
config: Option<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'n')]
|
||||
/// (optional) module names
|
||||
names: Vec<String>,
|
||||
|
@ -176,7 +177,7 @@ fn load_rel(module_config: &ModuleConfig, object_base: &ObjectBase) -> Result<Re
|
|||
let header = process_rel_header(&mut reader)?;
|
||||
let sections = process_rel_sections(&mut reader, &header)?;
|
||||
let section_defs = if let Some(splits_path) = &module_config.splits {
|
||||
read_splits_sections(splits_path)?
|
||||
read_splits_sections(&splits_path.with_encoding())?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
@ -186,7 +187,7 @@ fn load_rel(module_config: &ModuleConfig, object_base: &ObjectBase) -> Result<Re
|
|||
struct LoadedModule<'a> {
|
||||
module_id: u32,
|
||||
file: File<'a>,
|
||||
path: PathBuf,
|
||||
path: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
fn resolve_relocations(
|
||||
|
@ -273,12 +274,12 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
let object_base = find_object_base(&config)?;
|
||||
for module_config in &config.modules {
|
||||
let module_name = module_config.name();
|
||||
if !args.names.is_empty() && !args.names.iter().any(|n| n == &module_name) {
|
||||
if !args.names.is_empty() && !args.names.iter().any(|n| n == module_name) {
|
||||
continue;
|
||||
}
|
||||
let _span = info_span!("module", name = %module_name).entered();
|
||||
let info = load_rel(module_config, &object_base).with_context(|| {
|
||||
format!("While loading REL '{}'", object_base.join(&module_config.object).display())
|
||||
format!("While loading REL '{}'", object_base.join(&module_config.object))
|
||||
})?;
|
||||
name_to_module_id.insert(module_name.to_string(), info.0.module_id);
|
||||
match existing_headers.entry(info.0.module_id) {
|
||||
|
@ -312,7 +313,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
.unwrap_or(idx as u32);
|
||||
load_obj(file.map()?)
|
||||
.map(|o| LoadedModule { module_id, file: o, path: path.clone() })
|
||||
.with_context(|| format!("Failed to load '{}'", path.display()))
|
||||
.with_context(|| format!("Failed to load '{}'", path))
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
|
||||
|
@ -320,7 +321,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
let start = Instant::now();
|
||||
let mut symbol_map = FxHashMap::<&[u8], (u32, SymbolIndex)>::default();
|
||||
for module_info in modules.iter() {
|
||||
let _span = info_span!("file", path = %module_info.path.display()).entered();
|
||||
let _span = info_span!("file", path = %module_info.path).entered();
|
||||
for symbol in module_info.file.symbols() {
|
||||
if symbol.scope() == object::SymbolScope::Dynamic {
|
||||
symbol_map
|
||||
|
@ -335,7 +336,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
let mut relocations = Vec::<Vec<RelReloc>>::with_capacity(modules.len() - 1);
|
||||
relocations.resize_with(modules.len() - 1, Vec::new);
|
||||
for (module_info, relocations) in modules.iter().skip(1).zip(&mut relocations) {
|
||||
let _span = info_span!("file", path = %module_info.path.display()).entered();
|
||||
let _span = info_span!("file", path = %module_info.path).entered();
|
||||
resolved += resolve_relocations(
|
||||
&module_info.file,
|
||||
&existing_headers,
|
||||
|
@ -344,9 +345,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
&modules,
|
||||
relocations,
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("While resolving relocations in '{}'", module_info.path.display())
|
||||
})?;
|
||||
.with_context(|| format!("While resolving relocations in '{}'", module_info.path))?;
|
||||
}
|
||||
|
||||
if !args.quiet {
|
||||
|
@ -362,7 +361,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
// Write RELs
|
||||
let start = Instant::now();
|
||||
for (module_info, relocations) in modules.iter().skip(1).zip(relocations) {
|
||||
let _span = info_span!("file", path = %module_info.path.display()).entered();
|
||||
let _span = info_span!("file", path = %module_info.path).entered();
|
||||
let mut info = RelWriteInfo {
|
||||
module_id: module_info.module_id,
|
||||
version: 3,
|
||||
|
@ -393,7 +392,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
let rel_path = module_info.path.with_extension("rel");
|
||||
let mut w = buf_writer(&rel_path)?;
|
||||
write_rel(&mut w, &info, &module_info.file, relocations)
|
||||
.with_context(|| format!("Failed to write '{}'", rel_path.display()))?;
|
||||
.with_context(|| format!("Failed to write '{}'", rel_path))?;
|
||||
w.flush()?;
|
||||
}
|
||||
|
||||
|
@ -476,11 +475,11 @@ fn info(args: InfoArgs) -> Result<()> {
|
|||
const fn align32(x: u32) -> u32 { (x + 31) & !31 }
|
||||
|
||||
fn merge(args: MergeArgs) -> Result<()> {
|
||||
log::info!("Loading {}", args.dol_file.display());
|
||||
log::info!("Loading {}", args.dol_file);
|
||||
let mut obj = {
|
||||
let mut file = open_file(&args.dol_file, true)?;
|
||||
let name = args.dol_file.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
|
||||
process_dol(file.map()?, name.as_ref())?
|
||||
let name = args.dol_file.file_stem().unwrap_or_default();
|
||||
process_dol(file.map()?, name)?
|
||||
};
|
||||
|
||||
log::info!("Performing signature analysis");
|
||||
|
@ -491,9 +490,9 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
let mut module_map = BTreeMap::<u32, ObjInfo>::new();
|
||||
for result in FileIterator::new(&args.rel_files)? {
|
||||
let (path, mut entry) = result?;
|
||||
log::info!("Loading {}", path.display());
|
||||
let name = path.file_stem().map(|s| s.to_string_lossy()).unwrap_or_default();
|
||||
let (_, obj) = process_rel(&mut entry, name.as_ref())?;
|
||||
log::info!("Loading {}", path);
|
||||
let name = path.file_stem().unwrap_or_default();
|
||||
let (_, obj) = process_rel(&mut entry, name)?;
|
||||
match module_map.entry(obj.module_id) {
|
||||
btree_map::Entry::Vacant(e) => e.insert(obj),
|
||||
btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),
|
||||
|
@ -610,7 +609,7 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
tracker.apply(&mut obj, false)?;
|
||||
|
||||
// Write ELF
|
||||
log::info!("Writing {}", args.out_file.display());
|
||||
log::info!("Writing {}", args.out_file);
|
||||
fs::write(&args.out_file, write_elf(&obj, false)?)?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
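One detail from `load_rel` above: the `splits` entry comes from the project config with Unix-style separators, and `with_encoding()` re-encodes it for the host platform right before the file is read. A minimal sketch of that conversion:

```rust
use typed_path::{Utf8NativePathBuf, Utf8UnixPath};

// Forward-slash config path -> host-native path, as in
// `read_splits_sections(&splits_path.with_encoding())` above.
fn config_to_native(config_path: &Utf8UnixPath) -> Utf8NativePathBuf {
    config_path.with_encoding()
}
```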
@ -1,7 +1,4 @@
|
|||
use std::{
|
||||
io::{BufRead, Seek, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use std::io::{BufRead, Seek, Write};
|
||||
|
||||
use anyhow::{bail, ensure, Context, Result};
|
||||
use argp::FromArgs;
|
||||
|
@ -10,10 +7,12 @@ use object::{
|
|||
Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, SectionKind,
|
||||
SymbolIndex, SymbolKind, SymbolSection,
|
||||
};
|
||||
use typed_path::{Utf8NativePath, Utf8NativePathBuf};
|
||||
|
||||
use crate::{
|
||||
util::{
|
||||
file::buf_writer,
|
||||
path::native_path,
|
||||
reader::{Endian, ToWriter},
|
||||
rso::{
|
||||
process_rso, symbol_hash, RsoHeader, RsoRelocation, RsoSectionHeader, RsoSymbol,
|
||||
|
@ -42,30 +41,30 @@ enum SubCommand {
|
|||
/// Views RSO file information.
|
||||
#[argp(subcommand, name = "info")]
|
||||
pub struct InfoArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// RSO file
|
||||
rso_file: PathBuf,
|
||||
rso_file: Utf8NativePathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Creates an RSO from an ELF.
|
||||
#[argp(subcommand, name = "make")]
|
||||
pub struct MakeArgs {
|
||||
#[argp(positional, arg_name = "ELF File")]
|
||||
#[argp(positional, arg_name = "ELF File", from_str_fn(native_path))]
|
||||
/// elf file
|
||||
input: PathBuf,
|
||||
input: Utf8NativePathBuf,
|
||||
|
||||
#[argp(option, short = 'o', arg_name = "File")]
|
||||
#[argp(option, short = 'o', arg_name = "File", from_str_fn(native_path))]
|
||||
/// output file path
|
||||
output: PathBuf,
|
||||
output: Utf8NativePathBuf,
|
||||
|
||||
#[argp(option, short = 'm', arg_name = "Name")]
|
||||
/// module name (or path). Default: input name
|
||||
module_name: Option<String>,
|
||||
|
||||
#[argp(option, short = 'e', arg_name = "File")]
|
||||
#[argp(option, short = 'e', arg_name = "File", from_str_fn(native_path))]
|
||||
/// file containing exported symbol names (newline separated)
|
||||
export: Option<PathBuf>,
|
||||
export: Option<Utf8NativePathBuf>,
|
||||
}
|
||||
|
||||
pub fn run(args: Args) -> Result<()> {
|
||||
|
@ -78,9 +77,7 @@ pub fn run(args: Args) -> Result<()> {
|
|||
fn info(args: InfoArgs) -> Result<()> {
|
||||
let rso = {
|
||||
let mut file = open_file(&args.rso_file, true)?;
|
||||
let obj = process_rso(file.as_mut())?;
|
||||
#[allow(clippy::let_and_return)]
|
||||
obj
|
||||
process_rso(file.as_mut())?
|
||||
};
|
||||
println!("Read RSO module {}", rso.name);
|
||||
Ok(())
|
||||
|
@ -97,7 +94,7 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
|
||||
let module_name = match args.module_name {
|
||||
Some(n) => n,
|
||||
None => args.input.display().to_string(),
|
||||
None => args.input.to_string(),
|
||||
};
|
||||
|
||||
let symbols_to_export = match &args.export {
|
||||
|
@ -121,18 +118,18 @@ fn make(args: MakeArgs) -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn make_sel<P: AsRef<Path>>(
|
||||
fn make_sel(
|
||||
_file: object::File,
|
||||
_output: P,
|
||||
_output: &Utf8NativePath,
|
||||
_module_name: &str,
|
||||
_symbols_to_export: Vec<String>,
|
||||
) -> Result<()> {
|
||||
bail!("Creating SEL files is not supported yet.");
|
||||
}
|
||||
|
||||
fn make_rso<P: AsRef<Path>>(
|
||||
fn make_rso(
|
||||
file: object::File,
|
||||
output: P,
|
||||
output: &Utf8NativePath,
|
||||
module_name: &str,
|
||||
symbols_to_export: Vec<String>,
|
||||
) -> Result<()> {
|
||||
|
|
|
@ -1,17 +1,19 @@
|
|||
use std::{
|
||||
fs::File,
|
||||
io::{stdout, BufRead, Read, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use argp::FromArgs;
|
||||
use owo_colors::{OwoColorize, Stream};
|
||||
use path_slash::PathExt;
|
||||
use sha1::{Digest, Sha1};
|
||||
use typed_path::{Utf8NativePath, Utf8NativePathBuf};
|
||||
|
||||
use crate::{
|
||||
util::file::{buf_writer, process_rsp, touch},
|
||||
util::{
|
||||
file::{buf_writer, process_rsp, touch},
|
||||
path::native_path,
|
||||
},
|
||||
vfs::open_file,
|
||||
};
|
||||
|
||||
|
@ -22,13 +24,13 @@ pub struct Args {
|
|||
#[argp(switch, short = 'c')]
|
||||
/// check SHA sums against given list
|
||||
check: bool,
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// path to input file(s)
|
||||
files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'o')]
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// (check) touch output file on successful check
|
||||
/// (hash) write hash(es) to output file
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
#[argp(switch, short = 'q')]
|
||||
/// only print failures and a summary
|
||||
quiet: bool,
|
||||
|
@ -44,17 +46,17 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
if let Some(out_path) = &args.output {
|
||||
touch(out_path)
|
||||
.with_context(|| format!("Failed to touch output file '{}'", out_path.display()))?;
|
||||
.with_context(|| format!("Failed to touch output file '{}'", out_path))?;
|
||||
}
|
||||
} else {
|
||||
let mut w: Box<dyn Write> =
|
||||
if let Some(out_path) = &args.output {
|
||||
Box::new(buf_writer(out_path).with_context(|| {
|
||||
format!("Failed to open output file '{}'", out_path.display())
|
||||
})?)
|
||||
} else {
|
||||
Box::new(stdout())
|
||||
};
|
||||
let mut w: Box<dyn Write> = if let Some(out_path) = &args.output {
|
||||
Box::new(
|
||||
buf_writer(out_path)
|
||||
.with_context(|| format!("Failed to open output file '{}'", out_path))?,
|
||||
)
|
||||
} else {
|
||||
Box::new(stdout())
|
||||
};
|
||||
for path in process_rsp(&args.files)? {
|
||||
let mut file = open_file(&path, false)?;
|
||||
hash(w.as_mut(), file.as_mut(), &path)?
|
||||
|
@ -114,7 +116,7 @@ where R: BufRead + ?Sized {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn hash<R, W>(w: &mut W, reader: &mut R, path: &Path) -> Result<()>
|
||||
fn hash<R, W>(w: &mut W, reader: &mut R, path: &Utf8NativePath) -> Result<()>
|
||||
where
|
||||
R: Read + ?Sized,
|
||||
W: Write + ?Sized,
|
||||
|
@ -123,7 +125,7 @@ where
|
|||
let mut hash_buf = [0u8; 40];
|
||||
let hash_str = base16ct::lower::encode_str(&hash, &mut hash_buf)
|
||||
.map_err(|e| anyhow!("Failed to encode hash: {e}"))?;
|
||||
writeln!(w, "{} {}", hash_str, path.to_slash_lossy())?;
|
||||
writeln!(w, "{} {}", hash_str, path.with_unix_encoding())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use argp::FromArgs;
|
||||
use typed_path::Utf8NativePathBuf;
|
||||
|
||||
use super::vfs;
|
||||
use crate::util::path::native_path;
|
||||
|
||||
#[derive(FromArgs, PartialEq, Debug)]
|
||||
/// Commands for processing U8 (arc) files.
|
||||
|
@ -24,9 +24,9 @@ enum SubCommand {
|
|||
/// Views U8 (arc) file information.
|
||||
#[argp(subcommand, name = "list")]
|
||||
pub struct ListArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// U8 (arc) file
|
||||
file: PathBuf,
|
||||
file: Utf8NativePathBuf,
|
||||
#[argp(switch, short = 's')]
|
||||
/// Only print filenames.
|
||||
short: bool,
|
||||
|
@ -36,12 +36,12 @@ pub struct ListArgs {
|
|||
/// Extracts U8 (arc) file contents.
|
||||
#[argp(subcommand, name = "extract")]
|
||||
pub struct ExtractArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// U8 (arc) file
|
||||
file: PathBuf,
|
||||
#[argp(option, short = 'o')]
|
||||
file: Utf8NativePathBuf,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// output directory
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
#[argp(switch)]
|
||||
/// Do not decompress files when copying.
|
||||
no_decompress: bool,
|
||||
|
@ -58,13 +58,13 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
|
||||
fn list(args: ListArgs) -> Result<()> {
|
||||
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||
let path = Utf8NativePathBuf::from(format!("{}:", args.file));
|
||||
vfs::ls(vfs::LsArgs { path, short: args.short, recursive: true })
|
||||
}
|
||||
|
||||
fn extract(args: ExtractArgs) -> Result<()> {
|
||||
let path = PathBuf::from(format!("{}:", args.file.display()));
|
||||
let output = args.output.unwrap_or_else(|| PathBuf::from("."));
|
||||
let path = Utf8NativePathBuf::from(format!("{}:", args.file));
|
||||
let output = args.output.unwrap_or_else(|| Utf8NativePathBuf::from("."));
|
||||
vfs::cp(vfs::CpArgs {
|
||||
paths: vec![path, output],
|
||||
no_decompress: args.no_decompress,
|
||||
|
|
|
@ -3,16 +3,19 @@ use std::{
|
|||
fs::File,
|
||||
io,
|
||||
io::{BufRead, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, bail, Context};
|
||||
use argp::FromArgs;
|
||||
use size::Size;
|
||||
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPath};
|
||||
|
||||
use crate::vfs::{
|
||||
decompress_file, detect, open_path, FileFormat, OpenResult, Vfs, VfsFile, VfsFileType,
|
||||
VfsMetadata,
|
||||
use crate::{
|
||||
util::path::native_path,
|
||||
vfs::{
|
||||
decompress_file, detect, open_path, FileFormat, OpenResult, Vfs, VfsFile, VfsFileType,
|
||||
VfsMetadata,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(FromArgs, PartialEq, Debug)]
|
||||
|
@ -34,9 +37,9 @@ enum SubCommand {
|
|||
/// List files in a directory or container.
|
||||
#[argp(subcommand, name = "ls")]
|
||||
pub struct LsArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// Directory or container path.
|
||||
pub path: PathBuf,
|
||||
pub path: Utf8NativePathBuf,
|
||||
#[argp(switch, short = 's')]
|
||||
/// Only print filenames.
|
||||
pub short: bool,
|
||||
|
@ -49,9 +52,9 @@ pub struct LsArgs {
|
|||
/// Copy files from a container.
|
||||
#[argp(subcommand, name = "cp")]
|
||||
pub struct CpArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// Source path(s) and destination path.
|
||||
pub paths: Vec<PathBuf>,
|
||||
pub paths: Vec<Utf8NativePathBuf>,
|
||||
#[argp(switch)]
|
||||
/// Do not decompress files when copying.
|
||||
pub no_decompress: bool,
|
||||
|
@ -111,21 +114,18 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
|
|||
let mut files = Vec::new();
|
||||
match open_path(&args.path, false)? {
|
||||
OpenResult::File(mut file, path) => {
|
||||
let filename = Path::new(path)
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("Path has no filename"))?
|
||||
.to_string_lossy();
|
||||
let filename = path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
|
||||
if args.short {
|
||||
println!("{}", filename);
|
||||
} else {
|
||||
let metadata = file
|
||||
.metadata()
|
||||
.with_context(|| format!("Failed to fetch metadata for {}", path))?;
|
||||
files.push(file_info(&filename, file.as_mut(), &metadata)?);
|
||||
files.push(file_info(filename, file.as_mut(), &metadata)?);
|
||||
}
|
||||
}
|
||||
OpenResult::Directory(mut fs, path) => {
|
||||
ls_directory(fs.as_mut(), path, "", &args, &mut files)?;
|
||||
ls_directory(fs.as_mut(), &path, Utf8UnixPath::new(""), &args, &mut files)?;
|
||||
}
|
||||
}
|
||||
if !args.short {
|
||||
|
@ -149,16 +149,16 @@ pub fn ls(args: LsArgs) -> anyhow::Result<()> {
|
|||
|
||||
fn ls_directory(
|
||||
fs: &mut dyn Vfs,
|
||||
path: &str,
|
||||
base_filename: &str,
|
||||
path: &Utf8UnixPath,
|
||||
base_filename: &Utf8UnixPath,
|
||||
args: &LsArgs,
|
||||
files: &mut Vec<Columns<5>>,
|
||||
) -> anyhow::Result<()> {
|
||||
let entries = fs.read_dir(path)?;
|
||||
files.reserve(entries.len());
|
||||
for filename in entries {
|
||||
let entry_path = format!("{}/{}", path, filename);
|
||||
let display_filename = format!("{}{}", base_filename, filename);
|
||||
let entry_path = path.join(&filename);
|
||||
let display_path = base_filename.join(&filename);
|
||||
let metadata = fs
|
||||
.metadata(&entry_path)
|
||||
.with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
|
||||
|
@ -168,26 +168,25 @@ fn ls_directory(
|
|||
.open(&entry_path)
|
||||
.with_context(|| format!("Failed to open file {}", entry_path))?;
|
||||
if args.short {
|
||||
println!("{}", display_filename);
|
||||
println!("{}", display_path);
|
||||
} else {
|
||||
files.push(file_info(&display_filename, file.as_mut(), &metadata)?);
|
||||
files.push(file_info(display_path.as_str(), file.as_mut(), &metadata)?);
|
||||
}
|
||||
}
|
||||
VfsFileType::Directory => {
|
||||
if args.short {
|
||||
println!("{}/", display_filename);
|
||||
println!("{}/", display_path);
|
||||
} else {
|
||||
files.push([
|
||||
" ".to_string(),
|
||||
format!("{}/", display_filename),
|
||||
format!("{}/", display_path),
|
||||
"Directory".to_string(),
|
||||
String::new(),
|
||||
String::new(),
|
||||
]);
|
||||
}
|
||||
if args.recursive {
|
||||
let base_filename = format!("{}/", display_filename);
|
||||
ls_directory(fs, &entry_path, &base_filename, args, files)?;
|
||||
ls_directory(fs, &entry_path, &display_path, args, files)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -200,26 +199,24 @@ pub fn cp(mut args: CpArgs) -> anyhow::Result<()> {
|
|||
bail!("Both source and destination paths must be provided");
|
||||
}
|
||||
let dest = args.paths.pop().unwrap();
|
||||
let dest_is_dir = args.paths.len() > 1 || dest.metadata().ok().is_some_and(|m| m.is_dir());
|
||||
let dest_is_dir = args.paths.len() > 1 || fs::metadata(&dest).ok().is_some_and(|m| m.is_dir());
|
||||
let auto_decompress = !args.no_decompress;
|
||||
for path in args.paths {
|
||||
match open_path(&path, auto_decompress)? {
|
||||
OpenResult::File(file, path) => {
|
||||
let dest = if dest_is_dir {
|
||||
fs::create_dir_all(&dest).with_context(|| {
|
||||
format!("Failed to create directory {}", dest.display())
|
||||
})?;
|
||||
let filename = Path::new(path)
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("Path has no filename"))?;
|
||||
fs::create_dir_all(&dest)
|
||||
.with_context(|| format!("Failed to create directory {}", dest))?;
|
||||
let filename =
|
||||
path.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
|
||||
dest.join(filename)
|
||||
} else {
|
||||
dest.clone()
|
||||
};
|
||||
cp_file(file, path, &dest, auto_decompress, args.quiet)?;
|
||||
cp_file(file, &path, &dest, auto_decompress, args.quiet)?;
|
||||
}
|
||||
OpenResult::Directory(mut fs, path) => {
|
||||
cp_recursive(fs.as_mut(), path, &dest, auto_decompress, args.quiet)?;
|
||||
cp_recursive(fs.as_mut(), &path, &dest, auto_decompress, args.quiet)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -228,8 +225,8 @@ pub fn cp(mut args: CpArgs) -> anyhow::Result<()> {
|
|||
|
||||
fn cp_file(
|
||||
mut file: Box<dyn VfsFile>,
|
||||
path: &str,
|
||||
dest: &Path,
|
||||
path: &Utf8UnixPath,
|
||||
dest: &Utf8NativePath,
|
||||
auto_decompress: bool,
|
||||
quiet: bool,
|
||||
) -> anyhow::Result<()> {
|
||||
|
@ -237,31 +234,30 @@ fn cp_file(
|
|||
if let FileFormat::Compressed(kind) = detect(file.as_mut())? {
|
||||
if auto_decompress {
|
||||
file = decompress_file(file.as_mut(), kind)
|
||||
.with_context(|| format!("Failed to decompress file {}", dest.display()))?;
|
||||
.with_context(|| format!("Failed to decompress file {}", dest))?;
|
||||
compression = Some(kind);
|
||||
}
|
||||
}
|
||||
let metadata = file
|
||||
.metadata()
|
||||
.with_context(|| format!("Failed to fetch metadata for {}", dest.display()))?;
|
||||
let metadata =
|
||||
file.metadata().with_context(|| format!("Failed to fetch metadata for {}", dest))?;
|
||||
if !quiet {
|
||||
if let Some(kind) = compression {
|
||||
println!(
|
||||
"{} -> {} ({}) [Decompressed {}]",
|
||||
path,
|
||||
dest.display(),
|
||||
dest,
|
||||
Size::from_bytes(metadata.len),
|
||||
kind
|
||||
);
|
||||
} else {
|
||||
println!("{} -> {} ({})", path, dest.display(), Size::from_bytes(metadata.len));
|
||||
println!("{} -> {} ({})", path, dest, Size::from_bytes(metadata.len));
|
||||
}
|
||||
}
|
||||
let mut dest_file =
|
||||
File::create(dest).with_context(|| format!("Failed to create file {}", dest.display()))?;
|
||||
File::create(dest).with_context(|| format!("Failed to create file {}", dest))?;
|
||||
buf_copy(file.as_mut(), &mut dest_file)
|
||||
.with_context(|| format!("Failed to copy file {}", dest.display()))?;
|
||||
dest_file.flush().with_context(|| format!("Failed to flush file {}", dest.display()))?;
|
||||
.with_context(|| format!("Failed to copy file {}", dest))?;
|
||||
dest_file.flush().with_context(|| format!("Failed to flush file {}", dest))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -286,16 +282,15 @@ where
|
|||
|
||||
fn cp_recursive(
|
||||
fs: &mut dyn Vfs,
|
||||
path: &str,
|
||||
dest: &Path,
|
||||
path: &Utf8UnixPath,
|
||||
dest: &Utf8NativePath,
|
||||
auto_decompress: bool,
|
||||
quiet: bool,
|
||||
) -> anyhow::Result<()> {
|
||||
fs::create_dir_all(dest)
|
||||
.with_context(|| format!("Failed to create directory {}", dest.display()))?;
|
||||
fs::create_dir_all(dest).with_context(|| format!("Failed to create directory {}", dest))?;
|
||||
let entries = fs.read_dir(path)?;
|
||||
for filename in entries {
|
||||
let entry_path = format!("{}/{}", path, filename);
|
||||
let entry_path = path.join(&filename);
|
||||
let metadata = fs
|
||||
.metadata(&entry_path)
|
||||
.with_context(|| format!("Failed to fetch metadata for {}", entry_path))?;
|
||||
|
|
|
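For reference, the destination handling in `cp` above boils down to the following (standalone sketch; `dest_for` is a made-up name). The source file name comes from the Unix-encoded path inside the container, while the destination stays native.

```rust
use anyhow::{anyhow, Result};
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPath};

// Directory destination: keep the source file name. Otherwise use the
// destination path as-is.
fn dest_for(
    src: &Utf8UnixPath,
    dest: &Utf8NativePath,
    dest_is_dir: bool,
) -> Result<Utf8NativePathBuf> {
    if dest_is_dir {
        let filename = src.file_name().ok_or_else(|| anyhow!("Path has no filename"))?;
        Ok(dest.join(filename))
    } else {
        Ok(dest.to_path_buf())
    }
}
```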
@ -1,12 +1,14 @@
|
|||
use std::{fs, path::PathBuf};
|
||||
use std::fs;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use argp::FromArgs;
|
||||
use typed_path::Utf8NativePathBuf;
|
||||
|
||||
use crate::{
|
||||
util::{
|
||||
file::process_rsp,
|
||||
ncompress::{compress_yay0, decompress_yay0},
|
||||
path::native_path,
|
||||
IntoCow, ToCow,
|
||||
},
|
||||
vfs::open_file,
|
||||
|
@ -31,26 +33,26 @@ enum SubCommand {
|
|||
/// Compresses files using YAY0.
|
||||
#[argp(subcommand, name = "compress")]
|
||||
pub struct CompressArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// Files to compress
|
||||
files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'o')]
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// Output file (or directory, if multiple files are specified).
|
||||
/// If not specified, compresses in-place.
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Decompresses YAY0-compressed files.
|
||||
#[argp(subcommand, name = "decompress")]
|
||||
pub struct DecompressArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// YAY0-compressed files
|
||||
files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'o')]
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// Output file (or directory, if multiple files are specified).
|
||||
/// If not specified, decompresses in-place.
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
}
|
||||
|
||||
pub fn run(args: Args) -> Result<()> {
|
||||
|
@ -78,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
|
|||
path.as_path().to_cow()
|
||||
};
|
||||
fs::write(out_path.as_ref(), data)
|
||||
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
|
||||
.with_context(|| format!("Failed to write '{}'", out_path))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -90,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
|
|||
let data = {
|
||||
let mut file = open_file(&path, true)?;
|
||||
decompress_yay0(file.map()?)
|
||||
.with_context(|| format!("Failed to decompress '{}' using Yay0", path.display()))?
|
||||
.with_context(|| format!("Failed to decompress '{}' using Yay0", path))?
|
||||
};
|
||||
let out_path = if let Some(output) = &args.output {
|
||||
if single_file {
|
||||
|
@ -102,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
|
|||
path.as_path().to_cow()
|
||||
};
|
||||
fs::write(out_path.as_ref(), data)
|
||||
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
|
||||
.with_context(|| format!("Failed to write '{}'", out_path))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
use std::{fs, path::PathBuf};
|
||||
use std::fs;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use argp::FromArgs;
|
||||
use typed_path::Utf8NativePathBuf;
|
||||
|
||||
use crate::{
|
||||
util::{
|
||||
file::process_rsp,
|
||||
ncompress::{compress_yaz0, decompress_yaz0},
|
||||
path::native_path,
|
||||
IntoCow, ToCow,
|
||||
},
|
||||
vfs::open_file,
|
||||
|
@ -31,26 +33,26 @@ enum SubCommand {
|
|||
/// Compresses files using YAZ0.
|
||||
#[argp(subcommand, name = "compress")]
|
||||
pub struct CompressArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// Files to compress
|
||||
files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'o')]
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// Output file (or directory, if multiple files are specified).
|
||||
/// If not specified, compresses in-place.
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Decompresses YAZ0-compressed files.
|
||||
#[argp(subcommand, name = "decompress")]
|
||||
pub struct DecompressArgs {
|
||||
#[argp(positional)]
|
||||
#[argp(positional, from_str_fn(native_path))]
|
||||
/// YAZ0-compressed files
|
||||
files: Vec<PathBuf>,
|
||||
#[argp(option, short = 'o')]
|
||||
files: Vec<Utf8NativePathBuf>,
|
||||
#[argp(option, short = 'o', from_str_fn(native_path))]
|
||||
/// Output file (or directory, if multiple files are specified).
|
||||
/// If not specified, decompresses in-place.
|
||||
output: Option<PathBuf>,
|
||||
output: Option<Utf8NativePathBuf>,
|
||||
}
|
||||
|
||||
pub fn run(args: Args) -> Result<()> {
|
||||
|
@ -78,7 +80,7 @@ fn compress(args: CompressArgs) -> Result<()> {
|
|||
path.as_path().to_cow()
|
||||
};
|
||||
fs::write(out_path.as_ref(), data)
|
||||
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
|
||||
.with_context(|| format!("Failed to write '{}'", out_path))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -90,7 +92,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
|
|||
let data = {
|
||||
let mut file = open_file(&path, false)?;
|
||||
decompress_yaz0(file.map()?)
|
||||
.with_context(|| format!("Failed to decompress '{}' using Yaz0", path.display()))?
|
||||
.with_context(|| format!("Failed to decompress '{}' using Yaz0", path))?
|
||||
};
|
||||
let out_path = if let Some(output) = &args.output {
|
||||
if single_file {
|
||||
|
@ -102,7 +104,7 @@ fn decompress(args: DecompressArgs) -> Result<()> {
|
|||
path.as_path().to_cow()
|
||||
};
|
||||
fs::write(out_path.as_ref(), data)
|
||||
.with_context(|| format!("Failed to write '{}'", out_path.display()))?;
|
||||
.with_context(|| format!("Failed to write '{}'", out_path))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![deny(unused_crate_dependencies)]
|
||||
use std::{env, ffi::OsStr, fmt::Display, path::PathBuf, process::exit, str::FromStr};
|
||||
|
||||
use anyhow::Error;
|
||||
|
|
|
@ -2,7 +2,6 @@ use std::{
|
|||
fs,
|
||||
io::{BufRead, Write},
|
||||
num::ParseIntError,
|
||||
path::Path,
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
|
@ -12,6 +11,7 @@ use filetime::FileTime;
|
|||
use once_cell::sync::Lazy;
|
||||
use regex::{Captures, Regex};
|
||||
use tracing::{debug, info, warn};
|
||||
use typed_path::Utf8NativePath;
|
||||
use xxhash_rust::xxh3::xxh3_64;
|
||||
|
||||
use crate::{
|
||||
|
@ -45,10 +45,11 @@ pub fn parse_i32(s: &str) -> Result<i32, ParseIntError> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn apply_symbols_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>>
|
||||
where P: AsRef<Path> {
|
||||
let path = path.as_ref();
|
||||
Ok(if path.is_file() {
|
||||
pub fn apply_symbols_file(
|
||||
path: &Utf8NativePath,
|
||||
obj: &mut ObjInfo,
|
||||
) -> Result<Option<FileReadInfo>> {
|
||||
Ok(if fs::metadata(path).is_ok_and(|m| m.is_file()) {
|
||||
let mut file = open_file(path, true)?;
|
||||
let cached = FileReadInfo::new(file.as_mut())?;
|
||||
for result in file.lines() {
|
||||
|
@ -199,19 +200,21 @@ pub fn is_auto_label(symbol: &ObjSymbol) -> bool { symbol.name.starts_with("lbl_
|
|||
|
||||
pub fn is_auto_jump_table(symbol: &ObjSymbol) -> bool { symbol.name.starts_with("jumptable_") }
|
||||
|
||||
fn write_if_unchanged<P, Cb>(path: P, cb: Cb, cached_file: Option<FileReadInfo>) -> Result<()>
|
||||
fn write_if_unchanged<Cb>(
|
||||
path: &Utf8NativePath,
|
||||
cb: Cb,
|
||||
cached_file: Option<FileReadInfo>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
Cb: FnOnce(&mut dyn Write) -> Result<()>,
|
||||
{
|
||||
if let Some(cached_file) = cached_file {
|
||||
// Check file mtime
|
||||
let path = path.as_ref();
|
||||
let new_mtime = fs::metadata(path).ok().map(|m| FileTime::from_last_modification_time(&m));
|
||||
if let (Some(new_mtime), Some(old_mtime)) = (new_mtime, cached_file.mtime) {
|
||||
if new_mtime != old_mtime {
|
||||
// File changed, don't write
|
||||
warn!(path = %path.display(), "File changed since read, not updating");
|
||||
warn!(path = %path, "File changed since read, not updating");
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
@ -221,12 +224,12 @@ where
|
|||
cb(&mut buf)?;
|
||||
if xxh3_64(&buf) == cached_file.hash {
|
||||
// No changes
|
||||
debug!(path = %path.display(), "File unchanged");
|
||||
debug!(path = %path, "File unchanged");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Write to file
|
||||
info!("Writing updated {}", path.display());
|
||||
info!("Writing updated {}", path);
|
||||
fs::write(path, &buf)?;
|
||||
} else {
|
||||
// Write directly
|
||||
|
@ -238,14 +241,11 @@ where
|
|||
}
|
||||
|
||||
#[inline]
|
||||
pub fn write_symbols_file<P>(
|
||||
path: P,
|
||||
pub fn write_symbols_file(
|
||||
path: &Utf8NativePath,
|
||||
obj: &ObjInfo,
|
||||
cached_file: Option<FileReadInfo>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
) -> Result<()> {
|
||||
write_if_unchanged(path, |w| write_symbols(w, obj), cached_file)
|
||||
}
|
||||
|
||||
|
@ -413,15 +413,12 @@ fn section_kind_to_str(kind: ObjSectionKind) -> &'static str {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
pub fn write_splits_file<P>(
|
||||
path: P,
|
||||
pub fn write_splits_file(
|
||||
path: &Utf8NativePath,
|
||||
obj: &ObjInfo,
|
||||
all: bool,
|
||||
cached_file: Option<FileReadInfo>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
) -> Result<()> {
|
||||
write_if_unchanged(path, |w| write_splits(w, obj, all), cached_file)
|
||||
}
|
||||
|
||||
|
@ -625,10 +622,8 @@ enum SplitState {
|
|||
Unit(String),
|
||||
}
|
||||
|
||||
pub fn apply_splits_file<P>(path: P, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>>
|
||||
where P: AsRef<Path> {
|
||||
let path = path.as_ref();
|
||||
Ok(if path.is_file() {
|
||||
pub fn apply_splits_file(path: &Utf8NativePath, obj: &mut ObjInfo) -> Result<Option<FileReadInfo>> {
|
||||
Ok(if fs::metadata(path).is_ok_and(|m| m.is_file()) {
|
||||
let mut file = open_file(path, true)?;
|
||||
let cached = FileReadInfo::new(file.as_mut())?;
|
||||
apply_splits(file.as_mut(), obj)?;
|
||||
|
@ -738,10 +733,8 @@ where R: BufRead + ?Sized {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn read_splits_sections<P>(path: P) -> Result<Option<Vec<SectionDef>>>
|
||||
where P: AsRef<Path> {
|
||||
let path = path.as_ref();
|
||||
if !path.is_file() {
|
||||
pub fn read_splits_sections(path: &Utf8NativePath) -> Result<Option<Vec<SectionDef>>> {
|
||||
if !fs::metadata(path).is_ok_and(|m| m.is_file()) {
|
||||
return Ok(None);
|
||||
}
|
||||
let file = open_file(path, true)?;
|
||||
|
|
|
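A sketch of how the de-genericized config helpers are called now that they take `&Utf8NativePath` directly; the directory argument and the `update_symbols` name are illustrative.

```rust
use anyhow::Result;
use typed_path::Utf8NativePath;

use crate::{
    obj::ObjInfo,
    util::config::{apply_symbols_file, write_symbols_file},
};

// Applies symbols.txt to the object, then writes it back; write_if_unchanged
// skips the write when the serialized output hashes the same and warns if
// the file's mtime changed since it was read.
fn update_symbols(dir: &Utf8NativePath, obj: &mut ObjInfo) -> Result<()> {
    let path = dir.join("symbols.txt");
    let cached = apply_symbols_file(&path, obj)?;
    write_symbols_file(&path, obj, cached)?;
    Ok(())
}
```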
@ -1,41 +1,39 @@
|
|||
use std::{
|
||||
io::Write,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use std::io::Write;
|
||||
|
||||
use itertools::Itertools;
|
||||
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPathBuf};
|
||||
|
||||
pub struct DepFile {
|
||||
pub name: String,
|
||||
pub dependencies: Vec<String>,
|
||||
pub name: Utf8UnixPathBuf,
|
||||
pub dependencies: Vec<Utf8UnixPathBuf>,
|
||||
}
|
||||
|
||||
fn normalize_path(path: &Path) -> String {
|
||||
let path = path.to_string_lossy().replace('\\', "/");
|
||||
path.split_once(':').map(|(p, _)| p.to_string()).unwrap_or(path)
|
||||
fn normalize_path(path: Utf8NativePathBuf) -> Utf8UnixPathBuf {
|
||||
if let Some((a, _)) = path.as_str().split_once(':') {
|
||||
Utf8NativePath::new(a).with_unix_encoding()
|
||||
} else {
|
||||
path.with_unix_encoding()
|
||||
}
|
||||
}
|
||||
|
||||
impl DepFile {
|
||||
pub fn new(name: PathBuf) -> Self {
|
||||
Self { name: name.to_string_lossy().into_owned(), dependencies: vec![] }
|
||||
pub fn new(name: Utf8NativePathBuf) -> Self {
|
||||
Self { name: name.with_unix_encoding(), dependencies: vec![] }
|
||||
}
|
||||
|
||||
pub fn push<P>(&mut self, dependency: P)
|
||||
where P: AsRef<Path> {
|
||||
let path = dependency.as_ref().to_string_lossy().replace('\\', "/");
|
||||
let path = path.split_once(':').map(|(p, _)| p.to_string()).unwrap_or(path);
|
||||
self.dependencies.push(path);
|
||||
pub fn push(&mut self, dependency: Utf8NativePathBuf) {
|
||||
self.dependencies.push(normalize_path(dependency));
|
||||
}
|
||||
|
||||
pub fn extend(&mut self, dependencies: Vec<PathBuf>) {
|
||||
self.dependencies.extend(dependencies.iter().map(|dependency| normalize_path(dependency)));
|
||||
pub fn extend(&mut self, dependencies: Vec<Utf8NativePathBuf>) {
|
||||
self.dependencies.extend(dependencies.into_iter().map(normalize_path));
|
||||
}
|
||||
|
||||
pub fn write<W>(&self, w: &mut W) -> std::io::Result<()>
|
||||
where W: Write + ?Sized {
|
||||
write!(w, "{}:", self.name)?;
|
||||
for dep in self.dependencies.iter().unique() {
|
||||
write!(w, " \\\n {}", dep.replace(' ', "\\ "))?;
|
||||
write!(w, " \\\n {}", dep.as_str().replace(' ', "\\ "))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
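To make the dep-file behavior concrete, a small hedged example (the `crate::util::dep` module path is assumed from the file being edited): dependencies are normalized to forward slashes, and anything after a `:` VFS separator is dropped by `normalize_path`.

```rust
use typed_path::Utf8NativePathBuf;

use crate::util::dep::DepFile;

fn write_example() -> std::io::Result<()> {
    let mut dep = DepFile::new(Utf8NativePathBuf::from("build/main.elf"));
    dep.push(Utf8NativePathBuf::from("orig/files/stage.arc:model.bin"));
    let mut out = Vec::new();
    dep.write(&mut out)?;
    // Produces roughly:
    //   build/main.elf: \
    //       orig/files/stage.arc
    Ok(())
}
```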
@ -20,6 +20,7 @@ use object::{
|
|||
Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation,
|
||||
RelocationFlags, RelocationTarget, SectionKind, Symbol, SymbolKind, SymbolScope, SymbolSection,
|
||||
};
|
||||
use typed_path::Utf8NativePath;
|
||||
|
||||
use crate::{
|
||||
array_ref,
|
||||
|
@ -46,9 +47,7 @@ enum BoundaryState {
|
|||
FilesEnded,
|
||||
}
|
||||
|
||||
pub fn process_elf<P>(path: P) -> Result<ObjInfo>
|
||||
where P: AsRef<Path> {
|
||||
let path = path.as_ref();
|
||||
pub fn process_elf(path: &Utf8NativePath) -> Result<ObjInfo> {
|
||||
let mut file = open_file(path, true)?;
|
||||
let obj_file = object::read::File::parse(file.map()?)?;
|
||||
let architecture = match obj_file.architecture() {
|
||||
|
|
|
@ -1,33 +1,32 @@
|
|||
use std::{
|
||||
fs,
|
||||
fs::{DirBuilder, File, OpenOptions},
|
||||
io,
|
||||
io::{BufRead, BufWriter, Read, Seek, SeekFrom},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use filetime::{set_file_mtime, FileTime};
|
||||
use path_slash::PathBufExt;
|
||||
use sha1::{Digest, Sha1};
|
||||
use typed_path::{Utf8NativePath, Utf8NativePathBuf, Utf8UnixPathBuf};
|
||||
use xxhash_rust::xxh3::xxh3_64;
|
||||
|
||||
use crate::{
|
||||
array_ref,
|
||||
util::{
|
||||
ncompress::{decompress_yay0, decompress_yaz0, YAY0_MAGIC, YAZ0_MAGIC},
|
||||
path::check_path_buf,
|
||||
Bytes,
|
||||
},
|
||||
vfs::{open_file, VfsFile},
|
||||
};
|
||||
|
||||
/// Creates a buffered writer around a file (not memory mapped).
|
||||
pub fn buf_writer<P>(path: P) -> Result<BufWriter<File>>
|
||||
where P: AsRef<Path> {
|
||||
if let Some(parent) = path.as_ref().parent() {
|
||||
pub fn buf_writer(path: &Utf8NativePath) -> Result<BufWriter<File>> {
|
||||
if let Some(parent) = path.parent() {
|
||||
DirBuilder::new().recursive(true).create(parent)?;
|
||||
}
|
||||
let file = File::create(&path)
|
||||
.with_context(|| format!("Failed to create file '{}'", path.as_ref().display()))?;
|
||||
let file = File::create(path).with_context(|| format!("Failed to create file '{}'", path))?;
|
||||
Ok(BufWriter::new(file))
|
||||
}
|
||||
|
||||
|
@ -61,22 +60,21 @@ where R: Read + Seek + ?Sized {
|
|||
}
|
||||
|
||||
/// Process response files (starting with '@') and glob patterns (*).
|
||||
pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> {
|
||||
let mut out = Vec::with_capacity(files.len());
|
||||
pub fn process_rsp(files: &[Utf8NativePathBuf]) -> Result<Vec<Utf8NativePathBuf>> {
|
||||
let mut out = Vec::<Utf8NativePathBuf>::with_capacity(files.len());
|
||||
for path in files {
|
||||
let path_str =
|
||||
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
|
||||
if let Some(rsp_file) = path_str.strip_prefix('@') {
|
||||
let file = open_file(Path::new(rsp_file), true)?;
|
||||
if let Some(rsp_file) = path.as_str().strip_prefix('@') {
|
||||
let file = open_file(Utf8NativePath::new(rsp_file), true)?;
|
||||
for result in file.lines() {
|
||||
let line = result?;
|
||||
if !line.is_empty() {
|
||||
out.push(PathBuf::from_slash(line));
|
||||
out.push(Utf8UnixPathBuf::from(line).with_encoding());
|
||||
}
|
||||
}
|
||||
} else if path_str.contains('*') {
|
||||
for entry in glob::glob(path_str)? {
|
||||
out.push(entry?);
|
||||
} else if path.as_str().contains('*') {
|
||||
for entry in glob::glob(path.as_str())? {
|
||||
let path = check_path_buf(entry?)?;
|
||||
out.push(path.with_encoding());
|
||||
}
|
||||
} else {
|
||||
out.push(path.clone());
|
||||
|
@ -106,16 +104,16 @@ impl FileReadInfo {
|
|||
/// If a file is a RARC archive, iterate over its contents.
|
||||
/// If a file is a Yaz0 compressed file, decompress it.
|
||||
pub struct FileIterator {
|
||||
paths: Vec<PathBuf>,
|
||||
paths: Vec<Utf8NativePathBuf>,
|
||||
index: usize,
|
||||
}
|
||||
|
||||
impl FileIterator {
|
||||
pub fn new(paths: &[PathBuf]) -> Result<Self> {
|
||||
pub fn new(paths: &[Utf8NativePathBuf]) -> Result<Self> {
|
||||
Ok(Self { paths: process_rsp(paths)?, index: 0 })
|
||||
}
|
||||
|
||||
fn next_path(&mut self) -> Option<Result<(PathBuf, Box<dyn VfsFile>)>> {
|
||||
fn next_path(&mut self) -> Option<Result<(Utf8NativePathBuf, Box<dyn VfsFile>)>> {
|
||||
if self.index >= self.paths.len() {
|
||||
return None;
|
||||
}
|
||||
|
@ -130,14 +128,13 @@ impl FileIterator {
|
|||
}
|
||||
|
||||
impl Iterator for FileIterator {
|
||||
type Item = Result<(PathBuf, Box<dyn VfsFile>)>;
|
||||
type Item = Result<(Utf8NativePathBuf, Box<dyn VfsFile>)>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> { self.next_path() }
|
||||
}
|
||||
|
||||
pub fn touch<P>(path: P) -> io::Result<()>
|
||||
where P: AsRef<Path> {
|
||||
if path.as_ref().exists() {
|
||||
pub fn touch(path: &Utf8NativePath) -> io::Result<()> {
|
||||
if fs::exists(path)? {
|
||||
set_file_mtime(path, FileTime::now())
|
||||
} else {
|
||||
match OpenOptions::new().create(true).truncate(true).write(true).open(path) {
|
||||
|
|
|
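A short usage sketch for the reworked `process_rsp` (inputs are hypothetical): response-file lines are read as Unix-style paths and re-encoded for the host, globs are expanded through `check_path_buf`, and plain paths pass through unchanged.

```rust
use anyhow::Result;
use typed_path::Utf8NativePathBuf;

use crate::util::file::process_rsp;

fn expand_inputs() -> Result<Vec<Utf8NativePathBuf>> {
    let args = vec![
        Utf8NativePathBuf::from("@objects.rsp"), // one path per line inside
        Utf8NativePathBuf::from("assets/*.bin"), // glob pattern
        Utf8NativePathBuf::from("main.dol"),     // passed through as-is
    ];
    process_rsp(&args)
}
```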
@ -1,8 +1,6 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
use path_slash::PathBufExt;
|
||||
use typed_path::{Utf8NativePathBuf, Utf8UnixPath};
|
||||
|
||||
use crate::obj::{ObjInfo, ObjKind};
|
||||
|
||||
|
@ -33,7 +31,7 @@ pub fn generate_ldscript(
|
|||
let mut force_files = Vec::with_capacity(obj.link_order.len());
|
||||
for unit in &obj.link_order {
|
||||
let obj_path = obj_path_for_unit(&unit.name);
|
||||
force_files.push(obj_path.file_name().unwrap().to_str().unwrap().to_string());
|
||||
force_files.push(obj_path.file_name().unwrap().to_string());
|
||||
}
|
||||
|
||||
let mut force_active = force_active.to_vec();
|
||||
|
@ -82,7 +80,7 @@ pub fn generate_ldscript_partial(
|
|||
let mut force_files = Vec::with_capacity(obj.link_order.len());
|
||||
for unit in &obj.link_order {
|
||||
let obj_path = obj_path_for_unit(&unit.name);
|
||||
force_files.push(obj_path.file_name().unwrap().to_str().unwrap().to_string());
|
||||
force_files.push(obj_path.file_name().unwrap().to_string());
|
||||
}
|
||||
|
||||
let mut force_active = force_active.to_vec();
|
||||
|
@ -99,6 +97,10 @@ pub fn generate_ldscript_partial(
|
|||
Ok(out)
|
||||
}
|
||||
|
||||
pub fn obj_path_for_unit(unit: &str) -> PathBuf { PathBuf::from_slash(unit).with_extension("o") }
|
||||
pub fn obj_path_for_unit(unit: &str) -> Utf8NativePathBuf {
|
||||
Utf8UnixPath::new(unit).with_encoding().with_extension("o")
|
||||
}
|
||||
|
||||
pub fn asm_path_for_unit(unit: &str) -> PathBuf { PathBuf::from_slash(unit).with_extension("s") }
|
||||
pub fn asm_path_for_unit(unit: &str) -> Utf8NativePathBuf {
|
||||
Utf8UnixPath::new(unit).with_encoding().with_extension("s")
|
||||
}
|
||||
|
|
|
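The two linker-script helpers above share one conversion: unit names are stored with forward slashes, so they are re-encoded before the extension swap. A sketch of that shared shape (the combined `path_for_unit` helper is illustrative):

```rust
use typed_path::{Utf8NativePathBuf, Utf8UnixPath};

// e.g. path_for_unit("runtime/__init.cpp", "o") -> "runtime/__init.o"
// (or "runtime\\__init.o" on Windows)
fn path_for_unit(unit: &str, ext: &str) -> Utf8NativePathBuf {
    Utf8UnixPath::new(unit).with_encoding().with_extension(ext)
}
```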
@ -5,7 +5,6 @@ use std::{
|
|||
hash::Hash,
|
||||
io::BufRead,
|
||||
mem::{replace, take},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, bail, Error, Result};
|
||||
|
@ -16,6 +15,7 @@ use itertools::Itertools;
|
|||
use multimap::MultiMap;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::{Captures, Regex};
|
||||
use typed_path::Utf8NativePath;
|
||||
|
||||
use crate::{
|
||||
obj::{
|
||||
|
@ -26,6 +26,7 @@ use crate::{
|
|||
util::nested::NestedVec,
|
||||
vfs::open_file,
|
||||
};
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||
pub enum SymbolKind {
|
||||
Function,
|
||||
|
@ -713,16 +714,13 @@ where
|
|||
Ok(sm.result)
|
||||
}
|
||||
|
||||
pub fn apply_map_file<P>(
|
||||
path: P,
|
||||
pub fn apply_map_file(
|
||||
path: &Utf8NativePath,
|
||||
obj: &mut ObjInfo,
|
||||
common_bss_start: Option<u32>,
|
||||
mw_comment_version: Option<u8>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
let mut file = open_file(path.as_ref(), true)?;
|
||||
) -> Result<()> {
|
||||
let mut file = open_file(path, true)?;
|
||||
let info = process_map(file.as_mut(), common_bss_start, mw_comment_version)?;
|
||||
apply_map(info, obj)
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ pub mod map;
|
|||
pub mod ncompress;
|
||||
pub mod nested;
|
||||
pub mod nlzss;
|
||||
pub mod path;
|
||||
pub mod rarc;
|
||||
pub mod reader;
|
||||
pub mod rel;
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
str::Utf8Error,
|
||||
string::FromUtf8Error,
|
||||
};
|
||||
|
||||
use typed_path::{NativePath, NativePathBuf, Utf8NativePath, Utf8NativePathBuf};
|
||||
|
||||
// For argp::FromArgs
|
||||
pub fn native_path(value: &str) -> Result<Utf8NativePathBuf, String> {
|
||||
Ok(Utf8NativePathBuf::from(value))
|
||||
}
|
||||
|
||||
/// Checks if the path is valid UTF-8 and returns it as a [`Utf8NativePath`].
|
||||
#[inline]
|
||||
pub fn check_path(path: &Path) -> Result<&Utf8NativePath, Utf8Error> {
|
||||
Utf8NativePath::from_bytes_path(NativePath::new(path.as_os_str().as_encoded_bytes()))
|
||||
}
|
||||
|
||||
/// Checks if the path is valid UTF-8 and returns it as a [`Utf8NativePathBuf`].
|
||||
#[inline]
|
||||
pub fn check_path_buf(path: PathBuf) -> Result<Utf8NativePathBuf, FromUtf8Error> {
|
||||
Utf8NativePathBuf::from_bytes_path_buf(NativePathBuf::from(
|
||||
path.into_os_string().into_encoded_bytes(),
|
||||
))
|
||||
}
|
|
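When a `std::path::PathBuf` still shows up (glob results, OS APIs), `check_path_buf` above is the bridge back into typed paths, and it is the one place a UTF-8 check remains. A hedged usage sketch:

```rust
use std::path::PathBuf;

use anyhow::Result;
use typed_path::Utf8NativePathBuf;

use crate::util::path::check_path_buf;

// Fails only if the OS path is not valid UTF-8.
fn from_std(path: PathBuf) -> Result<Utf8NativePathBuf> {
    Ok(check_path_buf(path)?)
}
```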
@ -1,5 +1,6 @@
|
|||
use std::{borrow::Cow, ffi::CStr};
|
||||
|
||||
use typed_path::Utf8UnixPath;
|
||||
use zerocopy::{big_endian::*, FromBytes, Immutable, IntoBytes, KnownLayout};
|
||||
|
||||
use crate::{static_assert, vfs::next_non_empty};
|
||||
|
@ -223,8 +224,8 @@ impl<'a> RarcView<'a> {
|
|||
}
|
||||
|
||||
/// Finds a particular file or directory by path.
|
||||
pub fn find(&self, path: &str) -> Option<RarcNodeKind> {
|
||||
let mut split = path.split('/');
|
||||
pub fn find(&self, path: &Utf8UnixPath) -> Option<RarcNodeKind> {
|
||||
let mut split = path.as_str().split('/');
|
||||
let mut current = next_non_empty(&mut split);
|
||||
|
||||
let mut dir_idx = 0;
|
||||
|
|
|
@ -1,13 +1,11 @@
|
|||
use std::{
|
||||
collections::{btree_map, BTreeMap},
|
||||
path::Path,
|
||||
};
|
||||
use std::collections::{btree_map, BTreeMap};
|
||||
|
||||
use anyhow::{anyhow, bail, ensure, Result};
|
||||
use base64::{engine::general_purpose::STANDARD, Engine};
|
||||
use cwdemangle::{demangle, DemangleOptions};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sha1::{Digest, Sha1};
|
||||
use typed_path::Utf8NativePath;
|
||||
|
||||
use crate::{
|
||||
analysis::{
|
||||
|
@ -246,8 +244,10 @@ pub fn compare_signature(existing: &mut FunctionSignature, new: &FunctionSignatu
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_signature<P>(path: P, symbol_name: &str) -> Result<Option<FunctionSignature>>
|
||||
where P: AsRef<Path> {
|
||||
pub fn generate_signature(
|
||||
path: &Utf8NativePath,
|
||||
symbol_name: &str,
|
||||
) -> Result<Option<FunctionSignature>> {
|
||||
let mut out_symbols: Vec<OutSymbol> = Vec::new();
|
||||
let mut out_relocs: Vec<OutReloc> = Vec::new();
|
||||
let mut symbol_map: BTreeMap<SymbolIndex, u32> = BTreeMap::new();
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use std::{borrow::Cow, ffi::CStr, mem::size_of};
|
||||
|
||||
use anyhow::Result;
|
||||
use typed_path::Utf8UnixPath;
|
||||
use zerocopy::{big_endian::U32, FromBytes, Immutable, IntoBytes, KnownLayout};
|
||||
|
||||
use crate::{static_assert, vfs::next_non_empty};
|
||||
|
@ -138,8 +139,8 @@ impl<'a> U8View<'a> {
|
|||
}
|
||||
|
||||
/// Finds a particular file or directory by path.
|
||||
pub fn find(&self, path: &str) -> Option<(usize, U8Node)> {
|
||||
let mut split = path.split('/');
|
||||
pub fn find(&self, path: &Utf8UnixPath) -> Option<(usize, U8Node)> {
|
||||
let mut split = path.as_str().split('/');
|
||||
let mut current = next_non_empty(&mut split);
|
||||
if current.is_empty() {
|
||||
return Some((0, self.nodes[0]));
|
||||
|
|
|
@ -9,6 +9,7 @@ use nodtool::{
|
|||
nod,
|
||||
nod::{Disc, DiscStream, Fst, NodeKind, OwnedFileStream, PartitionBase, PartitionMeta},
|
||||
};
|
||||
use typed_path::Utf8UnixPath;
|
||||
use zerocopy::IntoBytes;
|
||||
|
||||
use super::{
|
||||
|
@ -46,8 +47,8 @@ impl DiscFs {
|
|||
Ok(Self { disc, base, meta, mtime })
|
||||
}
|
||||
|
||||
fn find(&self, path: &str) -> VfsResult<DiscNode> {
|
||||
let path = path.trim_matches('/');
|
||||
fn find(&self, path: &Utf8UnixPath) -> VfsResult<DiscNode> {
|
||||
let path = path.as_str().trim_matches('/');
|
||||
let mut split = path.split('/');
|
||||
let mut segment = next_non_empty(&mut split);
|
||||
match segment.to_ascii_lowercase().as_str() {
|
||||
|
@ -116,7 +117,7 @@ impl DiscFs {
|
|||
}
|
||||
|
||||
impl Vfs for DiscFs {
|
||||
fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
|
||||
fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
|
||||
match self.find(path)? {
|
||||
DiscNode::None => Err(VfsError::NotFound),
|
||||
DiscNode::Special(_) => Err(VfsError::IsADirectory),
|
||||
|
@ -140,11 +141,11 @@ impl Vfs for DiscFs {
|
|||
}
|
||||
}
|
||||
|
||||
fn exists(&mut self, path: &str) -> VfsResult<bool> {
|
||||
fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
|
||||
Ok(!matches!(self.find(path)?, DiscNode::None))
|
||||
}
|
||||
|
||||
fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
|
||||
fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
|
||||
match self.find(path)? {
|
||||
DiscNode::None => Err(VfsError::NotFound),
|
||||
DiscNode::Special(SpecialDir::Root) => {
|
||||
|
@ -211,7 +212,7 @@ impl Vfs for DiscFs {
|
|||
}
|
||||
}
|
||||
|
||||
fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
|
||||
fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
|
||||
match self.find(path)? {
|
||||
DiscNode::None => Err(VfsError::NotFound),
|
||||
DiscNode::Special(_) => {
|
||||
|
|
|
@ -9,7 +9,6 @@ use std::{
|
|||
fmt::{Debug, Display, Formatter},
|
||||
io,
|
||||
io::{BufRead, Read, Seek, SeekFrom},
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
|
@ -21,6 +20,7 @@ use filetime::FileTime;
|
|||
use nodtool::{nod, nod::DiscStream};
|
||||
use rarc::RarcFs;
|
||||
pub use std_fs::StdFs;
|
||||
use typed_path::{Utf8NativePath, Utf8UnixPath, Utf8UnixPathBuf};
|
||||
use u8_arc::U8Fs;
|
||||
|
||||
use crate::util::{
|
||||
|
@@ -31,13 +31,13 @@ use crate::util::{
 };
 
 pub trait Vfs: DynClone + Send + Sync {
-    fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>>;
+    fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>>;
 
-    fn exists(&mut self, path: &str) -> VfsResult<bool>;
+    fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool>;
 
-    fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>>;
+    fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>>;
 
-    fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata>;
+    fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata>;
 }
 
 dyn_clone::clone_trait_object!(Vfs);
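With every `Vfs` method now taking `&Utf8UnixPath`, helpers written against the trait stay separator-agnostic regardless of the backing filesystem. A minimal sketch of such a helper, assuming it lives inside this crate so `Vfs`, `VfsMetadata`, and `VfsResult` resolve; the function itself is illustrative and not part of this change:

```rust
use typed_path::Utf8UnixPath;

// Hypothetical helper: report the size of an entry if it exists.
// Relies only on the trait methods declared in the hunk above.
fn size_of(fs: &mut dyn Vfs, path: &Utf8UnixPath) -> VfsResult<Option<u64>> {
    if !fs.exists(path)? {
        return Ok(None);
    }
    let metadata: VfsMetadata = fs.metadata(path)?;
    Ok(Some(metadata.len))
}
```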
@@ -192,33 +192,33 @@ where R: Read + Seek + ?Sized {
     }
 }
 
-pub enum OpenResult<'a> {
-    File(Box<dyn VfsFile>, &'a str),
-    Directory(Box<dyn Vfs>, &'a str),
+pub enum OpenResult {
+    File(Box<dyn VfsFile>, Utf8UnixPathBuf),
+    Directory(Box<dyn Vfs>, Utf8UnixPathBuf),
 }
 
-pub fn open_path(path: &Path, auto_decompress: bool) -> anyhow::Result<OpenResult> {
+pub fn open_path(path: &Utf8NativePath, auto_decompress: bool) -> anyhow::Result<OpenResult> {
     open_path_with_fs(Box::new(StdFs), path, auto_decompress)
 }
 
 pub fn open_path_with_fs(
     mut fs: Box<dyn Vfs>,
-    path: &Path,
+    path: &Utf8NativePath,
     auto_decompress: bool,
 ) -> anyhow::Result<OpenResult> {
-    let str = path.to_str().ok_or_else(|| anyhow!("Path is not valid UTF-8"))?;
-    let mut split = str.split(':').peekable();
+    let path = path.with_unix_encoding();
+    let mut split = path.as_str().split(':').peekable();
     let mut current_path = String::new();
     let mut file: Option<Box<dyn VfsFile>> = None;
-    let mut segment = "";
+    let mut segment = Utf8UnixPath::new("");
     loop {
         // Open the next segment if necessary
         if file.is_none() {
-            segment = split.next().unwrap();
+            segment = Utf8UnixPath::new(split.next().unwrap());
             if !current_path.is_empty() {
                 current_path.push(':');
             }
-            current_path.push_str(segment);
+            current_path.push_str(segment.as_str());
             let file_type = match fs.metadata(segment) {
                 Ok(metadata) => metadata.file_type,
                 Err(VfsError::NotFound) => return Err(anyhow!("{} not found", current_path)),
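`open_path_with_fs` treats `:` as a layer separator, so one argument can address a file nested inside archives or disc images; the rewrite converts the native input to Unix encoding once and then works on `Utf8UnixPath` segments. A runnable sketch of just that splitting step, using a made-up layered path rather than the crate's own code:

```rust
use typed_path::{Utf8NativePath, Utf8UnixPath};

fn main() {
    // Hypothetical layered path: a REL inside an archive inside a disc image.
    let input = Utf8NativePath::new("orig/GAME01/disc.iso:files/RELS.arc:rels/mod.rel");

    // Normalize separators once, as the rewritten open_path_with_fs does.
    let unix = input.with_unix_encoding();

    // Each ':'-separated segment is resolved inside the filesystem opened
    // from the previous segment (disc image -> archive -> file).
    for segment in unix.as_str().split(':') {
        let segment = Utf8UnixPath::new(segment);
        println!("layer: {}", segment.as_str());
    }
}
```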
@@ -235,7 +235,7 @@ pub fn open_path_with_fs(
             return if split.peek().is_some() {
                 Err(anyhow!("{} is not a file", current_path))
             } else {
-                Ok(OpenResult::Directory(fs, segment))
+                Ok(OpenResult::Directory(fs, segment.to_path_buf()))
             }
         }
     }
@@ -297,21 +297,21 @@ pub fn open_path_with_fs(
                 FileFormat::Compressed(kind) if auto_decompress => Ok(OpenResult::File(
                     decompress_file(current_file.as_mut(), kind)
                         .with_context(|| format!("Failed to decompress {}", current_path))?,
-                    segment,
+                    segment.to_path_buf(),
                 )),
-                _ => Ok(OpenResult::File(current_file, segment)),
+                _ => Ok(OpenResult::File(current_file, segment.to_path_buf())),
             };
         }
     }
 }
 
-pub fn open_file(path: &Path, auto_decompress: bool) -> anyhow::Result<Box<dyn VfsFile>> {
+pub fn open_file(path: &Utf8NativePath, auto_decompress: bool) -> anyhow::Result<Box<dyn VfsFile>> {
     open_file_with_fs(Box::new(StdFs), path, auto_decompress)
 }
 
 pub fn open_file_with_fs(
     fs: Box<dyn Vfs>,
-    path: &Path,
+    path: &Utf8NativePath,
     auto_decompress: bool,
 ) -> anyhow::Result<Box<dyn VfsFile>> {
     match open_path_with_fs(fs, path, auto_decompress)? {

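Callers of `open_path` now pass `&Utf8NativePath` and receive owned `Utf8UnixPathBuf` segments in `OpenResult`, so the result no longer borrows from the input string. A hedged usage sketch that only uses items shown in the hunks above and therefore only compiles inside the crate (the caller itself is illustrative):

```rust
use typed_path::Utf8NativePath;

// Hypothetical call site: probe a path and report what it resolves to.
fn probe(path: &Utf8NativePath) -> anyhow::Result<()> {
    match open_path(path, /* auto_decompress */ true)? {
        OpenResult::File(mut file, segment) => {
            // segment is an owned Utf8UnixPathBuf naming the innermost layer.
            println!("{} is a file ({} bytes)", segment.as_str(), file.metadata()?.len);
        }
        OpenResult::Directory(_fs, segment) => {
            println!("{} is a directory", segment.as_str());
        }
    }
    Ok(())
}
```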
@@ -1,5 +1,7 @@
 use std::io;
 
+use typed_path::Utf8UnixPath;
+
 use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile};
 use crate::util::rarc::{RarcNodeKind, RarcView};
 
@@ -18,7 +20,7 @@ impl RarcFs {
 }
 
 impl Vfs for RarcFs {
-    fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
+    fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
         let view = self.view()?;
         match view.find(path) {
             Some(RarcNodeKind::File(_, node)) => {
@@ -34,12 +36,12 @@ impl Vfs for RarcFs {
         }
     }
 
-    fn exists(&mut self, path: &str) -> VfsResult<bool> {
+    fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
         let view = self.view()?;
         Ok(view.find(path).is_some())
     }
 
-    fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
+    fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
         let view = self.view()?;
         match view.find(path) {
             Some(RarcNodeKind::Directory(_, dir)) => {
@@ -58,7 +60,7 @@ impl Vfs for RarcFs {
         }
     }
 
-    fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
+    fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
         let metadata = self.file.metadata()?;
         let view = self.view()?;
         match view.find(path) {

@@ -1,10 +1,10 @@
 use std::{
-    io,
+    fs, io,
     io::{BufRead, BufReader, Read, Seek, SeekFrom},
-    path::{Path, PathBuf},
 };
 
 use filetime::FileTime;
+use typed_path::{Utf8NativePathBuf, Utf8UnixPath};
 
 use super::{DiscStream, Vfs, VfsFile, VfsFileType, VfsMetadata, VfsResult};
 
@@ -12,23 +12,25 @@ use super::{DiscStream, Vfs, VfsFile, VfsFileType, VfsMetadata, VfsResult};
 pub struct StdFs;
 
 impl Vfs for StdFs {
-    fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
-        let mut file = StdFile::new(PathBuf::from(path));
+    fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
+        let mut file = StdFile::new(path.with_encoding());
         file.file()?; // Open the file now to check for errors
         Ok(Box::new(file))
     }
 
-    fn exists(&mut self, path: &str) -> VfsResult<bool> { Ok(Path::new(path).exists()) }
+    fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
+        Ok(fs::exists(path.with_encoding())?)
+    }
 
-    fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
-        let entries = std::fs::read_dir(path)?
+    fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
+        let entries = fs::read_dir(path.with_encoding())?
             .map(|entry| entry.map(|e| e.file_name().to_string_lossy().into_owned()))
             .collect::<Result<Vec<_>, _>>()?;
         Ok(entries)
     }
 
-    fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
-        let metadata = std::fs::metadata(path)?;
+    fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
+        let metadata = fs::metadata(path.with_encoding())?;
         Ok(VfsMetadata {
             file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File },
             len: metadata.len(),
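`StdFs` is the boundary where the VFS-internal Unix-style paths meet the host filesystem, which is what the `with_encoding()` calls are for: they re-encode a `Utf8UnixPath` into the platform's native flavor before any `std::fs` call. A runnable typed-path-only sketch of that conversion (the path is illustrative):

```rust
use typed_path::{Utf8NativePathBuf, Utf8UnixPath};

fn main() {
    // VFS-internal representation: always UTF-8, always forward slashes.
    let unix = Utf8UnixPath::new("config/GAME01/splits.txt");

    // Re-encode for the host OS before touching std::fs. On Windows this becomes
    // a backslash-separated path; on Unix it is unchanged.
    let native: Utf8NativePathBuf = unix.with_encoding();
    println!("native path: {}", native.as_str());
}
```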
@@ -38,8 +40,8 @@ impl Vfs for StdFs {
 }
 
 pub struct StdFile {
-    path: PathBuf,
-    file: Option<BufReader<std::fs::File>>,
+    path: Utf8NativePathBuf,
+    file: Option<BufReader<fs::File>>,
     mmap: Option<memmap2::Mmap>,
 }
 
@@ -50,11 +52,11 @@ impl Clone for StdFile {
 
 impl StdFile {
     #[inline]
-    pub fn new(path: PathBuf) -> Self { StdFile { path, file: None, mmap: None } }
+    pub fn new(path: Utf8NativePathBuf) -> Self { StdFile { path, file: None, mmap: None } }
 
-    pub fn file(&mut self) -> io::Result<&mut BufReader<std::fs::File>> {
+    pub fn file(&mut self) -> io::Result<&mut BufReader<fs::File>> {
         if self.file.is_none() {
-            self.file = Some(BufReader::new(std::fs::File::open(&self.path)?));
+            self.file = Some(BufReader::new(fs::File::open(&self.path)?));
         }
         Ok(self.file.as_mut().unwrap())
     }
@@ -85,7 +87,7 @@ impl Seek for StdFile {
 impl VfsFile for StdFile {
     fn map(&mut self) -> io::Result<&[u8]> {
         if self.file.is_none() {
-            self.file = Some(BufReader::new(std::fs::File::open(&self.path)?));
+            self.file = Some(BufReader::new(fs::File::open(&self.path)?));
         }
         if self.mmap.is_none() {
             self.mmap = Some(unsafe { memmap2::Mmap::map(self.file.as_ref().unwrap().get_ref())? });
@@ -94,7 +96,7 @@ impl VfsFile for StdFile {
     }
 
     fn metadata(&mut self) -> io::Result<VfsMetadata> {
-        let metadata = std::fs::metadata(&self.path)?;
+        let metadata = fs::metadata(&self.path)?;
         Ok(VfsMetadata {
             file_type: if metadata.is_dir() { VfsFileType::Directory } else { VfsFileType::File },
             len: metadata.len(),

@@ -1,5 +1,7 @@
 use std::io;
 
+use typed_path::Utf8UnixPath;
+
 use super::{Vfs, VfsError, VfsFile, VfsFileType, VfsMetadata, VfsResult, WindowedFile};
 use crate::util::u8_arc::{U8NodeKind, U8View};
 
@@ -18,7 +20,7 @@ impl U8Fs {
 }
 
 impl Vfs for U8Fs {
-    fn open(&mut self, path: &str) -> VfsResult<Box<dyn VfsFile>> {
+    fn open(&mut self, path: &Utf8UnixPath) -> VfsResult<Box<dyn VfsFile>> {
         let view = self.view()?;
         match view.find(path) {
             Some((_, node)) => match node.kind() {
@@ -35,12 +37,12 @@ impl Vfs for U8Fs {
         }
     }
 
-    fn exists(&mut self, path: &str) -> VfsResult<bool> {
+    fn exists(&mut self, path: &Utf8UnixPath) -> VfsResult<bool> {
         let view = self.view()?;
         Ok(view.find(path).is_some())
     }
 
-    fn read_dir(&mut self, path: &str) -> VfsResult<Vec<String>> {
+    fn read_dir(&mut self, path: &Utf8UnixPath) -> VfsResult<Vec<String>> {
        let view = self.view()?;
         match view.find(path) {
             Some((idx, node)) => match node.kind() {
@@ -66,7 +68,7 @@ impl Vfs for U8Fs {
         }
     }
 
-    fn metadata(&mut self, path: &str) -> VfsResult<VfsMetadata> {
+    fn metadata(&mut self, path: &Utf8UnixPath) -> VfsResult<VfsMetadata> {
         let metdata = self.file.metadata()?;
         let view = self.view()?;
         match view.find(path) {