6 months of occasional work I guess

Luke Street 2023-07-21 17:59:07 -04:00
parent f1b4afa885
commit 0fa0aafaea
44 changed files with 4688 additions and 1817 deletions

Cargo.lock (generated)

@ -30,18 +30,18 @@ dependencies = [
[[package]] [[package]]
name = "aho-corasick" name = "aho-corasick"
version = "0.7.20" version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
dependencies = [ dependencies = [
"memchr", "memchr",
] ]
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.66" version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6" checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
dependencies = [ dependencies = [
"backtrace", "backtrace",
] ]
@ -53,9 +53,9 @@ source = "git+https://github.com/bjorn3/rust-ar.git?branch=write_symbol_table#a6
[[package]] [[package]]
name = "argh" name = "argh"
version = "0.1.9" version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c375edecfd2074d5edcc31396860b6e54b6f928714d0e097b983053fac0cabe3" checksum = "ab257697eb9496bf75526f0217b5ed64636a9cfafa78b8365c71bd283fcef93e"
dependencies = [ dependencies = [
"argh_derive", "argh_derive",
"argh_shared", "argh_shared",
@ -63,22 +63,21 @@ dependencies = [
[[package]] [[package]]
name = "argh_derive" name = "argh_derive"
version = "0.1.9" version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa013479b80109a1bf01a039412b0f0013d716f36921226d86c6709032fb7a03" checksum = "b382dbd3288e053331f03399e1db106c9fb0d8562ad62cb04859ae926f324fa6"
dependencies = [ dependencies = [
"argh_shared", "argh_shared",
"heck",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn", "syn 1.0.107",
] ]
[[package]] [[package]]
name = "argh_shared" name = "argh_shared"
version = "0.1.9" version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "149f75bbec1827618262e0855a68f0f9a7f2edc13faebf33c4f16d6725edb6a9" checksum = "64cb94155d965e3d37ffbbe7cc5b82c3dd79dd33bd48e536f73d2cfb8d85506f"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
@ -96,22 +95,22 @@ dependencies = [
"cc", "cc",
"cfg-if", "cfg-if",
"libc", "libc",
"miniz_oxide", "miniz_oxide 0.6.2",
"object", "object 0.30.0",
"rustc-demangle", "rustc-demangle",
] ]
[[package]] [[package]]
name = "base16ct" name = "base16ct"
version = "0.1.1" version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.21.0" version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
[[package]] [[package]]
name = "bincode" name = "bincode"
@ -145,9 +144,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.0.78" version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
@ -185,9 +184,9 @@ dependencies = [
[[package]] [[package]]
name = "cwdemangle" name = "cwdemangle"
version = "0.1.3" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e411efa4ed072fa5bdb637c945ea7f618ebd416748cecc255b00968c1db81e68" checksum = "b58d34a3a03cfe0a4ebfd03aeda6ee8a0f2e99bd3308476a8a89815add3ec373"
dependencies = [ dependencies = [
"argh", "argh",
] ]
@ -209,14 +208,16 @@ dependencies = [
"fixedbitset", "fixedbitset",
"flagset", "flagset",
"flate2", "flate2",
"glob",
"hex", "hex",
"indexmap", "indexmap 2.0.0",
"itertools",
"log", "log",
"memchr", "memchr",
"memmap2", "memmap2",
"multimap", "multimap",
"num_enum", "num_enum",
"object", "object 0.31.1",
"once_cell", "once_cell",
"ppc750cl", "ppc750cl",
"regex", "regex",
@ -249,6 +250,12 @@ dependencies = [
"thiserror", "thiserror",
] ]
[[package]]
name = "either"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]] [[package]]
name = "env_logger" name = "env_logger"
version = "0.10.0" version = "0.10.0"
@ -262,6 +269,12 @@ dependencies = [
"termcolor", "termcolor",
] ]
[[package]]
name = "equivalent"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1"
[[package]] [[package]]
name = "errno" name = "errno"
version = "0.2.8" version = "0.2.8"
@ -285,14 +298,14 @@ dependencies = [
[[package]] [[package]]
name = "filetime" name = "filetime"
version = "0.2.18" version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3" checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"redox_syscall", "redox_syscall",
"windows-sys", "windows-sys 0.48.0",
] ]
[[package]] [[package]]
@ -312,12 +325,12 @@ dependencies = [
[[package]] [[package]]
name = "flate2" name = "flate2"
version = "1.0.25" version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
dependencies = [ dependencies = [
"crc32fast", "crc32fast",
"miniz_oxide", "miniz_oxide 0.7.1",
] ]
[[package]] [[package]]
@ -336,6 +349,12 @@ version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793" checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.12.3" version = "0.12.3"
@ -352,10 +371,10 @@ dependencies = [
] ]
[[package]] [[package]]
name = "heck" name = "hashbrown"
version = "0.4.0" version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
[[package]] [[package]]
name = "hermit-abi" name = "hermit-abi"
@ -388,6 +407,16 @@ dependencies = [
"hashbrown 0.12.3", "hashbrown 0.12.3",
] ]
[[package]]
name = "indexmap"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
dependencies = [
"equivalent",
"hashbrown 0.14.0",
]
[[package]] [[package]]
name = "io-lifetimes" name = "io-lifetimes"
version = "1.0.3" version = "1.0.3"
@ -395,7 +424,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c" checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys", "windows-sys 0.42.0",
] ]
[[package]] [[package]]
@ -407,7 +436,16 @@ dependencies = [
"hermit-abi", "hermit-abi",
"io-lifetimes", "io-lifetimes",
"rustix", "rustix",
"windows-sys", "windows-sys 0.42.0",
]
[[package]]
name = "itertools"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
dependencies = [
"either",
] ]
[[package]] [[package]]
@ -418,9 +456,9 @@ checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.137" version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89" checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]] [[package]]
name = "linux-raw-sys" name = "linux-raw-sys"
@ -430,12 +468,9 @@ checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4"
[[package]] [[package]]
name = "log" name = "log"
version = "0.4.17" version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "memchr" name = "memchr"
@ -445,9 +480,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]] [[package]]
name = "memmap2" name = "memmap2"
version = "0.5.8" version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc" checksum = "f49388d20533534cd19360ad3d6a7dadc885944aa802ba3995040c5ec11288c6"
dependencies = [ dependencies = [
"libc", "libc",
] ]
@ -462,10 +497,19 @@ dependencies = [
] ]
[[package]] [[package]]
name = "multimap" name = "miniz_oxide"
version = "0.8.3" version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
dependencies = [
"adler",
]
[[package]]
name = "multimap"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70db9248a93dc36a36d9a47898caa007a32755c7ad140ec64eeeb50d5a730631"
dependencies = [ dependencies = [
"serde", "serde",
] ]
@ -481,23 +525,23 @@ dependencies = [
[[package]] [[package]]
name = "num_enum" name = "num_enum"
version = "0.5.7" version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf5395665662ef45796a4ff5486c5d41d29e0c09640af4c5f17fd94ee2c119c9" checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1"
dependencies = [ dependencies = [
"num_enum_derive", "num_enum_derive",
] ]
[[package]] [[package]]
name = "num_enum_derive" name = "num_enum_derive"
version = "0.5.7" version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b0498641e53dd6ac1a4f22547548caa6864cc4933784319cd1775271c5a46ce" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6"
dependencies = [ dependencies = [
"proc-macro-crate", "proc-macro-crate",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn", "syn 2.0.23",
] ]
[[package]] [[package]]
@ -505,18 +549,27 @@ name = "object"
version = "0.30.0" version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb" checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb"
dependencies = [
"memchr",
]
[[package]]
name = "object"
version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
dependencies = [ dependencies = [
"crc32fast", "crc32fast",
"hashbrown 0.13.1", "hashbrown 0.13.1",
"indexmap", "indexmap 1.9.2",
"memchr", "memchr",
] ]
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.17.0" version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]] [[package]]
name = "paste" name = "paste"
@ -546,18 +599,18 @@ dependencies = [
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.47" version = "1.0.63"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.21" version = "1.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
] ]
@ -573,9 +626,21 @@ dependencies = [
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.7.0" version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" checksum = "89089e897c013b3deb627116ae56a6955a72b8bed395c9526af31c9fe528b484"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa250384981ea14565685dea16a9ccc4d1c541a13f82b9c168572264d1df8c56"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",
@ -584,9 +649,9 @@ dependencies = [
[[package]] [[package]]
name = "regex-syntax" name = "regex-syntax"
version = "0.6.28" version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846"
[[package]] [[package]]
name = "rmp" name = "rmp"
@ -612,9 +677,9 @@ dependencies = [
[[package]] [[package]]
name = "rustc-demangle" name = "rustc-demangle"
version = "0.1.21" version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]] [[package]]
name = "rustix" name = "rustix"
@ -627,7 +692,7 @@ dependencies = [
"io-lifetimes", "io-lifetimes",
"libc", "libc",
"linux-raw-sys", "linux-raw-sys",
"windows-sys", "windows-sys 0.42.0",
] ]
[[package]] [[package]]
@ -638,42 +703,42 @@ checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.152" version = "1.0.166"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.152" version = "1.0.166"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn", "syn 2.0.23",
] ]
[[package]] [[package]]
name = "serde_repr" name = "serde_repr"
version = "0.1.10" version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a5ec9fa74a20ebbe5d9ac23dac1fc96ba0ecfe9f50f2843b52e537b10fbcb4e" checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn", "syn 2.0.23",
] ]
[[package]] [[package]]
name = "serde_yaml" name = "serde_yaml"
version = "0.9.16" version = "0.9.22"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92b5b431e8907b50339b51223b97d102db8d987ced36f6e4d03621db9316c834" checksum = "452e67b9c20c37fa79df53201dc03839651086ed9bbe92b3ca585ca9fdaa7d85"
dependencies = [ dependencies = [
"indexmap", "indexmap 2.0.0",
"itoa", "itoa",
"ryu", "ryu",
"serde", "serde",
@ -682,9 +747,9 @@ dependencies = [
[[package]] [[package]]
name = "sha-1" name = "sha-1"
version = "0.10.0" version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"cpufeatures", "cpufeatures",
@ -693,9 +758,9 @@ dependencies = [
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "1.10.0" version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
[[package]] [[package]]
name = "syn" name = "syn"
@ -708,6 +773,17 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "syn"
version = "2.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]] [[package]]
name = "termcolor" name = "termcolor"
version = "1.1.3" version = "1.1.3"
@ -734,7 +810,7 @@ checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn", "syn 1.0.107",
] ]
[[package]] [[package]]
@ -766,9 +842,9 @@ checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
[[package]] [[package]]
name = "unsafe-libyaml" name = "unsafe-libyaml"
version = "0.2.5" version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc7ed8ba44ca06be78ea1ad2c3682a43349126c8818054231ee6f4748012aed2" checksum = "1865806a559042e51ab5414598446a5871b561d21b6764f2eabb0dd481d880a6"
[[package]] [[package]]
name = "version_check" name = "version_check"
@ -813,13 +889,37 @@ version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [ dependencies = [
"windows_aarch64_gnullvm", "windows_aarch64_gnullvm 0.42.0",
"windows_aarch64_msvc", "windows_aarch64_msvc 0.42.0",
"windows_i686_gnu", "windows_i686_gnu 0.42.0",
"windows_i686_msvc", "windows_i686_msvc 0.42.0",
"windows_x86_64_gnu", "windows_x86_64_gnu 0.42.0",
"windows_x86_64_gnullvm", "windows_x86_64_gnullvm 0.42.0",
"windows_x86_64_msvc", "windows_x86_64_msvc 0.42.0",
]
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
dependencies = [
"windows_aarch64_gnullvm 0.48.0",
"windows_aarch64_msvc 0.48.0",
"windows_i686_gnu 0.48.0",
"windows_i686_msvc 0.48.0",
"windows_x86_64_gnu 0.48.0",
"windows_x86_64_gnullvm 0.48.0",
"windows_x86_64_msvc 0.48.0",
] ]
[[package]] [[package]]
@ -828,38 +928,80 @@ version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]] [[package]]
name = "windows_aarch64_msvc" name = "windows_aarch64_msvc"
version = "0.42.0" version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]] [[package]]
name = "windows_i686_gnu" name = "windows_i686_gnu"
version = "0.42.0" version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
[[package]]
name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]] [[package]]
name = "windows_i686_msvc" name = "windows_i686_msvc"
version = "0.42.0" version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
[[package]]
name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]] [[package]]
name = "windows_x86_64_gnu" name = "windows_x86_64_gnu"
version = "0.42.0" version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]] [[package]]
name = "windows_x86_64_gnullvm" name = "windows_x86_64_gnullvm"
version = "0.42.0" version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]] [[package]]
name = "windows_x86_64_msvc" name = "windows_x86_64_msvc"
version = "0.42.0" version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"

Cargo.toml

@ -21,43 +21,46 @@ panic = "abort"
strip = "debuginfo" strip = "debuginfo"
[dependencies] [dependencies]
anyhow = { version = "1.0.64", features = ["backtrace"] } anyhow = { version = "1.0.71", features = ["backtrace"] }
ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "write_symbol_table" } ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "write_symbol_table" }
argh = "0.1.8" argh = "0.1.10"
base16ct = "0.1.1" base16ct = "0.2.0"
base64 = "0.21.0" base64 = "0.21.2"
byteorder = "1.4.3" byteorder = "1.4.3"
cwdemangle = "0.1.3" cwdemangle = "0.1.5"
dol = { git = "https://github.com/encounter/ppc750cl", rev = "5f6e991bf495388c4104f188d2e90c79da9f78de" } dol = { git = "https://github.com/encounter/ppc750cl", rev = "5f6e991bf495388c4104f188d2e90c79da9f78de" }
env_logger = "0.10.0" env_logger = "0.10.0"
filetime = "0.2.18" filetime = "0.2.21"
fixedbitset = "0.4.2" fixedbitset = "0.4.2"
flagset = { version = "0.4.3", features = ["serde"] } flagset = { version = "0.4.3", features = ["serde"] }
flate2 = "1.0.26"
glob = "0.3.1"
hex = "0.4.3" hex = "0.4.3"
indexmap = "1.9.2" indexmap = "2.0.0"
log = "0.4.17" itertools = "0.11.0"
log = "0.4.19"
memchr = "2.5.0" memchr = "2.5.0"
memmap2 = "0.5.7" memmap2 = "0.7.1"
multimap = "0.8.3" multimap = "0.9.0"
num_enum = "0.5.7" num_enum = "0.6.1"
object = { version = "0.30.0", features = ["read_core", "std", "elf", "write_std"], default-features = false } object = { version = "0.31.1", features = ["read_core", "std", "elf", "write_std"], default-features = false }
once_cell = "1.17.0" once_cell = "1.18.0"
ppc750cl = { git = "https://github.com/encounter/ppc750cl", rev = "5f6e991bf495388c4104f188d2e90c79da9f78de" } ppc750cl = { git = "https://github.com/encounter/ppc750cl", rev = "5f6e991bf495388c4104f188d2e90c79da9f78de" }
regex = "1.6.0" regex = "1.9.0"
serde = "1.0.152" serde = "1.0.166"
serde_repr = "0.1.10" serde_repr = "0.1.14"
serde_yaml = "0.9.16" serde_yaml = "0.9.22"
sha-1 = "0.10.0" sha-1 = "0.10.1"
smallvec = "1.10.0" smallvec = "1.11.0"
topological-sort = "0.2.2" topological-sort = "0.2.2"
flate2 = "1.0.25"
[build-dependencies] [build-dependencies]
anyhow = { version = "1.0.64", features = ["backtrace"] } anyhow = { version = "1.0.71", features = ["backtrace"] }
base64 = "0.21.0" base64 = "0.21.2"
flagset = { version = "0.4.3", features = ["serde"] } flagset = { version = "0.4.3", features = ["serde"] }
serde = "1.0.152" serde = "1.0.166"
serde_repr = "0.1.10" serde_repr = "0.1.14"
serde_yaml = "0.9.16" serde_yaml = "0.9.22"
rmp-serde = "1.1.1" rmp-serde = "1.1.1"
flate2 = "1.0.25" flate2 = "1.0.26"

README.md

@ -3,11 +3,230 @@
[Build Status]: https://github.com/encounter/decomp-toolkit/actions/workflows/build.yml/badge.svg
[actions]: https://github.com/encounter/decomp-toolkit/actions
Yet another GameCube/Wii decompilation toolkit.
decomp-toolkit functions both as a command-line tool for developers, and as a replacement for various parts of a
decompilation project's build system.
## Goals
- Automate as much as possible, allowing developers to focus on matching code rather than months-long tedious setup.
- Provide highly **accurate** and performant analysis and tooling.
- Provide everything in a single portable binary. This simplifies project setup: a script can simply fetch the
binary from GitHub.
- Replace common usages of msys2 and GNU assembler, eliminating the need to depend on devkitPro.
- Integrate well with other decompilation tooling like [objdiff](https://github.com/encounter/objdiff) and
[decomp.me](https://decomp.me).
## Background
The goal of a matching decompilation project is to write C/C++ code that compiles back to the _exact_ same binary as
the original game. This often requires using the same compiler as the original game. (For GameCube and Wii,
[Metrowerks CodeWarrior](https://en.wikipedia.org/wiki/CodeWarrior))
When compiling C/C++ code, the compiler (in our case, `mwcceppc`) generates an object file (`.o`) for every source file.
This object file contains the compiled machine code, as well as information that the linker (`mwldeppc`) uses to
generate the final executable.
One way to verify that our code is a match is by taking any code that has been decompiled, and
linking it alongside portions of the original binary that have not been decompiled yet. First, we create relocatable
objects from the original binary:
<picture>
<source media="(prefers-color-scheme: dark)" srcset="assets/diagram_dark.svg">
<source media="(prefers-color-scheme: light)" srcset="assets/diagram_light.svg">
<img alt="Binary split diagram" src="assets/diagram.svg">
</picture>
(Heavily simplified)
Then, each object can be replaced by a decompiled version as matching code is written. If the linker still generates a
binary that is byte-for-byte identical to the original, then we know that the decompiled code is a match.
decomp-toolkit provides tooling for analyzing and splitting the original binary into relocatable objects, as well
as generating the linker script and other files needed to link the decompiled code.
## Other approaches
### Manual assembly
With existing GameCube/Wii decompilation tooling, the setup process is very tedious and error-prone.
The general process is:
- Begin by disassembling the original binary with a tool like
[doldisasm.py](https://gist.github.com/camthesaxman/a36f610dbf4cc53a874322ef146c4123). This produces one giant
assembly file per section.
- Manually comb through the assembly files and fix many issues, like incorrect or missing relocations, incorrect or
missing symbols, and more.
- Manually find-and-replace the auto-generated symbol names based on other sources, like other decompilation projects
or a map file. (If you're lucky enough to have one)
- Manually determine data types and sizes, and convert them accordingly. (For example, `.4byte` -> `.float`, strings,
etc)
- Manually split the assembly files into individual objects. This is a very tedious process, as it requires identifying
the boundaries of each function, determining whether adjacent functions are related, finding associated
data from each data section, and cut-and-pasting all of this into a new file.
Other downsides of this approach:
- Manually editing the assembly means that the result is not reproducible. You can't run the script again to
make any updates, because your changes will be overwritten. This also means that the assembly files must be
stored in version control, which is not ideal.
- Incorrectly splitting objects is very easy to do, and can be difficult to detect. For example, a `.ctors` entry _must_
be located in the same object as the function it references, otherwise the linker will not generate the correct
`.ctors` entry. `extab` and `extabindex` entries _must also_ be located in the same object as the function they
reference, have a label and have the correct size, and have a direct relocation rather than a section-relative
relocation. Otherwise, the linker will crash with a cryptic error message.
- Relying on assembly means that you need an assembler. For GameCube/Wii, this means devkitPro, which is a
large dependency and an obstacle for new contributors. The assembler also has some quirks that don't interact well
with `mwldeppc`, which means that the object files must be manually post-processed to fix these issues. (See the
[elf fixup](#elf-fixup) command)
With decomp-toolkit:
- Many analysis steps are automated and highly accurate. Many DOL files can be analyzed and split into re-linkable
objects with no configuration.
- Signature analysis automatically labels common functions and objects, and allows for more accurate relocation
rebuilding.
- Any manual adjustments are stored in configuration files, which are stored in version control.
- Splitting is simplified by updating a configuration file. The analyzer will check for common issues, like
incorrectly split `.ctors`/`.dtors`/`extab`/`extabindex` entries. If the user hasn't configured a split for these,
the analyzer will automatically split them along with their associated functions to ensure that the linker will
generate everything correctly. This means that matching code can be written without worrying about splitting all
sections up front.
- The splitter generates object files directly, with no assembler required. This means that we can avoid the devkitPro
requirement. (Although we can still generate assembly files for viewing, editing, and compatibility with other tools)
### dadosod
[dadosod](https://github.com/InusualZ/dadosod) is a newer replacement for `doldisasm.py`. It has more accurate function
and relocation analysis than `doldisasm.py`, as well as support for renaming symbols based on a map file. However, since
it operates as a one-shot assembly generator, it still suffers from many of the same issues described above.
### ppcdis
[ppcdis](https://github.com/SeekyCt/ppcdis) is one of the tools that inspired decomp-toolkit. It has more accurate
analysis than doldisasm.py, and has similar goals to decomp-toolkit. It also has some features that decomp-toolkit
does not yet have, like support for REL files.
However, decomp-toolkit has a few advantages:
- Faster and more accurate analysis. (See [Analyzer features](#analyzer-features))
- Emits object files directly, with no assembler required.
- More robust handling of features like common BSS, `.ctors`/`.dtors`/`extab`/`extabindex`, and more.
- Requires very little configuration to start.
- Automatically labels common functions and objects with signature analysis.
### Honorable mentions
[splat](https://github.com/ethteck/splat) is a binary splitting tool for N64 and PSX. Some ideas from splat inspired
decomp-toolkit, like the symbol configuration format.
## Terminology
### DOL
A [DOL file](https://wiki.tockdom.com/wiki/DOL_(File_Format)) is the executable format used by GameCube and Wii games.
It's essentially a raw binary with a header that contains information about the code and data sections, as well as the
entry point.
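For reference, the header is a fixed `0x100`-byte block of big-endian words, as documented on the wiki linked above. A rough sketch of its layout (field names are illustrative, not taken from decomp-toolkit's code):
```rust
// Sketch of the 0x100-byte DOL header. All fields are big-endian u32s; unused
// section slots are simply zeroed.
#[repr(C)]
struct DolHeader {
    text_offsets: [u32; 7],    // file offsets of up to 7 text sections
    data_offsets: [u32; 11],   // file offsets of up to 11 data sections
    text_addresses: [u32; 7],  // load addresses for the text sections
    data_addresses: [u32; 11], // load addresses for the data sections
    text_sizes: [u32; 7],
    data_sizes: [u32; 11],
    bss_address: u32,          // start of the zero-initialized region
    bss_size: u32,
    entry_point: u32,          // address where execution starts
    _padding: [u32; 7],        // pads the header out to 0x100 bytes
}
```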
### ELF
An [ELF file](https://en.wikipedia.org/wiki/Executable_and_Linkable_Format) is the executable format used by most
Unix-like operating systems. There are two common types of ELF files: **relocatable** and **executable**.
A relocatable ELF (`.o`, also called "object file") contains machine code and relocation information, and is used as
input to the linker. Each object file is compiled from a single source file (`.c`, `.cpp`).
An executable ELF (`.elf`) contains the final machine code that can be loaded and executed. It *can* include
information about symbols, debug information (DWARF), and sometimes information about the original relocations, but it
is often missing some or all of these (referred to as "stripped").
### Symbol
A symbol is a name that is assigned to a memory address. Symbols can be functions, variables, or other data.
**Local** symbols are only visible within the object file they are defined in.
These are usually defined as `static` in C/C++ or are compiler-generated.
**Global** symbols are visible to all object files, and their names must be unique.
**Weak** symbols are similar to global symbols, but can be replaced by a global symbol with the same name.
For example: the SDK defines a weak `OSReport` function, which can be replaced by a game-specific implementation.
Weak symbols are also used for functions generated by the compiler or as a result of C++ features, since they can exist
in multiple object files. The linker will deduplicate these functions, keeping only the first copy.
### Relocation
A relocation is essentially a pointer to a symbol. At compile time, the final address of a symbol is
not known yet, therefore a relocation is needed.
At link time, each symbol is assigned a final address, and the linker will use the relocations to update the machine
code with the final address of each symbol.
Before:
```asm
# Unrelocated, instructions point to address 0 (unknown)
lis r3, 0
ori r3, r3, 0
```
After:
```asm
# Relocated, instructions point to 0x80001234
lis r3, 0x8000
ori r3, r3, 0x1234
```
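To rebuild a relocation like the one above, the analyzer has to recognize the paired high/low halves and recover the 32-bit target they encode. A minimal sketch of that arithmetic (illustrative only; the real analysis in `src/analysis/` does considerably more bookkeeping):
```rust
// `lis rD, HI` places HI in the upper halfword; `ori rD, rD, LO` ORs in the low
// halfword, so the target is simply (HI << 16) | LO.
fn lis_ori_target(hi: u16, lo: u16) -> u32 {
    ((hi as u32) << 16) | lo as u32
}

// For a `lis` + `addi` pair (an @ha/@l pair) the low half is a *signed* offset,
// so the high half was pre-adjusted whenever the low half is negative.
fn lis_addi_target(ha: u16, l: i16) -> u32 {
    ((ha as u32) << 16).wrapping_add(l as i32 as u32)
}

fn main() {
    // Matches the "After" listing above: lis r3, 0x8000; ori r3, r3, 0x1234
    assert_eq!(lis_ori_target(0x8000, 0x1234), 0x8000_1234);
    assert_eq!(lis_addi_target(0x8000, -0x10), 0x7FFF_FFF0);
}
```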
Once the linker performs the relocation with the final address, the relocation is no longer needed. The final ELF will
sometimes still contain the relocation information, but the conversion to DOL will **always** remove it.
When we analyze a file, we attempt to rebuild the relocations. This is useful for several reasons:
- It allows us to split the file into relocatable objects. Each object can then be replaced with a decompiled version,
as matching code is written.
- It allows us to modify or add code and data to the game and have all machine code still point to the correct
symbols, which may now be in a different location.
- It allows us to view the machine code in a disassembler and show symbol names instead of raw addresses.
## Analyzer features
**Function boundary analysis**
Discovers function boundaries with high accuracy. Uses various heuristics to disambiguate tail calls from
inner-function control flow.
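As an illustration, one of the simpler heuristics can be sketched like this (not decomp-toolkit's exact rule set; the real analyzer also follows control flow and register state):
```rust
use std::collections::BTreeSet;

// Illustrative only: an unconditional branch that leaves the current function's
// bounds and lands on a known function entry is almost certainly a tail call
// rather than inner-function control flow.
fn is_likely_tail_call(
    branch_target: u32,
    func_start: u32,
    func_end: u32,
    known_entries: &BTreeSet<u32>,
) -> bool {
    (branch_target < func_start || branch_target >= func_end)
        && known_entries.contains(&branch_target)
}
```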
**Signature analysis**
Utilizes a built-in signature database to identify common Metrowerks and SDK functions and objects.
This also helps decomp-toolkit automatically generate required splits, like `__init_cpp_exceptions`.
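As a sketch of the general idea (not decomp-toolkit's actual database format, which is generated by `build.rs`): instruction bytes covered by relocations are masked out before hashing, so a function matches regardless of its final link address.
```rust
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Hash a function's bytes with relocated immediates zeroed out, so the hash is
// stable no matter where the function ended up being linked.
fn masked_hash(code: &[u8], reloc_offsets: &[usize]) -> u64 {
    let mut masked = code.to_vec();
    for &off in reloc_offsets {
        // For PPC D-form instructions, the 16-bit immediate sits in the last two
        // bytes of the (big-endian) instruction word.
        masked[off + 2] = 0;
        masked[off + 3] = 0;
    }
    let mut h = std::collections::hash_map::DefaultHasher::new();
    masked.hash(&mut h);
    h.finish()
}

fn main() {
    // Hypothetical database mapping a masked hash to a known symbol name.
    let mut db = HashMap::new();
    let code = [0x3C, 0x60, 0x00, 0x00, 0x4E, 0x80, 0x00, 0x20]; // lis r3, 0; blr
    db.insert(masked_hash(&code, &[0]), "OSReport");

    // The same function linked elsewhere (different immediate) still matches.
    let linked = [0x3C, 0x60, 0x80, 0x34, 0x4E, 0x80, 0x00, 0x20]; // lis r3, 0x8034; blr
    assert_eq!(db.get(&masked_hash(&linked, &[0])), Some(&"OSReport"));
}
```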
**Relocation analysis**
Performs control-flow analysis and rebuilds relocations with high accuracy.
With some manual tweaking (mainly in data), this should generate fully-shiftable objects.
**Section analysis**
Automatically identifies DOL and REL sections based on information from signature and relocation analysis.
**Object analysis**
Attempts to identify the type and size of data objects by analyzing usage.
Also attempts to identify string literals, wide string literals, and string tables.
**Splitting**
Generates split object files in memory based on user configuration.
In order to support relinking with `mwldeppc.exe`, any **unsplit** `.ctors`, `.dtors`, `extab` and `extabindex` entries
are analyzed and automatically split along with their associated functions. This ensures that the linker will properly
generate these sections without any additional configuration.
A topological sort is performed to determine the final link order of the split objects.
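A minimal sketch of that final ordering step using the `topological-sort` crate from `Cargo.toml` (the object names here are made up):
```rust
use topological_sort::TopologicalSort;

fn main() {
    let mut ts = TopologicalSort::<&str>::new();
    // add_dependency(prec, succ): `prec` must come before `succ` in the link order.
    ts.add_dependency("Runtime.PPCEABI.H/global_destructor_chain.o", "main.o");
    ts.add_dependency("main.o", "game/object.o");

    // Pop objects whose prerequisites have all been emitted. Getting None while
    // items remain would indicate a dependency cycle.
    while let Some(obj) = ts.pop() {
        println!("{obj}");
    }
}
```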
**Object file writing**
Writes object files directly, with no assembler required. (Bye devkitPPC!)
If desired, optionally writes GNU assembler-compatible files alongside the object files.
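A hedged sketch of what "writing object files directly" can look like with the `object` crate (the `write_std` feature listed in `Cargo.toml`); the section contents and symbol name below are placeholders:
```rust
use object::write::{Object, Symbol, SymbolSection};
use object::{
    Architecture, BinaryFormat, Endianness, SectionKind, SymbolFlags, SymbolKind, SymbolScope,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A big-endian PowerPC relocatable ELF, built entirely in memory.
    let mut obj = Object::new(BinaryFormat::Elf, Architecture::PowerPc, Endianness::Big);

    // One .text section containing a single (fake) function's machine code.
    let text = obj.add_section(vec![], b".text".to_vec(), SectionKind::Text);
    let code = [0x4E, 0x80, 0x00, 0x20]; // blr
    let offset = obj.append_section_data(text, &code, 4);

    obj.add_symbol(Symbol {
        name: b"fn_80001234".to_vec(),
        value: offset,
        size: code.len() as u64,
        kind: SymbolKind::Text,
        scope: SymbolScope::Linkage,
        weak: false,
        section: SymbolSection::Section(text),
        flags: SymbolFlags::None,
    });

    std::fs::write("fn_80001234.o", obj.write()?)?;
    Ok(())
}
```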
**Linker script generation**
Generates `ldscript.lcf` for `mwldeppc.exe`.
**Future work**
- Support REL and RSO files
- Add more signatures
- Rework CodeWarrior map parsing
## Commands
@ -32,17 +251,45 @@ $ dtk demangle 'BuildLight__9CGuiLightCFv'
CGuiLight::BuildLight() const
```
### dol info
Analyzes a DOL file and outputs section and symbol information.
```shell
$ dtk dol info input.dol
```
### dol split
> [!NOTE]
> This command is a work-in-progress.
Analyzes and splits a DOL file into relocatable objects based on user configuration.
```shell
$ dtk dol split input.dol target -s config/symbols.txt -p config/splits.txt
```
### dwarf dump
Dumps DWARF 1.1 information from an ELF file. (Does **not** support DWARF 2+)
```shell
$ dtk dwarf dump input.elf
```
### elf disasm
Disassemble an unstripped CodeWarrior ELF file. Attempts to automatically split objects and rebuild relocations
when possible.
```shell
$ dtk elf disasm input.elf out
``` ```
### elf fixup
Fixes issues with GNU assembler-built objects to ensure compatibility with `mwldeppc.exe`.
- Strips empty sections
- Generates section symbols for all allocatable sections
@ -64,6 +311,9 @@ $ dtk elf2dol input.elf output.dol
### map
> [!WARNING]
> This command is currently broken.
Processes CodeWarrior map files and provides information about symbols and TUs.
```shell
@ -79,6 +329,34 @@ $ dtk map symbol Game.MAP 'Function__5ClassFv'
# in a readable format.
```
### rel info
Prints basic information about a REL file.
```shell
$ dtk rel info input.rel
```
### rel merge
Merges a DOL file and associated RELs into a single ELF file, suitable for analysis in your favorite
reverse engineering software.
```shell
$ dtk rel merge main.dol rels/*.rel -o merged.elf
```
### rso info
> [!WARNING]
> This command is not yet functional.
Prints basic information about an RSO file.
```shell
$ dtk rso info input.rso
```
### shasum
Calculate and verify SHA-1 hashes.
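Internally this is plain SHA-1 via the `sha-1` dependency; a minimal sketch of the hashing step (assuming the crate is imported as `sha1`; verification mode is omitted):
```rust
use sha1::{Digest, Sha1};

fn main() -> std::io::Result<()> {
    let path = std::env::args().nth(1).expect("usage: shasum <file>");
    let data = std::fs::read(&path)?;
    let digest = Sha1::digest(&data);
    // Print in the familiar `<hex digest>  <file>` layout used by shasum-style tools.
    let hex: String = digest.iter().map(|b| format!("{b:02x}")).collect();
    println!("{hex}  {path}");
    Ok(())
}
```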

assets/diagram.svg (new file, SVG image, 40 KiB)

assets/diagram_dark.svg (new file, SVG image, 34 KiB)

assets/diagram_light.svg (new file, SVG image, 34 KiB)

assets/ldscript.lcf (new file)

@ -0,0 +1,29 @@
MEMORY
{
text : origin = 0x80003100
}
SECTIONS
{
GROUP:
{
$SECTIONS
.stack ALIGN(0x100):{}
} > text
_stack_addr = (_f_sbss2 + SIZEOF(.sbss2) + $STACKSIZE + 0x7) & ~0x7;
_stack_end = _f_sbss2 + SIZEOF(.sbss2);
_db_stack_addr = (_stack_addr + 0x2000);
_db_stack_end = _stack_addr;
__ArenaLo = (_db_stack_addr + 0x1f) & ~0x1f;
__ArenaHi = 0x81700000;
}
FORCEFILES
{
$FORCEFILES
}
FORCEACTIVE
{
}
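The `$SECTIONS`, `$STACKSIZE`, and `$FORCEFILES` placeholders are template values filled in when the linker script is generated. The `(x + N) & ~N` expressions above are the usual power-of-two align-up idiom; the same arithmetic in Rust, with illustrative values:
```rust
// Round `addr` up to the next multiple of `align` (align must be a power of two):
// add (align - 1), then clear the low bits.
fn align_up(addr: u32, align: u32) -> u32 {
    (addr + align - 1) & !(align - 1)
}

fn main() {
    // _stack_addr rounds up to 8 bytes; __ArenaLo rounds up to 32 bytes.
    assert_eq!(align_up(0x8044_1231, 8), 0x8044_1238);
    assert_eq!(align_up(0x8044_1231, 32), 0x8044_1240);
}
```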

build.rs

@ -311,7 +311,7 @@ fn main() -> Result<()> {
rmp_serde::encode::write(&mut encoder, &Output { symbols, signatures: out })?; rmp_serde::encode::write(&mut encoder, &Output { symbols, signatures: out })?;
let compressed = encoder.finish()?; let compressed = encoder.finish()?;
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
fs::write(out_dir.join("signatures.bin"), &compressed)?; fs::write(out_dir.join("signatures.bin"), compressed)?;
Ok(()) Ok(())
} }

src/analysis/cfa.rs

@ -1,6 +1,6 @@
use std::collections::{BTreeMap, BTreeSet}; use std::collections::{BTreeMap, BTreeSet};
use anyhow::{anyhow, bail, Result}; use anyhow::{bail, Context, Result};
use crate::{ use crate::{
analysis::{ analysis::{
@ -9,7 +9,7 @@ use crate::{
slices::{FunctionSlices, TailCallResult}, slices::{FunctionSlices, TailCallResult},
vm::{BranchTarget, GprValue, StepResult, VM}, vm::{BranchTarget, GprValue, StepResult, VM},
}, },
obj::{ObjInfo, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind}, obj::{ObjInfo, ObjSectionKind, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind},
}; };
#[derive(Debug, Default)] #[derive(Debug, Default)]
@ -29,102 +29,66 @@ impl AnalyzerState {
if end == 0 { if end == 0 {
continue; continue;
} }
if let Some(existing_symbol) = obj let section_index =
.symbols obj.section_for(start..end).context("Failed to locate section for function")?.index;
.iter_mut() obj.add_symbol(
.find(|sym| sym.address == start as u64 && sym.kind == ObjSymbolKind::Function) ObjSymbol {
{ name: format!("fn_{:08X}", start),
let new_size = (end - start) as u64; demangled_name: None,
if !existing_symbol.size_known || existing_symbol.size == 0 { address: start as u64,
existing_symbol.size = new_size; section: Some(section_index),
existing_symbol.size_known = true; size: (end - start) as u64,
} else if existing_symbol.size != new_size { size_known: true,
log::warn!( flags: Default::default(),
"Conflicting size for {}: was {:#X}, now {:#X}", kind: ObjSymbolKind::Function,
existing_symbol.name, align: None,
existing_symbol.size, data_kind: Default::default(),
new_size },
); false,
} )?;
continue;
}
let section = obj
.sections
.iter()
.find(|section| {
(start as u64) >= section.address
&& (end as u64) <= section.address + section.size
})
.ok_or_else(|| {
anyhow!("Failed to locate section for function {:#010X}-{:#010X}", start, end)
})?;
obj.symbols.push(ObjSymbol {
name: format!("fn_{:08X}", start),
demangled_name: None,
address: start as u64,
section: Some(section.index),
size: (end - start) as u64,
size_known: true,
flags: Default::default(),
kind: ObjSymbolKind::Function,
});
} }
for (&addr, &size) in &self.jump_tables { for (&addr, &size) in &self.jump_tables {
let section = obj let section_index = obj
.sections .section_for(addr..addr + size)
.iter() .context("Failed to locate section for jump table")?
.find(|section| { .index;
(addr as u64) >= section.address obj.add_symbol(
&& ((addr + size) as u64) <= section.address + section.size ObjSymbol {
}) name: format!("jumptable_{:08X}", addr),
.ok_or_else(|| anyhow!("Failed to locate section for jump table"))?; demangled_name: None,
if let Some(existing_symbol) = obj address: addr as u64,
.symbols section: Some(section_index),
.iter_mut() size: size as u64,
.find(|sym| sym.address == addr as u64 && sym.kind == ObjSymbolKind::Object) size_known: true,
{ flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
let new_size = size as u64; kind: ObjSymbolKind::Object,
if !existing_symbol.size_known || existing_symbol.size == 0 { align: None,
existing_symbol.size = new_size; data_kind: Default::default(),
existing_symbol.size_known = true; },
// existing_symbol.flags.0 &= ObjSymbolFlags::Global; false,
// existing_symbol.flags.0 |= ObjSymbolFlags::Local; )?;
} else if existing_symbol.size != new_size {
log::warn!(
"Conflicting size for {}: was {:#X}, now {:#X}",
existing_symbol.name,
existing_symbol.size,
new_size
);
}
continue;
}
obj.symbols.push(ObjSymbol {
name: format!("jumptable_{:08X}", addr),
demangled_name: None,
address: addr as u64,
section: Some(section.index),
size: size as u64,
size_known: true,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
kind: ObjSymbolKind::Object,
});
} }
for (&_addr, symbol) in &self.known_symbols { for (&_addr, symbol) in &self.known_symbols {
if let Some(existing_symbol) = obj obj.add_symbol(symbol.clone(), true)?;
.symbols
.iter_mut()
.find(|e| symbol.address == e.address && symbol.kind == e.kind)
{
*existing_symbol = symbol.clone();
continue;
}
obj.symbols.push(symbol.clone());
} }
Ok(()) Ok(())
} }
pub fn detect_functions(&mut self, obj: &ObjInfo) -> Result<()> { pub fn detect_functions(&mut self, obj: &ObjInfo) -> Result<()> {
// Apply known functions from extab
for (&addr, &size) in &obj.known_functions {
self.function_entries.insert(addr);
self.function_bounds.insert(addr, addr + size);
}
// Apply known functions from symbols
for (_, symbol) in obj.symbols.by_kind(ObjSymbolKind::Function) {
self.function_entries.insert(symbol.address as u32);
if symbol.size_known {
self.function_bounds
.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
}
}
// Process known functions first // Process known functions first
let known_functions = self.function_entries.clone(); let known_functions = self.function_entries.clone();
for addr in known_functions { for addr in known_functions {
@ -189,6 +153,7 @@ impl AnalyzerState {
)?; )?;
} }
} }
TailCallResult::Error(e) => return Err(e),
} }
} }
if slices.can_finalize() { if slices.can_finalize() {
@ -249,13 +214,11 @@ impl AnalyzerState {
match self.first_unbounded_function() { match self.first_unbounded_function() {
Some(addr) => { Some(addr) => {
log::trace!("Processing {:#010X}", addr); log::trace!("Processing {:#010X}", addr);
self.process_function_at(&obj, addr)?; self.process_function_at(obj, addr)?;
} }
None => { None => {
if !self.finalize_functions(obj, false)? { if !self.finalize_functions(obj, false)? && !self.detect_new_functions(obj)? {
if !self.detect_new_functions(obj)? { break;
break;
}
} }
} }
} }
@ -291,9 +254,6 @@ impl AnalyzerState {
fn process_function(&mut self, obj: &ObjInfo, start: u32) -> Result<Option<FunctionSlices>> { fn process_function(&mut self, obj: &ObjInfo, start: u32) -> Result<Option<FunctionSlices>> {
let mut slices = FunctionSlices::default(); let mut slices = FunctionSlices::default();
let function_end = self.function_bounds.get(&start).cloned(); let function_end = self.function_bounds.get(&start).cloned();
if start == 0x801FC300 {
log::info!("Processing TRKExceptionHandler");
}
Ok(match slices.analyze(obj, start, start, function_end, &self.function_entries)? { Ok(match slices.analyze(obj, start, start, function_end, &self.function_entries)? {
true => Some(slices), true => Some(slices),
false => None, false => None,
@ -302,27 +262,56 @@ impl AnalyzerState {
fn detect_new_functions(&mut self, obj: &ObjInfo) -> Result<bool> { fn detect_new_functions(&mut self, obj: &ObjInfo) -> Result<bool> {
let mut found_new = false; let mut found_new = false;
let mut iter = self.function_bounds.iter().peekable(); for section in &obj.sections {
while let (Some((&first_begin, &first_end)), Some(&(&second_begin, &second_end))) = if section.kind != ObjSectionKind::Code {
(iter.next(), iter.peek())
{
if first_end == 0 || first_end > second_begin {
continue; continue;
} }
let addr = match skip_alignment(obj, first_end, second_begin) {
Some(addr) => addr, let section_start = section.address as u32;
None => continue, let section_end = (section.address + section.size) as u32;
}; let mut iter = self.function_bounds.range(section_start..section_end).peekable();
if second_begin > addr && self.function_entries.insert(addr) { loop {
log::trace!( match (iter.next(), iter.peek()) {
"Trying function @ {:#010X} (from {:#010X}-{:#010X} <-> {:#010X}-{:#010X})", (Some((&first_begin, &first_end)), Some(&(&second_begin, &second_end))) => {
addr, if first_end == 0 || first_end > second_begin {
first_begin, continue;
first_end, }
second_begin, let addr = match skip_alignment(obj, first_end, second_begin) {
second_end, Some(addr) => addr,
); None => continue,
found_new = true; };
if second_begin > addr && self.function_entries.insert(addr) {
log::trace!(
"Trying function @ {:#010X} (from {:#010X}-{:#010X} <-> {:#010X}-{:#010X})",
addr,
first_begin,
first_end,
second_begin,
second_end,
);
found_new = true;
}
}
(Some((&last_begin, &last_end)), None) => {
if last_end > 0 && last_end < section_end {
let addr = match skip_alignment(obj, last_end, section_end) {
Some(addr) => addr,
None => continue,
};
if addr < section_end && self.function_entries.insert(addr) {
log::debug!(
"Trying function @ {:#010X} (from {:#010X}-{:#010X} <-> {:#010X})",
addr,
last_begin,
last_end,
section_end,
);
found_new = true;
}
}
}
_ => break,
}
} }
} }
Ok(found_new) Ok(found_new)
@ -342,19 +331,15 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
return Ok(ExecCbResult::Continue); return Ok(ExecCbResult::Continue);
} }
StepResult::Illegal => bail!("Illegal instruction @ {:#010X}", ins.addr), StepResult::Illegal => bail!("Illegal instruction @ {:#010X}", ins.addr),
StepResult::Jump(target) => match target { StepResult::Jump(target) => {
BranchTarget::Address(addr) => { if let BranchTarget::Address(addr) = target {
return Ok(ExecCbResult::Jump(addr)); return Ok(ExecCbResult::Jump(addr));
} }
_ => {} }
},
StepResult::Branch(branches) => { StepResult::Branch(branches) => {
for branch in branches { for branch in branches {
match branch.target { if let BranchTarget::Address(addr) = branch.target {
BranchTarget::Address(addr) => { executor.push(addr, branch.vm, false);
executor.push(addr, branch.vm, false);
}
_ => {}
} }
} }
} }

src/analysis/mod.rs

@ -7,7 +7,9 @@ use crate::obj::{ObjInfo, ObjSection, ObjSectionKind};
pub mod cfa; pub mod cfa;
pub mod executor; pub mod executor;
pub mod objects;
pub mod pass; pub mod pass;
pub mod signatures;
pub mod slices; pub mod slices;
pub mod tracker; pub mod tracker;
pub mod vm; pub mod vm;

src/analysis/objects.rs (new file)

@ -0,0 +1,155 @@
use anyhow::Result;
use crate::obj::{
split::is_linker_generated_label, ObjDataKind, ObjInfo, ObjSectionKind, ObjSymbolKind,
};
pub fn detect_object_boundaries(obj: &mut ObjInfo) -> Result<()> {
for section in obj.sections.iter().filter(|s| s.kind != ObjSectionKind::Code) {
let section_start = section.address as u32;
let section_end = (section.address + section.size) as u32;
let mut replace_symbols = vec![];
for (idx, symbol) in obj.symbols.for_range(section_start..section_end) {
let mut symbol = symbol.clone();
if is_linker_generated_label(&symbol.name) {
continue;
}
let expected_size = match symbol.data_kind {
ObjDataKind::Byte => 1,
ObjDataKind::Byte2 => 2,
ObjDataKind::Byte4 | ObjDataKind::Float => 4,
ObjDataKind::Byte8 | ObjDataKind::Double => 8,
_ => 0,
};
if !symbol.size_known {
let next_addr = obj
.symbols
.for_range(symbol.address as u32 + 1..section_end)
.next()
.map_or(section_end, |(_, symbol)| symbol.address as u32);
let new_size = next_addr - symbol.address as u32;
log::debug!("Guessed {} size {:#X}", symbol.name, new_size);
symbol.size = match (new_size, expected_size) {
(..=4, 1) => expected_size,
(2 | 4, 2) => expected_size,
(..=8, 1 | 2 | 4) => {
// alignment to double
if obj.symbols.at_address(next_addr).any(|(_, sym)| sym.data_kind == ObjDataKind::Double)
// If we're at a TU boundary, we can assume it's just padding
|| obj.splits.contains_key(&(symbol.address as u32 + new_size))
{
expected_size
} else {
new_size
}
}
_ => new_size,
} as u64;
symbol.size_known = true;
}
symbol.kind = ObjSymbolKind::Object;
if expected_size > 1 && symbol.size as u32 % expected_size != 0 {
symbol.data_kind = ObjDataKind::Unknown;
}
replace_symbols.push((idx, symbol));
}
for (idx, symbol) in replace_symbols {
obj.symbols.replace(idx, symbol)?;
}
}
Ok(())
}
pub fn detect_strings(obj: &mut ObjInfo) -> Result<()> {
let mut symbols_set = Vec::<(usize, ObjDataKind, usize)>::new();
for section in obj
.sections
.iter()
.filter(|s| matches!(s.kind, ObjSectionKind::Data | ObjSectionKind::ReadOnlyData))
{
enum StringResult {
None,
String { length: usize, terminated: bool },
WString { length: usize, str: String },
}
pub const fn trim_zeroes_end(mut bytes: &[u8]) -> &[u8] {
while let [rest @ .., last] = bytes {
if *last == 0 {
bytes = rest;
} else {
break;
}
}
bytes
}
fn is_string(data: &[u8]) -> StringResult {
let bytes = trim_zeroes_end(data);
if bytes.iter().all(|&c| c.is_ascii_graphic() || c.is_ascii_whitespace()) {
return StringResult::String {
length: bytes.len(),
terminated: data.len() > bytes.len(),
};
}
if bytes.len() % 2 == 0 && data.len() >= bytes.len() + 2 {
// Found at least 2 bytes of trailing 0s, check UTF-16
let mut ok = true;
let mut str = String::new();
for n in std::char::decode_utf16(
bytes.chunks_exact(2).map(|c| u16::from_be_bytes(c.try_into().unwrap())),
) {
match n {
Ok(c) if c.is_ascii_graphic() || c.is_ascii_whitespace() => {
str.push(c);
}
_ => {
ok = false;
break;
}
}
}
if ok {
return StringResult::WString { length: bytes.len(), str };
}
}
StringResult::None
}
for (symbol_idx, symbol) in obj
.symbols
.for_section(section)
.filter(|(_, sym)| sym.data_kind == ObjDataKind::Unknown)
{
let (_section, data) =
obj.section_data(symbol.address as u32, (symbol.address + symbol.size) as u32)?;
match is_string(data) {
StringResult::None => {}
StringResult::String { length, terminated } => {
if length > 0 {
let str = String::from_utf8_lossy(&data[..length]);
log::debug!("Found string '{}' @ {}", str, symbol.name);
symbols_set.push((
symbol_idx,
ObjDataKind::String,
if terminated { length + 1 } else { length },
));
}
}
StringResult::WString { length, str } => {
if length > 0 {
log::debug!("Found wide string '{}' @ {}", str, symbol.name);
symbols_set.push((symbol_idx, ObjDataKind::String16, length + 2));
}
}
}
}
}
for (symbol_idx, data_kind, size) in symbols_set {
let mut symbol = obj.symbols.at(symbol_idx).clone();
log::debug!("Setting {} ({:#010X}) to size {:#X}", symbol.name, symbol.address, size);
symbol.data_kind = data_kind;
symbol.size = size as u64;
symbol.size_known = true;
obj.symbols.replace(symbol_idx, symbol)?;
}
Ok(())
}
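
A minimal usage sketch for the two passes above, assuming they run back to back (the run_data_passes wrapper below is hypothetical, not part of this commit; only detect_object_boundaries, detect_strings, and ObjInfo are taken from it):

use anyhow::Result;

use crate::{
    analysis::objects::{detect_object_boundaries, detect_strings},
    obj::ObjInfo,
};

// Hypothetical wrapper: guess object sizes/kinds first, then classify string
// data among the symbols that are still ObjDataKind::Unknown. The ordering is
// an assumption: detect_strings reads symbol sizes, which
// detect_object_boundaries is responsible for guessing.
fn run_data_passes(obj: &mut ObjInfo) -> Result<()> {
    detect_object_boundaries(obj)?;
    detect_strings(obj)?;
    Ok(())
}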


@@ -38,6 +38,8 @@ impl AnalysisPass for FindTRKInterruptVectorTable {
                    size_known: true,
                    flags: ObjSymbolFlagSet(FlagSet::from(ObjSymbolFlags::Global)),
                    kind: ObjSymbolKind::Unknown,
+                    align: None,
+                    data_kind: Default::default(),
                });
                let end = start + TRK_TABLE_SIZE;
                state.known_symbols.insert(end, ObjSymbol {
@@ -49,19 +51,21 @@ impl AnalysisPass for FindTRKInterruptVectorTable {
                    size_known: true,
                    flags: ObjSymbolFlagSet(FlagSet::from(ObjSymbolFlags::Global)),
                    kind: ObjSymbolKind::Unknown,
+                    align: None,
+                    data_kind: Default::default(),
                });
                return Ok(());
            }
        }
-        log::info!("gTRKInterruptVectorTable not found");
+        log::debug!("gTRKInterruptVectorTable not found");
        Ok(())
    }
}

pub struct FindSaveRestSleds {}

-const SLEDS: [([u8; 4], &'static str, &'static str); 4] = [
+const SLEDS: [([u8; 4], &str, &str); 4] = [
    ([0xd9, 0xcb, 0xff, 0x70], "__save_fpr", "_savefpr_"),
    ([0xc9, 0xcb, 0xff, 0x70], "__restore_fpr", "_restfpr_"),
    ([0x91, 0xcb, 0xff, 0xb8], "__save_gpr", "_savegpr_"),
@@ -77,7 +81,7 @@ impl AnalysisPass for FindSaveRestSleds {
        let (section, data) = obj.section_data(start, 0)?;
        for (needle, func, label) in &SLEDS {
            if data.starts_with(needle) {
-                log::info!("Found {} @ {:#010X}", func, start);
+                log::debug!("Found {} @ {:#010X}", func, start);
                clear_ranges.push(start + 4..start + SLED_SIZE as u32);
                state.known_symbols.insert(start, ObjSymbol {
                    name: func.to_string(),
@@ -88,6 +92,8 @@ impl AnalysisPass for FindSaveRestSleds {
                    size_known: true,
                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
                    kind: ObjSymbolKind::Function,
+                    align: None,
+                    data_kind: Default::default(),
                });
                for i in 14..=31 {
                    let addr = start + (i - 14) * 4;
@@ -100,6 +106,8 @@ impl AnalysisPass for FindSaveRestSleds {
                    size_known: true,
                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
                    kind: ObjSymbolKind::Unknown,
+                    align: None,
+                    data_kind: Default::default(),
                });
            }
        }

src/analysis/signatures.rs (new file, 365 lines)

@@ -0,0 +1,365 @@
use anyhow::{anyhow, Result};
use crate::{
analysis::{cfa::AnalyzerState, read_u32},
obj::{
signatures::{
apply_signature, check_signatures, check_signatures_str, parse_signatures,
FunctionSignature,
},
ObjInfo, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
},
};
const SIGNATURES: &[(&str, &str)] = &[
("__init_registers", include_str!("../../assets/signatures/__init_registers.yml")),
("__init_hardware", include_str!("../../assets/signatures/__init_hardware.yml")),
("__init_data", include_str!("../../assets/signatures/__init_data.yml")),
("__set_debug_bba", include_str!("../../assets/signatures/__set_debug_bba.yml")),
("__OSPSInit", include_str!("../../assets/signatures/__OSPSInit.yml")),
("__OSFPRInit", include_str!("../../assets/signatures/__OSFPRInit.yml")),
("__OSCacheInit", include_str!("../../assets/signatures/__OSCacheInit.yml")),
("DMAErrorHandler", include_str!("../../assets/signatures/DMAErrorHandler.yml")),
("DBInit", include_str!("../../assets/signatures/DBInit.yml")),
("OSInit", include_str!("../../assets/signatures/OSInit.yml")),
("__OSThreadInit", include_str!("../../assets/signatures/__OSThreadInit.yml")),
("__OSInitIPCBuffer", include_str!("../../assets/signatures/__OSInitIPCBuffer.yml")),
("EXIInit", include_str!("../../assets/signatures/EXIInit.yml")),
("EXIGetID", include_str!("../../assets/signatures/EXIGetID.yml")),
("exit", include_str!("../../assets/signatures/exit.yml")),
("_ExitProcess", include_str!("../../assets/signatures/_ExitProcess.yml")),
("__fini_cpp", include_str!("../../assets/signatures/__fini_cpp.yml")),
// ("__destroy_global_chain", include_str!("../../assets/signatures/__destroy_global_chain.yml")),
("InitMetroTRK", include_str!("../../assets/signatures/InitMetroTRK.yml")),
("InitMetroTRKCommTable", include_str!("../../assets/signatures/InitMetroTRKCommTable.yml")),
("OSExceptionInit", include_str!("../../assets/signatures/OSExceptionInit.yml")),
(
"OSDefaultExceptionHandler",
include_str!("../../assets/signatures/OSDefaultExceptionHandler.yml"),
),
("__OSUnhandledException", include_str!("../../assets/signatures/__OSUnhandledException.yml")),
("OSDisableScheduler", include_str!("../../assets/signatures/OSDisableScheduler.yml")),
("__OSReschedule", include_str!("../../assets/signatures/__OSReschedule.yml")),
("__OSInitSystemCall", include_str!("../../assets/signatures/__OSInitSystemCall.yml")),
("OSInitAlarm", include_str!("../../assets/signatures/OSInitAlarm.yml")),
("__OSInitAlarm", include_str!("../../assets/signatures/__OSInitAlarm.yml")),
("__OSEVStart", include_str!("../../assets/signatures/OSExceptionVector.yml")),
("__OSDBINTSTART", include_str!("../../assets/signatures/__OSDBIntegrator.yml")),
("__OSDBJUMPSTART", include_str!("../../assets/signatures/__OSDBJump.yml")),
("SIInit", include_str!("../../assets/signatures/SIInit.yml")),
("SIGetType", include_str!("../../assets/signatures/SIGetType.yml")),
("SISetSamplingRate", include_str!("../../assets/signatures/SISetSamplingRate.yml")),
("SISetXY", include_str!("../../assets/signatures/SISetXY.yml")),
("VIGetTvFormat", include_str!("../../assets/signatures/VIGetTvFormat.yml")),
("DVDInit", include_str!("../../assets/signatures/DVDInit.yml")),
(
"DVDSetAutoFatalMessaging",
include_str!("../../assets/signatures/DVDSetAutoFatalMessaging.yml"),
),
("OSSetArenaLo", include_str!("../../assets/signatures/OSSetArenaLo.yml")),
("OSSetArenaHi", include_str!("../../assets/signatures/OSSetArenaHi.yml")),
("OSSetMEM1ArenaLo", include_str!("../../assets/signatures/OSSetMEM1ArenaLo.yml")),
("OSSetMEM1ArenaHi", include_str!("../../assets/signatures/OSSetMEM1ArenaHi.yml")),
("OSSetMEM2ArenaLo", include_str!("../../assets/signatures/OSSetMEM2ArenaLo.yml")),
("OSSetMEM2ArenaHi", include_str!("../../assets/signatures/OSSetMEM2ArenaHi.yml")),
("__OSInitAudioSystem", include_str!("../../assets/signatures/__OSInitAudioSystem.yml")),
(
"__OSInitMemoryProtection",
include_str!("../../assets/signatures/__OSInitMemoryProtection.yml"),
),
// ("BATConfig", include_str!("../../assets/signatures/BATConfig.yml")), TODO
("ReportOSInfo", include_str!("../../assets/signatures/ReportOSInfo.yml")),
("__check_pad3", include_str!("../../assets/signatures/__check_pad3.yml")),
("OSResetSystem", include_str!("../../assets/signatures/OSResetSystem.yml")),
("OSReturnToMenu", include_str!("../../assets/signatures/OSReturnToMenu.yml")),
("__OSReturnToMenu", include_str!("../../assets/signatures/__OSReturnToMenu.yml")),
("__OSShutdownDevices", include_str!("../../assets/signatures/__OSShutdownDevices.yml")),
("__OSInitSram", include_str!("../../assets/signatures/__OSInitSram.yml")),
("__OSSyncSram", include_str!("../../assets/signatures/__OSSyncSram.yml")),
(
"__OSGetExceptionHandler",
include_str!("../../assets/signatures/__OSGetExceptionHandler.yml"),
),
(
"OSRegisterResetFunction",
include_str!("../../assets/signatures/OSRegisterResetFunction.yml"),
),
(
"OSRegisterShutdownFunction",
include_str!("../../assets/signatures/OSRegisterShutdownFunction.yml"),
),
(
"DecrementerExceptionHandler",
include_str!("../../assets/signatures/DecrementerExceptionHandler.yml"),
),
(
"DecrementerExceptionCallback",
include_str!("../../assets/signatures/DecrementerExceptionCallback.yml"),
),
("__OSInterruptInit", include_str!("../../assets/signatures/__OSInterruptInit.yml")),
("__OSContextInit", include_str!("../../assets/signatures/__OSContextInit.yml")),
("OSSwitchFPUContext", include_str!("../../assets/signatures/OSSwitchFPUContext.yml")),
("OSReport", include_str!("../../assets/signatures/OSReport.yml")),
("TRK_main", include_str!("../../assets/signatures/TRK_main.yml")),
("TRKNubWelcome", include_str!("../../assets/signatures/TRKNubWelcome.yml")),
("TRKInitializeNub", include_str!("../../assets/signatures/TRKInitializeNub.yml")),
(
"TRKInitializeIntDrivenUART",
include_str!("../../assets/signatures/TRKInitializeIntDrivenUART.yml"),
),
("TRKEXICallBack", include_str!("../../assets/signatures/TRKEXICallBack.yml")),
("TRKLoadContext", include_str!("../../assets/signatures/TRKLoadContext.yml")),
("TRKInterruptHandler", include_str!("../../assets/signatures/TRKInterruptHandler.yml")),
("TRKExceptionHandler", include_str!("../../assets/signatures/TRKExceptionHandler.yml")),
("TRKSaveExtended1Block", include_str!("../../assets/signatures/TRKSaveExtended1Block.yml")),
("TRKNubMainLoop", include_str!("../../assets/signatures/TRKNubMainLoop.yml")),
("TRKTargetContinue", include_str!("../../assets/signatures/TRKTargetContinue.yml")),
("TRKSwapAndGo", include_str!("../../assets/signatures/TRKSwapAndGo.yml")),
(
"TRKRestoreExtended1Block",
include_str!("../../assets/signatures/TRKRestoreExtended1Block.yml"),
),
(
"TRKInterruptHandlerEnableInterrupts",
include_str!("../../assets/signatures/TRKInterruptHandlerEnableInterrupts.yml"),
),
("memset", include_str!("../../assets/signatures/memset.yml")),
(
"__msl_runtime_constraint_violation_s",
include_str!("../../assets/signatures/__msl_runtime_constraint_violation_s.yml"),
),
("ClearArena", include_str!("../../assets/signatures/ClearArena.yml")),
("IPCCltInit", include_str!("../../assets/signatures/IPCCltInit.yml")),
("__OSInitSTM", include_str!("../../assets/signatures/__OSInitSTM.yml")),
("IOS_Open", include_str!("../../assets/signatures/IOS_Open.yml")),
("__ios_Ipc2", include_str!("../../assets/signatures/__ios_Ipc2.yml")),
("IPCiProfQueueReq", include_str!("../../assets/signatures/IPCiProfQueueReq.yml")),
("SCInit", include_str!("../../assets/signatures/SCInit.yml")),
("SCReloadConfFileAsync", include_str!("../../assets/signatures/SCReloadConfFileAsync.yml")),
("NANDPrivateOpenAsync", include_str!("../../assets/signatures/NANDPrivateOpenAsync.yml")),
("nandIsInitialized", include_str!("../../assets/signatures/nandIsInitialized.yml")),
("nandOpen", include_str!("../../assets/signatures/nandOpen.yml")),
("nandGenerateAbsPath", include_str!("../../assets/signatures/nandGenerateAbsPath.yml")),
("nandGetHeadToken", include_str!("../../assets/signatures/nandGetHeadToken.yml")),
("ISFS_OpenAsync", include_str!("../../assets/signatures/ISFS_OpenAsync.yml")),
("nandConvertErrorCode", include_str!("../../assets/signatures/nandConvertErrorCode.yml")),
(
"NANDLoggingAddMessageAsync",
include_str!("../../assets/signatures/NANDLoggingAddMessageAsync.yml"),
),
(
"__NANDPrintErrorMessage",
include_str!("../../assets/signatures/__NANDPrintErrorMessage.yml"),
),
("__OSInitNet", include_str!("../../assets/signatures/__OSInitNet.yml")),
("__DVDCheckDevice", include_str!("../../assets/signatures/__DVDCheckDevice.yml")),
("__OSInitPlayTime", include_str!("../../assets/signatures/__OSInitPlayTime.yml")),
("__OSStartPlayRecord", include_str!("../../assets/signatures/__OSStartPlayRecord.yml")),
("NANDInit", include_str!("../../assets/signatures/NANDInit.yml")),
("ISFS_OpenLib", include_str!("../../assets/signatures/ISFS_OpenLib.yml")),
("ESP_GetTitleId", include_str!("../../assets/signatures/ESP_GetTitleId.yml")),
(
"NANDSetAutoErrorMessaging",
include_str!("../../assets/signatures/NANDSetAutoErrorMessaging.yml"),
),
("__DVDFSInit", include_str!("../../assets/signatures/__DVDFSInit.yml")),
("__DVDClearWaitingQueue", include_str!("../../assets/signatures/__DVDClearWaitingQueue.yml")),
("__DVDInitWA", include_str!("../../assets/signatures/__DVDInitWA.yml")),
("__DVDLowSetWAType", include_str!("../../assets/signatures/__DVDLowSetWAType.yml")),
("__fstLoad", include_str!("../../assets/signatures/__fstLoad.yml")),
("DVDReset", include_str!("../../assets/signatures/DVDReset.yml")),
("DVDLowReset", include_str!("../../assets/signatures/DVDLowReset.yml")),
("DVDReadDiskID", include_str!("../../assets/signatures/DVDReadDiskID.yml")),
("stateReady", include_str!("../../assets/signatures/stateReady.yml")),
("DVDLowWaitCoverClose", include_str!("../../assets/signatures/DVDLowWaitCoverClose.yml")),
("__DVDStoreErrorCode", include_str!("../../assets/signatures/__DVDStoreErrorCode.yml")),
("DVDLowStopMotor", include_str!("../../assets/signatures/DVDLowStopMotor.yml")),
("DVDGetDriveStatus", include_str!("../../assets/signatures/DVDGetDriveStatus.yml")),
("printf", include_str!("../../assets/signatures/printf.yml")),
("sprintf", include_str!("../../assets/signatures/sprintf.yml")),
("vprintf", include_str!("../../assets/signatures/vprintf.yml")),
("vsprintf", include_str!("../../assets/signatures/vsprintf.yml")),
("vsnprintf", include_str!("../../assets/signatures/vsnprintf.yml")),
("__pformatter", include_str!("../../assets/signatures/__pformatter.yml")),
("longlong2str", include_str!("../../assets/signatures/longlong2str.yml")),
("__mod2u", include_str!("../../assets/signatures/__mod2u.yml")),
("__FileWrite", include_str!("../../assets/signatures/__FileWrite.yml")),
("fwrite", include_str!("../../assets/signatures/fwrite.yml")),
("__fwrite", include_str!("../../assets/signatures/__fwrite.yml")),
("__stdio_atexit", include_str!("../../assets/signatures/__stdio_atexit.yml")),
("__StringWrite", include_str!("../../assets/signatures/__StringWrite.yml")),
];
const POST_SIGNATURES: &[(&str, &str)] = &[
("RSOStaticLocateObject", include_str!("../../assets/signatures/RSOStaticLocateObject.yml")),
// ("GXInit", include_str!("../../assets/signatures/GXInit.yml")),
("__register_fragment", include_str!("../../assets/signatures/__register_fragment.yml")),
];
pub fn apply_signatures(obj: &mut ObjInfo) -> Result<()> {
let entry = obj.entry as u32;
if let Some(signature) =
check_signatures_str(obj, entry, include_str!("../../assets/signatures/__start.yml"))?
{
apply_signature(obj, entry, &signature)?;
}
for &(name, sig_str) in SIGNATURES {
if let Some((_, symbol)) = obj.symbols.by_name(name)? {
let addr = symbol.address as u32;
if let Some(signature) = check_signatures_str(obj, addr, sig_str)? {
apply_signature(obj, addr, &signature)?;
}
}
}
if let Some((_, symbol)) = obj.symbols.by_name("__init_user")? {
// __init_user can be overridden, but we can still look for __init_cpp from it
let mut analyzer = AnalyzerState::default();
analyzer.process_function_at(obj, symbol.address as u32)?;
for addr in analyzer.function_entries {
if let Some(signature) = check_signatures_str(
obj,
addr,
include_str!("../../assets/signatures/__init_cpp.yml"),
)? {
apply_signature(obj, addr, &signature)?;
break;
}
}
}
if let Some((_, symbol)) = obj.symbols.by_name("_ctors")? {
// First entry of ctors is __init_cpp_exceptions
let section = obj.section_at(symbol.address as u32)?;
let target = read_u32(&section.data, symbol.address as u32, section.address as u32)
.ok_or_else(|| anyhow!("Failed to read _ctors data"))?;
if target != 0 {
if let Some(signature) = check_signatures_str(
obj,
target,
include_str!("../../assets/signatures/__init_cpp_exceptions.yml"),
)? {
let address = symbol.address;
let section_index = section.index;
apply_signature(obj, target, &signature)?;
obj.add_symbol(
ObjSymbol {
name: "__init_cpp_exceptions_reference".to_string(),
demangled_name: None,
address,
section: Some(section_index),
size: 4,
size_known: true,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
true,
)?;
if obj.split_for(address as u32).is_none() {
obj.add_split(address as u32, ObjSplit {
unit: "__init_cpp_exceptions.cpp".to_string(),
end: address as u32 + 4,
align: None,
common: false,
});
}
}
}
}
if let Some((_, symbol)) = obj.symbols.by_name("_dtors")? {
let section = obj.section_at(symbol.address as u32)?;
let address = symbol.address;
let section_address = section.address;
let section_index = section.index;
// First entry of dtors is __destroy_global_chain
let target = read_u32(&section.data, address as u32, section_address as u32)
.ok_or_else(|| anyhow!("Failed to read _dtors data"))?;
let target2 = read_u32(&section.data, address as u32 + 4, section_address as u32)
.ok_or_else(|| anyhow!("Failed to read _dtors data"))?;
let mut target_ok = false;
let mut target2_ok = false;
if target != 0 {
if let Some(signature) = check_signatures_str(
obj,
target,
include_str!("../../assets/signatures/__destroy_global_chain.yml"),
)? {
apply_signature(obj, target, &signature)?;
obj.add_symbol(
ObjSymbol {
name: "__destroy_global_chain_reference".to_string(),
demangled_name: None,
address,
section: Some(section_index),
size: 4,
size_known: true,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
true,
)?;
target_ok = true;
}
}
// Second entry of dtors is __fini_cpp_exceptions
if target2 != 0 {
if let Some(signature) = check_signatures_str(
obj,
target2,
include_str!("../../assets/signatures/__fini_cpp_exceptions.yml"),
)? {
apply_signature(obj, target2, &signature)?;
obj.add_symbol(
ObjSymbol {
name: "__fini_cpp_exceptions_reference".to_string(),
demangled_name: None,
address: address + 4,
section: Some(section_index),
size: 4,
size_known: true,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
true,
)?;
target2_ok = true;
}
}
if target_ok && target2_ok && obj.split_for(address as u32).is_none() {
obj.add_split(address as u32, ObjSplit {
unit: "__init_cpp_exceptions.cpp".to_string(),
end: address as u32 + 8,
align: None,
common: false,
});
}
}
Ok(())
}
pub fn apply_signatures_post(obj: &mut ObjInfo) -> Result<()> {
log::info!("Checking post CFA signatures...");
for &(_name, sig_str) in POST_SIGNATURES {
let signatures = parse_signatures(sig_str)?;
let mut iter = obj.symbols.by_kind(ObjSymbolKind::Function);
let opt = loop {
let Some((_, symbol)) = iter.next() else {
break Option::<(u32, FunctionSignature)>::None;
};
if let Some(signature) = check_signatures(obj, symbol.address as u32, &signatures)? {
break Some((symbol.address as u32, signature));
}
};
if let Some((addr, signature)) = opt {
drop(iter);
apply_signature(obj, addr, &signature)?;
break;
}
}
log::info!("Done!");
Ok(())
}
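
A rough sequencing sketch for these entry points, under the assumption that signature matching runs both before and after control-flow analysis (the analyze driver below is hypothetical; apply_signatures, apply_signatures_post, AnalyzerState::default, process_function_at, and obj.entry are the only pieces taken from this commit):

use anyhow::Result;

use crate::{
    analysis::{
        cfa::AnalyzerState,
        signatures::{apply_signatures, apply_signatures_post},
    },
    obj::ObjInfo,
};

// Hypothetical driver: name well-known SDK/runtime functions first, run CFA
// from the entry point (in a real pipeline its results would be applied back
// to obj), then match the POST_SIGNATURES that want the discovered function
// list, e.g. RSOStaticLocateObject.
fn analyze(obj: &mut ObjInfo) -> Result<()> {
    apply_signatures(obj)?;
    let mut state = AnalyzerState::default();
    state.process_function_at(obj, obj.entry as u32)?;
    apply_signatures_post(obj)?;
    Ok(())
}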


@@ -35,6 +35,7 @@ pub enum TailCallResult {
    Not,
    Is,
    Possible,
+    Error(anyhow::Error),
}

type BlockRange = Range<u32>;
@@ -137,7 +138,7 @@ impl FunctionSlices {
            .with_context(|| format!("While processing {:#010X}", function_start))?;
        self.check_epilogue(section, ins)
            .with_context(|| format!("While processing {:#010X}", function_start))?;
-        if !self.has_conditional_blr && is_conditional_blr(&ins) {
+        if !self.has_conditional_blr && is_conditional_blr(ins) {
            self.has_conditional_blr = true;
        }
        if !self.has_rfi && ins.op == Opcode::Rfi {
@@ -351,13 +352,15 @@ impl FunctionSlices {
        }

        let end = self.end();
-        if let Ok(section) = obj.section_at(end) {
-            // FIXME this is real bad
-            if !self.has_conditional_blr {
-                if let Some(ins) = disassemble(&section, end - 4) {
-                    if ins.op == Opcode::B {
-                        if self.function_references.contains(&ins.branch_dest().unwrap()) {
-                            for (_, branches) in &self.branches {
+        match (obj.section_at(end), obj.section_at(end - 4)) {
+            (Ok(section), Ok(other_section)) if section.index == other_section.index => {
+                // FIXME this is real bad
+                if !self.has_conditional_blr {
+                    if let Some(ins) = disassemble(section, end - 4) {
+                        if ins.op == Opcode::B
+                            && self.function_references.contains(&ins.branch_dest().unwrap())
+                        {
+                            for branches in self.branches.values() {
                                if branches.len() > 1
                                    && branches.contains(self.blocks.last_key_value().unwrap().0)
                                {
@@ -367,29 +370,28 @@ impl FunctionSlices {
                            }
                        }
                    }
-                }

                // MWCC optimization sometimes leaves an unreachable blr
                // after generating a conditional blr in the function.
-                if self.has_conditional_blr {
-                    if matches!(disassemble(&section, end - 4), Some(ins) if !ins.is_blr())
-                        && matches!(disassemble(&section, end), Some(ins) if ins.is_blr())
+                if self.has_conditional_blr
+                    && matches!(disassemble(section, end - 4), Some(ins) if !ins.is_blr())
+                    && matches!(disassemble(section, end), Some(ins) if ins.is_blr())
                    && !known_functions.contains(&end)
                {
                    log::trace!("Found trailing blr @ {:#010X}, merging with function", end);
                    self.blocks.insert(end, end + 4);
                }
-                }

                // Some functions with rfi also include a trailing nop
-                if self.has_rfi {
-                    if matches!(disassemble(&section, end), Some(ins) if is_nop(&ins))
+                if self.has_rfi
+                    && matches!(disassemble(section, end), Some(ins) if is_nop(&ins))
                    && !known_functions.contains(&end)
                {
                    log::trace!("Found trailing nop @ {:#010X}, merging with function", end);
                    self.blocks.insert(end, end + 4);
                }
            }
+            _ => {}
        }

        self.finalized = true;
@@ -417,6 +419,14 @@ impl FunctionSlices {
        if addr < function_start {
            return TailCallResult::Is;
        }
+        // If the jump target is in a different section, known tail call.
+        let section = match obj.section_at(function_start) {
+            Ok(section) => section,
+            Err(e) => return TailCallResult::Error(e),
+        };
+        if !section.contains(addr) {
+            return TailCallResult::Is;
+        }
        // If the jump target has 0'd padding before it, known tail call.
        if matches!(obj.section_data(addr - 4, addr), Ok((_, data)) if data == [0u8; 4]) {
            return TailCallResult::Is;
@@ -428,15 +438,16 @@ impl FunctionSlices {
        }
        // If jump target is known to be a function, or there's a function in between
        // this and the jump target, known tail call.
-        log::trace!("Checking {:#010X}..={:#010X}", function_start + 4, addr);
        if self.function_references.range(function_start + 4..=addr).next().is_some()
            || known_functions.range(function_start + 4..=addr).next().is_some()
        {
            return TailCallResult::Is;
        }
        // Perform CFA on jump target to determine more
-        let mut slices = FunctionSlices::default();
-        slices.function_references = self.function_references.clone();
+        let mut slices = FunctionSlices {
+            function_references: self.function_references.clone(),
+            ..Default::default()
+        };
        if let Ok(result) =
            slices.analyze(obj, addr, function_start, Some(function_end), known_functions)
        {


@@ -1,5 +1,5 @@
use std::{
-    collections::{BTreeMap, BTreeSet},
+    collections::{btree_map::Entry, BTreeMap, BTreeSet},
    mem::take,
};
@@ -12,8 +12,10 @@ use crate::{
        uniq_jump_table_entries,
        vm::{is_store_op, BranchTarget, GprValue, StepResult, VM},
    },
-    obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol, ObjSymbolKind},
-    util::nested::NestedVec,
+    obj::{
+        ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol,
+        ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
+    },
};

#[derive(Debug, Copy, Clone)]
@@ -99,80 +101,38 @@ impl Tracker {
        Ok(())
    }

-    // fn update_stack_address(&mut self, addr: u32) {
-    //     if let Some(db_stack_addr) = self.db_stack_addr {
-    //         if db_stack_addr == addr {
-    //             return;
-    //         }
-    //     }
-    //     if let Some(stack_addr) = self.stack_address {
-    //         if stack_addr != addr {
-    //             log::error!("Stack address overridden from {:#010X} to {:#010X}", stack_addr, addr);
-    //             return;
-    //         }
-    //     }
-    //     log::debug!("Located stack address: {:08X}", addr);
-    //     self.stack_address = Some(addr);
-    //     let db_stack_addr = addr + 0x2000;
-    //     self.db_stack_addr = Some(db_stack_addr);
-    //     self.arena_lo = Some((db_stack_addr + 0x1F) & !0x1F);
-    //     // __ArenaHi is fixed (until it isn't?)
-    //     self.arena_hi = Some(0x81700000);
-    //     log::debug!("_stack_addr: {:#010X}", addr);
-    //     log::debug!("_stack_end: {:#010X}", self.stack_end.unwrap());
-    //     log::debug!("_db_stack_addr: {:#010X}", db_stack_addr);
-    //     log::debug!("__ArenaLo: {:#010X}", self.arena_lo.unwrap());
-    //     log::debug!("__ArenaHi: {:#010X}", self.arena_hi.unwrap());
-    // }

    fn process_code(&mut self, obj: &ObjInfo) -> Result<()> {
-        let mut symbol_map = BTreeMap::new();
+        self.process_function_by_address(obj, obj.entry as u32)?;
        for section in obj.sections.iter().filter(|s| s.kind == ObjSectionKind::Code) {
-            symbol_map.append(&mut obj.build_symbol_map(section.index)?);
-        }
-        self.process_function_by_address(obj, &symbol_map, obj.entry as u32)?;
-        'outer: for (&addr, symbols) in &symbol_map {
-            if self.processed_functions.contains(&addr) {
-                continue;
-            }
-            self.processed_functions.insert(addr);
-            for &symbol_idx in symbols {
-                let symbol = &obj.symbols[symbol_idx];
-                if symbol.kind == ObjSymbolKind::Function && symbol.size_known {
-                    self.process_function(obj, symbol)?;
-                    continue 'outer;
+            for (_, symbol) in obj
+                .symbols
+                .for_range(section.address as u32..(section.address + section.size) as u32)
+                .filter(|(_, symbol)| symbol.kind == ObjSymbolKind::Function && symbol.size_known)
+            {
+                let addr = symbol.address as u32;
+                if !self.processed_functions.insert(addr) {
+                    continue;
                }
+                self.process_function(obj, symbol)?;
            }
        }
-        // Special handling for gTRKInterruptVectorTable
-        // TODO
-        // if let (Some(trk_interrupt_table), Some(trk_interrupt_vector_table_end)) = (
-        //     obj.symbols.iter().find(|sym| sym.name == "gTRKInterruptVectorTable"),
-        //     obj.symbols.iter().find(|sym| sym.name == "gTRKInterruptVectorTableEnd"),
-        // ) {}
        Ok(())
    }

-    fn process_function_by_address(
-        &mut self,
-        obj: &ObjInfo,
-        symbol_map: &BTreeMap<u32, Vec<usize>>,
-        addr: u32,
-    ) -> Result<()> {
+    fn process_function_by_address(&mut self, obj: &ObjInfo, addr: u32) -> Result<()> {
        if self.processed_functions.contains(&addr) {
            return Ok(());
        }
        self.processed_functions.insert(addr);
-        if let Some(symbols) = symbol_map.get(&addr) {
-            for &symbol_idx in symbols {
-                let symbol = &obj.symbols[symbol_idx];
-                if symbol.kind == ObjSymbolKind::Function && symbol.size_known {
-                    self.process_function(obj, symbol)?;
-                    return Ok(());
-                }
-            }
+        if let Some((_, symbol)) = obj
+            .symbols
+            .at_address(addr)
+            .find(|(_, symbol)| symbol.kind == ObjSymbolKind::Function && symbol.size_known)
+        {
+            self.process_function(obj, symbol)?;
+        } else {
+            log::warn!("Failed to locate function symbol @ {:#010X}", addr);
        }
-        log::warn!("Failed to locate function symbol @ {:#010X}", addr);
        Ok(())
    }
@@ -189,12 +149,9 @@ impl Tracker {
        match result {
            StepResult::Continue => {
-                // if ins.addr == 0x8000ed0c || ins.addr == 0x8000ed08 || ins.addr == 0x8000ca50 {
-                //     println!("ok");
-                // }
                match ins.op {
-                    // addi rD, rA, SIMM
                    Opcode::Addi | Opcode::Addic | Opcode::Addic_ => {
+                        // addi rD, rA, SIMM
                        let source = ins.field_rA();
                        let target = ins.field_rD();
                        if let GprValue::Constant(value) = vm.gpr[target].value {
@@ -224,8 +181,8 @@ impl Tracker {
                            }
                        }
                    }
-                    // ori rA, rS, UIMM
                    Opcode::Ori => {
+                        // ori rA, rS, UIMM
                        let target = ins.field_rA();
                        if let GprValue::Constant(value) = vm.gpr[target].value {
                            if self.is_valid_address(obj, ins.addr, value) {
@@ -416,6 +373,11 @@ impl Tracker {
        if self.ignore_addresses.contains(&addr) {
            return false;
        }
+        if let Some((&start, &end)) = obj.blocked_ranges.range(..=from).last() {
+            if from >= start && from < end {
+                return false;
+            }
+        }
        if self.known_relocations.contains(&from) {
            return true;
        }
@@ -432,11 +394,9 @@ impl Tracker {
        // if addr > 0x80000000 && addr < 0x80003100 {
        //     return true;
        // }
-        for section in &obj.sections {
-            if addr >= section.address as u32 && addr <= (section.address + section.size) as u32 {
-                // References to code sections will never be unaligned
-                return section.kind != ObjSectionKind::Code || addr & 3 == 0;
-            }
+        if let Ok(section) = obj.section_at(addr) {
+            // References to code sections will never be unaligned
+            return section.kind != ObjSectionKind::Code || addr & 3 == 0;
        }
        false
    }
@@ -451,16 +411,16 @@ impl Tracker {
            return None;
        }
        // HACK for RSOStaticLocateObject
-        for section in &obj.sections {
-            if addr == section.address as u32 {
-                let name = format!("_f_{}", section.name.trim_start_matches('.'));
-                return Some(generate_special_symbol(obj, addr, &name));
-            }
-        }
+        // for section in &obj.sections {
+        //     if addr == section.address as u32 {
+        //         let name = format!("_f_{}", section.name.trim_start_matches('.'));
+        //         return generate_special_symbol(obj, addr, &name).ok();
+        //     }
+        // }
        let mut check_symbol = |opt: Option<u32>, name: &str| -> Option<usize> {
            if let Some(value) = opt {
                if addr == value {
-                    return Some(generate_special_symbol(obj, value, name));
+                    return generate_special_symbol(obj, value, name).ok();
                }
            }
            None
@@ -475,11 +435,22 @@ impl Tracker {
    }

    pub fn apply(&self, obj: &mut ObjInfo, replace: bool) -> Result<()> {
+        fn apply_section_name(section: &mut ObjSection, name: &str) {
+            let module_id = if let Some((_, b)) = section.name.split_once(':') {
+                b.parse::<u32>().unwrap_or(0)
+            } else {
+                0
+            };
+            let new_name =
+                if module_id == 0 { name.to_string() } else { format!("{}:{}", name, module_id) };
+            log::debug!("Renaming {} to {}", section.name, new_name);
+            section.name = new_name;
+        }
        for section in &mut obj.sections {
            if !section.section_known {
                if section.kind == ObjSectionKind::Code {
-                    log::info!("Renaming {} to .text", section.name);
-                    section.name = ".text".to_string();
+                    apply_section_name(section, ".text");
                    continue;
                }
                let start = section.address as u32;
@@ -487,39 +458,32 @@ impl Tracker {
                if self.sda_to.range(start..end).next().is_some() {
                    if self.stores_to.range(start..end).next().is_some() {
                        if section.kind == ObjSectionKind::Bss {
-                            log::info!("Renaming {} to .sbss", section.name);
-                            section.name = ".sbss".to_string();
+                            apply_section_name(section, ".sbss");
                        } else {
-                            log::info!("Renaming {} to .sdata", section.name);
-                            section.name = ".sdata".to_string();
+                            apply_section_name(section, ".sdata");
                        }
                    } else if section.kind == ObjSectionKind::Bss {
-                        log::info!("Renaming {} to .sbss2", section.name);
-                        section.name = ".sbss2".to_string();
+                        apply_section_name(section, ".sbss2");
                    } else {
-                        log::info!("Renaming {} to .sdata2", section.name);
-                        section.name = ".sdata2".to_string();
+                        apply_section_name(section, ".sdata2");
                        section.kind = ObjSectionKind::ReadOnlyData;
                    }
                } else if self.hal_to.range(start..end).next().is_some() {
                    if section.kind == ObjSectionKind::Bss {
-                        log::info!("Renaming {} to .bss", section.name);
-                        section.name = ".bss".to_string();
+                        apply_section_name(section, ".bss");
                    } else if self.stores_to.range(start..end).next().is_some() {
-                        log::info!("Renaming {} to .data", section.name);
-                        section.name = ".data".to_string();
+                        apply_section_name(section, ".data");
                    } else {
-                        log::info!("Renaming {} to .rodata", section.name);
-                        section.name = ".rodata".to_string();
+                        apply_section_name(section, ".rodata");
                        section.kind = ObjSectionKind::ReadOnlyData;
                    }
                }
            }
        }
-        let mut symbol_maps = Vec::new();
+        let mut relocation_maps = Vec::new();
        for section in &obj.sections {
-            symbol_maps.push(obj.build_symbol_map(section.index)?);
+            relocation_maps.push(section.build_relocation_map()?);
        }

        for (addr, reloc) in &self.relocations {
@@ -533,6 +497,18 @@ impl Tracker {
                Relocation::Rel24(v) => (ObjRelocKind::PpcRel24, v),
                Relocation::Absolute(v) => (ObjRelocKind::Absolute, v),
            };
+            let data_kind = self
+                .data_types
+                .get(&target)
+                .map(|dt| match dt {
+                    DataKind::Unknown => ObjDataKind::Unknown,
+                    DataKind::Word => ObjDataKind::Byte4,
+                    DataKind::Half => ObjDataKind::Byte2,
+                    DataKind::Byte => ObjDataKind::Byte,
+                    DataKind::Float => ObjDataKind::Float,
+                    DataKind::Double => ObjDataKind::Double,
+                })
+                .unwrap_or_default();
            let (target_symbol, addend) =
                if let Some(symbol) = self.special_symbol(obj, target, reloc_kind) {
                    (symbol, 0)
@@ -544,16 +520,15 @@ impl Tracker {
                        None => continue,
                    };
                    // Try to find a previous sized symbol that encompasses the target
-                    let sym_map = &mut symbol_maps[target_section.index];
                    let target_symbol = {
                        let mut result = None;
-                        for (_addr, symbol_idxs) in sym_map.range(..=target).rev() {
+                        for (_addr, symbol_idxs) in obj.symbols.indexes_for_range(..=target).rev() {
                            let symbol_idx = if symbol_idxs.len() == 1 {
                                symbol_idxs.first().cloned().unwrap()
                            } else {
-                                let mut symbol_idxs = symbol_idxs.clone();
+                                let mut symbol_idxs = symbol_idxs.to_vec();
                                symbol_idxs.sort_by_key(|&symbol_idx| {
-                                    let symbol = &obj.symbols[symbol_idx];
+                                    let symbol = obj.symbols.at(symbol_idx);
                                    let mut rank = match symbol.kind {
                                        ObjSymbolKind::Function | ObjSymbolKind::Object => {
                                            match reloc_kind {
@@ -589,7 +564,7 @@ impl Tracker {
                                    None => continue,
                                }
                            };
-                            let symbol = &obj.symbols[symbol_idx];
+                            let symbol = obj.symbols.at(symbol_idx);
                            if symbol.address == target as u64 {
                                result = Some(symbol_idx);
                                break;
@@ -604,12 +579,20 @@ impl Tracker {
                        result
                    };
                    if let Some(symbol_idx) = target_symbol {
-                        let symbol = &obj.symbols[symbol_idx];
-                        (symbol_idx, target as i64 - symbol.address as i64)
+                        let symbol = obj.symbols.at(symbol_idx);
+                        let symbol_address = symbol.address;
+                        // TODO meh
+                        if data_kind != ObjDataKind::Unknown
+                            && symbol.data_kind == ObjDataKind::Unknown
+                            && symbol_address as u32 == target
+                        {
+                            obj.symbols
+                                .replace(symbol_idx, ObjSymbol { data_kind, ..symbol.clone() })?;
+                        }
+                        (symbol_idx, target as i64 - symbol_address as i64)
                    } else {
                        // Create a new label
-                        let symbol_idx = obj.symbols.len();
-                        obj.symbols.push(ObjSymbol {
+                        let symbol_idx = obj.symbols.add_direct(ObjSymbol {
                            name: format!("lbl_{:08X}", target),
                            demangled_name: None,
                            address: target as u64,
@@ -618,8 +601,9 @@ impl Tracker {
                            size_known: false,
                            flags: Default::default(),
                            kind: Default::default(),
-                        });
-                        sym_map.nested_push(target, symbol_idx);
+                            align: None,
+                            data_kind,
+                        })?;
                        (symbol_idx, 0)
                    }
                };
@@ -636,25 +620,35 @@ impl Tracker {
                    reloc
                ),
            };
-            match section.relocations.iter_mut().find(|r| r.address as u32 == addr) {
-                Some(v) => {
-                    let iter_symbol = &obj.symbols[v.target_symbol];
-                    let reloc_symbol = &obj.symbols[reloc.target_symbol];
-                    if iter_symbol.address as i64 + v.addend
-                        != reloc_symbol.address as i64 + reloc.addend
-                    {
-                        bail!(
-                            "Conflicting relocations (target {:#010X}): {:#010X?} != {:#010X?}",
-                            target,
-                            v,
-                            reloc
-                        );
-                    }
-                    if replace {
-                        *v = reloc;
+            let reloc_map = &mut relocation_maps[section.index];
+            match reloc_map.entry(addr) {
+                Entry::Vacant(e) => {
+                    e.insert(section.relocations.len());
+                    section.relocations.push(reloc);
+                }
+                Entry::Occupied(e) => {
+                    let reloc_symbol = obj.symbols.at(reloc.target_symbol);
+                    if reloc_symbol.name != "_unresolved" {
+                        let v = &mut section.relocations[*e.get()];
+                        let iter_symbol = obj.symbols.at(v.target_symbol);
+                        if iter_symbol.address as i64 + v.addend
+                            != reloc_symbol.address as i64 + reloc.addend
+                        {
+                            bail!(
+                                "Conflicting relocations (target {:#010X}): {:#010X?} ({}) != {:#010X?} ({})",
+                                target,
+                                v,
+                                iter_symbol.name,
+                                reloc,
+                                reloc_symbol.name
+                            );
+                        }
+                        if replace {
+                            *v = reloc;
+                        }
                    }
                }
-                None => section.relocations.push(reloc),
            }
        }
        Ok(())
@@ -716,17 +710,16 @@ fn data_kind_from_op(op: Opcode) -> DataKind {
    }
}

-fn generate_special_symbol(obj: &mut ObjInfo, addr: u32, name: &str) -> usize {
-    if let Some((symbol_idx, _)) =
-        obj.symbols.iter().enumerate().find(|&(_, symbol)| symbol.name == name)
-    {
-        return symbol_idx;
-    }
-    let symbol_idx = obj.symbols.len();
-    obj.symbols.push(ObjSymbol {
-        name: name.to_string(),
-        address: addr as u64,
-        ..Default::default()
-    });
-    symbol_idx
-}
+fn generate_special_symbol(obj: &mut ObjInfo, addr: u32, name: &str) -> Result<usize> {
+    obj.add_symbol(
+        ObjSymbol {
+            name: name.to_string(),
+            address: addr as u64,
+            size: 0,
+            size_known: true,
+            flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
+            ..Default::default()
+        },
+        true,
+    )
+}


@@ -162,8 +162,8 @@ impl VM {
            Opcode::Illegal => {
                return StepResult::Illegal;
            }
-            // add rD, rA, rB
            Opcode::Add => {
+                // add rD, rA, rB
                let left = self.gpr[ins.field_rA()].value;
                let right = self.gpr[ins.field_rB()].value;
                let value = match (left, right) {
@@ -174,8 +174,8 @@ impl VM {
                };
                self.gpr[ins.field_rD()].set_direct(value);
            }
-            // addis rD, rA, SIMM
            Opcode::Addis => {
+                // addis rD, rA, SIMM
                let left = if ins.field_rA() == 0 {
                    GprValue::Constant(0)
                } else {
@@ -194,10 +194,10 @@ impl VM {
                    self.gpr[ins.field_rD()].set_direct(value);
                }
            }
-            // addi rD, rA, SIMM
-            // addic rD, rA, SIMM
-            // addic. rD, rA, SIMM
            Opcode::Addi | Opcode::Addic | Opcode::Addic_ => {
+                // addi rD, rA, SIMM
+                // addic rD, rA, SIMM
+                // addic. rD, rA, SIMM
                let left = if ins.field_rA() == 0 && ins.op == Opcode::Addi {
                    GprValue::Constant(0)
                } else {
@@ -216,8 +216,8 @@ impl VM {
                    self.gpr[ins.field_rD()].set_lo(value, ins.addr, self.gpr[ins.field_rA()]);
                }
            }
-            // ori rA, rS, UIMM
            Opcode::Ori => {
+                // ori rA, rS, UIMM
                let value = match self.gpr[ins.field_rS()].value {
                    GprValue::Constant(value) => {
                        GprValue::Constant(value | ins.field_uimm() as u32)
@@ -226,8 +226,8 @@ impl VM {
                };
                self.gpr[ins.field_rA()].set_lo(value, ins.addr, self.gpr[ins.field_rS()]);
            }
-            // or rA, rS, rB
            Opcode::Or => {
+                // or rA, rS, rB
                if ins.field_rS() == ins.field_rB() {
                    // Register copy
                    self.gpr[ins.field_rA()] = self.gpr[ins.field_rS()];
@@ -428,11 +428,8 @@ impl VM {
            }
            _ => {
                for field in ins.defs() {
-                    match field.argument() {
-                        Some(Argument::GPR(GPR(reg))) => {
-                            self.gpr[reg as usize].set_direct(GprValue::Unknown);
-                        }
-                        _ => {}
+                    if let Some(Argument::GPR(GPR(reg))) = field.argument() {
+                        self.gpr[reg as usize].set_direct(GprValue::Unknown);
                    }
                }
            }


@@ -1,7 +1,7 @@
use std::{
    collections::{btree_map::Entry, BTreeMap},
    fs::File,
-    io::{BufRead, BufWriter, Write},
+    io::{BufWriter, Write},
    path::PathBuf,
};
@@ -9,7 +9,7 @@ use anyhow::{anyhow, bail, Result};
use argh::FromArgs;
use object::{Object, ObjectSymbol, SymbolScope};

-use crate::util::file::{buf_reader, map_file};
+use crate::util::file::{map_file, process_rsp};

#[derive(FromArgs, PartialEq, Debug)]
/// Commands for processing static libraries.
@@ -45,25 +45,7 @@ pub fn run(args: Args) -> Result<()> {
fn create(args: CreateArgs) -> Result<()> {
    // Process response files (starting with '@')
-    let mut files = Vec::with_capacity(args.files.len());
-    for path in args.files {
-        let path_str =
-            path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
-        match path_str.strip_prefix('@') {
-            Some(rsp_file) => {
-                let reader = buf_reader(rsp_file)?;
-                for result in reader.lines() {
-                    let line = result?;
-                    if !line.is_empty() {
-                        files.push(PathBuf::from(line));
-                    }
-                }
-            }
-            None => {
-                files.push(path);
-            }
-        }
-    }
+    let files = process_rsp(&args.files)?;

    // Build identifiers & symbol table
    let mut identifiers = Vec::with_capacity(files.len());


@@ -1,11 +1,10 @@
use std::{
-    collections::BTreeMap,
+    collections::{hash_map, BTreeMap, HashMap},
    fs,
    fs::{DirBuilder, File},
    io::{BufRead, BufWriter, Write},
    path::{Path, PathBuf},
};
-use std::collections::{hash_map, HashMap};

use anyhow::{anyhow, bail, Context, Result};
use argh::FromArgs;
@@ -13,25 +12,24 @@ use argh::FromArgs;
use crate::{
    analysis::{
        cfa::AnalyzerState,
+        objects::{detect_object_boundaries, detect_strings},
        pass::{AnalysisPass, FindSaveRestSleds, FindTRKInterruptVectorTable},
-        read_u32,
+        signatures::{apply_signatures, apply_signatures_post},
        tracker::Tracker,
    },
    obj::{
-        signatures::{apply_signature, check_signatures, check_signatures_str, parse_signatures},
-        split::split_obj,
+        split::{split_obj, update_splits},
        ObjInfo, ObjRelocKind, ObjSectionKind, ObjSymbolKind,
    },
    util::{
        asm::write_asm,
-        config::{apply_splits, parse_symbol_line, write_symbols},
+        config::{apply_splits, parse_symbol_line, write_splits, write_symbols},
        dol::process_dol,
-        elf::process_elf,
+        elf::{process_elf, write_elf},
        file::{map_file, map_reader},
-        map::process_map,
+        lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
    },
};
-use crate::util::elf::write_elf;

#[derive(FromArgs, PartialEq, Debug)]
/// Commands for processing DOL files.
@@ -44,32 +42,8 @@ pub struct Args {
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
enum SubCommand {
-    Disasm(DisasmArgs),
    Info(InfoArgs),
+    Split(SplitArgs),
}

-#[derive(FromArgs, PartialEq, Eq, Debug)]
-/// disassembles a DOL file
-#[argh(subcommand, name = "disasm")]
-pub struct DisasmArgs {
-    #[argh(option, short = 'm')]
-    /// path to input map
-    map_file: Option<PathBuf>,
-    #[argh(option, short = 's')]
-    /// path to symbols file
-    symbols_file: Option<PathBuf>,
-    #[argh(option, short = 'p')]
-    /// path to splits file
-    splits_file: Option<PathBuf>,
-    #[argh(option, short = 'e')]
-    /// ELF file to validate against (debugging only)
-    elf_file: Option<PathBuf>,
-    #[argh(positional)]
-    /// DOL file
-    dol_file: PathBuf,
-    #[argh(option, short = 'o')]
-    /// output file (or directory, if splitting)
-    out: PathBuf,
-}
-
#[derive(FromArgs, PartialEq, Eq, Debug)]
@@ -81,301 +55,39 @@ pub struct InfoArgs {
    dol_file: PathBuf,
}

+#[derive(FromArgs, PartialEq, Eq, Debug)]
+/// Splits a DOL into relocatable objects.
+#[argh(subcommand, name = "split")]
+pub struct SplitArgs {
+    #[argh(positional)]
+    /// input file
+    in_file: PathBuf,
+    #[argh(positional)]
+    /// output directory
+    out_dir: PathBuf,
+    #[argh(option, short = 's')]
+    /// path to symbols file
+    symbols_file: Option<PathBuf>,
+    #[argh(option, short = 'p')]
+    /// path to splits file
+    splits_file: Option<PathBuf>,
+    #[argh(option, short = 'e')]
+    /// ELF file to validate against (debugging only)
+    elf_file: Option<PathBuf>,
+}

pub fn run(args: Args) -> Result<()> {
    match args.command {
-        SubCommand::Disasm(c_args) => disasm(c_args),
        SubCommand::Info(c_args) => info(c_args),
+        SubCommand::Split(c_args) => split(c_args),
    }
}
const SIGNATURES: &[(&str, &str)] = &[
("__init_registers", include_str!("../../assets/signatures/__init_registers.yml")),
("__init_hardware", include_str!("../../assets/signatures/__init_hardware.yml")),
("__init_data", include_str!("../../assets/signatures/__init_data.yml")),
("__set_debug_bba", include_str!("../../assets/signatures/__set_debug_bba.yml")),
("__OSPSInit", include_str!("../../assets/signatures/__OSPSInit.yml")),
("__OSFPRInit", include_str!("../../assets/signatures/__OSFPRInit.yml")),
("__OSCacheInit", include_str!("../../assets/signatures/__OSCacheInit.yml")),
("DMAErrorHandler", include_str!("../../assets/signatures/DMAErrorHandler.yml")),
("DBInit", include_str!("../../assets/signatures/DBInit.yml")),
("OSInit", include_str!("../../assets/signatures/OSInit.yml")),
("__OSThreadInit", include_str!("../../assets/signatures/__OSThreadInit.yml")),
("__OSInitIPCBuffer", include_str!("../../assets/signatures/__OSInitIPCBuffer.yml")),
("EXIInit", include_str!("../../assets/signatures/EXIInit.yml")),
("EXIGetID", include_str!("../../assets/signatures/EXIGetID.yml")),
("exit", include_str!("../../assets/signatures/exit.yml")),
("_ExitProcess", include_str!("../../assets/signatures/_ExitProcess.yml")),
("__fini_cpp", include_str!("../../assets/signatures/__fini_cpp.yml")),
("__destroy_global_chain", include_str!("../../assets/signatures/__destroy_global_chain.yml")),
("InitMetroTRK", include_str!("../../assets/signatures/InitMetroTRK.yml")),
("InitMetroTRKCommTable", include_str!("../../assets/signatures/InitMetroTRKCommTable.yml")),
("OSExceptionInit", include_str!("../../assets/signatures/OSExceptionInit.yml")),
(
"OSDefaultExceptionHandler",
include_str!("../../assets/signatures/OSDefaultExceptionHandler.yml"),
),
("__OSUnhandledException", include_str!("../../assets/signatures/__OSUnhandledException.yml")),
("OSDisableScheduler", include_str!("../../assets/signatures/OSDisableScheduler.yml")),
("__OSReschedule", include_str!("../../assets/signatures/__OSReschedule.yml")),
("__OSInitSystemCall", include_str!("../../assets/signatures/__OSInitSystemCall.yml")),
("OSInitAlarm", include_str!("../../assets/signatures/OSInitAlarm.yml")),
("__OSInitAlarm", include_str!("../../assets/signatures/__OSInitAlarm.yml")),
("__OSEVStart", include_str!("../../assets/signatures/OSExceptionVector.yml")),
("__OSDBINTSTART", include_str!("../../assets/signatures/__OSDBIntegrator.yml")),
("__OSDBJUMPSTART", include_str!("../../assets/signatures/__OSDBJump.yml")),
("SIInit", include_str!("../../assets/signatures/SIInit.yml")),
("SIGetType", include_str!("../../assets/signatures/SIGetType.yml")),
("SISetSamplingRate", include_str!("../../assets/signatures/SISetSamplingRate.yml")),
("SISetXY", include_str!("../../assets/signatures/SISetXY.yml")),
("VIGetTvFormat", include_str!("../../assets/signatures/VIGetTvFormat.yml")),
("DVDInit", include_str!("../../assets/signatures/DVDInit.yml")),
(
"DVDSetAutoFatalMessaging",
include_str!("../../assets/signatures/DVDSetAutoFatalMessaging.yml"),
),
("OSSetArenaLo", include_str!("../../assets/signatures/OSSetArenaLo.yml")),
("OSSetArenaHi", include_str!("../../assets/signatures/OSSetArenaHi.yml")),
("OSSetMEM1ArenaLo", include_str!("../../assets/signatures/OSSetMEM1ArenaLo.yml")),
("OSSetMEM1ArenaHi", include_str!("../../assets/signatures/OSSetMEM1ArenaHi.yml")),
("OSSetMEM2ArenaLo", include_str!("../../assets/signatures/OSSetMEM2ArenaLo.yml")),
("OSSetMEM2ArenaHi", include_str!("../../assets/signatures/OSSetMEM2ArenaHi.yml")),
("__OSInitAudioSystem", include_str!("../../assets/signatures/__OSInitAudioSystem.yml")),
(
"__OSInitMemoryProtection",
include_str!("../../assets/signatures/__OSInitMemoryProtection.yml"),
),
// ("BATConfig", include_str!("../../assets/signatures/BATConfig.yml")), TODO
("ReportOSInfo", include_str!("../../assets/signatures/ReportOSInfo.yml")),
("__check_pad3", include_str!("../../assets/signatures/__check_pad3.yml")),
("OSResetSystem", include_str!("../../assets/signatures/OSResetSystem.yml")),
("OSReturnToMenu", include_str!("../../assets/signatures/OSReturnToMenu.yml")),
("__OSReturnToMenu", include_str!("../../assets/signatures/__OSReturnToMenu.yml")),
("__OSShutdownDevices", include_str!("../../assets/signatures/__OSShutdownDevices.yml")),
("__OSInitSram", include_str!("../../assets/signatures/__OSInitSram.yml")),
("__OSSyncSram", include_str!("../../assets/signatures/__OSSyncSram.yml")),
(
"__OSGetExceptionHandler",
include_str!("../../assets/signatures/__OSGetExceptionHandler.yml"),
),
(
"OSRegisterResetFunction",
include_str!("../../assets/signatures/OSRegisterResetFunction.yml"),
),
(
"OSRegisterShutdownFunction",
include_str!("../../assets/signatures/OSRegisterShutdownFunction.yml"),
),
(
"DecrementerExceptionHandler",
include_str!("../../assets/signatures/DecrementerExceptionHandler.yml"),
),
(
"DecrementerExceptionCallback",
include_str!("../../assets/signatures/DecrementerExceptionCallback.yml"),
),
("__OSInterruptInit", include_str!("../../assets/signatures/__OSInterruptInit.yml")),
("__OSContextInit", include_str!("../../assets/signatures/__OSContextInit.yml")),
("OSSwitchFPUContext", include_str!("../../assets/signatures/OSSwitchFPUContext.yml")),
("OSReport", include_str!("../../assets/signatures/OSReport.yml")),
("TRK_main", include_str!("../../assets/signatures/TRK_main.yml")),
("TRKNubWelcome", include_str!("../../assets/signatures/TRKNubWelcome.yml")),
("TRKInitializeNub", include_str!("../../assets/signatures/TRKInitializeNub.yml")),
(
"TRKInitializeIntDrivenUART",
include_str!("../../assets/signatures/TRKInitializeIntDrivenUART.yml"),
),
("TRKEXICallBack", include_str!("../../assets/signatures/TRKEXICallBack.yml")),
("TRKLoadContext", include_str!("../../assets/signatures/TRKLoadContext.yml")),
("TRKInterruptHandler", include_str!("../../assets/signatures/TRKInterruptHandler.yml")),
("TRKExceptionHandler", include_str!("../../assets/signatures/TRKExceptionHandler.yml")),
("TRKSaveExtended1Block", include_str!("../../assets/signatures/TRKSaveExtended1Block.yml")),
("TRKNubMainLoop", include_str!("../../assets/signatures/TRKNubMainLoop.yml")),
("TRKTargetContinue", include_str!("../../assets/signatures/TRKTargetContinue.yml")),
("TRKSwapAndGo", include_str!("../../assets/signatures/TRKSwapAndGo.yml")),
(
"TRKRestoreExtended1Block",
include_str!("../../assets/signatures/TRKRestoreExtended1Block.yml"),
),
(
"TRKInterruptHandlerEnableInterrupts",
include_str!("../../assets/signatures/TRKInterruptHandlerEnableInterrupts.yml"),
),
("memset", include_str!("../../assets/signatures/memset.yml")),
(
"__msl_runtime_constraint_violation_s",
include_str!("../../assets/signatures/__msl_runtime_constraint_violation_s.yml"),
),
("ClearArena", include_str!("../../assets/signatures/ClearArena.yml")),
("IPCCltInit", include_str!("../../assets/signatures/IPCCltInit.yml")),
("__OSInitSTM", include_str!("../../assets/signatures/__OSInitSTM.yml")),
("IOS_Open", include_str!("../../assets/signatures/IOS_Open.yml")),
("__ios_Ipc2", include_str!("../../assets/signatures/__ios_Ipc2.yml")),
("IPCiProfQueueReq", include_str!("../../assets/signatures/IPCiProfQueueReq.yml")),
("SCInit", include_str!("../../assets/signatures/SCInit.yml")),
("SCReloadConfFileAsync", include_str!("../../assets/signatures/SCReloadConfFileAsync.yml")),
("NANDPrivateOpenAsync", include_str!("../../assets/signatures/NANDPrivateOpenAsync.yml")),
("nandIsInitialized", include_str!("../../assets/signatures/nandIsInitialized.yml")),
("nandOpen", include_str!("../../assets/signatures/nandOpen.yml")),
("nandGenerateAbsPath", include_str!("../../assets/signatures/nandGenerateAbsPath.yml")),
("nandGetHeadToken", include_str!("../../assets/signatures/nandGetHeadToken.yml")),
("ISFS_OpenAsync", include_str!("../../assets/signatures/ISFS_OpenAsync.yml")),
("nandConvertErrorCode", include_str!("../../assets/signatures/nandConvertErrorCode.yml")),
(
"NANDLoggingAddMessageAsync",
include_str!("../../assets/signatures/NANDLoggingAddMessageAsync.yml"),
),
(
"__NANDPrintErrorMessage",
include_str!("../../assets/signatures/__NANDPrintErrorMessage.yml"),
),
("__OSInitNet", include_str!("../../assets/signatures/__OSInitNet.yml")),
("__DVDCheckDevice", include_str!("../../assets/signatures/__DVDCheckDevice.yml")),
("__OSInitPlayTime", include_str!("../../assets/signatures/__OSInitPlayTime.yml")),
("__OSStartPlayRecord", include_str!("../../assets/signatures/__OSStartPlayRecord.yml")),
("NANDInit", include_str!("../../assets/signatures/NANDInit.yml")),
("ISFS_OpenLib", include_str!("../../assets/signatures/ISFS_OpenLib.yml")),
("ESP_GetTitleId", include_str!("../../assets/signatures/ESP_GetTitleId.yml")),
(
"NANDSetAutoErrorMessaging",
include_str!("../../assets/signatures/NANDSetAutoErrorMessaging.yml"),
),
("__DVDFSInit", include_str!("../../assets/signatures/__DVDFSInit.yml")),
("__DVDClearWaitingQueue", include_str!("../../assets/signatures/__DVDClearWaitingQueue.yml")),
("__DVDInitWA", include_str!("../../assets/signatures/__DVDInitWA.yml")),
("__DVDLowSetWAType", include_str!("../../assets/signatures/__DVDLowSetWAType.yml")),
("__fstLoad", include_str!("../../assets/signatures/__fstLoad.yml")),
("DVDReset", include_str!("../../assets/signatures/DVDReset.yml")),
("DVDLowReset", include_str!("../../assets/signatures/DVDLowReset.yml")),
("DVDReadDiskID", include_str!("../../assets/signatures/DVDReadDiskID.yml")),
("stateReady", include_str!("../../assets/signatures/stateReady.yml")),
("DVDLowWaitCoverClose", include_str!("../../assets/signatures/DVDLowWaitCoverClose.yml")),
("__DVDStoreErrorCode", include_str!("../../assets/signatures/__DVDStoreErrorCode.yml")),
("DVDLowStopMotor", include_str!("../../assets/signatures/DVDLowStopMotor.yml")),
("DVDGetDriveStatus", include_str!("../../assets/signatures/DVDGetDriveStatus.yml")),
("printf", include_str!("../../assets/signatures/printf.yml")),
("sprintf", include_str!("../../assets/signatures/sprintf.yml")),
("vprintf", include_str!("../../assets/signatures/vprintf.yml")),
("vsprintf", include_str!("../../assets/signatures/vsprintf.yml")),
("vsnprintf", include_str!("../../assets/signatures/vsnprintf.yml")),
("__pformatter", include_str!("../../assets/signatures/__pformatter.yml")),
("longlong2str", include_str!("../../assets/signatures/longlong2str.yml")),
("__mod2u", include_str!("../../assets/signatures/__mod2u.yml")),
("__FileWrite", include_str!("../../assets/signatures/__FileWrite.yml")),
("fwrite", include_str!("../../assets/signatures/fwrite.yml")),
("__fwrite", include_str!("../../assets/signatures/__fwrite.yml")),
("__stdio_atexit", include_str!("../../assets/signatures/__stdio_atexit.yml")),
("__StringWrite", include_str!("../../assets/signatures/__StringWrite.yml")),
];
const POST_SIGNATURES: &[(&str, &str)] = &[
("RSOStaticLocateObject", include_str!("../../assets/signatures/RSOStaticLocateObject.yml")),
// ("GXInit", include_str!("../../assets/signatures/GXInit.yml")),
];
pub fn apply_signatures(obj: &mut ObjInfo) -> Result<()> {
let entry = obj.entry as u32;
if let Some(signature) =
check_signatures_str(obj, entry, include_str!("../../assets/signatures/__start.yml"))?
{
apply_signature(obj, entry, &signature)?;
}
for &(name, sig_str) in SIGNATURES {
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == name) {
let addr = symbol.address as u32;
if let Some(signature) = check_signatures_str(obj, addr, sig_str)? {
apply_signature(obj, addr, &signature)?;
}
}
}
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == "__init_user") {
// __init_user can be overridden, but we can still look for __init_cpp from it
let mut analyzer = AnalyzerState::default();
analyzer.process_function_at(&obj, symbol.address as u32)?;
for addr in analyzer.function_entries {
if let Some(signature) = check_signatures_str(
obj,
addr,
include_str!("../../assets/signatures/__init_cpp.yml"),
)? {
apply_signature(obj, addr, &signature)?;
break;
}
}
}
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == "_ctors") {
// First entry of ctors is __init_cpp_exceptions
let section = obj.section_at(symbol.address as u32)?;
let target = read_u32(&section.data, symbol.address as u32, section.address as u32)
.ok_or_else(|| anyhow!("Failed to read _ctors data"))?;
if target != 0 {
if let Some(signature) = check_signatures_str(
obj,
target,
include_str!("../../assets/signatures/__init_cpp_exceptions.yml"),
)? {
apply_signature(obj, target, &signature)?;
}
}
}
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == "_dtors") {
// Second entry of dtors is __fini_cpp_exceptions
let section = obj.section_at(symbol.address as u32)?;
let target = read_u32(&section.data, symbol.address as u32 + 4, section.address as u32)
.ok_or_else(|| anyhow!("Failed to read _dtors data"))?;
if target != 0 {
if let Some(signature) = check_signatures_str(
obj,
target,
include_str!("../../assets/signatures/__fini_cpp_exceptions.yml"),
)? {
apply_signature(obj, target, &signature)?;
}
}
}
Ok(())
}
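Note: `read_u32` is used by the `_ctors`/`_dtors` handling above but is not shown in this diff. As a rough sketch only (an assumption about its shape, not the actual helper), it would translate the absolute address into an offset within the section and read a big-endian word:

// Hypothetical sketch, not part of this commit: read a big-endian u32 at an
// absolute address from a section's raw data, returning None when out of range.
fn read_u32(data: &[u8], address: u32, section_address: u32) -> Option<u32> {
    let offset = address.checked_sub(section_address)? as usize;
    let bytes = data.get(offset..offset + 4)?;
    Some(u32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]))
}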
pub fn apply_signatures_post(obj: &mut ObjInfo) -> Result<()> {
log::info!("Checking post CFA signatures...");
for &(_name, sig_str) in POST_SIGNATURES {
let signatures = parse_signatures(sig_str)?;
for symbol in obj.symbols.iter().filter(|symbol| symbol.kind == ObjSymbolKind::Function) {
let addr = symbol.address as u32;
if let Some(signature) = check_signatures(obj, addr, &signatures)? {
apply_signature(obj, addr, &signature)?;
break;
}
}
}
log::info!("Done!");
Ok(())
}
fn info(args: InfoArgs) -> Result<()> { fn info(args: InfoArgs) -> Result<()> {
let mut obj = process_dol(&args.dol_file)?; let mut obj = process_dol(&args.dol_file)?;
apply_signatures(&mut obj)?; apply_signatures(&mut obj)?;
// Apply known functions from extab
let mut state = AnalyzerState::default();
for (&addr, &size) in &obj.known_functions {
state.function_entries.insert(addr);
state.function_bounds.insert(addr, addr + size);
}
for symbol in &obj.symbols {
if symbol.kind != ObjSymbolKind::Function {
continue;
}
state.function_entries.insert(symbol.address as u32);
if !symbol.size_known {
continue;
}
state.function_bounds.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
}
// Also check the start of each code section
for section in &obj.sections {
if section.kind == ObjSectionKind::Code {
state.function_entries.insert(section.address as u32);
}
}
let mut state = AnalyzerState::default();
state.detect_functions(&obj)?; state.detect_functions(&obj)?;
log::info!("Discovered {} functions", state.function_slices.len()); log::info!("Discovered {} functions", state.function_slices.len());
@@ -397,9 +109,7 @@ fn info(args: InfoArgs) -> Result<()> {
} }
println!("\nDiscovered symbols:"); println!("\nDiscovered symbols:");
println!("\t{: >23} | {: <10} | {: <10}", "Name", "Address", "Size"); println!("\t{: >23} | {: <10} | {: <10}", "Name", "Address", "Size");
let mut symbols = obj.symbols.clone(); for (_, symbol) in obj.symbols.for_range(..) {
symbols.sort_by_key(|sym| sym.address);
for symbol in symbols {
if symbol.name.starts_with('@') || symbol.name.starts_with("fn_") { if symbol.name.starts_with('@') || symbol.name.starts_with("fn_") {
continue; continue;
} }
@@ -414,83 +124,39 @@ fn info(args: InfoArgs) -> Result<()> {
Ok(()) Ok(())
} }
fn disasm(args: DisasmArgs) -> Result<()> { fn split(args: SplitArgs) -> Result<()> {
let mut obj = process_dol(&args.dol_file)?; log::info!("Loading {}", args.in_file.display());
log::info!("Performing initial control flow analysis"); let mut obj = process_dol(&args.in_file)?;
// if detect_sda_bases(&mut obj).context("Failed to locate SDA bases")? { if let Some(splits_path) = &args.splits_file {
// let (sda2_base, sda_base) = obj.sda_bases.unwrap(); if splits_path.is_file() {
// log::info!("Found _SDA2_BASE_ @ {:#010X}, _SDA_BASE_ @ {:#010X}", sda2_base, sda_base); let map = map_file(splits_path)?;
// } else { apply_splits(map_reader(&map), &mut obj)?;
// bail!("Unable to locate SDA bases"); }
// }
if let Some(map) = &args.map_file {
let mmap = map_file(map)?;
let _entries = process_map(map_reader(&mmap))?;
}
if let Some(splits_file) = &args.splits_file {
let map = map_file(splits_file)?;
apply_splits(map_reader(&map), &mut obj)?;
} }
let mut state = AnalyzerState::default(); let mut state = AnalyzerState::default();
if let Some(symbols_path) = &args.symbols_file { if let Some(symbols_path) = &args.symbols_file {
let map = map_file(symbols_path)?; if symbols_path.is_file() {
for result in map_reader(&map).lines() { let map = map_file(symbols_path)?;
let line = match result { for result in map_reader(&map).lines() {
Ok(line) => line, let line = match result {
Err(e) => bail!("Failed to process symbols file: {e:?}"), Ok(line) => line,
}; Err(e) => bail!("Failed to process symbols file: {e:?}"),
if let Some(symbol) = parse_symbol_line(&line, &obj)? { };
// if symbol.kind == ObjSymbolKind::Function { if let Some(symbol) = parse_symbol_line(&line, &mut obj)? {
// state.function_entries.insert(symbol.address as u32); obj.add_symbol(symbol, true)?;
// if symbol.size_known {
// state
// .function_bounds
// .insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
// }
// }
if let Some(existing_symbol) = obj
.symbols
.iter_mut()
.find(|e| e.address == symbol.address && e.kind == symbol.kind)
{
*existing_symbol = symbol;
} else {
obj.symbols.push(symbol);
} }
} }
} }
} }
// TODO move before symbols? // TODO move before symbols?
log::info!("Performing signature analysis");
apply_signatures(&mut obj)?; apply_signatures(&mut obj)?;
// Apply known functions from extab log::info!("Detecting function boundaries");
for (&addr, &size) in &obj.known_functions {
state.function_entries.insert(addr);
state.function_bounds.insert(addr, addr + size);
}
for symbol in &obj.symbols {
if symbol.kind != ObjSymbolKind::Function {
continue;
}
state.function_entries.insert(symbol.address as u32);
if !symbol.size_known {
continue;
}
state.function_bounds.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
}
// Also check the start of each code section
for section in &obj.sections {
if section.kind == ObjSectionKind::Code {
state.function_entries.insert(section.address as u32);
}
}
state.detect_functions(&obj)?; state.detect_functions(&obj)?;
log::info!("Discovered {} functions", state.function_slices.len()); log::info!("Discovered {} functions", state.function_slices.len());
@@ -505,78 +171,112 @@ fn disasm(args: DisasmArgs) -> Result<()> {
log::info!("Applying relocations"); log::info!("Applying relocations");
tracker.apply(&mut obj, false)?; tracker.apply(&mut obj, false)?;
if args.splits_file.is_some() { log::info!("Detecting object boundaries");
detect_object_boundaries(&mut obj)?;
log::info!("Splitting {} objects", obj.link_order.len()); log::info!("Detecting strings");
let split_objs = split_obj(&obj)?; detect_strings(&mut obj)?;
// Create out dirs
let asm_dir = args.out.join("asm");
let include_dir = args.out.join("include");
let obj_dir = args.out.join("expected");
DirBuilder::new().recursive(true).create(&include_dir)?;
fs::write(include_dir.join("macros.inc"), include_bytes!("../../assets/macros.inc"))?;
log::info!("Writing object files");
let mut file_map = HashMap::<String, Vec<u8>>::new();
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
let out_obj = write_elf(split_obj)?;
match file_map.entry(unit.clone()) {
hash_map::Entry::Vacant(e) => e.insert(out_obj),
hash_map::Entry::Occupied(_) => bail!("Duplicate file {unit}"),
};
}
let mut rsp_file = BufWriter::new(File::create("rsp")?);
for unit in &obj.link_order {
let object = file_map
.get(unit)
.ok_or_else(|| anyhow!("Failed to find object file for unit '{unit}'"))?;
let out_path = obj_dir.join(unit);
writeln!(rsp_file, "{}", out_path.display())?;
if let Some(parent) = out_path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
let mut file = File::create(&out_path)
.with_context(|| format!("Failed to create '{}'", out_path.display()))?;
file.write_all(object)?;
file.flush()?;
}
rsp_file.flush()?;
log::info!("Writing disassembly");
let mut files_out = File::create(args.out.join("link_order.txt"))?;
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
let out_path = asm_dir.join(format!("{}.s", unit.trim_end_matches(".o")));
if let Some(parent) = out_path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
let mut w = BufWriter::new(File::create(out_path)?);
write_asm(&mut w, split_obj)?;
w.flush()?;
writeln!(files_out, "{}", unit)?;
}
files_out.flush()?;
} else {
log::info!("Writing disassembly");
let mut w = BufWriter::new(File::create("out.s")?);
write_asm(&mut w, &obj)?;
}
if let Some(symbols_path) = &args.symbols_file { if let Some(symbols_path) = &args.symbols_file {
let mut symbols_writer = BufWriter::new( let mut symbols_writer = BufWriter::new(
File::create(&symbols_path) File::create(symbols_path)
.with_context(|| format!("Failed to create '{}'", symbols_path.display()))?, .with_context(|| format!("Failed to create '{}'", symbols_path.display()))?,
); );
write_symbols(&mut symbols_writer, &obj)?; write_symbols(&mut symbols_writer, &obj)?;
} }
// (debugging) validate against ELF if let Some(splits_path) = &args.splits_file {
if let Some(file) = args.elf_file { let mut splits_writer = BufWriter::new(
validate(&obj, &file, &state)?; File::create(splits_path)
.with_context(|| format!("Failed to create '{}'", splits_path.display()))?,
);
write_splits(&mut splits_writer, &obj)?;
} }
log::info!("Adjusting splits");
update_splits(&mut obj)?;
log::info!("Splitting {} objects", obj.link_order.len());
let split_objs = split_obj(&obj)?;
// Create out dirs
let asm_dir = args.out_dir.join("asm");
let include_dir = args.out_dir.join("include");
let obj_dir = args.out_dir.clone();
DirBuilder::new().recursive(true).create(&include_dir)?;
fs::write(include_dir.join("macros.inc"), include_str!("../../assets/macros.inc"))?;
log::info!("Writing object files");
let mut file_map = HashMap::<String, Vec<u8>>::new();
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
let out_obj = write_elf(split_obj)?;
match file_map.entry(unit.clone()) {
hash_map::Entry::Vacant(e) => e.insert(out_obj),
hash_map::Entry::Occupied(_) => bail!("Duplicate file {unit}"),
};
}
let mut rsp_file = BufWriter::new(File::create(args.out_dir.join("rsp"))?);
for unit in &obj.link_order {
let object = file_map
.get(unit)
.ok_or_else(|| anyhow!("Failed to find object file for unit '{unit}'"))?;
let out_path = obj_dir.join(obj_path_for_unit(unit));
writeln!(rsp_file, "{}", out_path.display())?;
if let Some(parent) = out_path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
let mut file = File::create(&out_path)
.with_context(|| format!("Failed to create '{}'", out_path.display()))?;
file.write_all(object)?;
file.flush()?;
}
rsp_file.flush()?;
// Generate ldscript.lcf
fs::write(args.out_dir.join("ldscript.lcf"), generate_ldscript(&obj)?)?;
log::info!("Writing disassembly");
// let mut files_out = File::create(args.out_dir.join("build.ps1"))?;
// writeln!(files_out, "$ErrorActionPreference = 'Stop'")?;
// writeln!(
// files_out,
// "$asflags = '-mgekko', '-I', '{}', '--defsym', 'version=0', '-W', '--strip-local-absolute', '-gdwarf-2'",
// include_dir.display()
// )?;
// writeln!(files_out, "$env:PATH = \"$env:PATH;C:\\devkitPro\\devkitPPC\\bin\"")?;
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
let out_path = asm_dir.join(asm_path_for_unit(unit));
if let Some(parent) = out_path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
let mut w = BufWriter::new(File::create(&out_path)?);
write_asm(&mut w, split_obj)?;
w.flush()?;
// let obj_path = obj_dir.join(obj_path_for_unit(unit));
// writeln!(files_out, "Write-Host 'Compiling {}'", obj_path.display())?;
// writeln!(
// files_out,
// "powerpc-eabi-as @asflags -o '{}' '{}'",
// obj_path.display(),
// out_path.display()
// )?;
// writeln!(
// files_out,
// "dtk elf fixup '{}' '{}'",
// obj_path.display(),
// obj_path.display()
// )?;
}
// files_out.flush()?;
// (debugging) validate against ELF
if let Some(file) = &args.elf_file {
validate(&obj, file, &state)?;
}
Ok(()) Ok(())
} }
@@ -610,11 +310,7 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
if section.kind != ObjSectionKind::Code { if section.kind != ObjSectionKind::Code {
continue; continue;
} }
for (_symbol_idx, symbol) in real_obj.symbols_for_section(section.index) { for (_symbol_idx, symbol) in real_obj.symbols.for_section(section) {
// if symbol.name.starts_with("switch_") {
// continue;
// }
// if symbol.kind == ObjSymbolKind::Function {
real_functions.insert(symbol.address as u32, symbol.name.clone()); real_functions.insert(symbol.address as u32, symbol.name.clone());
match state.function_bounds.get(&(symbol.address as u32)) { match state.function_bounds.get(&(symbol.address as u32)) {
Some(&end) => { Some(&end) => {
@@ -636,7 +332,6 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
); );
} }
} }
// }
} }
} }
for (&start, &end) in &state.function_bounds { for (&start, &end) in &state.function_bounds {
@@ -653,7 +348,8 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
); );
} }
} }
return Ok(()); // TODO // return Ok(()); // TODO
for real_section in &real_obj.sections { for real_section in &real_obj.sections {
let obj_section = match obj.sections.get(real_section.index) { let obj_section = match obj.sections.get(real_section.index) {
Some(v) => v, Some(v) => v,
@@ -661,10 +357,11 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
}; };
let real_map = real_section.build_relocation_map()?; let real_map = real_section.build_relocation_map()?;
let obj_map = obj_section.build_relocation_map()?; let obj_map = obj_section.build_relocation_map()?;
for (&real_addr, real_reloc) in &real_map { for (&real_addr, &real_reloc_idx) in &real_map {
let real_symbol = &real_obj.symbols[real_reloc.target_symbol]; let real_reloc = &real_section.relocations[real_reloc_idx];
let real_symbol = real_obj.symbols.at(real_reloc.target_symbol);
let obj_reloc = match obj_map.get(&real_addr) { let obj_reloc = match obj_map.get(&real_addr) {
Some(v) => v, Some(v) => &obj_section.relocations[*v],
None => { None => {
// Ignore GCC local jump branches // Ignore GCC local jump branches
if real_symbol.kind == ObjSymbolKind::Section if real_symbol.kind == ObjSymbolKind::Section
@@ -688,7 +385,7 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
continue; continue;
} }
}; };
let obj_symbol = &obj.symbols[obj_reloc.target_symbol]; let obj_symbol = obj.symbols.at(obj_reloc.target_symbol);
if real_reloc.kind != obj_reloc.kind { if real_reloc.kind != obj_reloc.kind {
log::warn!( log::warn!(
"Relocation type mismatch @ {:#010X}: {:?} != {:?}", "Relocation type mismatch @ {:#010X}: {:?} != {:?}",
@@ -714,8 +411,9 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
continue; continue;
} }
} }
for (&obj_addr, obj_reloc) in &obj_map { for (&obj_addr, &obj_reloc_idx) in &obj_map {
let obj_symbol = &obj.symbols[obj_reloc.target_symbol]; let obj_reloc = &obj_section.relocations[obj_reloc_idx];
let obj_symbol = obj.symbols.at(obj_reloc.target_symbol);
if !real_map.contains_key(&obj_addr) { if !real_map.contains_key(&obj_addr) {
log::warn!( log::warn!(
"Relocation not real @ {:#010X} {:?} to {:#010X}+{:X} ({})", "Relocation not real @ {:#010X} {:?} to {:#010X}+{:X} ({})",

@@ -11,9 +11,8 @@ use object::{elf, Object, ObjectSection, ObjectSymbol, RelocationKind, Relocatio
use crate::util::{ use crate::util::{
dwarf::{ dwarf::{
process_address, process_offset, process_type, process_variable_location, process_address, process_type, process_variable_location, read_debug_section, type_string,
read_debug_section, type_string, ud_type, ud_type_def, ud_type_string, AttributeKind, ud_type, ud_type_def, ud_type_string, AttributeKind, TagKind,
TagKind, TypeKind,
}, },
file::map_file, file::map_file,
}; };
@@ -61,13 +60,13 @@ fn dump(args: DumpArgs) -> Result<()> {
}; };
let name = String::from_utf8_lossy(e.header().identifier()).to_string(); let name = String::from_utf8_lossy(e.header().identifier()).to_string();
let mut data = vec![0u8; e.header().size() as usize]; let mut data = vec![0u8; e.header().size() as usize];
e.read(&mut data)?; e.read_exact(&mut data)?;
let obj_file = object::read::File::parse(&*data)?; let obj_file = object::read::File::parse(&*data)?;
let debug_section = match obj_file.section_by_name(".debug") { let debug_section = match obj_file.section_by_name(".debug") {
Some(section) => { Some(section) => {
log::info!("Processing '{}'", name); log::info!("Processing '{}'", name);
section section
}, }
None => { None => {
log::warn!("Object '{}' missing .debug section", name); log::warn!("Object '{}' missing .debug section", name);
continue; continue;
@@ -76,7 +75,7 @@ fn dump(args: DumpArgs) -> Result<()> {
if let Some(out_path) = &args.out { if let Some(out_path) = &args.out {
// TODO make a basename method // TODO make a basename method
let name = name.trim_start_matches("D:").replace('\\', "/"); let name = name.trim_start_matches("D:").replace('\\', "/");
let name = name.rsplit_once('/').map(|(a, b)| b).unwrap_or(&name); let name = name.rsplit_once('/').map(|(_, b)| b).unwrap_or(&name);
let file_path = out_path.join(format!("{}.txt", name)); let file_path = out_path.join(format!("{}.txt", name));
let mut file = BufWriter::new(File::create(file_path)?); let mut file = BufWriter::new(File::create(file_path)?);
dump_debug_section(&mut file, &obj_file, debug_section)?; dump_debug_section(&mut file, &obj_file, debug_section)?;

@@ -2,16 +2,17 @@ use std::{ use std::{
collections::{btree_map, hash_map, BTreeMap, HashMap}, collections::{btree_map, hash_map, BTreeMap, HashMap},
fs, fs,
fs::{DirBuilder, File}, fs::{DirBuilder, File},
io::{BufRead, BufReader, BufWriter, Write}, io::{BufWriter, Write},
path::PathBuf, path::PathBuf,
}; };
use anyhow::{anyhow, bail, ensure, Context, Result}; use anyhow::{anyhow, bail, ensure, Context, Result};
use argh::FromArgs; use argh::FromArgs;
use object::{ use object::{
elf,
write::{Mangling, SectionId, SymbolId}, write::{Mangling, SectionId, SymbolId},
Object, ObjectSection, ObjectSymbol, RelocationKind, RelocationTarget, SectionFlags, FileFlags, Object, ObjectSection, ObjectSymbol, RelocationKind, RelocationTarget, SectionFlags,
SectionIndex, SectionKind, SymbolFlags, SymbolKind, SymbolScope, SymbolSection, SectionIndex, SectionKind, SymbolFlags, SymbolIndex, SymbolKind, SymbolScope, SymbolSection,
}; };
use crate::{ use crate::{
@@ -24,7 +25,7 @@ use crate::{ use crate::{
asm::write_asm, asm::write_asm,
config::{write_splits, write_symbols}, config::{write_splits, write_symbols},
elf::{process_elf, write_elf}, elf::{process_elf, write_elf},
file::buf_reader, file::process_rsp,
}, },
}; };
@@ -92,9 +93,6 @@ pub struct ConfigArgs {
#[argh(positional)] #[argh(positional)]
/// output directory /// output directory
out_dir: PathBuf, out_dir: PathBuf,
#[argh(option, short = 'm')]
/// path to obj_files.mk
obj_files: Option<PathBuf>,
} }
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
@@ -137,29 +135,12 @@ fn config(args: ConfigArgs) -> Result<()> {
} }
{ {
let obj_files = if let Some(path) = &args.obj_files {
Some(
BufReader::new(
File::open(path)
.with_context(|| format!("Failed to open '{}'", path.display()))?,
)
.lines()
.filter(|line| match line {
Ok(line) => line.contains(".o"),
Err(_) => false,
})
.map(|result| result.unwrap())
.collect::<Vec<String>>(),
)
} else {
None
};
let splits_path = args.out_dir.join("splits.txt"); let splits_path = args.out_dir.join("splits.txt");
let mut splits_writer = BufWriter::new( let mut splits_writer = BufWriter::new(
File::create(&splits_path) File::create(&splits_path)
.with_context(|| format!("Failed to create '{}'", splits_path.display()))?, .with_context(|| format!("Failed to create '{}'", splits_path.display()))?,
); );
write_splits(&mut splits_writer, &obj, obj_files)?; write_splits(&mut splits_writer, &obj)?;
} }
Ok(()) Ok(())
@@ -257,13 +238,22 @@ fn file_name_from_unit(str: &str, suffix: &str) -> String {
const ASM_SUFFIX: &str = " (asm)"; const ASM_SUFFIX: &str = " (asm)";
// fn fixup(args: FixupArgs) -> Result<()> {
// let obj = process_elf(&args.in_file)?;
// let out = write_elf(&obj)?;
// fs::write(&args.out_file, &out).context("Failed to create output file")?;
// Ok(())
// }
fn fixup(args: FixupArgs) -> Result<()> { fn fixup(args: FixupArgs) -> Result<()> {
let in_buf = fs::read(&args.in_file) let in_buf = fs::read(&args.in_file)
.with_context(|| format!("Failed to open input file: '{}'", args.in_file.display()))?; .with_context(|| format!("Failed to open input file: '{}'", args.in_file.display()))?;
let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?; let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?;
let mut out_file = let mut out_file =
object::write::Object::new(in_file.format(), in_file.architecture(), in_file.endianness()); object::write::Object::new(in_file.format(), in_file.architecture(), in_file.endianness());
out_file.set_mangling(Mangling::None); out_file.flags =
FileFlags::Elf { os_abi: elf::ELFOSABI_SYSV, abi_version: 0, e_flags: elf::EF_PPC_EMB };
out_file.mangling = Mangling::None;
// Write file symbol first // Write file symbol first
let mut file_symbol_found = false; let mut file_symbol_found = false;
@@ -317,7 +307,7 @@ fn fixup(args: FixupArgs) -> Result<()> {
} else { } else {
out_section.set_data(section.uncompressed_data()?.into_owned(), section.align()); out_section.set_data(section.uncompressed_data()?.into_owned(), section.align());
} }
if has_section_flags(section.flags(), object::elf::SHF_ALLOC)? { if has_section_flags(section.flags(), elf::SHF_ALLOC)? {
// Generate section symbol // Generate section symbol
out_file.section_symbol(section_id); out_file.section_symbol(section_id);
} }
@@ -398,9 +388,9 @@ fn fixup(args: FixupArgs) -> Result<()> {
// This is a hack to avoid replacement with a section symbol // This is a hack to avoid replacement with a section symbol
// See [`object::write::elf::object::elf_fixup_relocation`] // See [`object::write::elf::object::elf_fixup_relocation`]
RelocationKind::Absolute => RelocationKind::Elf(if addr & 3 == 0 { RelocationKind::Absolute => RelocationKind::Elf(if addr & 3 == 0 {
object::elf::R_PPC_ADDR32 elf::R_PPC_ADDR32
} else { } else {
object::elf::R_PPC_UADDR32 elf::R_PPC_UADDR32
}), }),
other => other, other => other,
}; };
@@ -442,7 +432,9 @@ fn to_write_symbol_section(
} }
} }
fn to_write_symbol_flags(flags: SymbolFlags<SectionIndex>) -> Result<SymbolFlags<SectionId>> { fn to_write_symbol_flags(
flags: SymbolFlags<SectionIndex, SymbolIndex>,
) -> Result<SymbolFlags<SectionId, SymbolId>> {
match flags { match flags {
SymbolFlags::Elf { st_info, st_other } => Ok(SymbolFlags::Elf { st_info, st_other }), SymbolFlags::Elf { st_info, st_other } => Ok(SymbolFlags::Elf { st_info, st_other }),
SymbolFlags::None => Ok(SymbolFlags::None), SymbolFlags::None => Ok(SymbolFlags::None),
@@ -475,25 +467,7 @@ fn has_section_flags(flags: SectionFlags, flag: u32) -> Result<bool> {
fn signatures(args: SignaturesArgs) -> Result<()> { fn signatures(args: SignaturesArgs) -> Result<()> {
// Process response files (starting with '@') // Process response files (starting with '@')
let mut files = Vec::with_capacity(args.files.len()); let files = process_rsp(&args.files)?;
for path in args.files {
let path_str =
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
match path_str.strip_prefix('@') {
Some(rsp_file) => {
let reader = buf_reader(rsp_file)?;
for result in reader.lines() {
let line = result?;
if !line.is_empty() {
files.push(PathBuf::from(line));
}
}
}
None => {
files.push(path);
}
}
}
let mut signatures: HashMap<String, FunctionSignature> = HashMap::new(); let mut signatures: HashMap<String, FunctionSignature> = HashMap::new();
for path in files { for path in files {

@@ -65,10 +65,10 @@ pub fn run(args: Args) -> Result<()> {
out.seek(SeekFrom::Start(offset as u64))?; out.seek(SeekFrom::Start(offset as u64))?;
// Text sections // Text sections
for section in obj_file.sections() { for section in
if section.kind() != SectionKind::Text { obj_file.sections().filter(|s| section_kind(s) == SectionKind::Text && is_alloc(s.flags()))
continue; {
} log::debug!("Processing text section '{}'", section.name().unwrap_or("[error]"));
let address = section.address() as u32; let address = section.address() as u32;
let size = align32(section.size() as u32); let size = align32(section.size() as u32);
*header.text_sections.get_mut(header.text_section_count).ok_or_else(|| { *header.text_sections.get_mut(header.text_section_count).ok_or_else(|| {
@@ -83,10 +83,10 @@ pub fn run(args: Args) -> Result<()> {
} }
// Data sections // Data sections
for section in obj_file.sections() { for section in
if section.kind() != SectionKind::Data && section.kind() != SectionKind::ReadOnlyData { obj_file.sections().filter(|s| section_kind(s) == SectionKind::Data && is_alloc(s.flags()))
continue; {
} log::debug!("Processing data section '{}'", section.name().unwrap_or("[error]"));
let address = section.address() as u32; let address = section.address() as u32;
let size = align32(section.size() as u32); let size = align32(section.size() as u32);
*header.data_sections.get_mut(header.data_section_count).ok_or_else(|| { *header.data_sections.get_mut(header.data_section_count).ok_or_else(|| {
@@ -101,10 +101,10 @@ pub fn run(args: Args) -> Result<()> {
} }
// BSS sections // BSS sections
for section in obj_file.sections() { for section in obj_file
if section.kind() != SectionKind::UninitializedData { .sections()
continue; .filter(|s| section_kind(s) == SectionKind::UninitializedData && is_alloc(s.flags()))
} {
let address = section.address() as u32; let address = section.address() as u32;
let size = section.size() as u32; let size = section.size() as u32;
if header.bss_address == 0 { if header.bss_address == 0 {
@@ -162,3 +162,28 @@ fn write_aligned<T: Write>(out: &mut T, bytes: &[u8], aligned_size: u32) -> std:
} }
Ok(()) Ok(())
} }
// Some ELF files don't have the proper section kind set (for small data sections in particular)
// so we map the section name to the expected section kind when possible.
#[inline]
fn section_kind(section: &object::Section) -> SectionKind {
section
.name()
.ok()
.and_then(|name| match name {
".init" | ".text" | ".vmtext" | ".dbgtext" => Some(SectionKind::Text),
".ctors" | ".dtors" | ".data" | ".rodata" | ".sdata" | ".sdata2" | "extab"
| "extabindex" => Some(SectionKind::Data),
".bss" | ".sbss" | ".sbss2" => Some(SectionKind::UninitializedData),
_ => None,
})
.unwrap_or_else(|| match section.kind() {
SectionKind::ReadOnlyData => SectionKind::Data,
kind => kind,
})
}
#[inline]
fn is_alloc(flags: object::SectionFlags) -> bool {
matches!(flags, object::SectionFlags::Elf { sh_flags } if sh_flags & object::elf::SHF_ALLOC as u64 != 0)
}

@@ -1,11 +1,13 @@
#![allow(clippy::needless_borrow)]
use std::path::PathBuf; use std::path::PathBuf;
use anyhow::{bail, ensure, Result}; use anyhow::{bail, Result};
use argh::FromArgs; use argh::FromArgs;
use cwdemangle::{demangle, DemangleOptions};
use crate::util::{ use crate::util::{
file::{map_file, map_reader}, file::{map_file, map_reader},
map::{process_map, resolve_link_order, SymbolEntry, SymbolRef}, map::{process_map, SymbolEntry, SymbolRef},
}; };
#[derive(FromArgs, PartialEq, Debug)] #[derive(FromArgs, PartialEq, Debug)]
@@ -96,11 +98,8 @@ fn entries(args: EntriesArgs) -> Result<()> {
if symbol_ref.name.starts_with('@') { if symbol_ref.name.starts_with('@') {
continue; continue;
} }
if let Some(symbol) = entries.symbols.get(symbol_ref) { let demangled = demangle(&symbol_ref.name, &DemangleOptions::default());
println!("{}", symbol.demangled.as_ref().unwrap_or(&symbol.name)); println!("{}", demangled.as_deref().unwrap_or(&symbol_ref.name));
} else {
println!("Symbol not found: {}", symbol_ref.name);
}
} }
} }
None => bail!("Failed to find entries for TU '{}' in map", args.unit), None => bail!("Failed to find entries for TU '{}' in map", args.unit),
@@ -111,98 +110,115 @@ fn entries(args: EntriesArgs) -> Result<()> {
fn symbol(args: SymbolArgs) -> Result<()> { fn symbol(args: SymbolArgs) -> Result<()> {
let map = map_file(&args.map_file)?; let map = map_file(&args.map_file)?;
let entries = process_map(map_reader(&map))?; let entries = process_map(map_reader(&map))?;
let mut opt_ref: Option<(SymbolRef, SymbolEntry)> = None; let opt_ref: Option<(SymbolRef, SymbolEntry)> = None;
for (symbol_ref, entry) in &entries.symbols {
if symbol_ref.name == args.symbol { _ = entries;
ensure!(opt_ref.is_none(), "Symbol '{}' found in multiple TUs", args.symbol); _ = opt_ref;
opt_ref = Some((symbol_ref.clone(), entry.clone())); // TODO
}
} // for (symbol_ref, entry) in &entries.symbols {
match opt_ref { // if symbol_ref.name == args.symbol {
Some((symbol_ref, symbol)) => { // ensure!(opt_ref.is_none(), "Symbol '{}' found in multiple TUs", args.symbol);
println!("Located symbol {}", symbol.demangled.as_ref().unwrap_or(&symbol.name)); // opt_ref = Some((symbol_ref.clone(), entry.clone()));
if let Some(vec) = entries.entry_references.get_vec(&symbol_ref) { // }
println!("\nReferences:"); // }
for x in vec { // match opt_ref {
if let Some(reference) = entries.symbols.get(x) { // Some((symbol_ref, symbol)) => {
println!( // println!("Located symbol {}", symbol.demangled.as_ref().unwrap_or(&symbol.name));
">>> {} ({:?},{:?}) [{}]", // if let Some(vec) = entries.entry_references.get_vec(&symbol_ref) {
reference.demangled.as_ref().unwrap_or(&reference.name), // println!("\nReferences:");
reference.kind, // for x in vec {
reference.visibility, // if let Some(reference) = entries.symbols.get(x) {
reference.unit.as_deref().unwrap_or("[generated]") // println!(
); // ">>> {} ({:?},{:?}) [{}]",
} else { // reference.demangled.as_ref().unwrap_or(&reference.name),
println!(">>> {} (NOT FOUND)", x.name); // reference.kind,
} // reference.visibility,
} // reference.unit.as_deref().unwrap_or("[generated]")
} // );
if let Some(vec) = entries.entry_referenced_from.get_vec(&symbol_ref) { // } else {
println!("\nReferenced from:"); // println!(">>> {} (NOT FOUND)", x.name);
for x in vec { // }
if let Some(reference) = entries.symbols.get(x) { // }
println!( // }
">>> {} ({:?}, {:?}) [{}]", // if let Some(vec) = entries.entry_referenced_from.get_vec(&symbol_ref) {
reference.demangled.as_ref().unwrap_or(&reference.name), // println!("\nReferenced from:");
reference.kind, // for x in vec {
reference.visibility, // if let Some(reference) = entries.symbols.get(x) {
reference.unit.as_deref().unwrap_or("[generated]") // println!(
); // ">>> {} ({:?}, {:?}) [{}]",
} else { // reference.demangled.as_ref().unwrap_or(&reference.name),
println!(">>> {} (NOT FOUND)", x.name); // reference.kind,
} // reference.visibility,
} // reference.unit.as_deref().unwrap_or("[generated]")
} // );
println!("\n"); // } else {
} // println!(">>> {} (NOT FOUND)", x.name);
None => bail!("Failed to find symbol '{}' in map", args.symbol), // }
} // }
// }
// println!("\n");
// }
// None => bail!("Failed to find symbol '{}' in map", args.symbol),
// }
Ok(()) Ok(())
} }
fn order(args: OrderArgs) -> Result<()> { fn order(args: OrderArgs) -> Result<()> {
let map = map_file(&args.map_file)?; let map = map_file(&args.map_file)?;
let entries = process_map(map_reader(&map))?; let entries = process_map(map_reader(&map))?;
let order = resolve_link_order(&entries.unit_order)?;
for unit in order { _ = entries;
println!("{unit}"); // TODO
}
// let order = resolve_link_order(&entries.unit_order)?;
// for unit in order {
// println!("{unit}");
// }
Ok(()) Ok(())
} }
fn slices(args: SlicesArgs) -> Result<()> { fn slices(args: SlicesArgs) -> Result<()> {
let map = map_file(&args.map_file)?; let map = map_file(&args.map_file)?;
let entries = process_map(map_reader(&map))?; let entries = process_map(map_reader(&map))?;
let order = resolve_link_order(&entries.unit_order)?;
for unit in order { _ = entries;
let unit_path = if let Some((lib, name)) = unit.split_once(' ') { // TODO
format!("{}/{}", lib.strip_suffix(".a").unwrap_or(lib), name)
} else if let Some(strip) = unit.strip_suffix(".o") { // let order = resolve_link_order(&entries.unit_order)?;
format!("{strip}.c") // for unit in order {
} else { // let unit_path = if let Some((lib, name)) = unit.split_once(' ') {
unit.clone() // format!("{}/{}", lib.strip_suffix(".a").unwrap_or(lib), name)
}; // } else if let Some(strip) = unit.strip_suffix(".o") {
println!("{unit_path}:"); // format!("{strip}.c")
// let mut ranges = Vec::<(String, Range<u32>)>::new(); // } else {
// match entries.unit_section_ranges.get(&unit) { // unit.clone()
// Some(sections) => { // };
// for (name, range) in sections { // println!("{unit_path}:");
// ranges.push((name.clone(), range.clone())); // let mut ranges = Vec::<(String, Range<u32>)>::new();
// } // match entries.unit_section_ranges.get(&unit) {
// } // Some(sections) => {
// None => bail!("Failed to locate sections for unit '{unit}'"), // for (name, range) in sections {
// } // ranges.push((name.clone(), range.clone()));
// ranges.sort_by(|(_, a), (_, b)| a.start.cmp(&b.start)); // }
// for (name, range) in ranges { // }
// println!("\t{}: [{:#010x}, {:#010x}]", name, range.start, range.end); // None => bail!("Failed to locate sections for unit '{unit}'"),
// } // }
} // ranges.sort_by(|(_, a), (_, b)| a.start.cmp(&b.start));
// for (name, range) in ranges {
// println!("\t{}: [{:#010x}, {:#010x}]", name, range.start, range.end);
// }
// }
Ok(()) Ok(())
} }
fn symbols(args: SymbolsArgs) -> Result<()> { fn symbols(args: SymbolsArgs) -> Result<()> {
let map = map_file(&args.map_file)?; let map = map_file(&args.map_file)?;
let _entries = process_map(map_reader(&map))?; let entries = process_map(map_reader(&map))?;
_ = entries;
// TODO
// for (address, symbol) in entries.address_to_symbol { // for (address, symbol) in entries.address_to_symbol {
// if symbol.name.starts_with('@') { // if symbol.name.starts_with('@') {
// continue; // continue;

@@ -12,14 +12,16 @@ use crate::{ use crate::{
analysis::{ analysis::{
cfa::AnalyzerState, cfa::AnalyzerState,
pass::{AnalysisPass, FindSaveRestSleds, FindTRKInterruptVectorTable}, pass::{AnalysisPass, FindSaveRestSleds, FindTRKInterruptVectorTable},
signatures::apply_signatures,
tracker::Tracker, tracker::Tracker,
}, },
cmd::dol::apply_signatures, array_ref_mut,
obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol, ObjSymbolKind}, obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSymbol, ObjSymbolKind},
util::{ util::{
dol::process_dol, dol::process_dol,
elf::write_elf, elf::write_elf,
nested::{NestedMap, NestedVec}, file::{map_file, map_reader, FileIterator},
nested::NestedMap,
rel::process_rel, rel::process_rel,
}, },
}; };
@@ -71,7 +73,8 @@ pub fn run(args: Args) -> Result<()> {
} }
fn info(args: InfoArgs) -> Result<()> { fn info(args: InfoArgs) -> Result<()> {
let rel = process_rel(&args.rel_file)?; let map = map_file(args.rel_file)?;
let rel = process_rel(map_reader(&map))?;
println!("Read REL module ID {}", rel.module_id); println!("Read REL module ID {}", rel.module_id);
// println!("REL: {:#?}", rel); // println!("REL: {:#?}", rel);
Ok(()) Ok(())
@@ -81,22 +84,30 @@ fn info(args: InfoArgs) -> Result<()> {
const fn align32(x: u32) -> u32 { (x + 31) & !31 } const fn align32(x: u32) -> u32 { (x + 31) & !31 }
fn merge(args: MergeArgs) -> Result<()> { fn merge(args: MergeArgs) -> Result<()> {
let mut module_map = BTreeMap::<u32, ObjInfo>::new();
log::info!("Loading {}", args.dol_file.display()); log::info!("Loading {}", args.dol_file.display());
let mut obj = process_dol(&args.dol_file)?; let mut obj = process_dol(&args.dol_file)?;
apply_signatures(&mut obj)?;
for path in &args.rel_files { log::info!("Performing signature analysis");
apply_signatures(&mut obj)?;
let Some(arena_lo) = obj.arena_lo else { bail!("Failed to locate __ArenaLo in DOL") };
let mut processed = 0;
let mut module_map = BTreeMap::<u32, ObjInfo>::new();
for result in FileIterator::new(&args.rel_files)? {
let (path, entry) = result?;
log::info!("Loading {}", path.display()); log::info!("Loading {}", path.display());
let obj = process_rel(path)?; let obj = process_rel(entry.as_reader())?;
match module_map.entry(obj.module_id) { match module_map.entry(obj.module_id) {
btree_map::Entry::Vacant(e) => e.insert(obj), btree_map::Entry::Vacant(e) => e.insert(obj),
btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id), btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),
}; };
processed += 1;
} }
log::info!("Merging {} REL(s)", processed);
let mut section_map: BTreeMap<u32, BTreeMap<u32, u32>> = BTreeMap::new(); let mut section_map: BTreeMap<u32, BTreeMap<u32, u32>> = BTreeMap::new();
let mut offset = align32(obj.arena_lo.unwrap() + 0x2000); let mut offset = align32(arena_lo + 0x2000);
for (_, module) in &module_map { for module in module_map.values() {
for mod_section in &module.sections { for mod_section in &module.sections {
let section_idx = obj.sections.len(); let section_idx = obj.sections.len();
ensure!(mod_section.relocations.is_empty(), "Unsupported relocations during merge"); ensure!(mod_section.relocations.is_empty(), "Unsupported relocations during merge");
@@ -115,9 +126,8 @@ fn merge(args: MergeArgs) -> Result<()> {
section_known: mod_section.section_known, section_known: mod_section.section_known,
}); });
section_map.nested_insert(module.module_id, mod_section.elf_index as u32, offset)?; section_map.nested_insert(module.module_id, mod_section.elf_index as u32, offset)?;
let symbols = module.symbols_for_section(mod_section.index); for (_, mod_symbol) in module.symbols.for_section(mod_section) {
for (_, mod_symbol) in symbols { obj.symbols.add_direct(ObjSymbol {
obj.symbols.push(ObjSymbol {
name: mod_symbol.name.clone(), name: mod_symbol.name.clone(),
demangled_name: mod_symbol.demangled_name.clone(), demangled_name: mod_symbol.demangled_name.clone(),
address: mod_symbol.address + offset as u64, address: mod_symbol.address + offset as u64,
@ -126,44 +136,41 @@ fn merge(args: MergeArgs) -> Result<()> {
size_known: mod_symbol.size_known, size_known: mod_symbol.size_known,
flags: mod_symbol.flags, flags: mod_symbol.flags,
kind: mod_symbol.kind, kind: mod_symbol.kind,
}); align: None,
data_kind: Default::default(),
})?;
} }
offset += align32(mod_section.size as u32); offset += align32(mod_section.size as u32);
} }
} }
let mut symbol_maps = Vec::new(); log::info!("Applying REL relocations");
for section in &obj.sections { for module in module_map.values() {
symbol_maps.push(obj.build_symbol_map(section.index)?);
}
// Apply relocations
for (_, module) in &module_map {
for rel_reloc in &module.unresolved_relocations { for rel_reloc in &module.unresolved_relocations {
let source_addr = let source_addr = (section_map[&module.module_id][&(rel_reloc.section as u32)]
section_map[&module.module_id][&(rel_reloc.section as u32)] + rel_reloc.address; + rel_reloc.address)
& !3;
let target_addr = if rel_reloc.module_id == 0 { let target_addr = if rel_reloc.module_id == 0 {
rel_reloc.addend rel_reloc.addend
} else { } else {
let base = section_map[&rel_reloc.module_id][&(rel_reloc.target_section as u32)]; let section_map = &section_map.get(&rel_reloc.module_id).with_context(|| {
let addend = rel_reloc.addend; format!("Relocation against unknown module ID {}", rel_reloc.module_id)
base + addend })?;
section_map[&(rel_reloc.target_section as u32)] + rel_reloc.addend
}; };
let source_section = obj.section_at(source_addr)?; let source_section_index = obj.section_at(source_addr)?.index;
let target_section = obj.section_at(target_addr)?; let target_section_index = obj.section_at(target_addr)?.index;
let target_section_index = target_section.index;
// Try to find a previous sized symbol that encompasses the target // Try to find a previous sized symbol that encompasses the target
let sym_map = &mut symbol_maps[target_section_index];
let target_symbol = { let target_symbol = {
let mut result = None; let mut result = None;
for (_addr, symbol_idxs) in sym_map.range(..=target_addr).rev() { for (_addr, symbol_idxs) in obj.symbols.indexes_for_range(..=target_addr).rev() {
let symbol_idx = if symbol_idxs.len() == 1 { let symbol_idx = if symbol_idxs.len() == 1 {
symbol_idxs.first().cloned().unwrap() symbol_idxs.first().cloned().unwrap()
} else { } else {
let mut symbol_idxs = symbol_idxs.clone(); let mut symbol_idxs = symbol_idxs.to_vec();
symbol_idxs.sort_by_key(|&symbol_idx| { symbol_idxs.sort_by_key(|&symbol_idx| {
let symbol = &obj.symbols[symbol_idx]; let symbol = obj.symbols.at(symbol_idx);
let mut rank = match symbol.kind { let mut rank = match symbol.kind {
ObjSymbolKind::Function | ObjSymbolKind::Object => { ObjSymbolKind::Function | ObjSymbolKind::Object => {
match rel_reloc.kind { match rel_reloc.kind {
@@ -199,7 +206,7 @@ fn merge(args: MergeArgs) -> Result<()> {
None => continue, None => continue,
} }
}; };
let symbol = &obj.symbols[symbol_idx]; let symbol = obj.symbols.at(symbol_idx);
if symbol.address == target_addr as u64 { if symbol.address == target_addr as u64 {
result = Some(symbol_idx); result = Some(symbol_idx);
break; break;
@@ -214,12 +221,11 @@ fn merge(args: MergeArgs) -> Result<()> {
result result
}; };
let (symbol_idx, addend) = if let Some(symbol_idx) = target_symbol { let (symbol_idx, addend) = if let Some(symbol_idx) = target_symbol {
let symbol = &obj.symbols[symbol_idx]; let symbol = obj.symbols.at(symbol_idx);
(symbol_idx, target_addr as i64 - symbol.address as i64) (symbol_idx, target_addr as i64 - symbol.address as i64)
} else { } else {
// Create a new label // Create a new label
let symbol_idx = obj.symbols.len(); let symbol_idx = obj.symbols.add_direct(ObjSymbol {
obj.symbols.push(ObjSymbol {
name: String::new(), name: String::new(),
demangled_name: None, demangled_name: None,
address: target_addr as u64, address: target_addr as u64,
@@ -228,11 +234,12 @@ fn merge(args: MergeArgs) -> Result<()> {
size_known: false, size_known: false,
flags: Default::default(), flags: Default::default(),
kind: Default::default(), kind: Default::default(),
}); align: None,
sym_map.nested_push(target_addr, symbol_idx); data_kind: Default::default(),
})?;
(symbol_idx, 0) (symbol_idx, 0)
}; };
obj.sections[target_section_index].relocations.push(ObjReloc { obj.sections[source_section_index].relocations.push(ObjReloc {
kind: rel_reloc.kind, kind: rel_reloc.kind,
address: source_addr as u64, address: source_addr as u64,
target_symbol: symbol_idx, target_symbol: symbol_idx,
@@ -241,29 +248,11 @@ fn merge(args: MergeArgs) -> Result<()> {
} }
} }
// Apply known functions from extab // Apply relocations to code/data for analyzer
let mut state = AnalyzerState::default(); link_relocations(&mut obj)?;
for (&addr, &size) in &obj.known_functions {
state.function_entries.insert(addr);
state.function_bounds.insert(addr, addr + size);
}
for symbol in &obj.symbols {
if symbol.kind != ObjSymbolKind::Function {
continue;
}
state.function_entries.insert(symbol.address as u32);
if !symbol.size_known {
continue;
}
state.function_bounds.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
}
// Also check the start of each code section
for section in &obj.sections {
if section.kind == ObjSectionKind::Code {
state.function_entries.insert(section.address as u32);
}
}
log::info!("Detecting function boundaries");
let mut state = AnalyzerState::default();
state.detect_functions(&obj)?; state.detect_functions(&obj)?;
log::info!("Discovered {} functions", state.function_slices.len()); log::info!("Discovered {} functions", state.function_slices.len());
@@ -281,8 +270,57 @@ fn merge(args: MergeArgs) -> Result<()> {
// Write ELF // Write ELF
let mut file = File::create(&args.out_file) let mut file = File::create(&args.out_file)
.with_context(|| format!("Failed to create '{}'", args.out_file.display()))?; .with_context(|| format!("Failed to create '{}'", args.out_file.display()))?;
log::info!("Writing {}", args.out_file.display());
let out_object = write_elf(&obj)?; let out_object = write_elf(&obj)?;
file.write_all(&out_object)?; file.write_all(&out_object)?;
file.flush()?; file.flush()?;
Ok(()) Ok(())
} }
fn link_relocations(obj: &mut ObjInfo) -> Result<()> {
for section in &mut obj.sections {
for reloc in &section.relocations {
let source_address = reloc.address /*& !3*/;
let target_address =
(obj.symbols.address_of(reloc.target_symbol) as i64 + reloc.addend) as u32;
let ins_ref =
array_ref_mut!(section.data, (source_address - section.address) as usize, 4);
let mut ins = u32::from_be_bytes(*ins_ref);
match reloc.kind {
ObjRelocKind::Absolute => {
ins = target_address;
}
ObjRelocKind::PpcAddr16Hi => {
ins = (ins & 0xffff0000) | ((target_address >> 16) & 0xffff);
}
ObjRelocKind::PpcAddr16Ha => {
ins = (ins & 0xffff0000) | (((target_address + 0x8000) >> 16) & 0xffff);
}
ObjRelocKind::PpcAddr16Lo => {
ins = (ins & 0xffff0000) | (target_address & 0xffff);
}
ObjRelocKind::PpcRel24 => {
let diff = target_address as i32 - source_address as i32;
ensure!(
(-0x2000000..0x2000000).contains(&diff),
"R_PPC_REL24 relocation out of range"
);
ins = (ins & !0x3fffffc) | (diff as u32 & 0x3fffffc);
}
ObjRelocKind::PpcRel14 => {
let diff = target_address as i32 - source_address as i32;
ensure!(
(-0x2000..0x2000).contains(&diff),
"R_PPC_REL14 relocation out of range"
);
ins = (ins & !0xfffc) | (diff as u32 & 0xfffc);
}
ObjRelocKind::PpcEmbSda21 => {
// Unused in RELs
}
};
*ins_ref = ins.to_be_bytes();
}
}
Ok(())
}
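The half-word cases in `link_relocations` above are easier to follow with a concrete value; the snippet below only illustrates that arithmetic (my addition, not code from this commit):

// Illustrative only: how PpcAddr16Ha/PpcAddr16Lo split a 32-bit address so that
// `lis` plus a signed 16-bit low half reproduce it. The `+ 0x8000` rounds the high
// half up whenever the low half would be negative as a signed 16-bit value.
fn ha_lo(target: u32) -> (u32, u32) {
    let lo = target & 0xffff;
    let ha = (target.wrapping_add(0x8000) >> 16) & 0xffff;
    (ha, lo)
}

fn main() {
    let target = 0x8000_9abc_u32;
    let (ha, lo) = ha_lo(target);
    // lis rX, ha ; then the low half is applied as a signed 16-bit immediate:
    let rebuilt = (ha << 16).wrapping_add(lo as u16 as i16 as i32 as u32);
    assert_eq!(rebuilt, target);
}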

@@ -35,7 +35,7 @@ pub fn run(args: Args) -> Result<()> {
} }
fn info(args: InfoArgs) -> Result<()> { fn info(args: InfoArgs) -> Result<()> {
let rso = process_rso(&args.rso_file)?; let rso = process_rso(args.rso_file)?;
println!("Read RSO module {}", rso.name); println!("Read RSO module {}", rso.name);
Ok(()) Ok(())
} }

@@ -9,6 +9,8 @@ use argh::FromArgs;
use filetime::{set_file_mtime, FileTime}; use filetime::{set_file_mtime, FileTime};
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use crate::util::file::process_rsp;
#[derive(FromArgs, PartialEq, Eq, Debug)] #[derive(FromArgs, PartialEq, Eq, Debug)]
/// Print or check SHA1 (160-bit) checksums. /// Print or check SHA1 (160-bit) checksums.
#[argh(subcommand, name = "shasum")] #[argh(subcommand, name = "shasum")]
@@ -17,8 +19,8 @@ pub struct Args {
/// check SHA sums against given list /// check SHA sums against given list
check: bool, check: bool,
#[argh(positional)] #[argh(positional)]
/// path to file /// path to input file(s)
file: PathBuf, files: Vec<PathBuf>,
#[argh(option, short = 'o')] #[argh(option, short = 'o')]
/// touch output file on successful check /// touch output file on successful check
output: Option<PathBuf>, output: Option<PathBuf>,
@@ -27,16 +29,23 @@ pub struct Args {
const DEFAULT_BUF_SIZE: usize = 8192; const DEFAULT_BUF_SIZE: usize = 8192;
pub fn run(args: Args) -> Result<()> { pub fn run(args: Args) -> Result<()> {
let file = File::open(&args.file) for path in process_rsp(&args.files)? {
.with_context(|| format!("Failed to open file '{}'", args.file.display()))?; let file = File::open(&path)
if args.check { .with_context(|| format!("Failed to open file '{}'", path.display()))?;
check(args, file) if args.check {
} else { check(file)?
hash(args, file) } else {
hash(file, &path)?
}
} }
if let Some(out_path) = args.output {
touch(&out_path)
.with_context(|| format!("Failed to touch output file '{}'", out_path.display()))?;
}
Ok(())
} }
fn check(args: Args, file: File) -> Result<()> { fn check(file: File) -> Result<()> {
let reader = BufReader::new(file); let reader = BufReader::new(file);
let mut mismatches = 0usize; let mut mismatches = 0usize;
for line in reader.lines() { for line in reader.lines() {
@@ -68,19 +77,15 @@ fn check(args: Args, file: File) -> Result<()> {
eprintln!("WARNING: {mismatches} computed checksum did NOT match"); eprintln!("WARNING: {mismatches} computed checksum did NOT match");
std::process::exit(1); std::process::exit(1);
} }
if let Some(out_path) = args.output {
touch(&out_path)
.with_context(|| format!("Failed to touch output file '{}'", out_path.display()))?;
}
Ok(()) Ok(())
} }
fn hash(args: Args, file: File) -> Result<()> { fn hash(file: File, path: &Path) -> Result<()> {
let hash = file_sha1(file)?; let hash = file_sha1(file)?;
let mut hash_buf = [0u8; 40]; let mut hash_buf = [0u8; 40];
let hash_str = base16ct::lower::encode_str(&hash, &mut hash_buf) let hash_str = base16ct::lower::encode_str(&hash, &mut hash_buf)
.map_err(|e| anyhow!("Failed to encode hash: {e}"))?; .map_err(|e| anyhow!("Failed to encode hash: {e}"))?;
println!("{} {}", hash_str, args.file.display()); println!("{} {}", hash_str, path.display());
Ok(()) Ok(())
} }
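`file_sha1` is called here but defined elsewhere in the crate; a minimal sketch of such a helper (an assumption about its shape, using the same `sha1` crate) might look like:

// Hypothetical sketch, not part of this commit: stream a file through SHA-1
// in fixed-size chunks instead of reading it into memory at once.
use std::{fs::File, io::Read};
use anyhow::Result;
use sha1::{Digest, Sha1};

fn file_sha1(mut file: File) -> Result<Vec<u8>> {
    let mut buf = [0u8; 8192]; // mirrors DEFAULT_BUF_SIZE above
    let mut hasher = Sha1::new();
    loop {
        let read = file.read(&mut buf)?;
        if read == 0 {
            break;
        }
        hasher.update(&buf[..read]);
    }
    Ok(hasher.finalize().to_vec())
}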

@@ -1,3 +1,6 @@
#![feature(seek_stream_len)]
use std::io::Write;
use argh::FromArgs; use argh::FromArgs;
pub mod analysis; pub mod analysis;
@@ -22,7 +25,7 @@ enum SubCommand {
Dwarf(cmd::dwarf::Args), Dwarf(cmd::dwarf::Args),
Elf(cmd::elf::Args), Elf(cmd::elf::Args),
Elf2Dol(cmd::elf2dol::Args), Elf2Dol(cmd::elf2dol::Args),
Map(cmd::map::Args), // Map(cmd::map::Args),
MetroidBuildInfo(cmd::metroidbuildinfo::Args), MetroidBuildInfo(cmd::metroidbuildinfo::Args),
Rel(cmd::rel::Args), Rel(cmd::rel::Args),
Rso(cmd::rso::Args), Rso(cmd::rso::Args),
@@ -30,7 +33,9 @@ enum SubCommand {
} }
fn main() { fn main() {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init(); env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
.format(|f, r| writeln!(f, "[{}] {}", r.level(), r.args()))
.init();
let args: TopLevel = argh_version::from_env(); let args: TopLevel = argh_version::from_env();
let result = match args.command { let result = match args.command {
@@ -40,7 +45,7 @@ fn main() {
SubCommand::Dwarf(c_args) => cmd::dwarf::run(c_args), SubCommand::Dwarf(c_args) => cmd::dwarf::run(c_args),
SubCommand::Elf(c_args) => cmd::elf::run(c_args), SubCommand::Elf(c_args) => cmd::elf::run(c_args),
SubCommand::Elf2Dol(c_args) => cmd::elf2dol::run(c_args), SubCommand::Elf2Dol(c_args) => cmd::elf2dol::run(c_args),
SubCommand::Map(c_args) => cmd::map::run(c_args), // SubCommand::Map(c_args) => cmd::map::run(c_args),
SubCommand::MetroidBuildInfo(c_args) => cmd::metroidbuildinfo::run(c_args), SubCommand::MetroidBuildInfo(c_args) => cmd::metroidbuildinfo::run(c_args),
SubCommand::Rel(c_args) => cmd::rel::run(c_args), SubCommand::Rel(c_args) => cmd::rel::run(c_args),
SubCommand::Rso(c_args) => cmd::rso::run(c_args), SubCommand::Rso(c_args) => cmd::rso::run(c_args),

@@ -3,16 +3,17 @@ pub mod split;
use std::{ use std::{
cmp::min, cmp::min,
collections::{btree_map, BTreeMap}, collections::{btree_map, BTreeMap, HashMap},
hash::{Hash, Hasher}, hash::{Hash, Hasher},
ops::{Range, RangeBounds},
}; };
use anyhow::{anyhow, bail, Result}; use anyhow::{anyhow, bail, ensure, Result};
use flagset::{flags, FlagSet}; use flagset::{flags, FlagSet};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr}; use serde_repr::{Deserialize_repr, Serialize_repr};
use crate::util::{nested::NestedVec, rel::RelReloc}; use crate::util::{comment::MWComment, nested::NestedVec, rel::RelReloc};
flags! { flags! {
#[repr(u8)] #[repr(u8)]
@@ -23,14 +24,18 @@ flags! {
Weak, Weak,
Common, Common,
Hidden, Hidden,
ForceActive,
} }
} }
#[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Serialize, Deserialize)] #[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct ObjSymbolFlagSet(pub FlagSet<ObjSymbolFlags>); pub struct ObjSymbolFlagSet(pub FlagSet<ObjSymbolFlags>);
#[allow(clippy::derive_hash_xor_eq)]
#[allow(clippy::derived_hash_with_manual_eq)]
impl Hash for ObjSymbolFlagSet { impl Hash for ObjSymbolFlagSet {
fn hash<H: Hasher>(&self, state: &mut H) { self.0.bits().hash(state) } fn hash<H: Hasher>(&self, state: &mut H) { self.0.bits().hash(state) }
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ObjSectionKind { pub enum ObjSectionKind {
Code, Code,
@@ -38,6 +43,7 @@ pub enum ObjSectionKind {
ReadOnlyData, ReadOnlyData,
Bss, Bss,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ObjSection { pub struct ObjSection {
pub name: String, pub name: String,
@@ -54,6 +60,7 @@ pub struct ObjSection {
pub file_offset: u64, pub file_offset: u64,
pub section_known: bool, pub section_known: bool,
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Default, Serialize, Deserialize)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Default, Serialize, Deserialize)]
pub enum ObjSymbolKind { pub enum ObjSymbolKind {
#[default] #[default]
@@ -62,7 +69,24 @@ pub enum ObjSymbolKind {
Object, Object,
Section, Section,
} }
#[derive(Debug, Clone, Default)]
#[derive(Debug, Copy, Clone, Default, PartialEq, Eq)]
pub enum ObjDataKind {
#[default]
Unknown,
Byte,
Byte2,
Byte4,
Byte8,
Float,
Double,
String,
String16,
StringTable,
String16Table,
}
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct ObjSymbol { pub struct ObjSymbol {
pub name: String, pub name: String,
pub demangled_name: Option<String>, pub demangled_name: Option<String>,
@@ -72,7 +96,10 @@ pub struct ObjSymbol {
pub size_known: bool, pub size_known: bool,
pub flags: ObjSymbolFlagSet, pub flags: ObjSymbolFlagSet,
pub kind: ObjSymbolKind, pub kind: ObjSymbolKind,
pub align: Option<u32>,
pub data_kind: ObjDataKind,
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ObjKind { pub enum ObjKind {
/// Fully linked object /// Fully linked object
@@ -80,18 +107,38 @@ pub enum ObjKind {
/// Relocatable object /// Relocatable object
Relocatable, Relocatable,
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ObjArchitecture { pub enum ObjArchitecture {
PowerPc, PowerPc,
} }
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ObjSplit {
pub unit: String,
pub end: u32,
pub align: Option<u32>,
pub common: bool,
}
type SymbolIndex = usize;
#[derive(Debug, Clone)]
pub struct ObjSymbols {
symbols: Vec<ObjSymbol>,
symbols_by_address: BTreeMap<u32, Vec<SymbolIndex>>,
symbols_by_name: HashMap<String, Vec<SymbolIndex>>,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ObjInfo { pub struct ObjInfo {
pub kind: ObjKind, pub kind: ObjKind,
pub architecture: ObjArchitecture, pub architecture: ObjArchitecture,
pub name: String, pub name: String,
pub symbols: Vec<ObjSymbol>, pub symbols: ObjSymbols,
pub sections: Vec<ObjSection>, pub sections: Vec<ObjSection>,
pub entry: u64, pub entry: u64,
pub mw_comment: MWComment,
// Linker generated // Linker generated
pub sda2_base: Option<u32>, pub sda2_base: Option<u32>,
@@ -103,9 +150,10 @@ pub struct ObjInfo {
pub arena_hi: Option<u32>, pub arena_hi: Option<u32>,
// Extracted // Extracted
pub splits: BTreeMap<u32, Vec<String>>, pub splits: BTreeMap<u32, Vec<ObjSplit>>,
pub named_sections: BTreeMap<u32, String>, pub named_sections: BTreeMap<u32, String>,
pub link_order: Vec<String>, pub link_order: Vec<String>,
pub blocked_ranges: BTreeMap<u32, u32>, // start -> end
// From extab // From extab
pub known_functions: BTreeMap<u32, u32>, pub known_functions: BTreeMap<u32, u32>,
@@ -115,6 +163,7 @@ pub struct ObjInfo {
pub module_id: u32, pub module_id: u32,
pub unresolved_relocations: Vec<RelReloc>, pub unresolved_relocations: Vec<RelReloc>,
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum ObjRelocKind { pub enum ObjRelocKind {
Absolute, Absolute,
@@ -125,42 +174,289 @@ pub enum ObjRelocKind {
PpcRel14, PpcRel14,
PpcEmbSda21, PpcEmbSda21,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ObjReloc { pub struct ObjReloc {
pub kind: ObjRelocKind, pub kind: ObjRelocKind,
pub address: u64, pub address: u64,
pub target_symbol: usize, pub target_symbol: SymbolIndex,
pub addend: i64, pub addend: i64,
} }
impl ObjInfo { impl ObjSymbols {
pub fn symbols_for_section( pub fn new(symbols: Vec<ObjSymbol>) -> Self {
&self, let mut symbols_by_address = BTreeMap::<u32, Vec<SymbolIndex>>::new();
section_idx: usize, let mut symbols_by_name = HashMap::<String, Vec<SymbolIndex>>::new();
) -> impl Iterator<Item = (usize, &ObjSymbol)> { for (idx, symbol) in symbols.iter().enumerate() {
self.symbols symbols_by_address.nested_push(symbol.address as u32, idx);
.iter() if !symbol.name.is_empty() {
.enumerate() symbols_by_name.nested_push(symbol.name.clone(), idx);
.filter(move |&(_, symbol)| symbol.section == Some(section_idx)) }
}
Self { symbols, symbols_by_address, symbols_by_name }
} }
pub fn build_symbol_map(&self, section_idx: usize) -> Result<BTreeMap<u32, Vec<usize>>> { pub fn add(&mut self, in_symbol: ObjSymbol, replace: bool) -> Result<SymbolIndex> {
let mut symbols = BTreeMap::<u32, Vec<usize>>::new(); let opt = self.at_address(in_symbol.address as u32).find(|(_, symbol)| {
for (symbol_idx, symbol) in self.symbols_for_section(section_idx) { (symbol.kind == in_symbol.kind ||
symbols.nested_push(symbol.address as u32, symbol_idx); // Replace lbl_* with real symbols
(symbol.kind == ObjSymbolKind::Unknown && symbol.name.starts_with("lbl_")))
// Hack to avoid replacing different ABS symbols
&& (symbol.section.is_some() || symbol.name == in_symbol.name)
});
let target_symbol_idx = if let Some((symbol_idx, existing)) = opt {
let size =
if existing.size_known && in_symbol.size_known && existing.size != in_symbol.size {
log::warn!(
"Conflicting size for {}: was {:#X}, now {:#X}",
existing.name,
existing.size,
in_symbol.size
);
if replace {
in_symbol.size
} else {
existing.size
}
} else if in_symbol.size_known {
in_symbol.size
} else {
existing.size
};
if !replace {
// Not replacing existing symbol, but update size
if in_symbol.size_known && !existing.size_known {
self.replace(symbol_idx, ObjSymbol {
size: in_symbol.size,
size_known: true,
..existing.clone()
})?;
}
return Ok(symbol_idx);
}
let new_symbol = ObjSymbol {
name: in_symbol.name,
demangled_name: in_symbol.demangled_name,
address: in_symbol.address,
section: in_symbol.section,
size,
size_known: existing.size_known || in_symbol.size != 0,
flags: in_symbol.flags,
kind: in_symbol.kind,
align: in_symbol.align.or(existing.align),
data_kind: match in_symbol.data_kind {
ObjDataKind::Unknown => existing.data_kind,
kind => kind,
},
};
if existing != &new_symbol {
log::debug!("Replacing {:?} with {:?}", existing, new_symbol);
self.replace(symbol_idx, new_symbol)?;
}
symbol_idx
} else {
let target_symbol_idx = self.symbols.len();
self.add_direct(ObjSymbol {
name: in_symbol.name,
demangled_name: in_symbol.demangled_name,
address: in_symbol.address,
section: in_symbol.section,
size: in_symbol.size,
size_known: in_symbol.size != 0,
flags: in_symbol.flags,
kind: in_symbol.kind,
align: in_symbol.align,
data_kind: in_symbol.data_kind,
})?;
target_symbol_idx
};
Ok(target_symbol_idx)
}
pub fn add_direct(&mut self, in_symbol: ObjSymbol) -> Result<SymbolIndex> {
let symbol_idx = self.symbols.len();
self.symbols_by_address.nested_push(in_symbol.address as u32, symbol_idx);
if !in_symbol.name.is_empty() {
self.symbols_by_name.nested_push(in_symbol.name.clone(), symbol_idx);
} }
Ok(symbols) self.symbols.push(in_symbol);
Ok(symbol_idx)
}
pub fn at(&self, symbol_idx: SymbolIndex) -> &ObjSymbol { &self.symbols[symbol_idx] }
pub fn address_of(&self, symbol_idx: SymbolIndex) -> u64 { self.symbols[symbol_idx].address }
pub fn iter(&self) -> impl DoubleEndedIterator<Item = &ObjSymbol> { self.symbols.iter() }
pub fn count(&self) -> usize { self.symbols.len() }
pub fn at_address(
&self,
addr: u32,
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
self.symbols_by_address
.get(&addr)
.into_iter()
.flatten()
.map(move |&idx| (idx, &self.symbols[idx]))
}
pub fn kind_at_address(
&self,
addr: u32,
kind: ObjSymbolKind,
) -> Result<Option<(SymbolIndex, &ObjSymbol)>> {
let (count, result) = self
.at_address(addr)
.filter(|(_, sym)| sym.kind == kind)
.fold((0, None), |(i, _), v| (i + 1, Some(v)));
ensure!(count <= 1, "Multiple symbols of kind {:?} at address {:#010X}", kind, addr);
Ok(result)
}
pub fn for_range<R>(
&self,
range: R,
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)>
where
R: RangeBounds<u32>,
{
self.symbols_by_address
.range(range)
.flat_map(move |(_, v)| v.iter().map(move |u| (*u, &self.symbols[*u])))
}
pub fn indexes_for_range<R>(
&self,
range: R,
) -> impl DoubleEndedIterator<Item = (u32, &[SymbolIndex])>
where
R: RangeBounds<u32>,
{
self.symbols_by_address.range(range).map(|(k, v)| (*k, v.as_ref()))
}
pub fn for_section(
&self,
section: &ObjSection,
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
let section_index = section.index;
self.for_range(section.address as u32..(section.address + section.size) as u32)
// TODO required?
.filter(move |(_, symbol)| symbol.section == Some(section_index))
}
pub fn for_name(
&self,
name: &str,
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
self.symbols_by_name
.get(name)
.into_iter()
.flat_map(move |v| v.iter().map(move |u| (*u, &self.symbols[*u])))
}
pub fn by_name(&self, name: &str) -> Result<Option<(SymbolIndex, &ObjSymbol)>> {
let mut iter = self.for_name(name);
let result = iter.next();
if let Some((index, symbol)) = result {
if let Some((other_index, other_symbol)) = iter.next() {
bail!(
"Multiple symbols with name {}: {} {:?} {:#010X} and {} {:?} {:#010X}",
name,
index,
symbol.kind,
symbol.address,
other_index,
other_symbol.kind,
other_symbol.address
);
}
}
Ok(result)
}
pub fn by_kind(&self, kind: ObjSymbolKind) -> impl Iterator<Item = (SymbolIndex, &ObjSymbol)> {
self.symbols.iter().enumerate().filter(move |(_, sym)| sym.kind == kind)
}
pub fn replace(&mut self, index: SymbolIndex, symbol: ObjSymbol) -> Result<()> {
let symbol_ref = &mut self.symbols[index];
ensure!(symbol_ref.address == symbol.address, "Can't modify address with replace_symbol");
if symbol_ref.name != symbol.name {
if !symbol_ref.name.is_empty() {
self.symbols_by_name.nested_remove(&symbol_ref.name, &index);
}
if !symbol.name.is_empty() {
self.symbols_by_name.nested_push(symbol.name.clone(), index);
}
}
*symbol_ref = symbol;
Ok(())
}
}
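
The new ObjSymbols container above keeps the flat symbol list but maintains two lookup indexes, by address (BTreeMap) and by name (HashMap), which add_direct and replace keep in sync. A minimal usage sketch of that API, assuming crate-internal access to these types (the address, size, and symbol name are invented):

    use anyhow::Result;

    fn symbols_demo() -> Result<()> {
        let mut symbols = ObjSymbols::new(vec![]);

        // Inserting a symbol also updates the by-address and by-name indexes.
        let idx = symbols.add_direct(ObjSymbol {
            name: "main".to_string(),
            address: 0x8000_3100,
            size: 0x120,
            size_known: true,
            kind: ObjSymbolKind::Function,
            ..Default::default()
        })?;

        // Lookup by address or by name without scanning the whole list.
        assert!(symbols.at_address(0x8000_3100).any(|(i, _)| i == idx));
        assert!(symbols.by_name("main")?.is_some());

        // kind_at_address additionally enforces that at most one symbol of the
        // requested kind lives at that address.
        let (_, sym) = symbols.kind_at_address(0x8000_3100, ObjSymbolKind::Function)?.unwrap();
        assert_eq!(sym.size, 0x120);
        Ok(())
    }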
impl ObjInfo {
pub fn new(
kind: ObjKind,
architecture: ObjArchitecture,
name: String,
symbols: Vec<ObjSymbol>,
sections: Vec<ObjSection>,
) -> Self {
Self {
kind,
architecture,
name,
symbols: ObjSymbols::new(symbols),
sections,
entry: 0,
mw_comment: Default::default(),
sda2_base: None,
sda_base: None,
stack_address: None,
stack_end: None,
db_stack_addr: None,
arena_lo: None,
arena_hi: None,
splits: Default::default(),
named_sections: Default::default(),
link_order: vec![],
blocked_ranges: Default::default(),
known_functions: Default::default(),
module_id: 0,
unresolved_relocations: vec![],
}
}
pub fn add_symbol(&mut self, in_symbol: ObjSymbol, replace: bool) -> Result<SymbolIndex> {
match in_symbol.name.as_str() {
"_SDA_BASE_" => self.sda_base = Some(in_symbol.address as u32),
"_SDA2_BASE_" => self.sda2_base = Some(in_symbol.address as u32),
"_stack_addr" => self.stack_address = Some(in_symbol.address as u32),
"_stack_end" => self.stack_end = Some(in_symbol.address as u32),
"_db_stack_addr" => self.db_stack_addr = Some(in_symbol.address as u32),
"__ArenaLo" => self.arena_lo = Some(in_symbol.address as u32),
"__ArenaHi" => self.arena_hi = Some(in_symbol.address as u32),
_ => {}
}
self.symbols.add(in_symbol, replace)
} }
pub fn section_at(&self, addr: u32) -> Result<&ObjSection> { pub fn section_at(&self, addr: u32) -> Result<&ObjSection> {
self.sections self.sections
.iter() .iter()
.find(|&section| { .find(|s| s.contains(addr))
(addr as u64) >= section.address && (addr as u64) < section.address + section.size
})
.ok_or_else(|| anyhow!("Failed to locate section @ {:#010X}", addr)) .ok_or_else(|| anyhow!("Failed to locate section @ {:#010X}", addr))
} }
pub fn section_for(&self, range: Range<u32>) -> Result<&ObjSection> {
self.sections.iter().find(|s| s.contains_range(range.clone())).ok_or_else(|| {
anyhow!("Failed to locate section @ {:#010X}-{:#010X}", range.start, range.end)
})
}
pub fn section_data(&self, start: u32, end: u32) -> Result<(&ObjSection, &[u8])> { pub fn section_data(&self, start: u32, end: u32) -> Result<(&ObjSection, &[u8])> {
let section = self.section_at(start)?; let section = self.section_at(start)?;
let data = if end == 0 { let data = if end == 0 {
@@ -171,20 +467,76 @@ impl ObjInfo {
}; };
Ok((section, data)) Ok((section, data))
} }
/// Locate an existing split for the given address.
pub fn split_for(&self, address: u32) -> Option<(u32, &ObjSplit)> {
match self.splits_for_range(..=address).last() {
Some((addr, split)) if split.end == 0 || split.end > address => Some((addr, split)),
_ => None,
}
}
/// Locate existing splits within the given address range.
pub fn splits_for_range<R>(&self, range: R) -> impl Iterator<Item = (u32, &ObjSplit)>
where R: RangeBounds<u32> {
self.splits.range(range).flat_map(|(addr, v)| v.iter().map(move |u| (*addr, u)))
}
pub fn add_split(&mut self, address: u32, split: ObjSplit) {
log::debug!("Adding split @ {:#010X}: {:?}", address, split);
// TODO merge with preceding split if possible
self.splits.entry(address).or_default().push(split);
}
} }
impl ObjSection { impl ObjSection {
pub fn build_relocation_map(&self) -> Result<BTreeMap<u32, ObjReloc>> { pub fn build_relocation_map(&self) -> Result<BTreeMap<u32, usize>> {
let mut relocations = BTreeMap::<u32, ObjReloc>::new(); let mut relocations = BTreeMap::new();
for reloc in &self.relocations { for (idx, reloc) in self.relocations.iter().enumerate() {
let address = reloc.address as u32; let address = reloc.address as u32;
match relocations.entry(address) { match relocations.entry(address) {
btree_map::Entry::Vacant(e) => { btree_map::Entry::Vacant(e) => {
e.insert(reloc.clone()); e.insert(idx);
} }
btree_map::Entry::Occupied(_) => bail!("Duplicate relocation @ {address:#010X}"), btree_map::Entry::Occupied(_) => bail!("Duplicate relocation @ {address:#010X}"),
} }
} }
Ok(relocations) Ok(relocations)
} }
pub fn build_relocation_map_cloned(&self) -> Result<BTreeMap<u32, ObjReloc>> {
let mut relocations = BTreeMap::new();
for reloc in self.relocations.iter().cloned() {
let address = reloc.address as u32;
match relocations.entry(address) {
btree_map::Entry::Vacant(e) => {
e.insert(reloc);
}
btree_map::Entry::Occupied(_) => bail!("Duplicate relocation @ {address:#010X}"),
}
}
Ok(relocations)
}
#[inline]
pub fn contains(&self, addr: u32) -> bool {
(self.address..self.address + self.size).contains(&(addr as u64))
}
#[inline]
pub fn contains_range(&self, range: Range<u32>) -> bool {
(range.start as u64) >= self.address && (range.end as u64) <= self.address + self.size
}
}
pub fn section_kind_for_section(section_name: &str) -> Result<ObjSectionKind> {
Ok(match section_name {
".init" | ".text" | ".dbgtext" | ".vmtext" => ObjSectionKind::Code,
".ctors" | ".dtors" | ".rodata" | ".sdata2" | "extab" | "extabindex" => {
ObjSectionKind::ReadOnlyData
}
".bss" | ".sbss" | ".sbss2" => ObjSectionKind::Bss,
".data" | ".sdata" => ObjSectionKind::Data,
name => bail!("Unknown section {name}"),
})
} }
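
ObjInfo now records splits as a BTreeMap<u32, Vec<ObjSplit>> behind add_split, split_for, and splits_for_range; split_for returns the closest preceding split whose end is either unknown (0) or past the queried address. A small sketch of how those helpers behave, assuming crate-internal access (the addresses and unit name are invented):

    fn splits_demo(obj: &mut ObjInfo) {
        // Register a split covering 0x80003100..0x80003240 for a hypothetical unit.
        obj.add_split(0x8000_3100, ObjSplit {
            unit: "runtime/mem.c".to_string(),
            end: 0x8000_3240,
            align: None,
            common: false,
        });

        // Any address inside the range resolves to that split...
        assert!(matches!(obj.split_for(0x8000_3200), Some((0x8000_3100, s)) if s.unit == "runtime/mem.c"));
        // ...while an address at or past `end` does not.
        assert!(obj.split_for(0x8000_3240).is_none());

        // Range queries yield (start address, split) pairs in address order.
        for (addr, split) in obj.splits_for_range(0x8000_0000..0x8100_0000) {
            println!("{:#010X} -> {} (end {:#010X})", addr, split.unit, split.end);
        }
    }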

View File

@@ -13,7 +13,8 @@ use crate::{
analysis::tracker::{Relocation, Tracker}, analysis::tracker::{Relocation, Tracker},
array_ref, array_ref,
obj::{ obj::{
ObjInfo, ObjReloc, ObjRelocKind, ObjSectionKind, ObjSymbol, ObjSymbolFlagSet, ObjSymbolKind, section_kind_for_section, ObjInfo, ObjReloc, ObjRelocKind, ObjSymbol, ObjSymbolFlagSet,
ObjSymbolKind,
}, },
util::elf::process_elf, util::elf::process_elf,
}; };
@@ -112,15 +113,7 @@ pub fn apply_symbol(obj: &mut ObjInfo, target: u32, sig_symbol: &OutSymbol) -> R
if !target_section.section_known { if !target_section.section_known {
if let Some(section_name) = &sig_symbol.section { if let Some(section_name) = &sig_symbol.section {
target_section.name = section_name.clone(); target_section.name = section_name.clone();
target_section.kind = match section_name.as_str() { target_section.kind = section_kind_for_section(section_name)?;
".init" | ".text" | ".dbgtext" => ObjSectionKind::Code,
".ctors" | ".dtors" | ".rodata" | ".sdata2" | "extab" | "extabindex" => {
ObjSectionKind::ReadOnlyData
}
".bss" | ".sbss" | ".sbss2" => ObjSectionKind::Bss,
".data" | ".sdata" => ObjSectionKind::Data,
name => bail!("Unknown section {name}"),
};
target_section.section_known = true; target_section.section_known = true;
} }
} }
@@ -131,49 +124,22 @@ pub fn apply_symbol(obj: &mut ObjInfo, target: u32, sig_symbol: &OutSymbol) -> R
// Hack to mark linker generated symbols as ABS // Hack to mark linker generated symbols as ABS
target_section_index = None; target_section_index = None;
} }
let target_symbol_idx = if let Some((symbol_idx, existing)) = let demangled_name = demangle(&sig_symbol.name, &DemangleOptions::default());
obj.symbols.iter_mut().enumerate().find(|(_, symbol)| { let target_symbol_idx = obj.add_symbol(
symbol.address == target as u64 ObjSymbol {
&& symbol.kind == sig_symbol.kind
// Hack to avoid replacing different ABS symbols
&& (symbol.section.is_some() || symbol.name == sig_symbol.name)
}) {
log::debug!("Replacing {:?} with {:?}", existing, sig_symbol);
*existing = ObjSymbol {
name: sig_symbol.name.clone(), name: sig_symbol.name.clone(),
demangled_name: demangle(&sig_symbol.name, &DemangleOptions::default()), demangled_name,
address: target as u64,
section: target_section_index,
size: if sig_symbol.size == 0 { existing.size } else { sig_symbol.size as u64 },
size_known: existing.size_known || sig_symbol.size != 0,
flags: sig_symbol.flags,
kind: sig_symbol.kind,
};
symbol_idx
} else {
let target_symbol_idx = obj.symbols.len();
obj.symbols.push(ObjSymbol {
name: sig_symbol.name.clone(),
demangled_name: demangle(&sig_symbol.name, &DemangleOptions::default()),
address: target as u64, address: target as u64,
section: target_section_index, section: target_section_index,
size: sig_symbol.size as u64, size: sig_symbol.size as u64,
size_known: sig_symbol.size != 0, size_known: sig_symbol.size > 0 || sig_symbol.kind == ObjSymbolKind::Unknown,
flags: sig_symbol.flags, flags: sig_symbol.flags,
kind: sig_symbol.kind, kind: sig_symbol.kind,
}); align: None,
target_symbol_idx data_kind: Default::default(),
}; },
match sig_symbol.name.as_str() { true,
"_SDA_BASE_" => obj.sda_base = Some(target), )?;
"_SDA2_BASE_" => obj.sda2_base = Some(target),
"_stack_addr" => obj.stack_address = Some(target),
"_stack_end" => obj.stack_end = Some(target),
"_db_stack_addr" => obj.db_stack_addr = Some(target),
"__ArenaLo" => obj.arena_lo = Some(target),
"__ArenaHi" => obj.arena_hi = Some(target),
_ => {}
}
Ok(target_symbol_idx) Ok(target_symbol_idx)
} }
@@ -185,7 +151,7 @@ pub fn apply_signature(obj: &mut ObjInfo, addr: u32, signature: &FunctionSignatu
for reloc in &signature.relocations { for reloc in &signature.relocations {
tracker.known_relocations.insert(addr + reloc.offset); tracker.known_relocations.insert(addr + reloc.offset);
} }
tracker.process_function(obj, &obj.symbols[symbol_idx])?; tracker.process_function(obj, obj.symbols.at(symbol_idx))?;
for (&reloc_addr, reloc) in &tracker.relocations { for (&reloc_addr, reloc) in &tracker.relocations {
if reloc_addr < addr || reloc_addr >= addr + in_symbol.size { if reloc_addr < addr || reloc_addr >= addr + in_symbol.size {
continue; continue;
@@ -293,26 +259,20 @@ pub fn generate_signature(path: &Path, symbol_name: &str) -> Result<Option<Funct
} }
let mut tracker = Tracker::new(&obj); let mut tracker = Tracker::new(&obj);
// tracker.ignore_addresses.insert(0x80004000); // tracker.ignore_addresses.insert(0x80004000);
for symbol in &obj.symbols { for (_, symbol) in obj.symbols.by_kind(ObjSymbolKind::Function) {
if symbol.kind != ObjSymbolKind::Function {
continue;
}
if symbol.name != symbol_name && symbol.name != symbol_name.replace("TRK", "TRK_") { if symbol.name != symbol_name && symbol.name != symbol_name.replace("TRK", "TRK_") {
continue; continue;
} }
tracker.process_function(&obj, symbol)?; tracker.process_function(&obj, symbol)?;
} }
tracker.apply(&mut obj, true)?; // true tracker.apply(&mut obj, true)?; // true
for symbol in &obj.symbols { for (_, symbol) in obj.symbols.by_kind(ObjSymbolKind::Function) {
if symbol.kind != ObjSymbolKind::Function {
continue;
}
if symbol.name != symbol_name && symbol.name != symbol_name.replace("TRK", "TRK_") { if symbol.name != symbol_name && symbol.name != symbol_name.replace("TRK", "TRK_") {
continue; continue;
} }
let section_idx = symbol.section.unwrap(); let section_idx = symbol.section.unwrap();
let section = &obj.sections[section_idx]; let section = &obj.sections[section_idx];
let out_symbol_idx = out_symbols.len(); // let out_symbol_idx = out_symbols.len();
out_symbols.push(OutSymbol { out_symbols.push(OutSymbol {
kind: symbol.kind, kind: symbol.kind,
name: symbol.name.clone(), name: symbol.name.clone(),
@@ -334,10 +294,11 @@ pub fn generate_signature(path: &Path, symbol_name: &str) -> Result<Option<Funct
.collect::<Vec<(u32, u32)>>(); .collect::<Vec<(u32, u32)>>();
for (idx, (ins, pat)) in instructions.iter_mut().enumerate() { for (idx, (ins, pat)) in instructions.iter_mut().enumerate() {
let addr = (symbol.address as usize + idx * 4) as u32; let addr = (symbol.address as usize + idx * 4) as u32;
if let Some(reloc) = relocations.get(&addr) { if let Some(&reloc_idx) = relocations.get(&addr) {
let reloc = &section.relocations[reloc_idx];
let symbol_idx = match symbol_map.entry(reloc.target_symbol) { let symbol_idx = match symbol_map.entry(reloc.target_symbol) {
btree_map::Entry::Vacant(e) => { btree_map::Entry::Vacant(e) => {
let target = &obj.symbols[reloc.target_symbol]; let target = obj.symbols.at(reloc.target_symbol);
let symbol_idx = out_symbols.len(); let symbol_idx = out_symbols.len();
e.insert(symbol_idx); e.insert(symbol_idx);
out_symbols.push(OutSymbol { out_symbols.push(OutSymbol {
@@ -363,19 +324,19 @@ pub fn generate_signature(path: &Path, symbol_name: &str) -> Result<Option<Funct
ObjRelocKind::PpcAddr16Hi ObjRelocKind::PpcAddr16Hi
| ObjRelocKind::PpcAddr16Ha | ObjRelocKind::PpcAddr16Ha
| ObjRelocKind::PpcAddr16Lo => { | ObjRelocKind::PpcAddr16Lo => {
*ins = *ins & !0xFFFF; *ins &= !0xFFFF;
*pat = !0xFFFF; *pat = !0xFFFF;
} }
ObjRelocKind::PpcRel24 => { ObjRelocKind::PpcRel24 => {
*ins = *ins & !0x3FFFFFC; *ins &= !0x3FFFFFC;
*pat = !0x3FFFFFC; *pat = !0x3FFFFFC;
} }
ObjRelocKind::PpcRel14 => { ObjRelocKind::PpcRel14 => {
*ins = *ins & !0xFFFC; *ins &= !0xFFFC;
*pat = !0xFFFC; *pat = !0xFFFC;
} }
ObjRelocKind::PpcEmbSda21 => { ObjRelocKind::PpcEmbSda21 => {
*ins = *ins & !0x1FFFFF; *ins &= !0x1FFFFF;
*pat = !0x1FFFFF; *pat = !0x1FFFFF;
} }
} }
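
The pattern-building loop above clears the instruction bits that a relocation will rewrite at link time, so a signature only matches on the bits that stay stable across links. A small sketch of that masking rule pulled out into a helper (the helper names are invented; the masks mirror the match arms shown above, and kinds not shown in this excerpt are simply left unmasked here):

    /// Bits of a 32-bit PowerPC instruction that a relocation of the given kind
    /// overwrites, and that therefore must be excluded from a signature pattern.
    fn reloc_mask(kind: ObjRelocKind) -> u32 {
        match kind {
            ObjRelocKind::PpcAddr16Hi | ObjRelocKind::PpcAddr16Ha | ObjRelocKind::PpcAddr16Lo => 0xFFFF,
            ObjRelocKind::PpcRel24 => 0x3FFFFFC,
            ObjRelocKind::PpcRel14 => 0xFFFC,
            ObjRelocKind::PpcEmbSda21 => 0x1FFFFF,
            _ => 0, // kinds not covered by the excerpt above: keep all bits in this sketch
        }
    }

    fn apply_reloc_mask(ins: &mut u32, pat: &mut u32, kind: ObjRelocKind) {
        let mask = reloc_mask(kind);
        *ins &= !mask; // drop the bits the linker will fill in
        *pat = !mask; // the comparison pattern only checks the remaining bits
    }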

View File

@@ -1,11 +1,426 @@
use std::{cmp::min, collections::HashMap}; use std::{
cmp::min,
collections::{BTreeMap, HashMap, HashSet},
};
use anyhow::{anyhow, bail, ensure, Result}; use anyhow::{anyhow, bail, ensure, Result};
use itertools::Itertools;
use topological_sort::TopologicalSort;
use crate::obj::{ use crate::obj::{
ObjArchitecture, ObjInfo, ObjKind, ObjReloc, ObjSection, ObjSectionKind, ObjSymbol, ObjArchitecture, ObjInfo, ObjKind, ObjReloc, ObjSection, ObjSectionKind, ObjSplit, ObjSymbol,
ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
}; };
/// Create splits for function pointers in the given section.
fn split_ctors_dtors(obj: &mut ObjInfo, section_start: u32, section_end: u32) -> Result<()> {
let mut new_splits = BTreeMap::new();
let mut current_address = section_start;
while current_address < section_end {
let (section, chunk) = obj.section_data(current_address, current_address + 4)?;
let function_addr = u32::from_be_bytes(chunk[0..4].try_into().unwrap());
log::debug!("Found {} entry: {:#010X}", section.name, function_addr);
let Some((_, function_symbol)) =
obj.symbols.kind_at_address(function_addr, ObjSymbolKind::Function)?
else {
bail!("Failed to find function symbol @ {:#010X}", function_addr);
};
let ctors_split = obj.split_for(current_address);
let function_split = obj.split_for(function_addr);
let mut expected_unit = None;
if let Some((_, ctors_split)) = ctors_split {
expected_unit = Some(ctors_split.unit.clone());
}
if let Some((_, function_split)) = function_split {
if let Some(unit) = &expected_unit {
ensure!(
unit == &function_split.unit,
"Mismatched splits for {} {:#010X} ({}) and function {:#010X} ({})",
section.name,
current_address,
unit,
function_addr,
function_split.unit
);
} else {
expected_unit = Some(function_split.unit.clone());
}
}
if ctors_split.is_none() || function_split.is_none() {
let unit = expected_unit.unwrap_or_else(|| {
let section_name = function_symbol
.section
.and_then(|idx| obj.sections.get(idx).map(|s| s.name.clone()))
.unwrap_or_else(|| "unknown".to_string());
format!("{}_{}", function_symbol.name, section_name)
});
log::debug!("Adding splits to unit {}", unit);
if ctors_split.is_none() {
log::debug!("Adding split for {} entry @ {:#010X}", section.name, current_address);
new_splits.insert(current_address, ObjSplit {
unit: unit.clone(),
end: current_address + 4,
align: None,
common: false,
});
}
if function_split.is_none() {
log::debug!("Adding split for function @ {:#010X}", function_addr);
new_splits.insert(function_addr, ObjSplit {
unit,
end: function_addr + function_symbol.size as u32,
align: None,
common: false,
});
}
}
current_address += 4;
}
for (addr, split) in new_splits {
obj.add_split(addr, split);
}
Ok(())
}
/// Create splits for extabindex + extab entries.
fn split_extabindex(obj: &mut ObjInfo, section_index: usize, section_start: u32) -> Result<()> {
let mut new_splits = BTreeMap::new();
let (_, eti_init_info) = obj
.symbols
.by_name("_eti_init_info")?
.ok_or_else(|| anyhow!("Failed to find _eti_init_info symbol"))?;
ensure!(
eti_init_info.section == Some(section_index),
"_eti_init_info symbol in the wrong section: {:?} != {}",
eti_init_info.section,
section_index
);
let mut current_address = section_start;
let section_end = eti_init_info.address as u32;
while current_address < section_end {
let (_eti_section, chunk) = obj.section_data(current_address, current_address + 12)?;
let function_addr = u32::from_be_bytes(chunk[0..4].try_into().unwrap());
let function_size = u32::from_be_bytes(chunk[4..8].try_into().unwrap());
let extab_addr = u32::from_be_bytes(chunk[8..12].try_into().unwrap());
log::debug!(
"Found extabindex entry: {:#010X} size {:#010X} extab {:#010X}",
function_addr,
function_size,
extab_addr
);
let Some((_, eti_symbol)) =
obj.symbols.kind_at_address(current_address, ObjSymbolKind::Object)?
else {
bail!("Failed to find extabindex symbol @ {:#010X}", current_address);
};
ensure!(
eti_symbol.size_known && eti_symbol.size == 12,
"extabindex symbol {} has mismatched size ({:#X}, expected {:#X})",
eti_symbol.name,
eti_symbol.size,
12
);
let Some((_, function_symbol)) =
obj.symbols.kind_at_address(function_addr, ObjSymbolKind::Function)?
else {
bail!("Failed to find function symbol @ {:#010X}", function_addr);
};
ensure!(
function_symbol.size_known && function_symbol.size == function_size as u64,
"Function symbol {} has mismatched size ({:#X}, expected {:#X})",
function_symbol.name,
function_symbol.size,
function_size
);
let Some((_, extab_symbol)) =
obj.symbols.kind_at_address(extab_addr, ObjSymbolKind::Object)?
else {
bail!("Failed to find extab symbol @ {:#010X}", extab_addr);
};
ensure!(
extab_symbol.size_known && extab_symbol.size > 0,
"extab symbol {} has unknown size",
extab_symbol.name
);
let extabindex_split = obj.split_for(current_address);
let extab_split = obj.split_for(extab_addr);
let function_split = obj.split_for(function_addr);
let mut expected_unit = None;
if let Some((_, extabindex_split)) = extabindex_split {
expected_unit = Some(extabindex_split.unit.clone());
}
if let Some((_, extab_split)) = extab_split {
if let Some(unit) = &expected_unit {
ensure!(
unit == &extab_split.unit,
"Mismatched splits for extabindex {:#010X} ({}) and extab {:#010X} ({})",
current_address,
unit,
extab_addr,
extab_split.unit
);
} else {
expected_unit = Some(extab_split.unit.clone());
}
}
if let Some((_, function_split)) = function_split {
if let Some(unit) = &expected_unit {
ensure!(
unit == &function_split.unit,
"Mismatched splits for extabindex {:#010X} ({}) and function {:#010X} ({})",
current_address,
unit,
function_addr,
function_split.unit
);
} else {
expected_unit = Some(function_split.unit.clone());
}
}
if extabindex_split.is_none() || extab_split.is_none() || function_split.is_none() {
let unit = expected_unit.unwrap_or_else(|| {
let section_name = function_symbol
.section
.and_then(|idx| obj.sections.get(idx).map(|s| s.name.clone()))
.unwrap_or_else(|| "unknown".to_string());
format!("{}_{}", function_symbol.name, section_name)
});
log::debug!("Adding splits to unit {}", unit);
if extabindex_split.is_none() {
log::debug!("Adding split for extabindex entry @ {:#010X}", current_address);
new_splits.insert(current_address, ObjSplit {
unit: unit.clone(),
end: current_address + 12,
align: None,
common: false,
});
}
if extab_split.is_none() {
log::debug!("Adding split for extab @ {:#010X}", extab_addr);
new_splits.insert(extab_addr, ObjSplit {
unit: unit.clone(),
end: extab_addr + extab_symbol.size as u32,
align: None,
common: false,
});
}
if function_split.is_none() {
log::debug!("Adding split for function @ {:#010X}", function_addr);
new_splits.insert(function_addr, ObjSplit {
unit,
end: function_addr + function_symbol.size as u32,
align: None,
common: false,
});
}
}
current_address += 12;
}
for (addr, split) in new_splits {
obj.add_split(addr, split);
}
Ok(())
}
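
split_extabindex above walks the section in 12-byte steps, reading each entry as three big-endian words: the function address, the function size, and the address of the matching extab entry. A tiny parsing sketch of that layout (the struct and function names are invented):

    /// One extabindex entry as decoded by the loop above: three big-endian u32 values.
    struct ExTabIndexEntry {
        function_addr: u32,
        function_size: u32,
        extab_addr: u32,
    }

    fn parse_eti_entry(chunk: &[u8]) -> Option<ExTabIndexEntry> {
        if chunk.len() < 12 {
            return None;
        }
        Some(ExTabIndexEntry {
            function_addr: u32::from_be_bytes(chunk[0..4].try_into().ok()?),
            function_size: u32::from_be_bytes(chunk[4..8].try_into().ok()?),
            extab_addr: u32::from_be_bytes(chunk[8..12].try_into().ok()?),
        })
    }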
/// Create splits for gaps between existing splits.
fn create_gap_splits(obj: &mut ObjInfo) -> Result<()> {
let mut new_splits = BTreeMap::new();
for (section_idx, section) in obj.sections.iter().enumerate() {
let mut current_address = section.address as u32;
let section_end = end_for_section(obj, section_idx)?;
let mut file_iter = obj.splits_for_range(current_address..section_end).peekable();
log::debug!(
"Checking splits for section {} ({:#010X}..{:#010X})",
section.name,
current_address,
section_end
);
loop {
if current_address >= section_end {
break;
}
let (split_start, split_end) = match file_iter.peek() {
Some(&(addr, split)) => {
log::debug!("Found split {} ({:#010X}..{:#010X})", split.unit, addr, split.end);
(addr, split.end)
}
None => (section_end, 0),
};
ensure!(
split_start >= current_address,
"Split {:#010X}..{:#010X} overlaps with previous split",
split_start,
split_end
);
if split_start > current_address {
// Find any duplicate symbols in this range
let mut new_split_end = split_start;
let symbols = obj.symbols.for_range(current_address..split_start).collect_vec();
let mut existing_symbols = HashSet::new();
for (_, symbol) in symbols {
// Sanity check? Maybe not required?
ensure!(
symbol.section == Some(section_idx),
"Expected symbol {} to be in section {}",
symbol.name,
section_idx
);
if !existing_symbols.insert(symbol.name.clone()) {
log::debug!(
"Found duplicate symbol {} at {:#010X}",
symbol.name,
symbol.address
);
new_split_end = symbol.address as u32;
break;
}
}
log::debug!(
"Creating split from {:#010X}..{:#010X}",
current_address,
new_split_end
);
let unit = format!("{:08X}_{}", current_address, section.name);
new_splits.insert(current_address, ObjSplit {
unit: unit.clone(),
end: new_split_end,
align: None,
common: false,
});
current_address = new_split_end;
continue;
}
file_iter.next();
if split_end > 0 {
current_address = split_end;
} else {
let mut file_end = section_end;
if let Some(&(next_addr, _next_split)) = file_iter.peek() {
file_end = min(next_addr, section_end);
}
current_address = file_end;
}
}
}
// Add new splits
for (addr, split) in new_splits {
obj.add_split(addr, split);
}
Ok(())
}
/// Perform any necessary adjustments to allow relinking.
/// This includes:
/// - Ensuring .ctors & .dtors entries are split with their associated function
/// - Ensuring extab & extabindex entries are split with their associated function
/// - Creating splits for gaps between existing splits
/// - Resolving a new object link order
pub fn update_splits(obj: &mut ObjInfo) -> Result<()> {
// Create splits for extab and extabindex entries
if let Some(section) = obj.sections.iter().find(|s| s.name == "extabindex") {
split_extabindex(obj, section.index, section.address as u32)?;
}
// Create splits for .ctors entries
if let Some(section) = obj.sections.iter().find(|s| s.name == ".ctors") {
let section_start = section.address as u32;
let section_end = section.address as u32 + section.size as u32 - 4;
split_ctors_dtors(obj, section_start, section_end)?;
}
// Create splits for .dtors entries
if let Some(section) = obj.sections.iter().find(|s| s.name == ".dtors") {
let section_start = section.address as u32 + 4; // skip __destroy_global_chain_reference
let section_end = section.address as u32 + section.size as u32 - 4;
split_ctors_dtors(obj, section_start, section_end)?;
}
// Create gap splits
create_gap_splits(obj)?;
// Resolve link order
obj.link_order = resolve_link_order(obj)?;
Ok(())
}
/// The ordering of TUs inside of each section represents a directed edge in a DAG.
/// We can use a topological sort to determine a valid global TU order.
/// There can be ambiguities, but any solution that satisfies the link order
/// constraints is considered valid.
fn resolve_link_order(obj: &ObjInfo) -> Result<Vec<String>> {
let mut global_unit_order = Vec::<String>::new();
let mut t_sort = TopologicalSort::<String>::new();
for section in &obj.sections {
let mut iter = obj
.splits_for_range(section.address as u32..(section.address + section.size) as u32)
.peekable();
if section.name == ".ctors" || section.name == ".dtors" {
// Skip __init_cpp_exceptions.o
let skipped = iter.next();
log::debug!("Skipping split {:?} (next: {:?})", skipped, iter.peek());
}
loop {
match (iter.next(), iter.peek()) {
(Some((a_addr, a)), Some((b_addr, b))) => {
if a.unit != b.unit {
log::debug!(
"Adding dependency {} ({:#010X}) -> {} ({:#010X})",
a.unit,
a_addr,
b.unit,
b_addr
);
t_sort.add_dependency(a.unit.clone(), b.unit.clone());
}
}
(Some((_, a)), None) => {
t_sort.insert(a.unit.clone());
break;
}
_ => break,
}
}
}
for unit in &mut t_sort {
global_unit_order.push(unit);
}
// An incomplete topological sort indicates that a cyclic dependency was encountered.
ensure!(t_sort.is_empty(), "Cyclic dependency encountered while resolving link order");
// Sanity check, did we get all TUs in the final order?
for unit in obj.splits.values().flatten().map(|s| &s.unit) {
ensure!(global_unit_order.contains(unit), "Failed to find an order for {unit}");
}
Ok(global_unit_order)
}
/// Split an executable object into relocatable objects. /// Split an executable object into relocatable objects.
pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> { pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
ensure!(obj.kind == ObjKind::Executable, "Expected executable object"); ensure!(obj.kind == ObjKind::Executable, "Expected executable object");
@@ -15,60 +430,33 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
let mut name_to_obj: HashMap<String, usize> = HashMap::new(); let mut name_to_obj: HashMap<String, usize> = HashMap::new();
for unit in &obj.link_order { for unit in &obj.link_order {
name_to_obj.insert(unit.clone(), objects.len()); name_to_obj.insert(unit.clone(), objects.len());
object_symbols.push(vec![None; obj.symbols.len()]); object_symbols.push(vec![None; obj.symbols.count()]);
objects.push(ObjInfo { let mut obj = ObjInfo::new(
module_id: 0, ObjKind::Relocatable,
kind: ObjKind::Relocatable, ObjArchitecture::PowerPc,
architecture: ObjArchitecture::PowerPc, unit.clone(),
name: unit.clone(), vec![],
symbols: vec![], vec![],
sections: vec![], );
entry: 0, obj.mw_comment = obj.mw_comment.clone();
sda2_base: None, objects.push(obj);
sda_base: None,
stack_address: None,
stack_end: None,
db_stack_addr: None,
arena_lo: None,
arena_hi: None,
splits: Default::default(),
named_sections: Default::default(),
link_order: vec![],
known_functions: Default::default(),
unresolved_relocations: vec![],
});
} }
for (section_idx, section) in obj.sections.iter().enumerate() { for (section_idx, section) in obj.sections.iter().enumerate() {
let mut current_address = section.address as u32; let mut current_address = section.address as u32;
let mut section_end = (section.address + section.size) as u32; let section_end = end_for_section(obj, section_idx)?;
// if matches!(section.name.as_str(), "extab" | "extabindex") { let mut file_iter = obj.splits_for_range(current_address..section_end).peekable();
// continue;
// }
// .ctors and .dtors end with a linker-generated null pointer,
// adjust section size appropriately
if matches!(section.name.as_str(), ".ctors" | ".dtors")
&& section.data[section.data.len() - 4..] == [0u8; 4]
{
section_end -= 4;
}
let mut file_iter = obj
.splits
.range(current_address..)
.flat_map(|(addr, v)| v.iter().map(move |u| (addr, u)))
.peekable();
// Build address to relocation / address to symbol maps // Build address to relocation / address to symbol maps
let relocations = section.build_relocation_map()?; let relocations = section.build_relocation_map()?;
let symbols = obj.build_symbol_map(section_idx)?;
loop { loop {
if current_address >= section_end { if current_address >= section_end {
break; break;
} }
let (file_addr, unit) = match file_iter.next() { let (file_addr, split) = match file_iter.next() {
Some((&addr, unit)) => (addr, unit), Some((addr, split)) => (addr, split),
None => bail!("No file found"), None => bail!("No file found"),
}; };
ensure!( ensure!(
@@ -76,41 +464,30 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
"Gap in files: {} @ {:#010X}, {} @ {:#010X}", "Gap in files: {} @ {:#010X}, {} @ {:#010X}",
section.name, section.name,
section.address, section.address,
unit, split.unit,
file_addr file_addr
); );
let mut file_end = section_end; let mut file_end = section_end;
let mut dont_go_forward = false; if let Some(&(next_addr, _next_split)) = file_iter.peek() {
if let Some(&(&next_addr, next_unit)) = file_iter.peek() { file_end = min(next_addr, section_end);
if file_addr == next_addr {
log::warn!("Duplicating {} in {unit} and {next_unit}", section.name);
dont_go_forward = true;
file_end = obj
.splits
.range(current_address + 1..)
.next()
.map(|(&addr, _)| addr)
.unwrap_or(section_end);
} else {
file_end = min(next_addr, section_end);
}
} }
let file = name_to_obj let file = name_to_obj
.get(unit) .get(&split.unit)
.and_then(|&idx| objects.get_mut(idx)) .and_then(|&idx| objects.get_mut(idx))
.ok_or_else(|| anyhow!("Unit '{unit}' not in link order"))?; .ok_or_else(|| anyhow!("Unit '{}' not in link order", split.unit))?;
let symbol_idxs = name_to_obj let symbol_idxs = name_to_obj
.get(unit) .get(&split.unit)
.and_then(|&idx| object_symbols.get_mut(idx)) .and_then(|&idx| object_symbols.get_mut(idx))
.ok_or_else(|| anyhow!("Unit '{unit}' not in link order"))?; .ok_or_else(|| anyhow!("Unit '{}' not in link order", split.unit))?;
// Calculate & verify section alignment // Calculate & verify section alignment
let mut align = default_section_align(section); let mut align =
split.align.map(u64::from).unwrap_or_else(|| default_section_align(section));
if current_address & (align as u32 - 1) != 0 { if current_address & (align as u32 - 1) != 0 {
log::warn!( log::warn!(
"Alignment for {} {} expected {}, but starts at {:#010X}", "Alignment for {} {} expected {}, but starts at {:#010X}",
unit, split.unit,
section.name, section.name,
align, align,
current_address current_address
@@ -125,7 +502,7 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
ensure!( ensure!(
current_address & (align as u32 - 1) == 0, current_address & (align as u32 - 1) == 0,
"Invalid alignment for split: {} {} {:#010X}", "Invalid alignment for split: {} {} {:#010X}",
unit, split.unit,
section.name, section.name,
current_address current_address
); );
@@ -133,67 +510,95 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
// Collect relocations; target_symbol will be updated later // Collect relocations; target_symbol will be updated later
let out_relocations = relocations let out_relocations = relocations
.range(current_address..file_end) .range(current_address..file_end)
.map(|(_, o)| ObjReloc { .map(|(_, &idx)| {
kind: o.kind, let o = &section.relocations[idx];
address: o.address - current_address as u64, ObjReloc {
target_symbol: o.target_symbol, kind: o.kind,
addend: o.addend, address: o.address - current_address as u64,
target_symbol: o.target_symbol,
addend: o.addend,
}
}) })
.collect(); .collect();
// Add section symbols // Add section symbols
let out_section_idx = file.sections.len(); let out_section_idx = file.sections.len();
for &symbol_idx in symbols.range(current_address..file_end).flat_map(|(_, vec)| vec) { let mut comm_addr = current_address;
for (symbol_idx, symbol) in obj.symbols.for_range(current_address..file_end) {
if symbol_idxs[symbol_idx].is_some() { if symbol_idxs[symbol_idx].is_some() {
continue; // should never happen? continue; // should never happen?
} }
let symbol = &obj.symbols[symbol_idx];
symbol_idxs[symbol_idx] = Some(file.symbols.len()); if split.common && symbol.address as u32 > comm_addr {
file.symbols.push(ObjSymbol { // HACK: Add padding for common bug
file.symbols.add_direct(ObjSymbol {
name: format!("pad_{:010X}", comm_addr),
demangled_name: None,
address: 0,
section: None,
size: symbol.address - comm_addr as u64,
size_known: true,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Common.into()),
kind: ObjSymbolKind::Object,
align: Some(4),
data_kind: Default::default(),
})?;
}
comm_addr = (symbol.address + symbol.size) as u32;
symbol_idxs[symbol_idx] = Some(file.symbols.count());
file.symbols.add_direct(ObjSymbol {
name: symbol.name.clone(), name: symbol.name.clone(),
demangled_name: symbol.demangled_name.clone(), demangled_name: symbol.demangled_name.clone(),
address: symbol.address - current_address as u64, address: if split.common { 4 } else { symbol.address - current_address as u64 },
section: Some(out_section_idx), section: if split.common { None } else { Some(out_section_idx) },
size: symbol.size, size: symbol.size,
size_known: symbol.size_known, size_known: symbol.size_known,
flags: symbol.flags, flags: if split.common {
ObjSymbolFlagSet(ObjSymbolFlags::Common.into())
} else {
symbol.flags
},
kind: symbol.kind, kind: symbol.kind,
align: if split.common { Some(4) } else { symbol.align },
data_kind: symbol.data_kind,
})?;
}
if !split.common {
let data = match section.kind {
ObjSectionKind::Bss => vec![],
_ => section.data[(current_address as u64 - section.address) as usize
..(file_end as u64 - section.address) as usize]
.to_vec(),
};
let name = if let Some(name) = obj.named_sections.get(&current_address) {
name.clone()
} else {
section.name.clone()
};
file.sections.push(ObjSection {
name,
kind: section.kind,
address: 0,
size: file_end as u64 - current_address as u64,
data,
align,
index: out_section_idx,
elf_index: out_section_idx + 1,
relocations: out_relocations,
original_address: current_address as u64,
file_offset: section.file_offset + (current_address as u64 - section.address),
section_known: true,
}); });
} }
let data = match section.kind { current_address = file_end;
ObjSectionKind::Bss => vec![],
_ => section.data[(current_address as u64 - section.address) as usize
..(file_end as u64 - section.address) as usize]
.to_vec(),
};
let name = if let Some(name) = obj.named_sections.get(&current_address) {
name.clone()
} else {
section.name.clone()
};
file.sections.push(ObjSection {
name,
kind: section.kind,
address: 0,
size: file_end as u64 - current_address as u64,
data,
align,
index: out_section_idx,
elf_index: out_section_idx + 1,
relocations: out_relocations,
original_address: current_address as u64,
file_offset: section.file_offset + (current_address as u64 - section.address),
section_known: true,
});
if !dont_go_forward {
current_address = file_end;
}
} }
} }
// Update relocations // Update relocations
let mut globalize_symbols = vec![];
for (obj_idx, out_obj) in objects.iter_mut().enumerate() { for (obj_idx, out_obj) in objects.iter_mut().enumerate() {
let symbol_idxs = &mut object_symbols[obj_idx]; let symbol_idxs = &mut object_symbols[obj_idx];
for section in &mut out_obj.sections { for section in &mut out_obj.sections {
@@ -204,32 +609,52 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
} }
None => { None => {
// Extern // Extern
let out_sym_idx = out_obj.symbols.len(); let out_sym_idx = out_obj.symbols.count();
let target_sym = &obj.symbols[reloc.target_symbol]; let target_sym = obj.symbols.at(reloc.target_symbol);
// If the symbol is local, we'll upgrade the scope to global
// and rename it to avoid conflicts
if target_sym.flags.0.contains(ObjSymbolFlags::Local) {
let address_str = format!("{:08X}", target_sym.address);
let new_name = if target_sym.name.ends_with(&address_str) {
target_sym.name.clone()
} else {
format!("{}_{}", target_sym.name, address_str)
};
globalize_symbols.push((reloc.target_symbol, new_name));
}
symbol_idxs[reloc.target_symbol] = Some(out_sym_idx); symbol_idxs[reloc.target_symbol] = Some(out_sym_idx);
out_obj.symbols.push(ObjSymbol { out_obj.symbols.add_direct(ObjSymbol {
name: target_sym.name.clone(), name: target_sym.name.clone(),
demangled_name: target_sym.demangled_name.clone(), demangled_name: target_sym.demangled_name.clone(),
..Default::default() ..Default::default()
}); })?;
reloc.target_symbol = out_sym_idx; reloc.target_symbol = out_sym_idx;
if section.name.as_str() == "extabindex" { if section.name.as_str() == "extabindex" {
let (target_addr, target_unit) = obj let Some((target_addr, target_split)) =
.splits obj.split_for(target_sym.address as u32)
.range(..=target_sym.address as u32) else {
.map(|(addr, v)| (*addr, v.last().unwrap())) bail!(
.last() "Bad extabindex relocation @ {:#010X}",
.unwrap(); reloc.address + section.original_address
);
};
let target_section = &obj.section_at(target_addr)?.name; let target_section = &obj.section_at(target_addr)?.name;
log::warn!( log::error!(
"Extern relocation @ {:#010X}\n\tSource object: {}:{:#010X} {}\n\tTarget object: {}:{:#010X} {}\n\tTarget symbol: {:#010X} {}\n", "Bad extabindex relocation @ {:#010X}\n\
\tSource object: {}:{:#010X} ({})\n\
\tTarget object: {}:{:#010X} ({})\n\
\tTarget symbol: {:#010X} ({})\n\
This will cause the linker to crash.\n",
reloc.address + section.original_address, reloc.address + section.original_address,
section.name, section.name,
section.original_address, section.original_address,
out_obj.name, out_obj.name,
target_section, target_section,
target_addr, target_addr,
target_unit, target_split.unit,
target_sym.address, target_sym.address,
target_sym.demangled_name.as_deref().unwrap_or(&target_sym.name), target_sym.demangled_name.as_deref().unwrap_or(&target_sym.name),
); );
@@ -240,44 +665,50 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
} }
} }
// Strip linker generated symbols // Upgrade local symbols to global if necessary
for obj in &mut objects { for (obj, symbol_map) in objects.iter_mut().zip(&object_symbols) {
for symbol in &mut obj.symbols { for (globalize_idx, new_name) in &globalize_symbols {
if is_skip_symbol(&symbol.name) { if let Some(symbol_idx) = symbol_map[*globalize_idx] {
if symbol.section.is_some() { let mut symbol = obj.symbols.at(symbol_idx).clone();
log::debug!("Externing {:?} in {}", symbol, obj.name); symbol.name = new_name.clone();
*symbol = ObjSymbol { if symbol.flags.0.contains(ObjSymbolFlags::Local) {
name: symbol.name.clone(), log::debug!("Globalizing {} in {}", symbol.name, obj.name);
demangled_name: symbol.demangled_name.clone(), symbol.flags.0 &= !ObjSymbolFlags::Local;
..Default::default() symbol.flags.0 |= ObjSymbolFlags::Global;
};
}
} else if is_linker_symbol(&symbol.name) {
if let Some(section_idx) = symbol.section {
log::debug!("Skipping {:?} in {}", symbol, obj.name);
let section = &mut obj.sections[section_idx];
// TODO assuming end of file
section.size -= symbol.size;
section.data.truncate(section.data.len() - symbol.size as usize);
*symbol = ObjSymbol {
name: symbol.name.clone(),
demangled_name: symbol.demangled_name.clone(),
..Default::default()
};
} }
obj.symbols.replace(symbol_idx, symbol)?;
} }
} }
} }
// Extern linker generated symbols
for obj in &mut objects {
let mut replace_symbols = vec![];
for (symbol_idx, symbol) in obj.symbols.iter().enumerate() {
if is_linker_generated_label(&symbol.name) && symbol.section.is_some() {
log::debug!("Externing {:?} in {}", symbol, obj.name);
replace_symbols.push((symbol_idx, ObjSymbol {
name: symbol.name.clone(),
demangled_name: symbol.demangled_name.clone(),
..Default::default()
}));
}
}
for (symbol_idx, symbol) in replace_symbols {
obj.symbols.replace(symbol_idx, symbol)?;
}
}
Ok(objects) Ok(objects)
} }
/// mwld doesn't preserve the original section alignment values /// mwld doesn't preserve the original section alignment values
fn default_section_align(section: &ObjSection) -> u64 { pub fn default_section_align(section: &ObjSection) -> u64 {
match section.kind { match section.kind {
ObjSectionKind::Code => 4, ObjSectionKind::Code => 4,
_ => match section.name.as_str() { _ => match section.name.as_str() {
".ctors" | ".dtors" | "extab" | "extabindex" => 4, ".ctors" | ".dtors" | "extab" | "extabindex" => 4,
".sbss" => 1, // ?
_ => 8, _ => 8,
}, },
} }
@@ -285,7 +716,7 @@ fn default_section_align(section: &ObjSection) -> u64 {
/// Linker-generated symbols to extern /// Linker-generated symbols to extern
#[inline] #[inline]
fn is_skip_symbol(name: &str) -> bool { pub fn is_linker_generated_label(name: &str) -> bool {
matches!( matches!(
name, name,
"_ctors" "_ctors"
@@ -347,11 +778,47 @@ fn is_skip_symbol(name: &str) -> bool {
) )
} }
/// Linker generated symbols to strip entirely /// Linker generated objects to strip entirely
#[inline] #[inline]
fn is_linker_symbol(name: &str) -> bool { pub fn is_linker_generated_object(name: &str) -> bool {
matches!( matches!(
name, name,
"_eti_init_info" | "_rom_copy_info" | "_bss_init_info" | "_ctors$99" | "_dtors$99" "_eti_init_info" | "_rom_copy_info" | "_bss_init_info" | "_ctors$99" | "_dtors$99"
) )
} }
/// Locate the end address of a section when excluding linker generated objects
pub fn end_for_section(obj: &ObjInfo, section_index: usize) -> Result<u32> {
let section = &obj.sections[section_index];
let section_start = section.address as u32;
let mut section_end = (section.address + section.size) as u32;
// .ctors and .dtors end with a linker-generated null pointer,
// adjust section size appropriately
if matches!(section.name.as_str(), ".ctors" | ".dtors")
&& section.data[section.data.len() - 4..] == [0u8; 4]
{
section_end -= 4;
return Ok(section_end);
}
loop {
let last_symbol = obj
.symbols
.for_range(section_start..section_end)
.filter(|(_, s)| s.kind == ObjSymbolKind::Object && s.size_known && s.size > 0)
.last();
match last_symbol {
Some((_, symbol)) if is_linker_generated_object(&symbol.name) => {
log::debug!(
"Found {}, adjusting section {} end {:#010X} -> {:#010X}",
section.name,
symbol.name,
section_end,
symbol.address
);
section_end = symbol.address as u32;
}
_ => break,
}
}
Ok(section_end)
}
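
resolve_link_order above treats "unit A appears before unit B inside some section" as a directed edge and asks the topological_sort crate for any global ordering that satisfies every section at once; if the sorter still holds items after draining, the per-section orders contradict each other. A reduced sketch of the same idea on plain strings (the example section contents in the comment are invented):

    use topological_sort::TopologicalSort;

    fn toy_link_order(sections: &[Vec<&str>]) -> Option<Vec<String>> {
        let mut t_sort = TopologicalSort::<String>::new();
        for units in sections {
            // Consecutive units within one section form a dependency edge.
            for pair in units.windows(2) {
                t_sort.add_dependency(pair[0].to_string(), pair[1].to_string());
            }
            if units.len() == 1 {
                t_sort.insert(units[0].to_string());
            }
        }
        let mut order = Vec::new();
        while let Some(unit) = t_sort.pop() {
            order.push(unit);
        }
        // Anything left behind means a cycle: no global order exists.
        if t_sort.is_empty() { Some(order) } else { None }
    }

    // e.g. sections [a, b, c] and [a, c] are satisfied by the order a, b, c,
    // while [a, b] and [b, a] have no valid order.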

View File

@@ -9,8 +9,8 @@ use ppc750cl::{disasm_iter, Argument, Ins, Opcode};
use crate::{ use crate::{
obj::{ obj::{
ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol, ObjSymbolFlags, ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol,
ObjSymbolKind, ObjSymbolFlags, ObjSymbolKind,
}, },
util::nested::NestedVec, util::nested::NestedVec,
}; };
@@ -22,7 +22,7 @@ enum SymbolEntryKind {
Label, Label,
} }
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct SymbolEntry { struct SymbolEntry {
index: usize, index: usize,
kind: SymbolEntryKind, kind: SymbolEntryKind,
@@ -31,20 +31,24 @@ struct SymbolEntry {
pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> { pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
writeln!(w, ".include \"macros.inc\"")?; writeln!(w, ".include \"macros.inc\"")?;
if !obj.name.is_empty() { if !obj.name.is_empty() {
writeln!(w, ".file \"{}\"", obj.name.replace('\\', "\\\\"))?; let name = obj
.name
.rsplit_once('/')
.or_else(|| obj.name.rsplit_once('\\'))
.or_else(|| obj.name.rsplit_once(' '))
.map(|(_, b)| b)
.unwrap_or(&obj.name);
writeln!(w, ".file \"{}\"", name.replace('\\', "\\\\"))?;
} }
// We'll append generated symbols to the end // We'll append generated symbols to the end
let mut symbols: Vec<ObjSymbol> = obj.symbols.clone(); let mut symbols: Vec<ObjSymbol> = obj.symbols.iter().cloned().collect();
let mut section_entries: Vec<BTreeMap<u32, Vec<SymbolEntry>>> = vec![]; let mut section_entries: Vec<BTreeMap<u32, Vec<SymbolEntry>>> = vec![];
let mut section_relocations: Vec<BTreeMap<u32, ObjReloc>> = vec![]; let mut section_relocations: Vec<BTreeMap<u32, ObjReloc>> = vec![];
for (section_idx, section) in obj.sections.iter().enumerate() { for (section_idx, section) in obj.sections.iter().enumerate() {
// Build symbol start/end entries // Build symbol start/end entries
let mut entries = BTreeMap::<u32, Vec<SymbolEntry>>::new(); let mut entries = BTreeMap::<u32, Vec<SymbolEntry>>::new();
for (symbol_index, symbol) in obj.symbols_for_section(section_idx) { for (symbol_index, symbol) in obj.symbols.for_section(section) {
if symbol.kind == ObjSymbolKind::Section {
continue;
}
entries.nested_push(symbol.address as u32, SymbolEntry { entries.nested_push(symbol.address as u32, SymbolEntry {
index: symbol_index, index: symbol_index,
kind: SymbolEntryKind::Start, kind: SymbolEntryKind::Start,
@@ -57,16 +61,13 @@ pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
} }
} }
let mut relocations = section.build_relocation_map()?; let mut relocations = section.build_relocation_map_cloned()?;
// Generate local jump labels // Generate local jump labels
if section.kind == ObjSectionKind::Code { if section.kind == ObjSectionKind::Code {
for ins in disasm_iter(&section.data, section.address as u32) { for ins in disasm_iter(&section.data, section.address as u32) {
if let Some(address) = ins.branch_dest() { if let Some(address) = ins.branch_dest() {
if ins.field_AA() if ins.field_AA() || !section.contains(address) {
|| (address as u64) < section.address
|| (address as u64) >= section.address + section.size
{
continue; continue;
} }
@@ -128,7 +129,7 @@ pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
if reloc.addend == 0 { if reloc.addend == 0 {
continue; continue;
} }
let target = &obj.symbols[reloc.target_symbol]; let target = &symbols[reloc.target_symbol];
let target_section_idx = match target.section { let target_section_idx = match target.section {
Some(v) => v, Some(v) => v,
None => continue, None => continue,
@@ -157,6 +158,24 @@ pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
} }
} }
// Write common symbols
let mut common_symbols = Vec::new();
for symbol in symbols.iter().filter(|s| s.flags.0.contains(ObjSymbolFlags::Common)) {
ensure!(symbol.section.is_none(), "Invalid: common symbol with section {:?}", symbol);
common_symbols.push(symbol);
}
if !common_symbols.is_empty() {
writeln!(w)?;
for symbol in common_symbols {
if let Some(name) = &symbol.demangled_name {
writeln!(w, "# {name}")?;
}
write!(w, ".comm ")?;
write_symbol_name(w, &symbol.name)?;
writeln!(w, ", {:#X}, 4", symbol.size)?;
}
}
for section in &obj.sections { for section in &obj.sections {
let entries = &section_entries[section.index]; let entries = &section_entries[section.index];
let relocations = &section_relocations[section.index]; let relocations = &section_relocations[section.index];
@@ -336,10 +355,11 @@ fn write_symbol_entry<W: Write>(
}; };
let scope = if symbol.flags.0.contains(ObjSymbolFlags::Weak) { let scope = if symbol.flags.0.contains(ObjSymbolFlags::Weak) {
"weak" "weak"
} else if symbol.flags.0.contains(ObjSymbolFlags::Global) { } else if symbol.flags.0.contains(ObjSymbolFlags::Local) {
"global"
} else {
"local" "local"
} else {
// Default to global
"global"
}; };
match entry.kind { match entry.kind {
@@ -397,6 +417,7 @@ fn write_data<W: Write>(
let mut current_address = start; let mut current_address = start;
let mut current_symbol_kind = ObjSymbolKind::Unknown; let mut current_symbol_kind = ObjSymbolKind::Unknown;
let mut current_data_kind = ObjDataKind::Unknown;
let mut entry = entry_iter.next(); let mut entry = entry_iter.next();
let mut reloc = reloc_iter.next(); let mut reloc = reloc_iter.next();
let mut begin = true; let mut begin = true;
@ -413,6 +434,7 @@ fn write_data<W: Write>(
write_symbol_entry(w, symbols, entry)?; write_symbol_entry(w, symbols, entry)?;
} }
current_symbol_kind = find_symbol_kind(current_symbol_kind, symbols, vec)?; current_symbol_kind = find_symbol_kind(current_symbol_kind, symbols, vec)?;
current_data_kind = find_data_kind(current_data_kind, symbols, vec)?;
entry = entry_iter.next(); entry = entry_iter.next();
} }
} }
@ -464,7 +486,7 @@ fn write_data<W: Write>(
); );
write_code_chunk(w, symbols, entries, relocations, section, current_address, data)?; write_code_chunk(w, symbols, entries, relocations, section, current_address, data)?;
} else { } else {
write_data_chunk(w, data)?; write_data_chunk(w, data, current_data_kind)?;
} }
current_address = until; current_address = until;
} }
@ -497,24 +519,174 @@ fn find_symbol_kind(
Ok(kind) Ok(kind)
} }
fn write_data_chunk<W: Write>(w: &mut W, data: &[u8]) -> Result<()> { fn find_data_kind(
current_data_kind: ObjDataKind,
symbols: &[ObjSymbol],
entries: &Vec<SymbolEntry>,
) -> Result<ObjDataKind> {
let mut kind = ObjDataKind::Unknown;
let mut found = false;
for entry in entries {
match entry.kind {
SymbolEntryKind::Start => {
let new_kind = symbols[entry.index].data_kind;
if !matches!(new_kind, ObjDataKind::Unknown) {
ensure!(
!found || new_kind == kind,
"Conflicting data kinds found: {kind:?} and {new_kind:?}"
);
found = true;
kind = new_kind;
}
}
SymbolEntryKind::Label => {
// If type is a local label, don't change data types
if !found {
kind = current_data_kind;
}
}
_ => continue,
}
}
Ok(kind)
}
fn write_string<W: Write>(w: &mut W, data: &[u8]) -> Result<()> {
let terminated = matches!(data.last(), Some(&b) if b == 0);
if terminated {
write!(w, "\t.string \"")?;
} else {
write!(w, "\t.ascii \"")?;
}
for &b in &data[..data.len() - if terminated { 1 } else { 0 }] {
match b as char {
'\x08' => write!(w, "\\b")?,
'\x09' => write!(w, "\\t")?,
'\x0A' => write!(w, "\\n")?,
'\x0C' => write!(w, "\\f")?,
'\x0D' => write!(w, "\\r")?,
'\\' => write!(w, "\\\\")?,
'"' => write!(w, "\\\"")?,
c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
_ => write!(w, "\\{:03o}", b)?,
}
}
writeln!(w, "\"")?;
Ok(())
}
fn write_string16<W: Write>(w: &mut W, data: &[u16]) -> Result<()> {
if matches!(data.last(), Some(&b) if b == 0) {
write!(w, "\t.string16 \"")?;
} else {
bail!("Non-terminated UTF-16 string");
}
if data.len() > 1 {
for result in std::char::decode_utf16(data[..data.len() - 1].iter().cloned()) {
let c = match result {
Ok(c) => c,
Err(_) => bail!("Failed to decode UTF-16"),
};
match c {
'\x08' => write!(w, "\\b")?,
'\x09' => write!(w, "\\t")?,
'\x0A' => write!(w, "\\n")?,
'\x0C' => write!(w, "\\f")?,
'\x0D' => write!(w, "\\r")?,
'\\' => write!(w, "\\\\")?,
'"' => write!(w, "\\\"")?,
c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
_ => write!(w, "\\{:#X}", c as u32)?,
}
}
}
writeln!(w, "\"")?;
Ok(())
}
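A minimal test-style sketch of the escaping rules above, assuming it sits in the same module as write_string (for example in a #[cfg(test)] block with use super::*): a NUL-terminated buffer is emitted as .string with the terminator dropped, anything else falls back to .ascii, and control bytes are escaped.

fn string_escaping() -> anyhow::Result<()> {
    // Terminated input: trailing NUL is dropped and .string is used
    let mut out = Vec::new();
    write_string(&mut out, b"line1\nline2\0")?;
    assert_eq!(String::from_utf8(out)?, "\t.string \"line1\\nline2\"\n");
    // Unterminated input falls back to .ascii
    let mut out = Vec::new();
    write_string(&mut out, b"raw")?;
    assert_eq!(String::from_utf8(out)?, "\t.ascii \"raw\"\n");
    Ok(())
}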
fn write_data_chunk<W: Write>(w: &mut W, data: &[u8], data_kind: ObjDataKind) -> Result<()> {
let remain = data; let remain = data;
for chunk in remain.chunks(4) { match data_kind {
match chunk.len() { ObjDataKind::String => {
4 => { return write_string(w, data);
let data = u32::from_be_bytes(chunk.try_into().unwrap()); }
writeln!(w, "\t.4byte {data:#010X}")?; ObjDataKind::String16 => {
if data.len() % 2 != 0 {
bail!("Attempted to write wstring with length {:#X}", data.len());
} }
3 => { let data = data
writeln!(w, "\t.byte {:#04X}, {:#04X}, {:#04X}", chunk[0], chunk[1], chunk[2])?; .chunks_exact(2)
.map(|c| u16::from_be_bytes(c.try_into().unwrap()))
.collect::<Vec<u16>>();
return write_string16(w, &data);
}
ObjDataKind::StringTable => {
for slice in data.split_inclusive(|&b| b == 0) {
write_string(w, slice)?;
} }
2 => { return Ok(());
writeln!(w, "\t.2byte {:#06X}", u16::from_be_bytes(chunk.try_into().unwrap()))?; }
ObjDataKind::String16Table => {
if data.len() % 2 != 0 {
bail!("Attempted to write wstring_table with length {:#X}", data.len());
} }
1 => { let data = data
writeln!(w, "\t.byte {:#04X}", chunk[0])?; .chunks_exact(2)
.map(|c| u16::from_be_bytes(c.try_into().unwrap()))
.collect::<Vec<u16>>();
for slice in data.split_inclusive(|&b| b == 0) {
write_string16(w, slice)?;
}
return Ok(());
}
_ => {}
}
let chunk_size = match data_kind {
ObjDataKind::Byte2 => 2,
ObjDataKind::Unknown | ObjDataKind::Byte4 | ObjDataKind::Float => 4,
ObjDataKind::Byte | ObjDataKind::Byte8 | ObjDataKind::Double => 8,
ObjDataKind::String
| ObjDataKind::String16
| ObjDataKind::StringTable
| ObjDataKind::String16Table => unreachable!(),
};
for chunk in remain.chunks(chunk_size) {
if data_kind == ObjDataKind::Byte || matches!(chunk.len(), 1 | 3 | 5..=7) {
let bytes = chunk.iter().map(|c| format!("{:#04X}", c)).collect::<Vec<String>>();
writeln!(w, "\t.byte {}", bytes.join(", "))?;
} else {
match chunk.len() {
8 if data_kind == ObjDataKind::Double => {
let data = f64::from_be_bytes(chunk.try_into().unwrap());
if data.is_nan() {
let int_data = u64::from_be_bytes(chunk.try_into().unwrap());
writeln!(w, "\t.8byte {int_data:#018X} # {data}")?;
} else {
writeln!(w, "\t.double {data}")?;
}
}
8 => {
let data = u64::from_be_bytes(chunk.try_into().unwrap());
writeln!(w, "\t.8byte {data:#018X}")?;
}
4 if data_kind == ObjDataKind::Float => {
let data = f32::from_be_bytes(chunk.try_into().unwrap());
if data.is_nan() {
let int_data = u32::from_be_bytes(chunk.try_into().unwrap());
writeln!(w, "\t.4byte {int_data:#010X} # {data}")?;
} else {
writeln!(w, "\t.float {data}")?;
}
}
4 => {
let data = u32::from_be_bytes(chunk.try_into().unwrap());
writeln!(w, "\t.4byte {data:#010X}")?;
}
2 => {
writeln!(w, "\t.2byte {:#06X}", u16::from_be_bytes(chunk.try_into().unwrap()))?;
}
_ => unreachable!(),
} }
_ => unreachable!(),
} }
} }
Ok(()) Ok(())
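A small test-style sketch of the directive selection above, assuming it lives in the same module as write_data_chunk: an 8-byte value tagged as double is emitted as a .double directive, while the same bytes with an unknown data kind fall back to big-endian .4byte words.

fn data_chunk_directives() -> anyhow::Result<()> {
    // 1.0 as a big-endian f64, tagged ObjDataKind::Double -> ".double 1"
    let mut out = Vec::new();
    write_data_chunk(&mut out, &1.0f64.to_be_bytes(), ObjDataKind::Double)?;
    assert_eq!(String::from_utf8(out)?, "\t.double 1\n");
    // The same bytes with ObjDataKind::Unknown become two .4byte words
    let mut out = Vec::new();
    write_data_chunk(&mut out, &1.0f64.to_be_bytes(), ObjDataKind::Unknown)?;
    assert_eq!(String::from_utf8(out)?, "\t.4byte 0x3FF00000\n\t.4byte 0x00000000\n");
    Ok(())
}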
@ -644,6 +816,10 @@ fn write_section_header<W: Write>(
write!(w, ".section {}", section.name)?; write!(w, ".section {}", section.name)?;
write!(w, ", \"a\"")?; write!(w, ", \"a\"")?;
} }
".comment" => {
write!(w, ".section {}", section.name)?;
write!(w, ", \"\"")?;
}
name => { name => {
log::warn!("Unknown section {name}"); log::warn!("Unknown section {name}");
write!(w, ".section {}", section.name)?; write!(w, ".section {}", section.name)?;
@ -676,7 +852,12 @@ fn write_reloc_symbol<W: Write>(
} }
fn write_symbol_name<W: Write>(w: &mut W, name: &str) -> std::io::Result<()> { fn write_symbol_name<W: Write>(w: &mut W, name: &str) -> std::io::Result<()> {
if name.contains('@') || name.contains('<') || name.contains('\\') { if name.contains('@')
|| name.contains('<')
|| name.contains('\\')
|| name.contains('-')
|| name.contains('+')
{
write!(w, "\"{name}\"")?; write!(w, "\"{name}\"")?;
} else { } else {
write!(w, "{name}")?; write!(w, "{name}")?;
161
src/util/comment.rs Normal file
@ -0,0 +1,161 @@
use std::{
io::{Read, Seek, SeekFrom, Write},
ops::Deref,
};
use anyhow::{bail, Context, Result};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use num_enum::{IntoPrimitive, TryFromPrimitive};
use crate::obj::{ObjSymbol, ObjSymbolFlags, ObjSymbolKind};
#[derive(Debug, Copy, Clone, IntoPrimitive, TryFromPrimitive)]
#[repr(u8)]
pub enum MWFloatKind {
None = 0,
Soft = 1,
Hard = 2,
}
#[derive(Debug, Clone)]
pub struct MWComment {
pub compiler_version: [u8; 4],
pub pool_data: bool,
pub float: MWFloatKind,
pub processor: u16,
pub incompatible_return_small_structs: bool,
pub incompatible_sfpe_double_params: bool,
pub unsafe_global_reg_vars: bool,
}
impl Default for MWComment {
fn default() -> Self {
Self {
// Metrowerks C/C++ Compiler for Embedded PowerPC
// Version 2.4.2 build 81
// (CodeWarrior for GameCube 1.3.2)
compiler_version: [2, 4, 2, 1],
pool_data: true,
float: MWFloatKind::Hard,
processor: 0x16, // gekko
incompatible_return_small_structs: false,
incompatible_sfpe_double_params: false,
unsafe_global_reg_vars: false,
}
}
}
const MAGIC: &[u8] = "CodeWarrior\n".as_bytes();
const PADDING: &[u8] = &[0u8; 0x16];
impl MWComment {
pub fn parse_header<R: Read + Seek>(reader: &mut R) -> Result<MWComment> {
let mut header = MWComment {
compiler_version: [0; 4],
pool_data: false,
float: MWFloatKind::None,
processor: 0,
incompatible_return_small_structs: false,
incompatible_sfpe_double_params: false,
unsafe_global_reg_vars: false,
};
// 0x0 - 0xB
let mut magic = vec![0u8; MAGIC.len()];
reader.read_exact(&mut magic).context("While reading magic")?;
if magic.deref() != MAGIC {
bail!("Invalid comment section magic: {:?}", magic);
}
// 0xC - 0xF
reader
.read_exact(&mut header.compiler_version)
.context("While reading compiler version")?;
// 0x10
header.pool_data = match reader.read_u8()? {
0 => false,
1 => true,
value => bail!("Invalid value for pool_data: {}", value),
};
// 0x11
header.float =
MWFloatKind::try_from(reader.read_u8()?).context("Invalid value for float")?;
// 0x12 - 0x13
header.processor = reader.read_u16::<BigEndian>()?;
// 0x14
match reader.read_u8()? as char {
// This is 0x2C, which could also be the size of the header? Unclear
',' => {}
c => bail!("Expected ',' after processor, got '{}'", c),
}
// 0x15
let flags = reader.read_u8()?;
if flags & !7 != 0 {
bail!("Unexpected flag value {:#X}", flags);
}
if flags & 1 == 1 {
header.incompatible_return_small_structs = true;
}
if flags & 2 == 2 {
header.incompatible_sfpe_double_params = true;
}
if flags & 4 == 4 {
header.unsafe_global_reg_vars = true;
}
// 0x16 - 0x2C
reader.seek(SeekFrom::Current(0x16))?;
Ok(header)
}
pub fn write_header<W: Write>(&self, w: &mut W) -> Result<()> {
w.write_all(MAGIC)?;
w.write_all(&self.compiler_version)?;
w.write_u8(if self.pool_data { 1 } else { 0 })?;
w.write_u8(self.float.into())?;
w.write_u16::<BigEndian>(self.processor)?;
w.write_u8(0x2C)?;
let mut flags = 0u8;
if self.incompatible_return_small_structs {
flags |= 1;
}
if self.incompatible_sfpe_double_params {
flags |= 2;
}
if self.unsafe_global_reg_vars {
flags |= 4;
}
w.write_u8(flags)?;
w.write_all(PADDING)?;
Ok(())
}
}
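A hedged round-trip sketch for the header above, written test-style inside this module: MWComment::default() serializes to 0x2C bytes (12-byte magic, 4-byte version, pool_data, float, 2-byte processor, the ',' marker, flags, and 0x16 bytes of padding), which is consistent with the note above that the 0x2C byte may encode the header size.

use std::io::Cursor;

fn comment_header_roundtrip() -> anyhow::Result<()> {
    let header = MWComment::default();
    let mut buf = Vec::new();
    header.write_header(&mut buf)?;
    assert_eq!(buf.len(), 0x2C); // 12 + 4 + 1 + 1 + 2 + 1 + 1 + 0x16
    let parsed = MWComment::parse_header(&mut Cursor::new(buf))?;
    assert_eq!(parsed.compiler_version, header.compiler_version);
    Ok(())
}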
pub fn write_comment_sym<W: Write>(w: &mut W, symbol: &ObjSymbol) -> Result<()> {
let align = match symbol.align {
Some(align) => align,
None => {
if symbol.flags.0.contains(ObjSymbolFlags::Common) {
symbol.address as u32
} else {
match symbol.kind {
ObjSymbolKind::Unknown => 0,
ObjSymbolKind::Function => 4,
ObjSymbolKind::Object => 4,
ObjSymbolKind::Section => 8, // TODO?
}
}
}
};
w.write_u32::<BigEndian>(align)?;
let mut vis_flags = 0;
if symbol.flags.0.contains(ObjSymbolFlags::Weak) {
vis_flags |= 0xE; // TODO 0xD?
}
w.write_u8(vis_flags)?;
let mut active_flags = 0;
if symbol.flags.0.contains(ObjSymbolFlags::ForceActive) {
active_flags |= 8;
}
w.write_u8(active_flags)?;
w.write_u8(0)?;
w.write_u8(0)?;
Ok(())
}
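And a matching sketch for the per-symbol records written by write_comment_sym above (the symbol fields are placeholder values): each symbol contributes exactly eight bytes, a big-endian u32 alignment followed by the visibility byte, the force-active byte, and two reserved zero bytes.

use crate::obj::{ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind};

fn comment_sym_record() -> anyhow::Result<()> {
    let symbol = ObjSymbol {
        name: "main".to_string(), // placeholder symbol
        demangled_name: None,
        address: 0x8000_3100,
        section: Some(0),
        size: 0x100,
        size_known: true,
        flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
        kind: ObjSymbolKind::Function, // no explicit align, so it defaults to 4
        align: None,
        data_kind: Default::default(),
    };
    let mut buf = Vec::new();
    write_comment_sym(&mut buf, &symbol)?;
    assert_eq!(buf, vec![0, 0, 0, 4, 0, 0, 0, 0]);
    Ok(())
}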
@ -1,23 +1,26 @@
use std::{ use std::{
io::{BufRead, Write}, io::{BufRead, Write},
iter,
num::ParseIntError, num::ParseIntError,
str::FromStr, str::FromStr,
}; };
use anyhow::{anyhow, bail, Result}; use anyhow::{anyhow, bail, ensure, Result};
use cwdemangle::{demangle, DemangleOptions}; use cwdemangle::{demangle, DemangleOptions};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use crate::obj::{ObjInfo, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind}; use crate::{
use crate::util::nested::NestedVec; obj::{
ObjDataKind, ObjInfo, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
},
util::nested::NestedVec,
};
fn parse_hex(s: &str) -> Result<u32, ParseIntError> { fn parse_hex(s: &str) -> Result<u32, ParseIntError> {
u32::from_str_radix(s.trim_start_matches("0x"), 16) u32::from_str_radix(s.trim_start_matches("0x"), 16)
} }
pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>> { pub fn parse_symbol_line(line: &str, obj: &mut ObjInfo) -> Result<Option<ObjSymbol>> {
static SYMBOL_LINE: Lazy<Regex> = Lazy::new(|| { static SYMBOL_LINE: Lazy<Regex> = Lazy::new(|| {
Regex::new( Regex::new(
"^\\s*(?P<name>[^\\s=]+)\\s*=\\s*(?:(?P<section>[A-Za-z0-9.]+):)?(?P<addr>[0-9A-Fa-fXx]+);(?:\\s*//\\s*(?P<attrs>.*))?$", "^\\s*(?P<name>[^\\s=]+)\\s*=\\s*(?:(?P<section>[A-Za-z0-9.]+):)?(?P<addr>[0-9A-Fa-fXx]+);(?:\\s*//\\s*(?P<attrs>.*))?$",
@ -26,7 +29,7 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
}); });
static COMMENT_LINE: Lazy<Regex> = Lazy::new(|| Regex::new("^\\s*(?://|#).*$").unwrap()); static COMMENT_LINE: Lazy<Regex> = Lazy::new(|| Regex::new("^\\s*(?://|#).*$").unwrap());
if let Some(captures) = SYMBOL_LINE.captures(&line) { if let Some(captures) = SYMBOL_LINE.captures(line) {
let name = captures["name"].to_string(); let name = captures["name"].to_string();
let addr = parse_hex(&captures["addr"])?; let addr = parse_hex(&captures["addr"])?;
let demangled_name = demangle(&name, &DemangleOptions::default()); let demangled_name = demangle(&name, &DemangleOptions::default());
@ -39,6 +42,8 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
size_known: false, size_known: false,
flags: Default::default(), flags: Default::default(),
kind: ObjSymbolKind::Unknown, kind: ObjSymbolKind::Unknown,
align: None,
data_kind: Default::default(),
}; };
let attrs = captures["attrs"].split(' '); let attrs = captures["attrs"].split(' ');
for attr in attrs { for attr in attrs {
@ -49,21 +54,36 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
.ok_or_else(|| anyhow!("Unknown symbol type '{}'", value))?; .ok_or_else(|| anyhow!("Unknown symbol type '{}'", value))?;
} }
"size" => { "size" => {
symbol.size = parse_hex(&value)? as u64; symbol.size = parse_hex(value)? as u64;
symbol.size_known = true; symbol.size_known = true;
} }
"scope" => { "scope" => {
symbol.flags.0 |= symbol_flags_from_str(value) symbol.flags.0 |= symbol_flags_from_str(value)
.ok_or_else(|| anyhow!("Unknown symbol scope '{}'", value))?; .ok_or_else(|| anyhow!("Unknown symbol scope '{}'", value))?;
} }
_ => bail!("Unknown attribute '{name}'"), "align" => {
symbol.align = Some(parse_hex(value)?);
}
"data" => {
symbol.data_kind = symbol_data_kind_from_str(value)
.ok_or_else(|| anyhow!("Unknown symbol data type '{}'", value))?;
}
_ => bail!("Unknown symbol attribute '{name}'"),
} }
} else { } else {
match attr { match attr {
"hidden" => { "hidden" => {
symbol.flags.0 |= ObjSymbolFlags::Hidden; symbol.flags.0 |= ObjSymbolFlags::Hidden;
} }
_ => bail!("Unknown attribute '{attr}'"), "noreloc" => {
ensure!(
symbol.size != 0,
"Symbol {} requires size != 0 with noreloc",
symbol.name
);
obj.blocked_ranges.insert(addr, addr + symbol.size as u32);
}
_ => bail!("Unknown symbol attribute '{attr}'"),
} }
} }
} }
@ -71,7 +91,7 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
} else if COMMENT_LINE.is_match(line) { } else if COMMENT_LINE.is_match(line) {
Ok(None) Ok(None)
} else { } else {
Err(anyhow!("Failed to parse line '{line}'")) Err(anyhow!("Failed to parse symbol line '{line}'"))
} }
} }
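A hedged sketch of the extended attribute syntax, test-style and assuming it sits next to parse_symbol_line with use super::* (the symbol name and address are placeholders): size and align take hex values, data takes one of the kinds listed further below, and bare attributes such as hidden remain supported.

use crate::obj::{ObjArchitecture, ObjInfo, ObjKind};

fn parse_extended_symbol() -> anyhow::Result<()> {
    let mut obj = ObjInfo::new(
        ObjKind::Executable,
        ObjArchitecture::PowerPc,
        "main.dol".to_string(),
        vec![],
        vec![],
    );
    let symbol = parse_symbol_line(
        "lbl_80003100 = .data:0x80003100; // size:0x8 align:0x8 data:double hidden",
        &mut obj,
    )?
    .expect("expected a symbol");
    assert_eq!(symbol.size, 8);
    assert_eq!(symbol.align, Some(8));
    Ok(())
}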
@ -86,9 +106,7 @@ fn is_skip_symbol(symbol: &ObjSymbol) -> bool {
} }
pub fn write_symbols<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> { pub fn write_symbols<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
let mut symbols: Vec<&ObjSymbol> = obj.symbols.iter().map(|s| s).collect(); for (_, symbol) in obj.symbols.for_range(..) {
symbols.sort_by_key(|s| s.address);
for symbol in symbols {
if symbol.kind == ObjSymbolKind::Section if symbol.kind == ObjSymbolKind::Section
// Ignore absolute symbols for now (usually linker-generated) // Ignore absolute symbols for now (usually linker-generated)
|| symbol.section.is_none() || symbol.section.is_none()
@ -128,9 +146,18 @@ fn write_symbol<W: Write>(w: &mut W, obj: &ObjInfo, symbol: &ObjSymbol) -> Resul
if let Some(scope) = symbol_flags_to_str(symbol.flags) { if let Some(scope) = symbol_flags_to_str(symbol.flags) {
write!(w, " scope:{scope}")?; write!(w, " scope:{scope}")?;
} }
if let Some(align) = symbol.align {
write!(w, " align:{align:#X}")?;
}
if let Some(kind) = symbol_data_kind_to_str(symbol.data_kind) {
write!(w, " data:{kind}")?;
}
if symbol.flags.0.contains(ObjSymbolFlags::Hidden) { if symbol.flags.0.contains(ObjSymbolFlags::Hidden) {
write!(w, " hidden")?; write!(w, " hidden")?;
} }
if obj.blocked_ranges.contains_key(&(symbol.address as u32)) {
write!(w, " noreloc")?;
}
writeln!(w)?; writeln!(w)?;
Ok(()) Ok(())
} }
@ -145,6 +172,23 @@ fn symbol_kind_to_str(kind: ObjSymbolKind) -> &'static str {
} }
} }
#[inline]
fn symbol_data_kind_to_str(kind: ObjDataKind) -> Option<&'static str> {
match kind {
ObjDataKind::Unknown => None,
ObjDataKind::Byte => Some("byte"),
ObjDataKind::Byte2 => Some("2byte"),
ObjDataKind::Byte4 => Some("4byte"),
ObjDataKind::Byte8 => Some("8byte"),
ObjDataKind::Float => Some("float"),
ObjDataKind::Double => Some("double"),
ObjDataKind::String => Some("string"),
ObjDataKind::String16 => Some("wstring"),
ObjDataKind::StringTable => Some("string_table"),
ObjDataKind::String16Table => Some("wstring_table"),
}
}
#[inline] #[inline]
fn symbol_kind_from_str(s: &str) -> Option<ObjSymbolKind> { fn symbol_kind_from_str(s: &str) -> Option<ObjSymbolKind> {
match s { match s {
@ -182,45 +226,36 @@ fn symbol_flags_from_str(s: &str) -> Option<ObjSymbolFlags> {
} }
} }
pub fn write_splits<W: Write>( #[inline]
w: &mut W, fn symbol_data_kind_from_str(s: &str) -> Option<ObjDataKind> {
obj: &ObjInfo, match s {
obj_files: Option<Vec<String>>, "byte" => Some(ObjDataKind::Byte),
) -> Result<()> { "2byte" => Some(ObjDataKind::Byte2),
let mut obj_files_iter = obj_files.map(|v| v.into_iter()); "4byte" => Some(ObjDataKind::Byte4),
"8byte" => Some(ObjDataKind::Byte8),
"float" => Some(ObjDataKind::Float),
"double" => Some(ObjDataKind::Double),
"string" => Some(ObjDataKind::String),
"wstring" => Some(ObjDataKind::String16),
"string_table" => Some(ObjDataKind::StringTable),
"wstring_table" => Some(ObjDataKind::String16Table),
_ => None,
}
}
pub fn write_splits<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
for unit in &obj.link_order { for unit in &obj.link_order {
let obj_file = if let Some(obj_files_iter) = &mut obj_files_iter { writeln!(w, "{}:", unit)?;
obj_files_iter.next() let mut split_iter = obj.splits_for_range(..).peekable();
} else { while let Some((addr, split)) = split_iter.next() {
None if &split.unit != unit {
};
log::info!("Processing {} (obj file {:?})", unit, obj_file);
if let Some(obj_file) = obj_file {
let trim_unit = unit
.trim_end_matches("_1")
.trim_end_matches(" (asm)")
.trim_end_matches(".o")
.trim_end_matches(".cpp")
.trim_end_matches(".c");
if !obj_file.contains(trim_unit) {
bail!("Unit mismatch: {} vs {}", unit, obj_file);
}
let trim_obj = obj_file
.trim_end_matches(" \\")
.trim_start_matches("\t$(BUILD_DIR)/")
.trim_start_matches("asm/")
.trim_start_matches("src/");
writeln!(w, "{}:", trim_obj)?;
} else {
writeln!(w, "{}:", unit)?;
}
let mut split_iter = obj.splits.iter()
.flat_map(|(addr, v)| v.iter().map(move |u| (addr, u))).peekable();
while let Some((&addr, it_unit)) = split_iter.next() {
if it_unit != unit {
continue; continue;
} }
let end = split_iter.peek().map(|(&addr, _)| addr).unwrap_or(u32::MAX); let end = if split.end > 0 {
split.end
} else {
split_iter.peek().map(|&(addr, _)| addr).unwrap_or(0)
};
let section = obj.section_at(addr)?; let section = obj.section_at(addr)?;
writeln!(w, "\t{:<11} start:{:#010X} end:{:#010X}", section.name, addr, end)?; writeln!(w, "\t{:<11} start:{:#010X} end:{:#010X}", section.name, addr, end)?;
// align:{} // align:{}
@ -232,7 +267,7 @@ pub fn write_splits<W: Write>(
enum SplitLine { enum SplitLine {
Unit { name: String }, Unit { name: String },
Section { name: String, start: u32, end: u32, align: Option<u32> }, Section { name: String, start: u32, end: u32, align: Option<u32>, common: bool },
None, None,
} }
@ -245,40 +280,49 @@ fn parse_split_line(line: &str) -> Result<SplitLine> {
if line.is_empty() || COMMENT_LINE.is_match(line) { if line.is_empty() || COMMENT_LINE.is_match(line) {
Ok(SplitLine::None) Ok(SplitLine::None)
} else if let Some(captures) = UNIT_LINE.captures(&line) { } else if let Some(captures) = UNIT_LINE.captures(line) {
let name = captures["name"].to_string(); let name = captures["name"].to_string();
Ok(SplitLine::Unit { name }) Ok(SplitLine::Unit { name })
} else if let Some(captures) = SECTION_LINE.captures(&line) { } else if let Some(captures) = SECTION_LINE.captures(line) {
let mut name = captures["name"].to_string(); let mut name = captures["name"].to_string();
let mut start: Option<u32> = None; let mut start: Option<u32> = None;
let mut end: Option<u32> = None; let mut end: Option<u32> = None;
let mut align: Option<u32> = None; let mut align: Option<u32> = None;
let mut common = false;
let attrs = captures["attrs"].split(' '); let attrs = captures["attrs"].split(' ');
for attr in attrs { for attr in attrs {
if let Some((attr, value)) = attr.split_once(':') { if let Some((attr, value)) = attr.split_once(':') {
match attr { match attr {
"start" => { "start" => {
start = Some(parse_hex(&value)?); start = Some(parse_hex(value)?);
} }
"end" => { "end" => {
end = Some(parse_hex(&value)?); end = Some(parse_hex(value)?);
} }
"align" => align = Some(u32::from_str(value)?), "align" => align = Some(u32::from_str(value)?),
"rename" => name = value.to_string(), "rename" => name = value.to_string(),
_ => bail!("Unknown attribute '{name}'"), _ => bail!("Unknown split attribute '{name}'"),
} }
} else { } else {
bail!("Unknown attribute '{attr}'") match attr {
"common" => {
common = true;
if align.is_none() {
align = Some(4);
}
}
_ => bail!("Unknown split attribute '{attr}'"),
}
} }
} }
if let (Some(start), Some(end)) = (start, end) { if let (Some(start), Some(end)) = (start, end) {
Ok(SplitLine::Section { name, start, end, align }) Ok(SplitLine::Section { name, start, end, align, common })
} else { } else {
Err(anyhow!("Missing attribute: '{line}'")) Err(anyhow!("Missing split attribute: '{line}'"))
} }
} else { } else {
Err(anyhow!("Failed to parse line: '{line}'")) Err(anyhow!("Failed to parse split line: '{line}'"))
} }
} }
@ -302,10 +346,8 @@ pub fn apply_splits<R: BufRead>(r: R, obj: &mut ObjInfo) -> Result<()> {
(SplitState::None, SplitLine::Section { name, .. }) => { (SplitState::None, SplitLine::Section { name, .. }) => {
bail!("Section {} defined outside of unit", name); bail!("Section {} defined outside of unit", name);
} }
(SplitState::Unit(unit), SplitLine::Section { name, start, end, align }) => { (SplitState::Unit(unit), SplitLine::Section { name, start, end, align, common }) => {
let _ = end; obj.splits.nested_push(start, ObjSplit { unit: unit.clone(), end, align, common });
let _ = align;
obj.splits.nested_push(start, unit.clone());
obj.named_sections.insert(start, name); obj.named_sections.insert(start, name);
} }
_ => {} _ => {}
@ -33,27 +33,8 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
let mmap = map_file(path)?; let mmap = map_file(path)?;
Dol::read_from(map_reader(&mmap))? Dol::read_from(map_reader(&mmap))?
}; };
let mut obj = ObjInfo { let mut obj = ObjInfo::new(ObjKind::Executable, ObjArchitecture::PowerPc, name, vec![], vec![]);
module_id: 0, obj.entry = dol.header.entry_point as u64;
kind: ObjKind::Executable,
architecture: ObjArchitecture::PowerPc,
name,
symbols: vec![],
sections: vec![],
entry: dol.header.entry_point as u64,
sda2_base: None,
sda_base: None,
stack_address: None,
stack_end: None,
db_stack_addr: None,
arena_lo: None,
arena_hi: None,
splits: Default::default(),
named_sections: Default::default(),
link_order: vec![],
known_functions: Default::default(),
unresolved_relocations: vec![],
};
// Locate _rom_copy_info // Locate _rom_copy_info
let first_rom_section = dol let first_rom_section = dol
@ -355,46 +336,61 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
if let (Some(rom_copy_info_addr), Some(rom_copy_info_end)) = if let (Some(rom_copy_info_addr), Some(rom_copy_info_end)) =
(rom_copy_info_addr, rom_copy_info_end) (rom_copy_info_addr, rom_copy_info_end)
{ {
obj.symbols.push(ObjSymbol { obj.add_symbol(
name: "_rom_copy_info".to_string(), ObjSymbol {
demangled_name: None, name: "_rom_copy_info".to_string(),
address: rom_copy_info_addr as u64, demangled_name: None,
section: init_section_index, address: rom_copy_info_addr as u64,
size: (rom_copy_info_end - rom_copy_info_addr) as u64, section: init_section_index,
size_known: true, size: (rom_copy_info_end - rom_copy_info_addr) as u64,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()), size_known: true,
kind: ObjSymbolKind::Object, flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
}); kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
true,
)?;
} }
// Generate _bss_init_info symbol // Generate _bss_init_info symbol
if let (Some(bss_init_info_addr), Some(bss_init_info_end)) = if let (Some(bss_init_info_addr), Some(bss_init_info_end)) =
(bss_init_info_addr, bss_init_info_end) (bss_init_info_addr, bss_init_info_end)
{ {
obj.symbols.push(ObjSymbol { obj.add_symbol(
name: "_bss_init_info".to_string(), ObjSymbol {
demangled_name: None, name: "_bss_init_info".to_string(),
address: bss_init_info_addr as u64, demangled_name: None,
section: init_section_index, address: bss_init_info_addr as u64,
size: (bss_init_info_end - bss_init_info_addr) as u64, section: init_section_index,
size_known: true, size: (bss_init_info_end - bss_init_info_addr) as u64,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()), size_known: true,
kind: ObjSymbolKind::Object, flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
}); kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
true,
)?;
} }
// Generate _eti_init_info symbol // Generate _eti_init_info symbol
if let Some((eti_init_info_addr, eti_init_info_end)) = eti_init_info_range { if let Some((eti_init_info_addr, eti_init_info_end)) = eti_init_info_range {
obj.symbols.push(ObjSymbol { obj.add_symbol(
name: "_eti_init_info".to_string(), ObjSymbol {
demangled_name: None, name: "_eti_init_info".to_string(),
address: eti_init_info_addr as u64, demangled_name: None,
section: extabindex_section, address: eti_init_info_addr as u64,
size: (eti_init_info_end - eti_init_info_addr) as u64, section: extabindex_section,
size_known: true, size: (eti_init_info_end - eti_init_info_addr) as u64,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()), size_known: true,
kind: ObjSymbolKind::Object, flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
}); kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
true,
)?;
} }
// Generate symbols for extab & extabindex entries // Generate symbols for extab & extabindex entries
@ -402,7 +398,12 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
(extabindex_section, extab_section) (extabindex_section, extab_section)
{ {
let extabindex_section = &obj.sections[extabindex_section_idx]; let extabindex_section = &obj.sections[extabindex_section_idx];
let extabindex_section_index = extabindex_section.index;
let extabindex_section_address = extabindex_section.address;
let extabindex_section_size = extabindex_section.size;
let extab_section = &obj.sections[extab_section_idx]; let extab_section = &obj.sections[extab_section_idx];
let extab_section_index = extab_section.index;
for entry in &eti_entries { for entry in &eti_entries {
// Add functions from extabindex entries as known function bounds // Add functions from extabindex entries as known function bounds
if let Some(old_value) = obj.known_functions.insert(entry.function, entry.function_size) if let Some(old_value) = obj.known_functions.insert(entry.function, entry.function_size)
@ -416,16 +417,21 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
); );
} }
} }
obj.symbols.push(ObjSymbol { obj.add_symbol(
name: format!("@eti_{:08X}", entry.address), ObjSymbol {
demangled_name: None, name: format!("@eti_{:08X}", entry.address),
address: entry.address as u64, demangled_name: None,
section: Some(extabindex_section.index), address: entry.address as u64,
size: 12, section: Some(extabindex_section_index),
size_known: true, size: 12,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local | ObjSymbolFlags::Hidden), size_known: true,
kind: ObjSymbolKind::Object, flags: ObjSymbolFlagSet(ObjSymbolFlags::Local | ObjSymbolFlags::Hidden),
}); kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
false,
)?;
} }
let mut entry_iter = eti_entries.iter().peekable(); let mut entry_iter = eti_entries.iter().peekable();
@ -434,20 +440,25 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
(Some(a), Some(&b)) => (a.extab_addr, b.extab_addr - a.extab_addr), (Some(a), Some(&b)) => (a.extab_addr, b.extab_addr - a.extab_addr),
(Some(a), None) => ( (Some(a), None) => (
a.extab_addr, a.extab_addr,
(extabindex_section.address + extabindex_section.size) as u32 - a.extab_addr, (extabindex_section_address + extabindex_section_size) as u32 - a.extab_addr,
), ),
_ => break, _ => break,
}; };
obj.symbols.push(ObjSymbol { obj.add_symbol(
name: format!("@etb_{:08X}", addr), ObjSymbol {
demangled_name: None, name: format!("@etb_{:08X}", addr),
address: addr as u64, demangled_name: None,
section: Some(extab_section.index), address: addr as u64,
size: size as u64, section: Some(extab_section_index),
size_known: true, size: size as u64,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local | ObjSymbolFlags::Hidden), size_known: true,
kind: ObjSymbolKind::Object, flags: ObjSymbolFlagSet(ObjSymbolFlags::Local | ObjSymbolFlags::Hidden),
}); kind: ObjSymbolKind::Object,
align: None,
data_kind: Default::default(),
},
false,
)?;
} }
} }
@ -456,26 +467,36 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
Ok(true) => { Ok(true) => {
let sda2_base = obj.sda2_base.unwrap(); let sda2_base = obj.sda2_base.unwrap();
let sda_base = obj.sda_base.unwrap(); let sda_base = obj.sda_base.unwrap();
obj.symbols.push(ObjSymbol { obj.add_symbol(
name: "_SDA2_BASE_".to_string(), ObjSymbol {
demangled_name: None, name: "_SDA2_BASE_".to_string(),
address: sda2_base as u64, demangled_name: None,
section: None, address: sda2_base as u64,
size: 0, section: None,
size_known: false, size: 0,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()), size_known: false,
kind: ObjSymbolKind::Unknown, flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
}); kind: ObjSymbolKind::Unknown,
obj.symbols.push(ObjSymbol { align: None,
name: "_SDA_BASE_".to_string(), data_kind: Default::default(),
demangled_name: None, },
address: sda_base as u64, true,
section: None, )?;
size: 0, obj.add_symbol(
size_known: false, ObjSymbol {
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()), name: "_SDA_BASE_".to_string(),
kind: ObjSymbolKind::Unknown, demangled_name: None,
}); address: sda_base as u64,
section: None,
size: 0,
size_known: false,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
kind: ObjSymbolKind::Unknown,
align: None,
data_kind: Default::default(),
},
true,
)?;
} }
Ok(false) => { Ok(false) => {
log::warn!("Unable to locate SDA bases"); log::warn!("Unable to locate SDA bases");
@ -512,17 +533,17 @@ struct EtiEntry {
} }
fn read_eti_init_info(dol: &Dol, addr: u32) -> Result<EtiInitInfo> { fn read_eti_init_info(dol: &Dol, addr: u32) -> Result<EtiInitInfo> {
let eti_start = read_u32(&dol, addr)?; let eti_start = read_u32(dol, addr)?;
let eti_end = read_u32(&dol, addr + 4)?; let eti_end = read_u32(dol, addr + 4)?;
let code_start = read_u32(&dol, addr + 8)?; let code_start = read_u32(dol, addr + 8)?;
let code_size = read_u32(&dol, addr + 12)?; let code_size = read_u32(dol, addr + 12)?;
Ok(EtiInitInfo { eti_start, eti_end, code_start, code_size }) Ok(EtiInitInfo { eti_start, eti_end, code_start, code_size })
} }
fn read_eti_entry(dol: &Dol, address: u32) -> Result<EtiEntry> { fn read_eti_entry(dol: &Dol, address: u32) -> Result<EtiEntry> {
let function = read_u32(&dol, address)?; let function = read_u32(dol, address)?;
let function_size = read_u32(&dol, address + 4)?; let function_size = read_u32(dol, address + 4)?;
let extab_addr = read_u32(&dol, address + 8)?; let extab_addr = read_u32(dol, address + 8)?;
Ok(EtiEntry { address, function, function_size, extab_addr }) Ok(EtiEntry { address, function, function_size, extab_addr })
} }
@ -538,7 +559,7 @@ fn validate_eti_init_info(
&& eti_init_info.eti_end >= eti_section.target && eti_init_info.eti_end >= eti_section.target
&& eti_init_info.eti_end < eti_section_end && eti_init_info.eti_end < eti_section_end
{ {
if let Some(code_section) = section_by_address(&dol, eti_init_info.code_start) { if let Some(code_section) = section_by_address(dol, eti_init_info.code_start) {
let code_section_size = match rom_sections.get(&code_section.target) { let code_section_size = match rom_sections.get(&code_section.target) {
Some(&size) => size, Some(&size) => size,
None => code_section.size, None => code_section.size,
@ -601,7 +601,7 @@ impl Type {
let tag = tags let tag = tags
.get(&key) .get(&key)
.ok_or_else(|| anyhow!("Failed to locate user defined type {}", key))?; .ok_or_else(|| anyhow!("Failed to locate user defined type {}", key))?;
let ud_type = ud_type(&tags, tag)?; let ud_type = ud_type(tags, tag)?;
ud_type.size(tags) ud_type.size(tags)
} }
} }
@ -784,7 +784,7 @@ pub fn struct_def_string(
if let Some(bit) = &member.bit { if let Some(bit) = &member.bit {
write!(out, " : {}", bit.bit_size)?; write!(out, " : {}", bit.bit_size)?;
} }
write!(out, "; // offset {:#X}, size {:#X}\n", member.offset, member.kind.size(tags)?)?; writeln!(out, "; // offset {:#X}, size {:#X}", member.offset, member.kind.size(tags)?)?;
} }
write!(out, "}}")?; write!(out, "}}")?;
Ok(out) Ok(out)
@ -866,8 +866,16 @@ pub fn process_variable_location(block: &[u8]) -> Result<String> {
// TODO: float regs // TODO: float regs
if block.len() == 5 && block[0] == LocationOp::Register as u8 { if block.len() == 5 && block[0] == LocationOp::Register as u8 {
Ok(format!("r{}", u32::from_be_bytes(block[1..].try_into()?))) Ok(format!("r{}", u32::from_be_bytes(block[1..].try_into()?)))
} else if block.len() == 11 && block[0] == LocationOp::BaseRegister as u8 && block[5] == LocationOp::Const as u8 && block[10] == LocationOp::Add as u8 { } else if block.len() == 11
Ok(format!("r{}+{:#X}", u32::from_be_bytes(block[1..5].try_into()?), u32::from_be_bytes(block[6..10].try_into()?))) && block[0] == LocationOp::BaseRegister as u8
&& block[5] == LocationOp::Const as u8
&& block[10] == LocationOp::Add as u8
{
Ok(format!(
"r{}+{:#X}",
u32::from_be_bytes(block[1..5].try_into()?),
u32::from_be_bytes(block[6..10].try_into()?)
))
} else { } else {
Err(anyhow!("Unhandled location data {:?}, expected variable loc", block)) Err(anyhow!("Unhandled location data {:?}, expected variable loc", block))
} }
@ -1,10 +1,11 @@
use std::{ use std::{
collections::{btree_map::Entry, hash_map, BTreeMap, HashMap}, collections::{hash_map, BTreeMap, HashMap},
io::Cursor, io::Cursor,
path::Path, path::Path,
}; };
use anyhow::{anyhow, bail, ensure, Context, Result}; use anyhow::{anyhow, bail, ensure, Context, Result};
use byteorder::{BigEndian, WriteBytesExt};
use cwdemangle::demangle; use cwdemangle::demangle;
use flagset::Flags; use flagset::Flags;
use indexmap::IndexMap; use indexmap::IndexMap;
@ -16,24 +17,20 @@ use object::{
StringId, StringId,
}, },
Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation, Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation,
RelocationKind, RelocationTarget, Section, SectionKind, Symbol, SymbolKind, SymbolScope, RelocationKind, RelocationTarget, SectionKind, Symbol, SymbolKind, SymbolScope, SymbolSection,
SymbolSection,
}; };
use crate::{ use crate::{
obj::{ obj::{
ObjArchitecture, ObjInfo, ObjKind, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjArchitecture, ObjInfo, ObjKind, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind,
ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
}, },
util::{ util::{
dwarf::{ comment::{write_comment_sym, MWComment},
process_address, process_type, read_debug_section, type_string, ud_type,
ud_type_string, AttributeKind, TagKind, TypeKind,
},
file::map_file, file::map_file,
nested::NestedVec,
}, },
}; };
use crate::util::nested::NestedVec;
enum BoundaryState { enum BoundaryState {
/// Looking for a file symbol, any section symbols are queued /// Looking for a file symbol, any section symbols are queued
@ -70,11 +67,17 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
let mut sections: Vec<ObjSection> = vec![]; let mut sections: Vec<ObjSection> = vec![];
let mut section_indexes: Vec<Option<usize>> = vec![]; let mut section_indexes: Vec<Option<usize>> = vec![];
for section in obj_file.sections() { for section in obj_file.sections() {
if section.size() == 0 {
section_indexes.push(None);
continue;
}
let section_name = section.name()?;
let section_kind = match section.kind() { let section_kind = match section.kind() {
SectionKind::Text => ObjSectionKind::Code, SectionKind::Text => ObjSectionKind::Code,
SectionKind::Data => ObjSectionKind::Data, SectionKind::Data => ObjSectionKind::Data,
SectionKind::ReadOnlyData => ObjSectionKind::ReadOnlyData, SectionKind::ReadOnlyData => ObjSectionKind::ReadOnlyData,
SectionKind::UninitializedData => ObjSectionKind::Bss, SectionKind::UninitializedData => ObjSectionKind::Bss,
// SectionKind::Other if section_name == ".comment" => ObjSectionKind::Comment,
_ => { _ => {
section_indexes.push(None); section_indexes.push(None);
continue; continue;
@ -82,7 +85,7 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
}; };
section_indexes.push(Some(sections.len())); section_indexes.push(Some(sections.len()));
sections.push(ObjSection { sections.push(ObjSection {
name: section.name()?.to_string(), name: section_name.to_string(),
kind: section_kind, kind: section_kind,
address: section.address(), address: section.address(),
size: section.size(), size: section.size(),
@ -232,14 +235,14 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
} }
BoundaryState::FilesEnded => {} BoundaryState::FilesEnded => {}
}, },
SymbolSection::Undefined => {} SymbolSection::Common | SymbolSection::Undefined => {}
_ => bail!("Unsupported symbol section type {symbol:?}"), _ => bail!("Unsupported symbol section type {symbol:?}"),
}, },
} }
// Generate symbols // Generate symbols
if matches!(symbol.kind(), SymbolKind::Null | SymbolKind::File) if matches!(symbol.kind(), SymbolKind::Null | SymbolKind::File)
|| matches!(symbol.section_index(), Some(idx) if section_indexes[idx.0] == None) || matches!(symbol.section_index(), Some(idx) if section_indexes[idx.0].is_none())
{ {
symbol_indexes.push(None); symbol_indexes.push(None);
continue; continue;
@ -249,7 +252,7 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
} }
let mut link_order = Vec::<String>::new(); let mut link_order = Vec::<String>::new();
let mut splits = BTreeMap::<u32, Vec<String>>::new(); let mut splits = BTreeMap::<u32, Vec<ObjSplit>>::new();
if kind == ObjKind::Executable { if kind == ObjKind::Executable {
// Link order is trivially deduced // Link order is trivially deduced
for file_name in section_starts.keys() { for file_name in section_starts.keys() {
@ -259,7 +262,12 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
// Create a map of address -> file splits // Create a map of address -> file splits
for (file_name, sections) in section_starts { for (file_name, sections) in section_starts {
for (address, _) in sections { for (address, _) in sections {
splits.nested_push(address as u32, file_name.clone()); splits.nested_push(address as u32, ObjSplit {
unit: file_name.clone(),
end: 0, // TODO
align: None,
common: false, // TODO
});
} }
} }
@ -283,27 +291,30 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
} }
} }
Ok(ObjInfo { let mw_comment = if let Some(comment_section) = obj_file.section_by_name(".comment") {
module_id: 0, let data = comment_section.uncompressed_data()?;
kind, let mut reader = Cursor::new(&*data);
architecture, let header = MWComment::parse_header(&mut reader)?;
name: obj_name, log::info!("Loaded comment header {:?}", header);
symbols,
sections, header
entry: obj_file.entry(), } else {
sda2_base, MWComment::default()
sda_base, };
stack_address,
stack_end, let mut obj = ObjInfo::new(kind, architecture, obj_name, symbols, sections);
db_stack_addr, obj.entry = obj_file.entry();
arena_lo, obj.mw_comment = mw_comment;
arena_hi, obj.sda2_base = sda2_base;
splits, obj.sda_base = sda_base;
named_sections: Default::default(), obj.stack_address = stack_address;
link_order, obj.stack_end = stack_end;
known_functions: Default::default(), obj.db_stack_addr = db_stack_addr;
unresolved_relocations: vec![], obj.arena_lo = arena_lo;
}) obj.arena_hi = arena_hi;
obj.splits = splits;
obj.link_order = link_order;
Ok(obj)
} }
pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> { pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
@ -319,6 +330,7 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
rela_name: Option<StringId>, rela_name: Option<StringId>,
} }
struct OutSymbol { struct OutSymbol {
#[allow(dead_code)]
index: SymbolIndex, index: SymbolIndex,
sym: object::write::elf::Sym, sym: object::write::elf::Sym,
} }
@ -337,26 +349,50 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
rela_name: None, rela_name: None,
}); });
} }
let mut rela_names: Vec<String> = vec![Default::default(); obj.sections.len()]; let mut rela_names: Vec<String> = vec![Default::default(); obj.sections.len()];
for ((section, out_section), rela_name) in for ((section, out_section), rela_name) in
obj.sections.iter().zip(&mut out_sections).zip(&mut rela_names) obj.sections.iter().zip(&mut out_sections).zip(&mut rela_names)
{ {
if !section.relocations.is_empty() { if section.relocations.is_empty() {
*rela_name = format!(".rela{}", section.name); continue;
out_section.rela_name = Some(writer.add_section_name(rela_name.as_bytes()));
out_section.rela_index = Some(writer.reserve_section_index());
} }
*rela_name = format!(".rela{}", section.name);
out_section.rela_name = Some(writer.add_section_name(rela_name.as_bytes()));
out_section.rela_index = Some(writer.reserve_section_index());
} }
let symtab = writer.reserve_symtab_section_index();
writer.reserve_shstrtab_section_index();
writer.reserve_strtab_section_index();
// Add symbols let symtab = writer.reserve_symtab_section_index();
let mut out_symbols: Vec<OutSymbol> = Vec::with_capacity(obj.symbols.len()); writer.reserve_strtab_section_index();
let mut symbol_offset = 0; writer.reserve_shstrtab_section_index();
// Generate comment section
let mut comment_data = if obj.kind == ObjKind::Relocatable {
// let mut comment_data = Vec::<u8>::with_capacity(0x2C + obj.symbols.len() * 8);
// let name = writer.add_section_name(".comment".as_bytes());
// let index = writer.reserve_section_index();
// out_sections.push(OutSection {
// index,
// rela_index: None,
// offset: 0,
// rela_offset: 0,
// name,
// rela_name: None,
// });
// obj.mw_comment.write_header(&mut comment_data)?;
// Some(comment_data)
None::<Vec<u8>>
} else {
None
};
let mut out_symbols: Vec<OutSymbol> = Vec::with_capacity(obj.symbols.count());
let mut symbol_map = vec![None; obj.symbols.count()];
let mut section_symbol_offset = 0;
let mut num_local = 0; let mut num_local = 0;
// Add file symbol
if !obj.name.is_empty() { if !obj.name.is_empty() {
// Add file symbol
let name_index = writer.add_string(obj.name.as_bytes()); let name_index = writer.add_string(obj.name.as_bytes());
let index = writer.reserve_symbol_index(None); let index = writer.reserve_symbol_index(None);
out_symbols.push(OutSymbol { out_symbols.push(OutSymbol {
@ -375,9 +411,42 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
st_size: 0, st_size: 0,
}, },
}); });
symbol_offset += 1; if let Some(comment_data) = &mut comment_data {
comment_data.write_u64::<BigEndian>(0)?;
}
section_symbol_offset += 1;
} }
for symbol in &obj.symbols {
// Add section symbols for relocatable objects
if obj.kind == ObjKind::Relocatable {
for section in &obj.sections {
let section_index = out_sections.get(section.index).map(|s| s.index);
let index = writer.reserve_symbol_index(section_index);
let name_index = writer.add_string(section.name.as_bytes());
let sym = object::write::elf::Sym {
name: Some(name_index),
section: section_index,
st_info: (elf::STB_LOCAL << 4) + elf::STT_SECTION,
st_other: elf::STV_DEFAULT,
st_shndx: 0,
st_value: 0,
st_size: section.size,
};
num_local = writer.symbol_count();
out_symbols.push(OutSymbol { index, sym });
}
}
// Add symbols
for (symbol, symbol_map) in obj.symbols.iter().zip(&mut symbol_map) {
if obj.kind == ObjKind::Relocatable && symbol.kind == ObjSymbolKind::Section {
// We wrote section symbols above, so skip them here
let section_index =
symbol.section.ok_or_else(|| anyhow!("section symbol without section index"))?;
*symbol_map = Some(section_symbol_offset + section_index as u32);
continue;
}
let section_index = symbol.section.and_then(|idx| out_sections.get(idx)).map(|s| s.index); let section_index = symbol.section.and_then(|idx| out_sections.get(idx)).map(|s| s.index);
let index = writer.reserve_symbol_index(section_index); let index = writer.reserve_symbol_index(section_index);
let name_index = if symbol.name.is_empty() { let name_index = if symbol.name.is_empty() {
@ -429,6 +498,10 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
num_local = writer.symbol_count(); num_local = writer.symbol_count();
} }
out_symbols.push(OutSymbol { index, sym }); out_symbols.push(OutSymbol { index, sym });
*symbol_map = Some(index.0);
if let Some(comment_data) = &mut comment_data {
write_comment_sym(comment_data, symbol)?;
}
} }
writer.reserve_file_header(); writer.reserve_file_header();
@ -438,18 +511,20 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
} }
for (section, out_section) in obj.sections.iter().zip(&mut out_sections) { for (section, out_section) in obj.sections.iter().zip(&mut out_sections) {
match section.kind { if section.kind == ObjSectionKind::Bss {
ObjSectionKind::Code | ObjSectionKind::Data | ObjSectionKind::ReadOnlyData => {} continue;
ObjSectionKind::Bss => continue, }
ensure!(section.data.len() as u64 == section.size);
if section.size == 0 {
// Bug: Writer::reserve doesn't align when len is 0
let offset = (writer.reserved_len() + 31) & !31;
writer.reserve_until(offset);
out_section.offset = offset;
} else {
out_section.offset = writer.reserve(section.data.len(), 32);
} }
ensure!(section.data.len() as u64 == section.size, "Mismatched section size");
out_section.offset = writer.reserve(section.data.len(), 32);
} }
writer.reserve_shstrtab();
writer.reserve_strtab();
writer.reserve_symtab();
for (section, out_section) in obj.sections.iter().zip(&mut out_sections) { for (section, out_section) in obj.sections.iter().zip(&mut out_sections) {
if section.relocations.is_empty() { if section.relocations.is_empty() {
continue; continue;
@ -457,6 +532,16 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
out_section.rela_offset = writer.reserve_relocations(section.relocations.len(), true); out_section.rela_offset = writer.reserve_relocations(section.relocations.len(), true);
} }
writer.reserve_symtab();
writer.reserve_strtab();
writer.reserve_shstrtab();
// Reserve comment section
if let Some(comment_data) = &comment_data {
let out_section = out_sections.last_mut().unwrap();
out_section.offset = writer.reserve(comment_data.len(), 32);
}
writer.reserve_section_headers(); writer.reserve_section_headers();
writer.write_file_header(&object::write::elf::FileHeader { writer.write_file_header(&object::write::elf::FileHeader {
@ -499,24 +584,16 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
continue; continue;
} }
writer.write_align(32); writer.write_align(32);
debug_assert_eq!(writer.len(), out_section.offset); ensure!(writer.len() == out_section.offset);
writer.write(&section.data); writer.write(&section.data);
} }
writer.write_shstrtab();
writer.write_strtab();
writer.write_null_symbol();
for out_symbol in &out_symbols {
writer.write_symbol(&out_symbol.sym);
}
for (section, out_section) in obj.sections.iter().zip(&out_sections) { for (section, out_section) in obj.sections.iter().zip(&out_sections) {
if section.relocations.is_empty() { if section.relocations.is_empty() {
continue; continue;
} }
writer.write_align_relocation(); writer.write_align_relocation();
debug_assert_eq!(writer.len(), out_section.rela_offset); ensure!(writer.len() == out_section.rela_offset);
for reloc in &section.relocations { for reloc in &section.relocations {
let mut r_offset = reloc.address; let mut r_offset = reloc.address;
let r_type = match reloc.kind { let r_type = match reloc.kind {
@ -540,11 +617,11 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
elf::R_PPC_ADDR16_LO elf::R_PPC_ADDR16_LO
} }
ObjRelocKind::PpcRel24 => { ObjRelocKind::PpcRel24 => {
r_offset = r_offset & !3; r_offset &= !3;
elf::R_PPC_REL24 elf::R_PPC_REL24
} }
ObjRelocKind::PpcRel14 => { ObjRelocKind::PpcRel14 => {
r_offset = r_offset & !3; r_offset &= !3;
elf::R_PPC_REL14 elf::R_PPC_REL14
} }
ObjRelocKind::PpcEmbSda21 => { ObjRelocKind::PpcEmbSda21 => {
@ -552,15 +629,28 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
elf::R_PPC_EMB_SDA21 elf::R_PPC_EMB_SDA21
} }
}; };
writer.write_relocation(true, &Rel { let r_sym = symbol_map[reloc.target_symbol]
r_offset, .ok_or_else(|| anyhow!("Relocation against stripped symbol"))?;
r_sym: (reloc.target_symbol + symbol_offset + 1) as u32, writer.write_relocation(true, &Rel { r_offset, r_sym, r_type, r_addend: reloc.addend });
r_type,
r_addend: reloc.addend,
});
} }
} }
writer.write_null_symbol();
for out_symbol in &out_symbols {
writer.write_symbol(&out_symbol.sym);
}
writer.write_strtab();
writer.write_shstrtab();
// Write comment section
if let Some(comment_data) = &comment_data {
let out_section = out_sections.last().unwrap();
writer.write_align(32);
ensure!(writer.len() == out_section.offset);
writer.write(comment_data);
}
writer.write_null_section_header(); writer.write_null_section_header();
for (section, out_section) in obj.sections.iter().zip(&out_sections) { for (section, out_section) in obj.sections.iter().zip(&out_sections) {
writer.write_section_header(&SectionHeader { writer.write_section_header(&SectionHeader {
@ -598,11 +688,29 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
true, true,
); );
} }
writer.write_symtab_section_header(num_local);
writer.write_shstrtab_section_header();
writer.write_strtab_section_header();
debug_assert_eq!(writer.reserved_len(), writer.len()); writer.write_symtab_section_header(num_local);
writer.write_strtab_section_header();
writer.write_shstrtab_section_header();
// Write comment section header
if let Some(comment_data) = &comment_data {
let out_section = out_sections.last().unwrap();
writer.write_section_header(&SectionHeader {
name: Some(out_section.name),
sh_type: SHT_PROGBITS,
sh_flags: 0,
sh_addr: 0,
sh_offset: out_section.offset as u64,
sh_size: comment_data.len() as u64,
sh_link: 0,
sh_info: 0,
sh_addralign: 1,
sh_entsize: 1,
});
}
ensure!(writer.reserved_len() == writer.len());
Ok(out_data) Ok(out_data)
} }
@ -655,6 +763,9 @@ fn to_obj_symbol(
SymbolKind::Section => ObjSymbolKind::Section, SymbolKind::Section => ObjSymbolKind::Section,
_ => bail!("Unsupported symbol kind: {:?}", symbol.kind()), _ => bail!("Unsupported symbol kind: {:?}", symbol.kind()),
}, },
// TODO common symbol value?
align: None,
data_kind: Default::default(),
}) })
} }
@ -1,13 +1,15 @@
use std::{ use std::{
fs::File, fs::File,
io::{BufReader, Cursor, Read}, io::{BufRead, BufReader, Cursor, Read},
path::Path, path::{Path, PathBuf},
}; };
use anyhow::{Context, Result}; use anyhow::{anyhow, Context, Result};
use byteorder::ReadBytesExt; use byteorder::ReadBytesExt;
use memmap2::{Mmap, MmapOptions}; use memmap2::{Mmap, MmapOptions};
use crate::util::{rarc, rarc::Node, yaz0};
/// Opens a memory mapped file. /// Opens a memory mapped file.
pub fn map_file<P: AsRef<Path>>(path: P) -> Result<Mmap> { pub fn map_file<P: AsRef<Path>>(path: P) -> Result<Mmap> {
let file = File::open(&path) let file = File::open(&path)
@ -21,7 +23,7 @@ pub type Reader<'a> = Cursor<&'a [u8]>;
/// Creates a reader for the memory mapped file. /// Creates a reader for the memory mapped file.
#[inline] #[inline]
pub fn map_reader(mmap: &Mmap) -> Reader { Cursor::new(&*mmap) } pub fn map_reader(mmap: &Mmap) -> Reader { Cursor::new(&**mmap) }
/// Creates a buffered reader around a file (not memory mapped). /// Creates a buffered reader around a file (not memory mapped).
pub fn buf_reader<P: AsRef<Path>>(path: P) -> Result<BufReader<File>> { pub fn buf_reader<P: AsRef<Path>>(path: P) -> Result<BufReader<File>> {
@ -55,3 +57,180 @@ pub fn read_c_string(reader: &mut Reader, off: u64) -> Result<String> {
reader.set_position(pos); reader.set_position(pos);
Ok(s) Ok(s)
} }
/// Process response files (starting with '@') and glob patterns (*).
pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> {
let mut out = Vec::with_capacity(files.len());
for path in files {
let path_str =
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
if let Some(rsp_file) = path_str.strip_prefix('@') {
let reader = buf_reader(rsp_file)?;
for result in reader.lines() {
let line = result?;
if !line.is_empty() {
out.push(PathBuf::from(line));
}
}
} else if path_str.contains('*') {
for entry in glob::glob(path_str)? {
out.push(entry?);
}
} else {
out.push(path.clone());
}
}
Ok(out)
}
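A hedged usage sketch for process_rsp (the paths are hypothetical): an @-prefixed response file is read line by line, a pattern containing * is expanded via glob, and plain paths pass through unchanged.

use std::path::PathBuf;

use crate::util::file::process_rsp;

fn expand_inputs() -> anyhow::Result<Vec<PathBuf>> {
    process_rsp(&[
        PathBuf::from("@objects.rsp"), // one path per non-empty line
        PathBuf::from("files/*.arc"),  // glob-expanded
        PathBuf::from("sys/main.dol"), // passed through as-is
    ])
}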
/// Iterator over files in a RARC archive.
struct RarcIterator {
file: Mmap,
paths: Vec<(PathBuf, u64, u32)>,
index: usize,
}
impl RarcIterator {
pub fn new(file: Mmap, base_path: &Path) -> Result<Self> {
let reader = rarc::RarcReader::new(map_reader(&file))?;
let paths = Self::collect_paths(&reader, base_path);
Ok(Self { file, paths, index: 0 })
}
fn collect_paths(reader: &rarc::RarcReader, base_path: &Path) -> Vec<(PathBuf, u64, u32)> {
let mut current_path = PathBuf::new();
let mut paths = vec![];
for node in reader.nodes() {
match node {
Node::DirectoryBegin { name } => {
current_path.push(name.name);
}
Node::DirectoryEnd { name: _ } => {
current_path.pop();
}
Node::File { name, offset, size } => {
let path = base_path.join(&current_path).join(name.name);
paths.push((path, offset, size));
}
Node::CurrentDirectory => {}
Node::ParentDirectory => {}
}
}
paths
}
fn decompress_if_needed(buf: &[u8]) -> Result<Vec<u8>> {
if buf.len() > 4 && buf[0..4] == *b"Yaz0" {
yaz0::decompress_file(&mut Cursor::new(buf))
} else {
Ok(buf.to_vec())
}
}
}
impl Iterator for RarcIterator {
type Item = Result<(PathBuf, Vec<u8>)>;
fn next(&mut self) -> Option<Self::Item> {
if self.index >= self.paths.len() {
return None;
}
let (path, off, size) = self.paths[self.index].clone();
self.index += 1;
let slice = &self.file[off as usize..off as usize + size as usize];
match Self::decompress_if_needed(slice) {
Ok(buf) => Some(Ok((path, buf))),
Err(e) => Some(Err(e)),
}
}
}
/// A file entry, either a memory mapped file or an owned buffer.
pub enum FileEntry {
Map(Mmap),
Buffer(Vec<u8>),
}
impl FileEntry {
/// Creates a reader for the file.
pub fn as_reader(&self) -> Reader {
match self {
Self::Map(map) => map_reader(map),
Self::Buffer(slice) => Cursor::new(slice),
}
}
}
/// Iterate over file paths, expanding response files (@) and glob patterns (*).
/// If a file is a RARC archive, iterate over its contents.
/// If a file is a Yaz0 compressed file, decompress it.
pub struct FileIterator {
paths: Vec<PathBuf>,
index: usize,
rarc: Option<RarcIterator>,
}
impl FileIterator {
pub fn new(paths: &[PathBuf]) -> Result<Self> {
Ok(Self { paths: process_rsp(paths)?, index: 0, rarc: None })
}
fn next_rarc(&mut self) -> Option<Result<(PathBuf, FileEntry)>> {
if let Some(rarc) = &mut self.rarc {
match rarc.next() {
Some(Ok((path, buf))) => return Some(Ok((path, FileEntry::Buffer(buf)))),
Some(Err(err)) => return Some(Err(err)),
None => self.rarc = None,
}
}
None
}
fn next_path(&mut self) -> Option<Result<(PathBuf, FileEntry)>> {
if self.index >= self.paths.len() {
return None;
}
let path = self.paths[self.index].clone();
self.index += 1;
match map_file(&path) {
Ok(map) => self.handle_file(map, path),
Err(err) => Some(Err(err)),
}
}
fn handle_file(&mut self, map: Mmap, path: PathBuf) -> Option<Result<(PathBuf, FileEntry)>> {
if map.len() <= 4 {
return Some(Ok((path, FileEntry::Map(map))));
}
match &map[0..4] {
b"Yaz0" => self.handle_yaz0(map, path),
b"RARC" => self.handle_rarc(map, path),
_ => Some(Ok((path, FileEntry::Map(map)))),
}
}
fn handle_yaz0(&mut self, map: Mmap, path: PathBuf) -> Option<Result<(PathBuf, FileEntry)>> {
Some(match yaz0::decompress_file(&mut map_reader(&map)) {
Ok(buf) => Ok((path, FileEntry::Buffer(buf))),
Err(e) => Err(e),
})
}
fn handle_rarc(&mut self, map: Mmap, path: PathBuf) -> Option<Result<(PathBuf, FileEntry)>> {
self.rarc = match RarcIterator::new(map, &path) {
Ok(iter) => Some(iter),
Err(e) => return Some(Err(e)),
};
self.next()
}
}
impl Iterator for FileIterator {
type Item = Result<(PathBuf, FileEntry)>;
fn next(&mut self) -> Option<Self::Item> { self.next_rarc().or_else(|| self.next_path()) }
}
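// Example (hypothetical usage sketch): walk a set of inputs, transparently
// descending into RARC archives and decompressing Yaz0 files. The paths are
// placeholders.
fn example_walk() -> anyhow::Result<()> {
    let paths = [PathBuf::from("files/*.arc"), PathBuf::from("@objects.rsp")];
    for result in FileIterator::new(&paths)? {
        let (path, entry) = result?;
        match &entry {
            FileEntry::Map(map) => println!("{}: {} bytes (mapped)", path.display(), map.len()),
            FileEntry::Buffer(buf) => println!("{}: {} bytes (decompressed)", path.display(), buf.len()),
        }
    }
    Ok(())
}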

39
src/util/lcf.rs Normal file
View File

@ -0,0 +1,39 @@
use std::path::PathBuf;
use anyhow::Result;
use itertools::Itertools;
use crate::obj::ObjInfo;
pub fn generate_ldscript(obj: &ObjInfo) -> Result<String> {
let stack_size = match (obj.stack_address, obj.stack_end) {
(Some(stack_address), Some(stack_end)) => stack_address - stack_end,
        _ => 65535, // default when stack bounds are unknown
};
let section_defs = obj
.sections
.iter()
.map(|s| format!("{} ALIGN({:#X}):{{}}", s.name, 0x20 /* TODO */))
.join("\n ");
let mut force_files = Vec::with_capacity(obj.link_order.len());
for unit in &obj.link_order {
let obj_path = obj_path_for_unit(unit);
force_files.push(obj_path.file_name().unwrap().to_str().unwrap().to_string());
}
let out = include_str!("../../assets/ldscript.lcf")
.replacen("$SECTIONS", &section_defs, 1)
.replacen("$STACKSIZE", &format!("{:#X}", stack_size), 1)
.replacen("$FORCEFILES", &force_files.join("\n "), 1);
Ok(out)
}
pub fn obj_path_for_unit(unit: &str) -> PathBuf {
PathBuf::from(unit).with_extension("").with_extension("o")
}
pub fn asm_path_for_unit(unit: &str) -> PathBuf {
PathBuf::from(unit).with_extension("").with_extension("s")
}
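// Example (hypothetical usage sketch): render the script and write it to disk.
// `generate_ldscript` substitutes $SECTIONS, $STACKSIZE and $FORCEFILES exactly
// once each in assets/ldscript.lcf; the output path below is a placeholder.
fn write_ldscript(obj: &ObjInfo, out: &std::path::Path) -> Result<()> {
    let script = generate_ldscript(obj)?;
    std::fs::write(out, script)?;
    Ok(())
}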

View File

@ -1,16 +1,27 @@
#![allow(dead_code)]
#![allow(unused_mut)]
use std::{ use std::{
collections::{btree_map, BTreeMap, HashMap}, collections::{btree_map, BTreeMap, HashMap, HashSet},
hash::Hash, hash::Hash,
io::BufRead, io::BufRead,
mem::replace,
}; };
use anyhow::{bail, ensure, Error, Result}; use anyhow::{anyhow, bail, ensure, Error, Result};
use cwdemangle::{demangle, DemangleOptions}; use cwdemangle::{demangle, DemangleOptions};
use multimap::MultiMap; use multimap::MultiMap;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::{Captures, Regex}; use regex::{Captures, Regex};
use topological_sort::TopologicalSort; use topological_sort::TopologicalSort;
use crate::{
obj::{
section_kind_for_section, ObjInfo, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags,
ObjSymbolKind,
},
util::nested::NestedVec,
};
#[derive(Debug, Copy, Clone, Eq, PartialEq)] #[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum SymbolKind { pub enum SymbolKind {
Function, Function,
@ -35,7 +46,7 @@ pub struct SymbolEntry {
pub unit: Option<String>, pub unit: Option<String>,
pub address: u32, pub address: u32,
pub size: u32, pub size: u32,
pub section: String, pub align: Option<u32>,
} }
#[derive(Debug, Clone, Hash, Eq, PartialEq)] #[derive(Debug, Clone, Hash, Eq, PartialEq)]
@ -55,10 +66,10 @@ fn is_code_section(section: &str) -> bool { matches!(section, ".text" | ".init")
/// Iterate over the BTreeMap and generate an ordered list of symbols and TUs by address. /// Iterate over the BTreeMap and generate an ordered list of symbols and TUs by address.
fn resolve_section_order( fn resolve_section_order(
address_to_symbol: &BTreeMap<u32, SymbolRef>, _address_to_symbol: &BTreeMap<u32, SymbolRef>,
symbol_entries: &mut HashMap<SymbolRef, SymbolEntry>, symbol_entries: &mut HashMap<SymbolRef, SymbolEntry>,
) -> Result<SectionOrder> { ) -> Result<SectionOrder> {
let mut ordering = SectionOrder::default(); let ordering = SectionOrder::default();
// let mut last_unit = String::new(); // let mut last_unit = String::new();
// let mut last_section = String::new(); // let mut last_section = String::new();
@ -138,17 +149,19 @@ fn resolve_section_order(
/// There can be ambiguities, but any solution that satisfies the link order /// There can be ambiguities, but any solution that satisfies the link order
/// constraints is considered valid. /// constraints is considered valid.
// TODO account for library ordering // TODO account for library ordering
#[allow(dead_code)]
pub fn resolve_link_order(section_unit_order: &[(String, Vec<String>)]) -> Result<Vec<String>> { pub fn resolve_link_order(section_unit_order: &[(String, Vec<String>)]) -> Result<Vec<String>> {
let mut global_unit_order = Vec::<String>::new(); let mut global_unit_order = Vec::<String>::new();
let mut t_sort = TopologicalSort::<String>::new(); let mut t_sort = TopologicalSort::<String>::new();
for (section, order) in section_unit_order { for (section, order) in section_unit_order {
let mut order: &[String] = order; let mut order = order.clone();
if matches!(section.as_str(), ".ctors" | ".dtors") && order.len() > 1 { if matches!(section.as_str(), ".ctors" | ".dtors" | "extab") {
// __init_cpp_exceptions.o has symbols that get ordered to the beginning of continue;
// .ctors and .dtors, so our topological sort would fail if we added them. // if order.len() > 1 {
// Always skip the first TU of .ctors and .dtors. // // __init_cpp_exceptions.o has symbols that get ordered to the beginning of
order = &order[1..]; // // .ctors and .dtors, so our topological sort would fail if we added them.
// // Always skip the first TU of .ctors and .dtors.
// order = order[1..].to_vec();
// }
} }
for iter in order.windows(2) { for iter in order.windows(2) {
t_sort.add_dependency(iter[0].clone(), iter[1].clone()); t_sort.add_dependency(iter[0].clone(), iter[1].clone());
@ -158,7 +171,7 @@ pub fn resolve_link_order(section_unit_order: &[(String, Vec<String>)]) -> Resul
global_unit_order.push(unit); global_unit_order.push(unit);
} }
// An incomplete topological sort indicates that a cyclic dependency was encountered. // An incomplete topological sort indicates that a cyclic dependency was encountered.
ensure!(t_sort.is_empty(), "Cyclic dependency encountered!"); ensure!(t_sort.is_empty(), "Cyclic dependency encountered while resolving link order");
// Sanity check, did we get all TUs in the final order? // Sanity check, did we get all TUs in the final order?
for (_, order) in section_unit_order { for (_, order) in section_unit_order {
for unit in order { for unit in order {
@ -173,6 +186,8 @@ macro_rules! static_regex {
static $name: Lazy<Regex> = Lazy::new(|| Regex::new($str).unwrap()); static $name: Lazy<Regex> = Lazy::new(|| Regex::new($str).unwrap());
}; };
} }
// Link map
static_regex!(LINK_MAP_START, "^Link map of (?P<entry>.*)$"); static_regex!(LINK_MAP_START, "^Link map of (?P<entry>.*)$");
static_regex!( static_regex!(
LINK_MAP_ENTRY, LINK_MAP_ENTRY,
@ -186,35 +201,54 @@ static_regex!(
LINK_MAP_ENTRY_DUPLICATE, LINK_MAP_ENTRY_DUPLICATE,
"^\\s*(?P<depth>\\d+)] >>> UNREFERENCED DUPLICATE (?P<sym>.*)$" "^\\s*(?P<depth>\\d+)] >>> UNREFERENCED DUPLICATE (?P<sym>.*)$"
); );
static_regex!(LINK_MAP_EXTERN_SYMBOL, "^\\s*>>> SYMBOL NOT FOUND: (.*)$");
// Section layout
static_regex!(SECTION_LAYOUT_START, "^(?P<section>.*) section layout$"); static_regex!(SECTION_LAYOUT_START, "^(?P<section>.*) section layout$");
static_regex!( static_regex!(
SECTION_LAYOUT_SYMBOL, SECTION_LAYOUT_SYMBOL,
"^\\s*(?P<rom_addr>[0-9A-Fa-f]+|UNUSED)\\s+(?P<size>[0-9A-Fa-f]+)\\s+(?P<addr>[0-9A-Fa-f]+|\\.{8})\\s+(?P<align>\\d+)?\\s*(?P<sym>.*?)(?:\\s+\\(entry of (?P<entry_of>.*?)\\))?\\s+(?P<tu>.*)$" "^\\s*(?P<rom_addr>[0-9A-Fa-f]+|UNUSED)\\s+(?P<size>[0-9A-Fa-f]+)\\s+(?P<addr>[0-9A-Fa-f]{8}|\\.{8})\\s+(?P<offset>[0-9A-Fa-f]{8}|\\.{8})\\s+(?P<align>\\d+)?\\s*(?P<sym>.*?)(?:\\s+\\(entry of (?P<entry_of>.*?)\\))?\\s+(?P<tu>.*)$"
); );
static_regex!( static_regex!(
SECTION_LAYOUT_HEADER, SECTION_LAYOUT_HEADER,
"^(\\s*Starting\\s+Virtual\\s*|\\s*address\\s+Size\\s+address\\s*|\\s*-----------------------\\s*)$" "^(\\s*Starting\\s+Virtual\\s*(File\\s*)?|\\s*address\\s+Size\\s+address\\s*(offset\\s*)?|\\s*-----------------------(----------)?\\s*)$"
); );
static_regex!(MEMORY_MAP_HEADER, "^\\s*Memory map:\\s*$");
static_regex!(EXTERN_SYMBOL, "^\\s*>>> SYMBOL NOT FOUND: (.*)$"); // Memory map
static_regex!(LINKER_SYMBOLS_HEADER, "^\\s*Linker generated symbols:\\s*$"); static_regex!(MEMORY_MAP_START, "^\\s*Memory map:\\s*$");
static_regex!(MEMORY_MAP_HEADER, "^(\\s*Starting Size\\s+File\\s*|\\s*address\\s+Offset\\s*)$");
static_regex!(MEMORY_MAP_ENTRY, "^\\s*(?P<section>\\S+)\\s+(?P<addr>[0-9A-Fa-f]+|\\.{0,8})\\s+(?P<size>[0-9A-Fa-f]+|\\.{1,8})\\s+(?P<offset>[0-9A-Fa-f]+|\\.{1,8})\\s*$");
// Linker generated symbols
static_regex!(LINKER_SYMBOLS_START, "^\\s*Linker generated symbols:\\s*$");
static_regex!(LINKER_SYMBOL_ENTRY, "^\\s*(?P<name>\\S+)\\s+(?P<addr>[0-9A-Fa-f]+|\\.{0,8})\\s*$");
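// Illustrative lines (hypothetical, not copied from a real map) in the shape these
// regexes expect:
//   ".text section layout"                                    -> SECTION_LAYOUT_START
//   "  00000100 000020 80003240 00000240  4 main \tmain.c"    -> SECTION_LAYOUT_SYMBOL
//   "  .text  80003100 00002500 00000100"                     -> MEMORY_MAP_ENTRY
//   "  _stack_addr 8040e000"                                  -> LINKER_SYMBOL_ENTRY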
pub struct SectionInfo {
name: String,
address: u32,
size: u32,
file_offset: u32,
}
#[derive(Default)] #[derive(Default)]
pub struct MapEntries { pub struct MapInfo {
pub entry_point: String, pub entry_point: String,
pub symbols: HashMap<SymbolRef, SymbolEntry>,
pub unit_entries: MultiMap<String, SymbolRef>, pub unit_entries: MultiMap<String, SymbolRef>,
pub entry_references: MultiMap<SymbolRef, SymbolRef>, pub entry_references: MultiMap<SymbolRef, SymbolRef>,
pub entry_referenced_from: MultiMap<SymbolRef, SymbolRef>, pub entry_referenced_from: MultiMap<SymbolRef, SymbolRef>,
// pub address_to_symbol: BTreeMap<u32, SymbolRef>, // pub address_to_symbol: BTreeMap<u32, SymbolRef>,
// pub unit_section_ranges: HashMap<String, HashMap<String, Range<u32>>>, // pub unit_section_ranges: HashMap<String, HashMap<String, Range<u32>>>,
pub symbol_order: Vec<SymbolRef>, // pub symbol_order: Vec<SymbolRef>,
pub unit_order: Vec<(String, Vec<String>)>, // pub unit_order: Vec<(String, Vec<String>)>,
pub sections: BTreeMap<u32, SectionInfo>,
pub link_map_symbols: HashMap<SymbolRef, SymbolEntry>,
pub section_symbols: HashMap<String, BTreeMap<u32, Vec<SymbolEntry>>>,
pub section_units: HashMap<String, Vec<(u32, String)>>,
} }
#[derive(Default)] #[derive(Default)]
struct LinkMapState { struct LinkMapState {
last_name: String, last_symbol_name: String,
symbol_stack: Vec<SymbolRef>, symbol_stack: Vec<SymbolRef>,
} }
@ -224,10 +258,6 @@ struct SectionLayoutState {
current_unit: Option<String>, current_unit: Option<String>,
units: Vec<(u32, String)>, units: Vec<(u32, String)>,
symbols: BTreeMap<u32, Vec<SymbolEntry>>, symbols: BTreeMap<u32, Vec<SymbolEntry>>,
// unit_override: Option<String>,
// relative_offset: u32,
// last_unit_start: u32,
// last_section_end: u32,
has_link_map: bool, has_link_map: bool,
} }
@ -241,7 +271,7 @@ enum ProcessMapState {
struct StateMachine { struct StateMachine {
state: ProcessMapState, state: ProcessMapState,
entries: MapEntries, result: MapInfo,
has_link_map: bool, has_link_map: bool,
} }
@ -253,7 +283,7 @@ impl StateMachine {
match &mut self.state { match &mut self.state {
ProcessMapState::None => { ProcessMapState::None => {
if let Some(captures) = LINK_MAP_START.captures(&line) { if let Some(captures) = LINK_MAP_START.captures(&line) {
self.entries.entry_point = captures["entry"].to_string(); self.result.entry_point = captures["entry"].to_string();
self.switch_state(ProcessMapState::LinkMap(Default::default()))?; self.switch_state(ProcessMapState::LinkMap(Default::default()))?;
} else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) { } else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) {
self.switch_state(ProcessMapState::SectionLayout(SectionLayoutState { self.switch_state(ProcessMapState::SectionLayout(SectionLayoutState {
@ -261,9 +291,9 @@ impl StateMachine {
has_link_map: self.has_link_map, has_link_map: self.has_link_map,
..Default::default() ..Default::default()
}))?; }))?;
} else if MEMORY_MAP_HEADER.is_match(&line) { } else if MEMORY_MAP_START.is_match(&line) {
self.switch_state(ProcessMapState::MemoryMap)?; self.switch_state(ProcessMapState::MemoryMap)?;
} else if LINKER_SYMBOLS_HEADER.is_match(&line) { } else if LINKER_SYMBOLS_START.is_match(&line) {
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?; self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
} else { } else {
bail!("Unexpected line while processing map: '{line}'"); bail!("Unexpected line while processing map: '{line}'");
@ -271,10 +301,11 @@ impl StateMachine {
} }
ProcessMapState::LinkMap(ref mut state) => { ProcessMapState::LinkMap(ref mut state) => {
if let Some(captures) = LINK_MAP_ENTRY.captures(&line) { if let Some(captures) = LINK_MAP_ENTRY.captures(&line) {
StateMachine::process_link_map_entry(captures, state, &mut self.entries)?; StateMachine::process_link_map_entry(captures, state, &mut self.result)?;
} else if let Some(captures) = LINK_MAP_ENTRY_GENERATED.captures(&line) { } else if let Some(captures) = LINK_MAP_ENTRY_GENERATED.captures(&line) {
StateMachine::process_link_map_generated(captures, state, &mut self.entries)?; StateMachine::process_link_map_generated(captures, state, &mut self.result)?;
} else if LINK_MAP_ENTRY_DUPLICATE.is_match(&line) || EXTERN_SYMBOL.is_match(&line) } else if LINK_MAP_ENTRY_DUPLICATE.is_match(&line)
|| LINK_MAP_EXTERN_SYMBOL.is_match(&line)
{ {
// Ignore // Ignore
} else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) { } else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) {
@ -283,9 +314,9 @@ impl StateMachine {
has_link_map: self.has_link_map, has_link_map: self.has_link_map,
..Default::default() ..Default::default()
}))?; }))?;
} else if MEMORY_MAP_HEADER.is_match(&line) { } else if MEMORY_MAP_START.is_match(&line) {
self.switch_state(ProcessMapState::MemoryMap)?; self.switch_state(ProcessMapState::MemoryMap)?;
} else if LINKER_SYMBOLS_HEADER.is_match(&line) { } else if LINKER_SYMBOLS_START.is_match(&line) {
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?; self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
} else { } else {
bail!("Unexpected line while processing map: '{line}'"); bail!("Unexpected line while processing map: '{line}'");
@ -293,51 +324,52 @@ impl StateMachine {
} }
ProcessMapState::SectionLayout(ref mut state) => { ProcessMapState::SectionLayout(ref mut state) => {
if let Some(captures) = SECTION_LAYOUT_SYMBOL.captures(&line) { if let Some(captures) = SECTION_LAYOUT_SYMBOL.captures(&line) {
StateMachine::section_layout_entry(captures, state, &mut self.entries)?; StateMachine::section_layout_entry(captures, state, &mut self.result)?;
} else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) { } else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) {
// let last_section_end = state.last_section_end;
self.switch_state(ProcessMapState::SectionLayout(SectionLayoutState { self.switch_state(ProcessMapState::SectionLayout(SectionLayoutState {
current_section: captures["section"].to_string(), current_section: captures["section"].to_string(),
has_link_map: self.has_link_map, has_link_map: self.has_link_map,
// last_section_end,
..Default::default() ..Default::default()
}))?; }))?;
} else if SECTION_LAYOUT_HEADER.is_match(&line) { } else if SECTION_LAYOUT_HEADER.is_match(&line) {
// Ignore // Ignore
} else if MEMORY_MAP_HEADER.is_match(&line) { } else if MEMORY_MAP_START.is_match(&line) {
self.switch_state(ProcessMapState::MemoryMap)?; self.switch_state(ProcessMapState::MemoryMap)?;
} else if LINKER_SYMBOLS_HEADER.is_match(&line) { } else if LINKER_SYMBOLS_START.is_match(&line) {
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?; self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
} else { } else {
bail!("Unexpected line while processing map: '{line}'"); bail!("Unexpected line while processing map: '{line}'");
} }
} }
ProcessMapState::MemoryMap => { ProcessMapState::MemoryMap => {
// TODO if let Some(captures) = MEMORY_MAP_ENTRY.captures(&line) {
if LINKER_SYMBOLS_HEADER.is_match(&line) { StateMachine::memory_map_entry(captures, &mut self.result)?;
} else if LINKER_SYMBOLS_START.is_match(&line) {
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?; self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
} }
} }
ProcessMapState::LinkerGeneratedSymbols => { ProcessMapState::LinkerGeneratedSymbols => {
// TODO if let Some(captures) = LINKER_SYMBOL_ENTRY.captures(&line) {
StateMachine::linker_symbol_entry(captures, &mut self.result)?;
}
} }
} }
Ok(()) Ok(())
} }
fn switch_state(&mut self, new_state: ProcessMapState) -> Result<()> { fn switch_state(&mut self, new_state: ProcessMapState) -> Result<()> {
self.end_state()?; let old_state = replace(&mut self.state, new_state);
self.state = new_state; self.end_state(old_state)?;
Ok(()) Ok(())
} }
fn end_state(&mut self) -> Result<()> { fn end_state(&mut self, old_state: ProcessMapState) -> Result<()> {
match self.state { match old_state {
ProcessMapState::LinkMap { .. } => { ProcessMapState::LinkMap { .. } => {
self.has_link_map = true; self.has_link_map = true;
} }
ProcessMapState::SectionLayout(ref mut state) => { ProcessMapState::SectionLayout(state) => {
StateMachine::end_section_layout(state, &mut self.entries)?; StateMachine::end_section_layout(state, &mut self.result)?;
} }
_ => {} _ => {}
} }
@ -347,17 +379,13 @@ impl StateMachine {
fn process_link_map_entry( fn process_link_map_entry(
captures: Captures, captures: Captures,
state: &mut LinkMapState, state: &mut LinkMapState,
entries: &mut MapEntries, result: &mut MapInfo,
) -> Result<()> { ) -> Result<()> {
// if captures["sym"].starts_with('.') {
// state.last_name.clear();
// return Ok(());
// }
let is_duplicate = &captures["sym"] == ">>>"; let is_duplicate = &captures["sym"] == ">>>";
let unit = captures["tu"].trim().to_string(); let unit = captures["tu"].trim().to_string();
let name = if is_duplicate { let name = if is_duplicate {
ensure!(!state.last_name.is_empty(), "Last name empty?"); ensure!(!state.last_symbol_name.is_empty(), "Last name empty?");
state.last_name.clone() state.last_symbol_name.clone()
} else { } else {
captures["sym"].to_string() captures["sym"].to_string()
}; };
@ -385,11 +413,11 @@ impl StateMachine {
}; };
if !is_duplicate && state.symbol_stack.len() > 1 { if !is_duplicate && state.symbol_stack.len() > 1 {
let from = &state.symbol_stack[state.symbol_stack.len() - 2]; let from = &state.symbol_stack[state.symbol_stack.len() - 2];
entries.entry_referenced_from.insert(symbol_ref.clone(), from.clone()); result.entry_referenced_from.insert(symbol_ref.clone(), from.clone());
entries.entry_references.insert(from.clone(), symbol_ref.clone()); result.entry_references.insert(from.clone(), symbol_ref.clone());
} }
let mut should_insert = true; let mut should_insert = true;
if let Some(symbol) = entries.symbols.get(&symbol_ref) { if let Some(symbol) = result.link_map_symbols.get(&symbol_ref) {
if symbol.kind != kind { if symbol.kind != kind {
log::warn!( log::warn!(
"Kind mismatch for {}: was {:?}, now {:?}", "Kind mismatch for {}: was {:?}, now {:?}",
@ -406,12 +434,12 @@ impl StateMachine {
visibility visibility
); );
} }
entries.unit_entries.insert(unit.clone(), symbol_ref.clone()); result.unit_entries.insert(unit.clone(), symbol_ref.clone());
should_insert = false; should_insert = false;
} }
if should_insert { if should_insert {
let demangled = demangle(&name, &DemangleOptions::default()); let demangled = demangle(&name, &DemangleOptions::default());
entries.symbols.insert(symbol_ref.clone(), SymbolEntry { result.link_map_symbols.insert(symbol_ref.clone(), SymbolEntry {
name: name.clone(), name: name.clone(),
demangled, demangled,
kind, kind,
@ -419,10 +447,10 @@ impl StateMachine {
unit: Some(unit.clone()), unit: Some(unit.clone()),
address: 0, address: 0,
size: 0, size: 0,
section: String::new(), align: None,
}); });
state.last_name = name; state.last_symbol_name = name;
entries.unit_entries.insert(unit, symbol_ref); result.unit_entries.insert(unit, symbol_ref);
} }
Ok(()) Ok(())
} }
@ -430,12 +458,12 @@ impl StateMachine {
fn process_link_map_generated( fn process_link_map_generated(
captures: Captures, captures: Captures,
_state: &mut LinkMapState, _state: &mut LinkMapState,
entries: &mut MapEntries, result: &mut MapInfo,
) -> Result<()> { ) -> Result<()> {
let name = captures["sym"].to_string(); let name = captures["sym"].to_string();
let demangled = demangle(&name, &DemangleOptions::default()); let demangled = demangle(&name, &DemangleOptions::default());
let symbol_ref = SymbolRef { name: name.clone(), unit: None }; let symbol_ref = SymbolRef { name: name.clone(), unit: None };
entries.symbols.insert(symbol_ref, SymbolEntry { result.link_map_symbols.insert(symbol_ref, SymbolEntry {
name, name,
demangled, demangled,
kind: SymbolKind::NoType, kind: SymbolKind::NoType,
@ -443,12 +471,47 @@ impl StateMachine {
unit: None, unit: None,
address: 0, address: 0,
size: 0, size: 0,
section: String::new(), align: None,
}); });
Ok(()) Ok(())
} }
fn end_section_layout(state: &mut SectionLayoutState, entries: &mut MapEntries) -> Result<()> { fn end_section_layout(mut state: SectionLayoutState, entries: &mut MapInfo) -> Result<()> {
// Resolve duplicate TUs
let mut existing = HashSet::new();
for idx in 0..state.units.len() {
let (addr, unit) = &state.units[idx];
// FIXME
            if /*state.current_section == ".bss" ||*/ existing.contains(unit) {
                if /*state.current_section == ".bss" ||*/ &state.units[idx - 1].1 != unit {
let new_name = format!("{unit}_{}_{:010X}", state.current_section, addr);
log::info!("Renaming {unit} to {new_name}");
for idx2 in 0..idx {
let (addr, n_unit) = &state.units[idx2];
if unit == n_unit {
let new_name =
format!("{n_unit}_{}_{:010X}", state.current_section, addr);
log::info!("Renaming 2 {n_unit} to {new_name}");
state.units[idx2].1 = new_name;
break;
}
}
state.units[idx].1 = new_name;
}
} else {
existing.insert(unit.clone());
}
}
if !state.symbols.is_empty() {
entries.section_symbols.insert(state.current_section.clone(), state.symbols);
}
if !state.units.is_empty() {
entries.section_units.insert(state.current_section.clone(), state.units);
}
// Set last section size // Set last section size
// if let Some(last_unit) = state.section_units.last() { // if let Some(last_unit) = state.section_units.last() {
// let last_unit = state.unit_override.as_ref().unwrap_or(last_unit); // let last_unit = state.unit_override.as_ref().unwrap_or(last_unit);
@ -468,16 +531,26 @@ impl StateMachine {
fn section_layout_entry( fn section_layout_entry(
captures: Captures, captures: Captures,
state: &mut SectionLayoutState, state: &mut SectionLayoutState,
entries: &mut MapEntries, result: &mut MapInfo,
) -> Result<()> { ) -> Result<()> {
if captures["rom_addr"].trim() == "UNUSED" { if captures["rom_addr"].trim() == "UNUSED" {
return Ok(()); return Ok(());
} }
let sym_name = captures["sym"].trim(); let sym_name = captures["sym"].trim();
let mut tu = captures["tu"].trim().to_string(); if sym_name == "*fill*" {
let mut address = u32::from_str_radix(captures["addr"].trim(), 16)?; return Ok(());
let mut size = u32::from_str_radix(captures["size"].trim(), 16)?; }
let tu = captures["tu"].trim().to_string();
if tu == "*fill*" || tu == "Linker Generated Symbol File" {
return Ok(());
}
let address = u32::from_str_radix(captures["addr"].trim(), 16)?;
let size = u32::from_str_radix(captures["size"].trim(), 16)?;
let align =
captures.name("align").and_then(|m| u32::from_str_radix(m.as_str().trim(), 16).ok());
if state.current_unit.as_ref() != Some(&tu) || sym_name == state.current_section { if state.current_unit.as_ref() != Some(&tu) || sym_name == state.current_section {
state.current_unit = Some(tu.clone()); state.current_unit = Some(tu.clone());
@ -488,7 +561,7 @@ impl StateMachine {
} }
let symbol_ref = SymbolRef { name: sym_name.to_string(), unit: Some(tu.clone()) }; let symbol_ref = SymbolRef { name: sym_name.to_string(), unit: Some(tu.clone()) };
let entry = if let Some(existing) = entries.symbols.get(&symbol_ref) { let entry = if let Some(existing) = result.link_map_symbols.get(&symbol_ref) {
SymbolEntry { SymbolEntry {
name: existing.name.clone(), name: existing.name.clone(),
demangled: existing.demangled.clone(), demangled: existing.demangled.clone(),
@ -497,7 +570,7 @@ impl StateMachine {
unit: existing.unit.clone(), unit: existing.unit.clone(),
address, address,
size, size,
section: state.current_section.clone(), align,
} }
} else { } else {
let visibility = if state.has_link_map { let visibility = if state.has_link_map {
@ -518,7 +591,7 @@ impl StateMachine {
unit: Some(tu.clone()), unit: Some(tu.clone()),
address, address,
size, size,
section: state.current_section.clone(), align,
} }
}; };
match state.symbols.entry(address) { match state.symbols.entry(address) {
@ -529,25 +602,188 @@ impl StateMachine {
} }
Ok(()) Ok(())
} }
fn memory_map_entry(captures: Captures, entries: &mut MapInfo) -> Result<()> {
let section = &captures["section"];
let addr_str = &captures["addr"];
if addr_str.is_empty() {
// Stripped from DOL
return Ok(());
}
let address = u32::from_str_radix(addr_str, 16)?;
let size = u32::from_str_radix(&captures["size"], 16)?;
let file_offset = u32::from_str_radix(&captures["offset"], 16)?;
// log::info!("Memory map entry: {section} {address:#010X} {size:#010X} {file_offset:#010X}");
entries.sections.insert(address, SectionInfo {
name: section.to_string(),
address,
size,
file_offset,
});
Ok(())
}
fn linker_symbol_entry(captures: Captures, result: &mut MapInfo) -> Result<()> {
let name = &captures["name"];
let address = u32::from_str_radix(&captures["addr"], 16)?;
if address == 0 {
return Ok(());
}
let symbol_ref = SymbolRef { name: name.to_string(), unit: None };
if let Some(existing) = result.link_map_symbols.get_mut(&symbol_ref) {
existing.address = address;
} else {
result.link_map_symbols.insert(symbol_ref, SymbolEntry {
name: name.to_string(),
demangled: demangle(name, &DemangleOptions::default()),
kind: SymbolKind::NoType,
visibility: SymbolVisibility::Global,
unit: None,
address,
size: 0,
align: None,
});
};
// log::info!("Linker generated symbol: {} @ {:#010X}", name, address);
Ok(())
}
} }
pub fn process_map<R: BufRead>(reader: R) -> Result<MapEntries> { pub fn process_map<R: BufRead>(reader: R) -> Result<MapInfo> {
let mut state = StateMachine { let mut sm = StateMachine {
state: ProcessMapState::None, state: ProcessMapState::None,
entries: Default::default(), result: Default::default(),
has_link_map: false, has_link_map: false,
}; };
for result in reader.lines() { for result in reader.lines() {
match result { match result {
Ok(line) => state.process_line(line)?, Ok(line) => sm.process_line(line)?,
Err(e) => return Err(Error::from(e)), Err(e) => return Err(Error::from(e)),
} }
} }
state.end_state()?; let state = replace(&mut sm.state, ProcessMapState::None);
sm.end_state(state)?;
let mut entries = state.entries; let entries = sm.result;
// let section_order = resolve_section_order(&entries.address_to_symbol, &mut entries.symbols)?; // let section_order = resolve_section_order(&entries.address_to_symbol, &mut entries.symbols)?;
// entries.symbol_order = section_order.symbol_order; // entries.symbol_order = section_order.symbol_order;
// entries.unit_order = section_order.unit_order; // entries.unit_order = section_order.unit_order;
Ok(entries) Ok(entries)
} }
pub fn apply_map(result: &MapInfo, obj: &mut ObjInfo) -> Result<()> {
for section in &mut obj.sections {
if let Some(info) = result.sections.get(&(section.address as u32)) {
let kind = section_kind_for_section(&info.name)?;
if section.section_known {
if section.name != info.name {
log::warn!("Section mismatch: was {}, map says {}", section.name, info.name);
}
if section.kind != kind {
log::warn!(
"Section type mismatch: {} was {:?}, map says {:?}",
info.name,
section.kind,
kind
);
}
}
// if section.size != info.size as u64 {
// log::warn!(
// "Section size mismatch: {} was {:#X}, map says {:#X}",
// info.name,
// section.size,
// info.size
// );
// }
// if section.file_offset != info.file_offset as u64 {
// log::warn!(
// "Section file offset mismatch: {} was {:#X}, map says {:#X}",
// info.name,
// section.file_offset,
// info.file_offset
// );
// }
section.name = info.name.clone();
section.kind = kind;
// section.size = info.size as u64;
// section.file_offset = info.file_offset as u64;
// section.original_address = info.address as u64;
section.section_known = true;
} else {
log::warn!("Section {} @ {:#010X} not found in map", section.name, section.address);
}
}
// Add section symbols
for (section_name, symbol_map) in &result.section_symbols {
let section_index = obj
.sections
.iter()
.find(|s| &s.name == section_name)
.map(|s| s.index)
.ok_or_else(|| anyhow!("Failed to locate section {section_name} from map"))?;
for symbol_entry in symbol_map.values().flatten() {
add_symbol(obj, symbol_entry, Some(section_index))?;
}
}
// Add absolute symbols
for symbol_entry in result.link_map_symbols.values().filter(|s| s.unit.is_none()) {
add_symbol(obj, symbol_entry, None)?;
}
// Add splits
let mut section_order: Vec<(String, Vec<String>)> = Vec::new();
for (section, unit_order) in &result.section_units {
let mut units = Vec::new();
let mut existing = HashSet::new();
for (addr, unit) in unit_order {
let unit = unit.clone();
if !existing.contains(&unit) {
units.push(unit.clone());
existing.insert(unit.clone());
}
obj.splits.nested_push(*addr, ObjSplit {
unit,
end: 0, // TODO?
align: None,
common: false, // TODO?
});
}
section_order.push((section.clone(), units));
}
log::info!("Section order: {:#?}", section_order);
obj.link_order = resolve_link_order(&section_order)?;
Ok(())
}
fn add_symbol(obj: &mut ObjInfo, symbol_entry: &SymbolEntry, section: Option<usize>) -> Result<()> {
let demangled_name = demangle(&symbol_entry.name, &DemangleOptions::default());
obj.add_symbol(
ObjSymbol {
name: symbol_entry.name.clone(),
demangled_name,
address: symbol_entry.address as u64,
section,
size: symbol_entry.size as u64,
size_known: symbol_entry.size != 0,
flags: ObjSymbolFlagSet(
match symbol_entry.visibility {
SymbolVisibility::Global => ObjSymbolFlags::Global,
SymbolVisibility::Local => ObjSymbolFlags::Local,
SymbolVisibility::Weak => ObjSymbolFlags::Weak,
}
.into(),
),
kind: match symbol_entry.kind {
SymbolKind::Function => ObjSymbolKind::Function,
SymbolKind::Object => ObjSymbolKind::Object,
SymbolKind::Section => ObjSymbolKind::Section,
SymbolKind::NoType => ObjSymbolKind::Unknown,
},
align: None,
data_kind: Default::default(),
},
true,
)?;
Ok(())
}
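// Example (hypothetical usage sketch): parse a CodeWarrior .map file and apply it
// to an existing ObjInfo. The path is a placeholder, and `buf_reader` is assumed
// to be the buffered-file helper from util::file.
fn example_apply_map(obj: &mut ObjInfo) -> Result<()> {
    let reader = crate::util::file::buf_reader("orig/GALE01/Game.map")?;
    let info = process_map(reader)?;
    apply_map(&info, obj)
}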

View File

@ -1,13 +1,17 @@
pub mod asm; pub mod asm;
pub mod comment;
pub mod config; pub mod config;
pub mod dol; pub mod dol;
pub mod dwarf; pub mod dwarf;
pub mod elf; pub mod elf;
pub mod file; pub mod file;
pub mod lcf;
pub mod map; pub mod map;
pub mod nested; pub mod nested;
pub mod rarc;
pub mod rel; pub mod rel;
pub mod rso; pub mod rso;
pub mod yaz0;
/// Creates a fixed-size array reference from a slice. /// Creates a fixed-size array reference from a slice.
#[macro_export] #[macro_export]
@ -20,3 +24,15 @@ macro_rules! array_ref {
to_array(&$slice[$offset..$offset + $size]) to_array(&$slice[$offset..$offset + $size])
}}; }};
} }
/// Creates a fixed-size mutable array reference from a slice.
#[macro_export]
macro_rules! array_ref_mut {
($slice:expr, $offset:expr, $size:expr) => {{
#[inline]
fn to_array_mut<T>(slice: &mut [T]) -> &mut [T; $size] {
unsafe { &mut *(slice.as_mut_ptr() as *mut [_; $size]) }
}
to_array_mut(&mut $slice[$offset..$offset + $size])
}};
}
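// Example (hypothetical usage sketch): borrow a fixed-size window of a byte buffer
// without copying, e.g. to overwrite a 4-byte magic in place. Panics (like the
// underlying slice index) if the buffer is shorter than offset + size.
fn patch_magic(buf: &mut [u8]) {
    let magic: &mut [u8; 4] = array_ref_mut!(buf, 0, 4);
    magic.copy_from_slice(b"Yaz0");
}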

View File

@ -11,6 +11,7 @@ pub trait NestedMap<T1, T2, T3> {
pub trait NestedVec<T1, T2> { pub trait NestedVec<T1, T2> {
fn nested_push(&mut self, v1: T1, v2: T2); fn nested_push(&mut self, v1: T1, v2: T2);
fn nested_remove(&mut self, v1: &T1, v2: &T2);
} }
impl<T1, T2, T3> NestedMap<T1, T2, T3> for BTreeMap<T1, BTreeMap<T2, T3>> impl<T1, T2, T3> NestedMap<T1, T2, T3> for BTreeMap<T1, BTreeMap<T2, T3>>
@ -19,11 +20,7 @@ where
T2: Eq + Ord, T2: Eq + Ord,
{ {
fn nested_insert(&mut self, v1: T1, v2: T2, v3: T3) -> Result<()> { fn nested_insert(&mut self, v1: T1, v2: T2, v3: T3) -> Result<()> {
let inner = match self.entry(v1) { match self.entry(v1).or_default().entry(v2) {
btree_map::Entry::Occupied(entry) => entry.into_mut(),
btree_map::Entry::Vacant(entry) => entry.insert(Default::default()),
};
match inner.entry(v2) {
btree_map::Entry::Occupied(_) => bail!("Entry already exists"), btree_map::Entry::Occupied(_) => bail!("Entry already exists"),
btree_map::Entry::Vacant(entry) => entry.insert(v3), btree_map::Entry::Vacant(entry) => entry.insert(v3),
}; };
@ -37,11 +34,7 @@ where
T2: Eq + Hash, T2: Eq + Hash,
{ {
fn nested_insert(&mut self, v1: T1, v2: T2, v3: T3) -> Result<()> { fn nested_insert(&mut self, v1: T1, v2: T2, v3: T3) -> Result<()> {
let inner = match self.entry(v1) { match self.entry(v1).or_default().entry(v2) {
hash_map::Entry::Occupied(entry) => entry.into_mut(),
hash_map::Entry::Vacant(entry) => entry.insert(Default::default()),
};
match inner.entry(v2) {
hash_map::Entry::Occupied(_) => bail!("Entry already exists"), hash_map::Entry::Occupied(_) => bail!("Entry already exists"),
hash_map::Entry::Vacant(entry) => entry.insert(v3), hash_map::Entry::Vacant(entry) => entry.insert(v3),
}; };
@ -50,16 +43,29 @@ where
} }
impl<T1, T2> NestedVec<T1, T2> for BTreeMap<T1, Vec<T2>> impl<T1, T2> NestedVec<T1, T2> for BTreeMap<T1, Vec<T2>>
where T1: Ord where
T1: Ord,
T2: PartialEq,
{ {
fn nested_push(&mut self, v1: T1, v2: T2) { fn nested_push(&mut self, v1: T1, v2: T2) { self.entry(v1).or_default().push(v2); }
match self.entry(v1) {
btree_map::Entry::Occupied(mut e) => { fn nested_remove(&mut self, v1: &T1, v2: &T2) {
e.get_mut().push(v2); if let Some(vec) = self.get_mut(v1) {
} vec.retain(|n| n != v2);
btree_map::Entry::Vacant(e) => { }
e.insert(vec![v2]); }
} }
impl<T1, T2> NestedVec<T1, T2> for HashMap<T1, Vec<T2>>
where
T1: Ord + Hash,
T2: PartialEq,
{
fn nested_push(&mut self, v1: T1, v2: T2) { self.entry(v1).or_default().push(v2); }
fn nested_remove(&mut self, v1: &T1, v2: &T2) {
if let Some(vec) = self.get_mut(v1) {
vec.retain(|n| n != v2);
} }
} }
} }
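// Example (hypothetical usage sketch): group values under a key with nested_push,
// then drop a single value again with nested_remove.
fn example_nested_vec() {
    let mut by_unit = std::collections::BTreeMap::<String, Vec<u32>>::new();
    by_unit.nested_push("main.c".to_string(), 0x8000_3100);
    by_unit.nested_push("main.c".to_string(), 0x8000_3200);
    by_unit.nested_remove(&"main.c".to_string(), &0x8000_3100);
    assert_eq!(by_unit["main.c"], vec![0x8000_3200]);
}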

258
src/util/rarc.rs Normal file
View File

@ -0,0 +1,258 @@
// Source: https://github.com/Julgodis/picori/blob/650da9f4fe6050b39b80d5360416591c748058d5/src/rarc.rs
// License: MIT
// Modified to use `std::io::Cursor<&[u8]>` and `byteorder`
use std::{collections::HashMap, fmt::Display};
use anyhow::{anyhow, ensure, Result};
use byteorder::{BigEndian, LittleEndian, ReadBytesExt};
use crate::util::file::{read_c_string, Reader};
#[derive(Debug, Clone)]
pub struct NamedHash {
pub name: String,
pub hash: u16,
}
impl Display for NamedHash {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.name)
}
}
impl std::hash::Hash for NamedHash {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.hash.hash(state); }
}
impl PartialEq for NamedHash {
fn eq(&self, other: &Self) -> bool {
if self.hash == other.hash {
self.name == other.name
} else {
false
}
}
}
impl Eq for NamedHash {}
#[derive(Debug, Clone)]
enum RarcDirectory {
File {
/// Name of the file.
name: NamedHash,
/// Offset of the file data, relative to the start of the RARC file.
offset: u64,
/// Size of the file.
size: u32,
},
Folder {
/// Name of the folder.
name: NamedHash,
},
CurrentFolder,
ParentFolder,
}
#[derive(Debug, Clone)]
struct RarcNode {
/// Index of first directory.
pub index: u32,
/// Number of directories.
pub count: u32,
}
pub struct RarcReader<'a> {
reader: Reader<'a>,
directories: Vec<RarcDirectory>,
nodes: HashMap<NamedHash, RarcNode>,
root_node: NamedHash,
}
impl<'a> RarcReader<'a> {
/// Creates a new RARC reader.
pub fn new(mut reader: Reader<'a>) -> Result<Self> {
let base = reader.position();
let magic = reader.read_u32::<LittleEndian>()?;
let _file_length = reader.read_u32::<BigEndian>()?;
let header_length = reader.read_u32::<BigEndian>()?;
let file_offset = reader.read_u32::<BigEndian>()?;
let _file_length = reader.read_u32::<BigEndian>()?;
let _ = reader.read_u32::<BigEndian>()?;
let _ = reader.read_u32::<BigEndian>()?;
let _ = reader.read_u32::<BigEndian>()?;
let node_count = reader.read_u32::<BigEndian>()?;
let node_offset = reader.read_u32::<BigEndian>()?;
let directory_count = reader.read_u32::<BigEndian>()?;
let directory_offset = reader.read_u32::<BigEndian>()?;
let string_table_length = reader.read_u32::<BigEndian>()?;
let string_table_offset = reader.read_u32::<BigEndian>()?;
let _file_count = reader.read_u16::<BigEndian>()?;
let _ = reader.read_u16::<BigEndian>()?;
let _ = reader.read_u32::<BigEndian>()?;
ensure!(magic == 0x43524152, "invalid RARC magic");
ensure!(node_count < 0x10000, "invalid node count");
ensure!(directory_count < 0x10000, "invalid directory count");
let base = base + header_length as u64;
let directory_base = base + directory_offset as u64;
let data_base = base + file_offset as u64;
let mut directories = Vec::with_capacity(directory_count as usize);
for i in 0..directory_count {
reader.set_position(directory_base + 20 * i as u64);
let index = reader.read_u16::<BigEndian>()?;
let name_hash = reader.read_u16::<BigEndian>()?;
let _ = reader.read_u16::<BigEndian>()?; // 0x200 for folders, 0x1100 for files
let name_offset = reader.read_u16::<BigEndian>()?;
let data_offset = reader.read_u32::<BigEndian>()?;
let data_length = reader.read_u32::<BigEndian>()?;
let _ = reader.read_u32::<BigEndian>()?;
let name = {
let offset = string_table_offset as u64;
let offset = offset + name_offset as u64;
ensure!((name_offset as u32) < string_table_length, "invalid string table offset");
read_c_string(&mut reader, base + offset)
}?;
if index == 0xFFFF {
if name == "." {
directories.push(RarcDirectory::CurrentFolder);
} else if name == ".." {
directories.push(RarcDirectory::ParentFolder);
} else {
directories
.push(RarcDirectory::Folder { name: NamedHash { name, hash: name_hash } });
}
} else {
directories.push(RarcDirectory::File {
name: NamedHash { name, hash: name_hash },
offset: data_base + data_offset as u64,
size: data_length,
});
}
}
let node_base = base + node_offset as u64;
let mut root_node: Option<NamedHash> = None;
let mut nodes = HashMap::with_capacity(node_count as usize);
for i in 0..node_count {
reader.set_position(node_base + 16 * i as u64);
let _identifier = reader.read_u32::<BigEndian>()?;
let name_offset = reader.read_u32::<BigEndian>()?;
let name_hash = reader.read_u16::<BigEndian>()?;
let count = reader.read_u16::<BigEndian>()? as u32;
let index = reader.read_u32::<BigEndian>()?;
ensure!(index < directory_count, "first directory index out of bounds");
let last_index = index.checked_add(count);
ensure!(
last_index.is_some() && last_index.unwrap() <= directory_count,
"last directory index out of bounds"
);
let name = {
let offset = string_table_offset as u64;
let offset = offset + name_offset as u64;
ensure!(name_offset < string_table_length, "invalid string table offset");
read_c_string(&mut reader, base + offset)
}?;
// FIXME: this assumes that the root node is the first node in the list
if root_node.is_none() {
root_node = Some(NamedHash { name: name.clone(), hash: name_hash });
}
let name = NamedHash { name, hash: name_hash };
nodes.insert(name.clone(), RarcNode { index, count });
}
if let Some(root_node) = root_node {
Ok(Self { reader, directories, nodes, root_node })
} else {
Err(anyhow!("no root node"))
}
}
/// Get the data for a file.
pub fn file_data(&mut self, offset: u64, size: u32) -> Result<&'a [u8]> {
ensure!(offset + size as u64 <= self.reader.get_ref().len() as u64, "out of bounds");
Ok(&self.reader.get_ref()[offset as usize..offset as usize + size as usize])
}
/// Get an iterator over the nodes in the RARC file.
pub fn nodes(&self) -> Nodes<'_, '_> {
let root_node = self.root_node.clone();
Nodes { parent: self, stack: vec![NodeState::Begin(root_node)] }
}
}
/// A node in an RARC file.
pub enum Node {
/// A directory that has been entered.
DirectoryBegin { name: NamedHash },
/// A directory that has been exited.
DirectoryEnd { name: NamedHash },
/// A file in the current directory.
File { name: NamedHash, offset: u64, size: u32 },
/// The current directory. This is equivalent to ".".
CurrentDirectory,
/// The parent directory. This is equivalent to "..".
ParentDirectory,
}
enum NodeState {
Begin(NamedHash),
End(NamedHash),
File(NamedHash, u32),
}
/// An iterator over the nodes in an RARC file.
pub struct Nodes<'parent, 'a> {
parent: &'parent RarcReader<'a>,
stack: Vec<NodeState>,
}
impl<'parent, 'a> Iterator for Nodes<'parent, 'a> {
type Item = Node;
fn next(&mut self) -> Option<Self::Item> {
let Some(state) = self.stack.pop() else {
return None;
};
match state {
NodeState::Begin(name) => {
self.stack.push(NodeState::File(name.clone(), 0));
Some(Node::DirectoryBegin { name })
}
NodeState::End(name) => Some(Node::DirectoryEnd { name }),
NodeState::File(name, index) => {
if let Some(node) = self.parent.nodes.get(&name) {
if index + 1 >= node.count {
self.stack.push(NodeState::End(name.clone()));
} else {
self.stack.push(NodeState::File(name.clone(), index + 1));
}
let directory = &self.parent.directories[(node.index + index) as usize];
match directory {
RarcDirectory::CurrentFolder => Some(Node::CurrentDirectory),
RarcDirectory::ParentFolder => Some(Node::ParentDirectory),
RarcDirectory::Folder { name } => {
self.stack.push(NodeState::Begin(name.clone()));
self.next()
}
RarcDirectory::File { name, offset, size } => {
Some(Node::File { name: name.clone(), offset: *offset, size: *size })
}
}
} else {
None
}
}
}
}
}
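// Example (hypothetical usage sketch): list every file in an archive with its full
// path, mirroring how RarcIterator::collect_paths walks the node stream.
fn list_files(reader: &RarcReader) -> Vec<(std::path::PathBuf, u64, u32)> {
    let mut current = std::path::PathBuf::new();
    let mut files = Vec::new();
    for node in reader.nodes() {
        match node {
            Node::DirectoryBegin { name } => current.push(name.name),
            Node::DirectoryEnd { .. } => {
                current.pop();
            }
            Node::File { name, offset, size } => files.push((current.join(name.name), offset, size)),
            _ => {}
        }
    }
    files
}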

View File

@ -1,4 +1,4 @@
use std::{io::Read, path::Path}; use std::io::Read;
use anyhow::{anyhow, bail, ensure, Result}; use anyhow::{anyhow, bail, ensure, Result};
use byteorder::{BigEndian, ReadBytesExt}; use byteorder::{BigEndian, ReadBytesExt};
@ -9,7 +9,7 @@ use crate::{
ObjArchitecture, ObjInfo, ObjKind, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol, ObjArchitecture, ObjInfo, ObjKind, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol,
ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
}, },
util::file::{map_file, map_reader, read_string}, util::file::Reader,
}; };
/// Do not relocate anything, but accumulate the offset field for the next relocation offset calculation. /// Do not relocate anything, but accumulate the offset field for the next relocation offset calculation.
@ -24,17 +24,14 @@ pub const R_DOLPHIN_END: u32 = 203;
#[allow(unused)] #[allow(unused)]
pub const R_DOLPHIN_MRKREF: u32 = 204; pub const R_DOLPHIN_MRKREF: u32 = 204;
pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> { pub fn process_rel(mut reader: Reader) -> Result<ObjInfo> {
let mmap = map_file(path)?;
let mut reader = map_reader(&mmap);
let module_id = reader.read_u32::<BigEndian>()?; let module_id = reader.read_u32::<BigEndian>()?;
ensure!(reader.read_u32::<BigEndian>()? == 0, "Expected 'next' to be 0"); ensure!(reader.read_u32::<BigEndian>()? == 0, "Expected 'next' to be 0");
ensure!(reader.read_u32::<BigEndian>()? == 0, "Expected 'prev' to be 0"); ensure!(reader.read_u32::<BigEndian>()? == 0, "Expected 'prev' to be 0");
let num_sections = reader.read_u32::<BigEndian>()?; let num_sections = reader.read_u32::<BigEndian>()?;
let section_info_offset = reader.read_u32::<BigEndian>()?; let section_info_offset = reader.read_u32::<BigEndian>()?;
let name_offset = reader.read_u32::<BigEndian>()?; let _name_offset = reader.read_u32::<BigEndian>()?;
let name_size = reader.read_u32::<BigEndian>()?; let _name_size = reader.read_u32::<BigEndian>()?;
let version = reader.read_u32::<BigEndian>()?; let version = reader.read_u32::<BigEndian>()?;
ensure!(matches!(version, 1..=3), "Unsupported REL version {}", version); ensure!(matches!(version, 1..=3), "Unsupported REL version {}", version);
let bss_size = reader.read_u32::<BigEndian>()?; let bss_size = reader.read_u32::<BigEndian>()?;
@ -125,7 +122,7 @@ pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
.iter() .iter()
.find(|section| section.elf_index == section_idx as usize) .find(|section| section.elf_index == section_idx as usize)
.ok_or_else(|| anyhow!("Failed to locate {name} section {section_idx}"))?; .ok_or_else(|| anyhow!("Failed to locate {name} section {section_idx}"))?;
log::info!("Adding {name} section {section_idx} offset {offset:#X}"); log::debug!("Adding {name} section {section_idx} offset {offset:#X}");
symbols.push(ObjSymbol { symbols.push(ObjSymbol {
name: name.to_string(), name: name.to_string(),
demangled_name: None, demangled_name: None,
@ -135,6 +132,8 @@ pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
size_known: false, size_known: false,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()), flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
kind: ObjSymbolKind::Function, kind: ObjSymbolKind::Function,
align: None,
data_kind: Default::default(),
}); });
} }
Ok(()) Ok(())
@ -222,31 +221,21 @@ pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
reader.set_position(position); reader.set_position(position);
} }
let name = match name_offset { // let name = match name_offset {
0 => String::new(), // 0 => String::new(),
_ => read_string(&mut reader, name_offset as u64, name_size as usize)?, // _ => read_string(&mut reader, name_offset as u64, name_size as usize).unwrap_or_default(),
}; // };
Ok(ObjInfo { log::debug!("Read REL ID {module_id}");
module_id, let mut obj = ObjInfo::new(
kind: ObjKind::Relocatable, ObjKind::Relocatable,
architecture: ObjArchitecture::PowerPc, ObjArchitecture::PowerPc,
name, String::new(),
symbols, symbols,
sections, sections,
entry: 0, );
sda2_base: None, obj.module_id = module_id;
sda_base: None, obj.unresolved_relocations = unresolved_relocations;
stack_address: None, Ok(obj)
stack_end: None,
db_stack_addr: None,
arena_lo: None,
arena_hi: None,
splits: Default::default(),
named_sections: Default::default(),
link_order: vec![],
known_functions: Default::default(),
unresolved_relocations,
})
} }
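// Example (hypothetical usage sketch for the new signature): memory-map a REL file
// and hand the resulting Reader to process_rel. `map_file` and `map_reader` are
// assumed to be the memory-mapping helpers from util::file.
fn load_rel(path: &std::path::Path) -> anyhow::Result<crate::obj::ObjInfo> {
    let mmap = crate::util::file::map_file(path)?;
    crate::util::rel::process_rel(crate::util::file::map_reader(&mmap))
}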
#[derive(Debug, Clone)] #[derive(Debug, Clone)]

View File

@ -32,8 +32,8 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
let prolog_offset = reader.read_u32::<BigEndian>()?; let prolog_offset = reader.read_u32::<BigEndian>()?;
let epilog_offset = reader.read_u32::<BigEndian>()?; let epilog_offset = reader.read_u32::<BigEndian>()?;
let unresolved_offset = reader.read_u32::<BigEndian>()?; let unresolved_offset = reader.read_u32::<BigEndian>()?;
let internal_rel_offset = reader.read_u32::<BigEndian>()?; let _internal_rel_offset = reader.read_u32::<BigEndian>()?;
let internal_rel_size = reader.read_u32::<BigEndian>()?; let _internal_rel_size = reader.read_u32::<BigEndian>()?;
let external_rel_offset = reader.read_u32::<BigEndian>()?; let external_rel_offset = reader.read_u32::<BigEndian>()?;
let external_rel_size = reader.read_u32::<BigEndian>()?; let external_rel_size = reader.read_u32::<BigEndian>()?;
let export_table_offset = reader.read_u32::<BigEndian>()?; let export_table_offset = reader.read_u32::<BigEndian>()?;
@ -118,6 +118,8 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
size_known: false, size_known: false,
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()), flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
kind: ObjSymbolKind::Function, kind: ObjSymbolKind::Function,
align: None,
data_kind: Default::default(),
}); });
} }
Ok(()) Ok(())
@ -172,6 +174,8 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
size_known: false, size_known: false,
flags: Default::default(), flags: Default::default(),
kind: Default::default(), kind: Default::default(),
align: None,
data_kind: Default::default(),
}); });
} }
reader.set_position(import_table_offset as u64); reader.set_position(import_table_offset as u64);
@ -187,32 +191,14 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
0 => String::new(), 0 => String::new(),
_ => read_string(&mut reader, name_offset as u64, name_size as usize)?, _ => read_string(&mut reader, name_offset as u64, name_size as usize)?,
}; };
Ok(ObjInfo {
kind: ObjKind::Relocatable, let obj = ObjInfo::new(ObjKind::Relocatable, ObjArchitecture::PowerPc, name, symbols, sections);
architecture: ObjArchitecture::PowerPc, Ok(obj)
name,
symbols,
sections,
entry: 0,
sda2_base: None,
sda_base: None,
stack_address: None,
stack_end: None,
db_stack_addr: None,
arena_lo: None,
arena_hi: None,
splits: Default::default(),
named_sections: Default::default(),
link_order: vec![],
known_functions: Default::default(),
module_id: 0,
unresolved_relocations: vec![],
})
} }
fn symbol_hash(s: &str) -> u32 { fn symbol_hash(s: &str) -> u32 {
s.bytes().fold(0u32, |hash, c| { s.bytes().fold(0u32, |hash, c| {
let mut m = (hash << 4) + c as u32; let mut m = (hash << 4).wrapping_add(c as u32);
let n = m & 0xF0000000; let n = m & 0xF0000000;
if n != 0 { if n != 0 {
m ^= n >> 24; m ^= n >> 24;

97
src/util/yaz0.rs Normal file
View File

@ -0,0 +1,97 @@
// Source: https://github.com/Julgodis/picori/blob/650da9f4fe6050b39b80d5360416591c748058d5/src/yaz0.rs
// License: MIT
// Modified to use `std::io::Read`/`Seek` and `byteorder`
use std::io::{Read, Seek};
use anyhow::{ensure, Result};
use byteorder::{BigEndian, ReadBytesExt};
/// Yaz0 header.
pub struct Header {
/// Yaz0 magic (0x59617A30).
pub magic: u32,
/// Size of decompressed data.
pub decompressed_size: u32,
_reserved0: u32,
_reserved1: u32,
}
impl Header {
/// Reads a Yaz0 header from a reader.
pub fn from_binary<D: Read>(input: &mut D) -> Result<Header> {
Ok(Header {
magic: input.read_u32::<BigEndian>()?,
decompressed_size: input.read_u32::<BigEndian>()?,
_reserved0: input.read_u32::<BigEndian>()?,
_reserved1: input.read_u32::<BigEndian>()?,
})
}
/// Checks if the header is valid.
pub fn is_valid(&self) -> bool { self.magic == 0x59617A30 }
pub fn decompressed_size(input: &mut impl Read) -> Result<usize> {
let header = Header::from_binary(input)?;
ensure!(header.is_valid(), "Invalid Yaz0 magic");
Ok(header.decompressed_size as usize)
}
}
/// Decompresses the data into a newly allocated [`Vec`]. Assumes a Yaz0 header followed by
/// compressed data.
pub fn decompress_file<D: Read + Seek>(input: &mut D) -> Result<Vec<u8>> {
let decompressed_size = Header::decompressed_size(input)?;
decompress(input, decompressed_size)
}
/// Decompresses the data into a newly allocated [`Vec`]. `decompressed_size` can be
/// determined from the Yaz0 [`Header`].
pub fn decompress<D: Read + Seek>(input: &mut D, decompressed_size: usize) -> Result<Vec<u8>> {
let mut output = vec![0; decompressed_size];
decompress_into(input, output.as_mut_slice())?;
Ok(output)
}
/// Decompresses the data into the given buffer. The buffer must be large
/// enough to hold the decompressed data.
pub fn decompress_into<D: Read + Seek>(input: &mut D, destination: &mut [u8]) -> Result<()> {
let decompressed_size = destination.len();
let mut dest = 0;
let mut code = 0;
let mut code_bits = 0;
while dest < decompressed_size {
if code_bits == 0 {
code = input.read_u8()? as u32;
code_bits = 8;
}
if code & 0x80 != 0 {
let byte = input.read_u8()?;
destination[dest] = byte;
dest += 1;
} else {
let byte0 = input.read_u8()?;
let byte1 = input.read_u8()?;
let a = (byte0 & 0xf) as usize;
let b = (byte0 >> 4) as usize;
let offset = (a << 8) | (byte1 as usize);
let length = match b {
0 => (input.read_u8()? as usize) + 0x12,
length => length + 2,
};
ensure!(offset < dest, "Unexpected EOF");
let base = dest - (offset + 1);
for n in 0..length {
destination[dest] = destination[base + n];
dest += 1;
}
}
code <<= 1;
code_bits -= 1;
}
Ok(())
}
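// Example (hypothetical): a self-contained test decompressing a hand-built Yaz0
// stream. The payload encodes "ABABABAB": two literal chunks ('A', 'B') followed by
// one back-reference chunk that copies six bytes starting two bytes back.
#[cfg(test)]
mod tests {
    use std::io::Cursor;
    use super::*;

    #[test]
    fn decompress_small_stream() {
        let data: &[u8] = &[
            0x59, 0x61, 0x7A, 0x30, // "Yaz0" magic
            0x00, 0x00, 0x00, 0x08, // decompressed size = 8
            0x00, 0x00, 0x00, 0x00, // reserved
            0x00, 0x00, 0x00, 0x00, // reserved
            0xC0, // flag byte: literal, literal, then back-references
            0x41, // 'A'
            0x42, // 'B'
            0x40, 0x01, // copy length 6, offset field 1 (i.e. two bytes back)
        ];
        let out = decompress_file(&mut Cursor::new(data)).unwrap();
        assert_eq!(out, b"ABABABAB".to_vec());
    }
}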