6 months of occasional work I guess
This commit is contained in:
parent f1b4afa885
commit 0fa0aafaea
|
@ -30,18 +30,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "0.7.20"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
|
||||
checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.66"
|
||||
version = "1.0.71"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
|
||||
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
|
||||
dependencies = [
|
||||
"backtrace",
|
||||
]
|
||||
|
@ -53,9 +53,9 @@ source = "git+https://github.com/bjorn3/rust-ar.git?branch=write_symbol_table#a6
|
|||
|
||||
[[package]]
|
||||
name = "argh"
|
||||
version = "0.1.9"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c375edecfd2074d5edcc31396860b6e54b6f928714d0e097b983053fac0cabe3"
|
||||
checksum = "ab257697eb9496bf75526f0217b5ed64636a9cfafa78b8365c71bd283fcef93e"
|
||||
dependencies = [
|
||||
"argh_derive",
|
||||
"argh_shared",
|
||||
|
@ -63,22 +63,21 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "argh_derive"
|
||||
version = "0.1.9"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa013479b80109a1bf01a039412b0f0013d716f36921226d86c6709032fb7a03"
|
||||
checksum = "b382dbd3288e053331f03399e1db106c9fb0d8562ad62cb04859ae926f324fa6"
|
||||
dependencies = [
|
||||
"argh_shared",
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "argh_shared"
|
||||
version = "0.1.9"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "149f75bbec1827618262e0855a68f0f9a7f2edc13faebf33c4f16d6725edb6a9"
|
||||
checksum = "64cb94155d965e3d37ffbbe7cc5b82c3dd79dd33bd48e536f73d2cfb8d85506f"
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
|
@ -96,22 +95,22 @@ dependencies = [
|
|||
"cc",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"miniz_oxide",
|
||||
"object",
|
||||
"miniz_oxide 0.6.2",
|
||||
"object 0.30.0",
|
||||
"rustc-demangle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "base16ct"
|
||||
version = "0.1.1"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce"
|
||||
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.21.0"
|
||||
version = "0.21.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
|
||||
checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
|
||||
|
||||
[[package]]
|
||||
name = "bincode"
|
||||
|
@ -145,9 +144,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
|
|||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.0.78"
|
||||
version = "1.0.79"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d"
|
||||
checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
|
@ -185,9 +184,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cwdemangle"
|
||||
version = "0.1.3"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e411efa4ed072fa5bdb637c945ea7f618ebd416748cecc255b00968c1db81e68"
|
||||
checksum = "b58d34a3a03cfe0a4ebfd03aeda6ee8a0f2e99bd3308476a8a89815add3ec373"
|
||||
dependencies = [
|
||||
"argh",
|
||||
]
|
||||
|
@ -209,14 +208,16 @@ dependencies = [
|
|||
"fixedbitset",
|
||||
"flagset",
|
||||
"flate2",
|
||||
"glob",
|
||||
"hex",
|
||||
"indexmap",
|
||||
"indexmap 2.0.0",
|
||||
"itertools",
|
||||
"log",
|
||||
"memchr",
|
||||
"memmap2",
|
||||
"multimap",
|
||||
"num_enum",
|
||||
"object",
|
||||
"object 0.31.1",
|
||||
"once_cell",
|
||||
"ppc750cl",
|
||||
"regex",
|
||||
|
@ -249,6 +250,12 @@ dependencies = [
|
|||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.10.0"
|
||||
|
@ -262,6 +269,12 @@ dependencies = [
|
|||
"termcolor",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1"
|
||||
|
||||
[[package]]
|
||||
name = "errno"
|
||||
version = "0.2.8"
|
||||
|
@ -285,14 +298,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "filetime"
|
||||
version = "0.2.18"
|
||||
version = "0.2.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3"
|
||||
checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"windows-sys",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -312,12 +325,12 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "flate2"
|
||||
version = "1.0.25"
|
||||
version = "1.0.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
|
||||
checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"miniz_oxide",
|
||||
"miniz_oxide 0.7.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -336,6 +349,12 @@ version = "0.27.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.3"
|
||||
|
@ -352,10 +371,10 @@ dependencies = [
|
|||
]
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.4.0"
|
||||
name = "hashbrown"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
|
||||
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
|
@ -388,6 +407,16 @@ dependencies = [
|
|||
"hashbrown 0.12.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.14.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "io-lifetimes"
|
||||
version = "1.0.3"
|
||||
|
@ -395,7 +424,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys",
|
||||
"windows-sys 0.42.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -407,7 +436,16 @@ dependencies = [
|
|||
"hermit-abi",
|
||||
"io-lifetimes",
|
||||
"rustix",
|
||||
"windows-sys",
|
||||
"windows-sys 0.42.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
|
||||
dependencies = [
|
||||
"either",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -418,9 +456,9 @@ checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.137"
|
||||
version = "0.2.147"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
|
||||
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
|
@ -430,12 +468,9 @@ checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4"
|
|||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.17"
|
||||
version = "0.4.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
|
@ -445,9 +480,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
|
|||
|
||||
[[package]]
|
||||
name = "memmap2"
|
||||
version = "0.5.8"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
|
||||
checksum = "f49388d20533534cd19360ad3d6a7dadc885944aa802ba3995040c5ec11288c6"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
@ -462,10 +497,19 @@ dependencies = [
|
|||
]
|
||||
|
||||
[[package]]
|
||||
name = "multimap"
|
||||
version = "0.8.3"
|
||||
name = "miniz_oxide"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
|
||||
checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
|
||||
dependencies = [
|
||||
"adler",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "multimap"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "70db9248a93dc36a36d9a47898caa007a32755c7ad140ec64eeeb50d5a730631"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
@ -481,23 +525,23 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "num_enum"
|
||||
version = "0.5.7"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf5395665662ef45796a4ff5486c5d41d29e0c09640af4c5f17fd94ee2c119c9"
|
||||
checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1"
|
||||
dependencies = [
|
||||
"num_enum_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_enum_derive"
|
||||
version = "0.5.7"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b0498641e53dd6ac1a4f22547548caa6864cc4933784319cd1775271c5a46ce"
|
||||
checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6"
|
||||
dependencies = [
|
||||
"proc-macro-crate",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -505,18 +549,27 @@ name = "object"
|
|||
version = "0.30.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.31.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"hashbrown 0.13.1",
|
||||
"indexmap",
|
||||
"indexmap 1.9.2",
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.17.0"
|
||||
version = "1.18.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
|
||||
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
|
||||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
|
@ -546,18 +599,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.47"
|
||||
version = "1.0.63"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
|
||||
checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.21"
|
||||
version = "1.0.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
|
||||
checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
@ -573,9 +626,21 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.7.0"
|
||||
version = "1.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
|
||||
checksum = "89089e897c013b3deb627116ae56a6955a72b8bed395c9526af31c9fe528b484"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fa250384981ea14565685dea16a9ccc4d1c541a13f82b9c168572264d1df8c56"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
|
@ -584,9 +649,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.28"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
|
||||
checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846"
|
||||
|
||||
[[package]]
|
||||
name = "rmp"
|
||||
|
@ -612,9 +677,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rustc-demangle"
|
||||
version = "0.1.21"
|
||||
version = "0.1.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
|
||||
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
|
@ -627,7 +692,7 @@ dependencies = [
|
|||
"io-lifetimes",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys",
|
||||
"windows-sys 0.42.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -638,42 +703,42 @@ checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
|
|||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.152"
|
||||
version = "1.0.166"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
|
||||
checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.152"
|
||||
version = "1.0.166"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
|
||||
checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_repr"
|
||||
version = "0.1.10"
|
||||
version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a5ec9fa74a20ebbe5d9ac23dac1fc96ba0ecfe9f50f2843b52e537b10fbcb4e"
|
||||
checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_yaml"
|
||||
version = "0.9.16"
|
||||
version = "0.9.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "92b5b431e8907b50339b51223b97d102db8d987ced36f6e4d03621db9316c834"
|
||||
checksum = "452e67b9c20c37fa79df53201dc03839651086ed9bbe92b3ca585ca9fdaa7d85"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"indexmap 2.0.0",
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
|
@ -682,9 +747,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "sha-1"
|
||||
version = "0.10.0"
|
||||
version = "0.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
|
||||
checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
|
@ -693,9 +758,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.10.0"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
|
||||
checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
|
@ -708,6 +773,17 @@ dependencies = [
|
|||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.1.3"
|
||||
|
@ -734,7 +810,7 @@ checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -766,9 +842,9 @@ checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
|
|||
|
||||
[[package]]
|
||||
name = "unsafe-libyaml"
|
||||
version = "0.2.5"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc7ed8ba44ca06be78ea1ad2c3682a43349126c8818054231ee6f4748012aed2"
|
||||
checksum = "1865806a559042e51ab5414598446a5871b561d21b6764f2eabb0dd481d880a6"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
|
@ -813,13 +889,37 @@ version = "0.42.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
"windows_aarch64_gnullvm 0.42.0",
|
||||
"windows_aarch64_msvc 0.42.0",
|
||||
"windows_i686_gnu 0.42.0",
|
||||
"windows_i686_msvc 0.42.0",
|
||||
"windows_x86_64_gnu 0.42.0",
|
||||
"windows_x86_64_gnullvm 0.42.0",
|
||||
"windows_x86_64_msvc 0.42.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.48.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.48.0",
|
||||
"windows_aarch64_msvc 0.48.0",
|
||||
"windows_i686_gnu 0.48.0",
|
||||
"windows_i686_msvc 0.48.0",
|
||||
"windows_x86_64_gnu 0.48.0",
|
||||
"windows_x86_64_gnullvm 0.48.0",
|
||||
"windows_x86_64_msvc 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -828,38 +928,80 @@ version = "0.42.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.42.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.42.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.42.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.42.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.42.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.42.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.48.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
|
||||
|
|
Cargo.toml: 55 lines changed
|
@ -21,43 +21,46 @@ panic = "abort"
|
|||
strip = "debuginfo"
|
||||
|
||||
[dependencies]
|
||||
anyhow = { version = "1.0.64", features = ["backtrace"] }
|
||||
anyhow = { version = "1.0.71", features = ["backtrace"] }
|
||||
ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "write_symbol_table" }
|
||||
argh = "0.1.8"
|
||||
base16ct = "0.1.1"
|
||||
base64 = "0.21.0"
|
||||
argh = "0.1.10"
|
||||
base16ct = "0.2.0"
|
||||
base64 = "0.21.2"
|
||||
byteorder = "1.4.3"
|
||||
cwdemangle = "0.1.3"
|
||||
cwdemangle = "0.1.5"
|
||||
dol = { git = "https://github.com/encounter/ppc750cl", rev = "5f6e991bf495388c4104f188d2e90c79da9f78de" }
|
||||
env_logger = "0.10.0"
|
||||
filetime = "0.2.18"
|
||||
filetime = "0.2.21"
|
||||
fixedbitset = "0.4.2"
|
||||
flagset = { version = "0.4.3", features = ["serde"] }
|
||||
flate2 = "1.0.26"
|
||||
glob = "0.3.1"
|
||||
hex = "0.4.3"
|
||||
indexmap = "1.9.2"
|
||||
log = "0.4.17"
|
||||
indexmap = "2.0.0"
|
||||
itertools = "0.11.0"
|
||||
log = "0.4.19"
|
||||
memchr = "2.5.0"
|
||||
memmap2 = "0.5.7"
|
||||
multimap = "0.8.3"
|
||||
num_enum = "0.5.7"
|
||||
object = { version = "0.30.0", features = ["read_core", "std", "elf", "write_std"], default-features = false }
|
||||
once_cell = "1.17.0"
|
||||
memmap2 = "0.7.1"
|
||||
multimap = "0.9.0"
|
||||
num_enum = "0.6.1"
|
||||
object = { version = "0.31.1", features = ["read_core", "std", "elf", "write_std"], default-features = false }
|
||||
once_cell = "1.18.0"
|
||||
ppc750cl = { git = "https://github.com/encounter/ppc750cl", rev = "5f6e991bf495388c4104f188d2e90c79da9f78de" }
|
||||
regex = "1.6.0"
|
||||
serde = "1.0.152"
|
||||
serde_repr = "0.1.10"
|
||||
serde_yaml = "0.9.16"
|
||||
sha-1 = "0.10.0"
|
||||
smallvec = "1.10.0"
|
||||
regex = "1.9.0"
|
||||
serde = "1.0.166"
|
||||
serde_repr = "0.1.14"
|
||||
serde_yaml = "0.9.22"
|
||||
sha-1 = "0.10.1"
|
||||
smallvec = "1.11.0"
|
||||
topological-sort = "0.2.2"
|
||||
flate2 = "1.0.25"
|
||||
|
||||
|
||||
[build-dependencies]
|
||||
anyhow = { version = "1.0.64", features = ["backtrace"] }
|
||||
base64 = "0.21.0"
|
||||
anyhow = { version = "1.0.71", features = ["backtrace"] }
|
||||
base64 = "0.21.2"
|
||||
flagset = { version = "0.4.3", features = ["serde"] }
|
||||
serde = "1.0.152"
|
||||
serde_repr = "0.1.10"
|
||||
serde_yaml = "0.9.16"
|
||||
serde = "1.0.166"
|
||||
serde_repr = "0.1.14"
|
||||
serde_yaml = "0.9.22"
|
||||
rmp-serde = "1.1.1"
|
||||
flate2 = "1.0.25"
|
||||
flate2 = "1.0.26"
|
||||
|
|
README.md: 294 lines changed
|
@ -3,11 +3,230 @@
|
|||
[Build Status]: https://github.com/encounter/decomp-toolkit/actions/workflows/build.yml/badge.svg
|
||||
[actions]: https://github.com/encounter/decomp-toolkit/actions
|
||||
|
||||
GameCube/Wii decompilation project tools.
|
||||
Yet another GameCube/Wii decompilation toolkit.
|
||||
|
||||
This provides various commands that assist with creating a build system that works
|
||||
across all major platforms without dealing with platform-specific C compilers,
|
||||
UNIX compatibility layers like msys2, or other idiosyncrasies.
|
||||
decomp-toolkit functions both as a command-line tool for developers, and as a replacement for various parts of a
|
||||
decompilation project's build system.
|
||||
|
||||
## Goals
|
||||
|
||||
- Automate as much as possible, allowing developers to focus on matching code rather than months-long tedious setup.
|
||||
- Provide highly **accurate** and performant analysis and tooling.
|
||||
- Provide everything in a single portable binary. This simplifies project setup: a script can simply fetch the
|
||||
binary from GitHub.
|
||||
- Replace common usages of msys2 and GNU assembler, eliminating the need to depend on devkitPro.
|
||||
- Integrate well with other decompilation tooling like [objdiff](https://github.com/encounter/objdiff) and
|
||||
[decomp.me](https://decomp.me).
|
||||
|
||||
## Background
|
||||
|
||||
The goal of a matching decompilation project is to write C/C++ code that compiles back to the _exact_ same binary as
|
||||
the original game. This often requires using the same compiler as the original game. (For GameCube and Wii,
|
||||
[Metrowerks CodeWarrior](https://en.wikipedia.org/wiki/CodeWarrior))
|
||||
|
||||
When compiling C/C++ code, the compiler (in our case, `mwcceppc`) generates an object file (`.o`) for every source file.
|
||||
This object file contains the compiled machine code, as well as information that the linker (`mwldeppc`) uses to
|
||||
generate the final executable.
|
||||
|
||||
One way to verify that our code is a match is by taking any code that has been decompiled, and
|
||||
linking it alongside portions of the original binary that have not been decompiled yet. First, we create relocatable
|
||||
objects from the original binary:
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="assets/diagram_dark.svg">
|
||||
<source media="(prefers-color-scheme: light)" srcset="assets/diagram_light.svg">
|
||||
<img alt="Binary split diagram" src="assets/diagram.svg">
|
||||
</picture>
|
||||
(Heavily simplified)
|
||||
|
||||
Then, each object can be replaced by a decompiled version as matching code is written. If the linker still generates a
|
||||
binary that is byte-for-byte identical to the original, then we know that the decompiled code is a match.
|
||||
|
||||
decomp-toolkit provides tooling for analyzing and splitting the original binary into relocatable objects, as well
|
||||
as generating the linker script and other files needed to link the decompiled code.
|
||||
|
||||
## Other approaches
|
||||
|
||||
### Manual assembly
|
||||
|
||||
With existing GameCube/Wii decompilation tooling, the setup process is very tedious and error-prone.
|
||||
The general process is:
|
||||
|
||||
- Begin by disassembling the original binary with a tool like
|
||||
[doldisasm.py](https://gist.github.com/camthesaxman/a36f610dbf4cc53a874322ef146c4123). This produces one giant
|
||||
assembly file per section.
|
||||
- Manually comb through the assembly files and fix many issues, like incorrect or missing relocations, incorrect or
|
||||
missing symbols, and more.
|
||||
- Manually find-and-replace the auto-generated symbol names based on other sources, like other decompilation projects
|
||||
or a map file. (If you're lucky enough to have one)
|
||||
- Manually determine data types and sizes, and convert them accordingly. (For example, `.4byte` -> `.float`, strings,
|
||||
etc)
|
||||
- Manually split the assembly files into individual objects. This is a very tedious process, as it requires identifying
|
||||
the boundaries of each function, determining whether adjacent functions are related, finding associated
|
||||
data from each data section, and cut-and-pasting all of this into a new file.
|
||||
|
||||
Other downsides of this approach:
|
||||
|
||||
- Manually editing the assembly means that the result is not reproducible. You can't run the script again to
|
||||
make any updates, because your changes will be overwritten. This also means that the assembly files must be
|
||||
stored in version control, which is not ideal.
|
||||
- Incorrectly splitting objects is very easy to do, and can be difficult to detect. For example, a `.ctors` entry _must_
|
||||
be located in the same object as the function it references, otherwise the linker will not generate the correct
|
||||
`.ctors` entry. `extab` and `extabindex` entries _must also_ be located in the same object as the function they
|
||||
reference, have a label and have the correct size, and have a direct relocation rather than a section-relative
|
||||
relocation. Otherwise, the linker will crash with a cryptic error message.
|
||||
- Relying on assembly means that you need an assembler. For GameCube/Wii, this means devkitPro, which is a
|
||||
large dependency and an obstacle for new contributors. The assembler also has some quirks that don't interact well
|
||||
with `mwldeppc`, which means that the object files must be manually post-processed to fix these issues. (See the
|
||||
[elf fixup](#elf-fixup) command)
|
||||
|
||||
With decomp-toolkit:
|
||||
|
||||
- Many analysis steps are automated and highly accurate. Many DOL files can be analyzed and split into re-linkable
|
||||
objects with no configuration.
|
||||
- Signature analysis automatically labels common functions and objects, and allows for more accurate relocation
|
||||
rebuilding.
|
||||
- Any manual adjustments are stored in configuration files, which are stored in version control.
|
||||
- Splitting is simplified by updating a configuration file. The analyzer will check for common issues, like
|
||||
incorrectly split `.ctors`/`.dtors`/`extab`/`extabindex` entries. If the user hasn't configured a split for these,
|
||||
the analyzer will automatically split them along with their associated functions to ensure that the linker will
|
||||
generate everything correctly. This means that matching code can be written without worrying about splitting all
|
||||
sections up front.
|
||||
- The splitter generates object files directly, with no assembler required. This means that we can avoid the devkitPro
|
||||
requirement. (Although we can still generate assembly files for viewing, editing, and compatibility with other tools)
|
||||
|
||||
### dadosod
|
||||
|
||||
[dadosod](https://github.com/InusualZ/dadosod) is a newer replacement for `doldisasm.py`. It has more accurate function
|
||||
and relocation analysis than `doldisasm.py`, as well as support for renaming symbols based on a map file. However, since
|
||||
it operates as a one-shot assembly generator, it still suffers from many of the same issues described above.
|
||||
|
||||
### ppcdis
|
||||
|
||||
[ppcdis](https://github.com/SeekyCt/ppcdis) is one of the tools that inspired decomp-toolkit. It has more accurate
|
||||
analysis than doldisasm.py, and has similar goals to decomp-toolkit. It also has some features that decomp-toolkit does
|
||||
not yet have, like support for REL files.
|
||||
|
||||
However, decomp-toolkit has a few advantages:
|
||||
|
||||
- Faster and more accurate analysis. (See [Analyzer features](#analyzer-features))
|
||||
- Emits object files directly, with no assembler required.
|
||||
- More robust handling of features like common BSS, `.ctors`/`.dtors`/`extab`/`extabindex`, and more.
|
||||
- Requires very little configuration to start.
|
||||
- Automatically labels common functions and objects with signature analysis.
|
||||
|
||||
### Honorable mentions
|
||||
|
||||
[splat](https://github.com/ethteck/splat) is a binary splitting tool for N64 and PSX. Some ideas from splat inspired
|
||||
decomp-toolkit, like the symbol configuration format.
|
||||
|
||||
## Terminology
|
||||
|
||||
### DOL
|
||||
|
||||
A [DOL file](https://wiki.tockdom.com/wiki/DOL_(File_Format)) is the executable format used by GameCube and Wii games.
|
||||
It's essentially a raw binary with a header that contains information about the code and data sections, as well as the
|
||||
entry point.
|
||||
|
||||
### ELF
|
||||
|
||||
An [ELF file](https://en.wikipedia.org/wiki/Executable_and_Linkable_Format) is the executable format used by most
|
||||
Unix-like operating systems. There are two common types of ELF files: **relocatable** and **executable**.
|
||||
|
||||
A relocatable ELF (`.o`, also called "object file") contains machine code and relocation information, and is used as
|
||||
input to the linker. Each object file is compiled from a single source file (`.c`, `.cpp`).
|
||||
|
||||
An executable ELF (`.elf`) contains the final machine code that can be loaded and executed. It *can* include
|
||||
information about symbols, debug information (DWARF), and sometimes information about the original relocations, but it
|
||||
is often missing some or all of these (referred to as "stripped").
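
As a quick illustration, the `object` crate (already a dependency of this project) can read both kinds of ELF. A minimal, hypothetical sketch that lists a file's symbols — not decomp-toolkit code, just an example of inspecting an ELF:

```rust
use object::{Object, ObjectSymbol};

fn dump_symbols(path: &str) -> anyhow::Result<()> {
    let data = std::fs::read(path)?;
    let file = object::File::parse(&*data)?;
    for sym in file.symbols() {
        // Print each symbol's address and name.
        println!("{:#010x} {}", sym.address(), sym.name()?);
    }
    Ok(())
}
```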
|
||||
|
||||
### Symbol
|
||||
|
||||
A symbol is a name that is assigned to a memory address. Symbols can be functions, variables, or other data.
|
||||
|
||||
**Local** symbols are only visible within the object file they are defined in.
|
||||
These are usually defined as `static` in C/C++ or are compiler-generated.
|
||||
|
||||
**Global** symbols are visible to all object files, and their names must be unique.
|
||||
|
||||
**Weak** symbols are similar to global symbols, but can be replaced by a global symbol with the same name.
|
||||
For example: the SDK defines a weak `OSReport` function, which can be replaced by a game-specific implementation.
|
||||
Weak symbols are also used for functions generated by the compiler or as a result of C++ features, since they can exist
|
||||
in multiple object files. The linker will deduplicate these functions, keeping only the first copy.
|
||||
|
||||
### Relocation
|
||||
|
||||
A relocation is essentially a pointer to a symbol. At compile time, the final address of a symbol is
|
||||
not yet known, so a relocation is needed.
|
||||
At link time, each symbol is assigned a final address, and the linker will use the relocations to update the machine
|
||||
code with the final address of each symbol.
|
||||
|
||||
Before:
|
||||
|
||||
```asm
|
||||
# Unrelocated, instructions point to address 0 (unknown)
|
||||
lis r3, 0
|
||||
ori r3, r3, 0
|
||||
```
|
||||
|
||||
After:
|
||||
|
||||
```asm
|
||||
# Relocated, instructions point to 0x80001234
|
||||
lis r3, 0x8000
|
||||
ori r3, r3, 0x1234
|
||||
```
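
In essence, the linker splits the symbol's 32-bit address into halves and writes them into the two instructions' immediate fields. A simplified sketch of that patch for the `lis`/`ori` pair above (the relevant PowerPC relocation types are `R_PPC_ADDR16_HI` and `R_PPC_ADDR16_LO`; this is not decomp-toolkit's actual relocation code):

```rust
// Patch the 16-bit immediates of an unrelocated lis/ori pair with a symbol address.
fn relocate_lis_ori(lis: u32, ori: u32, symbol_addr: u32) -> (u32, u32) {
    let hi = symbol_addr >> 16;     // upper half -> lis immediate
    let lo = symbol_addr & 0xFFFF;  // lower half -> ori immediate
    (lis | hi, ori | lo)
}

// relocate_lis_ori(0x3C60_0000, 0x6063_0000, 0x8000_1234)
// produces `lis r3, 0x8000` / `ori r3, r3, 0x1234`.
```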
|
||||
|
||||
Once the linker performs the relocation with the final address, the relocation is no longer needed. Sometimes the
|
||||
final ELF will still contain the relocation information, but the conversion to DOL will **always** remove it.
|
||||
|
||||
When we analyze a file, we attempt to rebuild the relocations. This is useful for several reasons:
|
||||
|
||||
- It allows us to split the file into relocatable objects. Each object can then be replaced with a decompiled version,
|
||||
as matching code is written.
|
||||
- It allows us to modify or add code and data to the game and have all machine code still point to the correct
|
||||
symbols, which may now be in a different location.
|
||||
- It allows us to view the machine code in a disassembler and show symbol names instead of raw addresses.
|
||||
|
||||
## Analyzer features
|
||||
|
||||
**Function boundary analysis**
|
||||
Discovers function boundaries with high accuracy. Uses various heuristics to disambiguate tail calls from
|
||||
inner-function control flow.
|
||||
|
||||
**Signature analysis**
|
||||
Utilizes a built-in signature database to identify common Metrowerks and SDK functions and objects.
|
||||
This also helps decomp-toolkit automatically generate required splits, like `__init_cpp_exceptions`.
|
||||
|
||||
**Relocation analysis**
|
||||
Performs control-flow analysis and rebuilds relocations with high accuracy.
|
||||
With some manual tweaking (mainly in data), this should generate fully-shiftable objects.
|
||||
|
||||
**Section analysis**
|
||||
Automatically identifies DOL and REL sections based on information from signature and relocation analysis.
|
||||
|
||||
**Object analysis**
|
||||
Attempts to identify the type and size of data objects by analyzing usage.
|
||||
Also attempts to identify string literals, wide string literals, and string tables.
|
||||
|
||||
**Splitting**
|
||||
Generates split object files in memory based on user configuration.
|
||||
In order to support relinking with `mwldeppc.exe`, any **unsplit** `.ctors`, `.dtors`, `extab` and `extabindex` entries
|
||||
are analyzed and automatically split along with their associated functions. This ensures that the linker will properly
|
||||
generate these sections without any additional configuration.
|
||||
A topological sort is performed to determine the final link order of the split objects.
|
||||
|
||||
**Object file writing**
|
||||
Writes object files directly, with no assembler required. (Bye devkitPPC!)
|
||||
If desired, optionally writes GNU assembler-compatible files alongside the object files.
|
||||
|
||||
**Linker script generation**
|
||||
Generates `ldscript.lcf` for `mwldeppc.exe`.
|
||||
|
||||
**Future work**
|
||||
|
||||
- Support REL and RSO files
|
||||
- Add more signatures
|
||||
- Rework CodeWarrior map parsing
|
||||
|
||||
## Commands
|
||||
|
||||
|
@ -32,17 +251,45 @@ $ dtk demangle 'BuildLight__9CGuiLightCFv'
|
|||
CGuiLight::BuildLight() const
|
||||
```
|
||||
|
||||
### elf disasm
|
||||
### dol info
|
||||
|
||||
Disassemble an unstripped CodeWarrior ELF file into fully-split & fully-shiftable assembly files.
|
||||
Analyzes a DOL file and outputs section and symbol information.
|
||||
|
||||
```shell
|
||||
$ dtk disasm input.elf out
|
||||
$ dtk dol info input.dol
|
||||
```
|
||||
|
||||
### dol split
|
||||
|
||||
> [!NOTE]
|
||||
> This command is a work-in-progress.
|
||||
|
||||
Analyzes and splits a DOL file into relocatable objects based on user configuration.
|
||||
|
||||
```shell
|
||||
$ dtk dol split input.dol target -s config/symbols.txt -p config/splits.txt
|
||||
```
|
||||
|
||||
### dwarf dump
|
||||
|
||||
Dumps DWARF 1.1 information from an ELF file. (Does **not** support DWARF 2+)
|
||||
|
||||
```shell
|
||||
$ dtk dwarf dump input.elf
|
||||
```
|
||||
|
||||
### elf disasm
|
||||
|
||||
Disassemble an unstripped CodeWarrior ELF file. Attempts to automatically split objects and rebuild relocations
|
||||
when possible.
|
||||
|
||||
```shell
|
||||
$ dtk elf disasm input.elf out
|
||||
```
|
||||
|
||||
### elf fixup
|
||||
|
||||
Fixes issues with GNU assembler-built objects to ensure compatibility with `mwldeppc`.
|
||||
Fixes issues with GNU assembler-built objects to ensure compatibility with `mwldeppc.exe`.
|
||||
|
||||
- Strips empty sections
|
||||
- Generates section symbols for all allocatable sections
|
||||
|
@ -64,6 +311,9 @@ $ dtk elf2dol input.elf output.dol
|
|||
|
||||
### map
|
||||
|
||||
> [!WARNING]
|
||||
> This command is currently broken.
|
||||
|
||||
Processes CodeWarrior map files and provides information about symbols and TUs.
|
||||
|
||||
```shell
|
||||
|
@ -79,6 +329,34 @@ $ dtk map symbol Game.MAP 'Function__5ClassFv'
|
|||
# in a readable format.
|
||||
```
|
||||
|
||||
### rel info
|
||||
|
||||
Prints basic information about a REL file.
|
||||
|
||||
```shell
|
||||
$ dtk rel info input.rel
|
||||
```
|
||||
|
||||
### rel merge
|
||||
|
||||
Merges a DOL file and associated RELs into a single ELF file, suitable for analysis in your favorite
|
||||
reverse engineering software.
|
||||
|
||||
```shell
|
||||
$ dtk rel merge main.dol rels/*.rel -o merged.elf
|
||||
```
|
||||
|
||||
### rso info
|
||||
|
||||
> [!WARNING]
|
||||
> This command is not yet functional.
|
||||
|
||||
Prints basic information about an RSO file.
|
||||
|
||||
```shell
|
||||
$ dtk rso info input.rso
|
||||
```
|
||||
|
||||
### shasum
|
||||
|
||||
Calculate and verify SHA-1 hashes.
|
||||
|
|
Three new image assets added (40 KiB, 34 KiB, 34 KiB); file diffs suppressed because one or more lines are too long.
|
@ -0,0 +1,29 @@
|
|||
MEMORY
|
||||
{
|
||||
text : origin = 0x80003100
|
||||
}
|
||||
|
||||
SECTIONS
|
||||
{
|
||||
GROUP:
|
||||
{
|
||||
$SECTIONS
|
||||
.stack ALIGN(0x100):{}
|
||||
} > text
|
||||
|
||||
_stack_addr = (_f_sbss2 + SIZEOF(.sbss2) + $STACKSIZE + 0x7) & ~0x7;
|
||||
_stack_end = _f_sbss2 + SIZEOF(.sbss2);
|
||||
_db_stack_addr = (_stack_addr + 0x2000);
|
||||
_db_stack_end = _stack_addr;
|
||||
__ArenaLo = (_db_stack_addr + 0x1f) & ~0x1f;
|
||||
__ArenaHi = 0x81700000;
|
||||
}
|
||||
|
||||
FORCEFILES
|
||||
{
|
||||
$FORCEFILES
|
||||
}
|
||||
|
||||
FORCEACTIVE
|
||||
{
|
||||
}
|
build.rs: 2 lines changed
|
@ -311,7 +311,7 @@ fn main() -> Result<()> {
|
|||
rmp_serde::encode::write(&mut encoder, &Output { symbols, signatures: out })?;
|
||||
let compressed = encoder.finish()?;
|
||||
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
|
||||
fs::write(out_dir.join("signatures.bin"), &compressed)?;
|
||||
fs::write(out_dir.join("signatures.bin"), compressed)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::collections::{BTreeMap, BTreeSet};
|
||||
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use anyhow::{bail, Context, Result};
|
||||
|
||||
use crate::{
|
||||
analysis::{
|
||||
|
@ -9,7 +9,7 @@ use crate::{
|
|||
slices::{FunctionSlices, TailCallResult},
|
||||
vm::{BranchTarget, GprValue, StepResult, VM},
|
||||
},
|
||||
obj::{ObjInfo, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind},
|
||||
obj::{ObjInfo, ObjSectionKind, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind},
|
||||
};
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
|
@ -29,102 +29,66 @@ impl AnalyzerState {
|
|||
if end == 0 {
|
||||
continue;
|
||||
}
|
||||
if let Some(existing_symbol) = obj
|
||||
.symbols
|
||||
.iter_mut()
|
||||
.find(|sym| sym.address == start as u64 && sym.kind == ObjSymbolKind::Function)
|
||||
{
|
||||
let new_size = (end - start) as u64;
|
||||
if !existing_symbol.size_known || existing_symbol.size == 0 {
|
||||
existing_symbol.size = new_size;
|
||||
existing_symbol.size_known = true;
|
||||
} else if existing_symbol.size != new_size {
|
||||
log::warn!(
|
||||
"Conflicting size for {}: was {:#X}, now {:#X}",
|
||||
existing_symbol.name,
|
||||
existing_symbol.size,
|
||||
new_size
|
||||
);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
let section = obj
|
||||
.sections
|
||||
.iter()
|
||||
.find(|section| {
|
||||
(start as u64) >= section.address
|
||||
&& (end as u64) <= section.address + section.size
|
||||
})
|
||||
.ok_or_else(|| {
|
||||
anyhow!("Failed to locate section for function {:#010X}-{:#010X}", start, end)
|
||||
})?;
|
||||
obj.symbols.push(ObjSymbol {
|
||||
let section_index =
|
||||
obj.section_for(start..end).context("Failed to locate section for function")?.index;
|
||||
obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: format!("fn_{:08X}", start),
|
||||
demangled_name: None,
|
||||
address: start as u64,
|
||||
section: Some(section.index),
|
||||
section: Some(section_index),
|
||||
size: (end - start) as u64,
|
||||
size_known: true,
|
||||
flags: Default::default(),
|
||||
kind: ObjSymbolKind::Function,
|
||||
});
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
},
|
||||
false,
|
||||
)?;
|
||||
}
|
||||
for (&addr, &size) in &self.jump_tables {
|
||||
let section = obj
|
||||
.sections
|
||||
.iter()
|
||||
.find(|section| {
|
||||
(addr as u64) >= section.address
|
||||
&& ((addr + size) as u64) <= section.address + section.size
|
||||
})
|
||||
.ok_or_else(|| anyhow!("Failed to locate section for jump table"))?;
|
||||
if let Some(existing_symbol) = obj
|
||||
.symbols
|
||||
.iter_mut()
|
||||
.find(|sym| sym.address == addr as u64 && sym.kind == ObjSymbolKind::Object)
|
||||
{
|
||||
let new_size = size as u64;
|
||||
if !existing_symbol.size_known || existing_symbol.size == 0 {
|
||||
existing_symbol.size = new_size;
|
||||
existing_symbol.size_known = true;
|
||||
// existing_symbol.flags.0 &= ObjSymbolFlags::Global;
|
||||
// existing_symbol.flags.0 |= ObjSymbolFlags::Local;
|
||||
} else if existing_symbol.size != new_size {
|
||||
log::warn!(
|
||||
"Conflicting size for {}: was {:#X}, now {:#X}",
|
||||
existing_symbol.name,
|
||||
existing_symbol.size,
|
||||
new_size
|
||||
);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
obj.symbols.push(ObjSymbol {
|
||||
let section_index = obj
|
||||
.section_for(addr..addr + size)
|
||||
.context("Failed to locate section for jump table")?
|
||||
.index;
|
||||
obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: format!("jumptable_{:08X}", addr),
|
||||
demangled_name: None,
|
||||
address: addr as u64,
|
||||
section: Some(section.index),
|
||||
section: Some(section_index),
|
||||
size: size as u64,
|
||||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
|
||||
kind: ObjSymbolKind::Object,
|
||||
});
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
},
|
||||
false,
|
||||
)?;
|
||||
}
|
||||
for (&_addr, symbol) in &self.known_symbols {
|
||||
if let Some(existing_symbol) = obj
|
||||
.symbols
|
||||
.iter_mut()
|
||||
.find(|e| symbol.address == e.address && symbol.kind == e.kind)
|
||||
{
|
||||
*existing_symbol = symbol.clone();
|
||||
continue;
|
||||
}
|
||||
obj.symbols.push(symbol.clone());
|
||||
obj.add_symbol(symbol.clone(), true)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn detect_functions(&mut self, obj: &ObjInfo) -> Result<()> {
|
||||
// Apply known functions from extab
|
||||
for (&addr, &size) in &obj.known_functions {
|
||||
self.function_entries.insert(addr);
|
||||
self.function_bounds.insert(addr, addr + size);
|
||||
}
|
||||
// Apply known functions from symbols
|
||||
for (_, symbol) in obj.symbols.by_kind(ObjSymbolKind::Function) {
|
||||
self.function_entries.insert(symbol.address as u32);
|
||||
if symbol.size_known {
|
||||
self.function_bounds
|
||||
.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
|
||||
}
|
||||
}
|
||||
|
||||
// Process known functions first
|
||||
let known_functions = self.function_entries.clone();
|
||||
for addr in known_functions {
|
||||
|
@ -189,6 +153,7 @@ impl AnalyzerState {
|
|||
)?;
|
||||
}
|
||||
}
|
||||
TailCallResult::Error(e) => return Err(e),
|
||||
}
|
||||
}
|
||||
if slices.can_finalize() {
|
||||
|
@ -249,17 +214,15 @@ impl AnalyzerState {
|
|||
match self.first_unbounded_function() {
|
||||
Some(addr) => {
|
||||
log::trace!("Processing {:#010X}", addr);
|
||||
self.process_function_at(&obj, addr)?;
|
||||
self.process_function_at(obj, addr)?;
|
||||
}
|
||||
None => {
|
||||
if !self.finalize_functions(obj, false)? {
|
||||
if !self.detect_new_functions(obj)? {
|
||||
if !self.finalize_functions(obj, false)? && !self.detect_new_functions(obj)? {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -291,9 +254,6 @@ impl AnalyzerState {
|
|||
fn process_function(&mut self, obj: &ObjInfo, start: u32) -> Result<Option<FunctionSlices>> {
|
||||
let mut slices = FunctionSlices::default();
|
||||
let function_end = self.function_bounds.get(&start).cloned();
|
||||
if start == 0x801FC300 {
|
||||
log::info!("Processing TRKExceptionHandler");
|
||||
}
|
||||
Ok(match slices.analyze(obj, start, start, function_end, &self.function_entries)? {
|
||||
true => Some(slices),
|
||||
false => None,
|
||||
|
@ -302,10 +262,17 @@ impl AnalyzerState {
|
|||
|
||||
fn detect_new_functions(&mut self, obj: &ObjInfo) -> Result<bool> {
|
||||
let mut found_new = false;
|
||||
let mut iter = self.function_bounds.iter().peekable();
|
||||
while let (Some((&first_begin, &first_end)), Some(&(&second_begin, &second_end))) =
|
||||
(iter.next(), iter.peek())
|
||||
{
|
||||
for section in &obj.sections {
|
||||
if section.kind != ObjSectionKind::Code {
|
||||
continue;
|
||||
}
|
||||
|
||||
let section_start = section.address as u32;
|
||||
let section_end = (section.address + section.size) as u32;
|
||||
let mut iter = self.function_bounds.range(section_start..section_end).peekable();
|
||||
loop {
|
||||
match (iter.next(), iter.peek()) {
|
||||
(Some((&first_begin, &first_end)), Some(&(&second_begin, &second_end))) => {
|
||||
if first_end == 0 || first_end > second_begin {
|
||||
continue;
|
||||
}
|
||||
|
@ -325,6 +292,28 @@ impl AnalyzerState {
|
|||
found_new = true;
|
||||
}
|
||||
}
|
||||
(Some((&last_begin, &last_end)), None) => {
|
||||
if last_end > 0 && last_end < section_end {
|
||||
let addr = match skip_alignment(obj, last_end, section_end) {
|
||||
Some(addr) => addr,
|
||||
None => continue,
|
||||
};
|
||||
if addr < section_end && self.function_entries.insert(addr) {
|
||||
log::debug!(
|
||||
"Trying function @ {:#010X} (from {:#010X}-{:#010X} <-> {:#010X})",
|
||||
addr,
|
||||
last_begin,
|
||||
last_end,
|
||||
section_end,
|
||||
);
|
||||
found_new = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(found_new)
|
||||
}
|
||||
}
|
||||
|
@ -342,20 +331,16 @@ pub fn locate_sda_bases(obj: &mut ObjInfo) -> Result<bool> {
|
|||
return Ok(ExecCbResult::Continue);
|
||||
}
|
||||
StepResult::Illegal => bail!("Illegal instruction @ {:#010X}", ins.addr),
|
||||
StepResult::Jump(target) => match target {
|
||||
BranchTarget::Address(addr) => {
|
||||
StepResult::Jump(target) => {
|
||||
if let BranchTarget::Address(addr) = target {
|
||||
return Ok(ExecCbResult::Jump(addr));
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
}
|
||||
StepResult::Branch(branches) => {
|
||||
for branch in branches {
|
||||
match branch.target {
|
||||
BranchTarget::Address(addr) => {
|
||||
if let BranchTarget::Address(addr) = branch.target {
|
||||
executor.push(addr, branch.vm, false);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,7 +7,9 @@ use crate::obj::{ObjInfo, ObjSection, ObjSectionKind};
|
|||
|
||||
pub mod cfa;
|
||||
pub mod executor;
|
||||
pub mod objects;
|
||||
pub mod pass;
|
||||
pub mod signatures;
|
||||
pub mod slices;
|
||||
pub mod tracker;
|
||||
pub mod vm;
|
||||
|
|
|
@ -0,0 +1,155 @@
|
|||
use anyhow::Result;
|
||||
|
||||
use crate::obj::{
|
||||
split::is_linker_generated_label, ObjDataKind, ObjInfo, ObjSectionKind, ObjSymbolKind,
|
||||
};
|
||||
|
||||
pub fn detect_object_boundaries(obj: &mut ObjInfo) -> Result<()> {
|
||||
for section in obj.sections.iter().filter(|s| s.kind != ObjSectionKind::Code) {
|
||||
let section_start = section.address as u32;
|
||||
let section_end = (section.address + section.size) as u32;
|
||||
|
||||
let mut replace_symbols = vec![];
|
||||
for (idx, symbol) in obj.symbols.for_range(section_start..section_end) {
|
||||
let mut symbol = symbol.clone();
|
||||
if is_linker_generated_label(&symbol.name) {
|
||||
continue;
|
||||
}
|
||||
let expected_size = match symbol.data_kind {
|
||||
ObjDataKind::Byte => 1,
|
||||
ObjDataKind::Byte2 => 2,
|
||||
ObjDataKind::Byte4 | ObjDataKind::Float => 4,
|
||||
ObjDataKind::Byte8 | ObjDataKind::Double => 8,
|
||||
_ => 0,
|
||||
};
|
||||
if !symbol.size_known {
|
||||
let next_addr = obj
|
||||
.symbols
|
||||
.for_range(symbol.address as u32 + 1..section_end)
|
||||
.next()
|
||||
.map_or(section_end, |(_, symbol)| symbol.address as u32);
|
||||
let new_size = next_addr - symbol.address as u32;
|
||||
log::debug!("Guessed {} size {:#X}", symbol.name, new_size);
|
||||
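// Heuristic: if the gap to the next symbol can plausibly be the data kind's
// element size plus alignment padding, keep the expected size; otherwise use
// the measured gap.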
symbol.size = match (new_size, expected_size) {
|
||||
(..=4, 1) => expected_size,
|
||||
(2 | 4, 2) => expected_size,
|
||||
(..=8, 1 | 2 | 4) => {
|
||||
// alignment to double
|
||||
if obj.symbols.at_address(next_addr).any(|(_, sym)| sym.data_kind == ObjDataKind::Double)
|
||||
// If we're at a TU boundary, we can assume it's just padding
|
||||
|| obj.splits.contains_key(&(symbol.address as u32 + new_size))
|
||||
{
|
||||
expected_size
|
||||
} else {
|
||||
new_size
|
||||
}
|
||||
}
|
||||
_ => new_size,
|
||||
} as u64;
|
||||
symbol.size_known = true;
|
||||
}
|
||||
symbol.kind = ObjSymbolKind::Object;
|
||||
if expected_size > 1 && symbol.size as u32 % expected_size != 0 {
|
||||
symbol.data_kind = ObjDataKind::Unknown;
|
||||
}
|
||||
replace_symbols.push((idx, symbol));
|
||||
}
|
||||
for (idx, symbol) in replace_symbols {
|
||||
obj.symbols.replace(idx, symbol)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn detect_strings(obj: &mut ObjInfo) -> Result<()> {
    let mut symbols_set = Vec::<(usize, ObjDataKind, usize)>::new();
    for section in obj
        .sections
        .iter()
        .filter(|s| matches!(s.kind, ObjSectionKind::Data | ObjSectionKind::ReadOnlyData))
    {
        enum StringResult {
            None,
            String { length: usize, terminated: bool },
            WString { length: usize, str: String },
        }
        pub const fn trim_zeroes_end(mut bytes: &[u8]) -> &[u8] {
            while let [rest @ .., last] = bytes {
                if *last == 0 {
                    bytes = rest;
                } else {
                    break;
                }
            }
            bytes
        }
        fn is_string(data: &[u8]) -> StringResult {
            let bytes = trim_zeroes_end(data);
            if bytes.iter().all(|&c| c.is_ascii_graphic() || c.is_ascii_whitespace()) {
                return StringResult::String {
                    length: bytes.len(),
                    terminated: data.len() > bytes.len(),
                };
            }
            if bytes.len() % 2 == 0 && data.len() >= bytes.len() + 2 {
                // Found at least 2 bytes of trailing 0s, check UTF-16
                let mut ok = true;
                let mut str = String::new();
                for n in std::char::decode_utf16(
                    bytes.chunks_exact(2).map(|c| u16::from_be_bytes(c.try_into().unwrap())),
                ) {
                    match n {
                        Ok(c) if c.is_ascii_graphic() || c.is_ascii_whitespace() => {
                            str.push(c);
                        }
                        _ => {
                            ok = false;
                            break;
                        }
                    }
                }
                if ok {
                    return StringResult::WString { length: bytes.len(), str };
                }
            }
            StringResult::None
        }
        for (symbol_idx, symbol) in obj
            .symbols
            .for_section(section)
            .filter(|(_, sym)| sym.data_kind == ObjDataKind::Unknown)
        {
            let (_section, data) =
                obj.section_data(symbol.address as u32, (symbol.address + symbol.size) as u32)?;
            match is_string(data) {
                StringResult::None => {}
                StringResult::String { length, terminated } => {
                    if length > 0 {
                        let str = String::from_utf8_lossy(&data[..length]);
                        log::debug!("Found string '{}' @ {}", str, symbol.name);
                        symbols_set.push((
                            symbol_idx,
                            ObjDataKind::String,
                            if terminated { length + 1 } else { length },
                        ));
                    }
                }
                StringResult::WString { length, str } => {
                    if length > 0 {
                        log::debug!("Found wide string '{}' @ {}", str, symbol.name);
                        symbols_set.push((symbol_idx, ObjDataKind::String16, length + 2));
                    }
                }
            }
        }
    }
    for (symbol_idx, data_kind, size) in symbols_set {
        let mut symbol = obj.symbols.at(symbol_idx).clone();
        log::debug!("Setting {} ({:#010X}) to size {:#X}", symbol.name, symbol.address, size);
        symbol.data_kind = data_kind;
        symbol.size = size as u64;
        symbol.size_known = true;
        obj.symbols.replace(symbol_idx, symbol)?;
    }
    Ok(())
}

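Aside (not part of the commit): for intuition, is_string above accepts a byte run as ASCII when everything before the trailing zero padding is printable, and as big-endian UTF-16 when the trimmed run has even length and at least two zero bytes follow it. A self-contained illustration of the ASCII half of that check (looks_ascii is my name, not the toolkit's):

// Trim trailing NULs, then require every remaining byte to be printable.
fn looks_ascii(data: &[u8]) -> bool {
    let end = data.iter().rposition(|&b| b != 0).map_or(0, |i| i + 1);
    data[..end].iter().all(|&c| c.is_ascii_graphic() || c.is_ascii_whitespace())
}

fn main() {
    assert!(looks_ascii(b"version 1.0\0"));            // printable text plus NUL padding
    assert!(!looks_ascii(&[0x00, 0x48, 0x00, 0x69]));  // "Hi" as UTF-16 BE trips the ASCII check
}
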
@ -38,6 +38,8 @@ impl AnalysisPass for FindTRKInterruptVectorTable {
|
|||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(FlagSet::from(ObjSymbolFlags::Global)),
|
||||
kind: ObjSymbolKind::Unknown,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
});
|
||||
let end = start + TRK_TABLE_SIZE;
|
||||
state.known_symbols.insert(end, ObjSymbol {
|
||||
|
@ -49,19 +51,21 @@ impl AnalysisPass for FindTRKInterruptVectorTable {
|
|||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(FlagSet::from(ObjSymbolFlags::Global)),
|
||||
kind: ObjSymbolKind::Unknown,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
});
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
log::info!("gTRKInterruptVectorTable not found");
|
||||
log::debug!("gTRKInterruptVectorTable not found");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FindSaveRestSleds {}
|
||||
|
||||
const SLEDS: [([u8; 4], &'static str, &'static str); 4] = [
|
||||
const SLEDS: [([u8; 4], &str, &str); 4] = [
|
||||
([0xd9, 0xcb, 0xff, 0x70], "__save_fpr", "_savefpr_"),
|
||||
([0xc9, 0xcb, 0xff, 0x70], "__restore_fpr", "_restfpr_"),
|
||||
([0x91, 0xcb, 0xff, 0xb8], "__save_gpr", "_savegpr_"),
|
||||
|
@ -77,7 +81,7 @@ impl AnalysisPass for FindSaveRestSleds {
|
|||
let (section, data) = obj.section_data(start, 0)?;
|
||||
for (needle, func, label) in &SLEDS {
|
||||
if data.starts_with(needle) {
|
||||
log::info!("Found {} @ {:#010X}", func, start);
|
||||
log::debug!("Found {} @ {:#010X}", func, start);
|
||||
clear_ranges.push(start + 4..start + SLED_SIZE as u32);
|
||||
state.known_symbols.insert(start, ObjSymbol {
|
||||
name: func.to_string(),
|
||||
|
@ -88,6 +92,8 @@ impl AnalysisPass for FindSaveRestSleds {
|
|||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
|
||||
kind: ObjSymbolKind::Function,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
});
|
||||
for i in 14..=31 {
|
||||
let addr = start + (i - 14) * 4;
|
||||
|
@ -100,6 +106,8 @@ impl AnalysisPass for FindSaveRestSleds {
|
|||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
|
||||
kind: ObjSymbolKind::Unknown,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
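Aside (not part of the commit): the FindSaveRestSleds pass above matches each __save/__restore sled by its first 4-byte instruction and then lays the per-register labels (_savegpr_14 through _savegpr_31, and so on) down at 4-byte strides, per the start + (i - 14) * 4 expression. The label address calculation on its own, assuming the 4-byte stride holds for every sled variant:

// Address of a _savegpr_N / _restgpr_N style label inside a sled starting at
// `sled_start`; registers 14..=31 map to consecutive 4-byte instructions.
fn sled_label_addr(sled_start: u32, reg: u32) -> Option<u32> {
    if (14..=31).contains(&reg) {
        Some(sled_start + (reg - 14) * 4)
    } else {
        None
    }
}
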
@ -0,0 +1,365 @@
|
|||
use anyhow::{anyhow, Result};
|
||||
|
||||
use crate::{
|
||||
analysis::{cfa::AnalyzerState, read_u32},
|
||||
obj::{
|
||||
signatures::{
|
||||
apply_signature, check_signatures, check_signatures_str, parse_signatures,
|
||||
FunctionSignature,
|
||||
},
|
||||
ObjInfo, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
|
||||
},
|
||||
};
|
||||
|
||||
const SIGNATURES: &[(&str, &str)] = &[
|
||||
("__init_registers", include_str!("../../assets/signatures/__init_registers.yml")),
|
||||
("__init_hardware", include_str!("../../assets/signatures/__init_hardware.yml")),
|
||||
("__init_data", include_str!("../../assets/signatures/__init_data.yml")),
|
||||
("__set_debug_bba", include_str!("../../assets/signatures/__set_debug_bba.yml")),
|
||||
("__OSPSInit", include_str!("../../assets/signatures/__OSPSInit.yml")),
|
||||
("__OSFPRInit", include_str!("../../assets/signatures/__OSFPRInit.yml")),
|
||||
("__OSCacheInit", include_str!("../../assets/signatures/__OSCacheInit.yml")),
|
||||
("DMAErrorHandler", include_str!("../../assets/signatures/DMAErrorHandler.yml")),
|
||||
("DBInit", include_str!("../../assets/signatures/DBInit.yml")),
|
||||
("OSInit", include_str!("../../assets/signatures/OSInit.yml")),
|
||||
("__OSThreadInit", include_str!("../../assets/signatures/__OSThreadInit.yml")),
|
||||
("__OSInitIPCBuffer", include_str!("../../assets/signatures/__OSInitIPCBuffer.yml")),
|
||||
("EXIInit", include_str!("../../assets/signatures/EXIInit.yml")),
|
||||
("EXIGetID", include_str!("../../assets/signatures/EXIGetID.yml")),
|
||||
("exit", include_str!("../../assets/signatures/exit.yml")),
|
||||
("_ExitProcess", include_str!("../../assets/signatures/_ExitProcess.yml")),
|
||||
("__fini_cpp", include_str!("../../assets/signatures/__fini_cpp.yml")),
|
||||
// ("__destroy_global_chain", include_str!("../../assets/signatures/__destroy_global_chain.yml")),
|
||||
("InitMetroTRK", include_str!("../../assets/signatures/InitMetroTRK.yml")),
|
||||
("InitMetroTRKCommTable", include_str!("../../assets/signatures/InitMetroTRKCommTable.yml")),
|
||||
("OSExceptionInit", include_str!("../../assets/signatures/OSExceptionInit.yml")),
|
||||
(
|
||||
"OSDefaultExceptionHandler",
|
||||
include_str!("../../assets/signatures/OSDefaultExceptionHandler.yml"),
|
||||
),
|
||||
("__OSUnhandledException", include_str!("../../assets/signatures/__OSUnhandledException.yml")),
|
||||
("OSDisableScheduler", include_str!("../../assets/signatures/OSDisableScheduler.yml")),
|
||||
("__OSReschedule", include_str!("../../assets/signatures/__OSReschedule.yml")),
|
||||
("__OSInitSystemCall", include_str!("../../assets/signatures/__OSInitSystemCall.yml")),
|
||||
("OSInitAlarm", include_str!("../../assets/signatures/OSInitAlarm.yml")),
|
||||
("__OSInitAlarm", include_str!("../../assets/signatures/__OSInitAlarm.yml")),
|
||||
("__OSEVStart", include_str!("../../assets/signatures/OSExceptionVector.yml")),
|
||||
("__OSDBINTSTART", include_str!("../../assets/signatures/__OSDBIntegrator.yml")),
|
||||
("__OSDBJUMPSTART", include_str!("../../assets/signatures/__OSDBJump.yml")),
|
||||
("SIInit", include_str!("../../assets/signatures/SIInit.yml")),
|
||||
("SIGetType", include_str!("../../assets/signatures/SIGetType.yml")),
|
||||
("SISetSamplingRate", include_str!("../../assets/signatures/SISetSamplingRate.yml")),
|
||||
("SISetXY", include_str!("../../assets/signatures/SISetXY.yml")),
|
||||
("VIGetTvFormat", include_str!("../../assets/signatures/VIGetTvFormat.yml")),
|
||||
("DVDInit", include_str!("../../assets/signatures/DVDInit.yml")),
|
||||
(
|
||||
"DVDSetAutoFatalMessaging",
|
||||
include_str!("../../assets/signatures/DVDSetAutoFatalMessaging.yml"),
|
||||
),
|
||||
("OSSetArenaLo", include_str!("../../assets/signatures/OSSetArenaLo.yml")),
|
||||
("OSSetArenaHi", include_str!("../../assets/signatures/OSSetArenaHi.yml")),
|
||||
("OSSetMEM1ArenaLo", include_str!("../../assets/signatures/OSSetMEM1ArenaLo.yml")),
|
||||
("OSSetMEM1ArenaHi", include_str!("../../assets/signatures/OSSetMEM1ArenaHi.yml")),
|
||||
("OSSetMEM2ArenaLo", include_str!("../../assets/signatures/OSSetMEM2ArenaLo.yml")),
|
||||
("OSSetMEM2ArenaHi", include_str!("../../assets/signatures/OSSetMEM2ArenaHi.yml")),
|
||||
("__OSInitAudioSystem", include_str!("../../assets/signatures/__OSInitAudioSystem.yml")),
|
||||
(
|
||||
"__OSInitMemoryProtection",
|
||||
include_str!("../../assets/signatures/__OSInitMemoryProtection.yml"),
|
||||
),
|
||||
// ("BATConfig", include_str!("../../assets/signatures/BATConfig.yml")), TODO
|
||||
("ReportOSInfo", include_str!("../../assets/signatures/ReportOSInfo.yml")),
|
||||
("__check_pad3", include_str!("../../assets/signatures/__check_pad3.yml")),
|
||||
("OSResetSystem", include_str!("../../assets/signatures/OSResetSystem.yml")),
|
||||
("OSReturnToMenu", include_str!("../../assets/signatures/OSReturnToMenu.yml")),
|
||||
("__OSReturnToMenu", include_str!("../../assets/signatures/__OSReturnToMenu.yml")),
|
||||
("__OSShutdownDevices", include_str!("../../assets/signatures/__OSShutdownDevices.yml")),
|
||||
("__OSInitSram", include_str!("../../assets/signatures/__OSInitSram.yml")),
|
||||
("__OSSyncSram", include_str!("../../assets/signatures/__OSSyncSram.yml")),
|
||||
(
|
||||
"__OSGetExceptionHandler",
|
||||
include_str!("../../assets/signatures/__OSGetExceptionHandler.yml"),
|
||||
),
|
||||
(
|
||||
"OSRegisterResetFunction",
|
||||
include_str!("../../assets/signatures/OSRegisterResetFunction.yml"),
|
||||
),
|
||||
(
|
||||
"OSRegisterShutdownFunction",
|
||||
include_str!("../../assets/signatures/OSRegisterShutdownFunction.yml"),
|
||||
),
|
||||
(
|
||||
"DecrementerExceptionHandler",
|
||||
include_str!("../../assets/signatures/DecrementerExceptionHandler.yml"),
|
||||
),
|
||||
(
|
||||
"DecrementerExceptionCallback",
|
||||
include_str!("../../assets/signatures/DecrementerExceptionCallback.yml"),
|
||||
),
|
||||
("__OSInterruptInit", include_str!("../../assets/signatures/__OSInterruptInit.yml")),
|
||||
("__OSContextInit", include_str!("../../assets/signatures/__OSContextInit.yml")),
|
||||
("OSSwitchFPUContext", include_str!("../../assets/signatures/OSSwitchFPUContext.yml")),
|
||||
("OSReport", include_str!("../../assets/signatures/OSReport.yml")),
|
||||
("TRK_main", include_str!("../../assets/signatures/TRK_main.yml")),
|
||||
("TRKNubWelcome", include_str!("../../assets/signatures/TRKNubWelcome.yml")),
|
||||
("TRKInitializeNub", include_str!("../../assets/signatures/TRKInitializeNub.yml")),
|
||||
(
|
||||
"TRKInitializeIntDrivenUART",
|
||||
include_str!("../../assets/signatures/TRKInitializeIntDrivenUART.yml"),
|
||||
),
|
||||
("TRKEXICallBack", include_str!("../../assets/signatures/TRKEXICallBack.yml")),
|
||||
("TRKLoadContext", include_str!("../../assets/signatures/TRKLoadContext.yml")),
|
||||
("TRKInterruptHandler", include_str!("../../assets/signatures/TRKInterruptHandler.yml")),
|
||||
("TRKExceptionHandler", include_str!("../../assets/signatures/TRKExceptionHandler.yml")),
|
||||
("TRKSaveExtended1Block", include_str!("../../assets/signatures/TRKSaveExtended1Block.yml")),
|
||||
("TRKNubMainLoop", include_str!("../../assets/signatures/TRKNubMainLoop.yml")),
|
||||
("TRKTargetContinue", include_str!("../../assets/signatures/TRKTargetContinue.yml")),
|
||||
("TRKSwapAndGo", include_str!("../../assets/signatures/TRKSwapAndGo.yml")),
|
||||
(
|
||||
"TRKRestoreExtended1Block",
|
||||
include_str!("../../assets/signatures/TRKRestoreExtended1Block.yml"),
|
||||
),
|
||||
(
|
||||
"TRKInterruptHandlerEnableInterrupts",
|
||||
include_str!("../../assets/signatures/TRKInterruptHandlerEnableInterrupts.yml"),
|
||||
),
|
||||
("memset", include_str!("../../assets/signatures/memset.yml")),
|
||||
(
|
||||
"__msl_runtime_constraint_violation_s",
|
||||
include_str!("../../assets/signatures/__msl_runtime_constraint_violation_s.yml"),
|
||||
),
|
||||
("ClearArena", include_str!("../../assets/signatures/ClearArena.yml")),
|
||||
("IPCCltInit", include_str!("../../assets/signatures/IPCCltInit.yml")),
|
||||
("__OSInitSTM", include_str!("../../assets/signatures/__OSInitSTM.yml")),
|
||||
("IOS_Open", include_str!("../../assets/signatures/IOS_Open.yml")),
|
||||
("__ios_Ipc2", include_str!("../../assets/signatures/__ios_Ipc2.yml")),
|
||||
("IPCiProfQueueReq", include_str!("../../assets/signatures/IPCiProfQueueReq.yml")),
|
||||
("SCInit", include_str!("../../assets/signatures/SCInit.yml")),
|
||||
("SCReloadConfFileAsync", include_str!("../../assets/signatures/SCReloadConfFileAsync.yml")),
|
||||
("NANDPrivateOpenAsync", include_str!("../../assets/signatures/NANDPrivateOpenAsync.yml")),
|
||||
("nandIsInitialized", include_str!("../../assets/signatures/nandIsInitialized.yml")),
|
||||
("nandOpen", include_str!("../../assets/signatures/nandOpen.yml")),
|
||||
("nandGenerateAbsPath", include_str!("../../assets/signatures/nandGenerateAbsPath.yml")),
|
||||
("nandGetHeadToken", include_str!("../../assets/signatures/nandGetHeadToken.yml")),
|
||||
("ISFS_OpenAsync", include_str!("../../assets/signatures/ISFS_OpenAsync.yml")),
|
||||
("nandConvertErrorCode", include_str!("../../assets/signatures/nandConvertErrorCode.yml")),
|
||||
(
|
||||
"NANDLoggingAddMessageAsync",
|
||||
include_str!("../../assets/signatures/NANDLoggingAddMessageAsync.yml"),
|
||||
),
|
||||
(
|
||||
"__NANDPrintErrorMessage",
|
||||
include_str!("../../assets/signatures/__NANDPrintErrorMessage.yml"),
|
||||
),
|
||||
("__OSInitNet", include_str!("../../assets/signatures/__OSInitNet.yml")),
|
||||
("__DVDCheckDevice", include_str!("../../assets/signatures/__DVDCheckDevice.yml")),
|
||||
("__OSInitPlayTime", include_str!("../../assets/signatures/__OSInitPlayTime.yml")),
|
||||
("__OSStartPlayRecord", include_str!("../../assets/signatures/__OSStartPlayRecord.yml")),
|
||||
("NANDInit", include_str!("../../assets/signatures/NANDInit.yml")),
|
||||
("ISFS_OpenLib", include_str!("../../assets/signatures/ISFS_OpenLib.yml")),
|
||||
("ESP_GetTitleId", include_str!("../../assets/signatures/ESP_GetTitleId.yml")),
|
||||
(
|
||||
"NANDSetAutoErrorMessaging",
|
||||
include_str!("../../assets/signatures/NANDSetAutoErrorMessaging.yml"),
|
||||
),
|
||||
("__DVDFSInit", include_str!("../../assets/signatures/__DVDFSInit.yml")),
|
||||
("__DVDClearWaitingQueue", include_str!("../../assets/signatures/__DVDClearWaitingQueue.yml")),
|
||||
("__DVDInitWA", include_str!("../../assets/signatures/__DVDInitWA.yml")),
|
||||
("__DVDLowSetWAType", include_str!("../../assets/signatures/__DVDLowSetWAType.yml")),
|
||||
("__fstLoad", include_str!("../../assets/signatures/__fstLoad.yml")),
|
||||
("DVDReset", include_str!("../../assets/signatures/DVDReset.yml")),
|
||||
("DVDLowReset", include_str!("../../assets/signatures/DVDLowReset.yml")),
|
||||
("DVDReadDiskID", include_str!("../../assets/signatures/DVDReadDiskID.yml")),
|
||||
("stateReady", include_str!("../../assets/signatures/stateReady.yml")),
|
||||
("DVDLowWaitCoverClose", include_str!("../../assets/signatures/DVDLowWaitCoverClose.yml")),
|
||||
("__DVDStoreErrorCode", include_str!("../../assets/signatures/__DVDStoreErrorCode.yml")),
|
||||
("DVDLowStopMotor", include_str!("../../assets/signatures/DVDLowStopMotor.yml")),
|
||||
("DVDGetDriveStatus", include_str!("../../assets/signatures/DVDGetDriveStatus.yml")),
|
||||
("printf", include_str!("../../assets/signatures/printf.yml")),
|
||||
("sprintf", include_str!("../../assets/signatures/sprintf.yml")),
|
||||
("vprintf", include_str!("../../assets/signatures/vprintf.yml")),
|
||||
("vsprintf", include_str!("../../assets/signatures/vsprintf.yml")),
|
||||
("vsnprintf", include_str!("../../assets/signatures/vsnprintf.yml")),
|
||||
("__pformatter", include_str!("../../assets/signatures/__pformatter.yml")),
|
||||
("longlong2str", include_str!("../../assets/signatures/longlong2str.yml")),
|
||||
("__mod2u", include_str!("../../assets/signatures/__mod2u.yml")),
|
||||
("__FileWrite", include_str!("../../assets/signatures/__FileWrite.yml")),
|
||||
("fwrite", include_str!("../../assets/signatures/fwrite.yml")),
|
||||
("__fwrite", include_str!("../../assets/signatures/__fwrite.yml")),
|
||||
("__stdio_atexit", include_str!("../../assets/signatures/__stdio_atexit.yml")),
|
||||
("__StringWrite", include_str!("../../assets/signatures/__StringWrite.yml")),
|
||||
];
|
||||
const POST_SIGNATURES: &[(&str, &str)] = &[
|
||||
("RSOStaticLocateObject", include_str!("../../assets/signatures/RSOStaticLocateObject.yml")),
|
||||
// ("GXInit", include_str!("../../assets/signatures/GXInit.yml")),
|
||||
("__register_fragment", include_str!("../../assets/signatures/__register_fragment.yml")),
|
||||
];
|
||||
|
||||
pub fn apply_signatures(obj: &mut ObjInfo) -> Result<()> {
|
||||
let entry = obj.entry as u32;
|
||||
if let Some(signature) =
|
||||
check_signatures_str(obj, entry, include_str!("../../assets/signatures/__start.yml"))?
|
||||
{
|
||||
apply_signature(obj, entry, &signature)?;
|
||||
}
|
||||
for &(name, sig_str) in SIGNATURES {
|
||||
if let Some((_, symbol)) = obj.symbols.by_name(name)? {
|
||||
let addr = symbol.address as u32;
|
||||
if let Some(signature) = check_signatures_str(obj, addr, sig_str)? {
|
||||
apply_signature(obj, addr, &signature)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some((_, symbol)) = obj.symbols.by_name("__init_user")? {
|
||||
// __init_user can be overridden, but we can still look for __init_cpp from it
|
||||
let mut analyzer = AnalyzerState::default();
|
||||
analyzer.process_function_at(obj, symbol.address as u32)?;
|
||||
for addr in analyzer.function_entries {
|
||||
if let Some(signature) = check_signatures_str(
|
||||
obj,
|
||||
addr,
|
||||
include_str!("../../assets/signatures/__init_cpp.yml"),
|
||||
)? {
|
||||
apply_signature(obj, addr, &signature)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some((_, symbol)) = obj.symbols.by_name("_ctors")? {
|
||||
// First entry of ctors is __init_cpp_exceptions
|
||||
let section = obj.section_at(symbol.address as u32)?;
|
||||
let target = read_u32(§ion.data, symbol.address as u32, section.address as u32)
|
||||
.ok_or_else(|| anyhow!("Failed to read _ctors data"))?;
|
||||
if target != 0 {
|
||||
if let Some(signature) = check_signatures_str(
|
||||
obj,
|
||||
target,
|
||||
include_str!("../../assets/signatures/__init_cpp_exceptions.yml"),
|
||||
)? {
|
||||
let address = symbol.address;
|
||||
let section_index = section.index;
|
||||
apply_signature(obj, target, &signature)?;
|
||||
obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: "__init_cpp_exceptions_reference".to_string(),
|
||||
demangled_name: None,
|
||||
address,
|
||||
section: Some(section_index),
|
||||
size: 4,
|
||||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
|
||||
kind: ObjSymbolKind::Object,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
},
|
||||
true,
|
||||
)?;
|
||||
if obj.split_for(address as u32).is_none() {
|
||||
obj.add_split(address as u32, ObjSplit {
|
||||
unit: "__init_cpp_exceptions.cpp".to_string(),
|
||||
end: address as u32 + 4,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some((_, symbol)) = obj.symbols.by_name("_dtors")? {
|
||||
let section = obj.section_at(symbol.address as u32)?;
|
||||
let address = symbol.address;
|
||||
let section_address = section.address;
|
||||
let section_index = section.index;
|
||||
// First entry of dtors is __destroy_global_chain
|
||||
let target = read_u32(§ion.data, address as u32, section_address as u32)
|
||||
.ok_or_else(|| anyhow!("Failed to read _dtors data"))?;
|
||||
let target2 = read_u32(§ion.data, address as u32 + 4, section_address as u32)
|
||||
.ok_or_else(|| anyhow!("Failed to read _dtors data"))?;
|
||||
let mut target_ok = false;
|
||||
let mut target2_ok = false;
|
||||
if target != 0 {
|
||||
if let Some(signature) = check_signatures_str(
|
||||
obj,
|
||||
target,
|
||||
include_str!("../../assets/signatures/__destroy_global_chain.yml"),
|
||||
)? {
|
||||
apply_signature(obj, target, &signature)?;
|
||||
obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: "__destroy_global_chain_reference".to_string(),
|
||||
demangled_name: None,
|
||||
address,
|
||||
section: Some(section_index),
|
||||
size: 4,
|
||||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
|
||||
kind: ObjSymbolKind::Object,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
},
|
||||
true,
|
||||
)?;
|
||||
target_ok = true;
|
||||
}
|
||||
}
|
||||
// Second entry of dtors is __fini_cpp_exceptions
|
||||
if target2 != 0 {
|
||||
if let Some(signature) = check_signatures_str(
|
||||
obj,
|
||||
target2,
|
||||
include_str!("../../assets/signatures/__fini_cpp_exceptions.yml"),
|
||||
)? {
|
||||
apply_signature(obj, target2, &signature)?;
|
||||
obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: "__fini_cpp_exceptions_reference".to_string(),
|
||||
demangled_name: None,
|
||||
address: address + 4,
|
||||
section: Some(section_index),
|
||||
size: 4,
|
||||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Local.into()),
|
||||
kind: ObjSymbolKind::Object,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
},
|
||||
true,
|
||||
)?;
|
||||
target2_ok = true;
|
||||
}
|
||||
}
|
||||
|
||||
if target_ok && target2_ok && obj.split_for(address as u32).is_none() {
|
||||
obj.add_split(address as u32, ObjSplit {
|
||||
unit: "__init_cpp_exceptions.cpp".to_string(),
|
||||
end: address as u32 + 8,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
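Aside (not part of the commit): the _ctors/_dtors handling above pulls the first one or two 4-byte entries out of the section with read_u32(&section.data, addr, section.address). That helper is defined elsewhere in the crate; a plausible standalone equivalent, reading a big-endian u32 at a virtual address inside a section, would be:

// Sketch only: convert a virtual address to an offset into the section bytes
// and decode a big-endian u32, returning None when out of bounds.
fn read_u32(data: &[u8], addr: u32, section_address: u32) -> Option<u32> {
    let offset = addr.checked_sub(section_address)? as usize;
    let bytes = data.get(offset..offset + 4)?;
    Some(u32::from_be_bytes(bytes.try_into().ok()?))
}
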
pub fn apply_signatures_post(obj: &mut ObjInfo) -> Result<()> {
|
||||
log::info!("Checking post CFA signatures...");
|
||||
for &(_name, sig_str) in POST_SIGNATURES {
|
||||
let signatures = parse_signatures(sig_str)?;
|
||||
let mut iter = obj.symbols.by_kind(ObjSymbolKind::Function);
|
||||
let opt = loop {
|
||||
let Some((_, symbol)) = iter.next() else {
|
||||
break Option::<(u32, FunctionSignature)>::None;
|
||||
};
|
||||
if let Some(signature) = check_signatures(obj, symbol.address as u32, &signatures)? {
|
||||
break Some((symbol.address as u32, signature));
|
||||
}
|
||||
};
|
||||
if let Some((addr, signature)) = opt {
|
||||
drop(iter);
|
||||
apply_signature(obj, addr, &signature)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
log::info!("Done!");
|
||||
Ok(())
|
||||
}
|
|
@ -35,6 +35,7 @@ pub enum TailCallResult {
|
|||
Not,
|
||||
Is,
|
||||
Possible,
|
||||
Error(anyhow::Error),
|
||||
}
|
||||
|
||||
type BlockRange = Range<u32>;
|
||||
|
@ -137,7 +138,7 @@ impl FunctionSlices {
|
|||
.with_context(|| format!("While processing {:#010X}", function_start))?;
|
||||
self.check_epilogue(section, ins)
|
||||
.with_context(|| format!("While processing {:#010X}", function_start))?;
|
||||
if !self.has_conditional_blr && is_conditional_blr(&ins) {
|
||||
if !self.has_conditional_blr && is_conditional_blr(ins) {
|
||||
self.has_conditional_blr = true;
|
||||
}
|
||||
if !self.has_rfi && ins.op == Opcode::Rfi {
|
||||
|
@ -351,13 +352,15 @@ impl FunctionSlices {
|
|||
}
|
||||
|
||||
let end = self.end();
|
||||
if let Ok(section) = obj.section_at(end) {
|
||||
match (obj.section_at(end), obj.section_at(end - 4)) {
|
||||
(Ok(section), Ok(other_section)) if section.index == other_section.index => {
|
||||
// FIXME this is real bad
|
||||
if !self.has_conditional_blr {
|
||||
if let Some(ins) = disassemble(§ion, end - 4) {
|
||||
if ins.op == Opcode::B {
|
||||
if self.function_references.contains(&ins.branch_dest().unwrap()) {
|
||||
for (_, branches) in &self.branches {
|
||||
if let Some(ins) = disassemble(section, end - 4) {
|
||||
if ins.op == Opcode::B
|
||||
&& self.function_references.contains(&ins.branch_dest().unwrap())
|
||||
{
|
||||
for branches in self.branches.values() {
|
||||
if branches.len() > 1
|
||||
&& branches.contains(self.blocks.last_key_value().unwrap().0)
|
||||
{
|
||||
|
@ -367,29 +370,28 @@ impl FunctionSlices {
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MWCC optimization sometimes leaves an unreachable blr
|
||||
// after generating a conditional blr in the function.
|
||||
if self.has_conditional_blr {
|
||||
if matches!(disassemble(§ion, end - 4), Some(ins) if !ins.is_blr())
|
||||
&& matches!(disassemble(§ion, end), Some(ins) if ins.is_blr())
|
||||
if self.has_conditional_blr
|
||||
&& matches!(disassemble(section, end - 4), Some(ins) if !ins.is_blr())
|
||||
&& matches!(disassemble(section, end), Some(ins) if ins.is_blr())
|
||||
&& !known_functions.contains(&end)
|
||||
{
|
||||
log::trace!("Found trailing blr @ {:#010X}, merging with function", end);
|
||||
self.blocks.insert(end, end + 4);
|
||||
}
|
||||
}
|
||||
|
||||
// Some functions with rfi also include a trailing nop
|
||||
if self.has_rfi {
|
||||
if matches!(disassemble(§ion, end), Some(ins) if is_nop(&ins))
|
||||
if self.has_rfi
|
||||
&& matches!(disassemble(section, end), Some(ins) if is_nop(&ins))
|
||||
&& !known_functions.contains(&end)
|
||||
{
|
||||
log::trace!("Found trailing nop @ {:#010X}, merging with function", end);
|
||||
self.blocks.insert(end, end + 4);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.finalized = true;
|
||||
|
@ -417,6 +419,14 @@ impl FunctionSlices {
|
|||
if addr < function_start {
|
||||
return TailCallResult::Is;
|
||||
}
|
||||
// If the jump target is in a different section, known tail call.
|
||||
let section = match obj.section_at(function_start) {
|
||||
Ok(section) => section,
|
||||
Err(e) => return TailCallResult::Error(e),
|
||||
};
|
||||
if !section.contains(addr) {
|
||||
return TailCallResult::Is;
|
||||
}
|
||||
// If the jump target has 0'd padding before it, known tail call.
|
||||
if matches!(obj.section_data(addr - 4, addr), Ok((_, data)) if data == [0u8; 4]) {
|
||||
return TailCallResult::Is;
|
||||
|
@ -428,15 +438,16 @@ impl FunctionSlices {
|
|||
}
|
||||
// If jump target is known to be a function, or there's a function in between
|
||||
// this and the jump target, known tail call.
|
||||
log::trace!("Checking {:#010X}..={:#010X}", function_start + 4, addr);
|
||||
if self.function_references.range(function_start + 4..=addr).next().is_some()
|
||||
|| known_functions.range(function_start + 4..=addr).next().is_some()
|
||||
{
|
||||
return TailCallResult::Is;
|
||||
}
|
||||
// Perform CFA on jump target to determine more
|
||||
let mut slices = FunctionSlices::default();
|
||||
slices.function_references = self.function_references.clone();
|
||||
let mut slices = FunctionSlices {
|
||||
function_references: self.function_references.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
if let Ok(result) =
|
||||
slices.analyze(obj, addr, function_start, Some(function_end), known_functions)
|
||||
{
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use std::{
|
||||
collections::{BTreeMap, BTreeSet},
|
||||
collections::{btree_map::Entry, BTreeMap, BTreeSet},
|
||||
mem::take,
|
||||
};
|
||||
|
||||
|
@ -12,8 +12,10 @@ use crate::{
|
|||
uniq_jump_table_entries,
|
||||
vm::{is_store_op, BranchTarget, GprValue, StepResult, VM},
|
||||
},
|
||||
obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol, ObjSymbolKind},
|
||||
util::nested::NestedVec,
|
||||
obj::{
|
||||
ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol,
|
||||
ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
|
@ -99,80 +101,38 @@ impl Tracker {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
// fn update_stack_address(&mut self, addr: u32) {
|
||||
// if let Some(db_stack_addr) = self.db_stack_addr {
|
||||
// if db_stack_addr == addr {
|
||||
// return;
|
||||
// }
|
||||
// }
|
||||
// if let Some(stack_addr) = self.stack_address {
|
||||
// if stack_addr != addr {
|
||||
// log::error!("Stack address overridden from {:#010X} to {:#010X}", stack_addr, addr);
|
||||
// return;
|
||||
// }
|
||||
// }
|
||||
// log::debug!("Located stack address: {:08X}", addr);
|
||||
// self.stack_address = Some(addr);
|
||||
// let db_stack_addr = addr + 0x2000;
|
||||
// self.db_stack_addr = Some(db_stack_addr);
|
||||
// self.arena_lo = Some((db_stack_addr + 0x1F) & !0x1F);
|
||||
// // __ArenaHi is fixed (until it isn't?)
|
||||
// self.arena_hi = Some(0x81700000);
|
||||
// log::debug!("_stack_addr: {:#010X}", addr);
|
||||
// log::debug!("_stack_end: {:#010X}", self.stack_end.unwrap());
|
||||
// log::debug!("_db_stack_addr: {:#010X}", db_stack_addr);
|
||||
// log::debug!("__ArenaLo: {:#010X}", self.arena_lo.unwrap());
|
||||
// log::debug!("__ArenaHi: {:#010X}", self.arena_hi.unwrap());
|
||||
// }
|
||||
|
||||
fn process_code(&mut self, obj: &ObjInfo) -> Result<()> {
|
||||
let mut symbol_map = BTreeMap::new();
|
||||
self.process_function_by_address(obj, obj.entry as u32)?;
|
||||
for section in obj.sections.iter().filter(|s| s.kind == ObjSectionKind::Code) {
|
||||
symbol_map.append(&mut obj.build_symbol_map(section.index)?);
|
||||
}
|
||||
self.process_function_by_address(obj, &symbol_map, obj.entry as u32)?;
|
||||
'outer: for (&addr, symbols) in &symbol_map {
|
||||
if self.processed_functions.contains(&addr) {
|
||||
for (_, symbol) in obj
|
||||
.symbols
|
||||
.for_range(section.address as u32..(section.address + section.size) as u32)
|
||||
.filter(|(_, symbol)| symbol.kind == ObjSymbolKind::Function && symbol.size_known)
|
||||
{
|
||||
let addr = symbol.address as u32;
|
||||
if !self.processed_functions.insert(addr) {
|
||||
continue;
|
||||
}
|
||||
self.processed_functions.insert(addr);
|
||||
for &symbol_idx in symbols {
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
if symbol.kind == ObjSymbolKind::Function && symbol.size_known {
|
||||
self.process_function(obj, symbol)?;
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Special handling for gTRKInterruptVectorTable
|
||||
// TODO
|
||||
// if let (Some(trk_interrupt_table), Some(trk_interrupt_vector_table_end)) = (
|
||||
// obj.symbols.iter().find(|sym| sym.name == "gTRKInterruptVectorTable"),
|
||||
// obj.symbols.iter().find(|sym| sym.name == "gTRKInterruptVectorTableEnd"),
|
||||
// ) {}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn process_function_by_address(
|
||||
&mut self,
|
||||
obj: &ObjInfo,
|
||||
symbol_map: &BTreeMap<u32, Vec<usize>>,
|
||||
addr: u32,
|
||||
) -> Result<()> {
|
||||
fn process_function_by_address(&mut self, obj: &ObjInfo, addr: u32) -> Result<()> {
|
||||
if self.processed_functions.contains(&addr) {
|
||||
return Ok(());
|
||||
}
|
||||
self.processed_functions.insert(addr);
|
||||
if let Some(symbols) = symbol_map.get(&addr) {
|
||||
for &symbol_idx in symbols {
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
if symbol.kind == ObjSymbolKind::Function && symbol.size_known {
|
||||
if let Some((_, symbol)) = obj
|
||||
.symbols
|
||||
.at_address(addr)
|
||||
.find(|(_, symbol)| symbol.kind == ObjSymbolKind::Function && symbol.size_known)
|
||||
{
|
||||
self.process_function(obj, symbol)?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::warn!("Failed to locate function symbol @ {:#010X}", addr);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -189,12 +149,9 @@ impl Tracker {
|
|||
|
||||
match result {
|
||||
StepResult::Continue => {
|
||||
// if ins.addr == 0x8000ed0c || ins.addr == 0x8000ed08 || ins.addr == 0x8000ca50 {
|
||||
// println!("ok");
|
||||
// }
|
||||
match ins.op {
|
||||
Opcode::Addi | Opcode::Addic | Opcode::Addic_ => {
|
||||
// addi rD, rA, SIMM
|
||||
Opcode::Addi | Opcode::Addic | Opcode::Addic_ => {
|
||||
let source = ins.field_rA();
|
||||
let target = ins.field_rD();
|
||||
if let GprValue::Constant(value) = vm.gpr[target].value {
|
||||
|
@ -224,8 +181,8 @@ impl Tracker {
|
|||
}
|
||||
}
|
||||
}
|
||||
Opcode::Ori => {
|
||||
// ori rA, rS, UIMM
|
||||
Opcode::Ori => {
|
||||
let target = ins.field_rA();
|
||||
if let GprValue::Constant(value) = vm.gpr[target].value {
|
||||
if self.is_valid_address(obj, ins.addr, value) {
|
||||
|
@ -416,6 +373,11 @@ impl Tracker {
|
|||
if self.ignore_addresses.contains(&addr) {
|
||||
return false;
|
||||
}
|
||||
if let Some((&start, &end)) = obj.blocked_ranges.range(..=from).last() {
|
||||
if from >= start && from < end {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if self.known_relocations.contains(&from) {
|
||||
return true;
|
||||
}
|
||||
|
@ -432,12 +394,10 @@ impl Tracker {
|
|||
// if addr > 0x80000000 && addr < 0x80003100 {
|
||||
// return true;
|
||||
// }
|
||||
for section in &obj.sections {
|
||||
if addr >= section.address as u32 && addr <= (section.address + section.size) as u32 {
|
||||
if let Ok(section) = obj.section_at(addr) {
|
||||
// References to code sections will never be unaligned
|
||||
return section.kind != ObjSectionKind::Code || addr & 3 == 0;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
|
@ -451,16 +411,16 @@ impl Tracker {
|
|||
return None;
|
||||
}
|
||||
// HACK for RSOStaticLocateObject
|
||||
for section in &obj.sections {
|
||||
if addr == section.address as u32 {
|
||||
let name = format!("_f_{}", section.name.trim_start_matches('.'));
|
||||
return Some(generate_special_symbol(obj, addr, &name));
|
||||
}
|
||||
}
|
||||
// for section in &obj.sections {
|
||||
// if addr == section.address as u32 {
|
||||
// let name = format!("_f_{}", section.name.trim_start_matches('.'));
|
||||
// return generate_special_symbol(obj, addr, &name).ok();
|
||||
// }
|
||||
// }
|
||||
let mut check_symbol = |opt: Option<u32>, name: &str| -> Option<usize> {
|
||||
if let Some(value) = opt {
|
||||
if addr == value {
|
||||
return Some(generate_special_symbol(obj, value, name));
|
||||
return generate_special_symbol(obj, value, name).ok();
|
||||
}
|
||||
}
|
||||
None
|
||||
|
@ -475,11 +435,22 @@ impl Tracker {
|
|||
}
|
||||
|
||||
pub fn apply(&self, obj: &mut ObjInfo, replace: bool) -> Result<()> {
|
||||
fn apply_section_name(section: &mut ObjSection, name: &str) {
|
||||
let module_id = if let Some((_, b)) = section.name.split_once(':') {
|
||||
b.parse::<u32>().unwrap_or(0)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
let new_name =
|
||||
if module_id == 0 { name.to_string() } else { format!("{}:{}", name, module_id) };
|
||||
log::debug!("Renaming {} to {}", section.name, new_name);
|
||||
section.name = new_name;
|
||||
}
|
||||
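Aside (not part of the commit): apply_section_name above preserves a trailing ":<module_id>" suffix while swapping the base name, so a section like ".text:1" keeps its module id when renamed. The string handling on its own, as a sketch without ObjSection:

// Rename a section but keep any nonzero numeric ":<module_id>" suffix from the old name.
fn renamed_section(old_name: &str, new_base: &str) -> String {
    match old_name.split_once(':').and_then(|(_, id)| id.parse::<u32>().ok()) {
        Some(module_id) if module_id != 0 => format!("{}:{}", new_base, module_id),
        _ => new_base.to_string(),
    }
}
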
|
||||
for section in &mut obj.sections {
|
||||
if !section.section_known {
|
||||
if section.kind == ObjSectionKind::Code {
|
||||
log::info!("Renaming {} to .text", section.name);
|
||||
section.name = ".text".to_string();
|
||||
apply_section_name(section, ".text");
|
||||
continue;
|
||||
}
|
||||
let start = section.address as u32;
|
||||
|
@ -487,39 +458,32 @@ impl Tracker {
|
|||
if self.sda_to.range(start..end).next().is_some() {
|
||||
if self.stores_to.range(start..end).next().is_some() {
|
||||
if section.kind == ObjSectionKind::Bss {
|
||||
log::info!("Renaming {} to .sbss", section.name);
|
||||
section.name = ".sbss".to_string();
|
||||
apply_section_name(section, ".sbss");
|
||||
} else {
|
||||
log::info!("Renaming {} to .sdata", section.name);
|
||||
section.name = ".sdata".to_string();
|
||||
apply_section_name(section, ".sdata");
|
||||
}
|
||||
} else if section.kind == ObjSectionKind::Bss {
|
||||
log::info!("Renaming {} to .sbss2", section.name);
|
||||
section.name = ".sbss2".to_string();
|
||||
apply_section_name(section, ".sbss2");
|
||||
} else {
|
||||
log::info!("Renaming {} to .sdata2", section.name);
|
||||
section.name = ".sdata2".to_string();
|
||||
apply_section_name(section, ".sdata2");
|
||||
section.kind = ObjSectionKind::ReadOnlyData;
|
||||
}
|
||||
} else if self.hal_to.range(start..end).next().is_some() {
|
||||
if section.kind == ObjSectionKind::Bss {
|
||||
log::info!("Renaming {} to .bss", section.name);
|
||||
section.name = ".bss".to_string();
|
||||
apply_section_name(section, ".bss");
|
||||
} else if self.stores_to.range(start..end).next().is_some() {
|
||||
log::info!("Renaming {} to .data", section.name);
|
||||
section.name = ".data".to_string();
|
||||
apply_section_name(section, ".data");
|
||||
} else {
|
||||
log::info!("Renaming {} to .rodata", section.name);
|
||||
section.name = ".rodata".to_string();
|
||||
apply_section_name(section, ".rodata");
|
||||
section.kind = ObjSectionKind::ReadOnlyData;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut symbol_maps = Vec::new();
|
||||
let mut relocation_maps = Vec::new();
|
||||
for section in &obj.sections {
|
||||
symbol_maps.push(obj.build_symbol_map(section.index)?);
|
||||
relocation_maps.push(section.build_relocation_map()?);
|
||||
}
|
||||
|
||||
for (addr, reloc) in &self.relocations {
|
||||
|
@ -533,6 +497,18 @@ impl Tracker {
|
|||
Relocation::Rel24(v) => (ObjRelocKind::PpcRel24, v),
|
||||
Relocation::Absolute(v) => (ObjRelocKind::Absolute, v),
|
||||
};
|
||||
let data_kind = self
|
||||
.data_types
|
||||
.get(&target)
|
||||
.map(|dt| match dt {
|
||||
DataKind::Unknown => ObjDataKind::Unknown,
|
||||
DataKind::Word => ObjDataKind::Byte4,
|
||||
DataKind::Half => ObjDataKind::Byte2,
|
||||
DataKind::Byte => ObjDataKind::Byte,
|
||||
DataKind::Float => ObjDataKind::Float,
|
||||
DataKind::Double => ObjDataKind::Double,
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let (target_symbol, addend) =
|
||||
if let Some(symbol) = self.special_symbol(obj, target, reloc_kind) {
|
||||
(symbol, 0)
|
||||
|
@ -544,16 +520,15 @@ impl Tracker {
|
|||
None => continue,
|
||||
};
|
||||
// Try to find a previous sized symbol that encompasses the target
|
||||
let sym_map = &mut symbol_maps[target_section.index];
|
||||
let target_symbol = {
|
||||
let mut result = None;
|
||||
for (_addr, symbol_idxs) in sym_map.range(..=target).rev() {
|
||||
for (_addr, symbol_idxs) in obj.symbols.indexes_for_range(..=target).rev() {
|
||||
let symbol_idx = if symbol_idxs.len() == 1 {
|
||||
symbol_idxs.first().cloned().unwrap()
|
||||
} else {
|
||||
let mut symbol_idxs = symbol_idxs.clone();
|
||||
let mut symbol_idxs = symbol_idxs.to_vec();
|
||||
symbol_idxs.sort_by_key(|&symbol_idx| {
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
let symbol = obj.symbols.at(symbol_idx);
|
||||
let mut rank = match symbol.kind {
|
||||
ObjSymbolKind::Function | ObjSymbolKind::Object => {
|
||||
match reloc_kind {
|
||||
|
@ -589,7 +564,7 @@ impl Tracker {
|
|||
None => continue,
|
||||
}
|
||||
};
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
let symbol = obj.symbols.at(symbol_idx);
|
||||
if symbol.address == target as u64 {
|
||||
result = Some(symbol_idx);
|
||||
break;
|
||||
|
@ -604,12 +579,20 @@ impl Tracker {
|
|||
result
|
||||
};
|
||||
if let Some(symbol_idx) = target_symbol {
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
(symbol_idx, target as i64 - symbol.address as i64)
|
||||
let symbol = obj.symbols.at(symbol_idx);
|
||||
let symbol_address = symbol.address;
|
||||
// TODO meh
|
||||
if data_kind != ObjDataKind::Unknown
|
||||
&& symbol.data_kind == ObjDataKind::Unknown
|
||||
&& symbol_address as u32 == target
|
||||
{
|
||||
obj.symbols
|
||||
.replace(symbol_idx, ObjSymbol { data_kind, ..symbol.clone() })?;
|
||||
}
|
||||
(symbol_idx, target as i64 - symbol_address as i64)
|
||||
} else {
|
||||
// Create a new label
|
||||
let symbol_idx = obj.symbols.len();
|
||||
obj.symbols.push(ObjSymbol {
|
||||
let symbol_idx = obj.symbols.add_direct(ObjSymbol {
|
||||
name: format!("lbl_{:08X}", target),
|
||||
demangled_name: None,
|
||||
address: target as u64,
|
||||
|
@ -618,8 +601,9 @@ impl Tracker {
|
|||
size_known: false,
|
||||
flags: Default::default(),
|
||||
kind: Default::default(),
|
||||
});
|
||||
sym_map.nested_push(target, symbol_idx);
|
||||
align: None,
|
||||
data_kind,
|
||||
})?;
|
||||
(symbol_idx, 0)
|
||||
}
|
||||
};
|
||||
|
@ -636,25 +620,35 @@ impl Tracker {
|
|||
reloc
|
||||
),
|
||||
};
|
||||
match section.relocations.iter_mut().find(|r| r.address as u32 == addr) {
|
||||
Some(v) => {
|
||||
let iter_symbol = &obj.symbols[v.target_symbol];
|
||||
let reloc_symbol = &obj.symbols[reloc.target_symbol];
|
||||
|
||||
let reloc_map = &mut relocation_maps[section.index];
|
||||
match reloc_map.entry(addr) {
|
||||
Entry::Vacant(e) => {
|
||||
e.insert(section.relocations.len());
|
||||
section.relocations.push(reloc);
|
||||
}
|
||||
Entry::Occupied(e) => {
|
||||
let reloc_symbol = obj.symbols.at(reloc.target_symbol);
|
||||
if reloc_symbol.name != "_unresolved" {
|
||||
let v = &mut section.relocations[*e.get()];
|
||||
let iter_symbol = obj.symbols.at(v.target_symbol);
|
||||
if iter_symbol.address as i64 + v.addend
|
||||
!= reloc_symbol.address as i64 + reloc.addend
|
||||
{
|
||||
bail!(
|
||||
"Conflicting relocations (target {:#010X}): {:#010X?} != {:#010X?}",
|
||||
"Conflicting relocations (target {:#010X}): {:#010X?} ({}) != {:#010X?} ({})",
|
||||
target,
|
||||
v,
|
||||
reloc
|
||||
iter_symbol.name,
|
||||
reloc,
|
||||
reloc_symbol.name
|
||||
);
|
||||
}
|
||||
if replace {
|
||||
*v = reloc;
|
||||
}
|
||||
}
|
||||
None => section.relocations.push(reloc),
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -716,17 +710,16 @@ fn data_kind_from_op(op: Opcode) -> DataKind {
|
|||
}
|
||||
}
|
||||
|
||||
fn generate_special_symbol(obj: &mut ObjInfo, addr: u32, name: &str) -> usize {
|
||||
if let Some((symbol_idx, _)) =
|
||||
obj.symbols.iter().enumerate().find(|&(_, symbol)| symbol.name == name)
|
||||
{
|
||||
return symbol_idx;
|
||||
}
|
||||
let symbol_idx = obj.symbols.len();
|
||||
obj.symbols.push(ObjSymbol {
|
||||
fn generate_special_symbol(obj: &mut ObjInfo, addr: u32, name: &str) -> Result<usize> {
|
||||
obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: name.to_string(),
|
||||
address: addr as u64,
|
||||
size: 0,
|
||||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
|
||||
..Default::default()
|
||||
});
|
||||
symbol_idx
|
||||
},
|
||||
true,
|
||||
)
|
||||
}
|
||||
|
|
|
@ -162,8 +162,8 @@ impl VM {
|
|||
Opcode::Illegal => {
|
||||
return StepResult::Illegal;
|
||||
}
|
||||
Opcode::Add => {
|
||||
// add rD, rA, rB
|
||||
Opcode::Add => {
|
||||
let left = self.gpr[ins.field_rA()].value;
|
||||
let right = self.gpr[ins.field_rB()].value;
|
||||
let value = match (left, right) {
|
||||
|
@ -174,8 +174,8 @@ impl VM {
|
|||
};
|
||||
self.gpr[ins.field_rD()].set_direct(value);
|
||||
}
|
||||
Opcode::Addis => {
|
||||
// addis rD, rA, SIMM
|
||||
Opcode::Addis => {
|
||||
let left = if ins.field_rA() == 0 {
|
||||
GprValue::Constant(0)
|
||||
} else {
|
||||
|
@ -194,10 +194,10 @@ impl VM {
|
|||
self.gpr[ins.field_rD()].set_direct(value);
|
||||
}
|
||||
}
|
||||
Opcode::Addi | Opcode::Addic | Opcode::Addic_ => {
|
||||
// addi rD, rA, SIMM
|
||||
// addic rD, rA, SIMM
|
||||
// addic. rD, rA, SIMM
|
||||
Opcode::Addi | Opcode::Addic | Opcode::Addic_ => {
|
||||
let left = if ins.field_rA() == 0 && ins.op == Opcode::Addi {
|
||||
GprValue::Constant(0)
|
||||
} else {
|
||||
|
@ -216,8 +216,8 @@ impl VM {
|
|||
self.gpr[ins.field_rD()].set_lo(value, ins.addr, self.gpr[ins.field_rA()]);
|
||||
}
|
||||
}
|
||||
Opcode::Ori => {
|
||||
// ori rA, rS, UIMM
|
||||
Opcode::Ori => {
|
||||
let value = match self.gpr[ins.field_rS()].value {
|
||||
GprValue::Constant(value) => {
|
||||
GprValue::Constant(value | ins.field_uimm() as u32)
|
||||
|
@ -226,8 +226,8 @@ impl VM {
|
|||
};
|
||||
self.gpr[ins.field_rA()].set_lo(value, ins.addr, self.gpr[ins.field_rS()]);
|
||||
}
|
||||
Opcode::Or => {
|
||||
// or rA, rS, rB
|
||||
Opcode::Or => {
|
||||
if ins.field_rS() == ins.field_rB() {
|
||||
// Register copy
|
||||
self.gpr[ins.field_rA()] = self.gpr[ins.field_rS()];
|
||||
|
@ -428,12 +428,9 @@ impl VM {
|
|||
}
|
||||
_ => {
|
||||
for field in ins.defs() {
|
||||
match field.argument() {
|
||||
Some(Argument::GPR(GPR(reg))) => {
|
||||
if let Some(Argument::GPR(GPR(reg))) = field.argument() {
|
||||
self.gpr[reg as usize].set_direct(GprValue::Unknown);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use std::{
|
||||
collections::{btree_map::Entry, BTreeMap},
|
||||
fs::File,
|
||||
io::{BufRead, BufWriter, Write},
|
||||
io::{BufWriter, Write},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
|
@ -9,7 +9,7 @@ use anyhow::{anyhow, bail, Result};
|
|||
use argh::FromArgs;
|
||||
use object::{Object, ObjectSymbol, SymbolScope};
|
||||
|
||||
use crate::util::file::{buf_reader, map_file};
|
||||
use crate::util::file::{map_file, process_rsp};
|
||||
|
||||
#[derive(FromArgs, PartialEq, Debug)]
|
||||
/// Commands for processing static libraries.
|
||||
|
@ -45,25 +45,7 @@ pub fn run(args: Args) -> Result<()> {
|
|||
|
||||
fn create(args: CreateArgs) -> Result<()> {
|
||||
// Process response files (starting with '@')
|
||||
let mut files = Vec::with_capacity(args.files.len());
|
||||
for path in args.files {
|
||||
let path_str =
|
||||
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
|
||||
match path_str.strip_prefix('@') {
|
||||
Some(rsp_file) => {
|
||||
let reader = buf_reader(rsp_file)?;
|
||||
for result in reader.lines() {
|
||||
let line = result?;
|
||||
if !line.is_empty() {
|
||||
files.push(PathBuf::from(line));
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
let files = process_rsp(&args.files)?;
|
||||
|
||||
// Build identifiers & symbol table
|
||||
let mut identifiers = Vec::with_capacity(files.len());
|
||||
src/cmd/dol.rs (534)
@ -1,11 +1,10 @@
|
|||
use std::{
|
||||
collections::BTreeMap,
|
||||
collections::{hash_map, BTreeMap, HashMap},
|
||||
fs,
|
||||
fs::{DirBuilder, File},
|
||||
io::{BufRead, BufWriter, Write},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use std::collections::{hash_map, HashMap};
|
||||
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use argh::FromArgs;
|
||||
|
@ -13,25 +12,24 @@ use argh::FromArgs;
|
|||
use crate::{
|
||||
analysis::{
|
||||
cfa::AnalyzerState,
|
||||
objects::{detect_object_boundaries, detect_strings},
|
||||
pass::{AnalysisPass, FindSaveRestSleds, FindTRKInterruptVectorTable},
|
||||
read_u32,
|
||||
signatures::{apply_signatures, apply_signatures_post},
|
||||
tracker::Tracker,
|
||||
},
|
||||
obj::{
|
||||
signatures::{apply_signature, check_signatures, check_signatures_str, parse_signatures},
|
||||
split::split_obj,
|
||||
split::{split_obj, update_splits},
|
||||
ObjInfo, ObjRelocKind, ObjSectionKind, ObjSymbolKind,
|
||||
},
|
||||
util::{
|
||||
asm::write_asm,
|
||||
config::{apply_splits, parse_symbol_line, write_symbols},
|
||||
config::{apply_splits, parse_symbol_line, write_splits, write_symbols},
|
||||
dol::process_dol,
|
||||
elf::process_elf,
|
||||
elf::{process_elf, write_elf},
|
||||
file::{map_file, map_reader},
|
||||
map::process_map,
|
||||
lcf::{asm_path_for_unit, generate_ldscript, obj_path_for_unit},
|
||||
},
|
||||
};
|
||||
use crate::util::elf::write_elf;
|
||||
|
||||
#[derive(FromArgs, PartialEq, Debug)]
|
||||
/// Commands for processing DOL files.
|
||||
|
@ -44,32 +42,8 @@ pub struct Args {
|
|||
#[derive(FromArgs, PartialEq, Debug)]
|
||||
#[argh(subcommand)]
|
||||
enum SubCommand {
|
||||
Disasm(DisasmArgs),
|
||||
Info(InfoArgs),
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// disassembles a DOL file
|
||||
#[argh(subcommand, name = "disasm")]
|
||||
pub struct DisasmArgs {
|
||||
#[argh(option, short = 'm')]
|
||||
/// path to input map
|
||||
map_file: Option<PathBuf>,
|
||||
#[argh(option, short = 's')]
|
||||
/// path to symbols file
|
||||
symbols_file: Option<PathBuf>,
|
||||
#[argh(option, short = 'p')]
|
||||
/// path to splits file
|
||||
splits_file: Option<PathBuf>,
|
||||
#[argh(option, short = 'e')]
|
||||
/// ELF file to validate against (debugging only)
|
||||
elf_file: Option<PathBuf>,
|
||||
#[argh(positional)]
|
||||
/// DOL file
|
||||
dol_file: PathBuf,
|
||||
#[argh(option, short = 'o')]
|
||||
/// output file (or directory, if splitting)
|
||||
out: PathBuf,
|
||||
Split(SplitArgs),
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
|
@ -81,301 +55,39 @@ pub struct InfoArgs {
|
|||
dol_file: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Splits a DOL into relocatable objects.
|
||||
#[argh(subcommand, name = "split")]
|
||||
pub struct SplitArgs {
|
||||
#[argh(positional)]
|
||||
/// input file
|
||||
in_file: PathBuf,
|
||||
#[argh(positional)]
|
||||
/// output directory
|
||||
out_dir: PathBuf,
|
||||
#[argh(option, short = 's')]
|
||||
/// path to symbols file
|
||||
symbols_file: Option<PathBuf>,
|
||||
#[argh(option, short = 'p')]
|
||||
/// path to splits file
|
||||
splits_file: Option<PathBuf>,
|
||||
#[argh(option, short = 'e')]
|
||||
/// ELF file to validate against (debugging only)
|
||||
elf_file: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub fn run(args: Args) -> Result<()> {
|
||||
match args.command {
|
||||
SubCommand::Disasm(c_args) => disasm(c_args),
|
||||
SubCommand::Info(c_args) => info(c_args),
|
||||
SubCommand::Split(c_args) => split(c_args),
|
||||
}
|
||||
}
|
||||
|
||||
const SIGNATURES: &[(&str, &str)] = &[
|
||||
("__init_registers", include_str!("../../assets/signatures/__init_registers.yml")),
|
||||
("__init_hardware", include_str!("../../assets/signatures/__init_hardware.yml")),
|
||||
("__init_data", include_str!("../../assets/signatures/__init_data.yml")),
|
||||
("__set_debug_bba", include_str!("../../assets/signatures/__set_debug_bba.yml")),
|
||||
("__OSPSInit", include_str!("../../assets/signatures/__OSPSInit.yml")),
|
||||
("__OSFPRInit", include_str!("../../assets/signatures/__OSFPRInit.yml")),
|
||||
("__OSCacheInit", include_str!("../../assets/signatures/__OSCacheInit.yml")),
|
||||
("DMAErrorHandler", include_str!("../../assets/signatures/DMAErrorHandler.yml")),
|
||||
("DBInit", include_str!("../../assets/signatures/DBInit.yml")),
|
||||
("OSInit", include_str!("../../assets/signatures/OSInit.yml")),
|
||||
("__OSThreadInit", include_str!("../../assets/signatures/__OSThreadInit.yml")),
|
||||
("__OSInitIPCBuffer", include_str!("../../assets/signatures/__OSInitIPCBuffer.yml")),
|
||||
("EXIInit", include_str!("../../assets/signatures/EXIInit.yml")),
|
||||
("EXIGetID", include_str!("../../assets/signatures/EXIGetID.yml")),
|
||||
("exit", include_str!("../../assets/signatures/exit.yml")),
|
||||
("_ExitProcess", include_str!("../../assets/signatures/_ExitProcess.yml")),
|
||||
("__fini_cpp", include_str!("../../assets/signatures/__fini_cpp.yml")),
|
||||
("__destroy_global_chain", include_str!("../../assets/signatures/__destroy_global_chain.yml")),
|
||||
("InitMetroTRK", include_str!("../../assets/signatures/InitMetroTRK.yml")),
|
||||
("InitMetroTRKCommTable", include_str!("../../assets/signatures/InitMetroTRKCommTable.yml")),
|
||||
("OSExceptionInit", include_str!("../../assets/signatures/OSExceptionInit.yml")),
|
||||
(
|
||||
"OSDefaultExceptionHandler",
|
||||
include_str!("../../assets/signatures/OSDefaultExceptionHandler.yml"),
|
||||
),
|
||||
("__OSUnhandledException", include_str!("../../assets/signatures/__OSUnhandledException.yml")),
|
||||
("OSDisableScheduler", include_str!("../../assets/signatures/OSDisableScheduler.yml")),
|
||||
("__OSReschedule", include_str!("../../assets/signatures/__OSReschedule.yml")),
|
||||
("__OSInitSystemCall", include_str!("../../assets/signatures/__OSInitSystemCall.yml")),
|
||||
("OSInitAlarm", include_str!("../../assets/signatures/OSInitAlarm.yml")),
|
||||
("__OSInitAlarm", include_str!("../../assets/signatures/__OSInitAlarm.yml")),
|
||||
("__OSEVStart", include_str!("../../assets/signatures/OSExceptionVector.yml")),
|
||||
("__OSDBINTSTART", include_str!("../../assets/signatures/__OSDBIntegrator.yml")),
|
||||
("__OSDBJUMPSTART", include_str!("../../assets/signatures/__OSDBJump.yml")),
|
||||
("SIInit", include_str!("../../assets/signatures/SIInit.yml")),
|
||||
("SIGetType", include_str!("../../assets/signatures/SIGetType.yml")),
|
||||
("SISetSamplingRate", include_str!("../../assets/signatures/SISetSamplingRate.yml")),
|
||||
("SISetXY", include_str!("../../assets/signatures/SISetXY.yml")),
|
||||
("VIGetTvFormat", include_str!("../../assets/signatures/VIGetTvFormat.yml")),
|
||||
("DVDInit", include_str!("../../assets/signatures/DVDInit.yml")),
|
||||
(
|
||||
"DVDSetAutoFatalMessaging",
|
||||
include_str!("../../assets/signatures/DVDSetAutoFatalMessaging.yml"),
|
||||
),
|
||||
("OSSetArenaLo", include_str!("../../assets/signatures/OSSetArenaLo.yml")),
|
||||
("OSSetArenaHi", include_str!("../../assets/signatures/OSSetArenaHi.yml")),
|
||||
("OSSetMEM1ArenaLo", include_str!("../../assets/signatures/OSSetMEM1ArenaLo.yml")),
|
||||
("OSSetMEM1ArenaHi", include_str!("../../assets/signatures/OSSetMEM1ArenaHi.yml")),
|
||||
("OSSetMEM2ArenaLo", include_str!("../../assets/signatures/OSSetMEM2ArenaLo.yml")),
|
||||
("OSSetMEM2ArenaHi", include_str!("../../assets/signatures/OSSetMEM2ArenaHi.yml")),
|
||||
("__OSInitAudioSystem", include_str!("../../assets/signatures/__OSInitAudioSystem.yml")),
|
||||
(
|
||||
"__OSInitMemoryProtection",
|
||||
include_str!("../../assets/signatures/__OSInitMemoryProtection.yml"),
|
||||
),
|
||||
// ("BATConfig", include_str!("../../assets/signatures/BATConfig.yml")), TODO
|
||||
("ReportOSInfo", include_str!("../../assets/signatures/ReportOSInfo.yml")),
|
||||
("__check_pad3", include_str!("../../assets/signatures/__check_pad3.yml")),
|
||||
("OSResetSystem", include_str!("../../assets/signatures/OSResetSystem.yml")),
|
||||
("OSReturnToMenu", include_str!("../../assets/signatures/OSReturnToMenu.yml")),
|
||||
("__OSReturnToMenu", include_str!("../../assets/signatures/__OSReturnToMenu.yml")),
|
||||
("__OSShutdownDevices", include_str!("../../assets/signatures/__OSShutdownDevices.yml")),
|
||||
("__OSInitSram", include_str!("../../assets/signatures/__OSInitSram.yml")),
|
||||
("__OSSyncSram", include_str!("../../assets/signatures/__OSSyncSram.yml")),
|
||||
(
|
||||
"__OSGetExceptionHandler",
|
||||
include_str!("../../assets/signatures/__OSGetExceptionHandler.yml"),
|
||||
),
|
||||
(
|
||||
"OSRegisterResetFunction",
|
||||
include_str!("../../assets/signatures/OSRegisterResetFunction.yml"),
|
||||
),
|
||||
(
|
||||
"OSRegisterShutdownFunction",
|
||||
include_str!("../../assets/signatures/OSRegisterShutdownFunction.yml"),
|
||||
),
|
||||
(
|
||||
"DecrementerExceptionHandler",
|
||||
include_str!("../../assets/signatures/DecrementerExceptionHandler.yml"),
|
||||
),
|
||||
(
|
||||
"DecrementerExceptionCallback",
|
||||
include_str!("../../assets/signatures/DecrementerExceptionCallback.yml"),
|
||||
),
|
||||
("__OSInterruptInit", include_str!("../../assets/signatures/__OSInterruptInit.yml")),
|
||||
("__OSContextInit", include_str!("../../assets/signatures/__OSContextInit.yml")),
|
||||
("OSSwitchFPUContext", include_str!("../../assets/signatures/OSSwitchFPUContext.yml")),
|
||||
("OSReport", include_str!("../../assets/signatures/OSReport.yml")),
|
||||
("TRK_main", include_str!("../../assets/signatures/TRK_main.yml")),
|
||||
("TRKNubWelcome", include_str!("../../assets/signatures/TRKNubWelcome.yml")),
|
||||
("TRKInitializeNub", include_str!("../../assets/signatures/TRKInitializeNub.yml")),
|
||||
(
|
||||
"TRKInitializeIntDrivenUART",
|
||||
include_str!("../../assets/signatures/TRKInitializeIntDrivenUART.yml"),
|
||||
),
|
||||
("TRKEXICallBack", include_str!("../../assets/signatures/TRKEXICallBack.yml")),
|
||||
("TRKLoadContext", include_str!("../../assets/signatures/TRKLoadContext.yml")),
|
||||
("TRKInterruptHandler", include_str!("../../assets/signatures/TRKInterruptHandler.yml")),
|
||||
("TRKExceptionHandler", include_str!("../../assets/signatures/TRKExceptionHandler.yml")),
|
||||
("TRKSaveExtended1Block", include_str!("../../assets/signatures/TRKSaveExtended1Block.yml")),
|
||||
("TRKNubMainLoop", include_str!("../../assets/signatures/TRKNubMainLoop.yml")),
|
||||
("TRKTargetContinue", include_str!("../../assets/signatures/TRKTargetContinue.yml")),
|
||||
("TRKSwapAndGo", include_str!("../../assets/signatures/TRKSwapAndGo.yml")),
|
||||
(
|
||||
"TRKRestoreExtended1Block",
|
||||
include_str!("../../assets/signatures/TRKRestoreExtended1Block.yml"),
|
||||
),
|
||||
(
|
||||
"TRKInterruptHandlerEnableInterrupts",
|
||||
include_str!("../../assets/signatures/TRKInterruptHandlerEnableInterrupts.yml"),
|
||||
),
|
||||
("memset", include_str!("../../assets/signatures/memset.yml")),
|
||||
(
|
||||
"__msl_runtime_constraint_violation_s",
|
||||
include_str!("../../assets/signatures/__msl_runtime_constraint_violation_s.yml"),
|
||||
),
|
||||
("ClearArena", include_str!("../../assets/signatures/ClearArena.yml")),
|
||||
("IPCCltInit", include_str!("../../assets/signatures/IPCCltInit.yml")),
|
||||
("__OSInitSTM", include_str!("../../assets/signatures/__OSInitSTM.yml")),
|
||||
("IOS_Open", include_str!("../../assets/signatures/IOS_Open.yml")),
|
||||
("__ios_Ipc2", include_str!("../../assets/signatures/__ios_Ipc2.yml")),
|
||||
("IPCiProfQueueReq", include_str!("../../assets/signatures/IPCiProfQueueReq.yml")),
|
||||
("SCInit", include_str!("../../assets/signatures/SCInit.yml")),
|
||||
("SCReloadConfFileAsync", include_str!("../../assets/signatures/SCReloadConfFileAsync.yml")),
|
||||
("NANDPrivateOpenAsync", include_str!("../../assets/signatures/NANDPrivateOpenAsync.yml")),
|
||||
("nandIsInitialized", include_str!("../../assets/signatures/nandIsInitialized.yml")),
|
||||
("nandOpen", include_str!("../../assets/signatures/nandOpen.yml")),
|
||||
("nandGenerateAbsPath", include_str!("../../assets/signatures/nandGenerateAbsPath.yml")),
|
||||
("nandGetHeadToken", include_str!("../../assets/signatures/nandGetHeadToken.yml")),
|
||||
("ISFS_OpenAsync", include_str!("../../assets/signatures/ISFS_OpenAsync.yml")),
|
||||
("nandConvertErrorCode", include_str!("../../assets/signatures/nandConvertErrorCode.yml")),
|
||||
(
|
||||
"NANDLoggingAddMessageAsync",
|
||||
include_str!("../../assets/signatures/NANDLoggingAddMessageAsync.yml"),
|
||||
),
|
||||
(
|
||||
"__NANDPrintErrorMessage",
|
||||
include_str!("../../assets/signatures/__NANDPrintErrorMessage.yml"),
|
||||
),
|
||||
("__OSInitNet", include_str!("../../assets/signatures/__OSInitNet.yml")),
|
||||
("__DVDCheckDevice", include_str!("../../assets/signatures/__DVDCheckDevice.yml")),
|
||||
("__OSInitPlayTime", include_str!("../../assets/signatures/__OSInitPlayTime.yml")),
|
||||
("__OSStartPlayRecord", include_str!("../../assets/signatures/__OSStartPlayRecord.yml")),
|
||||
("NANDInit", include_str!("../../assets/signatures/NANDInit.yml")),
|
||||
("ISFS_OpenLib", include_str!("../../assets/signatures/ISFS_OpenLib.yml")),
|
||||
("ESP_GetTitleId", include_str!("../../assets/signatures/ESP_GetTitleId.yml")),
|
||||
(
|
||||
"NANDSetAutoErrorMessaging",
|
||||
include_str!("../../assets/signatures/NANDSetAutoErrorMessaging.yml"),
|
||||
),
|
||||
("__DVDFSInit", include_str!("../../assets/signatures/__DVDFSInit.yml")),
|
||||
("__DVDClearWaitingQueue", include_str!("../../assets/signatures/__DVDClearWaitingQueue.yml")),
|
||||
("__DVDInitWA", include_str!("../../assets/signatures/__DVDInitWA.yml")),
|
||||
("__DVDLowSetWAType", include_str!("../../assets/signatures/__DVDLowSetWAType.yml")),
|
||||
("__fstLoad", include_str!("../../assets/signatures/__fstLoad.yml")),
|
||||
("DVDReset", include_str!("../../assets/signatures/DVDReset.yml")),
|
||||
("DVDLowReset", include_str!("../../assets/signatures/DVDLowReset.yml")),
|
||||
("DVDReadDiskID", include_str!("../../assets/signatures/DVDReadDiskID.yml")),
|
||||
("stateReady", include_str!("../../assets/signatures/stateReady.yml")),
|
||||
("DVDLowWaitCoverClose", include_str!("../../assets/signatures/DVDLowWaitCoverClose.yml")),
|
||||
("__DVDStoreErrorCode", include_str!("../../assets/signatures/__DVDStoreErrorCode.yml")),
|
||||
("DVDLowStopMotor", include_str!("../../assets/signatures/DVDLowStopMotor.yml")),
|
||||
("DVDGetDriveStatus", include_str!("../../assets/signatures/DVDGetDriveStatus.yml")),
|
||||
("printf", include_str!("../../assets/signatures/printf.yml")),
|
||||
("sprintf", include_str!("../../assets/signatures/sprintf.yml")),
|
||||
("vprintf", include_str!("../../assets/signatures/vprintf.yml")),
|
||||
("vsprintf", include_str!("../../assets/signatures/vsprintf.yml")),
|
||||
("vsnprintf", include_str!("../../assets/signatures/vsnprintf.yml")),
|
||||
("__pformatter", include_str!("../../assets/signatures/__pformatter.yml")),
|
||||
("longlong2str", include_str!("../../assets/signatures/longlong2str.yml")),
|
||||
("__mod2u", include_str!("../../assets/signatures/__mod2u.yml")),
|
||||
("__FileWrite", include_str!("../../assets/signatures/__FileWrite.yml")),
|
||||
("fwrite", include_str!("../../assets/signatures/fwrite.yml")),
|
||||
("__fwrite", include_str!("../../assets/signatures/__fwrite.yml")),
|
||||
("__stdio_atexit", include_str!("../../assets/signatures/__stdio_atexit.yml")),
|
||||
("__StringWrite", include_str!("../../assets/signatures/__StringWrite.yml")),
|
||||
];
|
||||
const POST_SIGNATURES: &[(&str, &str)] = &[
|
||||
("RSOStaticLocateObject", include_str!("../../assets/signatures/RSOStaticLocateObject.yml")),
|
||||
// ("GXInit", include_str!("../../assets/signatures/GXInit.yml")),
|
||||
];
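// Sketch (not part of this commit): each entry above pairs a symbol name with the
// YAML signature text embedded at compile time by include_str!, so a lookup by
// name is just a linear scan over the tables. `find_signature` is a hypothetical
// helper shown for illustration only.
#[allow(dead_code)]
fn find_signature(name: &str) -> Option<&'static str> {
    SIGNATURES
        .iter()
        .chain(POST_SIGNATURES.iter())
        .find(|&&(sig_name, _)| sig_name == name)
        .map(|&(_, yaml)| yaml)
}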
|
||||
|
||||
pub fn apply_signatures(obj: &mut ObjInfo) -> Result<()> {
|
||||
let entry = obj.entry as u32;
|
||||
if let Some(signature) =
|
||||
check_signatures_str(obj, entry, include_str!("../../assets/signatures/__start.yml"))?
|
||||
{
|
||||
apply_signature(obj, entry, &signature)?;
|
||||
}
|
||||
for &(name, sig_str) in SIGNATURES {
|
||||
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == name) {
|
||||
let addr = symbol.address as u32;
|
||||
if let Some(signature) = check_signatures_str(obj, addr, sig_str)? {
|
||||
apply_signature(obj, addr, &signature)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == "__init_user") {
|
||||
// __init_user can be overridden, but we can still look for __init_cpp from it
|
||||
let mut analyzer = AnalyzerState::default();
|
||||
analyzer.process_function_at(&obj, symbol.address as u32)?;
|
||||
for addr in analyzer.function_entries {
|
||||
if let Some(signature) = check_signatures_str(
|
||||
obj,
|
||||
addr,
|
||||
include_str!("../../assets/signatures/__init_cpp.yml"),
|
||||
)? {
|
||||
apply_signature(obj, addr, &signature)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == "_ctors") {
|
||||
// First entry of ctors is __init_cpp_exceptions
|
||||
let section = obj.section_at(symbol.address as u32)?;
|
||||
let target = read_u32(§ion.data, symbol.address as u32, section.address as u32)
|
||||
.ok_or_else(|| anyhow!("Failed to read _ctors data"))?;
|
||||
if target != 0 {
|
||||
if let Some(signature) = check_signatures_str(
|
||||
obj,
|
||||
target,
|
||||
include_str!("../../assets/signatures/__init_cpp_exceptions.yml"),
|
||||
)? {
|
||||
apply_signature(obj, target, &signature)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(symbol) = obj.symbols.iter().find(|symbol| symbol.name == "_dtors") {
|
||||
// Second entry of dtors is __fini_cpp_exceptions
|
||||
let section = obj.section_at(symbol.address as u32)?;
|
||||
let target = read_u32(§ion.data, symbol.address as u32 + 4, section.address as u32)
|
||||
.ok_or_else(|| anyhow!("Failed to read _dtors data"))?;
|
||||
if target != 0 {
|
||||
if let Some(signature) = check_signatures_str(
|
||||
obj,
|
||||
target,
|
||||
include_str!("../../assets/signatures/__fini_cpp_exceptions.yml"),
|
||||
)? {
|
||||
apply_signature(obj, target, &signature)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
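// Hedged sketch of the read_u32 helper used for the _ctors/_dtors lookups above;
// the real implementation lives elsewhere in this crate. On GameCube/Wii the data
// is big-endian, so it presumably reduces to a bounds-checked from_be_bytes read.
#[allow(dead_code)]
fn read_u32_sketch(data: &[u8], addr: u32, section_addr: u32) -> Option<u32> {
    let offset = addr.checked_sub(section_addr)? as usize;
    let bytes = data.get(offset..offset + 4)?;
    Some(u32::from_be_bytes(bytes.try_into().ok()?))
}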
|
||||
|
||||
pub fn apply_signatures_post(obj: &mut ObjInfo) -> Result<()> {
|
||||
log::info!("Checking post CFA signatures...");
|
||||
for &(_name, sig_str) in POST_SIGNATURES {
|
||||
let signatures = parse_signatures(sig_str)?;
|
||||
for symbol in obj.symbols.iter().filter(|symbol| symbol.kind == ObjSymbolKind::Function) {
|
||||
let addr = symbol.address as u32;
|
||||
if let Some(signature) = check_signatures(obj, addr, &signatures)? {
|
||||
apply_signature(obj, addr, &signature)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
log::info!("Done!");
|
||||
Ok(())
|
||||
}
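// Usage sketch (call order only, inferred from the log messages): the plain
// signature pass runs first, and apply_signatures_post is meant for after control
// flow analysis, once function symbols exist to scan.
#[allow(dead_code)]
fn run_signature_passes_sketch(obj: &mut ObjInfo) -> Result<()> {
    apply_signatures(obj)?;
    // ... control flow analysis / function detection would run here ...
    apply_signatures_post(obj)?;
    Ok(())
}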
|
||||
|
||||
fn info(args: InfoArgs) -> Result<()> {
|
||||
let mut obj = process_dol(&args.dol_file)?;
|
||||
apply_signatures(&mut obj)?;
|
||||
// Apply known functions from extab
|
||||
let mut state = AnalyzerState::default();
|
||||
for (&addr, &size) in &obj.known_functions {
|
||||
state.function_entries.insert(addr);
|
||||
state.function_bounds.insert(addr, addr + size);
|
||||
}
|
||||
for symbol in &obj.symbols {
|
||||
if symbol.kind != ObjSymbolKind::Function {
|
||||
continue;
|
||||
}
|
||||
state.function_entries.insert(symbol.address as u32);
|
||||
if !symbol.size_known {
|
||||
continue;
|
||||
}
|
||||
state.function_bounds.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
|
||||
}
|
||||
// Also check the start of each code section
|
||||
for section in &obj.sections {
|
||||
if section.kind == ObjSectionKind::Code {
|
||||
state.function_entries.insert(section.address as u32);
|
||||
}
|
||||
}
|
||||
|
||||
let mut state = AnalyzerState::default();
|
||||
state.detect_functions(&obj)?;
|
||||
log::info!("Discovered {} functions", state.function_slices.len());
|
||||
|
||||
|
@@ -397,9 +109,7 @@ fn info(args: InfoArgs) -> Result<()> {
|
|||
}
|
||||
println!("\nDiscovered symbols:");
|
||||
println!("\t{: >23} | {: <10} | {: <10}", "Name", "Address", "Size");
|
||||
let mut symbols = obj.symbols.clone();
|
||||
symbols.sort_by_key(|sym| sym.address);
|
||||
for symbol in symbols {
|
||||
for (_, symbol) in obj.symbols.for_range(..) {
|
||||
if symbol.name.starts_with('@') || symbol.name.starts_with("fn_") {
|
||||
continue;
|
||||
}
|
||||
|
@ -414,83 +124,39 @@ fn info(args: InfoArgs) -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn disasm(args: DisasmArgs) -> Result<()> {
|
||||
let mut obj = process_dol(&args.dol_file)?;
|
||||
log::info!("Performing initial control flow analysis");
|
||||
fn split(args: SplitArgs) -> Result<()> {
|
||||
log::info!("Loading {}", args.in_file.display());
|
||||
let mut obj = process_dol(&args.in_file)?;
|
||||
|
||||
// if detect_sda_bases(&mut obj).context("Failed to locate SDA bases")? {
|
||||
// let (sda2_base, sda_base) = obj.sda_bases.unwrap();
|
||||
// log::info!("Found _SDA2_BASE_ @ {:#010X}, _SDA_BASE_ @ {:#010X}", sda2_base, sda_base);
|
||||
// } else {
|
||||
// bail!("Unable to locate SDA bases");
|
||||
// }
|
||||
|
||||
if let Some(map) = &args.map_file {
|
||||
let mmap = map_file(map)?;
|
||||
let _entries = process_map(map_reader(&mmap))?;
|
||||
}
|
||||
|
||||
if let Some(splits_file) = &args.splits_file {
|
||||
let map = map_file(splits_file)?;
|
||||
if let Some(splits_path) = &args.splits_file {
|
||||
if splits_path.is_file() {
|
||||
let map = map_file(splits_path)?;
|
||||
apply_splits(map_reader(&map), &mut obj)?;
|
||||
}
|
||||
}
|
||||
|
||||
let mut state = AnalyzerState::default();
|
||||
|
||||
if let Some(symbols_path) = &args.symbols_file {
|
||||
if symbols_path.is_file() {
|
||||
let map = map_file(symbols_path)?;
|
||||
for result in map_reader(&map).lines() {
|
||||
let line = match result {
|
||||
Ok(line) => line,
|
||||
Err(e) => bail!("Failed to process symbols file: {e:?}"),
|
||||
};
|
||||
if let Some(symbol) = parse_symbol_line(&line, &obj)? {
|
||||
// if symbol.kind == ObjSymbolKind::Function {
|
||||
// state.function_entries.insert(symbol.address as u32);
|
||||
// if symbol.size_known {
|
||||
// state
|
||||
// .function_bounds
|
||||
// .insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
|
||||
// }
|
||||
// }
|
||||
if let Some(existing_symbol) = obj
|
||||
.symbols
|
||||
.iter_mut()
|
||||
.find(|e| e.address == symbol.address && e.kind == symbol.kind)
|
||||
{
|
||||
*existing_symbol = symbol;
|
||||
} else {
|
||||
obj.symbols.push(symbol);
|
||||
if let Some(symbol) = parse_symbol_line(&line, &mut obj)? {
|
||||
obj.add_symbol(symbol, true)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO move before symbols?
|
||||
log::info!("Performing signature analysis");
|
||||
apply_signatures(&mut obj)?;
|
||||
|
||||
// Apply known functions from extab
|
||||
for (&addr, &size) in &obj.known_functions {
|
||||
state.function_entries.insert(addr);
|
||||
state.function_bounds.insert(addr, addr + size);
|
||||
}
|
||||
for symbol in &obj.symbols {
|
||||
if symbol.kind != ObjSymbolKind::Function {
|
||||
continue;
|
||||
}
|
||||
state.function_entries.insert(symbol.address as u32);
|
||||
if !symbol.size_known {
|
||||
continue;
|
||||
}
|
||||
state.function_bounds.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
|
||||
}
|
||||
// Also check the start of each code section
|
||||
for section in &obj.sections {
|
||||
if section.kind == ObjSectionKind::Code {
|
||||
state.function_entries.insert(section.address as u32);
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Detecting function boundaries");
|
||||
state.detect_functions(&obj)?;
|
||||
log::info!("Discovered {} functions", state.function_slices.len());
|
||||
|
||||
|
@ -505,17 +171,40 @@ fn disasm(args: DisasmArgs) -> Result<()> {
|
|||
log::info!("Applying relocations");
|
||||
tracker.apply(&mut obj, false)?;
|
||||
|
||||
if args.splits_file.is_some() {
|
||||
log::info!("Detecting object boundaries");
|
||||
detect_object_boundaries(&mut obj)?;
|
||||
|
||||
log::info!("Detecting strings");
|
||||
detect_strings(&mut obj)?;
|
||||
|
||||
if let Some(symbols_path) = &args.symbols_file {
|
||||
let mut symbols_writer = BufWriter::new(
|
||||
File::create(symbols_path)
|
||||
.with_context(|| format!("Failed to create '{}'", symbols_path.display()))?,
|
||||
);
|
||||
write_symbols(&mut symbols_writer, &obj)?;
|
||||
}
|
||||
|
||||
if let Some(splits_path) = &args.splits_file {
|
||||
let mut splits_writer = BufWriter::new(
|
||||
File::create(splits_path)
|
||||
.with_context(|| format!("Failed to create '{}'", splits_path.display()))?,
|
||||
);
|
||||
write_splits(&mut splits_writer, &obj)?;
|
||||
}
|
||||
|
||||
log::info!("Adjusting splits");
|
||||
update_splits(&mut obj)?;
|
||||
|
||||
log::info!("Splitting {} objects", obj.link_order.len());
|
||||
let split_objs = split_obj(&obj)?;
|
||||
|
||||
// Create out dirs
|
||||
let asm_dir = args.out.join("asm");
|
||||
let include_dir = args.out.join("include");
|
||||
let obj_dir = args.out.join("expected");
|
||||
let asm_dir = args.out_dir.join("asm");
|
||||
let include_dir = args.out_dir.join("include");
|
||||
let obj_dir = args.out_dir.clone();
|
||||
DirBuilder::new().recursive(true).create(&include_dir)?;
|
||||
fs::write(include_dir.join("macros.inc"), include_bytes!("../../assets/macros.inc"))?;
|
||||
fs::write(include_dir.join("macros.inc"), include_str!("../../assets/macros.inc"))?;
|
||||
|
||||
log::info!("Writing object files");
|
||||
let mut file_map = HashMap::<String, Vec<u8>>::new();
|
||||
|
@ -527,12 +216,12 @@ fn disasm(args: DisasmArgs) -> Result<()> {
|
|||
};
|
||||
}
|
||||
|
||||
let mut rsp_file = BufWriter::new(File::create("rsp")?);
|
||||
let mut rsp_file = BufWriter::new(File::create(args.out_dir.join("rsp"))?);
|
||||
for unit in &obj.link_order {
|
||||
let object = file_map
|
||||
.get(unit)
|
||||
.ok_or_else(|| anyhow!("Failed to find object file for unit '{unit}'"))?;
|
||||
let out_path = obj_dir.join(unit);
|
||||
let out_path = obj_dir.join(obj_path_for_unit(unit));
|
||||
writeln!(rsp_file, "{}", out_path.display())?;
|
||||
if let Some(parent) = out_path.parent() {
|
||||
DirBuilder::new().recursive(true).create(parent)?;
|
||||
|
@ -544,39 +233,50 @@ fn disasm(args: DisasmArgs) -> Result<()> {
|
|||
}
|
||||
rsp_file.flush()?;
|
||||
|
||||
// Generate ldscript.lcf
|
||||
fs::write(args.out_dir.join("ldscript.lcf"), generate_ldscript(&obj)?)?;
|
||||
|
||||
log::info!("Writing disassembly");
|
||||
let mut files_out = File::create(args.out.join("link_order.txt"))?;
|
||||
// let mut files_out = File::create(args.out_dir.join("build.ps1"))?;
|
||||
// writeln!(files_out, "$ErrorActionPreference = 'Stop'")?;
|
||||
// writeln!(
|
||||
// files_out,
|
||||
// "$asflags = '-mgekko', '-I', '{}', '--defsym', 'version=0', '-W', '--strip-local-absolute', '-gdwarf-2'",
|
||||
// include_dir.display()
|
||||
// )?;
|
||||
// writeln!(files_out, "$env:PATH = \"$env:PATH;C:\\devkitPro\\devkitPPC\\bin\"")?;
|
||||
for (unit, split_obj) in obj.link_order.iter().zip(&split_objs) {
|
||||
let out_path = asm_dir.join(format!("{}.s", unit.trim_end_matches(".o")));
|
||||
let out_path = asm_dir.join(asm_path_for_unit(unit));
|
||||
|
||||
if let Some(parent) = out_path.parent() {
|
||||
DirBuilder::new().recursive(true).create(parent)?;
|
||||
}
|
||||
let mut w = BufWriter::new(File::create(out_path)?);
|
||||
let mut w = BufWriter::new(File::create(&out_path)?);
|
||||
write_asm(&mut w, split_obj)?;
|
||||
w.flush()?;
|
||||
|
||||
writeln!(files_out, "{}", unit)?;
|
||||
}
|
||||
files_out.flush()?;
|
||||
} else {
|
||||
log::info!("Writing disassembly");
|
||||
let mut w = BufWriter::new(File::create("out.s")?);
|
||||
write_asm(&mut w, &obj)?;
|
||||
}
|
||||
|
||||
if let Some(symbols_path) = &args.symbols_file {
|
||||
let mut symbols_writer = BufWriter::new(
|
||||
File::create(&symbols_path)
|
||||
.with_context(|| format!("Failed to create '{}'", symbols_path.display()))?,
|
||||
);
|
||||
write_symbols(&mut symbols_writer, &obj)?;
|
||||
// let obj_path = obj_dir.join(obj_path_for_unit(unit));
|
||||
// writeln!(files_out, "Write-Host 'Compiling {}'", obj_path.display())?;
|
||||
// writeln!(
|
||||
// files_out,
|
||||
// "powerpc-eabi-as @asflags -o '{}' '{}'",
|
||||
// obj_path.display(),
|
||||
// out_path.display()
|
||||
// )?;
|
||||
// writeln!(
|
||||
// files_out,
|
||||
// "dtk elf fixup '{}' '{}'",
|
||||
// obj_path.display(),
|
||||
// obj_path.display()
|
||||
// )?;
|
||||
}
|
||||
// files_out.flush()?;
|
||||
|
||||
// (debugging) validate against ELF
|
||||
if let Some(file) = args.elf_file {
|
||||
validate(&obj, &file, &state)?;
|
||||
if let Some(file) = &args.elf_file {
|
||||
validate(&obj, file, &state)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
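// obj_path_for_unit / asm_path_for_unit are introduced by this commit but defined
// outside this hunk; judging by the line they replace
// (format!("{}.s", unit.trim_end_matches(".o"))), they likely map a unit name to an
// output path. The extensions below are assumptions, not the real definitions.
#[allow(dead_code)]
fn asm_path_for_unit_sketch(unit: &str) -> std::path::PathBuf {
    std::path::PathBuf::from(format!("{}.s", unit.trim_end_matches(".o")))
}
#[allow(dead_code)]
fn obj_path_for_unit_sketch(unit: &str) -> std::path::PathBuf {
    std::path::PathBuf::from(format!("{}.o", unit.trim_end_matches(".o")))
}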
|
||||
|
||||
|
@ -610,11 +310,7 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
|
|||
if section.kind != ObjSectionKind::Code {
|
||||
continue;
|
||||
}
|
||||
for (_symbol_idx, symbol) in real_obj.symbols_for_section(section.index) {
|
||||
// if symbol.name.starts_with("switch_") {
|
||||
// continue;
|
||||
// }
|
||||
// if symbol.kind == ObjSymbolKind::Function {
|
||||
for (_symbol_idx, symbol) in real_obj.symbols.for_section(section) {
|
||||
real_functions.insert(symbol.address as u32, symbol.name.clone());
|
||||
match state.function_bounds.get(&(symbol.address as u32)) {
|
||||
Some(&end) => {
|
||||
|
@ -636,7 +332,6 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
|
|||
);
|
||||
}
|
||||
}
|
||||
// }
|
||||
}
|
||||
}
|
||||
for (&start, &end) in &state.function_bounds {
|
||||
|
@ -653,7 +348,8 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
|
|||
);
|
||||
}
|
||||
}
|
||||
return Ok(()); // TODO
|
||||
// return Ok(()); // TODO
|
||||
|
||||
for real_section in &real_obj.sections {
|
||||
let obj_section = match obj.sections.get(real_section.index) {
|
||||
Some(v) => v,
|
||||
|
@ -661,10 +357,11 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
|
|||
};
|
||||
let real_map = real_section.build_relocation_map()?;
|
||||
let obj_map = obj_section.build_relocation_map()?;
|
||||
for (&real_addr, real_reloc) in &real_map {
|
||||
let real_symbol = &real_obj.symbols[real_reloc.target_symbol];
|
||||
for (&real_addr, &real_reloc_idx) in &real_map {
|
||||
let real_reloc = &real_section.relocations[real_reloc_idx];
|
||||
let real_symbol = real_obj.symbols.at(real_reloc.target_symbol);
|
||||
let obj_reloc = match obj_map.get(&real_addr) {
|
||||
Some(v) => v,
|
||||
Some(v) => &obj_section.relocations[*v],
|
||||
None => {
|
||||
// Ignore GCC local jump branches
|
||||
if real_symbol.kind == ObjSymbolKind::Section
|
||||
|
@ -688,7 +385,7 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
|
|||
continue;
|
||||
}
|
||||
};
|
||||
let obj_symbol = &obj.symbols[obj_reloc.target_symbol];
|
||||
let obj_symbol = obj.symbols.at(obj_reloc.target_symbol);
|
||||
if real_reloc.kind != obj_reloc.kind {
|
||||
log::warn!(
|
||||
"Relocation type mismatch @ {:#010X}: {:?} != {:?}",
|
||||
|
@ -714,8 +411,9 @@ fn validate<P: AsRef<Path>>(obj: &ObjInfo, elf_file: P, state: &AnalyzerState) -
|
|||
continue;
|
||||
}
|
||||
}
|
||||
for (&obj_addr, obj_reloc) in &obj_map {
|
||||
let obj_symbol = &obj.symbols[obj_reloc.target_symbol];
|
||||
for (&obj_addr, &obj_reloc_idx) in &obj_map {
|
||||
let obj_reloc = &obj_section.relocations[obj_reloc_idx];
|
||||
let obj_symbol = obj.symbols.at(obj_reloc.target_symbol);
|
||||
if !real_map.contains_key(&obj_addr) {
|
||||
log::warn!(
|
||||
"Relocation not real @ {:#010X} {:?} to {:#010X}+{:X} ({})",
|
||||
|
|
|
@ -11,9 +11,8 @@ use object::{elf, Object, ObjectSection, ObjectSymbol, RelocationKind, Relocatio
|
|||
|
||||
use crate::util::{
|
||||
dwarf::{
|
||||
process_address, process_offset, process_type, process_variable_location,
|
||||
read_debug_section, type_string, ud_type, ud_type_def, ud_type_string, AttributeKind,
|
||||
TagKind, TypeKind,
|
||||
process_address, process_type, process_variable_location, read_debug_section, type_string,
|
||||
ud_type, ud_type_def, ud_type_string, AttributeKind, TagKind,
|
||||
},
|
||||
file::map_file,
|
||||
};
|
||||
|
@ -61,13 +60,13 @@ fn dump(args: DumpArgs) -> Result<()> {
|
|||
};
|
||||
let name = String::from_utf8_lossy(e.header().identifier()).to_string();
|
||||
let mut data = vec![0u8; e.header().size() as usize];
|
||||
e.read(&mut data)?;
|
||||
e.read_exact(&mut data)?;
|
||||
let obj_file = object::read::File::parse(&*data)?;
|
||||
let debug_section = match obj_file.section_by_name(".debug") {
|
||||
Some(section) => {
|
||||
log::info!("Processing '{}'", name);
|
||||
section
|
||||
},
|
||||
}
|
||||
None => {
|
||||
log::warn!("Object '{}' missing .debug section", name);
|
||||
continue;
|
||||
|
@ -76,7 +75,7 @@ fn dump(args: DumpArgs) -> Result<()> {
|
|||
if let Some(out_path) = &args.out {
|
||||
// TODO make a basename method
|
||||
let name = name.trim_start_matches("D:").replace('\\', "/");
|
||||
let name = name.rsplit_once('/').map(|(a, b)| b).unwrap_or(&name);
|
||||
let name = name.rsplit_once('/').map(|(_, b)| b).unwrap_or(&name);
|
||||
let file_path = out_path.join(format!("{}.txt", name));
|
||||
let mut file = BufWriter::new(File::create(file_path)?);
|
||||
dump_debug_section(&mut file, &obj_file, debug_section)?;
|
||||
|
|
|
@ -2,16 +2,17 @@ use std::{
|
|||
collections::{btree_map, hash_map, BTreeMap, HashMap},
|
||||
fs,
|
||||
fs::{DirBuilder, File},
|
||||
io::{BufRead, BufReader, BufWriter, Write},
|
||||
io::{BufWriter, Write},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, bail, ensure, Context, Result};
|
||||
use argh::FromArgs;
|
||||
use object::{
|
||||
elf,
|
||||
write::{Mangling, SectionId, SymbolId},
|
||||
Object, ObjectSection, ObjectSymbol, RelocationKind, RelocationTarget, SectionFlags,
|
||||
SectionIndex, SectionKind, SymbolFlags, SymbolKind, SymbolScope, SymbolSection,
|
||||
FileFlags, Object, ObjectSection, ObjectSymbol, RelocationKind, RelocationTarget, SectionFlags,
|
||||
SectionIndex, SectionKind, SymbolFlags, SymbolIndex, SymbolKind, SymbolScope, SymbolSection,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
|
@ -24,7 +25,7 @@ use crate::{
|
|||
asm::write_asm,
|
||||
config::{write_splits, write_symbols},
|
||||
elf::{process_elf, write_elf},
|
||||
file::buf_reader,
|
||||
file::process_rsp,
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -92,9 +93,6 @@ pub struct ConfigArgs {
|
|||
#[argh(positional)]
|
||||
/// output directory
|
||||
out_dir: PathBuf,
|
||||
#[argh(option, short = 'm')]
|
||||
/// path to obj_files.mk
|
||||
obj_files: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
|
@ -137,29 +135,12 @@ fn config(args: ConfigArgs) -> Result<()> {
|
|||
}
|
||||
|
||||
{
|
||||
let obj_files = if let Some(path) = &args.obj_files {
|
||||
Some(
|
||||
BufReader::new(
|
||||
File::open(path)
|
||||
.with_context(|| format!("Failed to open '{}'", path.display()))?,
|
||||
)
|
||||
.lines()
|
||||
.filter(|line| match line {
|
||||
Ok(line) => line.contains(".o"),
|
||||
Err(_) => false,
|
||||
})
|
||||
.map(|result| result.unwrap())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let splits_path = args.out_dir.join("splits.txt");
|
||||
let mut splits_writer = BufWriter::new(
|
||||
File::create(&splits_path)
|
||||
.with_context(|| format!("Failed to create '{}'", splits_path.display()))?,
|
||||
);
|
||||
write_splits(&mut splits_writer, &obj, obj_files)?;
|
||||
write_splits(&mut splits_writer, &obj)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -257,13 +238,22 @@ fn file_name_from_unit(str: &str, suffix: &str) -> String {
|
|||
|
||||
const ASM_SUFFIX: &str = " (asm)";
|
||||
|
||||
// fn fixup(args: FixupArgs) -> Result<()> {
|
||||
// let obj = process_elf(&args.in_file)?;
|
||||
// let out = write_elf(&obj)?;
|
||||
// fs::write(&args.out_file, &out).context("Failed to create output file")?;
|
||||
// Ok(())
|
||||
// }
|
||||
|
||||
fn fixup(args: FixupArgs) -> Result<()> {
|
||||
let in_buf = fs::read(&args.in_file)
|
||||
.with_context(|| format!("Failed to open input file: '{}'", args.in_file.display()))?;
|
||||
let in_file = object::read::File::parse(&*in_buf).context("Failed to parse input ELF")?;
|
||||
let mut out_file =
|
||||
object::write::Object::new(in_file.format(), in_file.architecture(), in_file.endianness());
|
||||
out_file.set_mangling(Mangling::None);
|
||||
out_file.flags =
|
||||
FileFlags::Elf { os_abi: elf::ELFOSABI_SYSV, abi_version: 0, e_flags: elf::EF_PPC_EMB };
|
||||
out_file.mangling = Mangling::None;
|
||||
|
||||
// Write file symbol first
|
||||
let mut file_symbol_found = false;
|
||||
|
@ -317,7 +307,7 @@ fn fixup(args: FixupArgs) -> Result<()> {
|
|||
} else {
|
||||
out_section.set_data(section.uncompressed_data()?.into_owned(), section.align());
|
||||
}
|
||||
if has_section_flags(section.flags(), object::elf::SHF_ALLOC)? {
|
||||
if has_section_flags(section.flags(), elf::SHF_ALLOC)? {
|
||||
// Generate section symbol
|
||||
out_file.section_symbol(section_id);
|
||||
}
|
||||
|
@ -398,9 +388,9 @@ fn fixup(args: FixupArgs) -> Result<()> {
|
|||
// This is a hack to avoid replacement with a section symbol
|
||||
// See [`object::write::elf::object::elf_fixup_relocation`]
|
||||
RelocationKind::Absolute => RelocationKind::Elf(if addr & 3 == 0 {
|
||||
object::elf::R_PPC_ADDR32
|
||||
elf::R_PPC_ADDR32
|
||||
} else {
|
||||
object::elf::R_PPC_UADDR32
|
||||
elf::R_PPC_UADDR32
|
||||
}),
|
||||
other => other,
|
||||
};
|
||||
|
@ -442,7 +432,9 @@ fn to_write_symbol_section(
|
|||
}
|
||||
}
|
||||
|
||||
fn to_write_symbol_flags(flags: SymbolFlags<SectionIndex>) -> Result<SymbolFlags<SectionId>> {
|
||||
fn to_write_symbol_flags(
|
||||
flags: SymbolFlags<SectionIndex, SymbolIndex>,
|
||||
) -> Result<SymbolFlags<SectionId, SymbolId>> {
|
||||
match flags {
|
||||
SymbolFlags::Elf { st_info, st_other } => Ok(SymbolFlags::Elf { st_info, st_other }),
|
||||
SymbolFlags::None => Ok(SymbolFlags::None),
|
||||
|
@ -475,25 +467,7 @@ fn has_section_flags(flags: SectionFlags, flag: u32) -> Result<bool> {
|
|||
|
||||
fn signatures(args: SignaturesArgs) -> Result<()> {
|
||||
// Process response files (starting with '@')
|
||||
let mut files = Vec::with_capacity(args.files.len());
|
||||
for path in args.files {
|
||||
let path_str =
|
||||
path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
|
||||
match path_str.strip_prefix('@') {
|
||||
Some(rsp_file) => {
|
||||
let reader = buf_reader(rsp_file)?;
|
||||
for result in reader.lines() {
|
||||
let line = result?;
|
||||
if !line.is_empty() {
|
||||
files.push(PathBuf::from(line));
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
let files = process_rsp(&args.files)?;
|
||||
|
||||
let mut signatures: HashMap<String, FunctionSignature> = HashMap::new();
|
||||
for path in files {
|
||||
|
|
|
@ -65,10 +65,10 @@ pub fn run(args: Args) -> Result<()> {
|
|||
out.seek(SeekFrom::Start(offset as u64))?;
|
||||
|
||||
// Text sections
|
||||
for section in obj_file.sections() {
|
||||
if section.kind() != SectionKind::Text {
|
||||
continue;
|
||||
}
|
||||
for section in
|
||||
obj_file.sections().filter(|s| section_kind(s) == SectionKind::Text && is_alloc(s.flags()))
|
||||
{
|
||||
log::debug!("Processing text section '{}'", section.name().unwrap_or("[error]"));
|
||||
let address = section.address() as u32;
|
||||
let size = align32(section.size() as u32);
|
||||
*header.text_sections.get_mut(header.text_section_count).ok_or_else(|| {
|
||||
|
@ -83,10 +83,10 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
|
||||
// Data sections
|
||||
for section in obj_file.sections() {
|
||||
if section.kind() != SectionKind::Data && section.kind() != SectionKind::ReadOnlyData {
|
||||
continue;
|
||||
}
|
||||
for section in
|
||||
obj_file.sections().filter(|s| section_kind(s) == SectionKind::Data && is_alloc(s.flags()))
|
||||
{
|
||||
log::debug!("Processing data section '{}'", section.name().unwrap_or("[error]"));
|
||||
let address = section.address() as u32;
|
||||
let size = align32(section.size() as u32);
|
||||
*header.data_sections.get_mut(header.data_section_count).ok_or_else(|| {
|
||||
|
@ -101,10 +101,10 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
|
||||
// BSS sections
|
||||
for section in obj_file.sections() {
|
||||
if section.kind() != SectionKind::UninitializedData {
|
||||
continue;
|
||||
}
|
||||
for section in obj_file
|
||||
.sections()
|
||||
.filter(|s| section_kind(s) == SectionKind::UninitializedData && is_alloc(s.flags()))
|
||||
{
|
||||
let address = section.address() as u32;
|
||||
let size = section.size() as u32;
|
||||
if header.bss_address == 0 {
|
||||
|
@ -162,3 +162,28 @@ fn write_aligned<T: Write>(out: &mut T, bytes: &[u8], aligned_size: u32) -> std:
|
|||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Some ELF files don't have the proper section kind set (for small data sections in particular)
|
||||
// so we map the section name to the expected section kind when possible.
|
||||
#[inline]
|
||||
fn section_kind(section: &object::Section) -> SectionKind {
|
||||
section
|
||||
.name()
|
||||
.ok()
|
||||
.and_then(|name| match name {
|
||||
".init" | ".text" | ".vmtext" | ".dbgtext" => Some(SectionKind::Text),
|
||||
".ctors" | ".dtors" | ".data" | ".rodata" | ".sdata" | ".sdata2" | "extab"
|
||||
| "extabindex" => Some(SectionKind::Data),
|
||||
".bss" | ".sbss" | ".sbss2" => Some(SectionKind::UninitializedData),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap_or_else(|| match section.kind() {
|
||||
SectionKind::ReadOnlyData => SectionKind::Data,
|
||||
kind => kind,
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_alloc(flags: object::SectionFlags) -> bool {
|
||||
matches!(flags, object::SectionFlags::Elf { sh_flags } if sh_flags & object::elf::SHF_ALLOC as u64 != 0)
|
||||
}
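// Self-contained restatement of the mapping above using plain (&str, SectionKind)
// pairs, purely for illustration; section_kind itself takes an object::Section.
// For example, a ".sdata" section whose ELF header says ReadOnlyData still counts
// as Data, matching the DOL format's text/data/bss-only view of the world.
#[allow(dead_code)]
fn kind_for(name: &str, elf_kind: SectionKind) -> SectionKind {
    match name {
        ".init" | ".text" | ".vmtext" | ".dbgtext" => SectionKind::Text,
        ".ctors" | ".dtors" | ".data" | ".rodata" | ".sdata" | ".sdata2" | "extab"
        | "extabindex" => SectionKind::Data,
        ".bss" | ".sbss" | ".sbss2" => SectionKind::UninitializedData,
        // Unknown names fall back to the ELF-reported kind, folding ReadOnlyData into Data.
        _ => match elf_kind {
            SectionKind::ReadOnlyData => SectionKind::Data,
            kind => kind,
        },
    }
}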
|
||||
|
|
154  src/cmd/map.rs
|
@@ -1,11 +1,13 @@
|
|||
#![allow(clippy::needless_borrow)]
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::{bail, ensure, Result};
|
||||
use anyhow::{bail, Result};
|
||||
use argh::FromArgs;
|
||||
use cwdemangle::{demangle, DemangleOptions};
|
||||
|
||||
use crate::util::{
|
||||
file::{map_file, map_reader},
|
||||
map::{process_map, resolve_link_order, SymbolEntry, SymbolRef},
|
||||
map::{process_map, SymbolEntry, SymbolRef},
|
||||
};
|
||||
|
||||
#[derive(FromArgs, PartialEq, Debug)]
|
||||
|
@ -96,11 +98,8 @@ fn entries(args: EntriesArgs) -> Result<()> {
|
|||
if symbol_ref.name.starts_with('@') {
|
||||
continue;
|
||||
}
|
||||
if let Some(symbol) = entries.symbols.get(symbol_ref) {
|
||||
println!("{}", symbol.demangled.as_ref().unwrap_or(&symbol.name));
|
||||
} else {
|
||||
println!("Symbol not found: {}", symbol_ref.name);
|
||||
}
|
||||
let demangled = demangle(&symbol_ref.name, &DemangleOptions::default());
|
||||
println!("{}", demangled.as_deref().unwrap_or(&symbol_ref.name));
|
||||
}
|
||||
}
|
||||
None => bail!("Failed to find entries for TU '{}' in map", args.unit),
|
||||
|
@ -111,78 +110,91 @@ fn entries(args: EntriesArgs) -> Result<()> {
|
|||
fn symbol(args: SymbolArgs) -> Result<()> {
|
||||
let map = map_file(&args.map_file)?;
|
||||
let entries = process_map(map_reader(&map))?;
|
||||
let mut opt_ref: Option<(SymbolRef, SymbolEntry)> = None;
|
||||
for (symbol_ref, entry) in &entries.symbols {
|
||||
if symbol_ref.name == args.symbol {
|
||||
ensure!(opt_ref.is_none(), "Symbol '{}' found in multiple TUs", args.symbol);
|
||||
opt_ref = Some((symbol_ref.clone(), entry.clone()));
|
||||
}
|
||||
}
|
||||
match opt_ref {
|
||||
Some((symbol_ref, symbol)) => {
|
||||
println!("Located symbol {}", symbol.demangled.as_ref().unwrap_or(&symbol.name));
|
||||
if let Some(vec) = entries.entry_references.get_vec(&symbol_ref) {
|
||||
println!("\nReferences:");
|
||||
for x in vec {
|
||||
if let Some(reference) = entries.symbols.get(x) {
|
||||
println!(
|
||||
">>> {} ({:?},{:?}) [{}]",
|
||||
reference.demangled.as_ref().unwrap_or(&reference.name),
|
||||
reference.kind,
|
||||
reference.visibility,
|
||||
reference.unit.as_deref().unwrap_or("[generated]")
|
||||
);
|
||||
} else {
|
||||
println!(">>> {} (NOT FOUND)", x.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(vec) = entries.entry_referenced_from.get_vec(&symbol_ref) {
|
||||
println!("\nReferenced from:");
|
||||
for x in vec {
|
||||
if let Some(reference) = entries.symbols.get(x) {
|
||||
println!(
|
||||
">>> {} ({:?}, {:?}) [{}]",
|
||||
reference.demangled.as_ref().unwrap_or(&reference.name),
|
||||
reference.kind,
|
||||
reference.visibility,
|
||||
reference.unit.as_deref().unwrap_or("[generated]")
|
||||
);
|
||||
} else {
|
||||
println!(">>> {} (NOT FOUND)", x.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
println!("\n");
|
||||
}
|
||||
None => bail!("Failed to find symbol '{}' in map", args.symbol),
|
||||
}
|
||||
let opt_ref: Option<(SymbolRef, SymbolEntry)> = None;
|
||||
|
||||
_ = entries;
|
||||
_ = opt_ref;
|
||||
// TODO
|
||||
|
||||
// for (symbol_ref, entry) in &entries.symbols {
|
||||
// if symbol_ref.name == args.symbol {
|
||||
// ensure!(opt_ref.is_none(), "Symbol '{}' found in multiple TUs", args.symbol);
|
||||
// opt_ref = Some((symbol_ref.clone(), entry.clone()));
|
||||
// }
|
||||
// }
|
||||
// match opt_ref {
|
||||
// Some((symbol_ref, symbol)) => {
|
||||
// println!("Located symbol {}", symbol.demangled.as_ref().unwrap_or(&symbol.name));
|
||||
// if let Some(vec) = entries.entry_references.get_vec(&symbol_ref) {
|
||||
// println!("\nReferences:");
|
||||
// for x in vec {
|
||||
// if let Some(reference) = entries.symbols.get(x) {
|
||||
// println!(
|
||||
// ">>> {} ({:?},{:?}) [{}]",
|
||||
// reference.demangled.as_ref().unwrap_or(&reference.name),
|
||||
// reference.kind,
|
||||
// reference.visibility,
|
||||
// reference.unit.as_deref().unwrap_or("[generated]")
|
||||
// );
|
||||
// } else {
|
||||
// println!(">>> {} (NOT FOUND)", x.name);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// if let Some(vec) = entries.entry_referenced_from.get_vec(&symbol_ref) {
|
||||
// println!("\nReferenced from:");
|
||||
// for x in vec {
|
||||
// if let Some(reference) = entries.symbols.get(x) {
|
||||
// println!(
|
||||
// ">>> {} ({:?}, {:?}) [{}]",
|
||||
// reference.demangled.as_ref().unwrap_or(&reference.name),
|
||||
// reference.kind,
|
||||
// reference.visibility,
|
||||
// reference.unit.as_deref().unwrap_or("[generated]")
|
||||
// );
|
||||
// } else {
|
||||
// println!(">>> {} (NOT FOUND)", x.name);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// println!("\n");
|
||||
// }
|
||||
// None => bail!("Failed to find symbol '{}' in map", args.symbol),
|
||||
// }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn order(args: OrderArgs) -> Result<()> {
|
||||
let map = map_file(&args.map_file)?;
|
||||
let entries = process_map(map_reader(&map))?;
|
||||
let order = resolve_link_order(&entries.unit_order)?;
|
||||
for unit in order {
|
||||
println!("{unit}");
|
||||
}
|
||||
|
||||
_ = entries;
|
||||
// TODO
|
||||
|
||||
// let order = resolve_link_order(&entries.unit_order)?;
|
||||
// for unit in order {
|
||||
// println!("{unit}");
|
||||
// }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn slices(args: SlicesArgs) -> Result<()> {
|
||||
let map = map_file(&args.map_file)?;
|
||||
let entries = process_map(map_reader(&map))?;
|
||||
let order = resolve_link_order(&entries.unit_order)?;
|
||||
for unit in order {
|
||||
let unit_path = if let Some((lib, name)) = unit.split_once(' ') {
|
||||
format!("{}/{}", lib.strip_suffix(".a").unwrap_or(lib), name)
|
||||
} else if let Some(strip) = unit.strip_suffix(".o") {
|
||||
format!("{strip}.c")
|
||||
} else {
|
||||
unit.clone()
|
||||
};
|
||||
println!("{unit_path}:");
|
||||
|
||||
_ = entries;
|
||||
// TODO
|
||||
|
||||
// let order = resolve_link_order(&entries.unit_order)?;
|
||||
// for unit in order {
|
||||
// let unit_path = if let Some((lib, name)) = unit.split_once(' ') {
|
||||
// format!("{}/{}", lib.strip_suffix(".a").unwrap_or(lib), name)
|
||||
// } else if let Some(strip) = unit.strip_suffix(".o") {
|
||||
// format!("{strip}.c")
|
||||
// } else {
|
||||
// unit.clone()
|
||||
// };
|
||||
// println!("{unit_path}:");
|
||||
// let mut ranges = Vec::<(String, Range<u32>)>::new();
|
||||
// match entries.unit_section_ranges.get(&unit) {
|
||||
// Some(sections) => {
|
||||
|
@ -196,13 +208,17 @@ fn slices(args: SlicesArgs) -> Result<()> {
|
|||
// for (name, range) in ranges {
|
||||
// println!("\t{}: [{:#010x}, {:#010x}]", name, range.start, range.end);
|
||||
// }
|
||||
}
|
||||
// }
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn symbols(args: SymbolsArgs) -> Result<()> {
|
||||
let map = map_file(&args.map_file)?;
|
||||
let _entries = process_map(map_reader(&map))?;
|
||||
let entries = process_map(map_reader(&map))?;
|
||||
|
||||
_ = entries;
|
||||
// TODO
|
||||
|
||||
// for (address, symbol) in entries.address_to_symbol {
|
||||
// if symbol.name.starts_with('@') {
|
||||
// continue;
|
||||
|
|
162  src/cmd/rel.rs
|
@@ -12,14 +12,16 @@ use crate::{
|
|||
analysis::{
|
||||
cfa::AnalyzerState,
|
||||
pass::{AnalysisPass, FindSaveRestSleds, FindTRKInterruptVectorTable},
|
||||
signatures::apply_signatures,
|
||||
tracker::Tracker,
|
||||
},
|
||||
cmd::dol::apply_signatures,
|
||||
obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol, ObjSymbolKind},
|
||||
array_ref_mut,
|
||||
obj::{ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSymbol, ObjSymbolKind},
|
||||
util::{
|
||||
dol::process_dol,
|
||||
elf::write_elf,
|
||||
nested::{NestedMap, NestedVec},
|
||||
file::{map_file, map_reader, FileIterator},
|
||||
nested::NestedMap,
|
||||
rel::process_rel,
|
||||
},
|
||||
};
|
||||
|
@ -71,7 +73,8 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
|
||||
fn info(args: InfoArgs) -> Result<()> {
|
||||
let rel = process_rel(&args.rel_file)?;
|
||||
let map = map_file(args.rel_file)?;
|
||||
let rel = process_rel(map_reader(&map))?;
|
||||
println!("Read REL module ID {}", rel.module_id);
|
||||
// println!("REL: {:#?}", rel);
|
||||
Ok(())
|
||||
|
@ -81,22 +84,30 @@ fn info(args: InfoArgs) -> Result<()> {
|
|||
const fn align32(x: u32) -> u32 { (x + 31) & !31 }
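// A small test sketch (not in this commit) pinning down the rounding behavior:
// align32 rounds up to the next 32-byte boundary, which keeps both the merge base
// address (arena_lo + 0x2000) and every appended section size 32-byte aligned.
#[cfg(test)]
mod align32_tests {
    use super::align32;

    #[test]
    fn rounds_up_to_32_bytes() {
        assert_eq!(align32(0), 0);
        assert_eq!(align32(1), 0x20);
        assert_eq!(align32(0x20), 0x20);
        assert_eq!(align32(0x21), 0x40);
    }
}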
|
||||
|
||||
fn merge(args: MergeArgs) -> Result<()> {
|
||||
let mut module_map = BTreeMap::<u32, ObjInfo>::new();
|
||||
log::info!("Loading {}", args.dol_file.display());
|
||||
let mut obj = process_dol(&args.dol_file)?;
|
||||
apply_signatures(&mut obj)?;
|
||||
|
||||
for path in &args.rel_files {
|
||||
log::info!("Performing signature analysis");
|
||||
apply_signatures(&mut obj)?;
|
||||
let Some(arena_lo) = obj.arena_lo else { bail!("Failed to locate __ArenaLo in DOL") };
|
||||
|
||||
let mut processed = 0;
|
||||
let mut module_map = BTreeMap::<u32, ObjInfo>::new();
|
||||
for result in FileIterator::new(&args.rel_files)? {
|
||||
let (path, entry) = result?;
|
||||
log::info!("Loading {}", path.display());
|
||||
let obj = process_rel(path)?;
|
||||
let obj = process_rel(entry.as_reader())?;
|
||||
match module_map.entry(obj.module_id) {
|
||||
btree_map::Entry::Vacant(e) => e.insert(obj),
|
||||
btree_map::Entry::Occupied(_) => bail!("Duplicate module ID {}", obj.module_id),
|
||||
};
|
||||
processed += 1;
|
||||
}
|
||||
|
||||
log::info!("Merging {} REL(s)", processed);
|
||||
let mut section_map: BTreeMap<u32, BTreeMap<u32, u32>> = BTreeMap::new();
|
||||
let mut offset = align32(obj.arena_lo.unwrap() + 0x2000);
|
||||
for (_, module) in &module_map {
|
||||
let mut offset = align32(arena_lo + 0x2000);
|
||||
for module in module_map.values() {
|
||||
for mod_section in &module.sections {
|
||||
let section_idx = obj.sections.len();
|
||||
ensure!(mod_section.relocations.is_empty(), "Unsupported relocations during merge");
|
||||
|
@ -115,9 +126,8 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
section_known: mod_section.section_known,
|
||||
});
|
||||
section_map.nested_insert(module.module_id, mod_section.elf_index as u32, offset)?;
|
||||
let symbols = module.symbols_for_section(mod_section.index);
|
||||
for (_, mod_symbol) in symbols {
|
||||
obj.symbols.push(ObjSymbol {
|
||||
for (_, mod_symbol) in module.symbols.for_section(mod_section) {
|
||||
obj.symbols.add_direct(ObjSymbol {
|
||||
name: mod_symbol.name.clone(),
|
||||
demangled_name: mod_symbol.demangled_name.clone(),
|
||||
address: mod_symbol.address + offset as u64,
|
||||
|
@ -126,44 +136,41 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
size_known: mod_symbol.size_known,
|
||||
flags: mod_symbol.flags,
|
||||
kind: mod_symbol.kind,
|
||||
});
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
})?;
|
||||
}
|
||||
offset += align32(mod_section.size as u32);
|
||||
}
|
||||
}
|
||||
|
||||
let mut symbol_maps = Vec::new();
|
||||
for section in &obj.sections {
|
||||
symbol_maps.push(obj.build_symbol_map(section.index)?);
|
||||
}
|
||||
|
||||
// Apply relocations
|
||||
for (_, module) in &module_map {
|
||||
log::info!("Applying REL relocations");
|
||||
for module in module_map.values() {
|
||||
for rel_reloc in &module.unresolved_relocations {
|
||||
let source_addr =
|
||||
section_map[&module.module_id][&(rel_reloc.section as u32)] + rel_reloc.address;
|
||||
let source_addr = (section_map[&module.module_id][&(rel_reloc.section as u32)]
|
||||
+ rel_reloc.address)
|
||||
& !3;
|
||||
let target_addr = if rel_reloc.module_id == 0 {
|
||||
rel_reloc.addend
|
||||
} else {
|
||||
let base = section_map[&rel_reloc.module_id][&(rel_reloc.target_section as u32)];
|
||||
let addend = rel_reloc.addend;
|
||||
base + addend
|
||||
let section_map = §ion_map.get(&rel_reloc.module_id).with_context(|| {
|
||||
format!("Relocation against unknown module ID {}", rel_reloc.module_id)
|
||||
})?;
|
||||
section_map[&(rel_reloc.target_section as u32)] + rel_reloc.addend
|
||||
};
|
||||
let source_section = obj.section_at(source_addr)?;
|
||||
let target_section = obj.section_at(target_addr)?;
|
||||
let target_section_index = target_section.index;
|
||||
let source_section_index = obj.section_at(source_addr)?.index;
|
||||
let target_section_index = obj.section_at(target_addr)?.index;
|
||||
|
||||
// Try to find a previous sized symbol that encompasses the target
|
||||
let sym_map = &mut symbol_maps[target_section_index];
|
||||
let target_symbol = {
|
||||
let mut result = None;
|
||||
for (_addr, symbol_idxs) in sym_map.range(..=target_addr).rev() {
|
||||
for (_addr, symbol_idxs) in obj.symbols.indexes_for_range(..=target_addr).rev() {
|
||||
let symbol_idx = if symbol_idxs.len() == 1 {
|
||||
symbol_idxs.first().cloned().unwrap()
|
||||
} else {
|
||||
let mut symbol_idxs = symbol_idxs.clone();
|
||||
let mut symbol_idxs = symbol_idxs.to_vec();
|
||||
symbol_idxs.sort_by_key(|&symbol_idx| {
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
let symbol = obj.symbols.at(symbol_idx);
|
||||
let mut rank = match symbol.kind {
|
||||
ObjSymbolKind::Function | ObjSymbolKind::Object => {
|
||||
match rel_reloc.kind {
|
||||
|
@ -199,7 +206,7 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
None => continue,
|
||||
}
|
||||
};
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
let symbol = obj.symbols.at(symbol_idx);
|
||||
if symbol.address == target_addr as u64 {
|
||||
result = Some(symbol_idx);
|
||||
break;
|
||||
|
@ -214,12 +221,11 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
result
|
||||
};
|
||||
let (symbol_idx, addend) = if let Some(symbol_idx) = target_symbol {
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
let symbol = obj.symbols.at(symbol_idx);
|
||||
(symbol_idx, target_addr as i64 - symbol.address as i64)
|
||||
} else {
|
||||
// Create a new label
|
||||
let symbol_idx = obj.symbols.len();
|
||||
obj.symbols.push(ObjSymbol {
|
||||
let symbol_idx = obj.symbols.add_direct(ObjSymbol {
|
||||
name: String::new(),
|
||||
demangled_name: None,
|
||||
address: target_addr as u64,
|
||||
|
@ -228,11 +234,12 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
size_known: false,
|
||||
flags: Default::default(),
|
||||
kind: Default::default(),
|
||||
});
|
||||
sym_map.nested_push(target_addr, symbol_idx);
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
})?;
|
||||
(symbol_idx, 0)
|
||||
};
|
||||
obj.sections[target_section_index].relocations.push(ObjReloc {
|
||||
obj.sections[source_section_index].relocations.push(ObjReloc {
|
||||
kind: rel_reloc.kind,
|
||||
address: source_addr as u64,
|
||||
target_symbol: symbol_idx,
|
||||
|
@ -241,29 +248,11 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
}
|
||||
}
|
||||
|
||||
// Apply known functions from extab
|
||||
let mut state = AnalyzerState::default();
|
||||
for (&addr, &size) in &obj.known_functions {
|
||||
state.function_entries.insert(addr);
|
||||
state.function_bounds.insert(addr, addr + size);
|
||||
}
|
||||
for symbol in &obj.symbols {
|
||||
if symbol.kind != ObjSymbolKind::Function {
|
||||
continue;
|
||||
}
|
||||
state.function_entries.insert(symbol.address as u32);
|
||||
if !symbol.size_known {
|
||||
continue;
|
||||
}
|
||||
state.function_bounds.insert(symbol.address as u32, (symbol.address + symbol.size) as u32);
|
||||
}
|
||||
// Also check the start of each code section
|
||||
for section in &obj.sections {
|
||||
if section.kind == ObjSectionKind::Code {
|
||||
state.function_entries.insert(section.address as u32);
|
||||
}
|
||||
}
|
||||
// Apply relocations to code/data for analyzer
|
||||
link_relocations(&mut obj)?;
|
||||
|
||||
log::info!("Detecting function boundaries");
|
||||
let mut state = AnalyzerState::default();
|
||||
state.detect_functions(&obj)?;
|
||||
log::info!("Discovered {} functions", state.function_slices.len());
|
||||
|
||||
|
@ -281,8 +270,57 @@ fn merge(args: MergeArgs) -> Result<()> {
|
|||
// Write ELF
|
||||
let mut file = File::create(&args.out_file)
|
||||
.with_context(|| format!("Failed to create '{}'", args.out_file.display()))?;
|
||||
log::info!("Writing {}", args.out_file.display());
|
||||
let out_object = write_elf(&obj)?;
|
||||
file.write_all(&out_object)?;
|
||||
file.flush()?;
|
||||
Ok(())
|
||||
}
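// Illustration with made-up numbers (not from this commit): section_map answers
// "where did module M's ELF section S land in the merged image", which is exactly
// the lookup the relocation loop above performs before adding the REL offset.
#[allow(dead_code)]
fn merged_address_example() {
    use std::collections::BTreeMap;
    let mut section_map: BTreeMap<u32, BTreeMap<u32, u32>> = BTreeMap::new();
    // Pretend module 1, ELF section 2 was placed at 0x8040_2000 during the merge.
    let module_id: u32 = 1;
    let section: u32 = 2;
    section_map.entry(module_id).or_default().insert(section, 0x8040_2000);
    let rel_offset = 0x1C4u32;
    let flat_addr = section_map[&module_id][&section] + rel_offset;
    assert_eq!(flat_addr, 0x8040_21C4);
}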
|
||||
|
||||
fn link_relocations(obj: &mut ObjInfo) -> Result<()> {
|
||||
for section in &mut obj.sections {
|
||||
for reloc in §ion.relocations {
|
||||
let source_address = reloc.address /*& !3*/;
|
||||
let target_address =
|
||||
(obj.symbols.address_of(reloc.target_symbol) as i64 + reloc.addend) as u32;
|
||||
let ins_ref =
|
||||
array_ref_mut!(section.data, (source_address - section.address) as usize, 4);
|
||||
let mut ins = u32::from_be_bytes(*ins_ref);
|
||||
match reloc.kind {
|
||||
ObjRelocKind::Absolute => {
|
||||
ins = target_address;
|
||||
}
|
||||
ObjRelocKind::PpcAddr16Hi => {
|
||||
ins = (ins & 0xffff0000) | ((target_address >> 16) & 0xffff);
|
||||
}
|
||||
ObjRelocKind::PpcAddr16Ha => {
|
||||
ins = (ins & 0xffff0000) | (((target_address + 0x8000) >> 16) & 0xffff);
|
||||
}
|
||||
ObjRelocKind::PpcAddr16Lo => {
|
||||
ins = (ins & 0xffff0000) | (target_address & 0xffff);
|
||||
}
|
||||
ObjRelocKind::PpcRel24 => {
|
||||
let diff = target_address as i32 - source_address as i32;
|
||||
ensure!(
|
||||
(-0x2000000..0x2000000).contains(&diff),
|
||||
"R_PPC_REL24 relocation out of range"
|
||||
);
|
||||
ins = (ins & !0x3fffffc) | (diff as u32 & 0x3fffffc);
|
||||
}
|
||||
ObjRelocKind::PpcRel14 => {
|
||||
let diff = target_address as i32 - source_address as i32;
|
||||
ensure!(
|
||||
(-0x2000..0x2000).contains(&diff),
|
||||
"R_PPC_REL14 relocation out of range"
|
||||
);
|
||||
ins = (ins & !0xfffc) | (diff as u32 & 0xfffc);
|
||||
}
|
||||
ObjRelocKind::PpcEmbSda21 => {
|
||||
// Unused in RELs
|
||||
}
|
||||
};
|
||||
*ins_ref = ins.to_be_bytes();
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
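// Worked example of the half-word math above (illustrative values only). @ha adds
// 0x8000 before taking the high half because the paired low half is sign-extended
// by the instruction that consumes it: for target 0x8000_9000 the low half 0x9000
// sign-extends to -0x7000, so the high half must be bumped to 0x8001 to compensate.
#[allow(dead_code)]
fn ha_lo_example() {
    let target: u32 = 0x8000_9000;
    let ha = (target.wrapping_add(0x8000) >> 16) & 0xffff; // 0x8001
    let lo = target & 0xffff; // 0x9000
    let rebuilt = ((ha << 16) as i64 + (lo as u16 as i16) as i64) as u32;
    assert_eq!(ha, 0x8001);
    assert_eq!(rebuilt, target);
}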
|
||||
|
|
|
@ -35,7 +35,7 @@ pub fn run(args: Args) -> Result<()> {
|
|||
}
|
||||
|
||||
fn info(args: InfoArgs) -> Result<()> {
|
||||
let rso = process_rso(&args.rso_file)?;
|
||||
let rso = process_rso(args.rso_file)?;
|
||||
println!("Read RSO module {}", rso.name);
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -9,6 +9,8 @@ use argh::FromArgs;
|
|||
use filetime::{set_file_mtime, FileTime};
|
||||
use sha1::{Digest, Sha1};
|
||||
|
||||
use crate::util::file::process_rsp;
|
||||
|
||||
#[derive(FromArgs, PartialEq, Eq, Debug)]
|
||||
/// Print or check SHA1 (160-bit) checksums.
|
||||
#[argh(subcommand, name = "shasum")]
|
||||
|
@ -17,8 +19,8 @@ pub struct Args {
|
|||
/// check SHA sums against given list
|
||||
check: bool,
|
||||
#[argh(positional)]
|
||||
/// path to file
|
||||
file: PathBuf,
|
||||
/// path to input file(s)
|
||||
files: Vec<PathBuf>,
|
||||
#[argh(option, short = 'o')]
|
||||
/// touch output file on successful check
|
||||
output: Option<PathBuf>,
|
||||
|
@ -27,16 +29,23 @@ pub struct Args {
|
|||
const DEFAULT_BUF_SIZE: usize = 8192;
|
||||
|
||||
pub fn run(args: Args) -> Result<()> {
|
||||
let file = File::open(&args.file)
|
||||
.with_context(|| format!("Failed to open file '{}'", args.file.display()))?;
|
||||
for path in process_rsp(&args.files)? {
|
||||
let file = File::open(&path)
|
||||
.with_context(|| format!("Failed to open file '{}'", path.display()))?;
|
||||
if args.check {
|
||||
check(args, file)
|
||||
check(file)?
|
||||
} else {
|
||||
hash(args, file)
|
||||
hash(file, &path)?
|
||||
}
|
||||
}
|
||||
if let Some(out_path) = args.output {
|
||||
touch(&out_path)
|
||||
.with_context(|| format!("Failed to touch output file '{}'", out_path.display()))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check(args: Args, file: File) -> Result<()> {
|
||||
fn check(file: File) -> Result<()> {
|
||||
let reader = BufReader::new(file);
|
||||
let mut mismatches = 0usize;
|
||||
for line in reader.lines() {
|
||||
|
@ -68,19 +77,15 @@ fn check(args: Args, file: File) -> Result<()> {
|
|||
eprintln!("WARNING: {mismatches} computed checksum did NOT match");
|
||||
std::process::exit(1);
|
||||
}
|
||||
if let Some(out_path) = args.output {
|
||||
touch(&out_path)
|
||||
.with_context(|| format!("Failed to touch output file '{}'", out_path.display()))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn hash(args: Args, file: File) -> Result<()> {
|
||||
fn hash(file: File, path: &Path) -> Result<()> {
|
||||
let hash = file_sha1(file)?;
|
||||
let mut hash_buf = [0u8; 40];
|
||||
let hash_str = base16ct::lower::encode_str(&hash, &mut hash_buf)
|
||||
.map_err(|e| anyhow!("Failed to encode hash: {e}"))?;
|
||||
println!("{} {}", hash_str, args.file.display());
|
||||
println!("{} {}", hash_str, path.display());
|
||||
Ok(())
|
||||
}
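// file_sha1 is used above but defined outside this hunk; a straightforward version
// matching DEFAULT_BUF_SIZE would stream the file into the hasher in chunks. This
// is a sketch only; the real helper may return a fixed-size array instead.
#[allow(dead_code)]
fn file_sha1_sketch(mut file: File) -> Result<Vec<u8>> {
    use std::io::Read;
    let mut hasher = Sha1::new();
    let mut buf = [0u8; DEFAULT_BUF_SIZE];
    loop {
        let n = file.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }
    Ok(hasher.finalize().to_vec())
}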
|
||||
|
||||
|
|
11  src/main.rs
|
@@ -1,3 +1,6 @@
|
|||
#![feature(seek_stream_len)]
|
||||
use std::io::Write;
|
||||
|
||||
use argh::FromArgs;
|
||||
|
||||
pub mod analysis;
|
||||
|
@ -22,7 +25,7 @@ enum SubCommand {
|
|||
Dwarf(cmd::dwarf::Args),
|
||||
Elf(cmd::elf::Args),
|
||||
Elf2Dol(cmd::elf2dol::Args),
|
||||
Map(cmd::map::Args),
|
||||
// Map(cmd::map::Args),
|
||||
MetroidBuildInfo(cmd::metroidbuildinfo::Args),
|
||||
Rel(cmd::rel::Args),
|
||||
Rso(cmd::rso::Args),
|
||||
|
@ -30,7 +33,9 @@ enum SubCommand {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
|
||||
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
|
||||
.format(|f, r| writeln!(f, "[{}] {}", r.level(), r.args()))
|
||||
.init();
|
||||
|
||||
let args: TopLevel = argh_version::from_env();
|
||||
let result = match args.command {
|
||||
|
@ -40,7 +45,7 @@ fn main() {
|
|||
SubCommand::Dwarf(c_args) => cmd::dwarf::run(c_args),
|
||||
SubCommand::Elf(c_args) => cmd::elf::run(c_args),
|
||||
SubCommand::Elf2Dol(c_args) => cmd::elf2dol::run(c_args),
|
||||
SubCommand::Map(c_args) => cmd::map::run(c_args),
|
||||
// SubCommand::Map(c_args) => cmd::map::run(c_args),
|
||||
SubCommand::MetroidBuildInfo(c_args) => cmd::metroidbuildinfo::run(c_args),
|
||||
SubCommand::Rel(c_args) => cmd::rel::run(c_args),
|
||||
SubCommand::Rso(c_args) => cmd::rso::run(c_args),
|
||||
|
|
410  src/obj/mod.rs
|
@@ -3,16 +3,17 @@ pub mod split;
|
|||
|
||||
use std::{
|
||||
cmp::min,
|
||||
collections::{btree_map, BTreeMap},
|
||||
collections::{btree_map, BTreeMap, HashMap},
|
||||
hash::{Hash, Hasher},
|
||||
ops::{Range, RangeBounds},
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use anyhow::{anyhow, bail, ensure, Result};
|
||||
use flagset::{flags, FlagSet};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_repr::{Deserialize_repr, Serialize_repr};
|
||||
|
||||
use crate::util::{nested::NestedVec, rel::RelReloc};
|
||||
use crate::util::{comment::MWComment, nested::NestedVec, rel::RelReloc};
|
||||
|
||||
flags! {
|
||||
#[repr(u8)]
|
||||
|
@ -23,14 +24,18 @@ flags! {
|
|||
Weak,
|
||||
Common,
|
||||
Hidden,
|
||||
ForceActive,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Serialize, Deserialize)]
|
||||
pub struct ObjSymbolFlagSet(pub FlagSet<ObjSymbolFlags>);
|
||||
#[allow(clippy::derive_hash_xor_eq)]
|
||||
|
||||
#[allow(clippy::derived_hash_with_manual_eq)]
|
||||
impl Hash for ObjSymbolFlagSet {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) { self.0.bits().hash(state) }
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub enum ObjSectionKind {
|
||||
Code,
|
||||
|
@ -38,6 +43,7 @@ pub enum ObjSectionKind {
|
|||
ReadOnlyData,
|
||||
Bss,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ObjSection {
|
||||
pub name: String,
|
||||
|
@ -54,6 +60,7 @@ pub struct ObjSection {
|
|||
pub file_offset: u64,
|
||||
pub section_known: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Default, Serialize, Deserialize)]
|
||||
pub enum ObjSymbolKind {
|
||||
#[default]
|
||||
|
@ -62,7 +69,24 @@ pub enum ObjSymbolKind {
|
|||
Object,
|
||||
Section,
|
||||
}
|
||||
#[derive(Debug, Clone, Default)]
|
||||
|
||||
#[derive(Debug, Copy, Clone, Default, PartialEq, Eq)]
|
||||
pub enum ObjDataKind {
|
||||
#[default]
|
||||
Unknown,
|
||||
Byte,
|
||||
Byte2,
|
||||
Byte4,
|
||||
Byte8,
|
||||
Float,
|
||||
Double,
|
||||
String,
|
||||
String16,
|
||||
StringTable,
|
||||
String16Table,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default, Eq, PartialEq)]
|
||||
pub struct ObjSymbol {
|
||||
pub name: String,
|
||||
pub demangled_name: Option<String>,
|
||||
|
@ -72,7 +96,10 @@ pub struct ObjSymbol {
|
|||
pub size_known: bool,
|
||||
pub flags: ObjSymbolFlagSet,
|
||||
pub kind: ObjSymbolKind,
|
||||
pub align: Option<u32>,
|
||||
pub data_kind: ObjDataKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub enum ObjKind {
|
||||
/// Fully linked object
|
||||
|
@ -80,18 +107,38 @@ pub enum ObjKind {
|
|||
/// Relocatable object
|
||||
Relocatable,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub enum ObjArchitecture {
|
||||
PowerPc,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct ObjSplit {
|
||||
pub unit: String,
|
||||
pub end: u32,
|
||||
pub align: Option<u32>,
|
||||
pub common: bool,
|
||||
}
|
||||
|
||||
type SymbolIndex = usize;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ObjSymbols {
|
||||
symbols: Vec<ObjSymbol>,
|
||||
symbols_by_address: BTreeMap<u32, Vec<SymbolIndex>>,
|
||||
symbols_by_name: HashMap<String, Vec<SymbolIndex>>,
|
||||
}
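Not part of the diff: a minimal standalone sketch of the address-index idea behind ObjSymbols (symbols kept in a flat Vec, with a BTreeMap from address to indices for ordered range queries). The names and addresses below are made up.

use std::collections::BTreeMap;

fn main() {
    // Hypothetical symbols: (name, address); indices point into the flat Vec.
    let symbols = vec![("_start", 0x8000_3100u32), ("main", 0x8000_31A0), ("helper", 0x8000_31A0)];
    let mut by_address: BTreeMap<u32, Vec<usize>> = BTreeMap::new();
    for (idx, &(_, addr)) in symbols.iter().enumerate() {
        by_address.entry(addr).or_default().push(idx);
    }
    // Ordered range query, analogous to ObjSymbols::for_range below.
    for (addr, idxs) in by_address.range(0x8000_3000..0x8000_3200) {
        for &i in idxs {
            println!("{:#010X} {}", addr, symbols[i].0);
        }
    }
}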
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ObjInfo {
|
||||
pub kind: ObjKind,
|
||||
pub architecture: ObjArchitecture,
|
||||
pub name: String,
|
||||
pub symbols: Vec<ObjSymbol>,
|
||||
pub symbols: ObjSymbols,
|
||||
pub sections: Vec<ObjSection>,
|
||||
pub entry: u64,
|
||||
pub mw_comment: MWComment,
|
||||
|
||||
// Linker generated
|
||||
pub sda2_base: Option<u32>,
|
||||
|
@@ -103,9 +150,10 @@ pub struct ObjInfo {
|
|||
pub arena_hi: Option<u32>,
|
||||
|
||||
// Extracted
|
||||
pub splits: BTreeMap<u32, Vec<String>>,
|
||||
pub splits: BTreeMap<u32, Vec<ObjSplit>>,
|
||||
pub named_sections: BTreeMap<u32, String>,
|
||||
pub link_order: Vec<String>,
|
||||
pub blocked_ranges: BTreeMap<u32, u32>, // start -> end
|
||||
|
||||
// From extab
|
||||
pub known_functions: BTreeMap<u32, u32>,
|
||||
|
@ -115,6 +163,7 @@ pub struct ObjInfo {
|
|||
pub module_id: u32,
|
||||
pub unresolved_relocations: Vec<RelReloc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
|
||||
pub enum ObjRelocKind {
|
||||
Absolute,
|
||||
|
@ -125,42 +174,289 @@ pub enum ObjRelocKind {
|
|||
PpcRel14,
|
||||
PpcEmbSda21,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ObjReloc {
|
||||
pub kind: ObjRelocKind,
|
||||
pub address: u64,
|
||||
pub target_symbol: usize,
|
||||
pub target_symbol: SymbolIndex,
|
||||
pub addend: i64,
|
||||
}
|
||||
|
||||
impl ObjInfo {
|
||||
pub fn symbols_for_section(
|
||||
&self,
|
||||
section_idx: usize,
|
||||
) -> impl Iterator<Item = (usize, &ObjSymbol)> {
|
||||
self.symbols
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(move |&(_, symbol)| symbol.section == Some(section_idx))
|
||||
impl ObjSymbols {
|
||||
pub fn new(symbols: Vec<ObjSymbol>) -> Self {
|
||||
let mut symbols_by_address = BTreeMap::<u32, Vec<SymbolIndex>>::new();
|
||||
let mut symbols_by_name = HashMap::<String, Vec<SymbolIndex>>::new();
|
||||
for (idx, symbol) in symbols.iter().enumerate() {
|
||||
symbols_by_address.nested_push(symbol.address as u32, idx);
|
||||
if !symbol.name.is_empty() {
|
||||
symbols_by_name.nested_push(symbol.name.clone(), idx);
|
||||
}
|
||||
}
|
||||
Self { symbols, symbols_by_address, symbols_by_name }
|
||||
}
|
||||
|
||||
pub fn build_symbol_map(&self, section_idx: usize) -> Result<BTreeMap<u32, Vec<usize>>> {
|
||||
let mut symbols = BTreeMap::<u32, Vec<usize>>::new();
|
||||
for (symbol_idx, symbol) in self.symbols_for_section(section_idx) {
|
||||
symbols.nested_push(symbol.address as u32, symbol_idx);
|
||||
pub fn add(&mut self, in_symbol: ObjSymbol, replace: bool) -> Result<SymbolIndex> {
|
||||
let opt = self.at_address(in_symbol.address as u32).find(|(_, symbol)| {
|
||||
(symbol.kind == in_symbol.kind ||
|
||||
// Replace lbl_* with real symbols
|
||||
(symbol.kind == ObjSymbolKind::Unknown && symbol.name.starts_with("lbl_")))
|
||||
// Hack to avoid replacing different ABS symbols
|
||||
&& (symbol.section.is_some() || symbol.name == in_symbol.name)
|
||||
});
|
||||
let target_symbol_idx = if let Some((symbol_idx, existing)) = opt {
|
||||
let size =
|
||||
if existing.size_known && in_symbol.size_known && existing.size != in_symbol.size {
|
||||
log::warn!(
|
||||
"Conflicting size for {}: was {:#X}, now {:#X}",
|
||||
existing.name,
|
||||
existing.size,
|
||||
in_symbol.size
|
||||
);
|
||||
if replace {
|
||||
in_symbol.size
|
||||
} else {
|
||||
existing.size
|
||||
}
|
||||
Ok(symbols)
|
||||
} else if in_symbol.size_known {
|
||||
in_symbol.size
|
||||
} else {
|
||||
existing.size
|
||||
};
|
||||
if !replace {
|
||||
// Not replacing the existing symbol, but update its size if it was unknown
|
||||
if in_symbol.size_known && !existing.size_known {
|
||||
self.replace(symbol_idx, ObjSymbol {
|
||||
size: in_symbol.size,
|
||||
size_known: true,
|
||||
..existing.clone()
|
||||
})?;
|
||||
}
|
||||
return Ok(symbol_idx);
|
||||
}
|
||||
let new_symbol = ObjSymbol {
|
||||
name: in_symbol.name,
|
||||
demangled_name: in_symbol.demangled_name,
|
||||
address: in_symbol.address,
|
||||
section: in_symbol.section,
|
||||
size,
|
||||
size_known: existing.size_known || in_symbol.size != 0,
|
||||
flags: in_symbol.flags,
|
||||
kind: in_symbol.kind,
|
||||
align: in_symbol.align.or(existing.align),
|
||||
data_kind: match in_symbol.data_kind {
|
||||
ObjDataKind::Unknown => existing.data_kind,
|
||||
kind => kind,
|
||||
},
|
||||
};
|
||||
if existing != &new_symbol {
|
||||
log::debug!("Replacing {:?} with {:?}", existing, new_symbol);
|
||||
self.replace(symbol_idx, new_symbol)?;
|
||||
}
|
||||
symbol_idx
|
||||
} else {
|
||||
let target_symbol_idx = self.symbols.len();
|
||||
self.add_direct(ObjSymbol {
|
||||
name: in_symbol.name,
|
||||
demangled_name: in_symbol.demangled_name,
|
||||
address: in_symbol.address,
|
||||
section: in_symbol.section,
|
||||
size: in_symbol.size,
|
||||
size_known: in_symbol.size != 0,
|
||||
flags: in_symbol.flags,
|
||||
kind: in_symbol.kind,
|
||||
align: in_symbol.align,
|
||||
data_kind: in_symbol.data_kind,
|
||||
})?;
|
||||
target_symbol_idx
|
||||
};
|
||||
Ok(target_symbol_idx)
|
||||
}
|
||||
|
||||
pub fn add_direct(&mut self, in_symbol: ObjSymbol) -> Result<SymbolIndex> {
|
||||
let symbol_idx = self.symbols.len();
|
||||
self.symbols_by_address.nested_push(in_symbol.address as u32, symbol_idx);
|
||||
if !in_symbol.name.is_empty() {
|
||||
self.symbols_by_name.nested_push(in_symbol.name.clone(), symbol_idx);
|
||||
}
|
||||
self.symbols.push(in_symbol);
|
||||
Ok(symbol_idx)
|
||||
}
|
||||
|
||||
pub fn at(&self, symbol_idx: SymbolIndex) -> &ObjSymbol { &self.symbols[symbol_idx] }
|
||||
|
||||
pub fn address_of(&self, symbol_idx: SymbolIndex) -> u64 { self.symbols[symbol_idx].address }
|
||||
|
||||
pub fn iter(&self) -> impl DoubleEndedIterator<Item = &ObjSymbol> { self.symbols.iter() }
|
||||
|
||||
pub fn count(&self) -> usize { self.symbols.len() }
|
||||
|
||||
pub fn at_address(
|
||||
&self,
|
||||
addr: u32,
|
||||
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
|
||||
self.symbols_by_address
|
||||
.get(&addr)
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.map(move |&idx| (idx, &self.symbols[idx]))
|
||||
}
|
||||
|
||||
pub fn kind_at_address(
|
||||
&self,
|
||||
addr: u32,
|
||||
kind: ObjSymbolKind,
|
||||
) -> Result<Option<(SymbolIndex, &ObjSymbol)>> {
|
||||
let (count, result) = self
|
||||
.at_address(addr)
|
||||
.filter(|(_, sym)| sym.kind == kind)
|
||||
.fold((0, None), |(i, _), v| (i + 1, Some(v)));
|
||||
ensure!(count <= 1, "Multiple symbols of kind {:?} at address {:#010X}", kind, addr);
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
pub fn for_range<R>(
|
||||
&self,
|
||||
range: R,
|
||||
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)>
|
||||
where
|
||||
R: RangeBounds<u32>,
|
||||
{
|
||||
self.symbols_by_address
|
||||
.range(range)
|
||||
.flat_map(move |(_, v)| v.iter().map(move |u| (*u, &self.symbols[*u])))
|
||||
}
|
||||
|
||||
pub fn indexes_for_range<R>(
|
||||
&self,
|
||||
range: R,
|
||||
) -> impl DoubleEndedIterator<Item = (u32, &[SymbolIndex])>
|
||||
where
|
||||
R: RangeBounds<u32>,
|
||||
{
|
||||
self.symbols_by_address.range(range).map(|(k, v)| (*k, v.as_ref()))
|
||||
}
|
||||
|
||||
pub fn for_section(
|
||||
&self,
|
||||
section: &ObjSection,
|
||||
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
|
||||
let section_index = section.index;
|
||||
self.for_range(section.address as u32..(section.address + section.size) as u32)
|
||||
// TODO required?
|
||||
.filter(move |(_, symbol)| symbol.section == Some(section_index))
|
||||
}
|
||||
|
||||
pub fn for_name(
|
||||
&self,
|
||||
name: &str,
|
||||
) -> impl DoubleEndedIterator<Item = (SymbolIndex, &ObjSymbol)> {
|
||||
self.symbols_by_name
|
||||
.get(name)
|
||||
.into_iter()
|
||||
.flat_map(move |v| v.iter().map(move |u| (*u, &self.symbols[*u])))
|
||||
}
|
||||
|
||||
pub fn by_name(&self, name: &str) -> Result<Option<(SymbolIndex, &ObjSymbol)>> {
|
||||
let mut iter = self.for_name(name);
|
||||
let result = iter.next();
|
||||
if let Some((index, symbol)) = result {
|
||||
if let Some((other_index, other_symbol)) = iter.next() {
|
||||
bail!(
|
||||
"Multiple symbols with name {}: {} {:?} {:#010X} and {} {:?} {:#010X}",
|
||||
name,
|
||||
index,
|
||||
symbol.kind,
|
||||
symbol.address,
|
||||
other_index,
|
||||
other_symbol.kind,
|
||||
other_symbol.address
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
pub fn by_kind(&self, kind: ObjSymbolKind) -> impl Iterator<Item = (SymbolIndex, &ObjSymbol)> {
|
||||
self.symbols.iter().enumerate().filter(move |(_, sym)| sym.kind == kind)
|
||||
}
|
||||
|
||||
pub fn replace(&mut self, index: SymbolIndex, symbol: ObjSymbol) -> Result<()> {
|
||||
let symbol_ref = &mut self.symbols[index];
|
||||
ensure!(symbol_ref.address == symbol.address, "Can't modify address with replace_symbol");
|
||||
if symbol_ref.name != symbol.name {
|
||||
if !symbol_ref.name.is_empty() {
|
||||
self.symbols_by_name.nested_remove(&symbol_ref.name, &index);
|
||||
}
|
||||
if !symbol.name.is_empty() {
|
||||
self.symbols_by_name.nested_push(symbol.name.clone(), index);
|
||||
}
|
||||
}
|
||||
*symbol_ref = symbol;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ObjInfo {
|
||||
pub fn new(
|
||||
kind: ObjKind,
|
||||
architecture: ObjArchitecture,
|
||||
name: String,
|
||||
symbols: Vec<ObjSymbol>,
|
||||
sections: Vec<ObjSection>,
|
||||
) -> Self {
|
||||
Self {
|
||||
kind,
|
||||
architecture,
|
||||
name,
|
||||
symbols: ObjSymbols::new(symbols),
|
||||
sections,
|
||||
entry: 0,
|
||||
mw_comment: Default::default(),
|
||||
sda2_base: None,
|
||||
sda_base: None,
|
||||
stack_address: None,
|
||||
stack_end: None,
|
||||
db_stack_addr: None,
|
||||
arena_lo: None,
|
||||
arena_hi: None,
|
||||
splits: Default::default(),
|
||||
named_sections: Default::default(),
|
||||
link_order: vec![],
|
||||
blocked_ranges: Default::default(),
|
||||
known_functions: Default::default(),
|
||||
module_id: 0,
|
||||
unresolved_relocations: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_symbol(&mut self, in_symbol: ObjSymbol, replace: bool) -> Result<SymbolIndex> {
|
||||
match in_symbol.name.as_str() {
|
||||
"_SDA_BASE_" => self.sda_base = Some(in_symbol.address as u32),
|
||||
"_SDA2_BASE_" => self.sda2_base = Some(in_symbol.address as u32),
|
||||
"_stack_addr" => self.stack_address = Some(in_symbol.address as u32),
|
||||
"_stack_end" => self.stack_end = Some(in_symbol.address as u32),
|
||||
"_db_stack_addr" => self.db_stack_addr = Some(in_symbol.address as u32),
|
||||
"__ArenaLo" => self.arena_lo = Some(in_symbol.address as u32),
|
||||
"__ArenaHi" => self.arena_hi = Some(in_symbol.address as u32),
|
||||
_ => {}
|
||||
}
|
||||
self.symbols.add(in_symbol, replace)
|
||||
}
|
||||
|
||||
pub fn section_at(&self, addr: u32) -> Result<&ObjSection> {
|
||||
self.sections
|
||||
.iter()
|
||||
.find(|&section| {
|
||||
(addr as u64) >= section.address && (addr as u64) < section.address + section.size
|
||||
})
|
||||
.find(|s| s.contains(addr))
|
||||
.ok_or_else(|| anyhow!("Failed to locate section @ {:#010X}", addr))
|
||||
}
|
||||
|
||||
pub fn section_for(&self, range: Range<u32>) -> Result<&ObjSection> {
|
||||
self.sections.iter().find(|s| s.contains_range(range.clone())).ok_or_else(|| {
|
||||
anyhow!("Failed to locate section @ {:#010X}-{:#010X}", range.start, range.end)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn section_data(&self, start: u32, end: u32) -> Result<(&ObjSection, &[u8])> {
|
||||
let section = self.section_at(start)?;
|
||||
let data = if end == 0 {
|
||||
|
@ -171,20 +467,76 @@ impl ObjInfo {
|
|||
};
|
||||
Ok((section, data))
|
||||
}
|
||||
|
||||
/// Locate an existing split for the given address.
|
||||
pub fn split_for(&self, address: u32) -> Option<(u32, &ObjSplit)> {
|
||||
match self.splits_for_range(..=address).last() {
|
||||
Some((addr, split)) if split.end == 0 || split.end > address => Some((addr, split)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Locate existing splits within the given address range.
|
||||
pub fn splits_for_range<R>(&self, range: R) -> impl Iterator<Item = (u32, &ObjSplit)>
|
||||
where R: RangeBounds<u32> {
|
||||
self.splits.range(range).flat_map(|(addr, v)| v.iter().map(move |u| (*addr, u)))
|
||||
}
|
||||
|
||||
pub fn add_split(&mut self, address: u32, split: ObjSplit) {
|
||||
log::debug!("Adding split @ {:#010X}: {:?}", address, split);
|
||||
// TODO merge with preceding split if possible
|
||||
self.splits.entry(address).or_default().push(split);
|
||||
}
|
||||
}
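Not part of the diff: a rough sketch of the split_for lookup above, using a plain map with hypothetical start/end/unit values (end == 0 meaning open-ended), to show why the last split at or before the address is taken and then checked against its end.

use std::collections::BTreeMap;

fn main() {
    // Hypothetical splits: start address -> (end address, unit name).
    let mut splits = BTreeMap::new();
    splits.insert(0x8000_0100u32, (0x8000_0200u32, "a.cpp"));
    splits.insert(0x8000_0200u32, (0x8000_0300u32, "b.cpp"));

    let address = 0x8000_0250u32;
    // Last split starting at or before the address, kept only if it still covers it.
    let found = splits
        .range(..=address)
        .last()
        .filter(|(_, (end, _))| *end == 0 || *end > address)
        .map(|(start, (_, unit))| (*start, *unit));
    assert_eq!(found, Some((0x8000_0200, "b.cpp")));
}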
|
||||
|
||||
impl ObjSection {
|
||||
pub fn build_relocation_map(&self) -> Result<BTreeMap<u32, ObjReloc>> {
|
||||
let mut relocations = BTreeMap::<u32, ObjReloc>::new();
|
||||
for reloc in &self.relocations {
|
||||
pub fn build_relocation_map(&self) -> Result<BTreeMap<u32, usize>> {
|
||||
let mut relocations = BTreeMap::new();
|
||||
for (idx, reloc) in self.relocations.iter().enumerate() {
|
||||
let address = reloc.address as u32;
|
||||
match relocations.entry(address) {
|
||||
btree_map::Entry::Vacant(e) => {
|
||||
e.insert(reloc.clone());
|
||||
e.insert(idx);
|
||||
}
|
||||
btree_map::Entry::Occupied(_) => bail!("Duplicate relocation @ {address:#010X}"),
|
||||
}
|
||||
}
|
||||
Ok(relocations)
|
||||
}
|
||||
|
||||
pub fn build_relocation_map_cloned(&self) -> Result<BTreeMap<u32, ObjReloc>> {
|
||||
let mut relocations = BTreeMap::new();
|
||||
for reloc in self.relocations.iter().cloned() {
|
||||
let address = reloc.address as u32;
|
||||
match relocations.entry(address) {
|
||||
btree_map::Entry::Vacant(e) => {
|
||||
e.insert(reloc);
|
||||
}
|
||||
btree_map::Entry::Occupied(_) => bail!("Duplicate relocation @ {address:#010X}"),
|
||||
}
|
||||
}
|
||||
Ok(relocations)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn contains(&self, addr: u32) -> bool {
|
||||
(self.address..self.address + self.size).contains(&(addr as u64))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn contains_range(&self, range: Range<u32>) -> bool {
|
||||
(range.start as u64) >= self.address && (range.end as u64) <= self.address + self.size
|
||||
}
|
||||
}
|
||||
|
||||
pub fn section_kind_for_section(section_name: &str) -> Result<ObjSectionKind> {
|
||||
Ok(match section_name {
|
||||
".init" | ".text" | ".dbgtext" | ".vmtext" => ObjSectionKind::Code,
|
||||
".ctors" | ".dtors" | ".rodata" | ".sdata2" | "extab" | "extabindex" => {
|
||||
ObjSectionKind::ReadOnlyData
|
||||
}
|
||||
".bss" | ".sbss" | ".sbss2" => ObjSectionKind::Bss,
|
||||
".data" | ".sdata" => ObjSectionKind::Data,
|
||||
name => bail!("Unknown section {name}"),
|
||||
})
|
||||
}
|
||||
@@ -13,7 +13,8 @@ use crate::{
|
|||
analysis::tracker::{Relocation, Tracker},
|
||||
array_ref,
|
||||
obj::{
|
||||
ObjInfo, ObjReloc, ObjRelocKind, ObjSectionKind, ObjSymbol, ObjSymbolFlagSet, ObjSymbolKind,
|
||||
section_kind_for_section, ObjInfo, ObjReloc, ObjRelocKind, ObjSymbol, ObjSymbolFlagSet,
|
||||
ObjSymbolKind,
|
||||
},
|
||||
util::elf::process_elf,
|
||||
};
|
||||
|
@@ -112,15 +113,7 @@ pub fn apply_symbol(obj: &mut ObjInfo, target: u32, sig_symbol: &OutSymbol) -> R
|
|||
if !target_section.section_known {
|
||||
if let Some(section_name) = &sig_symbol.section {
|
||||
target_section.name = section_name.clone();
|
||||
target_section.kind = match section_name.as_str() {
|
||||
".init" | ".text" | ".dbgtext" => ObjSectionKind::Code,
|
||||
".ctors" | ".dtors" | ".rodata" | ".sdata2" | "extab" | "extabindex" => {
|
||||
ObjSectionKind::ReadOnlyData
|
||||
}
|
||||
".bss" | ".sbss" | ".sbss2" => ObjSectionKind::Bss,
|
||||
".data" | ".sdata" => ObjSectionKind::Data,
|
||||
name => bail!("Unknown section {name}"),
|
||||
};
|
||||
target_section.kind = section_kind_for_section(section_name)?;
|
||||
target_section.section_known = true;
|
||||
}
|
||||
}
|
||||
|
@@ -131,49 +124,22 @@ pub fn apply_symbol(obj: &mut ObjInfo, target: u32, sig_symbol: &OutSymbol) -> R
|
|||
// Hack to mark linker generated symbols as ABS
|
||||
target_section_index = None;
|
||||
}
|
||||
let target_symbol_idx = if let Some((symbol_idx, existing)) =
|
||||
obj.symbols.iter_mut().enumerate().find(|(_, symbol)| {
|
||||
symbol.address == target as u64
|
||||
&& symbol.kind == sig_symbol.kind
|
||||
// Hack to avoid replacing different ABS symbols
|
||||
&& (symbol.section.is_some() || symbol.name == sig_symbol.name)
|
||||
}) {
|
||||
log::debug!("Replacing {:?} with {:?}", existing, sig_symbol);
|
||||
*existing = ObjSymbol {
|
||||
let demangled_name = demangle(&sig_symbol.name, &DemangleOptions::default());
|
||||
let target_symbol_idx = obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: sig_symbol.name.clone(),
|
||||
demangled_name: demangle(&sig_symbol.name, &DemangleOptions::default()),
|
||||
address: target as u64,
|
||||
section: target_section_index,
|
||||
size: if sig_symbol.size == 0 { existing.size } else { sig_symbol.size as u64 },
|
||||
size_known: existing.size_known || sig_symbol.size != 0,
|
||||
flags: sig_symbol.flags,
|
||||
kind: sig_symbol.kind,
|
||||
};
|
||||
symbol_idx
|
||||
} else {
|
||||
let target_symbol_idx = obj.symbols.len();
|
||||
obj.symbols.push(ObjSymbol {
|
||||
name: sig_symbol.name.clone(),
|
||||
demangled_name: demangle(&sig_symbol.name, &DemangleOptions::default()),
|
||||
demangled_name,
|
||||
address: target as u64,
|
||||
section: target_section_index,
|
||||
size: sig_symbol.size as u64,
|
||||
size_known: sig_symbol.size != 0,
|
||||
size_known: sig_symbol.size > 0 || sig_symbol.kind == ObjSymbolKind::Unknown,
|
||||
flags: sig_symbol.flags,
|
||||
kind: sig_symbol.kind,
|
||||
});
|
||||
target_symbol_idx
|
||||
};
|
||||
match sig_symbol.name.as_str() {
|
||||
"_SDA_BASE_" => obj.sda_base = Some(target),
|
||||
"_SDA2_BASE_" => obj.sda2_base = Some(target),
|
||||
"_stack_addr" => obj.stack_address = Some(target),
|
||||
"_stack_end" => obj.stack_end = Some(target),
|
||||
"_db_stack_addr" => obj.db_stack_addr = Some(target),
|
||||
"__ArenaLo" => obj.arena_lo = Some(target),
|
||||
"__ArenaHi" => obj.arena_hi = Some(target),
|
||||
_ => {}
|
||||
}
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
},
|
||||
true,
|
||||
)?;
|
||||
Ok(target_symbol_idx)
|
||||
}
|
||||
|
||||
|
@@ -185,7 +151,7 @@ pub fn apply_signature(obj: &mut ObjInfo, addr: u32, signature: &FunctionSignatu
|
|||
for reloc in &signature.relocations {
|
||||
tracker.known_relocations.insert(addr + reloc.offset);
|
||||
}
|
||||
tracker.process_function(obj, &obj.symbols[symbol_idx])?;
|
||||
tracker.process_function(obj, obj.symbols.at(symbol_idx))?;
|
||||
for (&reloc_addr, reloc) in &tracker.relocations {
|
||||
if reloc_addr < addr || reloc_addr >= addr + in_symbol.size {
|
||||
continue;
|
||||
|
@@ -293,26 +259,20 @@ pub fn generate_signature(path: &Path, symbol_name: &str) -> Result<Option<Funct
|
|||
}
|
||||
let mut tracker = Tracker::new(&obj);
|
||||
// tracker.ignore_addresses.insert(0x80004000);
|
||||
for symbol in &obj.symbols {
|
||||
if symbol.kind != ObjSymbolKind::Function {
|
||||
continue;
|
||||
}
|
||||
for (_, symbol) in obj.symbols.by_kind(ObjSymbolKind::Function) {
|
||||
if symbol.name != symbol_name && symbol.name != symbol_name.replace("TRK", "TRK_") {
|
||||
continue;
|
||||
}
|
||||
tracker.process_function(&obj, symbol)?;
|
||||
}
|
||||
tracker.apply(&mut obj, true)?; // true
|
||||
for symbol in &obj.symbols {
|
||||
if symbol.kind != ObjSymbolKind::Function {
|
||||
continue;
|
||||
}
|
||||
for (_, symbol) in obj.symbols.by_kind(ObjSymbolKind::Function) {
|
||||
if symbol.name != symbol_name && symbol.name != symbol_name.replace("TRK", "TRK_") {
|
||||
continue;
|
||||
}
|
||||
let section_idx = symbol.section.unwrap();
|
||||
let section = &obj.sections[section_idx];
|
||||
let out_symbol_idx = out_symbols.len();
|
||||
// let out_symbol_idx = out_symbols.len();
|
||||
out_symbols.push(OutSymbol {
|
||||
kind: symbol.kind,
|
||||
name: symbol.name.clone(),
|
||||
|
@ -334,10 +294,11 @@ pub fn generate_signature(path: &Path, symbol_name: &str) -> Result<Option<Funct
|
|||
.collect::<Vec<(u32, u32)>>();
|
||||
for (idx, (ins, pat)) in instructions.iter_mut().enumerate() {
|
||||
let addr = (symbol.address as usize + idx * 4) as u32;
|
||||
if let Some(reloc) = relocations.get(&addr) {
|
||||
if let Some(&reloc_idx) = relocations.get(&addr) {
|
||||
let reloc = &section.relocations[reloc_idx];
|
||||
let symbol_idx = match symbol_map.entry(reloc.target_symbol) {
|
||||
btree_map::Entry::Vacant(e) => {
|
||||
let target = &obj.symbols[reloc.target_symbol];
|
||||
let target = obj.symbols.at(reloc.target_symbol);
|
||||
let symbol_idx = out_symbols.len();
|
||||
e.insert(symbol_idx);
|
||||
out_symbols.push(OutSymbol {
|
||||
|
@@ -363,19 +324,19 @@ pub fn generate_signature(path: &Path, symbol_name: &str) -> Result<Option<Funct
|
|||
ObjRelocKind::PpcAddr16Hi
|
||||
| ObjRelocKind::PpcAddr16Ha
|
||||
| ObjRelocKind::PpcAddr16Lo => {
|
||||
*ins = *ins & !0xFFFF;
|
||||
*ins &= !0xFFFF;
|
||||
*pat = !0xFFFF;
|
||||
}
|
||||
ObjRelocKind::PpcRel24 => {
|
||||
*ins = *ins & !0x3FFFFFC;
|
||||
*ins &= !0x3FFFFFC;
|
||||
*pat = !0x3FFFFFC;
|
||||
}
|
||||
ObjRelocKind::PpcRel14 => {
|
||||
*ins = *ins & !0xFFFC;
|
||||
*ins &= !0xFFFC;
|
||||
*pat = !0xFFFC;
|
||||
}
|
||||
ObjRelocKind::PpcEmbSda21 => {
|
||||
*ins = *ins & !0x1FFFFF;
|
||||
*ins &= !0x1FFFFF;
|
||||
*pat = !0x1FFFFF;
|
||||
}
|
||||
}
|
||||
691 src/obj/split.rs
@@ -1,11 +1,426 @@
use std::{cmp::min, collections::HashMap};
|
||||
use std::{
|
||||
cmp::min,
|
||||
collections::{BTreeMap, HashMap, HashSet},
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, bail, ensure, Result};
|
||||
use itertools::Itertools;
|
||||
use topological_sort::TopologicalSort;
|
||||
|
||||
use crate::obj::{
|
||||
ObjArchitecture, ObjInfo, ObjKind, ObjReloc, ObjSection, ObjSectionKind, ObjSymbol,
|
||||
ObjArchitecture, ObjInfo, ObjKind, ObjReloc, ObjSection, ObjSectionKind, ObjSplit, ObjSymbol,
|
||||
ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
|
||||
};
|
||||
|
||||
/// Create splits for function pointers in the given section.
|
||||
fn split_ctors_dtors(obj: &mut ObjInfo, section_start: u32, section_end: u32) -> Result<()> {
|
||||
let mut new_splits = BTreeMap::new();
|
||||
let mut current_address = section_start;
|
||||
|
||||
while current_address < section_end {
|
||||
let (section, chunk) = obj.section_data(current_address, current_address + 4)?;
|
||||
let function_addr = u32::from_be_bytes(chunk[0..4].try_into().unwrap());
|
||||
log::debug!("Found {} entry: {:#010X}", section.name, function_addr);
|
||||
|
||||
let Some((_, function_symbol)) =
|
||||
obj.symbols.kind_at_address(function_addr, ObjSymbolKind::Function)?
|
||||
else {
|
||||
bail!("Failed to find function symbol @ {:#010X}", function_addr);
|
||||
};
|
||||
|
||||
let ctors_split = obj.split_for(current_address);
|
||||
let function_split = obj.split_for(function_addr);
|
||||
|
||||
let mut expected_unit = None;
|
||||
if let Some((_, ctors_split)) = ctors_split {
|
||||
expected_unit = Some(ctors_split.unit.clone());
|
||||
}
|
||||
if let Some((_, function_split)) = function_split {
|
||||
if let Some(unit) = &expected_unit {
|
||||
ensure!(
|
||||
unit == &function_split.unit,
|
||||
"Mismatched splits for {} {:#010X} ({}) and function {:#010X} ({})",
|
||||
section.name,
|
||||
current_address,
|
||||
unit,
|
||||
function_addr,
|
||||
function_split.unit
|
||||
);
|
||||
} else {
|
||||
expected_unit = Some(function_split.unit.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if ctors_split.is_none() || function_split.is_none() {
|
||||
let unit = expected_unit.unwrap_or_else(|| {
|
||||
let section_name = function_symbol
|
||||
.section
|
||||
.and_then(|idx| obj.sections.get(idx).map(|s| s.name.clone()))
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
format!("{}_{}", function_symbol.name, section_name)
|
||||
});
|
||||
log::debug!("Adding splits to unit {}", unit);
|
||||
|
||||
if ctors_split.is_none() {
|
||||
log::debug!("Adding split for {} entry @ {:#010X}", section.name, current_address);
|
||||
new_splits.insert(current_address, ObjSplit {
|
||||
unit: unit.clone(),
|
||||
end: current_address + 4,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
}
|
||||
if function_split.is_none() {
|
||||
log::debug!("Adding split for function @ {:#010X}", function_addr);
|
||||
new_splits.insert(function_addr, ObjSplit {
|
||||
unit,
|
||||
end: function_addr + function_symbol.size as u32,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
current_address += 4;
|
||||
}
|
||||
|
||||
for (addr, split) in new_splits {
|
||||
obj.add_split(addr, split);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
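Not part of the diff: a tiny sketch of what split_ctors_dtors reads per entry. Each .ctors/.dtors slot is a single big-endian function pointer; the bytes here are made up.

fn main() {
    // Hypothetical 4-byte .ctors entry pointing at 0x80045330.
    let chunk: [u8; 4] = [0x80, 0x04, 0x53, 0x30];
    let function_addr = u32::from_be_bytes(chunk);
    assert_eq!(function_addr, 0x8004_5330);
    println!("ctor entry -> {:#010X}", function_addr);
}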
|
||||
|
||||
/// Create splits for extabindex + extab entries.
|
||||
fn split_extabindex(obj: &mut ObjInfo, section_index: usize, section_start: u32) -> Result<()> {
|
||||
let mut new_splits = BTreeMap::new();
|
||||
let (_, eti_init_info) = obj
|
||||
.symbols
|
||||
.by_name("_eti_init_info")?
|
||||
.ok_or_else(|| anyhow!("Failed to find _eti_init_info symbol"))?;
|
||||
ensure!(
|
||||
eti_init_info.section == Some(section_index),
|
||||
"_eti_init_info symbol in the wrong section: {:?} != {}",
|
||||
eti_init_info.section,
|
||||
section_index
|
||||
);
|
||||
let mut current_address = section_start;
|
||||
let section_end = eti_init_info.address as u32;
|
||||
while current_address < section_end {
|
||||
let (_eti_section, chunk) = obj.section_data(current_address, current_address + 12)?;
|
||||
let function_addr = u32::from_be_bytes(chunk[0..4].try_into().unwrap());
|
||||
let function_size = u32::from_be_bytes(chunk[4..8].try_into().unwrap());
|
||||
let extab_addr = u32::from_be_bytes(chunk[8..12].try_into().unwrap());
|
||||
log::debug!(
|
||||
"Found extabindex entry: {:#010X} size {:#010X} extab {:#010X}",
|
||||
function_addr,
|
||||
function_size,
|
||||
extab_addr
|
||||
);
|
||||
|
||||
let Some((_, eti_symbol)) =
|
||||
obj.symbols.kind_at_address(current_address, ObjSymbolKind::Object)?
|
||||
else {
|
||||
bail!("Failed to find extabindex symbol @ {:#010X}", current_address);
|
||||
};
|
||||
ensure!(
|
||||
eti_symbol.size_known && eti_symbol.size == 12,
|
||||
"extabindex symbol {} has mismatched size ({:#X}, expected {:#X})",
|
||||
eti_symbol.name,
|
||||
eti_symbol.size,
|
||||
12
|
||||
);
|
||||
|
||||
let Some((_, function_symbol)) =
|
||||
obj.symbols.kind_at_address(function_addr, ObjSymbolKind::Function)?
|
||||
else {
|
||||
bail!("Failed to find function symbol @ {:#010X}", function_addr);
|
||||
};
|
||||
ensure!(
|
||||
function_symbol.size_known && function_symbol.size == function_size as u64,
|
||||
"Function symbol {} has mismatched size ({:#X}, expected {:#X})",
|
||||
function_symbol.name,
|
||||
function_symbol.size,
|
||||
function_size
|
||||
);
|
||||
|
||||
let Some((_, extab_symbol)) =
|
||||
obj.symbols.kind_at_address(extab_addr, ObjSymbolKind::Object)?
|
||||
else {
|
||||
bail!("Failed to find extab symbol @ {:#010X}", extab_addr);
|
||||
};
|
||||
ensure!(
|
||||
extab_symbol.size_known && extab_symbol.size > 0,
|
||||
"extab symbol {} has unknown size",
|
||||
extab_symbol.name
|
||||
);
|
||||
|
||||
let extabindex_split = obj.split_for(current_address);
|
||||
let extab_split = obj.split_for(extab_addr);
|
||||
let function_split = obj.split_for(function_addr);
|
||||
|
||||
let mut expected_unit = None;
|
||||
if let Some((_, extabindex_split)) = extabindex_split {
|
||||
expected_unit = Some(extabindex_split.unit.clone());
|
||||
}
|
||||
if let Some((_, extab_split)) = extab_split {
|
||||
if let Some(unit) = &expected_unit {
|
||||
ensure!(
|
||||
unit == &extab_split.unit,
|
||||
"Mismatched splits for extabindex {:#010X} ({}) and extab {:#010X} ({})",
|
||||
current_address,
|
||||
unit,
|
||||
extab_addr,
|
||||
extab_split.unit
|
||||
);
|
||||
} else {
|
||||
expected_unit = Some(extab_split.unit.clone());
|
||||
}
|
||||
}
|
||||
if let Some((_, function_split)) = function_split {
|
||||
if let Some(unit) = &expected_unit {
|
||||
ensure!(
|
||||
unit == &function_split.unit,
|
||||
"Mismatched splits for extabindex {:#010X} ({}) and function {:#010X} ({})",
|
||||
current_address,
|
||||
unit,
|
||||
function_addr,
|
||||
function_split.unit
|
||||
);
|
||||
} else {
|
||||
expected_unit = Some(function_split.unit.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if extabindex_split.is_none() || extab_split.is_none() || function_split.is_none() {
|
||||
let unit = expected_unit.unwrap_or_else(|| {
|
||||
let section_name = function_symbol
|
||||
.section
|
||||
.and_then(|idx| obj.sections.get(idx).map(|s| s.name.clone()))
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
format!("{}_{}", function_symbol.name, section_name)
|
||||
});
|
||||
log::debug!("Adding splits to unit {}", unit);
|
||||
|
||||
if extabindex_split.is_none() {
|
||||
log::debug!("Adding split for extabindex entry @ {:#010X}", current_address);
|
||||
new_splits.insert(current_address, ObjSplit {
|
||||
unit: unit.clone(),
|
||||
end: current_address + 12,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
}
|
||||
if extab_split.is_none() {
|
||||
log::debug!("Adding split for extab @ {:#010X}", extab_addr);
|
||||
new_splits.insert(extab_addr, ObjSplit {
|
||||
unit: unit.clone(),
|
||||
end: extab_addr + extab_symbol.size as u32,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
}
|
||||
if function_split.is_none() {
|
||||
log::debug!("Adding split for function @ {:#010X}", function_addr);
|
||||
new_splits.insert(function_addr, ObjSplit {
|
||||
unit,
|
||||
end: function_addr + function_symbol.size as u32,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
current_address += 12;
|
||||
}
|
||||
|
||||
for (addr, split) in new_splits {
|
||||
obj.add_split(addr, split);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
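Not part of the diff: a sketch of the 12-byte extabindex entry layout parsed above (function address, function size, extab address); the values are invented for illustration.

fn main() {
    // Hypothetical extabindex entry.
    let chunk: [u8; 12] = [
        0x80, 0x04, 0x53, 0x30, // function @ 0x80045330
        0x00, 0x00, 0x00, 0x5C, // function size 0x5C
        0x80, 0x40, 0x12, 0x00, // extab @ 0x80401200
    ];
    let function_addr = u32::from_be_bytes(chunk[0..4].try_into().unwrap());
    let function_size = u32::from_be_bytes(chunk[4..8].try_into().unwrap());
    let extab_addr = u32::from_be_bytes(chunk[8..12].try_into().unwrap());
    assert_eq!((function_addr, function_size, extab_addr), (0x8004_5330, 0x5C, 0x8040_1200));
}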
|
||||
|
||||
/// Create splits for gaps between existing splits.
|
||||
fn create_gap_splits(obj: &mut ObjInfo) -> Result<()> {
|
||||
let mut new_splits = BTreeMap::new();
|
||||
|
||||
for (section_idx, section) in obj.sections.iter().enumerate() {
|
||||
let mut current_address = section.address as u32;
|
||||
let section_end = end_for_section(obj, section_idx)?;
|
||||
let mut file_iter = obj.splits_for_range(current_address..section_end).peekable();
|
||||
|
||||
log::debug!(
|
||||
"Checking splits for section {} ({:#010X}..{:#010X})",
|
||||
section.name,
|
||||
current_address,
|
||||
section_end
|
||||
);
|
||||
loop {
|
||||
if current_address >= section_end {
|
||||
break;
|
||||
}
|
||||
|
||||
let (split_start, split_end) = match file_iter.peek() {
|
||||
Some(&(addr, split)) => {
|
||||
log::debug!("Found split {} ({:#010X}..{:#010X})", split.unit, addr, split.end);
|
||||
(addr, split.end)
|
||||
}
|
||||
None => (section_end, 0),
|
||||
};
|
||||
ensure!(
|
||||
split_start >= current_address,
|
||||
"Split {:#010X}..{:#010X} overlaps with previous split",
|
||||
split_start,
|
||||
split_end
|
||||
);
|
||||
|
||||
if split_start > current_address {
|
||||
// Find any duplicate symbols in this range
|
||||
let mut new_split_end = split_start;
|
||||
let symbols = obj.symbols.for_range(current_address..split_start).collect_vec();
|
||||
let mut existing_symbols = HashSet::new();
|
||||
for (_, symbol) in symbols {
|
||||
// Sanity check? Maybe not required?
|
||||
ensure!(
|
||||
symbol.section == Some(section_idx),
|
||||
"Expected symbol {} to be in section {}",
|
||||
symbol.name,
|
||||
section_idx
|
||||
);
|
||||
if !existing_symbols.insert(symbol.name.clone()) {
|
||||
log::debug!(
|
||||
"Found duplicate symbol {} at {:#010X}",
|
||||
symbol.name,
|
||||
symbol.address
|
||||
);
|
||||
new_split_end = symbol.address as u32;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
log::debug!(
|
||||
"Creating split from {:#010X}..{:#010X}",
|
||||
current_address,
|
||||
new_split_end
|
||||
);
|
||||
let unit = format!("{:08X}_{}", current_address, section.name);
|
||||
new_splits.insert(current_address, ObjSplit {
|
||||
unit: unit.clone(),
|
||||
end: new_split_end,
|
||||
align: None,
|
||||
common: false,
|
||||
});
|
||||
current_address = new_split_end;
|
||||
continue;
|
||||
}
|
||||
|
||||
file_iter.next();
|
||||
if split_end > 0 {
|
||||
current_address = split_end;
|
||||
} else {
|
||||
let mut file_end = section_end;
|
||||
if let Some(&(next_addr, _next_split)) = file_iter.peek() {
|
||||
file_end = min(next_addr, section_end);
|
||||
}
|
||||
current_address = file_end;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add new splits
|
||||
for (addr, split) in new_splits {
|
||||
obj.add_split(addr, split);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Perform any necessary adjustments to allow relinking.
|
||||
/// This includes:
|
||||
/// - Ensuring .ctors & .dtors entries are split with their associated function
|
||||
/// - Ensuring extab & extabindex entries are split with their associated function
|
||||
/// - Creating splits for gaps between existing splits
|
||||
/// - Resolving a new object link order
|
||||
pub fn update_splits(obj: &mut ObjInfo) -> Result<()> {
|
||||
// Create splits for extab and extabindex entries
|
||||
if let Some(section) = obj.sections.iter().find(|s| s.name == "extabindex") {
|
||||
split_extabindex(obj, section.index, section.address as u32)?;
|
||||
}
|
||||
|
||||
// Create splits for .ctors entries
|
||||
if let Some(section) = obj.sections.iter().find(|s| s.name == ".ctors") {
|
||||
let section_start = section.address as u32;
|
||||
let section_end = section.address as u32 + section.size as u32 - 4;
|
||||
split_ctors_dtors(obj, section_start, section_end)?;
|
||||
}
|
||||
|
||||
// Create splits for .dtors entries
|
||||
if let Some(section) = obj.sections.iter().find(|s| s.name == ".dtors") {
|
||||
let section_start = section.address as u32 + 4; // skip __destroy_global_chain_reference
|
||||
let section_end = section.address as u32 + section.size as u32 - 4;
|
||||
split_ctors_dtors(obj, section_start, section_end)?;
|
||||
}
|
||||
|
||||
// Create gap splits
|
||||
create_gap_splits(obj)?;
|
||||
|
||||
// Resolve link order
|
||||
obj.link_order = resolve_link_order(obj)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// The ordering of TUs inside of each section represents a directed edge in a DAG.
|
||||
/// We can use a topological sort to determine a valid global TU order.
|
||||
/// There can be ambiguities, but any solution that satisfies the link order
|
||||
/// constraints is considered valid.
|
||||
fn resolve_link_order(obj: &ObjInfo) -> Result<Vec<String>> {
|
||||
let mut global_unit_order = Vec::<String>::new();
|
||||
let mut t_sort = TopologicalSort::<String>::new();
|
||||
for section in &obj.sections {
|
||||
let mut iter = obj
|
||||
.splits_for_range(section.address as u32..(section.address + section.size) as u32)
|
||||
.peekable();
|
||||
if section.name == ".ctors" || section.name == ".dtors" {
|
||||
// Skip __init_cpp_exceptions.o
|
||||
let skipped = iter.next();
|
||||
log::debug!("Skipping split {:?} (next: {:?})", skipped, iter.peek());
|
||||
}
|
||||
loop {
|
||||
match (iter.next(), iter.peek()) {
|
||||
(Some((a_addr, a)), Some((b_addr, b))) => {
|
||||
if a.unit != b.unit {
|
||||
log::debug!(
|
||||
"Adding dependency {} ({:#010X}) -> {} ({:#010X})",
|
||||
a.unit,
|
||||
a_addr,
|
||||
b.unit,
|
||||
b_addr
|
||||
);
|
||||
t_sort.add_dependency(a.unit.clone(), b.unit.clone());
|
||||
}
|
||||
}
|
||||
(Some((_, a)), None) => {
|
||||
t_sort.insert(a.unit.clone());
|
||||
break;
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
for unit in &mut t_sort {
|
||||
global_unit_order.push(unit);
|
||||
}
|
||||
// An incomplete topological sort indicates that a cyclic dependency was encountered.
|
||||
ensure!(t_sort.is_empty(), "Cyclic dependency encountered while resolving link order");
|
||||
// Sanity check, did we get all TUs in the final order?
|
||||
for unit in obj.splits.values().flatten().map(|s| &s.unit) {
|
||||
ensure!(global_unit_order.contains(unit), "Failed to find an order for {unit}");
|
||||
}
|
||||
Ok(global_unit_order)
|
||||
}
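Not part of the diff: a minimal standalone sketch of the link-order idea described above, using the same topological_sort crate the function relies on; the unit names are hypothetical.

use topological_sort::TopologicalSort;

fn main() {
    // Each adjacent pair of splits inside a section becomes an edge: the earlier
    // unit must be emitted before the later one.
    let mut t_sort = TopologicalSort::<String>::new();
    t_sort.add_dependency("a.cpp".to_string(), "b.cpp".to_string()); // a.cpp precedes b.cpp in .text
    t_sort.add_dependency("b.cpp".to_string(), "c.cpp".to_string()); // b.cpp precedes c.cpp in .data

    let mut order = Vec::new();
    for unit in &mut t_sort {
        order.push(unit);
    }
    // Anything left behind means a cycle, i.e. no valid link order exists.
    assert!(t_sort.is_empty());
    assert_eq!(order, vec!["a.cpp", "b.cpp", "c.cpp"]);
}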
|
||||
|
||||
/// Split an executable object into relocatable objects.
|
||||
pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
||||
ensure!(obj.kind == ObjKind::Executable, "Expected executable object");
|
||||
|
@@ -15,60 +430,33 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
let mut name_to_obj: HashMap<String, usize> = HashMap::new();
|
||||
for unit in &obj.link_order {
|
||||
name_to_obj.insert(unit.clone(), objects.len());
|
||||
object_symbols.push(vec![None; obj.symbols.len()]);
|
||||
objects.push(ObjInfo {
|
||||
module_id: 0,
|
||||
kind: ObjKind::Relocatable,
|
||||
architecture: ObjArchitecture::PowerPc,
|
||||
name: unit.clone(),
|
||||
symbols: vec![],
|
||||
sections: vec![],
|
||||
entry: 0,
|
||||
sda2_base: None,
|
||||
sda_base: None,
|
||||
stack_address: None,
|
||||
stack_end: None,
|
||||
db_stack_addr: None,
|
||||
arena_lo: None,
|
||||
arena_hi: None,
|
||||
splits: Default::default(),
|
||||
named_sections: Default::default(),
|
||||
link_order: vec![],
|
||||
known_functions: Default::default(),
|
||||
unresolved_relocations: vec![],
|
||||
});
|
||||
object_symbols.push(vec![None; obj.symbols.count()]);
|
||||
let mut out_obj = ObjInfo::new(
ObjKind::Relocatable,
ObjArchitecture::PowerPc,
unit.clone(),
vec![],
vec![],
);
out_obj.mw_comment = obj.mw_comment.clone();
objects.push(out_obj);
|
||||
}
|
||||
|
||||
for (section_idx, section) in obj.sections.iter().enumerate() {
|
||||
let mut current_address = section.address as u32;
|
||||
let mut section_end = (section.address + section.size) as u32;
|
||||
// if matches!(section.name.as_str(), "extab" | "extabindex") {
|
||||
// continue;
|
||||
// }
|
||||
// .ctors and .dtors end with a linker-generated null pointer,
|
||||
// adjust section size appropriately
|
||||
if matches!(section.name.as_str(), ".ctors" | ".dtors")
|
||||
&& section.data[section.data.len() - 4..] == [0u8; 4]
|
||||
{
|
||||
section_end -= 4;
|
||||
}
|
||||
let mut file_iter = obj
|
||||
.splits
|
||||
.range(current_address..)
|
||||
.flat_map(|(addr, v)| v.iter().map(move |u| (addr, u)))
|
||||
.peekable();
|
||||
let section_end = end_for_section(obj, section_idx)?;
|
||||
let mut file_iter = obj.splits_for_range(current_address..section_end).peekable();
|
||||
|
||||
// Build address to relocation / address to symbol maps
|
||||
let relocations = section.build_relocation_map()?;
|
||||
let symbols = obj.build_symbol_map(section_idx)?;
|
||||
|
||||
loop {
|
||||
if current_address >= section_end {
|
||||
break;
|
||||
}
|
||||
|
||||
let (file_addr, unit) = match file_iter.next() {
|
||||
Some((&addr, unit)) => (addr, unit),
|
||||
let (file_addr, split) = match file_iter.next() {
|
||||
Some((addr, split)) => (addr, split),
|
||||
None => bail!("No file found"),
|
||||
};
|
||||
ensure!(
|
||||
|
@@ -76,41 +464,30 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
"Gap in files: {} @ {:#010X}, {} @ {:#010X}",
|
||||
section.name,
|
||||
section.address,
|
||||
unit,
|
||||
split.unit,
|
||||
file_addr
|
||||
);
|
||||
let mut file_end = section_end;
|
||||
let mut dont_go_forward = false;
|
||||
if let Some(&(&next_addr, next_unit)) = file_iter.peek() {
|
||||
if file_addr == next_addr {
|
||||
log::warn!("Duplicating {} in {unit} and {next_unit}", section.name);
|
||||
dont_go_forward = true;
|
||||
file_end = obj
|
||||
.splits
|
||||
.range(current_address + 1..)
|
||||
.next()
|
||||
.map(|(&addr, _)| addr)
|
||||
.unwrap_or(section_end);
|
||||
} else {
|
||||
if let Some(&(next_addr, _next_split)) = file_iter.peek() {
|
||||
file_end = min(next_addr, section_end);
|
||||
}
|
||||
}
|
||||
|
||||
let file = name_to_obj
|
||||
.get(unit)
|
||||
.get(&split.unit)
|
||||
.and_then(|&idx| objects.get_mut(idx))
|
||||
.ok_or_else(|| anyhow!("Unit '{unit}' not in link order"))?;
|
||||
.ok_or_else(|| anyhow!("Unit '{}' not in link order", split.unit))?;
|
||||
let symbol_idxs = name_to_obj
|
||||
.get(unit)
|
||||
.get(&split.unit)
|
||||
.and_then(|&idx| object_symbols.get_mut(idx))
|
||||
.ok_or_else(|| anyhow!("Unit '{unit}' not in link order"))?;
|
||||
.ok_or_else(|| anyhow!("Unit '{}' not in link order", split.unit))?;
|
||||
|
||||
// Calculate & verify section alignment
|
||||
let mut align = default_section_align(section);
|
||||
let mut align =
|
||||
split.align.map(u64::from).unwrap_or_else(|| default_section_align(section));
|
||||
if current_address & (align as u32 - 1) != 0 {
|
||||
log::warn!(
|
||||
"Alignment for {} {} expected {}, but starts at {:#010X}",
|
||||
unit,
|
||||
split.unit,
|
||||
section.name,
|
||||
align,
|
||||
current_address
|
||||
|
@@ -125,7 +502,7 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
ensure!(
|
||||
current_address & (align as u32 - 1) == 0,
|
||||
"Invalid alignment for split: {} {} {:#010X}",
|
||||
unit,
|
||||
split.unit,
|
||||
section.name,
|
||||
current_address
|
||||
);
|
||||
|
@@ -133,34 +510,62 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
// Collect relocations; target_symbol will be updated later
|
||||
let out_relocations = relocations
|
||||
.range(current_address..file_end)
|
||||
.map(|(_, o)| ObjReloc {
|
||||
.map(|(_, &idx)| {
|
||||
let o = &section.relocations[idx];
|
||||
ObjReloc {
|
||||
kind: o.kind,
|
||||
address: o.address - current_address as u64,
|
||||
target_symbol: o.target_symbol,
|
||||
addend: o.addend,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Add section symbols
|
||||
let out_section_idx = file.sections.len();
|
||||
for &symbol_idx in symbols.range(current_address..file_end).flat_map(|(_, vec)| vec) {
|
||||
let mut comm_addr = current_address;
|
||||
for (symbol_idx, symbol) in obj.symbols.for_range(current_address..file_end) {
|
||||
if symbol_idxs[symbol_idx].is_some() {
|
||||
continue; // should never happen?
|
||||
}
|
||||
let symbol = &obj.symbols[symbol_idx];
|
||||
symbol_idxs[symbol_idx] = Some(file.symbols.len());
|
||||
file.symbols.push(ObjSymbol {
|
||||
|
||||
if split.common && symbol.address as u32 > comm_addr {
|
||||
// HACK: Add padding objects to fill gaps between common symbols
|
||||
file.symbols.add_direct(ObjSymbol {
|
||||
name: format!("pad_{:010X}", comm_addr),
|
||||
demangled_name: None,
|
||||
address: 0,
|
||||
section: None,
|
||||
size: symbol.address - comm_addr as u64,
|
||||
size_known: true,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Common.into()),
|
||||
kind: ObjSymbolKind::Object,
|
||||
align: Some(4),
|
||||
data_kind: Default::default(),
|
||||
})?;
|
||||
}
|
||||
comm_addr = (symbol.address + symbol.size) as u32;
|
||||
|
||||
symbol_idxs[symbol_idx] = Some(file.symbols.count());
|
||||
file.symbols.add_direct(ObjSymbol {
|
||||
name: symbol.name.clone(),
|
||||
demangled_name: symbol.demangled_name.clone(),
|
||||
address: symbol.address - current_address as u64,
|
||||
section: Some(out_section_idx),
|
||||
address: if split.common { 4 } else { symbol.address - current_address as u64 },
|
||||
section: if split.common { None } else { Some(out_section_idx) },
|
||||
size: symbol.size,
|
||||
size_known: symbol.size_known,
|
||||
flags: symbol.flags,
|
||||
flags: if split.common {
|
||||
ObjSymbolFlagSet(ObjSymbolFlags::Common.into())
|
||||
} else {
|
||||
symbol.flags
|
||||
},
|
||||
kind: symbol.kind,
|
||||
});
|
||||
align: if split.common { Some(4) } else { symbol.align },
|
||||
data_kind: symbol.data_kind,
|
||||
})?;
|
||||
}
|
||||
|
||||
if !split.common {
|
||||
let data = match section.kind {
|
||||
ObjSectionKind::Bss => vec![],
|
||||
_ => section.data[(current_address as u64 - section.address) as usize
|
||||
|
@@ -186,14 +591,14 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
file_offset: section.file_offset + (current_address as u64 - section.address),
|
||||
section_known: true,
|
||||
});
|
||||
|
||||
if !dont_go_forward {
|
||||
current_address = file_end;
|
||||
}
|
||||
|
||||
current_address = file_end;
|
||||
}
|
||||
}
|
||||
|
||||
// Update relocations
|
||||
let mut globalize_symbols = vec![];
|
||||
for (obj_idx, out_obj) in objects.iter_mut().enumerate() {
|
||||
let symbol_idxs = &mut object_symbols[obj_idx];
|
||||
for section in &mut out_obj.sections {
|
||||
|
@@ -204,32 +609,52 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
}
|
||||
None => {
|
||||
// Extern
|
||||
let out_sym_idx = out_obj.symbols.len();
|
||||
let target_sym = &obj.symbols[reloc.target_symbol];
|
||||
let out_sym_idx = out_obj.symbols.count();
|
||||
let target_sym = obj.symbols.at(reloc.target_symbol);
|
||||
|
||||
// If the symbol is local, we'll upgrade the scope to global
|
||||
// and rename it to avoid conflicts
|
||||
if target_sym.flags.0.contains(ObjSymbolFlags::Local) {
|
||||
let address_str = format!("{:08X}", target_sym.address);
|
||||
let new_name = if target_sym.name.ends_with(&address_str) {
|
||||
target_sym.name.clone()
|
||||
} else {
|
||||
format!("{}_{}", target_sym.name, address_str)
|
||||
};
|
||||
globalize_symbols.push((reloc.target_symbol, new_name));
|
||||
}
|
||||
|
||||
symbol_idxs[reloc.target_symbol] = Some(out_sym_idx);
|
||||
out_obj.symbols.push(ObjSymbol {
|
||||
out_obj.symbols.add_direct(ObjSymbol {
|
||||
name: target_sym.name.clone(),
|
||||
demangled_name: target_sym.demangled_name.clone(),
|
||||
..Default::default()
|
||||
});
|
||||
})?;
|
||||
reloc.target_symbol = out_sym_idx;
|
||||
|
||||
if section.name.as_str() == "extabindex" {
|
||||
let (target_addr, target_unit) = obj
|
||||
.splits
|
||||
.range(..=target_sym.address as u32)
|
||||
.map(|(addr, v)| (*addr, v.last().unwrap()))
|
||||
.last()
|
||||
.unwrap();
|
||||
let Some((target_addr, target_split)) =
|
||||
obj.split_for(target_sym.address as u32)
|
||||
else {
|
||||
bail!(
|
||||
"Bad extabindex relocation @ {:#010X}",
|
||||
reloc.address + section.original_address
|
||||
);
|
||||
};
|
||||
let target_section = &obj.section_at(target_addr)?.name;
|
||||
log::warn!(
|
||||
"Extern relocation @ {:#010X}\n\tSource object: {}:{:#010X} {}\n\tTarget object: {}:{:#010X} {}\n\tTarget symbol: {:#010X} {}\n",
|
||||
log::error!(
|
||||
"Bad extabindex relocation @ {:#010X}\n\
|
||||
\tSource object: {}:{:#010X} ({})\n\
|
||||
\tTarget object: {}:{:#010X} ({})\n\
|
||||
\tTarget symbol: {:#010X} ({})\n\
|
||||
This will cause the linker to crash.\n",
|
||||
reloc.address + section.original_address,
|
||||
section.name,
|
||||
section.original_address,
|
||||
out_obj.name,
|
||||
target_section,
|
||||
target_addr,
|
||||
target_unit,
|
||||
target_split.unit,
|
||||
target_sym.address,
|
||||
target_sym.demangled_name.as_deref().unwrap_or(&target_sym.name),
|
||||
);
|
||||
|
@ -240,32 +665,37 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
}
|
||||
}
|
||||
|
||||
// Strip linker generated symbols
|
||||
// Upgrade local symbols to global if necessary
|
||||
for (obj, symbol_map) in objects.iter_mut().zip(&object_symbols) {
|
||||
for (globalize_idx, new_name) in &globalize_symbols {
|
||||
if let Some(symbol_idx) = symbol_map[*globalize_idx] {
|
||||
let mut symbol = obj.symbols.at(symbol_idx).clone();
|
||||
symbol.name = new_name.clone();
|
||||
if symbol.flags.0.contains(ObjSymbolFlags::Local) {
|
||||
log::debug!("Globalizing {} in {}", symbol.name, obj.name);
|
||||
symbol.flags.0 &= !ObjSymbolFlags::Local;
|
||||
symbol.flags.0 |= ObjSymbolFlags::Global;
|
||||
}
|
||||
obj.symbols.replace(symbol_idx, symbol)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extern linker generated symbols
|
||||
for obj in &mut objects {
|
||||
for symbol in &mut obj.symbols {
|
||||
if is_skip_symbol(&symbol.name) {
|
||||
if symbol.section.is_some() {
|
||||
let mut replace_symbols = vec![];
|
||||
for (symbol_idx, symbol) in obj.symbols.iter().enumerate() {
|
||||
if is_linker_generated_label(&symbol.name) && symbol.section.is_some() {
|
||||
log::debug!("Externing {:?} in {}", symbol, obj.name);
|
||||
*symbol = ObjSymbol {
|
||||
replace_symbols.push((symbol_idx, ObjSymbol {
|
||||
name: symbol.name.clone(),
|
||||
demangled_name: symbol.demangled_name.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
}
|
||||
} else if is_linker_symbol(&symbol.name) {
|
||||
if let Some(section_idx) = symbol.section {
|
||||
log::debug!("Skipping {:?} in {}", symbol, obj.name);
|
||||
let section = &mut obj.sections[section_idx];
|
||||
// TODO assuming end of file
|
||||
section.size -= symbol.size;
|
||||
section.data.truncate(section.data.len() - symbol.size as usize);
|
||||
*symbol = ObjSymbol {
|
||||
name: symbol.name.clone(),
|
||||
demangled_name: symbol.demangled_name.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
}));
|
||||
}
|
||||
}
|
||||
for (symbol_idx, symbol) in replace_symbols {
|
||||
obj.symbols.replace(symbol_idx, symbol)?;
|
||||
}
|
||||
}
|
||||
@@ -273,11 +703,12 @@ pub fn split_obj(obj: &ObjInfo) -> Result<Vec<ObjInfo>> {
|
|||
}
|
||||
|
||||
/// mwld doesn't preserve the original section alignment values
|
||||
fn default_section_align(section: &ObjSection) -> u64 {
|
||||
pub fn default_section_align(section: &ObjSection) -> u64 {
|
||||
match section.kind {
|
||||
ObjSectionKind::Code => 4,
|
||||
_ => match section.name.as_str() {
|
||||
".ctors" | ".dtors" | "extab" | "extabindex" => 4,
|
||||
".sbss" => 1, // ?
|
||||
_ => 8,
|
||||
},
|
||||
}
|
||||
|
@@ -285,7 +716,7 @@ fn default_section_align(section: &ObjSection) -> u64 {
|
|||
|
||||
/// Linker-generated symbols to extern
|
||||
#[inline]
|
||||
fn is_skip_symbol(name: &str) -> bool {
|
||||
pub fn is_linker_generated_label(name: &str) -> bool {
|
||||
matches!(
|
||||
name,
|
||||
"_ctors"
|
||||
|
@@ -347,11 +778,47 @@ fn is_skip_symbol(name: &str) -> bool {
|
|||
)
|
||||
}
|
||||
|
||||
/// Linker generated symbols to strip entirely
|
||||
/// Linker generated objects to strip entirely
|
||||
#[inline]
|
||||
fn is_linker_symbol(name: &str) -> bool {
|
||||
pub fn is_linker_generated_object(name: &str) -> bool {
|
||||
matches!(
|
||||
name,
|
||||
"_eti_init_info" | "_rom_copy_info" | "_bss_init_info" | "_ctors$99" | "_dtors$99"
|
||||
)
|
||||
}
|
||||
|
||||
/// Locate the end address of a section when excluding linker generated objects
|
||||
pub fn end_for_section(obj: &ObjInfo, section_index: usize) -> Result<u32> {
|
||||
let section = &obj.sections[section_index];
|
||||
let section_start = section.address as u32;
|
||||
let mut section_end = (section.address + section.size) as u32;
|
||||
// .ctors and .dtors end with a linker-generated null pointer,
|
||||
// adjust section size appropriately
|
||||
if matches!(section.name.as_str(), ".ctors" | ".dtors")
|
||||
&& section.data[section.data.len() - 4..] == [0u8; 4]
|
||||
{
|
||||
section_end -= 4;
|
||||
return Ok(section_end);
|
||||
}
|
||||
loop {
|
||||
let last_symbol = obj
|
||||
.symbols
|
||||
.for_range(section_start..section_end)
|
||||
.filter(|(_, s)| s.kind == ObjSymbolKind::Object && s.size_known && s.size > 0)
|
||||
.last();
|
||||
match last_symbol {
|
||||
Some((_, symbol)) if is_linker_generated_object(&symbol.name) => {
|
||||
log::debug!(
|
||||
"Found {}, adjusting section {} end {:#010X} -> {:#010X}",
|
||||
symbol.name,
section.name,
|
||||
section_end,
|
||||
symbol.address
|
||||
);
|
||||
section_end = symbol.address as u32;
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
Ok(section_end)
|
||||
}
|
||||
237 src/util/asm.rs
@@ -9,8 +9,8 @@ use ppc750cl::{disasm_iter, Argument, Ins, Opcode};
|
||||
use crate::{
|
||||
obj::{
|
||||
ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol, ObjSymbolFlags,
|
||||
ObjSymbolKind,
|
||||
ObjDataKind, ObjInfo, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol,
|
||||
ObjSymbolFlags, ObjSymbolKind,
|
||||
},
|
||||
util::nested::NestedVec,
|
||||
};
|
||||
|
@@ -22,7 +22,7 @@ enum SymbolEntryKind {
|
|||
Label,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||
struct SymbolEntry {
|
||||
index: usize,
|
||||
kind: SymbolEntryKind,
|
||||
|
@@ -31,20 +31,24 @@ struct SymbolEntry {
|
|||
pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
|
||||
writeln!(w, ".include \"macros.inc\"")?;
|
||||
if !obj.name.is_empty() {
|
||||
writeln!(w, ".file \"{}\"", obj.name.replace('\\', "\\\\"))?;
|
||||
let name = obj
|
||||
.name
|
||||
.rsplit_once('/')
|
||||
.or_else(|| obj.name.rsplit_once('\\'))
|
||||
.or_else(|| obj.name.rsplit_once(' '))
|
||||
.map(|(_, b)| b)
|
||||
.unwrap_or(&obj.name);
|
||||
writeln!(w, ".file \"{}\"", name.replace('\\', "\\\\"))?;
|
||||
}
|
||||
|
||||
// We'll append generated symbols to the end
|
||||
let mut symbols: Vec<ObjSymbol> = obj.symbols.clone();
|
||||
let mut symbols: Vec<ObjSymbol> = obj.symbols.iter().cloned().collect();
|
||||
let mut section_entries: Vec<BTreeMap<u32, Vec<SymbolEntry>>> = vec![];
|
||||
let mut section_relocations: Vec<BTreeMap<u32, ObjReloc>> = vec![];
|
||||
for (section_idx, section) in obj.sections.iter().enumerate() {
|
||||
// Build symbol start/end entries
|
||||
let mut entries = BTreeMap::<u32, Vec<SymbolEntry>>::new();
|
||||
for (symbol_index, symbol) in obj.symbols_for_section(section_idx) {
|
||||
if symbol.kind == ObjSymbolKind::Section {
|
||||
continue;
|
||||
}
|
||||
for (symbol_index, symbol) in obj.symbols.for_section(section) {
|
||||
entries.nested_push(symbol.address as u32, SymbolEntry {
|
||||
index: symbol_index,
|
||||
kind: SymbolEntryKind::Start,
|
||||
|
@@ -57,16 +61,13 @@ pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
|
|||
}
|
||||
}
|
||||
|
||||
let mut relocations = section.build_relocation_map()?;
|
||||
let mut relocations = section.build_relocation_map_cloned()?;
|
||||
|
||||
// Generate local jump labels
|
||||
if section.kind == ObjSectionKind::Code {
|
||||
for ins in disasm_iter(§ion.data, section.address as u32) {
|
||||
if let Some(address) = ins.branch_dest() {
|
||||
if ins.field_AA()
|
||||
|| (address as u64) < section.address
|
||||
|| (address as u64) >= section.address + section.size
|
||||
{
|
||||
if ins.field_AA() || !section.contains(address) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@@ -128,7 +129,7 @@ pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
|
|||
if reloc.addend == 0 {
|
||||
continue;
|
||||
}
|
||||
let target = &obj.symbols[reloc.target_symbol];
|
||||
let target = &symbols[reloc.target_symbol];
|
||||
let target_section_idx = match target.section {
|
||||
Some(v) => v,
|
||||
None => continue,
|
||||
|
@@ -157,6 +158,24 @@ pub fn write_asm<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
|
|||
}
|
||||
}
|
||||
|
||||
// Write common symbols
|
||||
let mut common_symbols = Vec::new();
|
||||
for symbol in symbols.iter().filter(|s| s.flags.0.contains(ObjSymbolFlags::Common)) {
|
||||
ensure!(symbol.section.is_none(), "Invalid: common symbol with section {:?}", symbol);
|
||||
common_symbols.push(symbol);
|
||||
}
|
||||
if !common_symbols.is_empty() {
|
||||
writeln!(w)?;
|
||||
for symbol in common_symbols {
|
||||
if let Some(name) = &symbol.demangled_name {
|
||||
writeln!(w, "# {name}")?;
|
||||
}
|
||||
write!(w, ".comm ")?;
|
||||
write_symbol_name(w, &symbol.name)?;
|
||||
writeln!(w, ", {:#X}, 4", symbol.size)?;
|
||||
}
|
||||
}
|
||||
|
||||
for section in &obj.sections {
|
||||
let entries = &section_entries[section.index];
|
||||
let relocations = &section_relocations[section.index];
|
||||
|
@@ -336,10 +355,11 @@ fn write_symbol_entry<W: Write>(
|
|||
};
|
||||
let scope = if symbol.flags.0.contains(ObjSymbolFlags::Weak) {
|
||||
"weak"
|
||||
} else if symbol.flags.0.contains(ObjSymbolFlags::Global) {
|
||||
"global"
|
||||
} else {
|
||||
} else if symbol.flags.0.contains(ObjSymbolFlags::Local) {
|
||||
"local"
|
||||
} else {
|
||||
// Default to global
|
||||
"global"
|
||||
};
|
||||
|
||||
match entry.kind {
|
||||
|
@ -397,6 +417,7 @@ fn write_data<W: Write>(
|
|||
|
||||
let mut current_address = start;
|
||||
let mut current_symbol_kind = ObjSymbolKind::Unknown;
|
||||
let mut current_data_kind = ObjDataKind::Unknown;
|
||||
let mut entry = entry_iter.next();
|
||||
let mut reloc = reloc_iter.next();
|
||||
let mut begin = true;
|
||||
|
@ -413,6 +434,7 @@ fn write_data<W: Write>(
|
|||
write_symbol_entry(w, symbols, entry)?;
|
||||
}
|
||||
current_symbol_kind = find_symbol_kind(current_symbol_kind, symbols, vec)?;
|
||||
current_data_kind = find_data_kind(current_data_kind, symbols, vec)?;
|
||||
entry = entry_iter.next();
|
||||
}
|
||||
}
|
||||
|
@ -464,7 +486,7 @@ fn write_data<W: Write>(
|
|||
);
|
||||
write_code_chunk(w, symbols, entries, relocations, section, current_address, data)?;
|
||||
} else {
|
||||
write_data_chunk(w, data)?;
|
||||
write_data_chunk(w, data, current_data_kind)?;
|
||||
}
|
||||
current_address = until;
|
||||
}
|
||||
|
@ -497,26 +519,176 @@ fn find_symbol_kind(
    Ok(kind)
}

fn write_data_chunk<W: Write>(w: &mut W, data: &[u8]) -> Result<()> {
fn find_data_kind(
    current_data_kind: ObjDataKind,
    symbols: &[ObjSymbol],
    entries: &Vec<SymbolEntry>,
) -> Result<ObjDataKind> {
    let mut kind = ObjDataKind::Unknown;
    let mut found = false;
    for entry in entries {
        match entry.kind {
            SymbolEntryKind::Start => {
                let new_kind = symbols[entry.index].data_kind;
                if !matches!(new_kind, ObjDataKind::Unknown) {
                    ensure!(
                        !found || new_kind == kind,
                        "Conflicting data kinds found: {kind:?} and {new_kind:?}"
                    );
                    found = true;
                    kind = new_kind;
                }
            }
            SymbolEntryKind::Label => {
                // If type is a local label, don't change data types
                if !found {
                    kind = current_data_kind;
                }
            }
            _ => continue,
        }
    }
    Ok(kind)
}

fn write_string<W: Write>(w: &mut W, data: &[u8]) -> Result<()> {
    let terminated = matches!(data.last(), Some(&b) if b == 0);
    if terminated {
        write!(w, "\t.string \"")?;
    } else {
        write!(w, "\t.ascii \"")?;
    }
    for &b in &data[..data.len() - if terminated { 1 } else { 0 }] {
        match b as char {
            '\x08' => write!(w, "\\b")?,
            '\x09' => write!(w, "\\t")?,
            '\x0A' => write!(w, "\\n")?,
            '\x0C' => write!(w, "\\f")?,
            '\x0D' => write!(w, "\\r")?,
            '\\' => write!(w, "\\\\")?,
            '"' => write!(w, "\\\"")?,
            c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
            _ => write!(w, "\\{:03o}", b)?,
        }
    }
    writeln!(w, "\"")?;
    Ok(())
}

fn write_string16<W: Write>(w: &mut W, data: &[u16]) -> Result<()> {
    if matches!(data.last(), Some(&b) if b == 0) {
        write!(w, "\t.string16 \"")?;
    } else {
        bail!("Non-terminated UTF-16 string");
    }
    if data.len() > 1 {
        for result in std::char::decode_utf16(data[..data.len() - 1].iter().cloned()) {
            let c = match result {
                Ok(c) => c,
                Err(_) => bail!("Failed to decode UTF-16"),
            };
            match c {
                '\x08' => write!(w, "\\b")?,
                '\x09' => write!(w, "\\t")?,
                '\x0A' => write!(w, "\\n")?,
                '\x0C' => write!(w, "\\f")?,
                '\x0D' => write!(w, "\\r")?,
                '\\' => write!(w, "\\\\")?,
                '"' => write!(w, "\\\"")?,
                c if c.is_ascii_graphic() || c.is_ascii_whitespace() => write!(w, "{}", c)?,
                _ => write!(w, "\\{:#X}", c as u32)?,
            }
        }
    }
    writeln!(w, "\"")?;
    Ok(())
}

fn write_data_chunk<W: Write>(w: &mut W, data: &[u8], data_kind: ObjDataKind) -> Result<()> {
    let remain = data;
    for chunk in remain.chunks(4) {
    match data_kind {
        ObjDataKind::String => {
            return write_string(w, data);
        }
        ObjDataKind::String16 => {
            if data.len() % 2 != 0 {
                bail!("Attempted to write wstring with length {:#X}", data.len());
            }
            let data = data
                .chunks_exact(2)
                .map(|c| u16::from_be_bytes(c.try_into().unwrap()))
                .collect::<Vec<u16>>();
            return write_string16(w, &data);
        }
        ObjDataKind::StringTable => {
            for slice in data.split_inclusive(|&b| b == 0) {
                write_string(w, slice)?;
            }
            return Ok(());
        }
        ObjDataKind::String16Table => {
            if data.len() % 2 != 0 {
                bail!("Attempted to write wstring_table with length {:#X}", data.len());
            }
            let data = data
                .chunks_exact(2)
                .map(|c| u16::from_be_bytes(c.try_into().unwrap()))
                .collect::<Vec<u16>>();
            for slice in data.split_inclusive(|&b| b == 0) {
                write_string16(w, slice)?;
            }
            return Ok(());
        }
        _ => {}
    }
    let chunk_size = match data_kind {
        ObjDataKind::Byte2 => 2,
        ObjDataKind::Unknown | ObjDataKind::Byte4 | ObjDataKind::Float => 4,
        ObjDataKind::Byte | ObjDataKind::Byte8 | ObjDataKind::Double => 8,
        ObjDataKind::String
        | ObjDataKind::String16
        | ObjDataKind::StringTable
        | ObjDataKind::String16Table => unreachable!(),
    };
    for chunk in remain.chunks(chunk_size) {
        if data_kind == ObjDataKind::Byte || matches!(chunk.len(), 1 | 3 | 5..=7) {
            let bytes = chunk.iter().map(|c| format!("{:#04X}", c)).collect::<Vec<String>>();
            writeln!(w, "\t.byte {}", bytes.join(", "))?;
        } else {
            match chunk.len() {
                8 if data_kind == ObjDataKind::Double => {
                    let data = f64::from_be_bytes(chunk.try_into().unwrap());
                    if data.is_nan() {
                        let int_data = u64::from_be_bytes(chunk.try_into().unwrap());
                        writeln!(w, "\t.8byte {int_data:#018X} # {data}")?;
                    } else {
                        writeln!(w, "\t.double {data}")?;
                    }
                }
                8 => {
                    let data = u64::from_be_bytes(chunk.try_into().unwrap());
                    writeln!(w, "\t.8byte {data:#018X}")?;
                }
                4 if data_kind == ObjDataKind::Float => {
                    let data = f32::from_be_bytes(chunk.try_into().unwrap());
                    if data.is_nan() {
                        let int_data = u32::from_be_bytes(chunk.try_into().unwrap());
                        writeln!(w, "\t.4byte {int_data:#010X} # {data}")?;
                    } else {
                        writeln!(w, "\t.float {data}")?;
                    }
                }
                4 => {
                    let data = u32::from_be_bytes(chunk.try_into().unwrap());
                    writeln!(w, "\t.4byte {data:#010X}")?;
                }
                3 => {
                    writeln!(w, "\t.byte {:#04X}, {:#04X}, {:#04X}", chunk[0], chunk[1], chunk[2])?;
                }
                2 => {
                    writeln!(w, "\t.2byte {:#06X}", u16::from_be_bytes(chunk.try_into().unwrap()))?;
                }
                1 => {
                    writeln!(w, "\t.byte {:#04X}", chunk[0])?;
                }
                _ => unreachable!(),
            }
        }
    }
    Ok(())
}

@ -644,6 +816,10 @@ fn write_section_header<W: Write>(
            write!(w, ".section {}", section.name)?;
            write!(w, ", \"a\"")?;
        }
        ".comment" => {
            write!(w, ".section {}", section.name)?;
            write!(w, ", \"\"")?;
        }
        name => {
            log::warn!("Unknown section {name}");
            write!(w, ".section {}", section.name)?;
@ -676,7 +852,12 @@ fn write_reloc_symbol<W: Write>(
}

fn write_symbol_name<W: Write>(w: &mut W, name: &str) -> std::io::Result<()> {
    if name.contains('@') || name.contains('<') || name.contains('\\') {
    if name.contains('@')
        || name.contains('<')
        || name.contains('\\')
        || name.contains('-')
        || name.contains('+')
    {
        write!(w, "\"{name}\"")?;
    } else {
        write!(w, "{name}")?;
@ -0,0 +1,161 @@
use std::{
    io::{Read, Seek, SeekFrom, Write},
    ops::Deref,
};

use anyhow::{bail, Context, Result};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use num_enum::{IntoPrimitive, TryFromPrimitive};

use crate::obj::{ObjSymbol, ObjSymbolFlags, ObjSymbolKind};

#[derive(Debug, Copy, Clone, IntoPrimitive, TryFromPrimitive)]
#[repr(u8)]
pub enum MWFloatKind {
    None = 0,
    Soft = 1,
    Hard = 2,
}

#[derive(Debug, Clone)]
pub struct MWComment {
    pub compiler_version: [u8; 4],
    pub pool_data: bool,
    pub float: MWFloatKind,
    pub processor: u16,
    pub incompatible_return_small_structs: bool,
    pub incompatible_sfpe_double_params: bool,
    pub unsafe_global_reg_vars: bool,
}

impl Default for MWComment {
    fn default() -> Self {
        Self {
            // Metrowerks C/C++ Compiler for Embedded PowerPC
            // Version 2.4.2 build 81
            // (CodeWarrior for GameCube 1.3.2)
            compiler_version: [2, 4, 2, 1],
            pool_data: true,
            float: MWFloatKind::Hard,
            processor: 0x16, // gekko
            incompatible_return_small_structs: false,
            incompatible_sfpe_double_params: false,
            unsafe_global_reg_vars: false,
        }
    }
}

const MAGIC: &[u8] = "CodeWarrior\n".as_bytes();
const PADDING: &[u8] = &[0u8; 0x16];

impl MWComment {
    pub fn parse_header<R: Read + Seek>(reader: &mut R) -> Result<MWComment> {
        let mut header = MWComment {
            compiler_version: [0; 4],
            pool_data: false,
            float: MWFloatKind::None,
            processor: 0,
            incompatible_return_small_structs: false,
            incompatible_sfpe_double_params: false,
            unsafe_global_reg_vars: false,
        };
        // 0x0 - 0xB
        let mut magic = vec![0u8; MAGIC.len()];
        reader.read_exact(&mut magic).context("While reading magic")?;
        if magic.deref() != MAGIC {
            bail!("Invalid comment section magic: {:?}", magic);
        }
        // 0xC - 0xF
        reader
            .read_exact(&mut header.compiler_version)
            .context("While reading compiler version")?;
        // 0x10
        header.pool_data = match reader.read_u8()? {
            0 => false,
            1 => true,
            value => bail!("Invalid value for pool_data: {}", value),
        };
        // 0x11
        header.float =
            MWFloatKind::try_from(reader.read_u8()?).context("Invalid value for float")?;
        // 0x12 - 0x13
        header.processor = reader.read_u16::<BigEndian>()?;
        // 0x14
        match reader.read_u8()? as char {
            // This is 0x2C, which could also be the size of the header? Unclear
            ',' => {}
            c => bail!("Expected ',' after processor, got '{}'", c),
        }
        // 0x15
        let flags = reader.read_u8()?;
        if flags & !7 != 0 {
            bail!("Unexpected flag value {:#X}", flags);
        }
        if flags & 1 == 1 {
            header.incompatible_return_small_structs = true;
        }
        if flags & 2 == 2 {
            header.incompatible_sfpe_double_params = true;
        }
        if flags & 4 == 4 {
            header.unsafe_global_reg_vars = true;
        }
        // 0x16 - 0x2C
        reader.seek(SeekFrom::Current(0x16))?;
        Ok(header)
    }

    pub fn write_header<W: Write>(&self, w: &mut W) -> Result<()> {
        w.write_all(MAGIC)?;
        w.write_all(&self.compiler_version)?;
        w.write_u8(if self.pool_data { 1 } else { 0 })?;
        w.write_u8(self.float.into())?;
        w.write_u16::<BigEndian>(self.processor)?;
        w.write_u8(0x2C)?;
        let mut flags = 0u8;
        if self.incompatible_return_small_structs {
            flags |= 1;
        }
        if self.incompatible_sfpe_double_params {
            flags |= 2;
        }
        if self.unsafe_global_reg_vars {
            flags |= 4;
        }
        w.write_u8(flags)?;
        w.write_all(PADDING)?;
        Ok(())
    }
}

pub fn write_comment_sym<W: Write>(w: &mut W, symbol: &ObjSymbol) -> Result<()> {
    let align = match symbol.align {
        Some(align) => align,
        None => {
            if symbol.flags.0.contains(ObjSymbolFlags::Common) {
                symbol.address as u32
            } else {
                match symbol.kind {
                    ObjSymbolKind::Unknown => 0,
                    ObjSymbolKind::Function => 4,
                    ObjSymbolKind::Object => 4,
                    ObjSymbolKind::Section => 8, // TODO?
                }
            }
        }
    };
    w.write_u32::<BigEndian>(align)?;
    let mut vis_flags = 0;
    if symbol.flags.0.contains(ObjSymbolFlags::Weak) {
        vis_flags |= 0xE; // TODO 0xD?
    }
    w.write_u8(vis_flags)?;
    let mut active_flags = 0;
    if symbol.flags.0.contains(ObjSymbolFlags::ForceActive) {
        active_flags |= 8;
    }
    w.write_u8(active_flags)?;
    w.write_u8(0)?;
    w.write_u8(0)?;
    Ok(())
}
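A minimal round-trip sketch of the MWComment header API added above (the test-style function and the in-memory Cursor buffer are illustrative assumptions, not part of this commit):

// Hypothetical usage: write the default CodeWarrior header (0x2C bytes) and parse it back.
use std::io::Cursor;

fn roundtrip_mw_comment() -> anyhow::Result<()> {
    let header = MWComment::default();
    let mut buf = Vec::new();
    header.write_header(&mut buf)?;
    let parsed = MWComment::parse_header(&mut Cursor::new(buf.as_slice()))?;
    assert_eq!(parsed.compiler_version, header.compiler_version);
    assert_eq!(parsed.processor, header.processor);
    Ok(())
}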
@ -1,23 +1,26 @@
use std::{
    io::{BufRead, Write},
    iter,
    num::ParseIntError,
    str::FromStr,
};

use anyhow::{anyhow, bail, Result};
use anyhow::{anyhow, bail, ensure, Result};
use cwdemangle::{demangle, DemangleOptions};
use once_cell::sync::Lazy;
use regex::Regex;

use crate::obj::{ObjInfo, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind};
use crate::util::nested::NestedVec;
use crate::{
    obj::{
        ObjDataKind, ObjInfo, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
    },
    util::nested::NestedVec,
};

fn parse_hex(s: &str) -> Result<u32, ParseIntError> {
    u32::from_str_radix(s.trim_start_matches("0x"), 16)
}

pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>> {
pub fn parse_symbol_line(line: &str, obj: &mut ObjInfo) -> Result<Option<ObjSymbol>> {
    static SYMBOL_LINE: Lazy<Regex> = Lazy::new(|| {
        Regex::new(
            "^\\s*(?P<name>[^\\s=]+)\\s*=\\s*(?:(?P<section>[A-Za-z0-9.]+):)?(?P<addr>[0-9A-Fa-fXx]+);(?:\\s*//\\s*(?P<attrs>.*))?$",
@ -26,7 +29,7 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
    });
    static COMMENT_LINE: Lazy<Regex> = Lazy::new(|| Regex::new("^\\s*(?://|#).*$").unwrap());

    if let Some(captures) = SYMBOL_LINE.captures(&line) {
    if let Some(captures) = SYMBOL_LINE.captures(line) {
        let name = captures["name"].to_string();
        let addr = parse_hex(&captures["addr"])?;
        let demangled_name = demangle(&name, &DemangleOptions::default());
@ -39,6 +42,8 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
            size_known: false,
            flags: Default::default(),
            kind: ObjSymbolKind::Unknown,
            align: None,
            data_kind: Default::default(),
        };
        let attrs = captures["attrs"].split(' ');
        for attr in attrs {
@ -49,21 +54,36 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
                            .ok_or_else(|| anyhow!("Unknown symbol type '{}'", value))?;
                    }
                    "size" => {
                        symbol.size = parse_hex(&value)? as u64;
                        symbol.size = parse_hex(value)? as u64;
                        symbol.size_known = true;
                    }
                    "scope" => {
                        symbol.flags.0 |= symbol_flags_from_str(value)
                            .ok_or_else(|| anyhow!("Unknown symbol scope '{}'", value))?;
                    }
                    _ => bail!("Unknown attribute '{name}'"),
                    "align" => {
                        symbol.align = Some(parse_hex(value)?);
                    }
                    "data" => {
                        symbol.data_kind = symbol_data_kind_from_str(value)
                            .ok_or_else(|| anyhow!("Unknown symbol data type '{}'", value))?;
                    }
                    _ => bail!("Unknown symbol attribute '{name}'"),
                }
            } else {
                match attr {
                    "hidden" => {
                        symbol.flags.0 |= ObjSymbolFlags::Hidden;
                    }
                    _ => bail!("Unknown attribute '{attr}'"),
                    "noreloc" => {
                        ensure!(
                            symbol.size != 0,
                            "Symbol {} requires size != 0 with noreloc",
                            symbol.name
                        );
                        obj.blocked_ranges.insert(addr, addr + symbol.size as u32);
                    }
                    _ => bail!("Unknown symbol attribute '{attr}'"),
                }
            }
        }
@ -71,7 +91,7 @@ pub fn parse_symbol_line(line: &str, obj: &ObjInfo) -> Result<Option<ObjSymbol>>
    } else if COMMENT_LINE.is_match(line) {
        Ok(None)
    } else {
        Err(anyhow!("Failed to parse line '{line}'"))
        Err(anyhow!("Failed to parse symbol line '{line}'"))
    }
}

@ -86,9 +106,7 @@ fn is_skip_symbol(symbol: &ObjSymbol) -> bool {
}

pub fn write_symbols<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
    let mut symbols: Vec<&ObjSymbol> = obj.symbols.iter().map(|s| s).collect();
    symbols.sort_by_key(|s| s.address);
    for symbol in symbols {
    for (_, symbol) in obj.symbols.for_range(..) {
        if symbol.kind == ObjSymbolKind::Section
            // Ignore absolute symbols for now (usually linker-generated)
            || symbol.section.is_none()
@ -128,9 +146,18 @@ fn write_symbol<W: Write>(w: &mut W, obj: &ObjInfo, symbol: &ObjSymbol) -> Resul
    if let Some(scope) = symbol_flags_to_str(symbol.flags) {
        write!(w, " scope:{scope}")?;
    }
    if let Some(align) = symbol.align {
        write!(w, " align:{align:#X}")?;
    }
    if let Some(kind) = symbol_data_kind_to_str(symbol.data_kind) {
        write!(w, " data:{kind}")?;
    }
    if symbol.flags.0.contains(ObjSymbolFlags::Hidden) {
        write!(w, " hidden")?;
    }
    if obj.blocked_ranges.contains_key(&(symbol.address as u32)) {
        write!(w, " noreloc")?;
    }
    writeln!(w)?;
    Ok(())
}
@ -145,6 +172,23 @@ fn symbol_kind_to_str(kind: ObjSymbolKind) -> &'static str {
    }
}

#[inline]
fn symbol_data_kind_to_str(kind: ObjDataKind) -> Option<&'static str> {
    match kind {
        ObjDataKind::Unknown => None,
        ObjDataKind::Byte => Some("byte"),
        ObjDataKind::Byte2 => Some("2byte"),
        ObjDataKind::Byte4 => Some("4byte"),
        ObjDataKind::Byte8 => Some("8byte"),
        ObjDataKind::Float => Some("float"),
        ObjDataKind::Double => Some("double"),
        ObjDataKind::String => Some("string"),
        ObjDataKind::String16 => Some("wstring"),
        ObjDataKind::StringTable => Some("string_table"),
        ObjDataKind::String16Table => Some("wstring_table"),
    }
}

#[inline]
fn symbol_kind_from_str(s: &str) -> Option<ObjSymbolKind> {
    match s {
@ -182,45 +226,36 @@ fn symbol_flags_from_str(s: &str) -> Option<ObjSymbolFlags> {
    }
}

pub fn write_splits<W: Write>(
    w: &mut W,
    obj: &ObjInfo,
    obj_files: Option<Vec<String>>,
) -> Result<()> {
    let mut obj_files_iter = obj_files.map(|v| v.into_iter());
#[inline]
fn symbol_data_kind_from_str(s: &str) -> Option<ObjDataKind> {
    match s {
        "byte" => Some(ObjDataKind::Byte),
        "2byte" => Some(ObjDataKind::Byte2),
        "4byte" => Some(ObjDataKind::Byte4),
        "8byte" => Some(ObjDataKind::Byte8),
        "float" => Some(ObjDataKind::Float),
        "double" => Some(ObjDataKind::Double),
        "string" => Some(ObjDataKind::String),
        "wstring" => Some(ObjDataKind::String16),
        "string_table" => Some(ObjDataKind::StringTable),
        "wstring_table" => Some(ObjDataKind::String16Table),
        _ => None,
    }
}

pub fn write_splits<W: Write>(w: &mut W, obj: &ObjInfo) -> Result<()> {
    for unit in &obj.link_order {
        let obj_file = if let Some(obj_files_iter) = &mut obj_files_iter {
            obj_files_iter.next()
        } else {
            None
        };
        log::info!("Processing {} (obj file {:?})", unit, obj_file);
        if let Some(obj_file) = obj_file {
            let trim_unit = unit
                .trim_end_matches("_1")
                .trim_end_matches(" (asm)")
                .trim_end_matches(".o")
                .trim_end_matches(".cpp")
                .trim_end_matches(".c");
            if !obj_file.contains(trim_unit) {
                bail!("Unit mismatch: {} vs {}", unit, obj_file);
            }
            let trim_obj = obj_file
                .trim_end_matches(" \\")
                .trim_start_matches("\t$(BUILD_DIR)/")
                .trim_start_matches("asm/")
                .trim_start_matches("src/");
            writeln!(w, "{}:", trim_obj)?;
        } else {
            writeln!(w, "{}:", unit)?;
        }
        let mut split_iter = obj.splits.iter()
            .flat_map(|(addr, v)| v.iter().map(move |u| (addr, u))).peekable();
        while let Some((&addr, it_unit)) = split_iter.next() {
            if it_unit != unit {
        let mut split_iter = obj.splits_for_range(..).peekable();
        while let Some((addr, split)) = split_iter.next() {
            if &split.unit != unit {
                continue;
            }
            let end = split_iter.peek().map(|(&addr, _)| addr).unwrap_or(u32::MAX);
            let end = if split.end > 0 {
                split.end
            } else {
                split_iter.peek().map(|&(addr, _)| addr).unwrap_or(0)
            };
            let section = obj.section_at(addr)?;
            writeln!(w, "\t{:<11} start:{:#010X} end:{:#010X}", section.name, addr, end)?;
            // align:{}
@ -232,7 +267,7 @@ pub fn write_splits<W: Write>(

enum SplitLine {
    Unit { name: String },
    Section { name: String, start: u32, end: u32, align: Option<u32> },
    Section { name: String, start: u32, end: u32, align: Option<u32>, common: bool },
    None,
}

@ -245,40 +280,49 @@ fn parse_split_line(line: &str) -> Result<SplitLine> {

    if line.is_empty() || COMMENT_LINE.is_match(line) {
        Ok(SplitLine::None)
    } else if let Some(captures) = UNIT_LINE.captures(&line) {
    } else if let Some(captures) = UNIT_LINE.captures(line) {
        let name = captures["name"].to_string();
        Ok(SplitLine::Unit { name })
    } else if let Some(captures) = SECTION_LINE.captures(&line) {
    } else if let Some(captures) = SECTION_LINE.captures(line) {
        let mut name = captures["name"].to_string();
        let mut start: Option<u32> = None;
        let mut end: Option<u32> = None;
        let mut align: Option<u32> = None;
        let mut common = false;

        let attrs = captures["attrs"].split(' ');
        for attr in attrs {
            if let Some((attr, value)) = attr.split_once(':') {
                match attr {
                    "start" => {
                        start = Some(parse_hex(&value)?);
                        start = Some(parse_hex(value)?);
                    }
                    "end" => {
                        end = Some(parse_hex(&value)?);
                        end = Some(parse_hex(value)?);
                    }
                    "align" => align = Some(u32::from_str(value)?),
                    "rename" => name = value.to_string(),
                    _ => bail!("Unknown attribute '{name}'"),
                    _ => bail!("Unknown split attribute '{name}'"),
                }
            } else {
                bail!("Unknown attribute '{attr}'")
                match attr {
                    "common" => {
                        common = true;
                        if align.is_none() {
                            align = Some(4);
                        }
                    }
                    _ => bail!("Unknown split attribute '{attr}'"),
                }
            }
        }
        if let (Some(start), Some(end)) = (start, end) {
            Ok(SplitLine::Section { name, start, end, align })
            Ok(SplitLine::Section { name, start, end, align, common })
        } else {
            Err(anyhow!("Missing attribute: '{line}'"))
            Err(anyhow!("Missing split attribute: '{line}'"))
        }
    } else {
        Err(anyhow!("Failed to parse line: '{line}'"))
        Err(anyhow!("Failed to parse split line: '{line}'"))
    }
}

@ -302,10 +346,8 @@ pub fn apply_splits<R: BufRead>(r: R, obj: &mut ObjInfo) -> Result<()> {
            (SplitState::None, SplitLine::Section { name, .. }) => {
                bail!("Section {} defined outside of unit", name);
            }
            (SplitState::Unit(unit), SplitLine::Section { name, start, end, align }) => {
                let _ = end;
                let _ = align;
                obj.splits.nested_push(start, unit.clone());
            (SplitState::Unit(unit), SplitLine::Section { name, start, end, align, common }) => {
                obj.splits.nested_push(start, ObjSplit { unit: unit.clone(), end, align, common });
                obj.named_sections.insert(start, name);
            }
            _ => {}
113
src/util/dol.rs
@ -33,27 +33,8 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
        let mmap = map_file(path)?;
        Dol::read_from(map_reader(&mmap))?
    };
    let mut obj = ObjInfo {
        module_id: 0,
        kind: ObjKind::Executable,
        architecture: ObjArchitecture::PowerPc,
        name,
        symbols: vec![],
        sections: vec![],
        entry: dol.header.entry_point as u64,
        sda2_base: None,
        sda_base: None,
        stack_address: None,
        stack_end: None,
        db_stack_addr: None,
        arena_lo: None,
        arena_hi: None,
        splits: Default::default(),
        named_sections: Default::default(),
        link_order: vec![],
        known_functions: Default::default(),
        unresolved_relocations: vec![],
    };
    let mut obj = ObjInfo::new(ObjKind::Executable, ObjArchitecture::PowerPc, name, vec![], vec![]);
    obj.entry = dol.header.entry_point as u64;

    // Locate _rom_copy_info
    let first_rom_section = dol
@ -355,7 +336,8 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
    if let (Some(rom_copy_info_addr), Some(rom_copy_info_end)) =
        (rom_copy_info_addr, rom_copy_info_end)
    {
        obj.symbols.push(ObjSymbol {
        obj.add_symbol(
            ObjSymbol {
                name: "_rom_copy_info".to_string(),
                demangled_name: None,
                address: rom_copy_info_addr as u64,
@ -364,14 +346,19 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                size_known: true,
                flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
                kind: ObjSymbolKind::Object,
        });
                align: None,
                data_kind: Default::default(),
            },
            true,
        )?;
    }

    // Generate _bss_init_info symbol
    if let (Some(bss_init_info_addr), Some(bss_init_info_end)) =
        (bss_init_info_addr, bss_init_info_end)
    {
        obj.symbols.push(ObjSymbol {
        obj.add_symbol(
            ObjSymbol {
                name: "_bss_init_info".to_string(),
                demangled_name: None,
                address: bss_init_info_addr as u64,
@ -380,12 +367,17 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                size_known: true,
                flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
                kind: ObjSymbolKind::Object,
        });
                align: None,
                data_kind: Default::default(),
            },
            true,
        )?;
    }

    // Generate _eti_init_info symbol
    if let Some((eti_init_info_addr, eti_init_info_end)) = eti_init_info_range {
        obj.symbols.push(ObjSymbol {
        obj.add_symbol(
            ObjSymbol {
                name: "_eti_init_info".to_string(),
                demangled_name: None,
                address: eti_init_info_addr as u64,
@ -394,7 +386,11 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                size_known: true,
                flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
                kind: ObjSymbolKind::Object,
        });
                align: None,
                data_kind: Default::default(),
            },
            true,
        )?;
    }

    // Generate symbols for extab & extabindex entries
@ -402,7 +398,12 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
        (extabindex_section, extab_section)
    {
        let extabindex_section = &obj.sections[extabindex_section_idx];
        let extabindex_section_index = extabindex_section.index;
        let extabindex_section_address = extabindex_section.address;
        let extabindex_section_size = extabindex_section.size;
        let extab_section = &obj.sections[extab_section_idx];
        let extab_section_index = extab_section.index;

        for entry in &eti_entries {
            // Add functions from extabindex entries as known function bounds
            if let Some(old_value) = obj.known_functions.insert(entry.function, entry.function_size)
@ -416,16 +417,21 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                );
            }
        }
            obj.symbols.push(ObjSymbol {
            obj.add_symbol(
                ObjSymbol {
                    name: format!("@eti_{:08X}", entry.address),
                    demangled_name: None,
                    address: entry.address as u64,
                    section: Some(extabindex_section.index),
                    section: Some(extabindex_section_index),
                    size: 12,
                    size_known: true,
                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Local | ObjSymbolFlags::Hidden),
                    kind: ObjSymbolKind::Object,
            });
                    align: None,
                    data_kind: Default::default(),
                },
                false,
            )?;
        }

        let mut entry_iter = eti_entries.iter().peekable();
@ -434,20 +440,25 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                (Some(a), Some(&b)) => (a.extab_addr, b.extab_addr - a.extab_addr),
                (Some(a), None) => (
                    a.extab_addr,
                    (extabindex_section.address + extabindex_section.size) as u32 - a.extab_addr,
                    (extabindex_section_address + extabindex_section_size) as u32 - a.extab_addr,
                ),
                _ => break,
            };
            obj.symbols.push(ObjSymbol {
            obj.add_symbol(
                ObjSymbol {
                    name: format!("@etb_{:08X}", addr),
                    demangled_name: None,
                    address: addr as u64,
                    section: Some(extab_section.index),
                    section: Some(extab_section_index),
                    size: size as u64,
                    size_known: true,
                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Local | ObjSymbolFlags::Hidden),
                    kind: ObjSymbolKind::Object,
            });
                    align: None,
                    data_kind: Default::default(),
                },
                false,
            )?;
        }
    }

@ -456,7 +467,8 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
        Ok(true) => {
            let sda2_base = obj.sda2_base.unwrap();
            let sda_base = obj.sda_base.unwrap();
            obj.symbols.push(ObjSymbol {
            obj.add_symbol(
                ObjSymbol {
                    name: "_SDA2_BASE_".to_string(),
                    demangled_name: None,
                    address: sda2_base as u64,
@ -465,8 +477,13 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                    size_known: false,
                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
                    kind: ObjSymbolKind::Unknown,
            });
            obj.symbols.push(ObjSymbol {
                    align: None,
                    data_kind: Default::default(),
                },
                true,
            )?;
            obj.add_symbol(
                ObjSymbol {
                    name: "_SDA_BASE_".to_string(),
                    demangled_name: None,
                    address: sda_base as u64,
@ -475,7 +492,11 @@ pub fn process_dol<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                    size_known: false,
                    flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
                    kind: ObjSymbolKind::Unknown,
            });
                    align: None,
                    data_kind: Default::default(),
                },
                true,
            )?;
        }
        Ok(false) => {
            log::warn!("Unable to locate SDA bases");
@ -512,17 +533,17 @@ struct EtiEntry {
}

fn read_eti_init_info(dol: &Dol, addr: u32) -> Result<EtiInitInfo> {
    let eti_start = read_u32(&dol, addr)?;
    let eti_end = read_u32(&dol, addr + 4)?;
    let code_start = read_u32(&dol, addr + 8)?;
    let code_size = read_u32(&dol, addr + 12)?;
    let eti_start = read_u32(dol, addr)?;
    let eti_end = read_u32(dol, addr + 4)?;
    let code_start = read_u32(dol, addr + 8)?;
    let code_size = read_u32(dol, addr + 12)?;
    Ok(EtiInitInfo { eti_start, eti_end, code_start, code_size })
}

fn read_eti_entry(dol: &Dol, address: u32) -> Result<EtiEntry> {
    let function = read_u32(&dol, address)?;
    let function_size = read_u32(&dol, address + 4)?;
    let extab_addr = read_u32(&dol, address + 8)?;
    let function = read_u32(dol, address)?;
    let function_size = read_u32(dol, address + 4)?;
    let extab_addr = read_u32(dol, address + 8)?;
    Ok(EtiEntry { address, function, function_size, extab_addr })
}

@ -538,7 +559,7 @@ fn validate_eti_init_info(
        && eti_init_info.eti_end >= eti_section.target
        && eti_init_info.eti_end < eti_section_end
    {
        if let Some(code_section) = section_by_address(&dol, eti_init_info.code_start) {
        if let Some(code_section) = section_by_address(dol, eti_init_info.code_start) {
            let code_section_size = match rom_sections.get(&code_section.target) {
                Some(&size) => size,
                None => code_section.size,
@ -601,7 +601,7 @@ impl Type {
                let tag = tags
                    .get(&key)
                    .ok_or_else(|| anyhow!("Failed to locate user defined type {}", key))?;
                let ud_type = ud_type(&tags, tag)?;
                let ud_type = ud_type(tags, tag)?;
                ud_type.size(tags)
            }
        }
@ -784,7 +784,7 @@ pub fn struct_def_string(
        if let Some(bit) = &member.bit {
            write!(out, " : {}", bit.bit_size)?;
        }
        write!(out, "; // offset {:#X}, size {:#X}\n", member.offset, member.kind.size(tags)?)?;
        writeln!(out, "; // offset {:#X}, size {:#X}", member.offset, member.kind.size(tags)?)?;
    }
    write!(out, "}}")?;
    Ok(out)
@ -866,8 +866,16 @@ pub fn process_variable_location(block: &[u8]) -> Result<String> {
    // TODO: float regs
    if block.len() == 5 && block[0] == LocationOp::Register as u8 {
        Ok(format!("r{}", u32::from_be_bytes(block[1..].try_into()?)))
    } else if block.len() == 11 && block[0] == LocationOp::BaseRegister as u8 && block[5] == LocationOp::Const as u8 && block[10] == LocationOp::Add as u8 {
        Ok(format!("r{}+{:#X}", u32::from_be_bytes(block[1..5].try_into()?), u32::from_be_bytes(block[6..10].try_into()?)))
    } else if block.len() == 11
        && block[0] == LocationOp::BaseRegister as u8
        && block[5] == LocationOp::Const as u8
        && block[10] == LocationOp::Add as u8
    {
        Ok(format!(
            "r{}+{:#X}",
            u32::from_be_bytes(block[1..5].try_into()?),
            u32::from_be_bytes(block[6..10].try_into()?)
        ))
    } else {
        Err(anyhow!("Unhandled location data {:?}, expected variable loc", block))
    }
263
src/util/elf.rs
@ -1,10 +1,11 @@
use std::{
    collections::{btree_map::Entry, hash_map, BTreeMap, HashMap},
    collections::{hash_map, BTreeMap, HashMap},
    io::Cursor,
    path::Path,
};

use anyhow::{anyhow, bail, ensure, Context, Result};
use byteorder::{BigEndian, WriteBytesExt};
use cwdemangle::demangle;
use flagset::Flags;
use indexmap::IndexMap;
@ -16,24 +17,20 @@ use object::{
        StringId,
    },
    Architecture, Endianness, Object, ObjectKind, ObjectSection, ObjectSymbol, Relocation,
    RelocationKind, RelocationTarget, Section, SectionKind, Symbol, SymbolKind, SymbolScope,
    SymbolSection,
    RelocationKind, RelocationTarget, SectionKind, Symbol, SymbolKind, SymbolScope, SymbolSection,
};

use crate::{
    obj::{
        ObjArchitecture, ObjInfo, ObjKind, ObjReloc, ObjRelocKind, ObjSection, ObjSectionKind,
        ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
        ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
    },
    util::{
        dwarf::{
            process_address, process_type, read_debug_section, type_string, ud_type,
            ud_type_string, AttributeKind, TagKind, TypeKind,
        },
        comment::{write_comment_sym, MWComment},
        file::map_file,
        nested::NestedVec,
    },
};
use crate::util::nested::NestedVec;

enum BoundaryState {
    /// Looking for a file symbol, any section symbols are queued
@ -70,11 +67,17 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
    let mut sections: Vec<ObjSection> = vec![];
    let mut section_indexes: Vec<Option<usize>> = vec![];
    for section in obj_file.sections() {
        if section.size() == 0 {
            section_indexes.push(None);
            continue;
        }
        let section_name = section.name()?;
        let section_kind = match section.kind() {
            SectionKind::Text => ObjSectionKind::Code,
            SectionKind::Data => ObjSectionKind::Data,
            SectionKind::ReadOnlyData => ObjSectionKind::ReadOnlyData,
            SectionKind::UninitializedData => ObjSectionKind::Bss,
            // SectionKind::Other if section_name == ".comment" => ObjSectionKind::Comment,
            _ => {
                section_indexes.push(None);
                continue;
@ -82,7 +85,7 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
        };
        section_indexes.push(Some(sections.len()));
        sections.push(ObjSection {
            name: section.name()?.to_string(),
            name: section_name.to_string(),
            kind: section_kind,
            address: section.address(),
            size: section.size(),
@ -232,14 +235,14 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
                }
                BoundaryState::FilesEnded => {}
            },
            SymbolSection::Undefined => {}
            SymbolSection::Common | SymbolSection::Undefined => {}
            _ => bail!("Unsupported symbol section type {symbol:?}"),
        },
        }

        // Generate symbols
        if matches!(symbol.kind(), SymbolKind::Null | SymbolKind::File)
            || matches!(symbol.section_index(), Some(idx) if section_indexes[idx.0] == None)
            || matches!(symbol.section_index(), Some(idx) if section_indexes[idx.0].is_none())
        {
            symbol_indexes.push(None);
            continue;
@ -249,7 +252,7 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
    }

    let mut link_order = Vec::<String>::new();
    let mut splits = BTreeMap::<u32, Vec<String>>::new();
    let mut splits = BTreeMap::<u32, Vec<ObjSplit>>::new();
    if kind == ObjKind::Executable {
        // Link order is trivially deduced
        for file_name in section_starts.keys() {
@ -259,7 +262,12 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
        // Create a map of address -> file splits
        for (file_name, sections) in section_starts {
            for (address, _) in sections {
                splits.nested_push(address as u32, file_name.clone());
                splits.nested_push(address as u32, ObjSplit {
                    unit: file_name.clone(),
                    end: 0, // TODO
                    align: None,
                    common: false, // TODO
                });
            }
        }

@ -283,27 +291,30 @@ pub fn process_elf<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
        }
    }

    Ok(ObjInfo {
        module_id: 0,
        kind,
        architecture,
        name: obj_name,
        symbols,
        sections,
        entry: obj_file.entry(),
        sda2_base,
        sda_base,
        stack_address,
        stack_end,
        db_stack_addr,
        arena_lo,
        arena_hi,
        splits,
        named_sections: Default::default(),
        link_order,
        known_functions: Default::default(),
        unresolved_relocations: vec![],
    })
    let mw_comment = if let Some(comment_section) = obj_file.section_by_name(".comment") {
        let data = comment_section.uncompressed_data()?;
        let mut reader = Cursor::new(&*data);
        let header = MWComment::parse_header(&mut reader)?;
        log::info!("Loaded comment header {:?}", header);

        header
    } else {
        MWComment::default()
    };

    let mut obj = ObjInfo::new(kind, architecture, obj_name, symbols, sections);
    obj.entry = obj_file.entry();
    obj.mw_comment = mw_comment;
    obj.sda2_base = sda2_base;
    obj.sda_base = sda_base;
    obj.stack_address = stack_address;
    obj.stack_end = stack_end;
    obj.db_stack_addr = db_stack_addr;
    obj.arena_lo = arena_lo;
    obj.arena_hi = arena_hi;
    obj.splits = splits;
    obj.link_order = link_order;
    Ok(obj)
}

pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
@ -319,6 +330,7 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
        rela_name: Option<StringId>,
    }
    struct OutSymbol {
        #[allow(dead_code)]
        index: SymbolIndex,
        sym: object::write::elf::Sym,
    }
@ -337,26 +349,50 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
            rela_name: None,
        });
    }

    let mut rela_names: Vec<String> = vec![Default::default(); obj.sections.len()];
    for ((section, out_section), rela_name) in
        obj.sections.iter().zip(&mut out_sections).zip(&mut rela_names)
    {
        if !section.relocations.is_empty() {
        if section.relocations.is_empty() {
            continue;
        }
        *rela_name = format!(".rela{}", section.name);
        out_section.rela_name = Some(writer.add_section_name(rela_name.as_bytes()));
        out_section.rela_index = Some(writer.reserve_section_index());
    }
    }
    let symtab = writer.reserve_symtab_section_index();
    writer.reserve_shstrtab_section_index();
    writer.reserve_strtab_section_index();

    // Add symbols
    let mut out_symbols: Vec<OutSymbol> = Vec::with_capacity(obj.symbols.len());
    let mut symbol_offset = 0;
    let symtab = writer.reserve_symtab_section_index();
    writer.reserve_strtab_section_index();
    writer.reserve_shstrtab_section_index();

    // Generate comment section
    let mut comment_data = if obj.kind == ObjKind::Relocatable {
        // let mut comment_data = Vec::<u8>::with_capacity(0x2C + obj.symbols.len() * 8);
        // let name = writer.add_section_name(".comment".as_bytes());
        // let index = writer.reserve_section_index();
        // out_sections.push(OutSection {
        //     index,
        //     rela_index: None,
        //     offset: 0,
        //     rela_offset: 0,
        //     name,
        //     rela_name: None,
        // });
        // obj.mw_comment.write_header(&mut comment_data)?;
        // Some(comment_data)
        None::<Vec<u8>>
    } else {
        None
    };

    let mut out_symbols: Vec<OutSymbol> = Vec::with_capacity(obj.symbols.count());
    let mut symbol_map = vec![None; obj.symbols.count()];
    let mut section_symbol_offset = 0;
    let mut num_local = 0;
    if !obj.name.is_empty() {

    // Add file symbol
    if !obj.name.is_empty() {
        let name_index = writer.add_string(obj.name.as_bytes());
        let index = writer.reserve_symbol_index(None);
        out_symbols.push(OutSymbol {
@ -375,9 +411,42 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
                st_size: 0,
            },
        });
        symbol_offset += 1;
        if let Some(comment_data) = &mut comment_data {
            comment_data.write_u64::<BigEndian>(0)?;
        }
    for symbol in &obj.symbols {
        section_symbol_offset += 1;
    }

    // Add section symbols for relocatable objects
    if obj.kind == ObjKind::Relocatable {
        for section in &obj.sections {
            let section_index = out_sections.get(section.index).map(|s| s.index);
            let index = writer.reserve_symbol_index(section_index);
            let name_index = writer.add_string(section.name.as_bytes());
            let sym = object::write::elf::Sym {
                name: Some(name_index),
                section: section_index,
                st_info: (elf::STB_LOCAL << 4) + elf::STT_SECTION,
                st_other: elf::STV_DEFAULT,
                st_shndx: 0,
                st_value: 0,
                st_size: section.size,
            };
            num_local = writer.symbol_count();
            out_symbols.push(OutSymbol { index, sym });
        }
    }

    // Add symbols
    for (symbol, symbol_map) in obj.symbols.iter().zip(&mut symbol_map) {
        if obj.kind == ObjKind::Relocatable && symbol.kind == ObjSymbolKind::Section {
            // We wrote section symbols above, so skip them here
            let section_index =
                symbol.section.ok_or_else(|| anyhow!("section symbol without section index"))?;
            *symbol_map = Some(section_symbol_offset + section_index as u32);
            continue;
        }

        let section_index = symbol.section.and_then(|idx| out_sections.get(idx)).map(|s| s.index);
        let index = writer.reserve_symbol_index(section_index);
        let name_index = if symbol.name.is_empty() {
@ -429,6 +498,10 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
            num_local = writer.symbol_count();
        }
        out_symbols.push(OutSymbol { index, sym });
        *symbol_map = Some(index.0);
        if let Some(comment_data) = &mut comment_data {
            write_comment_sym(comment_data, symbol)?;
        }
    }

    writer.reserve_file_header();
@ -438,17 +511,19 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
    }

    for (section, out_section) in obj.sections.iter().zip(&mut out_sections) {
        match section.kind {
            ObjSectionKind::Code | ObjSectionKind::Data | ObjSectionKind::ReadOnlyData => {}
            ObjSectionKind::Bss => continue,
        if section.kind == ObjSectionKind::Bss {
            continue;
        }
        ensure!(section.data.len() as u64 == section.size, "Mismatched section size");
        ensure!(section.data.len() as u64 == section.size);
        if section.size == 0 {
            // Bug in Writer::reserve doesn't align when len is 0
            let offset = (writer.reserved_len() + 31) & !31;
            writer.reserve_until(offset);
            out_section.offset = offset;
        } else {
            out_section.offset = writer.reserve(section.data.len(), 32);
        }

    writer.reserve_shstrtab();
    writer.reserve_strtab();
    writer.reserve_symtab();
    }

    for (section, out_section) in obj.sections.iter().zip(&mut out_sections) {
        if section.relocations.is_empty() {
@ -457,6 +532,16 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
        out_section.rela_offset = writer.reserve_relocations(section.relocations.len(), true);
    }

    writer.reserve_symtab();
    writer.reserve_strtab();
    writer.reserve_shstrtab();

    // Reserve comment section
    if let Some(comment_data) = &comment_data {
        let out_section = out_sections.last_mut().unwrap();
        out_section.offset = writer.reserve(comment_data.len(), 32);
    }

    writer.reserve_section_headers();

    writer.write_file_header(&object::write::elf::FileHeader {
@ -499,24 +584,16 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
            continue;
        }
        writer.write_align(32);
        debug_assert_eq!(writer.len(), out_section.offset);
        ensure!(writer.len() == out_section.offset);
        writer.write(&section.data);
    }

    writer.write_shstrtab();
    writer.write_strtab();

    writer.write_null_symbol();
    for out_symbol in &out_symbols {
        writer.write_symbol(&out_symbol.sym);
    }

    for (section, out_section) in obj.sections.iter().zip(&out_sections) {
        if section.relocations.is_empty() {
            continue;
        }
        writer.write_align_relocation();
        debug_assert_eq!(writer.len(), out_section.rela_offset);
        ensure!(writer.len() == out_section.rela_offset);
        for reloc in &section.relocations {
            let mut r_offset = reloc.address;
            let r_type = match reloc.kind {
@ -540,11 +617,11 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
                    elf::R_PPC_ADDR16_LO
                }
                ObjRelocKind::PpcRel24 => {
                    r_offset = r_offset & !3;
                    r_offset &= !3;
                    elf::R_PPC_REL24
                }
                ObjRelocKind::PpcRel14 => {
                    r_offset = r_offset & !3;
                    r_offset &= !3;
                    elf::R_PPC_REL14
                }
                ObjRelocKind::PpcEmbSda21 => {
@ -552,15 +629,28 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
                    elf::R_PPC_EMB_SDA21
                }
            };
            writer.write_relocation(true, &Rel {
                r_offset,
                r_sym: (reloc.target_symbol + symbol_offset + 1) as u32,
                r_type,
                r_addend: reloc.addend,
            });
            let r_sym = symbol_map[reloc.target_symbol]
                .ok_or_else(|| anyhow!("Relocation against stripped symbol"))?;
            writer.write_relocation(true, &Rel { r_offset, r_sym, r_type, r_addend: reloc.addend });
        }
    }

    writer.write_null_symbol();
    for out_symbol in &out_symbols {
        writer.write_symbol(&out_symbol.sym);
    }

    writer.write_strtab();
    writer.write_shstrtab();

    // Write comment section
    if let Some(comment_data) = &comment_data {
        let out_section = out_sections.last().unwrap();
        writer.write_align(32);
        ensure!(writer.len() == out_section.offset);
        writer.write(comment_data);
    }

    writer.write_null_section_header();
    for (section, out_section) in obj.sections.iter().zip(&out_sections) {
        writer.write_section_header(&SectionHeader {
@ -598,11 +688,29 @@ pub fn write_elf(obj: &ObjInfo) -> Result<Vec<u8>> {
            true,
        );
    }
    writer.write_symtab_section_header(num_local);
    writer.write_shstrtab_section_header();
    writer.write_strtab_section_header();

    debug_assert_eq!(writer.reserved_len(), writer.len());
    writer.write_symtab_section_header(num_local);
    writer.write_strtab_section_header();
    writer.write_shstrtab_section_header();

    // Write comment section header
    if let Some(comment_data) = &comment_data {
        let out_section = out_sections.last().unwrap();
        writer.write_section_header(&SectionHeader {
            name: Some(out_section.name),
            sh_type: SHT_PROGBITS,
            sh_flags: 0,
            sh_addr: 0,
            sh_offset: out_section.offset as u64,
            sh_size: comment_data.len() as u64,
            sh_link: 0,
            sh_info: 0,
            sh_addralign: 1,
            sh_entsize: 1,
        });
    }

    ensure!(writer.reserved_len() == writer.len());
    Ok(out_data)
}

@ -655,6 +763,9 @@ fn to_obj_symbol(
            SymbolKind::Section => ObjSymbolKind::Section,
            _ => bail!("Unsupported symbol kind: {:?}", symbol.kind()),
        },
        // TODO common symbol value?
        align: None,
        data_kind: Default::default(),
    })
}

187
src/util/file.rs
@ -1,13 +1,15 @@
use std::{
    fs::File,
    io::{BufReader, Cursor, Read},
    path::Path,
    io::{BufRead, BufReader, Cursor, Read},
    path::{Path, PathBuf},
};

use anyhow::{Context, Result};
use anyhow::{anyhow, Context, Result};
use byteorder::ReadBytesExt;
use memmap2::{Mmap, MmapOptions};

use crate::util::{rarc, rarc::Node, yaz0};

/// Opens a memory mapped file.
pub fn map_file<P: AsRef<Path>>(path: P) -> Result<Mmap> {
    let file = File::open(&path)
@ -21,7 +23,7 @@ pub type Reader<'a> = Cursor<&'a [u8]>;

/// Creates a reader for the memory mapped file.
#[inline]
pub fn map_reader(mmap: &Mmap) -> Reader { Cursor::new(&*mmap) }
pub fn map_reader(mmap: &Mmap) -> Reader { Cursor::new(&**mmap) }

/// Creates a buffered reader around a file (not memory mapped).
pub fn buf_reader<P: AsRef<Path>>(path: P) -> Result<BufReader<File>> {
@ -55,3 +57,180 @@ pub fn read_c_string(reader: &mut Reader, off: u64) -> Result<String> {
    reader.set_position(pos);
    Ok(s)
}

/// Process response files (starting with '@') and glob patterns (*).
pub fn process_rsp(files: &[PathBuf]) -> Result<Vec<PathBuf>> {
    let mut out = Vec::with_capacity(files.len());
    for path in files {
        let path_str =
            path.to_str().ok_or_else(|| anyhow!("'{}' is not valid UTF-8", path.display()))?;
        if let Some(rsp_file) = path_str.strip_prefix('@') {
            let reader = buf_reader(rsp_file)?;
            for result in reader.lines() {
                let line = result?;
                if !line.is_empty() {
                    out.push(PathBuf::from(line));
                }
            }
        } else if path_str.contains('*') {
            for entry in glob::glob(path_str)? {
                out.push(entry?);
            }
        } else {
            out.push(path.clone());
        }
    }
    Ok(out)
}
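A brief usage sketch for the expansion behavior documented above (the input paths and the response file name are hypothetical, not part of this commit):

// Hypothetical inputs: "@objects.rsp" is expanded line-by-line, "asm/*.s" is globbed,
// and a plain path passes through unchanged.
use std::path::PathBuf;

fn expand_inputs() -> anyhow::Result<Vec<PathBuf>> {
    let inputs = [
        PathBuf::from("@objects.rsp"),
        PathBuf::from("asm/*.s"),
        PathBuf::from("src/main.c"),
    ];
    process_rsp(&inputs)
}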
/// Iterator over files in a RARC archive.
struct RarcIterator {
    file: Mmap,
    paths: Vec<(PathBuf, u64, u32)>,
    index: usize,
}

impl RarcIterator {
    pub fn new(file: Mmap, base_path: &Path) -> Result<Self> {
        let reader = rarc::RarcReader::new(map_reader(&file))?;
        let paths = Self::collect_paths(&reader, base_path);
        Ok(Self { file, paths, index: 0 })
    }

    fn collect_paths(reader: &rarc::RarcReader, base_path: &Path) -> Vec<(PathBuf, u64, u32)> {
        let mut current_path = PathBuf::new();
        let mut paths = vec![];
        for node in reader.nodes() {
            match node {
                Node::DirectoryBegin { name } => {
                    current_path.push(name.name);
                }
                Node::DirectoryEnd { name: _ } => {
                    current_path.pop();
                }
                Node::File { name, offset, size } => {
                    let path = base_path.join(&current_path).join(name.name);
                    paths.push((path, offset, size));
                }
                Node::CurrentDirectory => {}
                Node::ParentDirectory => {}
            }
        }
        paths
    }

    fn decompress_if_needed(buf: &[u8]) -> Result<Vec<u8>> {
        if buf.len() > 4 && buf[0..4] == *b"Yaz0" {
            yaz0::decompress_file(&mut Cursor::new(buf))
        } else {
            Ok(buf.to_vec())
        }
    }
}

impl Iterator for RarcIterator {
|
||||
type Item = Result<(PathBuf, Vec<u8>)>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.index >= self.paths.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let (path, off, size) = self.paths[self.index].clone();
|
||||
self.index += 1;
|
||||
|
||||
let slice = &self.file[off as usize..off as usize + size as usize];
|
||||
match Self::decompress_if_needed(slice) {
|
||||
Ok(buf) => Some(Ok((path, buf))),
|
||||
Err(e) => Some(Err(e)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A file entry, either a memory mapped file or an owned buffer.
|
||||
pub enum FileEntry {
|
||||
Map(Mmap),
|
||||
Buffer(Vec<u8>),
|
||||
}
|
||||
|
||||
impl FileEntry {
|
||||
/// Creates a reader for the file.
|
||||
pub fn as_reader(&self) -> Reader {
|
||||
match self {
|
||||
Self::Map(map) => map_reader(map),
|
||||
Self::Buffer(slice) => Cursor::new(slice),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterate over file paths, expanding response files (@) and glob patterns (*).
|
||||
/// If a file is a RARC archive, iterate over its contents.
|
||||
/// If a file is a Yaz0 compressed file, decompress it.
|
||||
pub struct FileIterator {
|
||||
paths: Vec<PathBuf>,
|
||||
index: usize,
|
||||
rarc: Option<RarcIterator>,
|
||||
}
|
||||
|
||||
impl FileIterator {
|
||||
pub fn new(paths: &[PathBuf]) -> Result<Self> {
|
||||
Ok(Self { paths: process_rsp(paths)?, index: 0, rarc: None })
|
||||
}
|
||||
|
||||
fn next_rarc(&mut self) -> Option<Result<(PathBuf, FileEntry)>> {
|
||||
if let Some(rarc) = &mut self.rarc {
|
||||
match rarc.next() {
|
||||
Some(Ok((path, buf))) => return Some(Ok((path, FileEntry::Buffer(buf)))),
|
||||
Some(Err(err)) => return Some(Err(err)),
|
||||
None => self.rarc = None,
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn next_path(&mut self) -> Option<Result<(PathBuf, FileEntry)>> {
|
||||
if self.index >= self.paths.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let path = self.paths[self.index].clone();
|
||||
self.index += 1;
|
||||
match map_file(&path) {
|
||||
Ok(map) => self.handle_file(map, path),
|
||||
Err(err) => Some(Err(err)),
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_file(&mut self, map: Mmap, path: PathBuf) -> Option<Result<(PathBuf, FileEntry)>> {
|
||||
if map.len() <= 4 {
|
||||
return Some(Ok((path, FileEntry::Map(map))));
|
||||
}
|
||||
|
||||
match &map[0..4] {
|
||||
b"Yaz0" => self.handle_yaz0(map, path),
|
||||
b"RARC" => self.handle_rarc(map, path),
|
||||
_ => Some(Ok((path, FileEntry::Map(map)))),
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_yaz0(&mut self, map: Mmap, path: PathBuf) -> Option<Result<(PathBuf, FileEntry)>> {
|
||||
Some(match yaz0::decompress_file(&mut map_reader(&map)) {
|
||||
Ok(buf) => Ok((path, FileEntry::Buffer(buf))),
|
||||
Err(e) => Err(e),
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_rarc(&mut self, map: Mmap, path: PathBuf) -> Option<Result<(PathBuf, FileEntry)>> {
|
||||
self.rarc = match RarcIterator::new(map, &path) {
|
||||
Ok(iter) => Some(iter),
|
||||
Err(e) => return Some(Err(e)),
|
||||
};
|
||||
self.next()
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for FileIterator {
|
||||
type Item = Result<(PathBuf, FileEntry)>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> { self.next_rarc().or_else(|| self.next_path()) }
|
||||
}
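For reference, a minimal usage sketch of the iterator added above; the argument handling and printing are illustrative only, and `dump_sizes` is a hypothetical helper, not part of this commit.

use std::path::PathBuf;

use anyhow::Result;

fn dump_sizes(args: &[PathBuf]) -> Result<()> {
    // FileIterator expands response files and globs, decompresses Yaz0 data,
    // and descends into RARC archives, yielding (path, FileEntry) pairs.
    for result in FileIterator::new(args)? {
        let (path, entry) = result?;
        // Plain files stay memory mapped; decompressed or extracted data is owned.
        let len = match &entry {
            FileEntry::Map(map) => map.len(),
            FileEntry::Buffer(buf) => buf.len(),
        };
        println!("{}: {} bytes", path.display(), len);
    }
    Ok(())
}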
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::obj::ObjInfo;
|
||||
|
||||
pub fn generate_ldscript(obj: &ObjInfo) -> Result<String> {
|
||||
let stack_size = match (obj.stack_address, obj.stack_end) {
|
||||
(Some(stack_address), Some(stack_end)) => stack_address - stack_end,
|
||||
_ => 65535, // default
|
||||
};
|
||||
|
||||
let section_defs = obj
|
||||
.sections
|
||||
.iter()
|
||||
.map(|s| format!("{} ALIGN({:#X}):{{}}", s.name, 0x20 /* TODO */))
|
||||
.join("\n ");
|
||||
|
||||
let mut force_files = Vec::with_capacity(obj.link_order.len());
|
||||
for unit in &obj.link_order {
|
||||
let obj_path = obj_path_for_unit(unit);
|
||||
force_files.push(obj_path.file_name().unwrap().to_str().unwrap().to_string());
|
||||
}
|
||||
|
||||
let out = include_str!("../../assets/ldscript.lcf")
|
||||
.replacen("$SECTIONS", §ion_defs, 1)
|
||||
.replacen("$STACKSIZE", &format!("{:#X}", stack_size), 1)
|
||||
.replacen("$FORCEFILES", &force_files.join("\n "), 1);
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
pub fn obj_path_for_unit(unit: &str) -> PathBuf {
|
||||
PathBuf::from(unit).with_extension("").with_extension("o")
|
||||
}
|
||||
|
||||
pub fn asm_path_for_unit(unit: &str) -> PathBuf {
|
||||
PathBuf::from(unit).with_extension("").with_extension("s")
|
||||
}
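A hedged sketch of how the generator above might be called; `write_ldscript` and the output location are hypothetical, and the `crate::obj::ObjInfo` path is taken from this diff.

use std::{fs, path::Path};

use anyhow::Result;

// Hypothetical helper: render the LCF template and write it out for the linker.
fn write_ldscript(obj: &crate::obj::ObjInfo, out_dir: &Path) -> Result<()> {
    let script = generate_ldscript(obj)?;
    fs::create_dir_all(out_dir)?;
    fs::write(out_dir.join("ldscript.lcf"), script)?;
    Ok(())
}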
|
400 src/util/map.rs
|
@ -1,16 +1,27 @@
|
|||
#![allow(dead_code)]
|
||||
#![allow(unused_mut)]
|
||||
use std::{
|
||||
collections::{btree_map, BTreeMap, HashMap},
|
||||
collections::{btree_map, BTreeMap, HashMap, HashSet},
|
||||
hash::Hash,
|
||||
io::BufRead,
|
||||
mem::replace,
|
||||
};
|
||||
|
||||
use anyhow::{bail, ensure, Error, Result};
|
||||
use anyhow::{anyhow, bail, ensure, Error, Result};
|
||||
use cwdemangle::{demangle, DemangleOptions};
|
||||
use multimap::MultiMap;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::{Captures, Regex};
|
||||
use topological_sort::TopologicalSort;
|
||||
|
||||
use crate::{
|
||||
obj::{
|
||||
section_kind_for_section, ObjInfo, ObjSplit, ObjSymbol, ObjSymbolFlagSet, ObjSymbolFlags,
|
||||
ObjSymbolKind,
|
||||
},
|
||||
util::nested::NestedVec,
|
||||
};
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||
pub enum SymbolKind {
|
||||
Function,
|
||||
|
@ -35,7 +46,7 @@ pub struct SymbolEntry {
|
|||
pub unit: Option<String>,
|
||||
pub address: u32,
|
||||
pub size: u32,
|
||||
pub section: String,
|
||||
pub align: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
|
||||
|
@ -55,10 +66,10 @@ fn is_code_section(section: &str) -> bool { matches!(section, ".text" | ".init")
|
|||
|
||||
/// Iterate over the BTreeMap and generate an ordered list of symbols and TUs by address.
|
||||
fn resolve_section_order(
|
||||
address_to_symbol: &BTreeMap<u32, SymbolRef>,
|
||||
_address_to_symbol: &BTreeMap<u32, SymbolRef>,
|
||||
symbol_entries: &mut HashMap<SymbolRef, SymbolEntry>,
|
||||
) -> Result<SectionOrder> {
|
||||
let mut ordering = SectionOrder::default();
|
||||
let ordering = SectionOrder::default();
|
||||
|
||||
// let mut last_unit = String::new();
|
||||
// let mut last_section = String::new();
|
||||
|
@ -138,17 +149,19 @@ fn resolve_section_order(
|
|||
/// There can be ambiguities, but any solution that satisfies the link order
|
||||
/// constraints is considered valid.
|
||||
// TODO account for library ordering
|
||||
#[allow(dead_code)]
|
||||
pub fn resolve_link_order(section_unit_order: &[(String, Vec<String>)]) -> Result<Vec<String>> {
|
||||
let mut global_unit_order = Vec::<String>::new();
|
||||
let mut t_sort = TopologicalSort::<String>::new();
|
||||
for (section, order) in section_unit_order {
|
||||
let mut order: &[String] = order;
|
||||
if matches!(section.as_str(), ".ctors" | ".dtors") && order.len() > 1 {
|
||||
// __init_cpp_exceptions.o has symbols that get ordered to the beginning of
|
||||
// .ctors and .dtors, so our topological sort would fail if we added them.
|
||||
// Always skip the first TU of .ctors and .dtors.
|
||||
order = &order[1..];
|
||||
let mut order = order.clone();
|
||||
if matches!(section.as_str(), ".ctors" | ".dtors" | "extab") {
|
||||
continue;
|
||||
// if order.len() > 1 {
|
||||
// // __init_cpp_exceptions.o has symbols that get ordered to the beginning of
|
||||
// // .ctors and .dtors, so our topological sort would fail if we added them.
|
||||
// // Always skip the first TU of .ctors and .dtors.
|
||||
// order = order[1..].to_vec();
|
||||
// }
|
||||
}
|
||||
for iter in order.windows(2) {
|
||||
t_sort.add_dependency(iter[0].clone(), iter[1].clone());
|
||||
|
@ -158,7 +171,7 @@ pub fn resolve_link_order(section_unit_order: &[(String, Vec<String>)]) -> Resul
|
|||
global_unit_order.push(unit);
|
||||
}
|
||||
// An incomplete topological sort indicates that a cyclic dependency was encountered.
|
||||
ensure!(t_sort.is_empty(), "Cyclic dependency encountered!");
|
||||
ensure!(t_sort.is_empty(), "Cyclic dependency encountered while resolving link order");
|
||||
// Sanity check, did we get all TUs in the final order?
|
||||
for (_, order) in section_unit_order {
|
||||
for unit in order {
|
||||
|
@ -173,6 +186,8 @@ macro_rules! static_regex {
|
|||
static $name: Lazy<Regex> = Lazy::new(|| Regex::new($str).unwrap());
|
||||
};
|
||||
}
|
||||
|
||||
// Link map
|
||||
static_regex!(LINK_MAP_START, "^Link map of (?P<entry>.*)$");
|
||||
static_regex!(
|
||||
LINK_MAP_ENTRY,
|
||||
|
@ -186,35 +201,54 @@ static_regex!(
|
|||
LINK_MAP_ENTRY_DUPLICATE,
|
||||
"^\\s*(?P<depth>\\d+)] >>> UNREFERENCED DUPLICATE (?P<sym>.*)$"
|
||||
);
|
||||
static_regex!(LINK_MAP_EXTERN_SYMBOL, "^\\s*>>> SYMBOL NOT FOUND: (.*)$");
|
||||
|
||||
// Section layout
|
||||
static_regex!(SECTION_LAYOUT_START, "^(?P<section>.*) section layout$");
|
||||
static_regex!(
|
||||
SECTION_LAYOUT_SYMBOL,
|
||||
"^\\s*(?P<rom_addr>[0-9A-Fa-f]+|UNUSED)\\s+(?P<size>[0-9A-Fa-f]+)\\s+(?P<addr>[0-9A-Fa-f]+|\\.{8})\\s+(?P<align>\\d+)?\\s*(?P<sym>.*?)(?:\\s+\\(entry of (?P<entry_of>.*?)\\))?\\s+(?P<tu>.*)$"
|
||||
"^\\s*(?P<rom_addr>[0-9A-Fa-f]+|UNUSED)\\s+(?P<size>[0-9A-Fa-f]+)\\s+(?P<addr>[0-9A-Fa-f]{8}|\\.{8})\\s+(?P<offset>[0-9A-Fa-f]{8}|\\.{8})\\s+(?P<align>\\d+)?\\s*(?P<sym>.*?)(?:\\s+\\(entry of (?P<entry_of>.*?)\\))?\\s+(?P<tu>.*)$"
|
||||
);
|
||||
static_regex!(
|
||||
SECTION_LAYOUT_HEADER,
|
||||
"^(\\s*Starting\\s+Virtual\\s*|\\s*address\\s+Size\\s+address\\s*|\\s*-----------------------\\s*)$"
|
||||
"^(\\s*Starting\\s+Virtual\\s*(File\\s*)?|\\s*address\\s+Size\\s+address\\s*(offset\\s*)?|\\s*-----------------------(----------)?\\s*)$"
|
||||
);
|
||||
static_regex!(MEMORY_MAP_HEADER, "^\\s*Memory map:\\s*$");
|
||||
static_regex!(EXTERN_SYMBOL, "^\\s*>>> SYMBOL NOT FOUND: (.*)$");
|
||||
static_regex!(LINKER_SYMBOLS_HEADER, "^\\s*Linker generated symbols:\\s*$");
|
||||
|
||||
// Memory map
|
||||
static_regex!(MEMORY_MAP_START, "^\\s*Memory map:\\s*$");
|
||||
static_regex!(MEMORY_MAP_HEADER, "^(\\s*Starting Size\\s+File\\s*|\\s*address\\s+Offset\\s*)$");
|
||||
static_regex!(MEMORY_MAP_ENTRY, "^\\s*(?P<section>\\S+)\\s+(?P<addr>[0-9A-Fa-f]+|\\.{0,8})\\s+(?P<size>[0-9A-Fa-f]+|\\.{1,8})\\s+(?P<offset>[0-9A-Fa-f]+|\\.{1,8})\\s*$");
|
||||
|
||||
// Linker generated symbols
|
||||
static_regex!(LINKER_SYMBOLS_START, "^\\s*Linker generated symbols:\\s*$");
|
||||
static_regex!(LINKER_SYMBOL_ENTRY, "^\\s*(?P<name>\\S+)\\s+(?P<addr>[0-9A-Fa-f]+|\\.{0,8})\\s*$");
|
||||
|
||||
pub struct SectionInfo {
|
||||
name: String,
|
||||
address: u32,
|
||||
size: u32,
|
||||
file_offset: u32,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct MapEntries {
|
||||
pub struct MapInfo {
|
||||
pub entry_point: String,
|
||||
pub symbols: HashMap<SymbolRef, SymbolEntry>,
|
||||
pub unit_entries: MultiMap<String, SymbolRef>,
|
||||
pub entry_references: MultiMap<SymbolRef, SymbolRef>,
|
||||
pub entry_referenced_from: MultiMap<SymbolRef, SymbolRef>,
|
||||
// pub address_to_symbol: BTreeMap<u32, SymbolRef>,
|
||||
// pub unit_section_ranges: HashMap<String, HashMap<String, Range<u32>>>,
|
||||
pub symbol_order: Vec<SymbolRef>,
|
||||
pub unit_order: Vec<(String, Vec<String>)>,
|
||||
// pub symbol_order: Vec<SymbolRef>,
|
||||
// pub unit_order: Vec<(String, Vec<String>)>,
|
||||
pub sections: BTreeMap<u32, SectionInfo>,
|
||||
pub link_map_symbols: HashMap<SymbolRef, SymbolEntry>,
|
||||
pub section_symbols: HashMap<String, BTreeMap<u32, Vec<SymbolEntry>>>,
|
||||
pub section_units: HashMap<String, Vec<(u32, String)>>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct LinkMapState {
|
||||
last_name: String,
|
||||
last_symbol_name: String,
|
||||
symbol_stack: Vec<SymbolRef>,
|
||||
}
|
||||
|
||||
|
@ -224,10 +258,6 @@ struct SectionLayoutState {
|
|||
current_unit: Option<String>,
|
||||
units: Vec<(u32, String)>,
|
||||
symbols: BTreeMap<u32, Vec<SymbolEntry>>,
|
||||
// unit_override: Option<String>,
|
||||
// relative_offset: u32,
|
||||
// last_unit_start: u32,
|
||||
// last_section_end: u32,
|
||||
has_link_map: bool,
|
||||
}
|
||||
|
||||
|
@ -241,7 +271,7 @@ enum ProcessMapState {
|
|||
|
||||
struct StateMachine {
|
||||
state: ProcessMapState,
|
||||
entries: MapEntries,
|
||||
result: MapInfo,
|
||||
has_link_map: bool,
|
||||
}
|
||||
|
||||
|
@ -253,7 +283,7 @@ impl StateMachine {
|
|||
match &mut self.state {
|
||||
ProcessMapState::None => {
|
||||
if let Some(captures) = LINK_MAP_START.captures(&line) {
|
||||
self.entries.entry_point = captures["entry"].to_string();
|
||||
self.result.entry_point = captures["entry"].to_string();
|
||||
self.switch_state(ProcessMapState::LinkMap(Default::default()))?;
|
||||
} else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) {
|
||||
self.switch_state(ProcessMapState::SectionLayout(SectionLayoutState {
|
||||
|
@ -261,9 +291,9 @@ impl StateMachine {
|
|||
has_link_map: self.has_link_map,
|
||||
..Default::default()
|
||||
}))?;
|
||||
} else if MEMORY_MAP_HEADER.is_match(&line) {
|
||||
} else if MEMORY_MAP_START.is_match(&line) {
|
||||
self.switch_state(ProcessMapState::MemoryMap)?;
|
||||
} else if LINKER_SYMBOLS_HEADER.is_match(&line) {
|
||||
} else if LINKER_SYMBOLS_START.is_match(&line) {
|
||||
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
|
||||
} else {
|
||||
bail!("Unexpected line while processing map: '{line}'");
|
||||
|
@ -271,10 +301,11 @@ impl StateMachine {
|
|||
}
|
||||
ProcessMapState::LinkMap(ref mut state) => {
|
||||
if let Some(captures) = LINK_MAP_ENTRY.captures(&line) {
|
||||
StateMachine::process_link_map_entry(captures, state, &mut self.entries)?;
|
||||
StateMachine::process_link_map_entry(captures, state, &mut self.result)?;
|
||||
} else if let Some(captures) = LINK_MAP_ENTRY_GENERATED.captures(&line) {
|
||||
StateMachine::process_link_map_generated(captures, state, &mut self.entries)?;
|
||||
} else if LINK_MAP_ENTRY_DUPLICATE.is_match(&line) || EXTERN_SYMBOL.is_match(&line)
|
||||
StateMachine::process_link_map_generated(captures, state, &mut self.result)?;
|
||||
} else if LINK_MAP_ENTRY_DUPLICATE.is_match(&line)
|
||||
|| LINK_MAP_EXTERN_SYMBOL.is_match(&line)
|
||||
{
|
||||
// Ignore
|
||||
} else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) {
|
||||
|
@ -283,9 +314,9 @@ impl StateMachine {
|
|||
has_link_map: self.has_link_map,
|
||||
..Default::default()
|
||||
}))?;
|
||||
} else if MEMORY_MAP_HEADER.is_match(&line) {
|
||||
} else if MEMORY_MAP_START.is_match(&line) {
|
||||
self.switch_state(ProcessMapState::MemoryMap)?;
|
||||
} else if LINKER_SYMBOLS_HEADER.is_match(&line) {
|
||||
} else if LINKER_SYMBOLS_START.is_match(&line) {
|
||||
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
|
||||
} else {
|
||||
bail!("Unexpected line while processing map: '{line}'");
|
||||
|
@ -293,51 +324,52 @@ impl StateMachine {
|
|||
}
|
||||
ProcessMapState::SectionLayout(ref mut state) => {
|
||||
if let Some(captures) = SECTION_LAYOUT_SYMBOL.captures(&line) {
|
||||
StateMachine::section_layout_entry(captures, state, &mut self.entries)?;
|
||||
StateMachine::section_layout_entry(captures, state, &mut self.result)?;
|
||||
} else if let Some(captures) = SECTION_LAYOUT_START.captures(&line) {
|
||||
// let last_section_end = state.last_section_end;
|
||||
self.switch_state(ProcessMapState::SectionLayout(SectionLayoutState {
|
||||
current_section: captures["section"].to_string(),
|
||||
has_link_map: self.has_link_map,
|
||||
// last_section_end,
|
||||
..Default::default()
|
||||
}))?;
|
||||
} else if SECTION_LAYOUT_HEADER.is_match(&line) {
|
||||
// Ignore
|
||||
} else if MEMORY_MAP_HEADER.is_match(&line) {
|
||||
} else if MEMORY_MAP_START.is_match(&line) {
|
||||
self.switch_state(ProcessMapState::MemoryMap)?;
|
||||
} else if LINKER_SYMBOLS_HEADER.is_match(&line) {
|
||||
} else if LINKER_SYMBOLS_START.is_match(&line) {
|
||||
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
|
||||
} else {
|
||||
bail!("Unexpected line while processing map: '{line}'");
|
||||
}
|
||||
}
|
||||
ProcessMapState::MemoryMap => {
|
||||
// TODO
|
||||
if LINKER_SYMBOLS_HEADER.is_match(&line) {
|
||||
if let Some(captures) = MEMORY_MAP_ENTRY.captures(&line) {
|
||||
StateMachine::memory_map_entry(captures, &mut self.result)?;
|
||||
} else if LINKER_SYMBOLS_START.is_match(&line) {
|
||||
self.switch_state(ProcessMapState::LinkerGeneratedSymbols)?;
|
||||
}
|
||||
}
|
||||
ProcessMapState::LinkerGeneratedSymbols => {
|
||||
// TODO
|
||||
if let Some(captures) = LINKER_SYMBOL_ENTRY.captures(&line) {
|
||||
StateMachine::linker_symbol_entry(captures, &mut self.result)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn switch_state(&mut self, new_state: ProcessMapState) -> Result<()> {
|
||||
self.end_state()?;
|
||||
self.state = new_state;
|
||||
let old_state = replace(&mut self.state, new_state);
|
||||
self.end_state(old_state)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end_state(&mut self) -> Result<()> {
|
||||
match self.state {
|
||||
fn end_state(&mut self, old_state: ProcessMapState) -> Result<()> {
|
||||
match old_state {
|
||||
ProcessMapState::LinkMap { .. } => {
|
||||
self.has_link_map = true;
|
||||
}
|
||||
ProcessMapState::SectionLayout(ref mut state) => {
|
||||
StateMachine::end_section_layout(state, &mut self.entries)?;
|
||||
ProcessMapState::SectionLayout(state) => {
|
||||
StateMachine::end_section_layout(state, &mut self.result)?;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -347,17 +379,13 @@ impl StateMachine {
|
|||
fn process_link_map_entry(
|
||||
captures: Captures,
|
||||
state: &mut LinkMapState,
|
||||
entries: &mut MapEntries,
|
||||
result: &mut MapInfo,
|
||||
) -> Result<()> {
|
||||
// if captures["sym"].starts_with('.') {
|
||||
// state.last_name.clear();
|
||||
// return Ok(());
|
||||
// }
|
||||
let is_duplicate = &captures["sym"] == ">>>";
|
||||
let unit = captures["tu"].trim().to_string();
|
||||
let name = if is_duplicate {
|
||||
ensure!(!state.last_name.is_empty(), "Last name empty?");
|
||||
state.last_name.clone()
|
||||
ensure!(!state.last_symbol_name.is_empty(), "Last name empty?");
|
||||
state.last_symbol_name.clone()
|
||||
} else {
|
||||
captures["sym"].to_string()
|
||||
};
|
||||
|
@ -385,11 +413,11 @@ impl StateMachine {
|
|||
};
|
||||
if !is_duplicate && state.symbol_stack.len() > 1 {
|
||||
let from = &state.symbol_stack[state.symbol_stack.len() - 2];
|
||||
entries.entry_referenced_from.insert(symbol_ref.clone(), from.clone());
|
||||
entries.entry_references.insert(from.clone(), symbol_ref.clone());
|
||||
result.entry_referenced_from.insert(symbol_ref.clone(), from.clone());
|
||||
result.entry_references.insert(from.clone(), symbol_ref.clone());
|
||||
}
|
||||
let mut should_insert = true;
|
||||
if let Some(symbol) = entries.symbols.get(&symbol_ref) {
|
||||
if let Some(symbol) = result.link_map_symbols.get(&symbol_ref) {
|
||||
if symbol.kind != kind {
|
||||
log::warn!(
|
||||
"Kind mismatch for {}: was {:?}, now {:?}",
|
||||
|
@ -406,12 +434,12 @@ impl StateMachine {
|
|||
visibility
|
||||
);
|
||||
}
|
||||
entries.unit_entries.insert(unit.clone(), symbol_ref.clone());
|
||||
result.unit_entries.insert(unit.clone(), symbol_ref.clone());
|
||||
should_insert = false;
|
||||
}
|
||||
if should_insert {
|
||||
let demangled = demangle(&name, &DemangleOptions::default());
|
||||
entries.symbols.insert(symbol_ref.clone(), SymbolEntry {
|
||||
result.link_map_symbols.insert(symbol_ref.clone(), SymbolEntry {
|
||||
name: name.clone(),
|
||||
demangled,
|
||||
kind,
|
||||
|
@ -419,10 +447,10 @@ impl StateMachine {
|
|||
unit: Some(unit.clone()),
|
||||
address: 0,
|
||||
size: 0,
|
||||
section: String::new(),
|
||||
align: None,
|
||||
});
|
||||
state.last_name = name;
|
||||
entries.unit_entries.insert(unit, symbol_ref);
|
||||
state.last_symbol_name = name;
|
||||
result.unit_entries.insert(unit, symbol_ref);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -430,12 +458,12 @@ impl StateMachine {
|
|||
fn process_link_map_generated(
|
||||
captures: Captures,
|
||||
_state: &mut LinkMapState,
|
||||
entries: &mut MapEntries,
|
||||
result: &mut MapInfo,
|
||||
) -> Result<()> {
|
||||
let name = captures["sym"].to_string();
|
||||
let demangled = demangle(&name, &DemangleOptions::default());
|
||||
let symbol_ref = SymbolRef { name: name.clone(), unit: None };
|
||||
entries.symbols.insert(symbol_ref, SymbolEntry {
|
||||
result.link_map_symbols.insert(symbol_ref, SymbolEntry {
|
||||
name,
|
||||
demangled,
|
||||
kind: SymbolKind::NoType,
|
||||
|
@ -443,12 +471,47 @@ impl StateMachine {
|
|||
unit: None,
|
||||
address: 0,
|
||||
size: 0,
|
||||
section: String::new(),
|
||||
align: None,
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn end_section_layout(state: &mut SectionLayoutState, entries: &mut MapEntries) -> Result<()> {
|
||||
fn end_section_layout(mut state: SectionLayoutState, entries: &mut MapInfo) -> Result<()> {
|
||||
// Resolve duplicate TUs
|
||||
let mut existing = HashSet::new();
|
||||
for idx in 0..state.units.len() {
|
||||
let (addr, unit) = &state.units[idx];
|
||||
// FIXME
|
||||
if
|
||||
/*state.current_section == ".bss" ||*/
|
||||
existing.contains(unit) {
|
||||
if
|
||||
/*state.current_section == ".bss" ||*/
|
||||
&state.units[idx - 1].1 != unit {
|
||||
let new_name = format!("{unit}_{}_{:010X}", state.current_section, addr);
|
||||
log::info!("Renaming {unit} to {new_name}");
|
||||
for idx2 in 0..idx {
|
||||
let (addr, n_unit) = &state.units[idx2];
|
||||
if unit == n_unit {
|
||||
let new_name =
|
||||
format!("{n_unit}_{}_{:010X}", state.current_section, addr);
|
||||
log::info!("Renaming 2 {n_unit} to {new_name}");
|
||||
state.units[idx2].1 = new_name;
|
||||
break;
|
||||
}
|
||||
}
|
||||
state.units[idx].1 = new_name;
|
||||
}
|
||||
} else {
|
||||
existing.insert(unit.clone());
|
||||
}
|
||||
}
|
||||
if !state.symbols.is_empty() {
|
||||
entries.section_symbols.insert(state.current_section.clone(), state.symbols);
|
||||
}
|
||||
if !state.units.is_empty() {
|
||||
entries.section_units.insert(state.current_section.clone(), state.units);
|
||||
}
|
||||
// Set last section size
|
||||
// if let Some(last_unit) = state.section_units.last() {
|
||||
// let last_unit = state.unit_override.as_ref().unwrap_or(last_unit);
|
||||
|
@ -468,16 +531,26 @@ impl StateMachine {
|
|||
fn section_layout_entry(
|
||||
captures: Captures,
|
||||
state: &mut SectionLayoutState,
|
||||
entries: &mut MapEntries,
|
||||
result: &mut MapInfo,
|
||||
) -> Result<()> {
|
||||
if captures["rom_addr"].trim() == "UNUSED" {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let sym_name = captures["sym"].trim();
|
||||
let mut tu = captures["tu"].trim().to_string();
|
||||
let mut address = u32::from_str_radix(captures["addr"].trim(), 16)?;
|
||||
let mut size = u32::from_str_radix(captures["size"].trim(), 16)?;
|
||||
if sym_name == "*fill*" {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let tu = captures["tu"].trim().to_string();
|
||||
if tu == "*fill*" || tu == "Linker Generated Symbol File" {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let address = u32::from_str_radix(captures["addr"].trim(), 16)?;
|
||||
let size = u32::from_str_radix(captures["size"].trim(), 16)?;
|
||||
let align =
|
||||
captures.name("align").and_then(|m| u32::from_str_radix(m.as_str().trim(), 16).ok());
|
||||
|
||||
if state.current_unit.as_ref() != Some(&tu) || sym_name == state.current_section {
|
||||
state.current_unit = Some(tu.clone());
|
||||
|
@ -488,7 +561,7 @@ impl StateMachine {
|
|||
}
|
||||
|
||||
let symbol_ref = SymbolRef { name: sym_name.to_string(), unit: Some(tu.clone()) };
|
||||
let entry = if let Some(existing) = entries.symbols.get(&symbol_ref) {
|
||||
let entry = if let Some(existing) = result.link_map_symbols.get(&symbol_ref) {
|
||||
SymbolEntry {
|
||||
name: existing.name.clone(),
|
||||
demangled: existing.demangled.clone(),
|
||||
|
@ -497,7 +570,7 @@ impl StateMachine {
|
|||
unit: existing.unit.clone(),
|
||||
address,
|
||||
size,
|
||||
section: state.current_section.clone(),
|
||||
align,
|
||||
}
|
||||
} else {
|
||||
let visibility = if state.has_link_map {
|
||||
|
@ -518,7 +591,7 @@ impl StateMachine {
|
|||
unit: Some(tu.clone()),
|
||||
address,
|
||||
size,
|
||||
section: state.current_section.clone(),
|
||||
align,
|
||||
}
|
||||
};
|
||||
match state.symbols.entry(address) {
|
||||
|
@ -529,25 +602,188 @@ impl StateMachine {
|
|||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn memory_map_entry(captures: Captures, entries: &mut MapInfo) -> Result<()> {
|
||||
let section = &captures["section"];
|
||||
let addr_str = &captures["addr"];
|
||||
if addr_str.is_empty() {
|
||||
// Stripped from DOL
|
||||
return Ok(());
|
||||
}
|
||||
let address = u32::from_str_radix(addr_str, 16)?;
|
||||
let size = u32::from_str_radix(&captures["size"], 16)?;
|
||||
let file_offset = u32::from_str_radix(&captures["offset"], 16)?;
|
||||
// log::info!("Memory map entry: {section} {address:#010X} {size:#010X} {file_offset:#010X}");
|
||||
entries.sections.insert(address, SectionInfo {
|
||||
name: section.to_string(),
|
||||
address,
|
||||
size,
|
||||
file_offset,
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn process_map<R: BufRead>(reader: R) -> Result<MapEntries> {
|
||||
let mut state = StateMachine {
|
||||
fn linker_symbol_entry(captures: Captures, result: &mut MapInfo) -> Result<()> {
|
||||
let name = &captures["name"];
|
||||
let address = u32::from_str_radix(&captures["addr"], 16)?;
|
||||
if address == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let symbol_ref = SymbolRef { name: name.to_string(), unit: None };
|
||||
if let Some(existing) = result.link_map_symbols.get_mut(&symbol_ref) {
|
||||
existing.address = address;
|
||||
} else {
|
||||
result.link_map_symbols.insert(symbol_ref, SymbolEntry {
|
||||
name: name.to_string(),
|
||||
demangled: demangle(name, &DemangleOptions::default()),
|
||||
kind: SymbolKind::NoType,
|
||||
visibility: SymbolVisibility::Global,
|
||||
unit: None,
|
||||
address,
|
||||
size: 0,
|
||||
align: None,
|
||||
});
|
||||
};
|
||||
// log::info!("Linker generated symbol: {} @ {:#010X}", name, address);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn process_map<R: BufRead>(reader: R) -> Result<MapInfo> {
|
||||
let mut sm = StateMachine {
|
||||
state: ProcessMapState::None,
|
||||
entries: Default::default(),
|
||||
result: Default::default(),
|
||||
has_link_map: false,
|
||||
};
|
||||
for result in reader.lines() {
|
||||
match result {
|
||||
Ok(line) => state.process_line(line)?,
|
||||
Ok(line) => sm.process_line(line)?,
|
||||
Err(e) => return Err(Error::from(e)),
|
||||
}
|
||||
}
|
||||
state.end_state()?;
|
||||
let state = replace(&mut sm.state, ProcessMapState::None);
|
||||
sm.end_state(state)?;
|
||||
|
||||
let mut entries = state.entries;
|
||||
let entries = sm.result;
|
||||
// let section_order = resolve_section_order(&entries.address_to_symbol, &mut entries.symbols)?;
|
||||
// entries.symbol_order = section_order.symbol_order;
|
||||
// entries.unit_order = section_order.unit_order;
|
||||
Ok(entries)
|
||||
}
|
||||
|
||||
pub fn apply_map(result: &MapInfo, obj: &mut ObjInfo) -> Result<()> {
|
||||
for section in &mut obj.sections {
|
||||
if let Some(info) = result.sections.get(&(section.address as u32)) {
|
||||
let kind = section_kind_for_section(&info.name)?;
|
||||
if section.section_known {
|
||||
if section.name != info.name {
|
||||
log::warn!("Section mismatch: was {}, map says {}", section.name, info.name);
|
||||
}
|
||||
if section.kind != kind {
|
||||
log::warn!(
|
||||
"Section type mismatch: {} was {:?}, map says {:?}",
|
||||
info.name,
|
||||
section.kind,
|
||||
kind
|
||||
);
|
||||
}
|
||||
}
|
||||
// if section.size != info.size as u64 {
|
||||
// log::warn!(
|
||||
// "Section size mismatch: {} was {:#X}, map says {:#X}",
|
||||
// info.name,
|
||||
// section.size,
|
||||
// info.size
|
||||
// );
|
||||
// }
|
||||
// if section.file_offset != info.file_offset as u64 {
|
||||
// log::warn!(
|
||||
// "Section file offset mismatch: {} was {:#X}, map says {:#X}",
|
||||
// info.name,
|
||||
// section.file_offset,
|
||||
// info.file_offset
|
||||
// );
|
||||
// }
|
||||
section.name = info.name.clone();
|
||||
section.kind = kind;
|
||||
// section.size = info.size as u64;
|
||||
// section.file_offset = info.file_offset as u64;
|
||||
// section.original_address = info.address as u64;
|
||||
section.section_known = true;
|
||||
} else {
|
||||
log::warn!("Section {} @ {:#010X} not found in map", section.name, section.address);
|
||||
}
|
||||
}
|
||||
// Add section symbols
|
||||
for (section_name, symbol_map) in &result.section_symbols {
|
||||
let section_index = obj
|
||||
.sections
|
||||
.iter()
|
||||
.find(|s| &s.name == section_name)
|
||||
.map(|s| s.index)
|
||||
.ok_or_else(|| anyhow!("Failed to locate section {section_name} from map"))?;
|
||||
for symbol_entry in symbol_map.values().flatten() {
|
||||
add_symbol(obj, symbol_entry, Some(section_index))?;
|
||||
}
|
||||
}
|
||||
// Add absolute symbols
|
||||
for symbol_entry in result.link_map_symbols.values().filter(|s| s.unit.is_none()) {
|
||||
add_symbol(obj, symbol_entry, None)?;
|
||||
}
|
||||
// Add splits
|
||||
let mut section_order: Vec<(String, Vec<String>)> = Vec::new();
|
||||
for (section, unit_order) in &result.section_units {
|
||||
let mut units = Vec::new();
|
||||
let mut existing = HashSet::new();
|
||||
for (addr, unit) in unit_order {
|
||||
let unit = unit.clone();
|
||||
if !existing.contains(&unit) {
|
||||
units.push(unit.clone());
|
||||
existing.insert(unit.clone());
|
||||
}
|
||||
obj.splits.nested_push(*addr, ObjSplit {
|
||||
unit,
|
||||
end: 0, // TODO?
|
||||
align: None,
|
||||
common: false, // TODO?
|
||||
});
|
||||
}
|
||||
section_order.push((section.clone(), units));
|
||||
}
|
||||
log::info!("Section order: {:#?}", section_order);
|
||||
obj.link_order = resolve_link_order(§ion_order)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn add_symbol(obj: &mut ObjInfo, symbol_entry: &SymbolEntry, section: Option<usize>) -> Result<()> {
|
||||
let demangled_name = demangle(&symbol_entry.name, &DemangleOptions::default());
|
||||
obj.add_symbol(
|
||||
ObjSymbol {
|
||||
name: symbol_entry.name.clone(),
|
||||
demangled_name,
|
||||
address: symbol_entry.address as u64,
|
||||
section,
|
||||
size: symbol_entry.size as u64,
|
||||
size_known: symbol_entry.size != 0,
|
||||
flags: ObjSymbolFlagSet(
|
||||
match symbol_entry.visibility {
|
||||
SymbolVisibility::Global => ObjSymbolFlags::Global,
|
||||
SymbolVisibility::Local => ObjSymbolFlags::Local,
|
||||
SymbolVisibility::Weak => ObjSymbolFlags::Weak,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
kind: match symbol_entry.kind {
|
||||
SymbolKind::Function => ObjSymbolKind::Function,
|
||||
SymbolKind::Object => ObjSymbolKind::Object,
|
||||
SymbolKind::Section => ObjSymbolKind::Section,
|
||||
SymbolKind::NoType => ObjSymbolKind::Unknown,
|
||||
},
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
},
|
||||
true,
|
||||
)?;
|
||||
Ok(())
|
||||
}
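To illustrate the ordering technique used by resolve_link_order above, here is a minimal standalone sketch built on the topological-sort crate this diff depends on; `merge_orders` is a hypothetical name, and single-unit sections contribute no edges in this sketch.

use topological_sort::TopologicalSort;

// Per-section unit orders become pairwise dependencies; the sort then yields
// one global order that satisfies all of them, or None if they conflict.
fn merge_orders(section_orders: &[Vec<&str>]) -> Option<Vec<String>> {
    let mut t_sort = TopologicalSort::<String>::new();
    for order in section_orders {
        for pair in order.windows(2) {
            t_sort.add_dependency(pair[0].to_string(), pair[1].to_string());
        }
    }
    let mut global = Vec::new();
    while let Some(unit) = t_sort.pop() {
        global.push(unit);
    }
    // Items left in the sort indicate a cycle between section orders.
    if t_sort.is_empty() {
        Some(global)
    } else {
        None
    }
}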
|
||||
|
|
|
@ -1,13 +1,17 @@
|
|||
pub mod asm;
|
||||
pub mod comment;
|
||||
pub mod config;
|
||||
pub mod dol;
|
||||
pub mod dwarf;
|
||||
pub mod elf;
|
||||
pub mod file;
|
||||
pub mod lcf;
|
||||
pub mod map;
|
||||
pub mod nested;
|
||||
pub mod rarc;
|
||||
pub mod rel;
|
||||
pub mod rso;
|
||||
pub mod yaz0;
|
||||
|
||||
/// Creates a fixed-size array reference from a slice.
|
||||
#[macro_export]
|
||||
|
@ -20,3 +24,15 @@ macro_rules! array_ref {
|
|||
to_array(&$slice[$offset..$offset + $size])
|
||||
}};
|
||||
}
|
||||
|
||||
/// Creates a fixed-size mutable array reference from a slice.
|
||||
#[macro_export]
|
||||
macro_rules! array_ref_mut {
|
||||
($slice:expr, $offset:expr, $size:expr) => {{
|
||||
#[inline]
|
||||
fn to_array_mut<T>(slice: &mut [T]) -> &mut [T; $size] {
|
||||
unsafe { &mut *(slice.as_mut_ptr() as *mut [_; $size]) }
|
||||
}
|
||||
to_array_mut(&mut $slice[$offset..$offset + $size])
|
||||
}};
|
||||
}
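As a small usage note for the macros above: they reinterpret a runtime slice as a fixed-size array reference, panicking via the slice index if the range is out of bounds. The helper below is a hypothetical example, not part of the commit.

// Borrow the first four bytes of a header as a fixed-size array.
fn magic_bytes(data: &[u8]) -> &[u8; 4] {
    // Panics if data.len() < 4, matching the slice-index behavior inside the macro.
    crate::array_ref!(data, 0, 4)
}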
|
||||
|
|
|
@ -11,6 +11,7 @@ pub trait NestedMap<T1, T2, T3> {
|
|||
|
||||
pub trait NestedVec<T1, T2> {
|
||||
fn nested_push(&mut self, v1: T1, v2: T2);
|
||||
fn nested_remove(&mut self, v1: &T1, v2: &T2);
|
||||
}
|
||||
|
||||
impl<T1, T2, T3> NestedMap<T1, T2, T3> for BTreeMap<T1, BTreeMap<T2, T3>>
|
||||
|
@ -19,11 +20,7 @@ where
|
|||
T2: Eq + Ord,
|
||||
{
|
||||
fn nested_insert(&mut self, v1: T1, v2: T2, v3: T3) -> Result<()> {
|
||||
let inner = match self.entry(v1) {
|
||||
btree_map::Entry::Occupied(entry) => entry.into_mut(),
|
||||
btree_map::Entry::Vacant(entry) => entry.insert(Default::default()),
|
||||
};
|
||||
match inner.entry(v2) {
|
||||
match self.entry(v1).or_default().entry(v2) {
|
||||
btree_map::Entry::Occupied(_) => bail!("Entry already exists"),
|
||||
btree_map::Entry::Vacant(entry) => entry.insert(v3),
|
||||
};
|
||||
|
@ -37,11 +34,7 @@ where
|
|||
T2: Eq + Hash,
|
||||
{
|
||||
fn nested_insert(&mut self, v1: T1, v2: T2, v3: T3) -> Result<()> {
|
||||
let inner = match self.entry(v1) {
|
||||
hash_map::Entry::Occupied(entry) => entry.into_mut(),
|
||||
hash_map::Entry::Vacant(entry) => entry.insert(Default::default()),
|
||||
};
|
||||
match inner.entry(v2) {
|
||||
match self.entry(v1).or_default().entry(v2) {
|
||||
hash_map::Entry::Occupied(_) => bail!("Entry already exists"),
|
||||
hash_map::Entry::Vacant(entry) => entry.insert(v3),
|
||||
};
|
||||
|
@ -50,16 +43,29 @@ where
|
|||
}
|
||||
|
||||
impl<T1, T2> NestedVec<T1, T2> for BTreeMap<T1, Vec<T2>>
|
||||
where T1: Ord
|
||||
where
|
||||
T1: Ord,
|
||||
T2: PartialEq,
|
||||
{
|
||||
fn nested_push(&mut self, v1: T1, v2: T2) {
|
||||
match self.entry(v1) {
|
||||
btree_map::Entry::Occupied(mut e) => {
|
||||
e.get_mut().push(v2);
|
||||
}
|
||||
btree_map::Entry::Vacant(e) => {
|
||||
e.insert(vec![v2]);
|
||||
fn nested_push(&mut self, v1: T1, v2: T2) { self.entry(v1).or_default().push(v2); }
|
||||
|
||||
fn nested_remove(&mut self, v1: &T1, v2: &T2) {
|
||||
if let Some(vec) = self.get_mut(v1) {
|
||||
vec.retain(|n| n != v2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T1, T2> NestedVec<T1, T2> for HashMap<T1, Vec<T2>>
|
||||
where
|
||||
T1: Ord + Hash,
|
||||
T2: PartialEq,
|
||||
{
|
||||
fn nested_push(&mut self, v1: T1, v2: T2) { self.entry(v1).or_default().push(v2); }
|
||||
|
||||
fn nested_remove(&mut self, v1: &T1, v2: &T2) {
|
||||
if let Some(vec) = self.get_mut(v1) {
|
||||
vec.retain(|n| n != v2);
|
||||
}
|
||||
}
|
||||
}
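A brief, hypothetical usage sketch of the NestedVec helpers above (the trait path follows this diff): it groups translation units under a section address and then removes one again.

use std::collections::BTreeMap;

use crate::util::nested::NestedVec;

fn nested_vec_demo() {
    let mut splits: BTreeMap<u32, Vec<String>> = BTreeMap::new();
    // nested_push creates the Vec on first use, then appends.
    splits.nested_push(0x8000_3100, "os.c".to_string());
    splits.nested_push(0x8000_3100, "os_error.c".to_string());
    // nested_remove drops matching values but keeps the (possibly empty) Vec.
    splits.nested_remove(&0x8000_3100, &"os_error.c".to_string());
    assert_eq!(splits[&0x8000_3100], vec!["os.c".to_string()]);
}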
|
||||
|
|
|
@ -0,0 +1,258 @@
|
|||
// Source: https://github.com/Julgodis/picori/blob/650da9f4fe6050b39b80d5360416591c748058d5/src/rarc.rs
|
||||
// License: MIT
|
||||
// Modified to use `std::io::Cursor<&[u8]>` and `byteorder`
|
||||
use std::{collections::HashMap, fmt::Display};
|
||||
|
||||
use anyhow::{anyhow, ensure, Result};
|
||||
use byteorder::{BigEndian, LittleEndian, ReadBytesExt};
|
||||
|
||||
use crate::util::file::{read_c_string, Reader};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct NamedHash {
|
||||
pub name: String,
|
||||
pub hash: u16,
|
||||
}
|
||||
|
||||
impl Display for NamedHash {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.name)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for NamedHash {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.hash.hash(state); }
|
||||
}
|
||||
|
||||
impl PartialEq for NamedHash {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
if self.hash == other.hash {
|
||||
self.name == other.name
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for NamedHash {}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum RarcDirectory {
|
||||
File {
|
||||
/// Name of the file.
|
||||
name: NamedHash,
|
||||
/// Offset of the file in the RARC file. This offset is relative to the start of the RARC file.
|
||||
offset: u64,
|
||||
/// Size of the file.
|
||||
size: u32,
|
||||
},
|
||||
Folder {
|
||||
/// Name of the folder.
|
||||
name: NamedHash,
|
||||
},
|
||||
CurrentFolder,
|
||||
ParentFolder,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct RarcNode {
|
||||
/// Index of first directory.
|
||||
pub index: u32,
|
||||
/// Number of directories.
|
||||
pub count: u32,
|
||||
}
|
||||
|
||||
pub struct RarcReader<'a> {
|
||||
reader: Reader<'a>,
|
||||
directories: Vec<RarcDirectory>,
|
||||
nodes: HashMap<NamedHash, RarcNode>,
|
||||
root_node: NamedHash,
|
||||
}
|
||||
|
||||
impl<'a> RarcReader<'a> {
|
||||
/// Creates a new RARC reader.
|
||||
pub fn new(mut reader: Reader<'a>) -> Result<Self> {
|
||||
let base = reader.position();
|
||||
|
||||
let magic = reader.read_u32::<LittleEndian>()?;
|
||||
let _file_length = reader.read_u32::<BigEndian>()?;
|
||||
let header_length = reader.read_u32::<BigEndian>()?;
|
||||
let file_offset = reader.read_u32::<BigEndian>()?;
|
||||
let _file_length = reader.read_u32::<BigEndian>()?;
|
||||
let _ = reader.read_u32::<BigEndian>()?;
|
||||
let _ = reader.read_u32::<BigEndian>()?;
|
||||
let _ = reader.read_u32::<BigEndian>()?;
|
||||
let node_count = reader.read_u32::<BigEndian>()?;
|
||||
let node_offset = reader.read_u32::<BigEndian>()?;
|
||||
let directory_count = reader.read_u32::<BigEndian>()?;
|
||||
let directory_offset = reader.read_u32::<BigEndian>()?;
|
||||
let string_table_length = reader.read_u32::<BigEndian>()?;
|
||||
let string_table_offset = reader.read_u32::<BigEndian>()?;
|
||||
let _file_count = reader.read_u16::<BigEndian>()?;
|
||||
let _ = reader.read_u16::<BigEndian>()?;
|
||||
let _ = reader.read_u32::<BigEndian>()?;
|
||||
|
||||
ensure!(magic == 0x43524152, "invalid RARC magic");
|
||||
ensure!(node_count < 0x10000, "invalid node count");
|
||||
ensure!(directory_count < 0x10000, "invalid directory count");
|
||||
|
||||
let base = base + header_length as u64;
|
||||
let directory_base = base + directory_offset as u64;
|
||||
let data_base = base + file_offset as u64;
|
||||
let mut directories = Vec::with_capacity(directory_count as usize);
|
||||
for i in 0..directory_count {
|
||||
reader.set_position(directory_base + 20 * i as u64);
|
||||
let index = reader.read_u16::<BigEndian>()?;
|
||||
let name_hash = reader.read_u16::<BigEndian>()?;
|
||||
let _ = reader.read_u16::<BigEndian>()?; // 0x200 for folders, 0x1100 for files
|
||||
let name_offset = reader.read_u16::<BigEndian>()?;
|
||||
let data_offset = reader.read_u32::<BigEndian>()?;
|
||||
let data_length = reader.read_u32::<BigEndian>()?;
|
||||
let _ = reader.read_u32::<BigEndian>()?;
|
||||
|
||||
let name = {
|
||||
let offset = string_table_offset as u64;
|
||||
let offset = offset + name_offset as u64;
|
||||
ensure!((name_offset as u32) < string_table_length, "invalid string table offset");
|
||||
read_c_string(&mut reader, base + offset)
|
||||
}?;
|
||||
|
||||
if index == 0xFFFF {
|
||||
if name == "." {
|
||||
directories.push(RarcDirectory::CurrentFolder);
|
||||
} else if name == ".." {
|
||||
directories.push(RarcDirectory::ParentFolder);
|
||||
} else {
|
||||
directories
|
||||
.push(RarcDirectory::Folder { name: NamedHash { name, hash: name_hash } });
|
||||
}
|
||||
} else {
|
||||
directories.push(RarcDirectory::File {
|
||||
name: NamedHash { name, hash: name_hash },
|
||||
offset: data_base + data_offset as u64,
|
||||
size: data_length,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let node_base = base + node_offset as u64;
|
||||
let mut root_node: Option<NamedHash> = None;
|
||||
let mut nodes = HashMap::with_capacity(node_count as usize);
|
||||
for i in 0..node_count {
|
||||
reader.set_position(node_base + 16 * i as u64);
|
||||
let _identifier = reader.read_u32::<BigEndian>()?;
|
||||
let name_offset = reader.read_u32::<BigEndian>()?;
|
||||
let name_hash = reader.read_u16::<BigEndian>()?;
|
||||
let count = reader.read_u16::<BigEndian>()? as u32;
|
||||
let index = reader.read_u32::<BigEndian>()?;
|
||||
|
||||
ensure!(index < directory_count, "first directory index out of bounds");
|
||||
|
||||
let last_index = index.checked_add(count);
|
||||
ensure!(
|
||||
last_index.is_some() && last_index.unwrap() <= directory_count,
|
||||
"last directory index out of bounds"
|
||||
);
|
||||
|
||||
let name = {
|
||||
let offset = string_table_offset as u64;
|
||||
let offset = offset + name_offset as u64;
|
||||
ensure!(name_offset < string_table_length, "invalid string table offset");
|
||||
read_c_string(&mut reader, base + offset)
|
||||
}?;
|
||||
|
||||
// FIXME: this assumes that the root node is the first node in the list
|
||||
if root_node.is_none() {
|
||||
root_node = Some(NamedHash { name: name.clone(), hash: name_hash });
|
||||
}
|
||||
|
||||
let name = NamedHash { name, hash: name_hash };
|
||||
nodes.insert(name.clone(), RarcNode { index, count });
|
||||
}
|
||||
|
||||
if let Some(root_node) = root_node {
|
||||
Ok(Self { reader, directories, nodes, root_node })
|
||||
} else {
|
||||
Err(anyhow!("no root node"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the data for a file.
|
||||
pub fn file_data(&mut self, offset: u64, size: u32) -> Result<&'a [u8]> {
|
||||
ensure!(offset + size as u64 <= self.reader.get_ref().len() as u64, "out of bounds");
|
||||
Ok(&self.reader.get_ref()[offset as usize..offset as usize + size as usize])
|
||||
}
|
||||
|
||||
/// Get an iterator over the nodes in the RARC file.
|
||||
pub fn nodes(&self) -> Nodes<'_, '_> {
|
||||
let root_node = self.root_node.clone();
|
||||
Nodes { parent: self, stack: vec![NodeState::Begin(root_node)] }
|
||||
}
|
||||
}
|
||||
|
||||
/// A node in an RARC file.
|
||||
pub enum Node {
|
||||
/// A directory that has been entered.
|
||||
DirectoryBegin { name: NamedHash },
|
||||
/// A directory that has been exited.
|
||||
DirectoryEnd { name: NamedHash },
|
||||
/// A file in the current directory.
|
||||
File { name: NamedHash, offset: u64, size: u32 },
|
||||
/// The current directory. This is equivalent to ".".
|
||||
CurrentDirectory,
|
||||
/// The parent directory. This is equivalent to "..".
|
||||
ParentDirectory,
|
||||
}
|
||||
|
||||
enum NodeState {
|
||||
Begin(NamedHash),
|
||||
End(NamedHash),
|
||||
File(NamedHash, u32),
|
||||
}
|
||||
|
||||
/// An iterator over the nodes in an RARC file.
|
||||
pub struct Nodes<'parent, 'a> {
|
||||
parent: &'parent RarcReader<'a>,
|
||||
stack: Vec<NodeState>,
|
||||
}
|
||||
|
||||
impl<'parent, 'a> Iterator for Nodes<'parent, 'a> {
|
||||
type Item = Node;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let Some(state) = self.stack.pop() else {
|
||||
return None;
|
||||
};
|
||||
|
||||
match state {
|
||||
NodeState::Begin(name) => {
|
||||
self.stack.push(NodeState::File(name.clone(), 0));
|
||||
Some(Node::DirectoryBegin { name })
|
||||
}
|
||||
NodeState::End(name) => Some(Node::DirectoryEnd { name }),
|
||||
NodeState::File(name, index) => {
|
||||
if let Some(node) = self.parent.nodes.get(&name) {
|
||||
if index + 1 >= node.count {
|
||||
self.stack.push(NodeState::End(name.clone()));
|
||||
} else {
|
||||
self.stack.push(NodeState::File(name.clone(), index + 1));
|
||||
}
|
||||
let directory = &self.parent.directories[(node.index + index) as usize];
|
||||
match directory {
|
||||
RarcDirectory::CurrentFolder => Some(Node::CurrentDirectory),
|
||||
RarcDirectory::ParentFolder => Some(Node::ParentDirectory),
|
||||
RarcDirectory::Folder { name } => {
|
||||
self.stack.push(NodeState::Begin(name.clone()));
|
||||
self.next()
|
||||
}
|
||||
RarcDirectory::File { name, offset, size } => {
|
||||
Some(Node::File { name: name.clone(), offset: *offset, size: *size })
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
use std::{io::Read, path::Path};
|
||||
use std::io::Read;
|
||||
|
||||
use anyhow::{anyhow, bail, ensure, Result};
|
||||
use byteorder::{BigEndian, ReadBytesExt};
|
||||
|
@ -9,7 +9,7 @@ use crate::{
|
|||
ObjArchitecture, ObjInfo, ObjKind, ObjRelocKind, ObjSection, ObjSectionKind, ObjSymbol,
|
||||
ObjSymbolFlagSet, ObjSymbolFlags, ObjSymbolKind,
|
||||
},
|
||||
util::file::{map_file, map_reader, read_string},
|
||||
util::file::Reader,
|
||||
};
|
||||
|
||||
/// Do not relocate anything, but accumulate the offset field for the next relocation offset calculation.
|
||||
|
@ -24,17 +24,14 @@ pub const R_DOLPHIN_END: u32 = 203;
|
|||
#[allow(unused)]
|
||||
pub const R_DOLPHIN_MRKREF: u32 = 204;
|
||||
|
||||
pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
||||
let mmap = map_file(path)?;
|
||||
let mut reader = map_reader(&mmap);
|
||||
|
||||
pub fn process_rel(mut reader: Reader) -> Result<ObjInfo> {
|
||||
let module_id = reader.read_u32::<BigEndian>()?;
|
||||
ensure!(reader.read_u32::<BigEndian>()? == 0, "Expected 'next' to be 0");
|
||||
ensure!(reader.read_u32::<BigEndian>()? == 0, "Expected 'prev' to be 0");
|
||||
let num_sections = reader.read_u32::<BigEndian>()?;
|
||||
let section_info_offset = reader.read_u32::<BigEndian>()?;
|
||||
let name_offset = reader.read_u32::<BigEndian>()?;
|
||||
let name_size = reader.read_u32::<BigEndian>()?;
|
||||
let _name_offset = reader.read_u32::<BigEndian>()?;
|
||||
let _name_size = reader.read_u32::<BigEndian>()?;
|
||||
let version = reader.read_u32::<BigEndian>()?;
|
||||
ensure!(matches!(version, 1..=3), "Unsupported REL version {}", version);
|
||||
let bss_size = reader.read_u32::<BigEndian>()?;
|
||||
|
@ -125,7 +122,7 @@ pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
|||
.iter()
|
||||
.find(|section| section.elf_index == section_idx as usize)
|
||||
.ok_or_else(|| anyhow!("Failed to locate {name} section {section_idx}"))?;
|
||||
log::info!("Adding {name} section {section_idx} offset {offset:#X}");
|
||||
log::debug!("Adding {name} section {section_idx} offset {offset:#X}");
|
||||
symbols.push(ObjSymbol {
|
||||
name: name.to_string(),
|
||||
demangled_name: None,
|
||||
|
@ -135,6 +132,8 @@ pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
|||
size_known: false,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
|
||||
kind: ObjSymbolKind::Function,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
|
@ -222,31 +221,21 @@ pub fn process_rel<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
|||
reader.set_position(position);
|
||||
}
|
||||
|
||||
let name = match name_offset {
|
||||
0 => String::new(),
|
||||
_ => read_string(&mut reader, name_offset as u64, name_size as usize)?,
|
||||
};
|
||||
Ok(ObjInfo {
|
||||
module_id,
|
||||
kind: ObjKind::Relocatable,
|
||||
architecture: ObjArchitecture::PowerPc,
|
||||
name,
|
||||
// let name = match name_offset {
|
||||
// 0 => String::new(),
|
||||
// _ => read_string(&mut reader, name_offset as u64, name_size as usize).unwrap_or_default(),
|
||||
// };
|
||||
log::debug!("Read REL ID {module_id}");
|
||||
let mut obj = ObjInfo::new(
|
||||
ObjKind::Relocatable,
|
||||
ObjArchitecture::PowerPc,
|
||||
String::new(),
|
||||
symbols,
|
||||
sections,
|
||||
entry: 0,
|
||||
sda2_base: None,
|
||||
sda_base: None,
|
||||
stack_address: None,
|
||||
stack_end: None,
|
||||
db_stack_addr: None,
|
||||
arena_lo: None,
|
||||
arena_hi: None,
|
||||
splits: Default::default(),
|
||||
named_sections: Default::default(),
|
||||
link_order: vec![],
|
||||
known_functions: Default::default(),
|
||||
unresolved_relocations,
|
||||
})
|
||||
);
|
||||
obj.module_id = module_id;
|
||||
obj.unresolved_relocations = unresolved_relocations;
|
||||
Ok(obj)
|
||||
}
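Since process_rel now takes a Reader instead of a path, the caller is expected to own the mapping itself. A hedged sketch, assuming the map_file and map_reader helpers from util::file seen elsewhere in this diff; `load_rel` is a hypothetical wrapper.

use std::path::Path;

use anyhow::Result;

// Hypothetical caller: map the .rel file and hand a cursor to process_rel.
fn load_rel(path: &Path) -> Result<crate::obj::ObjInfo> {
    let map = crate::util::file::map_file(path)?;
    process_rel(crate::util::file::map_reader(&map))
}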
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
|
|
@ -32,8 +32,8 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
|||
let prolog_offset = reader.read_u32::<BigEndian>()?;
|
||||
let epilog_offset = reader.read_u32::<BigEndian>()?;
|
||||
let unresolved_offset = reader.read_u32::<BigEndian>()?;
|
||||
let internal_rel_offset = reader.read_u32::<BigEndian>()?;
|
||||
let internal_rel_size = reader.read_u32::<BigEndian>()?;
|
||||
let _internal_rel_offset = reader.read_u32::<BigEndian>()?;
|
||||
let _internal_rel_size = reader.read_u32::<BigEndian>()?;
|
||||
let external_rel_offset = reader.read_u32::<BigEndian>()?;
|
||||
let external_rel_size = reader.read_u32::<BigEndian>()?;
|
||||
let export_table_offset = reader.read_u32::<BigEndian>()?;
|
||||
|
@ -118,6 +118,8 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
|||
size_known: false,
|
||||
flags: ObjSymbolFlagSet(ObjSymbolFlags::Global.into()),
|
||||
kind: ObjSymbolKind::Function,
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
|
@ -172,6 +174,8 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
|||
size_known: false,
|
||||
flags: Default::default(),
|
||||
kind: Default::default(),
|
||||
align: None,
|
||||
data_kind: Default::default(),
|
||||
});
|
||||
}
|
||||
reader.set_position(import_table_offset as u64);
|
||||
|
@ -187,32 +191,14 @@ pub fn process_rso<P: AsRef<Path>>(path: P) -> Result<ObjInfo> {
|
|||
0 => String::new(),
|
||||
_ => read_string(&mut reader, name_offset as u64, name_size as usize)?,
|
||||
};
|
||||
Ok(ObjInfo {
|
||||
kind: ObjKind::Relocatable,
|
||||
architecture: ObjArchitecture::PowerPc,
|
||||
name,
|
||||
symbols,
|
||||
sections,
|
||||
entry: 0,
|
||||
sda2_base: None,
|
||||
sda_base: None,
|
||||
stack_address: None,
|
||||
stack_end: None,
|
||||
db_stack_addr: None,
|
||||
arena_lo: None,
|
||||
arena_hi: None,
|
||||
splits: Default::default(),
|
||||
named_sections: Default::default(),
|
||||
link_order: vec![],
|
||||
known_functions: Default::default(),
|
||||
module_id: 0,
|
||||
unresolved_relocations: vec![],
|
||||
})
|
||||
|
||||
let obj = ObjInfo::new(ObjKind::Relocatable, ObjArchitecture::PowerPc, name, symbols, sections);
|
||||
Ok(obj)
|
||||
}
|
||||
|
||||
fn symbol_hash(s: &str) -> u32 {
|
||||
s.bytes().fold(0u32, |hash, c| {
|
||||
let mut m = (hash << 4) + c as u32;
|
||||
let mut m = (hash << 4).wrapping_add(c as u32);
|
||||
let n = m & 0xF0000000;
|
||||
if n != 0 {
|
||||
m ^= n >> 24;
|
||||
|
|
|
@ -0,0 +1,97 @@
|
|||
// Source: https://github.com/Julgodis/picori/blob/650da9f4fe6050b39b80d5360416591c748058d5/src/yaz0.rs
|
||||
// License: MIT
|
||||
// Modified to use `std::io::Read`/`Seek` and `byteorder`
|
||||
use std::io::{Read, Seek};
|
||||
|
||||
use anyhow::{ensure, Result};
|
||||
use byteorder::{BigEndian, ReadBytesExt};
|
||||
|
||||
/// Yaz0 header.
|
||||
pub struct Header {
|
||||
/// Yaz0 magic (0x59617A30).
|
||||
pub magic: u32,
|
||||
/// Size of decompressed data.
|
||||
pub decompressed_size: u32,
|
||||
_reserved0: u32,
|
||||
_reserved1: u32,
|
||||
}
|
||||
|
||||
impl Header {
|
||||
/// Reads a Yaz0 header from a reader.
|
||||
pub fn from_binary<D: Read>(input: &mut D) -> Result<Header> {
|
||||
Ok(Header {
|
||||
magic: input.read_u32::<BigEndian>()?,
|
||||
decompressed_size: input.read_u32::<BigEndian>()?,
|
||||
_reserved0: input.read_u32::<BigEndian>()?,
|
||||
_reserved1: input.read_u32::<BigEndian>()?,
|
||||
})
|
||||
}
|
||||
|
||||
/// Checks if the header is valid.
|
||||
pub fn is_valid(&self) -> bool { self.magic == 0x59617A30 }
|
||||
|
||||
pub fn decompressed_size(input: &mut impl Read) -> Result<usize> {
|
||||
let header = Header::from_binary(input)?;
|
||||
ensure!(header.is_valid(), "Invalid Yaz0 magic");
|
||||
Ok(header.decompressed_size as usize)
|
||||
}
|
||||
}
|
||||
|
||||
/// Decompresses the data into a newly allocated [`Vec`]. Assumes a Yaz0 header followed by
|
||||
/// compressed data.
|
||||
pub fn decompress_file<D: Read + Seek>(input: &mut D) -> Result<Vec<u8>> {
|
||||
let decompressed_size = Header::decompressed_size(input)?;
|
||||
decompress(input, decompressed_size)
|
||||
}
|
||||
|
||||
/// Decompresses the data into a newly allocated [`Vec`]. `decompressed_size` can be determined
|
||||
/// by looking at the Yaz0 header [`Header`].
|
||||
pub fn decompress<D: Read + Seek>(input: &mut D, decompressed_size: usize) -> Result<Vec<u8>> {
|
||||
let mut output = vec![0; decompressed_size];
|
||||
decompress_into(input, output.as_mut_slice())?;
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Decompresses the data into the given buffer. The buffer must be large
|
||||
/// enough to hold the decompressed data.
|
||||
pub fn decompress_into<D: Read + Seek>(input: &mut D, destination: &mut [u8]) -> Result<()> {
|
||||
let decompressed_size = destination.len();
|
||||
let mut dest = 0;
|
||||
let mut code = 0;
|
||||
let mut code_bits = 0;
|
||||
|
||||
while dest < decompressed_size {
|
||||
if code_bits == 0 {
|
||||
code = input.read_u8()? as u32;
|
||||
code_bits = 8;
|
||||
}
|
||||
|
||||
if code & 0x80 != 0 {
|
||||
let byte = input.read_u8()?;
|
||||
destination[dest] = byte;
|
||||
dest += 1;
|
||||
} else {
|
||||
let byte0 = input.read_u8()?;
|
||||
let byte1 = input.read_u8()?;
|
||||
let a = (byte0 & 0xf) as usize;
|
||||
let b = (byte0 >> 4) as usize;
|
||||
let offset = (a << 8) | (byte1 as usize);
|
||||
let length = match b {
|
||||
0 => (input.read_u8()? as usize) + 0x12,
|
||||
length => length + 2,
|
||||
};
|
||||
|
||||
ensure!(offset < dest, "Unexpected EOF");
|
||||
let base = dest - (offset + 1);
|
||||
for n in 0..length {
|
||||
destination[dest] = destination[base + n];
|
||||
dest += 1;
|
||||
}
|
||||
}
|
||||
|
||||
code <<= 1;
|
||||
code_bits -= 1;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
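To make the bit layout above concrete, here is a tiny hand-built stream (hypothetical data, not from any real game file): a flag byte whose two set bits emit the literals "AB", followed by one back-reference of length 4 at distance 2, expanding to "ABABAB".

use std::io::Cursor;

use anyhow::Result;

fn yaz0_demo() -> Result<()> {
    let mut data = Vec::new();
    data.extend_from_slice(b"Yaz0"); // magic
    data.extend_from_slice(&6u32.to_be_bytes()); // decompressed size
    data.extend_from_slice(&[0u8; 8]); // reserved words
    data.push(0b1100_0000); // flags: literal, literal, back-reference
    data.extend_from_slice(b"AB"); // two literal bytes
    data.extend_from_slice(&[0x20, 0x01]); // length nibble 2 (=> 2+2), offset field 1 (=> distance 2)
    let out = decompress_file(&mut Cursor::new(data.as_slice()))?;
    assert_eq!(out, b"ABABAB".to_vec());
    Ok(())
}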
|