Initial commit
This commit is contained in:
commit
5b8fbc6cd5
|
@ -0,0 +1,13 @@
|
||||||
|
# Auto detect text files and perform LF normalization
|
||||||
|
* text=auto
|
||||||
|
|
||||||
|
# Explicitly declare text files
|
||||||
|
*.py text
|
||||||
|
|
||||||
|
# Enforce platform-specific encodings
|
||||||
|
*.bat text eol=crlf
|
||||||
|
*.sh text eol=lf
|
||||||
|
*.sha1 text eol=lf
|
||||||
|
|
||||||
|
# DTK keeps these files with LF
|
||||||
|
config/**/*.txt text eol=lf
|
|
@ -0,0 +1,11 @@
|
||||||
|
__pycache__
|
||||||
|
.idea
|
||||||
|
.vscode
|
||||||
|
.ninja_*
|
||||||
|
*.exe
|
||||||
|
build
|
||||||
|
build.ninja
|
||||||
|
objdiff.json
|
||||||
|
orig/*/*
|
||||||
|
!orig/*/.gitkeep
|
||||||
|
/*.txt
|
|
@ -0,0 +1,22 @@
|
||||||
|
{
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"name": "Linux",
|
||||||
|
"includePath": [
|
||||||
|
"${workspaceFolder}/include/**"
|
||||||
|
],
|
||||||
|
"cStandard": "c99",
|
||||||
|
"cppStandard": "c++98",
|
||||||
|
"intelliSenseMode": "linux-clang-x86",
|
||||||
|
"compilerPath": "",
|
||||||
|
"configurationProvider": "ms-vscode.makefile-tools",
|
||||||
|
"browse": {
|
||||||
|
"path": [
|
||||||
|
"${workspaceFolder}/include"
|
||||||
|
],
|
||||||
|
"limitSymbolsToIncludedHeaders": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"version": 4
|
||||||
|
}
|
|
@ -0,0 +1,26 @@
|
||||||
|
{
|
||||||
|
"[c]": {
|
||||||
|
"files.encoding": "utf8",
|
||||||
|
"editor.defaultFormatter": "xaver.clang-format"
|
||||||
|
},
|
||||||
|
"[cpp]": {
|
||||||
|
"files.encoding": "utf8",
|
||||||
|
"editor.defaultFormatter": "xaver.clang-format"
|
||||||
|
},
|
||||||
|
"[python]": {
|
||||||
|
"editor.defaultFormatter": "ms-python.black-formatter"
|
||||||
|
},
|
||||||
|
"files.insertFinalNewline": true,
|
||||||
|
"files.trimFinalNewlines": true,
|
||||||
|
"files.associations": {
|
||||||
|
"*.inc": "cpp"
|
||||||
|
},
|
||||||
|
"search.useIgnoreFiles": false,
|
||||||
|
"search.exclude": {
|
||||||
|
"build/*/config.json": true,
|
||||||
|
"build/**/*.MAP": true,
|
||||||
|
"build.ninja": true,
|
||||||
|
".ninja_*": true,
|
||||||
|
"objdiff.json": true
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2023 Luke Street
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
|
@ -0,0 +1,114 @@
|
||||||
|
Some Game
|
||||||
|
[![Build Status]][actions] ![Progress] ![DOL Progress] ![RELs Progress] [![Discord Badge]][discord]
|
||||||
|
=============
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Replace with your repository's URL.
|
||||||
|
-->
|
||||||
|
[Build Status]: https://github.com/zeldaret/tww/actions/workflows/build.yml/badge.svg
|
||||||
|
[actions]: https://github.com/zeldaret/tww/actions/workflows/build.yml
|
||||||
|
<!---
|
||||||
|
Code progress URL:
|
||||||
|
https://progress.decomp.club/data/[project]/[version]/all/?mode=shield&measure=code
|
||||||
|
URL encoded then appended to: https://img.shields.io/endpoint?label=Code&url=
|
||||||
|
-->
|
||||||
|
[Progress]: https://img.shields.io/endpoint?label=Code&url=https%3A%2F%2Fprogress.decomp.club%2Fdata%2Ftww%2FGZLE01%2Fall%2F%3Fmode%3Dshield%26measure%3Dcode
|
||||||
|
<!---
|
||||||
|
DOL progress URL:
|
||||||
|
https://progress.decomp.club/data/[project]/[version]/dol/?mode=shield&measure=code
|
||||||
|
URL encoded then appended to: https://img.shields.io/endpoint?label=DOL&url=
|
||||||
|
-->
|
||||||
|
[DOL Progress]: https://img.shields.io/endpoint?label=DOL&url=https%3A%2F%2Fprogress.decomp.club%2Fdata%2Ftww%2FGZLE01%2Fdol%2F%3Fmode%3Dshield%26measure%3Dcode
|
||||||
|
<!--
|
||||||
|
REL progress URL:
|
||||||
|
https://progress.decomp.club/data/[project]/[version]/modules/?mode=shield&measure=code
|
||||||
|
URL encoded then appended to: https://img.shields.io/endpoint?label=RELs&url=
|
||||||
|
-->
|
||||||
|
[RELs Progress]: https://img.shields.io/endpoint?label=RELs&url=https%3A%2F%2Fprogress.decomp.club%2Fdata%2Ftww%2FGZLE01%2Fmodules%2F%3Fmode%3Dshield%26measure%3Dcode
|
||||||
|
<!--
|
||||||
|
Replace with your Discord server's ID and invite URL.
|
||||||
|
-->
|
||||||
|
[Discord Badge]: https://img.shields.io/discord/727908905392275526?color=%237289DA&logo=discord&logoColor=%23FFFFFF
|
||||||
|
[discord]: https://discord.gg/hKx3FJJgrV
|
||||||
|
|
||||||
|
A work-in-progress decompilation of Some Game.
|
||||||
|
|
||||||
|
This repository does **not** contain any game assets or assembly whatsoever. An existing copy of the game is required.
|
||||||
|
|
||||||
|
Supported versions:
|
||||||
|
|
||||||
|
- `GAMEID`: Rev 0 (USA)
|
||||||
|
|
||||||
|
Dependencies
|
||||||
|
============
|
||||||
|
|
||||||
|
Windows:
|
||||||
|
--------
|
||||||
|
|
||||||
|
On Windows, it's **highly recommended** to use native tooling. WSL or msys2 are **not** required.
|
||||||
|
When running under WSL, [objdiff](#diffing) is unable to get filesystem notifications for automatic rebuilds.
|
||||||
|
|
||||||
|
- Install [Python](https://www.python.org/downloads/) and add it to `%PATH%`.
|
||||||
|
- Also available from the [Windows Store](https://apps.microsoft.com/store/detail/python-311/9NRWMJP3717K).
|
||||||
|
- Download [ninja](https://github.com/ninja-build/ninja/releases) and add it to `%PATH%`.
|
||||||
|
- Quick install via pip: `pip install ninja`
|
||||||
|
|
||||||
|
macOS:
|
||||||
|
------
|
||||||
|
- Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages):
|
||||||
|
```
|
||||||
|
brew install ninja
|
||||||
|
```
|
||||||
|
- Install [wine-crossover](https://github.com/Gcenx/homebrew-wine):
|
||||||
|
```
|
||||||
|
brew install --cask --no-quarantine gcenx/wine/wine-crossover
|
||||||
|
```
|
||||||
|
|
||||||
|
After OS upgrades, if macOS complains about `Wine Crossover.app` being unverified, you can unquarantine it using:
|
||||||
|
```sh
|
||||||
|
sudo xattr -rd com.apple.quarantine '/Applications/Wine Crossover.app'
|
||||||
|
```
|
||||||
|
|
||||||
|
Linux:
|
||||||
|
------
|
||||||
|
- Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages).
|
||||||
|
- For non-x86(_64) platforms: Install wine from your package manager.
|
||||||
|
- For x86(_64), [WiBo](https://github.com/decompals/WiBo), a minimal 32-bit Windows binary wrapper, will be automatically downloaded and used.
|
||||||
|
|
||||||
|
Building
|
||||||
|
========
|
||||||
|
|
||||||
|
- Clone the repository:
|
||||||
|
```
|
||||||
|
git clone https://github.com/my/repo.git
|
||||||
|
```
|
||||||
|
- Using [Dolphin Emulator](https://dolphin-emu.org/), extract your game to `orig/GAMEID`.
|
||||||
|
![](assets/dolphin-extract.png)
|
||||||
|
- To save space, the only necessary files are the following. Any others can be deleted.
|
||||||
|
- `sys/main.dol`
|
||||||
|
- `files/rels/*.rel`
|
||||||
|
- Configure:
|
||||||
|
```
|
||||||
|
python configure.py
|
||||||
|
```
|
||||||
|
To use a version other than `GAMEID` (USA), specify it with `--version`.
|
||||||
|
- Build:
|
||||||
|
```
|
||||||
|
ninja
|
||||||
|
```
|
||||||
|
|
||||||
|
Visual Studio Code
|
||||||
|
==================
|
||||||
|
|
||||||
|
If desired, use the recommended Visual Studio Code settings by renaming the `.vscode.example` directory to `.vscode`.
|
||||||
|
|
||||||
|
Diffing
|
||||||
|
=======
|
||||||
|
|
||||||
|
Once the initial build succeeds, an `objdiff.json` should exist in the project root.
|
||||||
|
|
||||||
|
Download the latest release from [encounter/objdiff](https://github.com/encounter/objdiff). Under project settings, set `Project directory`. The configuration should be loaded automatically.
|
||||||
|
|
||||||
|
Select an object from the left sidebar to begin diffing. Changes to the project will rebuild automatically: changes to source files, headers, `configure.py`, `splits.txt` or `symbols.txt`.
|
||||||
|
|
||||||
|
![](assets/objdiff.png)
|
|
@ -0,0 +1,65 @@
|
||||||
|
decomp-toolkit Project Template
|
||||||
|
===============================
|
||||||
|
|
||||||
|
If starting a new GameCube / Wii decompilation project, this repository can be used as a scaffold.
|
||||||
|
|
||||||
|
See [decomp-toolkit](https://github.com/encounter/decomp-toolkit) for background on the concept and more information on the tooling used.
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- [Dependencies](docs/dependencies.md)
|
||||||
|
- [Getting Started](docs/getting_started.md)
|
||||||
|
- [`symbols.txt`](docs/symbols.md)
|
||||||
|
- [`splits.txt`](docs/splits.md)
|
||||||
|
|
||||||
|
General:
|
||||||
|
- [Common BSS](docs/common_bss.md)
|
||||||
|
- [`.comment` section](docs/comment_section.md)
|
||||||
|
|
||||||
|
References
|
||||||
|
--------
|
||||||
|
|
||||||
|
- [Discord: GC/Wii Decompilation](https://discord.gg/hKx3FJJgrV) (Come to `#dtk` for help!)
|
||||||
|
- [objdiff](https://github.com/encounter/objdiff) (Local diffing tool)
|
||||||
|
- [decomp.me](https://decomp.me) (Collaborate on matches)
|
||||||
|
- [frogress](https://github.com/decompals/frogress) (Decompilation progress API)
|
||||||
|
- [wibo](https://github.com/decompals/wibo) (Minimal Win32 wrapper for Linux)
|
||||||
|
- [sjiswrap](https://github.com/encounter/sjiswrap) (UTF-8 to Shift JIS wrapper)
|
||||||
|
|
||||||
|
Projects using this structure:
|
||||||
|
- [zeldaret/tww](https://github.com/zeldaret/tww)
|
||||||
|
- [PrimeDecomp/echoes](https://github.com/PrimeDecomp/echoes)
|
||||||
|
- [DarkRTA/rb3](https://github.com/DarkRTA/rb3)
|
||||||
|
- [InputEvelution/wp](https://github.com/InputEvelution/wp)
|
||||||
|
- [Rainchus/ttyd_dtk](https://github.com/Rainchus/ttyd_dtk)
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
- Few external dependencies: Just `python` for the generator and `ninja` for the build system. See [Dependencies](docs/dependencies.md).
|
||||||
|
- Simple configuration: Everything lives in `config.yml`, `symbols.txt`, and `splits.txt`.
|
||||||
|
- Multi-version support: Separate configurations for each game version, and a `configure.py --version` flag to switch between them.
|
||||||
|
- Feature-rich analyzer: Many time-consuming tasks are automated, allowing you to focus on the decompilation itself. See [Analyzer features](https://github.com/encounter/decomp-toolkit#analyzer-features).
|
||||||
|
- REL support: RELs each have their own `symbols.txt` and `splits.txt`, and will automatically be built and linked against the main binary.
|
||||||
|
- No manual assembly: decomp-toolkit handles splitting the DOL into relocatable objects based on the configuration. No game assets are committed to the repository.
|
||||||
|
- Progress calculation and upload script for [frogress](https://github.com/decompals/frogress).
|
||||||
|
- Integration with [objdiff](https://github.com/encounter/objdiff) for a diffing workflow.
|
||||||
|
- (TODO) CI workflow template for GitHub Actions.
|
||||||
|
|
||||||
|
Project structure
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
- `configure.py` - Project configuration and generator script.
|
||||||
|
- `config/[GAMEID]` - Configuration files for each game version.
|
||||||
|
- `config/[GAMEID]/build.sha1` - SHA-1 hashes for each built artifact, for final verification.
|
||||||
|
- `build/` - Build artifacts generated by the build process. Ignored by `.gitignore`.
|
||||||
|
- `orig/[GAMEID]` - Original game files, extracted from the disc. Ignored by `.gitignore`.
|
||||||
|
- `orig/[GAMEID]/.gitkeep` - Empty checked-in file to ensure the directory is created on clone.
|
||||||
|
- `src/` - C/C++ source files.
|
||||||
|
- `include/` - C/C++ header files.
|
||||||
|
- `tools/` - Scripts shared between projects.
|
||||||
|
|
||||||
|
Temporary, delete when done:
|
||||||
|
- `config/GAMEID/config.example.yml` - Example configuration file and documentation.
|
||||||
|
- `docs/` - Documentation for decomp-toolkit configuration.
|
||||||
|
- `README.md` - This file, replace with your own. For a template, see [`README.example.md`](README.example.md).
|
Binary file not shown.
After Width: | Height: | Size: 12 KiB |
Binary file not shown.
After Width: | Height: | Size: 65 KiB |
|
@ -0,0 +1,2 @@
|
||||||
|
0123456789abcdef0123456789abcdef01234567 build/GAMEID/main.dol
|
||||||
|
0123456789abcdef0123456789abcdef01234567 build/GAMEID/module/module.rel
|
|
@ -0,0 +1,100 @@
|
||||||
|
# Path to the main.dol file.
|
||||||
|
object: orig/GAMEID/sys/main.dol
|
||||||
|
# (optional) SHA-1 hash of the main.dol file for verification.
|
||||||
|
hash: 0123456789abcdef0123456789abcdef01234567
|
||||||
|
# (optional) Name override. Defaults to "main".
|
||||||
|
name: main
|
||||||
|
|
||||||
|
# (optional) Path to the symbols.txt file.
|
||||||
|
# This file will be created if it does not exist.
|
||||||
|
# See docs/symbols.md for more information.
|
||||||
|
symbols: config/GAMEID/symbols.txt
|
||||||
|
# (optional) Path to the splits.txt file.
|
||||||
|
# This file will be created if it does not exist.
|
||||||
|
# See docs/splits.md for more information.
|
||||||
|
splits: config/GAMEID/splits.txt
|
||||||
|
|
||||||
|
# (optional) Path to the DOL's .map file.
|
||||||
|
# This should only be used for initial analysis, and generating the symbols and splits files.
|
||||||
|
# Once those files are generated, remove this to avoid conflicts.
|
||||||
|
map: orig/GAMEID/files/main.MAP
|
||||||
|
# (optional) Start address of common BSS symbols, if any.
|
||||||
|
# Useful along with `map`, but not required otherwise, since common BSS
|
||||||
|
# is marked in the splits file.
|
||||||
|
common_start: 0x80001234
|
||||||
|
|
||||||
|
# (optional) Version used to generate `.comment` sections in the split objects.
|
||||||
|
# If not specified, no `.comment` sections will be generated.
|
||||||
|
# See docs/comment_section.md for more information.
|
||||||
|
mw_comment_version: 8
|
||||||
|
|
||||||
|
# (optional) Path to `selfile.sel` for Wii games with RSO files.
|
||||||
|
selfile: orig/GAMEID/files/selfile.sel
|
||||||
|
# (optional) SHA-1 hash of the `selfile.sel` file for verification.
|
||||||
|
selfile_hash: 0123456789abcdef0123456789abcdef01234567
|
||||||
|
|
||||||
|
# (optional) When enabled, function boundary analysis will be skipped.
|
||||||
|
# Only valid _after_ initial analysis has been performed and
|
||||||
|
# the symbols and splits files have been generated.
|
||||||
|
quick_analysis: false
|
||||||
|
|
||||||
|
# (optional) When enabled, the analyzer will attempt to detect sizes
|
||||||
|
# and data types of objects based on code usage and alignment.
|
||||||
|
detect_objects: true
|
||||||
|
|
||||||
|
# (optional) When enabled, the analyzer will attempt to detect strings,
|
||||||
|
# wide strings, and string tables.
|
||||||
|
detect_strings: true
|
||||||
|
|
||||||
|
# (optional) Whether to write disassembly to the split output directory.
|
||||||
|
# While not used in the build process, the disassembly is useful
|
||||||
|
# for reading and usage with other tools, like decomp.me.
|
||||||
|
write_asm: true
|
||||||
|
|
||||||
|
# (optional) If symbols are _fully_ known (e.g. from a complete map file),
|
||||||
|
# this can be set to true to skip most analysis steps, and ensure new
|
||||||
|
# symbols are not created by the analyzer.
|
||||||
|
# If you're not sure, leave this false.
|
||||||
|
symbols_known: false
|
||||||
|
|
||||||
|
# (optional) Whether to create `gap_` symbols to prevent the linker from
|
||||||
|
# adjusting the alignment / address of symbols.
|
||||||
|
# When alignments are fully known (e.g. from a complete map file),
|
||||||
|
# this can be set to false.
|
||||||
|
fill_gaps: true
|
||||||
|
|
||||||
|
# (optional) Custom template for `ldscript.lcf`. Avoid unless necessary.
|
||||||
|
# See https://github.com/encounter/decomp-toolkit/blob/main/assets/ldscript.lcf
|
||||||
|
ldscript_template: config/GAMEID/module/ldscript.tpl
|
||||||
|
|
||||||
|
# (optional) Configuration for modules.
|
||||||
|
modules:
|
||||||
|
|
||||||
|
- # Path to the module.
|
||||||
|
object: orig/GAMEID/files/module.rel
|
||||||
|
|
||||||
|
# (optional) SHA-1 hash of the module for verification.
|
||||||
|
hash: 0123456789abcdef0123456789abcdef01234567
|
||||||
|
|
||||||
|
# (optional) Name of the module. Defaults to the module's filename.
|
||||||
|
name: module
|
||||||
|
|
||||||
|
# (optional) Path to the module's symbols.txt file.
|
||||||
|
# This file will be created if it does not exist.
|
||||||
|
# See docs/symbols.md for more information.
|
||||||
|
symbols: config/GAMEID/module/symbols.txt
|
||||||
|
# (optional) Path to the module's splits.txt file.
|
||||||
|
# This file will be created if it does not exist.
|
||||||
|
# See docs/splits.md for more information.
|
||||||
|
splits: config/GAMEID/module/splits.txt
|
||||||
|
|
||||||
|
# (optional) Path to the module's .map file.
|
||||||
|
# See `map` above for more information.
|
||||||
|
map: orig/GAMEID/files/module.MAP
|
||||||
|
|
||||||
|
# (optional) Mark symbols as "force active" / "exported".
|
||||||
|
force_active: []
|
||||||
|
|
||||||
|
# (optional) Custom template for `ldscript.lcf`, if needed.
|
||||||
|
# See https://github.com/encounter/decomp-toolkit/blob/main/assets/ldscript_partial.lcf
|
||||||
|
ldscript_template: config/GAMEID/module/ldscript.tpl
|
|
@ -0,0 +1,12 @@
|
||||||
|
# See config.example.yml for documentation.
|
||||||
|
object: orig/GAMEID/sys/main.dol
|
||||||
|
hash: 0123456789abcdef0123456789abcdef01234567
|
||||||
|
symbols: config/GAMEID/symbols.txt
|
||||||
|
splits: config/GAMEID/splits.txt
|
||||||
|
mw_comment_version: 8
|
||||||
|
|
||||||
|
modules:
|
||||||
|
- object: orig/GAMEID/files/module.rel
|
||||||
|
hash: 0123456789abcdef0123456789abcdef01234567
|
||||||
|
symbols: config/GAMEID/module/symbols.txt
|
||||||
|
splits: config/GAMEID/module/splits.txt
|
|
@ -0,0 +1 @@
|
||||||
|
// Intentionally empty. Initial analysis will generate this automatically.
|
|
@ -0,0 +1 @@
|
||||||
|
// Intentionally empty. Initial analysis will generate this automatically.
|
|
@ -0,0 +1,233 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
###
|
||||||
|
# Generates build files for the project.
|
||||||
|
# This file also includes the project configuration,
|
||||||
|
# such as compiler flags and the object matching status.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# python3 configure.py
|
||||||
|
# ninja
|
||||||
|
#
|
||||||
|
# Append --help to see available options.
|
||||||
|
###
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from tools.project import (
|
||||||
|
Object,
|
||||||
|
ProjectConfig,
|
||||||
|
calculate_progress,
|
||||||
|
generate_build,
|
||||||
|
is_windows,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Game versions
DEFAULT_VERSION = 0
VERSIONS = [
    "GAMEID",  # 0
]

# Human-readable list of supported versions for --help / error text,
# e.g. "A, B or C" when more than one version exists.
versions_str = (
    ", ".join(VERSIONS[:-1]) + f" or {VERSIONS[-1]}"
    if len(VERSIONS) > 1
    else VERSIONS[0]
)
|
||||||
|
|
||||||
|
# Command-line interface. `mode` selects the action; the remaining
# options adjust paths and build behavior.
parser = argparse.ArgumentParser()
parser.add_argument("mode", nargs="?", default="configure",
                    help="configure or progress (default: configure)")
parser.add_argument("--version", dest="version",
                    default=VERSIONS[DEFAULT_VERSION],
                    help=f"version to build ({versions_str})")
parser.add_argument("--build-dir", dest="build_dir", type=Path,
                    default=Path("build"),
                    help="base build directory (default: build)")
parser.add_argument("--compilers", dest="compilers", type=Path,
                    help="path to compilers (optional)")
parser.add_argument("--map", dest="map", action="store_true",
                    help="generate map file(s)")
parser.add_argument("--debug", dest="debug", action="store_true",
                    help="build with debug info (non-matching)")
# A wrapper is only relevant when running the Windows toolchain elsewhere.
if not is_windows():
    parser.add_argument("--wrapper", dest="wrapper", type=Path,
                        help="path to wibo or wine (optional)")
parser.add_argument("--build-dtk", dest="build_dtk", type=Path,
                    help="path to decomp-toolkit source (optional)")
parser.add_argument("--sjiswrap", dest="sjiswrap", type=Path,
                    help="path to sjiswrap.exe (optional)")
parser.add_argument("--verbose", dest="verbose", action="store_true",
                    help="print verbose output")
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Build the project configuration from the parsed command line.
config = ProjectConfig()
config.version = args.version.upper()
if config.version not in VERSIONS:
    sys.exit(f"Invalid version '{config.version}', expected {versions_str}")
# Index into VERSIONS, passed to the compiler as -DVERSION=<n>.
version_num = VERSIONS.index(config.version)

# Apply arguments
config.build_dir = args.build_dir
config.build_dtk_path = args.build_dtk
config.compilers_path = args.compilers
config.debug = args.debug
config.generate_map = args.map
config.sjiswrap_path = args.sjiswrap
# --wrapper is only registered on non-Windows hosts (see argparse setup).
if not is_windows():
    config.wrapper = args.wrapper
|
||||||
|
|
||||||
|
# Tool versions
# Pinned release tags of the external tooling; presumably fetched by
# tools.project during the build — confirm before bumping.
config.compilers_tag = "1"
config.dtk_tag = "v0.5.5"
config.sjiswrap_tag = "v1.1.1"
config.wibo_tag = "0.6.3"

# Project
# Per-version configuration and verification hashes live under config/<VERSION>/.
config.config_path = Path("config") / config.version / "config.yml"
config.check_sha_path = Path("config") / config.version / "build.sha1"
# Flags passed to the Metrowerks linker.
config.ldflags = [
    "-fp hardware",
    "-nodefaults",
    "-listclosure",
]
|
||||||
|
|
||||||
|
# Base flags, common to most GC/Wii games.
# Generally leave untouched, with overrides added below.
cflags_base = [
    "-nodefaults",
    "-proc gekko",
    "-align powerpc",
    "-enum int",
    "-fp hardware",
    "-Cpp_exceptions off",
    # "-W all",
    "-O4,p",
    "-inline auto",
    '-pragma "cats off"',
    '-pragma "warn_notinlined off"',
    "-maxerrors 1",
    "-nosyspath",
    "-RTTI off",
    "-fp_contract on",
    "-str reuse",
    "-i include",
    "-i libc",
    "-enc SJIS",
    f"-DVERSION={version_num}",
]

# Debug flags: add symbols and the DEBUG define, otherwise define NDEBUG.
if config.debug:
    cflags_base += ["-sym on", "-DDEBUG=1"]
else:
    cflags_base += ["-DNDEBUG=1"]

# Metrowerks library flags
cflags_runtime = cflags_base + [
    "-use_lmw_stmw on",
    "-str reuse,pool,readonly",
    "-gccinc",
    "-common off",
    "-inline auto",
]

# REL flags: RELs must not use the small-data sections.
cflags_rel = cflags_base + [
    "-sdata 0",
    "-sdata2 0",
]

config.linker_version = "Wii/1.3"
|
||||||
|
|
||||||
|
|
||||||
|
# Helper function for Dolphin libraries
def DolphinLib(lib_name, objects):
    """Return a library config entry for a Dolphin SDK library.

    All Dolphin libraries share the same compiler version and base
    cflags; only the library name and object list vary.
    """
    return dict(
        lib=lib_name,
        mw_version="Wii/1.1",
        cflags=cflags_base,
        host=False,
        objects=objects,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Helper function for REL script objects
def Rel(lib_name, objects):
    """Return a library config entry for a REL (relocatable module).

    RELs are built with the REL-specific cflags (small data disabled)
    and the newer compiler version.
    """
    return dict(
        lib=lib_name,
        mw_version="Wii/1.3",
        cflags=cflags_rel,
        host=True,
        objects=objects,
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Readable aliases for the per-object matching status below.
Matching = True
NonMatching = False

config.warn_missing_config = True
config.warn_missing_source = False
# Library/object configuration: one entry per library, each listing its
# translation units and whether they currently match the original.
config.libs = [
    dict(
        lib="Runtime.PPCEABI.H",
        mw_version=config.linker_version,
        cflags=cflags_runtime,
        host=False,
        objects=[
            Object(NonMatching, "Runtime.PPCEABI.H/global_destructor_chain.c"),
            Object(NonMatching, "Runtime.PPCEABI.H/__init_cpp_exceptions.cpp"),
        ],
    ),
]
|
||||||
|
|
||||||
|
# Dispatch on the requested mode.
mode = args.mode
if mode == "configure":
    # Write build.ninja and objdiff.json
    generate_build(config)
elif mode == "progress":
    # Print progress and write progress.json
    config.progress_each_module = args.verbose
    calculate_progress(config)
else:
    sys.exit("Unknown mode: " + mode)
|
|
@ -0,0 +1,106 @@
|
||||||
|
# CodeWarrior `.comment` section
|
||||||
|
|
||||||
|
Files built with `mwcc` contain a `.comment` section:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ powerpc-eabi-readelf -We object.o
|
||||||
|
|
||||||
|
Section Headers:
|
||||||
|
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
|
||||||
|
[ 0] NULL 00000000 000000 000000 00 0 0 0
|
||||||
|
[ 1] .text PROGBITS 00000000 000034 000708 00 AX 0 0 4
|
||||||
|
...
|
||||||
|
[16] .comment PROGBITS 00000000 00153b 0001b4 01 0 0 1
|
||||||
|
```
|
||||||
|
|
||||||
|
The `.comment` section contains information that `mwld` uses during linking, primarily symbol alignment and a "force active" / export flag.
|
||||||
|
|
||||||
|
If missing, `mwld` will **not** adjust the alignment of symbols or remove any unused symbols.
|
||||||
|
|
||||||
|
This behavior is quite useful in some cases. When we split our program into objects, we're working from the final post-aligned, post-stripped result, and don't want the linker to make any changes. Most decompilation projects rely on this behavior unintentionally, since their generated objects don't contain a `.comment` section. (For example, objects built with `powerpc-eabi-as`.)
|
||||||
|
|
||||||
|
However, we need the `.comment` section for some purposes:
|
||||||
|
- Reproducing the [common BSS inflation bug](common_bss.md#inflation-bug) requires the `.comment` section present, due to the above. The linker inflates the size of the first common BSS symbol in a TU, but won't actually move any data around unless the `.comment` section is present.
|
||||||
|
- In newer versions of the linker, using common BSS at all _without_ a valid `.comment` section will cause an internal linker error.
|
||||||
|
|
||||||
|
When the `.comment` section is generated, decomp-toolkit will mark all global symbols as "exported" to prevent any deadstripping, since the presence of the `.comment` section itself enables deadstripping.
|
||||||
|
|
||||||
|
Generating the `.comment` section and setting the "export" flag is also useful to prevent the linker from removing entire objects. A missing `.comment` section will prevent the removal of unused symbols _inside_ of an object, but the linker will still remove the entire object itself if it thinks it's unused.
|
||||||
|
|
||||||
|
## Contents
|
||||||
|
|
||||||
|
The contents of this section follow a very simple format:
|
||||||
|
|
||||||
|
### Header
|
||||||
|
|
||||||
|
`[0x0 size: 0xB]` Magic: `43 6F 64 65 57 61 72 72 69 6F 72` ("CodeWarrior")
|
||||||
|
|
||||||
|
`[0xB size: 0x1]` Version(?): `XX`
|
||||||
|
|
||||||
|
It's not known whether this field actually affects `mwld` in any way, but it's configurable for completeness' sake. (See `mw_comment_version` in [`config.example.yml`](/config/GAMEID/config.example.yml).)
|
||||||
|
|
||||||
|
Known values:
|
||||||
|
- `08` - CodeWarrior for GameCube 1.0+
|
||||||
|
- `0A` - CodeWarrior for GameCube 1.3.2+
|
||||||
|
- `0B`, `0C` - CodeWarrior for GameCube 2.7+ (difference unknown)
|
||||||
|
- `0E`, `0F` - CodeWarrior for GameCube 3.0a3+ (difference unknown)
|
||||||
|
|
||||||
|
`[0xC size: 0x4]` Compiler version: `XX XX XX XX`
|
||||||
|
|
||||||
|
First 3 bytes are major, minor, and patch version numbers.
|
||||||
|
4th byte is unknown, but is always `01`.
|
||||||
|
|
||||||
|
Example: `Version 2.3.3 build 144` -> `02 03 00 01`
|
||||||
|
Often the `.exe`'s properties (which `--help` reads from) and the internal version number (here) will differ.
|
||||||
|
|
||||||
|
`[0x10 size: 1]` Pool data: `XX`
|
||||||
|
|
||||||
|
- `00` - Data pooling disabled
|
||||||
|
- `01` - Data pooling enabled
|
||||||
|
|
||||||
|
`[0x11 size: 1]` Float type: `XX`
|
||||||
|
|
||||||
|
- `00` - Floating point disabled
|
||||||
|
- `01` - Software floating point
|
||||||
|
- `02` - Hardware floating point
|
||||||
|
|
||||||
|
`[0x12 size: 2]` Processor type: `00 16` (Gekko)
|
||||||
|
|
||||||
|
`[0x14 size: 1]` Unknown, always `2C`. Possibly the start of symbol entries.
|
||||||
|
|
||||||
|
`[0x15 size: 1]` "Quirk" flags: `XX`
|
||||||
|
|
||||||
|
Bitfield of miscellaneous flags. Known flags:
|
||||||
|
- `01` - "Incompatible return small structs"
|
||||||
|
- `02` - "Incompatible SFPE double params"
|
||||||
|
- `04` - "Unsafe global reg vars"
|
||||||
|
|
||||||
|
`[0x16 size: 22]` Padding until `0x2C`
|
||||||
|
|
||||||
|
### Symbol entry
|
||||||
|
|
||||||
|
At `0x2C` is the first symbol entry. There is one 8 byte entry per ELF symbol.
|
||||||
|
|
||||||
|
This includes the "null" ELF symbol, so the first entry will be all 0's.
|
||||||
|
|
||||||
|
`[0x0 size: 4]` Alignment: `XX XX XX XX`
|
||||||
|
|
||||||
|
`[0x4 size: 1]` Visibility flags(?): `XX`
|
||||||
|
|
||||||
|
Known values:
|
||||||
|
- `00` - Default
|
||||||
|
- `0D` - Weak
|
||||||
|
- `0E` - Unknown, also weak?
|
||||||
|
|
||||||
|
`[0x5 size: 1]` Active flags(?): `XX`
|
||||||
|
|
||||||
|
Known values:
|
||||||
|
- `00` - Default
|
||||||
|
- `08` - Force active / export. Prevents the symbol from being deadstripped.
|
||||||
|
When applied on a section symbol, the entire section is kept as-is. This is used
|
||||||
|
by `mwcc` when data pooling is triggered (indicated by a symbol like `...data.0`), likely to prevent the hard-coded section-relative offsets from breaking.
|
||||||
|
Can also be set using `#pragma force_active on` or `__declspec(export)`.
|
||||||
|
- `10` - Unknown
|
||||||
|
- `20` - Unknown
|
||||||
|
|
||||||
|
`[0x6 size: 2]` Padding(?): `00 00`
|
|
@ -0,0 +1,69 @@
|
||||||
|
# Common BSS
|
||||||
|
|
||||||
|
When passed the `-common on` flag, `mwcc` will generate global BSS symbols as **common**. The linker deduplicates common symbols with the same name, and allocates an area at the **end** of `.bss` for them.
|
||||||
|
|
||||||
|
This is a legacy feature, allowing uninitialized global variables to be defined in headers without linker errors:
|
||||||
|
|
||||||
|
```c
|
||||||
|
// foo.h
|
||||||
|
int foo;
|
||||||
|
```
|
||||||
|
|
||||||
|
With `-common on`, any TU that includes `foo.h` will define `foo` as a **common** symbol. The linker will deduplicate `foo` across TUs, similar to weak symbols. Common symbols are then generated at the **end** of `.bss`, after all other `.bss` symbols.
|
||||||
|
|
||||||
|
With `-common off`, `foo` would be defined as a **global** symbol, and the linker would error out with a duplicate symbol error if `foo.h` was included in multiple TUs.
|
||||||
|
|
||||||
|
In `splits.txt`, common BSS can be defined with the `common` attribute:
|
||||||
|
|
||||||
|
```
|
||||||
|
foo.cpp:
|
||||||
|
.text start:0x80047E5C end:0x8004875C
|
||||||
|
.ctors start:0x803A54C4 end:0x803A54C8
|
||||||
|
.data start:0x803B1B40 end:0x803B1B60
|
||||||
|
.bss start:0x803DF828 end:0x803DFA8C
|
||||||
|
.bss start:0x8040D4AC end:0x8040D4D8 common
|
||||||
|
```
|
||||||
|
|
||||||
|
As shown above, a file can contain both regular `.bss` and common `.bss`. Marking common `.bss` appropriately is important for determining the final link order.
|
||||||
|
|
||||||
|
## Detection
|
||||||
|
|
||||||
|
Example from Pikmin 2:
|
||||||
|
```
|
||||||
|
00016e60 00000c 805069c0 1 .bss utilityU.a PSMainSide_CreaturePrm.cpp
|
||||||
|
00016e60 00000c 805069c0 4 @3464 utilityU.a PSMainSide_CreaturePrm.cpp
|
||||||
|
00016e6c 000048 805069cc 4 saoVVOutput_direction___Q214JStudio_JStage14TAdaptor_light JSystem.a object-light.cpp
|
||||||
|
00016eb4 0000d0 80506a14 4 saoVVOutput___Q214JStudio_JStage14TAdaptor_actor JSystem.a object-actor.cpp
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, we see a symbol from `utilityU.a PSMainSide_CreaturePrm.cpp`. We know that this file is very close to the _end_ of the link order. Afterwards, there's a symbol from `JSystem.a object-light.cpp`, which is very close to the _beginning_ of the link order.
|
||||||
|
|
||||||
|
A file can't be both at the beginning and end of the link order, so it's a strong indication that `saoVVOutput_direction___Q214JStudio_JStage14TAdaptor_light` marks the beginning of the common BSS section.
|
||||||
|
|
||||||
|
One other indication from this example is the lack of a `.bss` section symbol from `JSystem.a object-actor.cpp` and any following files in the link order. Section symbols aren't generated for common BSS.
|
||||||
|
|
||||||
|
Without a map, it's harder to tell if there's a common BSS section, but guesses can be made. When looking at XREFs in Ghidra, if a symbol is close to the _end_ of `.bss`, but has XREFs from various addresses close to the _beginning_ of `.text`, it could be an indication of common BSS.
|
||||||
|
|
||||||
|
For games built with older versions of the linker, the inflation bug (described below) can also be used to detect common BSS.
|
||||||
|
|
||||||
|
## Inflation bug
|
||||||
|
|
||||||
|
In older versions of the linker (<= GC 2.6?), when calculating the size of common symbols, the linker will accidentally set the size of the first common symbol in a TU to the size of the _entire_ common section in that TU.
|
||||||
|
|
||||||
|
Example from Pikmin 2:
|
||||||
|
|
||||||
|
```
|
||||||
|
# Section Addr | Size | Addr | Alignment | Name | File
|
||||||
|
00017260 000188 80506dc0 4 mPadList__10JUTGamePad JSystem.a JUTGamePad.cpp
|
||||||
|
000173e8 000030 80506f48 4 mPadStatus__10JUTGamePad JSystem.a JUTGamePad.cpp
|
||||||
|
00017418 0000c0 80506f78 4 mPadButton__10JUTGamePad JSystem.a JUTGamePad.cpp
|
||||||
|
000174d8 000040 80507038 4 mPadMStick__10JUTGamePad JSystem.a JUTGamePad.cpp
|
||||||
|
00017518 000040 80507078 4 mPadSStick__10JUTGamePad JSystem.a JUTGamePad.cpp
|
||||||
|
00017558 00000c 805070b8 4 sPatternList__19JUTGamePadLongPress JSystem.a JUTGamePad.cpp
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, `mPadList__10JUTGamePad` is the first common symbol in the TU, and was inflated to include the size of all other common symbols in the TU. In reality, it's only supposed to be `0xC` bytes, given `0x188 - 0x30 - 0xC0 - 0x40 - 0x40 - 0xC`.
|
||||||
|
|
||||||
|
This can be useful to determine if symbols are in the same TU without a map: if a `.bss` symbol is much larger than expected, it could be the first common symbol in a TU. One can subtract the sizes of following symbols to find the true size of the symbol, along with the end of the TU's common symbols.
|
||||||
|
|
||||||
|
To reproduce this behavior, the `.comment` section must be present in the object. See [`.comment` section](comment_section.md) for more details.
|
|
@ -0,0 +1,33 @@
|
||||||
|
# Dependencies
|
||||||
|
|
||||||
|
## Windows:
|
||||||
|
|
||||||
|
On Windows, it's **highly recommended** to use native tooling. WSL or msys2 are **not** required.
|
||||||
|
When running under WSL, [objdiff](#diffing) is unable to get filesystem notifications for automatic rebuilds.
|
||||||
|
|
||||||
|
- Install [Python](https://www.python.org/downloads/) and add it to `%PATH%`.
|
||||||
|
- Also available from the [Windows Store](https://apps.microsoft.com/store/detail/python-311/9NRWMJP3717K).
|
||||||
|
- Download [ninja](https://github.com/ninja-build/ninja/releases) and add it to `%PATH%`.
|
||||||
|
- Quick install via pip: `pip install ninja`
|
||||||
|
|
||||||
|
## macOS:
|
||||||
|
|
||||||
|
- Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages):
|
||||||
|
```
|
||||||
|
brew install ninja
|
||||||
|
```
|
||||||
|
- Install [wine-crossover](https://github.com/Gcenx/homebrew-wine):
|
||||||
|
```
|
||||||
|
brew install --cask --no-quarantine gcenx/wine/wine-crossover
|
||||||
|
```
|
||||||
|
|
||||||
|
After OS upgrades, if macOS complains about `Wine Crossover.app` being unverified, you can unquarantine it using:
|
||||||
|
```sh
|
||||||
|
sudo xattr -rd com.apple.quarantine '/Applications/Wine Crossover.app'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Linux:
|
||||||
|
|
||||||
|
- Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages).
|
||||||
|
- For non-x86(_64) platforms: Install wine from your package manager.
|
||||||
|
- For x86(_64), [WiBo](https://github.com/decompals/WiBo), a minimal 32-bit Windows binary wrapper, will be automatically downloaded and used.
|
|
@ -0,0 +1,124 @@
|
||||||
|
# Getting Started
|
||||||
|
|
||||||
|
See [Dependencies](dependencies.md) first.
|
||||||
|
|
||||||
|
Clone this template repository.
|
||||||
|
|
||||||
|
Rename `orig/GAMEID` to the game's ID. (For example, `GZLE01` for _The Legend of Zelda: The Wind Waker_.)
|
||||||
|
|
||||||
|
Extract your game to `orig/[GAMEID]`. In Dolphin, use "Extract Entire Disc" for GameCube games, or use "Data Partition" -> "Extract Entire Partition" for Wii games.
|
||||||
|
|
||||||
|
Rename `config/GAMEID` to the game's ID and modify `config/[GAMEID]/config.yml` appropriately, using [`config.example.yml`](/config/GAMEID/config.example.yml) as a reference. If the game doesn't use RELs, the `modules` list in `config.yml` can be removed.
|
||||||
|
|
||||||
|
Generate a `config/[GAMEID]/build.sha1` file for verification. This file is a list of SHA-1 hashes for each build artifact. One possible way:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ dtk shasum orig/[GAMEID]/sys/main.dol orig/[GAMEID]/files/*.rel > config/[GAMEID]/build.sha1
|
||||||
|
```
|
||||||
|
|
||||||
|
Then, modify the paths in `config/[GAMEID]/build.sha1` to point to the `build` directory instead of `orig`. The DOL will be built at `build/[GAMEID]/main.dol`, and modules will be built at `build/[GAMEID]/[module_name]/[module_name].rel`.
|
||||||
|
|
||||||
|
Update `VERSIONS` in [`configure.py`](/configure.py) with the game ID.
|
||||||
|
|
||||||
|
Run `python configure.py` to generate the initial `build.ninja`.
|
||||||
|
|
||||||
|
Run `ninja` to perform initial analysis.
|
||||||
|
|
||||||
|
If all goes well, the initial `symbols.txt` and `splits.txt` should be automatically generated. Though it's likely it won't build yet. See [Post-analysis](#post-analysis) for next steps.
|
||||||
|
|
||||||
|
## Using a `.map`
|
||||||
|
|
||||||
|
If the game has `.map` files matching the DOL (and RELs, if applicable), they can be used to fill out `symbols.txt` and `splits.txt` automatically during the initial analysis.
|
||||||
|
|
||||||
|
Add the `map` key to `config.yml`, pointing to the `.map` file from the game disc. (For example, `orig/[GAMEID]/files/main.map`.) For RELs, add a `map` key to each module in `config.yml`.
|
||||||
|
|
||||||
|
If the game uses [common BSS](common_bss.md), be sure to set `common_start` as well. (See [`config.example.yml`](/config/GAMEID/config.example.yml).) Otherwise, the final link order may fail to be determined.
|
||||||
|
|
||||||
|
Once the initial analysis is completed, `symbols.txt` and `splits.txt` will be generated from the map information. **Remove** the `map` fields from `config.yml` to avoid conflicts.
|
||||||
|
|
||||||
|
## Post-analysis
|
||||||
|
|
||||||
|
After the initial analysis, `symbols.txt` and `splits.txt` will be generated. These files can be modified to adjust symbols and split points.
|
||||||
|
|
||||||
|
If the game uses C++ exceptions, it's required to set up a split for the `__init_cpp_exceptions.cpp` file. This differs between linker versions.
|
||||||
|
|
||||||
|
Often indicated by the following error:
|
||||||
|
|
||||||
|
```
|
||||||
|
# runtime sources 'global_destructor_chain.c' and
|
||||||
|
# '__init_cpp_exceptions.cpp' both need to be updated to latest version.
|
||||||
|
```
|
||||||
|
|
||||||
|
### GC 1.0 - 2.6 linkers:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# splits.txt
|
||||||
|
Runtime.PPCEABI.H/__init_cpp_exceptions.cpp:
|
||||||
|
.text start:0x803294EC end:0x80329568
|
||||||
|
.ctors start:0x80338680 end:0x80338684
|
||||||
|
.dtors start:0x80338820 end:0x80338828
|
||||||
|
.sdata start:0x803F67F0 end:0x803F67F8
|
||||||
|
```
|
||||||
|
|
||||||
|
`.text`:
|
||||||
|
Find the following symbols in `symbols.txt`:
|
||||||
|
```
|
||||||
|
GetR2__Fv = .text:0x803294EC; // type:function size:0x8 scope:local align:4
|
||||||
|
__fini_cpp_exceptions = .text:0x803294F4; // type:function size:0x34 scope:global align:4
|
||||||
|
__init_cpp_exceptions = .text:0x80329528; // type:function size:0x40 scope:global align:4
|
||||||
|
```
|
||||||
|
The split end is the address of `__init_cpp_exceptions` + size.
|
||||||
|
|
||||||
|
`.ctors`:
|
||||||
|
Find the address of `__init_cpp_exception_reference` or `_ctors` in `symbols.txt`.
|
||||||
|
Always size 4.
|
||||||
|
|
||||||
|
`.dtors`:
|
||||||
|
Look for the address of `__destroy_global_chain_reference` or `_dtors` in `symbols.txt`.
|
||||||
|
If `__fini_cpp_exceptions_reference` is present, it's size 8, otherwise size 4.
|
||||||
|
|
||||||
|
`.sdata`:
|
||||||
|
Find the following symbol in `symbols.txt`:
|
||||||
|
```
|
||||||
|
fragmentID = .sdata:0x803F67F0; // type:object size:0x4 scope:local align:4 data:4byte
|
||||||
|
```
|
||||||
|
The split end includes any inter-TU padding, so it's usually size 8.
|
||||||
|
|
||||||
|
### GC 2.7+ and Wii linkers:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# splits.txt
|
||||||
|
Runtime.PPCEABI.H/__init_cpp_exceptions.cpp:
|
||||||
|
.text start:0x80345C34 end:0x80345CA4
|
||||||
|
.ctors start:0x803A54A0 end:0x803A54A4 rename:.ctors$10
|
||||||
|
.dtors start:0x803A56A0 end:0x803A56A4 rename:.dtors$10
|
||||||
|
.dtors start:0x803A56A4 end:0x803A56A8 rename:.dtors$15
|
||||||
|
.sdata start:0x80418CA8 end:0x80418CB0
|
||||||
|
```
|
||||||
|
|
||||||
|
`.text`:
|
||||||
|
Find the following symbols in `symbols.txt`:
|
||||||
|
```
|
||||||
|
__fini_cpp_exceptions = .text:0x80345C34; // type:function size:0x34 scope:global
|
||||||
|
__init_cpp_exceptions = .text:0x80345C68; // type:function size:0x3C scope:global
|
||||||
|
```
|
||||||
|
The split end is the address of `__init_cpp_exceptions` + size.
|
||||||
|
|
||||||
|
`.ctors$10`:
|
||||||
|
Find the address of `__init_cpp_exception_reference` or `_ctors` in `symbols.txt`.
|
||||||
|
Always size 4.
|
||||||
|
|
||||||
|
`.dtors$10`:
|
||||||
|
Look for the address of `__destroy_global_chain_reference` or `_dtors` in `symbols.txt`.
|
||||||
|
Always size 4.
|
||||||
|
|
||||||
|
`.dtors$15`:
|
||||||
|
Look for the address of `__fini_cpp_exceptions_reference` in `symbols.txt`.
|
||||||
|
Always size 4.
|
||||||
|
|
||||||
|
`.sdata`:
|
||||||
|
Find the following symbol in `symbols.txt`:
|
||||||
|
```
|
||||||
|
fragmentID = .sdata:0x80418CA8; // type:object size:0x4 scope:local data:4byte
|
||||||
|
```
|
||||||
|
The split end includes any inter-TU padding, so it's usually size 8.
|
|
@ -0,0 +1,42 @@
|
||||||
|
# `splits.txt`
|
||||||
|
|
||||||
|
This file contains file splits for a module.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```
|
||||||
|
path/to/file.cpp:
|
||||||
|
.text start:0x80047E5C end:0x8004875C
|
||||||
|
.ctors start:0x803A54C4 end:0x803A54C8
|
||||||
|
.data start:0x803B1B40 end:0x803B1B60
|
||||||
|
.bss start:0x803DF828 end:0x803DFA8C
|
||||||
|
.bss start:0x8040D4AC end:0x8040D4D8 common
|
||||||
|
```
|
||||||
|
|
||||||
|
## Format
|
||||||
|
|
||||||
|
```
|
||||||
|
path/to/file.cpp: [file attributes]
|
||||||
|
section [section attributes]
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
- `path/to/file.cpp` The name of the source file, usually relative to `src`. The file does **not** need to exist to start.
|
||||||
|
This corresponds to an entry in `configure.py` for specifying compiler flags and other options.
|
||||||
|
|
||||||
|
### File attributes
|
||||||
|
|
||||||
|
- `comment:` Overrides the `mw_comment_version` setting in [`config.yml`](/config/GAMEID/config.example.yml) for this file. See [Comment section](comment_section.md).
|
||||||
|
|
||||||
|
`comment:0` is used to disable `.comment` section generation for a file that wasn't compiled with `mwcc`.
|
||||||
|
Example: `TRK_MINNOW_DOLPHIN/ppc/Export/targsupp.s: comment:0`
|
||||||
|
This file was assembled and only contains label symbols. Generating a `.comment` section for it will crash `mwld`.
|
||||||
|
|
||||||
|
### Section attributes
|
||||||
|
|
||||||
|
- `start:` The start address of the section within the file. For DOLs, this is the absolute address (e.g. `0x80001234`). For RELs, this is the section-relative address (e.g. `0x1234`).
|
||||||
|
- `end:` The end address of the section within the file.
|
||||||
|
- `align:` Specifies the alignment of the section. If not specified, the default alignment for the section is used.
|
||||||
|
- `rename:` Writes this section under a different name when generating the split object. Used for `.ctors$10`, etc.
|
||||||
|
- `common` Only valid for `.bss`. See [Common BSS](common_bss.md).
|
||||||
|
- `skip` Skips this data when writing the object file. Used for ignoring data that's linker-generated.
|
|
@ -0,0 +1,35 @@
|
||||||
|
# `symbols.txt`
|
||||||
|
|
||||||
|
This file contains all symbols for a module, one per line.
|
||||||
|
|
||||||
|
Example line:
|
||||||
|
```
|
||||||
|
__dt__13mDoExt_bckAnmFv = .text:0x800DD2EC; // type:function size:0x5C scope:global align:4
|
||||||
|
```
|
||||||
|
|
||||||
|
## Format
|
||||||
|
|
||||||
|
Numbers can be written as decimal or hexadecimal. Hexadecimal numbers must be prefixed with `0x`.
|
||||||
|
|
||||||
|
Comment lines starting with `//` or `#` are permitted, but are currently **not** preserved when updating the file.
|
||||||
|
|
||||||
|
```
|
||||||
|
symbol_name = section:address; // [attributes]
|
||||||
|
```
|
||||||
|
|
||||||
|
- `symbol_name` - The name of the symbol. (For C++, this is the mangled name, e.g. `__dt__13mDoExt_bckAnmFv`)
|
||||||
|
- `section` - The section the symbol is in.
|
||||||
|
- `address` - The address of the symbol. For DOLs, this is the absolute address (e.g. `0x80001234`). For RELs, this is the section-relative address (e.g. `0x1234`).
|
||||||
|
|
||||||
|
### Attributes
|
||||||
|
|
||||||
|
All attributes are optional, and are separated by spaces.
|
||||||
|
|
||||||
|
- `type:` The symbol type. `function`, `object`, or `label`.
|
||||||
|
- `size:` The size of the symbol.
|
||||||
|
- `scope:` The symbol's visibility. `global` (default), `local` or `weak`.
|
||||||
|
- `align:` The symbol's alignment.
|
||||||
|
- `data:` The data type used when writing disassembly. `byte`, `2byte`, `4byte`, `8byte`, `float`, `double`, `string`, `wstring`, `string_table`, or `wstring_table`.
|
||||||
|
- `hidden` Marked as "hidden" in the generated object. (Only used for extab)
|
||||||
|
- `force_active` Marked as ["exported"](comment_section.md) in the generated object, and added to `FORCEACTIVE` in the generated `ldscript.lcf`. Prevents the symbol from being deadstripped.
|
||||||
|
- `noreloc` Prevents the _contents_ of the symbol from being interpreted as addresses. Used for objects containing data that look like pointers, but aren't.
|
|
@ -0,0 +1,87 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
###
|
||||||
|
# Generates a ctx.c file, usable for "Context" on https://decomp.me.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# python3 tools/decompctx.py src/file.cpp
|
||||||
|
#
|
||||||
|
# If changes are made, please submit a PR to
|
||||||
|
# https://github.com/encounter/dtk-template
|
||||||
|
###
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
# Repository layout, resolved relative to this script's location (tools/).
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.abspath(os.path.join(script_dir, ".."))
src_dir = os.path.join(root_dir, "src")
include_dir = os.path.join(root_dir, "include")

# Matches `#include <...>` or `#include "..."` and captures the header path.
include_pattern = re.compile(r'^#include\s*[<"](.+?)[>"]$')
# Matches the `#ifndef GUARD_NAME` line of a traditional include guard.
guard_pattern = re.compile(r'^#ifndef\s+(.*)$')

# Include-guard macros seen so far; used to skip already-inlined headers.
defines = set()
|
||||||
|
|
||||||
|
def import_h_file(in_file: str, r_path: str) -> str:
    """Resolve an #include target and return its contents, fully inlined.

    Searches relative to *r_path* (the including file's directory) first,
    then falls back to the global include directory. Terminates the program
    if the header cannot be located.
    """
    rel_path = os.path.join(root_dir, r_path, in_file)
    inc_path = os.path.join(include_dir, in_file)
    if os.path.exists(rel_path):
        return import_c_file(rel_path)
    if os.path.exists(inc_path):
        return import_c_file(inc_path)
    print("Failed to locate", in_file)
    # Raise SystemExit directly instead of the site-injected exit() helper,
    # which is not guaranteed to exist (e.g. when run with python -S).
    raise SystemExit(1)
|
||||||
|
|
||||||
|
def import_c_file(in_file) -> str:
    """Read a source file and return its contents with #includes inlined.

    Tries UTF-8 first, then retries with the platform default encoding for
    files in legacy encodings (e.g. Shift-JIS vendor headers).
    """
    in_file = os.path.relpath(in_file, root_dir)
    out_text = ""

    try:
        with open(in_file, encoding="utf-8") as file:
            out_text += process_file(in_file, list(file))
    # Retry only on decoding failures; a bare `except Exception` would also
    # swallow genuine I/O errors and retry them pointlessly.
    except UnicodeDecodeError:
        with open(in_file) as file:
            out_text += process_file(in_file, list(file))
    return out_text
|
||||||
|
|
||||||
|
def process_file(in_file: str, lines) -> str:
    """Inline the #include directives of *in_file* into a single string.

    `lines` is the file's content as a list of lines. If the first line is an
    include guard already recorded in `defines`, the file has been inlined
    before and an empty string is returned.
    """
    out_text = ""
    for idx, line in enumerate(lines):
        if idx == 0:
            # Only the first line can open an include guard.
            guard_match = guard_pattern.match(line.strip())
            if guard_match:
                if guard_match[1] in defines:
                    break
                defines.add(guard_match[1])
            print("Processing file", in_file)
        include_match = include_pattern.match(line.strip())
        if include_match and not include_match[1].endswith(".s"):
            # enumerate() is 0-based; emit conventional 1-based line numbers.
            out_text += f"/* \"{in_file}\" line {idx + 1} \"{include_match[1]}\" */\n"
            out_text += import_h_file(include_match[1], os.path.dirname(in_file))
            out_text += f"/* end \"{include_match[1]}\" */\n"
        else:
            out_text += line

    return out_text
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: inline all includes of a source file into ctx.c."""
    arg_parser = argparse.ArgumentParser(
        description="""Create a context file which can be used for decomp.me"""
    )
    arg_parser.add_argument(
        "c_file",
        help="""File from which to create context""",
    )
    opts = arg_parser.parse_args()

    context = import_c_file(opts.c_file)

    # The context is always written next to the repository root as ctx.c.
    with open(os.path.join(root_dir, "ctx.c"), "w", encoding="utf-8") as ctx_file:
        ctx_file.write(context)
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running as a script: python3 tools/decompctx.py src/file.cpp
if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,93 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
###
|
||||||
|
# Downloads various tools from GitHub releases.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# python3 tools/download_tool.py wibo build/tools/wibo --tag 1.0.0
|
||||||
|
#
|
||||||
|
# If changes are made, please submit a PR to
|
||||||
|
# https://github.com/encounter/dtk-template
|
||||||
|
###
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import shutil
|
||||||
|
import stat
|
||||||
|
import sys
|
||||||
|
import urllib.request
|
||||||
|
import zipfile
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def dtk_url(tag):
|
||||||
|
uname = platform.uname()
|
||||||
|
suffix = ""
|
||||||
|
system = uname.system.lower()
|
||||||
|
if system == "darwin":
|
||||||
|
system = "macos"
|
||||||
|
elif system == "windows":
|
||||||
|
suffix = ".exe"
|
||||||
|
arch = uname.machine.lower()
|
||||||
|
if arch == "amd64":
|
||||||
|
arch = "x86_64"
|
||||||
|
|
||||||
|
repo = "https://github.com/encounter/decomp-toolkit"
|
||||||
|
return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"
|
||||||
|
|
||||||
|
|
||||||
|
def sjiswrap_url(tag):
    """Build the sjiswrap release asset URL for the given tag."""
    base = "https://github.com/encounter/sjiswrap"
    return "/".join([base, "releases", "download", tag, "sjiswrap-windows-x86.exe"])
|
||||||
|
|
||||||
|
|
||||||
|
def wibo_url(tag):
    """Build the wibo release asset URL for the given tag."""
    return "https://github.com/decompals/wibo/releases/download/%s/wibo" % tag
|
||||||
|
|
||||||
|
|
||||||
|
def compilers_url(tag):
    """Return the compiler archive URL for a known tag; exit otherwise."""
    known = {
        "1": "https://cdn.discordapp.com/attachments/727918646525165659/1129759991696457728/GC_WII_COMPILERS.zip",
    }
    if tag not in known:
        sys.exit("Unknown compilers tag %s" % tag)
    return known[tag]
|
||||||
|
|
||||||
|
|
||||||
|
# Maps the tool name (first CLI argument) to the function that builds its
# download URL from a release tag.
TOOLS = {
    "dtk": dtk_url,
    "sjiswrap": sjiswrap_url,
    "wibo": wibo_url,
    "compilers": compilers_url,
}
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Download a tool release asset to the given path and mark it executable.

    Usage: download_tool.py <tool> <output> --tag <tag>
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("tool", help="Tool name")
    parser.add_argument("output", type=Path, help="output file path")
    parser.add_argument("--tag", help="GitHub tag", required=True)
    args = parser.parse_args()

    # Resolve the download URL via the per-tool URL builder in TOOLS.
    url = TOOLS[args.tool](args.tag)
    output = Path(args.output)

    print(f"Downloading {url} to {output}")
    # Some hosts reject urllib's default User-Agent, so present a browser one.
    req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0"})
    with urllib.request.urlopen(req) as response:
        if url.endswith(".zip"):
            # Zip assets are extracted into the output directory.
            data = io.BytesIO(response.read())
            with zipfile.ZipFile(data) as f:
                f.extractall(output)
            # NOTE(review): Path.touch() applies `mode` only when it creates a
            # new file; on an existing path it just updates the mtime. Confirm
            # whether the intent was to chmod the extracted contents.
            output.touch(mode=0o755)
        else:
            # Plain binary assets are streamed to disk, then marked executable.
            with open(output, "wb") as f:
                shutil.copyfileobj(response, f)
            st = os.stat(output)
            os.chmod(output, st.st_mode | stat.S_IEXEC)
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running as a script: python3 tools/download_tool.py <tool> <out> --tag <tag>
if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,223 @@
|
||||||
|
# Copyright 2011 Google Inc. All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Python module for generating .ninja files.
|
||||||
|
|
||||||
|
Note that this is emphatically not a required piece of Ninja; it's
|
||||||
|
just a helpful utility for build-file-generation systems that already
|
||||||
|
use Python.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import textwrap
|
||||||
|
|
||||||
|
|
||||||
|
def escape_path(word):
    """Escape spaces and colons so *word* is safe as a Ninja path token."""
    # Order matters: pre-escaped "$ " is doubled before raw spaces are escaped.
    for needle, replacement in (("$ ", "$$ "), (" ", "$ "), (":", "$:")):
        word = word.replace(needle, replacement)
    return word
|
||||||
|
|
||||||
|
|
||||||
|
class Writer(object):
    """Writes a well-formed .ninja file to an output stream.

    Handles Ninja's '$'-escaping rules and wraps long lines with trailing
    '$' continuations at `width` columns.
    """

    def __init__(self, output, width=78):
        # output: any file-like object with write(); width: wrap column.
        self.output = output
        self.width = width

    def newline(self):
        """Emit a blank line."""
        self.output.write("\n")

    def comment(self, text):
        """Emit `text` as '# ' comment lines, word-wrapped to the width."""
        for line in textwrap.wrap(
            text, self.width - 2, break_long_words=False, break_on_hyphens=False
        ):
            self.output.write("# " + line + "\n")

    def variable(self, key, value, indent=0):
        """Emit a `key = value` binding; list values are space-joined.

        A value of None emits nothing (the variable is omitted entirely).
        """
        if value is None:
            return
        if isinstance(value, list):
            value = " ".join(filter(None, value))  # Filter out empty strings.
        self._line("%s = %s" % (key, value), indent)

    def pool(self, name, depth):
        """Emit a pool declaration limiting concurrent jobs to `depth`."""
        self._line("pool %s" % name)
        self.variable("depth", depth, indent=1)

    def rule(
        self,
        name,
        command,
        description=None,
        depfile=None,
        generator=False,
        pool=None,
        restat=False,
        rspfile=None,
        rspfile_content=None,
        deps=None,
    ):
        """Emit a rule declaration; optional arguments become rule variables."""
        self._line("rule %s" % name)
        self.variable("command", command, indent=1)
        if description:
            self.variable("description", description, indent=1)
        if depfile:
            self.variable("depfile", depfile, indent=1)
        if generator:
            self.variable("generator", "1", indent=1)
        if pool:
            self.variable("pool", pool, indent=1)
        if restat:
            self.variable("restat", "1", indent=1)
        if rspfile:
            self.variable("rspfile", rspfile, indent=1)
        if rspfile_content:
            self.variable("rspfile_content", rspfile_content, indent=1)
        if deps:
            self.variable("deps", deps, indent=1)

    def build(
        self,
        outputs,
        rule,
        inputs=None,
        implicit=None,
        order_only=None,
        variables=None,
        implicit_outputs=None,
        pool=None,
        dyndep=None,
    ):
        """Emit a build statement and return the explicit output list.

        Implicit inputs are separated from explicit ones with '|', order-only
        inputs with '||', and implicit outputs with '|' after the outputs.
        """
        outputs = as_list(outputs)
        out_outputs = [escape_path(x) for x in outputs]
        all_inputs = [escape_path(x) for x in as_list(inputs)]

        if implicit:
            implicit = [escape_path(x) for x in as_list(implicit)]
            all_inputs.append("|")
            all_inputs.extend(implicit)
        if order_only:
            order_only = [escape_path(x) for x in as_list(order_only)]
            all_inputs.append("||")
            all_inputs.extend(order_only)
        if implicit_outputs:
            implicit_outputs = [escape_path(x) for x in as_list(implicit_outputs)]
            out_outputs.append("|")
            out_outputs.extend(implicit_outputs)

        self._line(
            "build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs))
        )
        if pool is not None:
            self._line(" pool = %s" % pool)
        if dyndep is not None:
            self._line(" dyndep = %s" % dyndep)

        if variables:
            if isinstance(variables, dict):
                iterator = iter(variables.items())
            else:
                # Assumes an iterable of (key, value) pairs.
                iterator = iter(variables)

            for key, val in iterator:
                self.variable(key, val, indent=1)

        return outputs

    def include(self, path):
        """Emit an include statement."""
        self._line("include %s" % path)

    def subninja(self, path):
        """Emit a subninja statement."""
        self._line("subninja %s" % path)

    def default(self, paths):
        """Emit a default-targets statement."""
        self._line("default %s" % " ".join(as_list(paths)))

    def _count_dollars_before_index(self, s, i):
        """Returns the number of '$' characters right in front of s[i]."""
        dollar_count = 0
        dollar_index = i - 1
        while dollar_index > 0 and s[dollar_index] == "$":
            dollar_count += 1
            dollar_index -= 1
        return dollar_count

    def _line(self, text, indent=0):
        """Write 'text' word-wrapped at self.width characters."""
        leading_space = " " * indent
        while len(leading_space) + len(text) > self.width:
            # The text is too wide; wrap if possible.

            # Find the rightmost space that would obey our width constraint and
            # that's not an escaped space.
            available_space = self.width - len(leading_space) - len(" $")
            space = available_space
            while True:
                space = text.rfind(" ", 0, space)
                # An even number of preceding '$' means the space is unescaped.
                if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
                    break

            if space < 0:
                # No such space; just use the first unescaped space we can find.
                space = available_space - 1
                while True:
                    space = text.find(" ", space + 1)
                    if (
                        space < 0
                        or self._count_dollars_before_index(text, space) % 2 == 0
                    ):
                        break
                if space < 0:
                    # Give up on breaking.
                    break

            self.output.write(leading_space + text[0:space] + " $\n")
            text = text[space + 1 :]

            # Subsequent lines are continuations, so indent them.
            leading_space = " " * (indent + 2)

        self.output.write(leading_space + text + "\n")

    def close(self):
        """Close the underlying output stream."""
        self.output.close()
|
||||||
|
|
||||||
|
|
||||||
|
def as_list(input):
    """Normalize to a list: None -> [], list -> unchanged, scalar -> [scalar]."""
    if isinstance(input, list):
        return input
    return [] if input is None else [input]
|
||||||
|
|
||||||
|
|
||||||
|
def escape(string):
    """Escape a string such that it can be embedded into a Ninja file without
    further interpretation."""
    assert "\n" not in string, "Ninja syntax does not allow newlines"
    # '$' is Ninja's only metacharacter; doubling it makes it literal.
    return "$$".join(string.split("$"))
|
||||||
|
|
||||||
|
|
||||||
|
def expand(string, vars, local_vars=None):
    """Expand a string containing $vars as Ninja would.

    Note: doesn't handle the full Ninja variable syntax, but it's enough
    to make configure.py's use of it work.
    """
    # Avoid a shared mutable default argument; None means "no local scope".
    if local_vars is None:
        local_vars = {}

    def exp(m):
        var = m.group(1)
        if var == "$":
            return "$"
        # Local bindings shadow the global variable scope; unknowns expand
        # to the empty string, matching Ninja's behavior.
        return local_vars.get(var, vars.get(var, ""))

    return re.sub(r"\$(\$|\w*)", exp, string)
|
|
@ -0,0 +1,982 @@
|
||||||
|
###
|
||||||
|
# decomp-toolkit project generator
|
||||||
|
# Generates build.ninja and objdiff.json.
|
||||||
|
#
|
||||||
|
# This generator is intentionally project-agnostic
|
||||||
|
# and shared between multiple projects. Any configuration
|
||||||
|
# specific to a project should be added to `configure.py`.
|
||||||
|
#
|
||||||
|
# If changes are made, please submit a PR to
|
||||||
|
# https://github.com/encounter/dtk-template
|
||||||
|
###
|
||||||
|
|
||||||
|
import io
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from . import ninja_syntax
|
||||||
|
|
||||||
|
if sys.platform == "cygwin":
|
||||||
|
sys.exit(
|
||||||
|
f"Cygwin/MSYS2 is not supported."
|
||||||
|
f"\nPlease use native Windows Python instead."
|
||||||
|
f"\n(Current path: {sys.executable})"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectConfig:
|
||||||
|
def __init__(self):
|
||||||
|
# Paths
|
||||||
|
self.build_dir = Path("build")
|
||||||
|
self.src_dir = Path("src")
|
||||||
|
self.tools_dir = Path("tools")
|
||||||
|
|
||||||
|
# Tooling
|
||||||
|
self.dtk_tag = None # Git tag
|
||||||
|
self.build_dtk_path = None # If None, download
|
||||||
|
self.compilers_tag = None # 1
|
||||||
|
self.compilers_path = None # If None, download
|
||||||
|
self.wibo_tag = None # Git tag
|
||||||
|
self.wrapper = None # If None, download wibo on Linux
|
||||||
|
self.sjiswrap_tag = None # Git tag
|
||||||
|
self.sjiswrap_path = None # If None, download
|
||||||
|
|
||||||
|
# Project config
|
||||||
|
self.build_rels = True # Build REL files
|
||||||
|
self.check_sha_path = None # Path to version.sha1
|
||||||
|
self.config_path = None # Path to config.yml
|
||||||
|
self.debug = False # Build with debug info
|
||||||
|
self.generate_map = False # Generate map file(s)
|
||||||
|
self.ldflags = None # Linker flags
|
||||||
|
self.libs = None # List of libraries
|
||||||
|
self.linker_version = None # mwld version
|
||||||
|
self.version = None # Version name
|
||||||
|
self.warn_missing_config = False # Warn on missing unit configuration
|
||||||
|
self.warn_missing_source = False # Warn on missing source file
|
||||||
|
|
||||||
|
# Progress output and progress.json config
|
||||||
|
self.progress_all = True # Include combined "all" category
|
||||||
|
self.progress_modules = True # Include combined "modules" category
|
||||||
|
self.progress_each_module = (
|
||||||
|
True # Include individual modules, disable for large numbers of modules
|
||||||
|
)
|
||||||
|
|
||||||
|
def validate(self):
|
||||||
|
required_attrs = [
|
||||||
|
"build_dir",
|
||||||
|
"src_dir",
|
||||||
|
"tools_dir",
|
||||||
|
"check_sha_path",
|
||||||
|
"config_path",
|
||||||
|
"ldflags",
|
||||||
|
"linker_version",
|
||||||
|
"libs",
|
||||||
|
"version",
|
||||||
|
]
|
||||||
|
for attr in required_attrs:
|
||||||
|
if getattr(self, attr) is None:
|
||||||
|
sys.exit(f"ProjectConfig.{attr} missing")
|
||||||
|
|
||||||
|
def find_object(self, name):
|
||||||
|
for lib in self.libs:
|
||||||
|
for obj in lib["objects"]:
|
||||||
|
if obj.name == name:
|
||||||
|
return [lib, obj]
|
||||||
|
return None
|
||||||
|
|
||||||
|
def out_path(self):
|
||||||
|
return self.build_dir / self.version
|
||||||
|
|
||||||
|
|
||||||
|
class Object:
|
||||||
|
def __init__(self, completed, name, **options):
|
||||||
|
self.name = name
|
||||||
|
self.completed = completed
|
||||||
|
self.options = {
|
||||||
|
"add_to_all": True,
|
||||||
|
"cflags": None,
|
||||||
|
"mw_version": None,
|
||||||
|
"shiftjis": True,
|
||||||
|
"source": name,
|
||||||
|
}
|
||||||
|
self.options.update(options)
|
||||||
|
|
||||||
|
|
||||||
|
def is_windows():
|
||||||
|
return os.name == "nt"
|
||||||
|
|
||||||
|
|
||||||
|
# On Windows, we need this to use && in commands
|
||||||
|
CHAIN = "cmd /c " if is_windows() else ""
|
||||||
|
# Native executable extension
|
||||||
|
EXE = ".exe" if is_windows() else ""
|
||||||
|
|
||||||
|
|
||||||
|
# Replace forward slashes with backslashes on Windows
|
||||||
|
def os_str(value):
|
||||||
|
return str(value).replace("/", os.sep)
|
||||||
|
|
||||||
|
|
||||||
|
# Replace backslashes with forward slashes on Windows
|
||||||
|
def unix_str(value):
|
||||||
|
return str(value).replace(os.sep, "/")
|
||||||
|
|
||||||
|
|
||||||
|
# Stringify paths for ninja_syntax
|
||||||
|
def path(value):
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
elif isinstance(value, list):
|
||||||
|
return list(map(os_str, filter(lambda x: x is not None, value)))
|
||||||
|
else:
|
||||||
|
return [os_str(value)]
|
||||||
|
|
||||||
|
|
||||||
|
# Load decomp-toolkit generated config.json
|
||||||
|
def load_build_config(config, build_config_path):
|
||||||
|
if not build_config_path.is_file():
|
||||||
|
return None
|
||||||
|
|
||||||
|
def versiontuple(v):
|
||||||
|
return tuple(map(int, (v.split("."))))
|
||||||
|
|
||||||
|
f = open(build_config_path, "r", encoding="utf-8")
|
||||||
|
build_config = json.load(f)
|
||||||
|
config_version = build_config.get("version")
|
||||||
|
if not config_version:
|
||||||
|
# Invalid config.json
|
||||||
|
f.close()
|
||||||
|
os.remove(build_config_path)
|
||||||
|
return None
|
||||||
|
|
||||||
|
dtk_version = config.dtk_tag[1:] # Strip v
|
||||||
|
if versiontuple(config_version) < versiontuple(dtk_version):
|
||||||
|
# Outdated config.json
|
||||||
|
f.close()
|
||||||
|
os.remove(build_config_path)
|
||||||
|
return None
|
||||||
|
|
||||||
|
f.close()
|
||||||
|
return build_config
|
||||||
|
|
||||||
|
|
||||||
|
# Generate build.ninja and objdiff.json
|
||||||
|
def generate_build(config):
|
||||||
|
build_config = load_build_config(config, config.out_path() / "config.json")
|
||||||
|
generate_build_ninja(config, build_config)
|
||||||
|
generate_objdiff_config(config, build_config)
|
||||||
|
|
||||||
|
|
||||||
|
# Generate build.ninja
|
||||||
|
def generate_build_ninja(config, build_config):
|
||||||
|
config.validate()
|
||||||
|
|
||||||
|
out = io.StringIO()
|
||||||
|
n = ninja_syntax.Writer(out)
|
||||||
|
n.variable("ninja_required_version", "1.3")
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
configure_script = os.path.relpath(os.path.abspath(sys.argv[0]))
|
||||||
|
python_lib = os.path.relpath(__file__)
|
||||||
|
python_lib_dir = os.path.dirname(python_lib)
|
||||||
|
n.comment("The arguments passed to configure.py, for rerunning it.")
|
||||||
|
n.variable("configure_args", sys.argv[1:])
|
||||||
|
n.variable("python", f'"{sys.executable}"')
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Variables
|
||||||
|
###
|
||||||
|
n.comment("Variables")
|
||||||
|
ldflags = " ".join(config.ldflags)
|
||||||
|
if config.generate_map:
|
||||||
|
ldflags += " -mapunused"
|
||||||
|
if config.debug:
|
||||||
|
ldflags += " -g"
|
||||||
|
n.variable("ldflags", ldflags)
|
||||||
|
n.variable("mw_version", config.linker_version)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Tooling
|
||||||
|
###
|
||||||
|
n.comment("Tooling")
|
||||||
|
|
||||||
|
build_path = config.out_path()
|
||||||
|
build_tools_path = config.build_dir / "tools"
|
||||||
|
download_tool = config.tools_dir / "download_tool.py"
|
||||||
|
n.rule(
|
||||||
|
name="download_tool",
|
||||||
|
command=f"$python {download_tool} $tool $out --tag $tag",
|
||||||
|
description="TOOL $out",
|
||||||
|
)
|
||||||
|
|
||||||
|
if config.build_dtk_path:
|
||||||
|
dtk = build_tools_path / "release" / f"dtk{EXE}"
|
||||||
|
n.rule(
|
||||||
|
name="cargo",
|
||||||
|
command="cargo build --release --manifest-path $in --bin $bin --target-dir $target",
|
||||||
|
description="CARGO $bin",
|
||||||
|
depfile=path(Path("$target") / "release" / "$bin.d"),
|
||||||
|
deps="gcc",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs=path(dtk),
|
||||||
|
rule="cargo",
|
||||||
|
inputs=path(config.build_dtk_path / "Cargo.toml"),
|
||||||
|
implicit=path(config.build_dtk_path / "Cargo.lock"),
|
||||||
|
variables={
|
||||||
|
"bin": "dtk",
|
||||||
|
"target": build_tools_path,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
elif config.dtk_tag:
|
||||||
|
dtk = build_tools_path / f"dtk{EXE}"
|
||||||
|
n.build(
|
||||||
|
outputs=path(dtk),
|
||||||
|
rule="download_tool",
|
||||||
|
implicit=path(download_tool),
|
||||||
|
variables={
|
||||||
|
"tool": "dtk",
|
||||||
|
"tag": config.dtk_tag,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
sys.exit("ProjectConfig.dtk_tag missing")
|
||||||
|
|
||||||
|
if config.sjiswrap_path:
|
||||||
|
sjiswrap = config.sjiswrap_path
|
||||||
|
elif config.sjiswrap_tag:
|
||||||
|
sjiswrap = build_tools_path / "sjiswrap.exe"
|
||||||
|
n.build(
|
||||||
|
outputs=path(sjiswrap),
|
||||||
|
rule="download_tool",
|
||||||
|
implicit=path(download_tool),
|
||||||
|
variables={
|
||||||
|
"tool": "sjiswrap",
|
||||||
|
"tag": config.sjiswrap_tag,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
sys.exit("ProjectConfig.sjiswrap_tag missing")
|
||||||
|
|
||||||
|
# Only add an implicit dependency on wibo if we download it
|
||||||
|
wrapper = config.wrapper
|
||||||
|
wrapper_implicit = None
|
||||||
|
if (
|
||||||
|
config.wibo_tag is not None
|
||||||
|
and sys.platform == "linux"
|
||||||
|
and platform.machine() in ("i386", "x86_64")
|
||||||
|
and config.wrapper is None
|
||||||
|
):
|
||||||
|
wrapper = build_tools_path / "wibo"
|
||||||
|
wrapper_implicit = wrapper
|
||||||
|
n.build(
|
||||||
|
outputs=path(wrapper),
|
||||||
|
rule="download_tool",
|
||||||
|
implicit=path(download_tool),
|
||||||
|
variables={
|
||||||
|
"tool": "wibo",
|
||||||
|
"tag": config.wibo_tag,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if not is_windows() and wrapper is None:
|
||||||
|
wrapper = "wine"
|
||||||
|
wrapper_cmd = f"{wrapper} " if wrapper else ""
|
||||||
|
|
||||||
|
compilers_implicit = None
|
||||||
|
if config.compilers_path:
|
||||||
|
compilers = config.compilers_path
|
||||||
|
elif config.compilers_tag:
|
||||||
|
compilers = config.build_dir / "compilers"
|
||||||
|
compilers_implicit = compilers
|
||||||
|
n.build(
|
||||||
|
outputs=path(compilers),
|
||||||
|
rule="download_tool",
|
||||||
|
implicit=path(download_tool),
|
||||||
|
variables={
|
||||||
|
"tool": "compilers",
|
||||||
|
"tag": config.compilers_tag,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Build rules
|
||||||
|
###
|
||||||
|
compiler_path = compilers / "$mw_version"
|
||||||
|
|
||||||
|
# MWCC
|
||||||
|
mwcc = compiler_path / "mwcceppc.exe"
|
||||||
|
mwcc_cmd = f"{wrapper_cmd}{mwcc} $cflags -MMD -c $in -o $basedir"
|
||||||
|
mwcc_implicit = [compilers_implicit or mwcc, wrapper_implicit]
|
||||||
|
|
||||||
|
# MWCC with UTF-8 to Shift JIS wrapper
|
||||||
|
mwcc_sjis_cmd = f"{wrapper_cmd}{sjiswrap} {mwcc} $cflags -MMD -c $in -o $basedir"
|
||||||
|
mwcc_sjis_implicit = [*mwcc_implicit, sjiswrap]
|
||||||
|
|
||||||
|
# MWLD
|
||||||
|
mwld = compiler_path / "mwldeppc.exe"
|
||||||
|
mwld_cmd = f"{wrapper_cmd}{mwld} $ldflags -o $out @$out.rsp"
|
||||||
|
mwld_implicit = [compilers_implicit or mwld, wrapper_implicit]
|
||||||
|
|
||||||
|
if os.name != "nt":
|
||||||
|
transform_dep = config.tools_dir / "transform_dep.py"
|
||||||
|
mwcc_cmd += f" && $python {transform_dep} $basefile.d $basefile.d"
|
||||||
|
mwcc_sjis_cmd += f" && $python {transform_dep} $basefile.d $basefile.d"
|
||||||
|
mwcc_implicit.append(transform_dep)
|
||||||
|
mwcc_sjis_implicit.append(transform_dep)
|
||||||
|
|
||||||
|
n.comment("Link ELF file")
|
||||||
|
n.rule(
|
||||||
|
name="link",
|
||||||
|
command=mwld_cmd,
|
||||||
|
description="LINK $out",
|
||||||
|
rspfile="$out.rsp",
|
||||||
|
rspfile_content="$in_newline",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
n.comment("Generate DOL")
|
||||||
|
n.rule(
|
||||||
|
name="elf2dol",
|
||||||
|
command=f"{dtk} elf2dol $in $out",
|
||||||
|
description="DOL $out",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
n.comment("Generate REL(s)")
|
||||||
|
makerel_rsp = build_path / "makerel.rsp"
|
||||||
|
n.rule(
|
||||||
|
name="makerel",
|
||||||
|
command=f"{dtk} rel make -w -c $config @{makerel_rsp}",
|
||||||
|
description="REL",
|
||||||
|
rspfile=path(makerel_rsp),
|
||||||
|
rspfile_content="$in_newline",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
n.comment("MWCC build")
|
||||||
|
n.rule(
|
||||||
|
name="mwcc",
|
||||||
|
command=mwcc_cmd,
|
||||||
|
description="MWCC $out",
|
||||||
|
depfile="$basefile.d",
|
||||||
|
deps="gcc",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
n.comment("MWCC build (with UTF-8 to Shift JIS wrapper)")
|
||||||
|
n.rule(
|
||||||
|
name="mwcc_sjis",
|
||||||
|
command=mwcc_sjis_cmd,
|
||||||
|
description="MWCC $out",
|
||||||
|
depfile="$basefile.d",
|
||||||
|
deps="gcc",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
n.comment("Host build")
|
||||||
|
n.variable("host_cflags", "-I include -Wno-trigraphs")
|
||||||
|
n.variable(
|
||||||
|
"host_cppflags",
|
||||||
|
"-std=c++98 -I include -fno-exceptions -fno-rtti -D_CRT_SECURE_NO_WARNINGS -Wno-trigraphs -Wno-c++11-extensions",
|
||||||
|
)
|
||||||
|
n.rule(
|
||||||
|
name="host_cc",
|
||||||
|
command="clang $host_cflags -c -o $out $in",
|
||||||
|
description="CC $out",
|
||||||
|
)
|
||||||
|
n.rule(
|
||||||
|
name="host_cpp",
|
||||||
|
command="clang++ $host_cppflags -c -o $out $in",
|
||||||
|
description="CXX $out",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Source files
|
||||||
|
###
|
||||||
|
n.comment("Source files")
|
||||||
|
build_src_path = build_path / "src"
|
||||||
|
build_host_path = build_path / "host"
|
||||||
|
build_config_path = build_path / "config.json"
|
||||||
|
|
||||||
|
def map_path(path):
|
||||||
|
return path.parent / (path.name + ".MAP")
|
||||||
|
|
||||||
|
class LinkStep:
|
||||||
|
def __init__(self, config):
|
||||||
|
self.name = config["name"]
|
||||||
|
self.module_id = config["module_id"]
|
||||||
|
self.ldscript = config["ldscript"]
|
||||||
|
self.entry = config["entry"]
|
||||||
|
self.inputs = []
|
||||||
|
|
||||||
|
def add(self, obj):
|
||||||
|
self.inputs.append(obj)
|
||||||
|
|
||||||
|
def output(self):
|
||||||
|
if self.module_id == 0:
|
||||||
|
return build_path / f"{self.name}.dol"
|
||||||
|
else:
|
||||||
|
return build_path / self.name / f"{self.name}.rel"
|
||||||
|
|
||||||
|
def partial_output(self):
|
||||||
|
if self.module_id == 0:
|
||||||
|
return build_path / f"{self.name}.elf"
|
||||||
|
else:
|
||||||
|
return build_path / self.name / f"{self.name}.plf"
|
||||||
|
|
||||||
|
def write(self, n):
|
||||||
|
n.comment(f"Link {self.name}")
|
||||||
|
if self.module_id == 0:
|
||||||
|
elf_path = build_path / f"{self.name}.elf"
|
||||||
|
dol_path = build_path / f"{self.name}.dol"
|
||||||
|
elf_ldflags = f"$ldflags -lcf {self.ldscript}"
|
||||||
|
if config.generate_map:
|
||||||
|
elf_map = map_path(elf_path)
|
||||||
|
elf_ldflags += f" -map {elf_map}"
|
||||||
|
else:
|
||||||
|
elf_map = None
|
||||||
|
n.build(
|
||||||
|
outputs=path(elf_path),
|
||||||
|
rule="link",
|
||||||
|
inputs=path(self.inputs),
|
||||||
|
implicit=path([self.ldscript, *mwld_implicit]),
|
||||||
|
implicit_outputs=path(elf_map),
|
||||||
|
variables={"ldflags": elf_ldflags},
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs=path(dol_path),
|
||||||
|
rule="elf2dol",
|
||||||
|
inputs=path(elf_path),
|
||||||
|
implicit=path(dtk),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
preplf_path = build_path / self.name / f"{self.name}.preplf"
|
||||||
|
plf_path = build_path / self.name / f"{self.name}.plf"
|
||||||
|
preplf_ldflags = f"$ldflags -sdata 0 -sdata2 0 -r"
|
||||||
|
plf_ldflags = f"$ldflags -sdata 0 -sdata2 0 -m {self.entry} -r1 -strip_partial -lcf {self.ldscript}"
|
||||||
|
if config.generate_map:
|
||||||
|
preplf_map = map_path(preplf_path)
|
||||||
|
preplf_ldflags += f" -map {preplf_map}"
|
||||||
|
plf_map = map_path(plf_path)
|
||||||
|
plf_ldflags += f" -map {plf_map}"
|
||||||
|
else:
|
||||||
|
preplf_map = None
|
||||||
|
plf_map = None
|
||||||
|
n.build(
|
||||||
|
outputs=path(preplf_path),
|
||||||
|
rule="link",
|
||||||
|
inputs=path(self.inputs),
|
||||||
|
implicit=path(mwld_implicit),
|
||||||
|
implicit_outputs=path(preplf_map),
|
||||||
|
variables={"ldflags": preplf_ldflags},
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs=path(plf_path),
|
||||||
|
rule="link",
|
||||||
|
inputs=path(self.inputs),
|
||||||
|
implicit=path([self.ldscript, preplf_path, *mwld_implicit]),
|
||||||
|
implicit_outputs=path(plf_map),
|
||||||
|
variables={"ldflags": plf_ldflags},
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
if build_config:
|
||||||
|
link_steps = []
|
||||||
|
used_compiler_versions = set()
|
||||||
|
source_inputs = []
|
||||||
|
host_source_inputs = []
|
||||||
|
source_added = set()
|
||||||
|
|
||||||
|
def add_unit(build_obj, link_step):
|
||||||
|
obj_path, obj_name = build_obj["object"], build_obj["name"]
|
||||||
|
result = config.find_object(obj_name)
|
||||||
|
if not result:
|
||||||
|
if config.warn_missing_config and not build_obj["autogenerated"]:
|
||||||
|
print(f"Missing configuration for {obj_name}")
|
||||||
|
link_step.add(obj_path)
|
||||||
|
return
|
||||||
|
|
||||||
|
lib, obj = result
|
||||||
|
lib_name = lib["lib"]
|
||||||
|
|
||||||
|
options = obj.options
|
||||||
|
completed = obj.completed
|
||||||
|
|
||||||
|
unit_src_path = config.src_dir / options["source"]
|
||||||
|
if not unit_src_path.exists():
|
||||||
|
if config.warn_missing_source:
|
||||||
|
print(f"Missing source file {unit_src_path}")
|
||||||
|
link_step.add(obj_path)
|
||||||
|
return
|
||||||
|
|
||||||
|
mw_version = options["mw_version"] or lib["mw_version"]
|
||||||
|
cflags = options["cflags"] or lib["cflags"]
|
||||||
|
if type(cflags) is list:
|
||||||
|
cflags_str = " ".join(cflags)
|
||||||
|
else:
|
||||||
|
cflags_str = str(cflags)
|
||||||
|
used_compiler_versions.add(mw_version)
|
||||||
|
|
||||||
|
base_object = Path(obj.name).with_suffix("")
|
||||||
|
src_obj_path = build_src_path / f"{base_object}.o"
|
||||||
|
src_base_path = build_src_path / base_object
|
||||||
|
|
||||||
|
if src_obj_path not in source_added:
|
||||||
|
source_added.add(src_obj_path)
|
||||||
|
|
||||||
|
n.comment(f"{obj_name}: {lib_name} (linked {completed})")
|
||||||
|
n.build(
|
||||||
|
outputs=path(src_obj_path),
|
||||||
|
rule="mwcc_sjis" if options["shiftjis"] else "mwcc",
|
||||||
|
inputs=path(unit_src_path),
|
||||||
|
variables={
|
||||||
|
"mw_version": path(Path(mw_version)),
|
||||||
|
"cflags": cflags_str,
|
||||||
|
"basedir": os.path.dirname(src_base_path),
|
||||||
|
"basefile": path(src_base_path),
|
||||||
|
},
|
||||||
|
implicit=path(
|
||||||
|
mwcc_sjis_implicit if options["shiftjis"] else mwcc_implicit
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
if lib["host"]:
|
||||||
|
host_obj_path = build_host_path / f"{base_object}.o"
|
||||||
|
host_base_path = build_host_path / base_object
|
||||||
|
n.build(
|
||||||
|
outputs=path(host_obj_path),
|
||||||
|
rule="host_cc" if unit_src_path.suffix == ".c" else "host_cpp",
|
||||||
|
inputs=path(unit_src_path),
|
||||||
|
variables={
|
||||||
|
"basedir": os.path.dirname(host_base_path),
|
||||||
|
"basefile": path(host_base_path),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if options["add_to_all"]:
|
||||||
|
host_source_inputs.append(host_obj_path)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
if options["add_to_all"]:
|
||||||
|
source_inputs.append(src_obj_path)
|
||||||
|
|
||||||
|
if completed:
|
||||||
|
obj_path = src_obj_path
|
||||||
|
link_step.add(obj_path)
|
||||||
|
|
||||||
|
# Add DOL link step
|
||||||
|
link_step = LinkStep(build_config)
|
||||||
|
for unit in build_config["units"]:
|
||||||
|
add_unit(unit, link_step)
|
||||||
|
link_steps.append(link_step)
|
||||||
|
|
||||||
|
if config.build_rels:
|
||||||
|
# Add REL link steps
|
||||||
|
for module in build_config["modules"]:
|
||||||
|
module_link_step = LinkStep(module)
|
||||||
|
for unit in module["units"]:
|
||||||
|
add_unit(unit, module_link_step)
|
||||||
|
link_steps.append(module_link_step)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
# Check if all compiler versions exist
|
||||||
|
for mw_version in used_compiler_versions:
|
||||||
|
mw_path = compilers / mw_version / "mwcceppc.exe"
|
||||||
|
if config.compilers_path and not os.path.exists(mw_path):
|
||||||
|
sys.exit(f"Compiler {mw_path} does not exist")
|
||||||
|
|
||||||
|
# Check if linker exists
|
||||||
|
mw_path = compilers / config.linker_version / "mwldeppc.exe"
|
||||||
|
if config.compilers_path and not os.path.exists(mw_path):
|
||||||
|
sys.exit(f"Linker {mw_path} does not exist")
|
||||||
|
|
||||||
|
###
|
||||||
|
# Link
|
||||||
|
###
|
||||||
|
for step in link_steps:
|
||||||
|
step.write(n)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Generate RELs
|
||||||
|
###
|
||||||
|
rel_outputs = list(
|
||||||
|
map(
|
||||||
|
lambda step: step.output(),
|
||||||
|
filter(lambda step: step.module_id != 0, link_steps),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if len(rel_outputs) > 0:
|
||||||
|
n.comment("Generate RELs")
|
||||||
|
n.build(
|
||||||
|
outputs=path(rel_outputs),
|
||||||
|
rule="makerel",
|
||||||
|
inputs=path(list(map(lambda step: step.partial_output(), link_steps))),
|
||||||
|
implicit=path([dtk, config.config_path]),
|
||||||
|
variables={"config": path(config.config_path)},
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Helper rule for building all source files
|
||||||
|
###
|
||||||
|
n.comment("Build all source files")
|
||||||
|
n.build(
|
||||||
|
outputs="all_source",
|
||||||
|
rule="phony",
|
||||||
|
inputs=path(source_inputs),
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Helper rule for building all source files, with a host compiler
|
||||||
|
###
|
||||||
|
n.comment("Build all source files with a host compiler")
|
||||||
|
n.build(
|
||||||
|
outputs="all_source_host",
|
||||||
|
rule="phony",
|
||||||
|
inputs=path(host_source_inputs),
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Check hash
|
||||||
|
###
|
||||||
|
n.comment("Check hash")
|
||||||
|
ok_path = build_path / "ok"
|
||||||
|
quiet = "-q " if len(link_steps) > 3 else ""
|
||||||
|
n.rule(
|
||||||
|
name="check",
|
||||||
|
command=f"{dtk} shasum {quiet} -c $in -o $out",
|
||||||
|
description="CHECK $in",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs=path(ok_path),
|
||||||
|
rule="check",
|
||||||
|
inputs=path(config.check_sha_path),
|
||||||
|
implicit=path([dtk, *map(lambda step: step.output(), link_steps)]),
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Calculate progress
|
||||||
|
###
|
||||||
|
n.comment("Calculate progress")
|
||||||
|
progress_path = build_path / "progress.json"
|
||||||
|
n.rule(
|
||||||
|
name="progress",
|
||||||
|
command=f"$python {configure_script} $configure_args progress",
|
||||||
|
description="PROGRESS",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs=path(progress_path),
|
||||||
|
rule="progress",
|
||||||
|
implicit=path([ok_path, configure_script, python_lib, config.config_path]),
|
||||||
|
)
|
||||||
|
|
||||||
|
###
|
||||||
|
# Helper tools
|
||||||
|
###
|
||||||
|
# TODO: make these rules work for RELs too
|
||||||
|
dol_link_step = link_steps[0]
|
||||||
|
dol_elf_path = dol_link_step.partial_output()
|
||||||
|
n.comment("Check for mismatching symbols")
|
||||||
|
n.rule(
|
||||||
|
name="dol_diff",
|
||||||
|
command=f"{dtk} -L error dol diff $in",
|
||||||
|
description=f"DIFF {dol_elf_path}",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
inputs=path([config.config_path, dol_elf_path]),
|
||||||
|
outputs="dol_diff",
|
||||||
|
rule="dol_diff",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs="diff",
|
||||||
|
rule="phony",
|
||||||
|
inputs="dol_diff",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
n.comment("Apply symbols from linked ELF")
|
||||||
|
n.rule(
|
||||||
|
name="dol_apply",
|
||||||
|
command=f"{dtk} dol apply $in",
|
||||||
|
description=f"APPLY {dol_elf_path}",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
inputs=path([config.config_path, dol_elf_path]),
|
||||||
|
outputs="dol_apply",
|
||||||
|
rule="dol_apply",
|
||||||
|
implicit=path([ok_path]),
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs="apply",
|
||||||
|
rule="phony",
|
||||||
|
inputs="dol_apply",
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Split DOL
|
||||||
|
###
|
||||||
|
n.comment("Split DOL into relocatable objects")
|
||||||
|
n.rule(
|
||||||
|
name="split",
|
||||||
|
command=f"{dtk} dol split $in $out_dir",
|
||||||
|
description="SPLIT $in",
|
||||||
|
depfile="$out_dir/dep",
|
||||||
|
deps="gcc",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
inputs=path(config.config_path),
|
||||||
|
outputs=path(build_config_path),
|
||||||
|
rule="split",
|
||||||
|
implicit=path(dtk),
|
||||||
|
variables={"out_dir": path(build_path)},
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Regenerate on change
|
||||||
|
###
|
||||||
|
n.comment("Reconfigure on change")
|
||||||
|
n.rule(
|
||||||
|
name="configure",
|
||||||
|
command=f"$python {configure_script} $configure_args",
|
||||||
|
generator=True,
|
||||||
|
description=f"RUN {configure_script}",
|
||||||
|
)
|
||||||
|
n.build(
|
||||||
|
outputs="build.ninja",
|
||||||
|
rule="configure",
|
||||||
|
implicit=path(
|
||||||
|
[
|
||||||
|
build_config_path,
|
||||||
|
configure_script,
|
||||||
|
python_lib,
|
||||||
|
Path(python_lib_dir) / "ninja_syntax.py",
|
||||||
|
]
|
||||||
|
),
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Default rule
|
||||||
|
###
|
||||||
|
n.comment("Default rule")
|
||||||
|
if build_config:
|
||||||
|
n.default(path(progress_path))
|
||||||
|
else:
|
||||||
|
n.default(path(build_config_path))
|
||||||
|
|
||||||
|
# Write build.ninja
|
||||||
|
with open("build.ninja", "w", encoding="utf-8") as f:
|
||||||
|
f.write(out.getvalue())
|
||||||
|
out.close()
|
||||||
|
|
||||||
|
|
||||||
|
# Generate objdiff.json
|
||||||
|
def generate_objdiff_config(config, build_config):
|
||||||
|
if not build_config:
|
||||||
|
return
|
||||||
|
|
||||||
|
objdiff_config = {
|
||||||
|
"min_version": "0.4.3",
|
||||||
|
"custom_make": "ninja",
|
||||||
|
"build_target": False,
|
||||||
|
"watch_patterns": [
|
||||||
|
"*.c",
|
||||||
|
"*.cp",
|
||||||
|
"*.cpp",
|
||||||
|
"*.h",
|
||||||
|
"*.hpp",
|
||||||
|
"*.py",
|
||||||
|
"*.yml",
|
||||||
|
"*.txt",
|
||||||
|
"*.json",
|
||||||
|
],
|
||||||
|
"units": [],
|
||||||
|
}
|
||||||
|
|
||||||
|
build_path = config.out_path()
|
||||||
|
|
||||||
|
def add_unit(build_obj, module_name):
|
||||||
|
if build_obj["autogenerated"]:
|
||||||
|
# Skip autogenerated objects
|
||||||
|
return
|
||||||
|
|
||||||
|
obj_path, obj_name = build_obj["object"], build_obj["name"]
|
||||||
|
base_object = Path(obj_name).with_suffix("")
|
||||||
|
unit_config = {
|
||||||
|
"name": unix_str(Path(module_name) / base_object),
|
||||||
|
"target_path": unix_str(obj_path),
|
||||||
|
}
|
||||||
|
|
||||||
|
result = config.find_object(obj_name)
|
||||||
|
if not result:
|
||||||
|
objdiff_config["units"].append(unit_config)
|
||||||
|
return
|
||||||
|
|
||||||
|
lib, obj = result
|
||||||
|
unit_src_path = config.src_dir / obj.options["source"]
|
||||||
|
if not unit_src_path.exists():
|
||||||
|
objdiff_config["units"].append(unit_config)
|
||||||
|
return
|
||||||
|
|
||||||
|
cflags = obj.options["cflags"] or lib["cflags"]
|
||||||
|
src_obj_path = build_path / "src" / f"{base_object}.o"
|
||||||
|
|
||||||
|
reverse_fn_order = False
|
||||||
|
if type(cflags) is list:
|
||||||
|
for flag in cflags:
|
||||||
|
if not flag.startswith("-inline "):
|
||||||
|
continue
|
||||||
|
for value in flag.split(" ")[1].split(","):
|
||||||
|
if value == "deferred":
|
||||||
|
reverse_fn_order = True
|
||||||
|
elif value == "nodeferred":
|
||||||
|
reverse_fn_order = False
|
||||||
|
|
||||||
|
unit_config["base_path"] = unix_str(src_obj_path)
|
||||||
|
unit_config["reverse_fn_order"] = reverse_fn_order
|
||||||
|
unit_config["complete"] = obj.completed
|
||||||
|
objdiff_config["units"].append(unit_config)
|
||||||
|
|
||||||
|
# Add DOL units
|
||||||
|
for unit in build_config["units"]:
|
||||||
|
add_unit(unit, build_config["name"])
|
||||||
|
|
||||||
|
# Add REL units
|
||||||
|
for module in build_config["modules"]:
|
||||||
|
for unit in module["units"]:
|
||||||
|
add_unit(unit, module["name"])
|
||||||
|
|
||||||
|
# Write objdiff.json
|
||||||
|
with open("objdiff.json", "w", encoding="utf-8") as w:
|
||||||
|
json.dump(objdiff_config, w, indent=4)
|
||||||
|
|
||||||
|
|
||||||
|
# Calculate, print and write progress to progress.json
|
||||||
|
def calculate_progress(config):
|
||||||
|
out_path = config.out_path()
|
||||||
|
build_config = load_build_config(config, out_path / "config.json")
|
||||||
|
if not build_config:
|
||||||
|
return
|
||||||
|
|
||||||
|
class ProgressUnit:
|
||||||
|
def __init__(self, name):
|
||||||
|
self.name = name
|
||||||
|
self.code_total = 0
|
||||||
|
self.code_progress = 0
|
||||||
|
self.data_total = 0
|
||||||
|
self.data_progress = 0
|
||||||
|
self.objects_progress = 0
|
||||||
|
self.objects_total = 0
|
||||||
|
self.objects = set()
|
||||||
|
|
||||||
|
def add(self, build_obj):
|
||||||
|
self.code_total += build_obj["code_size"]
|
||||||
|
self.data_total += build_obj["data_size"]
|
||||||
|
|
||||||
|
# Avoid counting the same object in different modules twice
|
||||||
|
include_object = build_obj["name"] not in self.objects
|
||||||
|
if include_object:
|
||||||
|
self.objects.add(build_obj["name"])
|
||||||
|
self.objects_total += 1
|
||||||
|
|
||||||
|
if build_obj["autogenerated"]:
|
||||||
|
# Skip autogenerated objects
|
||||||
|
return
|
||||||
|
|
||||||
|
result = config.find_object(build_obj["name"])
|
||||||
|
if not result:
|
||||||
|
return
|
||||||
|
|
||||||
|
_, obj = result
|
||||||
|
if not obj.completed:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.code_progress += build_obj["code_size"]
|
||||||
|
self.data_progress += build_obj["data_size"]
|
||||||
|
if include_object:
|
||||||
|
self.objects_progress += 1
|
||||||
|
|
||||||
|
def code_frac(self):
|
||||||
|
return self.code_progress / self.code_total
|
||||||
|
|
||||||
|
def data_frac(self):
|
||||||
|
return self.data_progress / self.data_total
|
||||||
|
|
||||||
|
# Add DOL units.  "All" aggregates everything; DOL always gets every
# base-executable unit.
all_progress = ProgressUnit("All") if config.progress_all else None
dol_progress = ProgressUnit("DOL")
for unit in build_config["units"]:
    if all_progress:
        all_progress.add(unit)
    dol_progress.add(unit)

# Add REL units: one aggregate "Modules" unit (optional) plus a
# per-module unit for each REL.
rels_progress = ProgressUnit("Modules") if config.progress_modules else None
modules_progress = []
for module in build_config["modules"]:
    progress = ProgressUnit(module["name"])
    modules_progress.append(progress)
    for unit in module["units"]:
        if all_progress:
            all_progress.add(unit)
        if rels_progress:
            rels_progress.add(unit)
        progress.add(unit)

# Print human-readable progress
print("Progress:")


def print_category(unit):
    # One summary line plus code/data byte counts for a single unit.
    code_frac = unit.code_frac()
    data_frac = unit.data_frac()
    print(
        f"  {unit.name}: {code_frac:.2%} code, {data_frac:.2%} data ({unit.objects_progress} / {unit.objects_total} files)"
    )
    print(f"    Code: {unit.code_progress} / {unit.code_total} bytes")
    print(f"    Data: {unit.data_progress} / {unit.data_total} bytes")


if all_progress:
    print_category(all_progress)
print_category(dol_progress)
module_count = len(build_config["modules"])
if module_count > 0:
    # BUGFIX: rels_progress is None when config.progress_modules is
    # disabled; guard before printing (the JSON section below already did).
    if rels_progress:
        print_category(rels_progress)
    if config.progress_each_module:
        for progress in modules_progress:
            print_category(progress)

# Generate and write progress.json
progress_json = {}


def add_category(category, unit):
    # frogress-style schema: current vs. total for code and data bytes.
    progress_json[category] = {
        "code": unit.code_progress,
        "code/total": unit.code_total,
        "data": unit.data_progress,
        "data/total": unit.data_total,
    }


if all_progress:
    add_category("all", all_progress)
add_category("dol", dol_progress)
if len(build_config["modules"]) > 0:
    if rels_progress:
        add_category("modules", rels_progress)
    if config.progress_each_module:
        for progress in modules_progress:
            add_category(progress.name, progress)
with open(out_path / "progress.json", "w", encoding="utf-8") as w:
    json.dump(progress_json, w, indent=4)
|
|
@ -0,0 +1,84 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
###
|
||||||
|
# Transforms .d files, converting Windows paths to Unix paths.
|
||||||
|
# Allows usage of the mwcc -MMD flag on platforms other than Windows.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# python3 tools/transform_dep.py build/src/file.d build/src/file.d
|
||||||
|
#
|
||||||
|
# If changes are made, please submit a PR to
|
||||||
|
# https://github.com/encounter/dtk-template
|
||||||
|
###
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
from platform import uname
|
||||||
|
|
||||||
|
# Resolve the Wine prefix: honour $WINEPREFIX when set, otherwise fall
# back to the default ~/.wine.  (HOME is read eagerly either way, matching
# the original behavior.)
wineprefix = os.environ.get(
    "WINEPREFIX", os.path.join(os.environ["HOME"], ".wine")
)
# Wine exposes DOS drive letters as symlinks under <prefix>/dosdevices.
winedevices = os.path.join(wineprefix, "dosdevices")
|
||||||
|
|
||||||
|
|
||||||
|
def in_wsl() -> bool:
    """Return True when running under Windows Subsystem for Linux."""
    # WSL kernels advertise themselves with a "microsoft-standard" tag in
    # the kernel release string.
    release = uname().release
    return "microsoft-standard" in release
|
||||||
|
|
||||||
|
|
||||||
|
def import_d_file(in_file) -> str:
    """Read a Windows-path .d dependency file and return its Unix-path form.

    The first line (the make target) only needs backslash conversion; every
    following line is a dependency path mapped from a Windows drive path to
    a native one: the z: drive maps straight to /, WSL drives map under
    /mnt/<letter>, and anything else is resolved through
    $WINEPREFIX/dosdevices.
    """
    out_text = ""
    with open(in_file) as file:
        for idx, line in enumerate(file):
            if idx == 0:
                # Target line: keep the trailing " \" continuation intact.
                if line.endswith(" \\\n"):
                    out_text += line[:-3].replace("\\", "/") + " \\\n"
                else:
                    out_text += line.replace("\\", "/")
            else:
                suffix = ""
                if line.endswith(" \\\n"):
                    suffix = " \\"
                    path = line.lstrip()[:-3]
                else:
                    path = line.strip()
                # BUGFIX: skip blank lines so path[0] below cannot raise
                # IndexError on a trailing empty line.
                if not path:
                    continue
                # lowercase drive letter
                path = path[0].lower() + path[1:]
                if path[0] == "z":
                    # shortcut for z: (Wine maps z: to the filesystem root)
                    path = path[2:].replace("\\", "/")
                elif in_wsl():
                    # WSL mounts Windows drives at /mnt/<letter>/...
                    path = path[0:1] + path[2:]
                    path = os.path.join("/mnt", path.replace("\\", "/"))
                else:
                    # use $WINEPREFIX/dosdevices to resolve path
                    path = os.path.realpath(
                        os.path.join(winedevices, path.replace("\\", "/"))
                    )
                out_text += "\t" + path + suffix + "\n"

    return out_text
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: rewrite a Wine-path .d file to native paths."""
    parser = argparse.ArgumentParser(
        description="""Transform a .d file from Wine paths to normal paths"""
    )
    parser.add_argument(
        "d_file",
        help="""Dependency file in""",
    )
    parser.add_argument(
        "d_file_out",
        help="""Dependency file out""",
    )
    args = parser.parse_args()

    transformed = import_d_file(args.d_file)

    with open(args.d_file_out, "w", encoding="UTF-8") as out:
        out.write(transformed)
|
||||||
|
|
||||||
|
|
||||||
|
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,76 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
###
|
||||||
|
# Uploads progress information to https://github.com/decompals/frogress.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# python3 tools/upload_progress.py -b https://progress.decomp.club/ -p [project] -v [version] build/[version]/progress.json
|
||||||
|
#
|
||||||
|
# If changes are made, please submit a PR to
|
||||||
|
# https://github.com/encounter/dtk-template
|
||||||
|
###
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import requests
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def get_git_commit_timestamp() -> int:
    """Return the committer timestamp of HEAD as a Unix epoch integer."""
    # `git show -s --format=%ct` prints only the committer date of HEAD.
    raw = subprocess.check_output(["git", "show", "-s", "--format=%ct"])
    return int(raw.decode("ascii").rstrip())
|
||||||
|
|
||||||
|
|
||||||
|
def get_git_commit_sha() -> str:
    """Return the full hex SHA of the current HEAD commit."""
    output = subprocess.check_output(["git", "rev-parse", "HEAD"])
    return output.decode("ascii").strip()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_url(args: argparse.Namespace) -> str:
    """Build the frogress data-upload URL from the CLI arguments.

    Empty project/version slugs are omitted; the result always ends in "/".
    """
    parts = [args.base_url.rstrip("/"), "data"]
    parts.extend(slug for slug in (args.project, args.version) if slug != "")
    return "/".join(parts) + "/"
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Upload progress information.")
    parser.add_argument("-b", "--base_url", help="API base URL", required=True)
    parser.add_argument("-a", "--api_key", help="API key (env var PROGRESS_API_KEY)")
    parser.add_argument("-p", "--project", help="Project slug", required=True)
    parser.add_argument("-v", "--version", help="Version slug", required=True)
    parser.add_argument("input", help="Progress JSON input")

    args = parser.parse_args()
    # CLI flag wins; fall back to the PROGRESS_API_KEY environment variable.
    api_key = args.api_key or os.environ.get("PROGRESS_API_KEY")
    if not api_key:
        # BUGFIX: `raise "API key required"` is invalid in Python 3
        # (exceptions must derive from BaseException and would surface as a
        # confusing TypeError); exit with the intended message instead.
        raise SystemExit("API key required")
    url = generate_url(args)

    # Wrap the progress categories in a single frogress entry stamped with
    # the current git commit.
    entries = []
    # progress.json is written as UTF-8 by the generator; read it the same way.
    with open(args.input, "r", encoding="utf-8") as f:
        data = json.load(f)
        entries.append(
            {
                "timestamp": get_git_commit_timestamp(),
                "git_hash": get_git_commit_sha(),
                "categories": data,
            }
        )

    print("Publishing entry to", url)
    json.dump(entries[0], sys.stdout, indent=4)
    print()
    # NOTE(review): requests.post has no timeout here, so a hung server
    # blocks forever — consider adding timeout= in a follow-up.
    r = requests.post(url, json={
        "api_key": api_key,
        "entries": entries,
    })
    r.raise_for_status()
    print("Done!")
|
Loading…
Reference in New Issue