mirror of https://github.com/PrimeDecomp/prime.git

Update to latest dtk-template

parent bc0fcbb7b3
commit 3efecd011f
@@ -9,5 +9,5 @@
 *.sh text eol=lf
 *.sha1 text eol=lf
 
-# DTK keeps these files with LF
+# decomp-toolkit writes files with LF
 config/**/*.txt text eol=lf
@@ -35,7 +35,8 @@ jobs:
 run: |
 python configure.py --map --version ${{ matrix.version }} \
 --binutils /binutils --compilers /compilers
-ninja all_source build/${{ matrix.version }}/progress.json
+ninja all_source build/${{ matrix.version }}/progress.json \
+  build/${{ matrix.version }}/report.json
 
 # Upload progress if we're on the main branch
 - name: Upload progress
@@ -55,3 +56,10 @@ jobs:
 with:
 name: ${{ matrix.version }}_maps
 path: build/${{ matrix.version }}/**/*.MAP
+
+# Upload progress report
+- name: Upload report
+  uses: actions/upload-artifact@v4
+  with:
+    name: ${{ matrix.version }}_report
+    path: build/${{ matrix.version }}/report.json
README.md | 33

@@ -26,7 +26,7 @@ If you'd like to contribute, see [CONTRIBUTING.md](CONTRIBUTING.md).
 Dependencies
 ============
 
-Windows:
+Windows
 --------
 
 On Windows, it's **highly recommended** to use native tooling. WSL or msys2 are **not** required.
@@ -37,51 +37,64 @@ When running under WSL, [objdiff](#diffing) is unable to get filesystem notifica
 - Download [ninja](https://github.com/ninja-build/ninja/releases) and add it to `%PATH%`.
 - Quick install via pip: `pip install ninja`
 
-macOS:
+macOS
 ------
 
 - Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages):
-```
+```sh
 brew install ninja
 ```
 
 - Install [wine-crossover](https://github.com/Gcenx/homebrew-wine):
-```
+```sh
 brew install --cask --no-quarantine gcenx/wine/wine-crossover
 ```
 
 After OS upgrades, if macOS complains about `Wine Crossover.app` being unverified, you can unquarantine it using:
 
 ```sh
 sudo xattr -rd com.apple.quarantine '/Applications/Wine Crossover.app'
 ```
 
-Linux:
+Linux
 ------
 
 - Install [ninja](https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages).
 - For non-x86(_64) platforms: Install wine from your package manager.
-- For x86(_64), [WiBo](https://github.com/decompals/WiBo), a minimal 32-bit Windows binary wrapper, will be automatically downloaded and used.
+- For x86(_64), [wibo](https://github.com/decompals/wibo), a minimal 32-bit Windows binary wrapper, will be automatically downloaded and used.
 
 Building
 ========
 
 - Clone the repository:
-```
+```sh
 git clone https://github.com/PrimeDecomp/prime.git
 ```
 
 - Update and Initialize submodules:
-```
+```sh
 git submodule update --init --recursive
 ```
 
 - Using [Dolphin Emulator](https://dolphin-emu.org/), extract your game to `orig/GM8E01_00` (or the appropriate version).
 ![](assets/dolphin-extract.png)
 - To save space, the only necessary files are the following. Any others can be deleted.
 - `sys/main.dol`
 - `files/NESemuP.rel`
 - Configure:
-```
+```sh
 python configure.py
 ```
 
 To use a version other than `GM8E01_00` (USA), specify `--version GM8E01_01` or similar.
 - Build:
-```
+```sh
 ninja
 ```
configure.py | 44

@@ -16,14 +16,7 @@ import argparse
 import sys
 from pathlib import Path
 from typing import Any, Dict, List
-from tools.project import (
-    Object,
-    ProjectConfig,
-    calculate_progress,
-    generate_build,
-    is_windows,
-)
+from tools.project import *
 
 # Game versions
 DEFAULT_VERSION = 0
@@ -79,11 +72,6 @@ parser.add_argument(
     action="store_true",
     help="generate map file(s)",
 )
-parser.add_argument(
-    "--no-asm",
-    action="store_true",
-    help="don't incorporate .s files from asm directory",
-)
 parser.add_argument(
     "--debug",
     action="store_true",
@@ -102,6 +90,12 @@ parser.add_argument(
     type=Path,
     help="path to decomp-toolkit binary or source (optional)",
 )
+parser.add_argument(
+    "--objdiff",
+    metavar="BINARY | DIR",
+    type=Path,
+    help="path to objdiff-cli binary or source (optional)",
+)
 parser.add_argument(
     "--sjiswrap",
     metavar="EXE",
@@ -128,6 +122,7 @@ version_num = VERSIONS.index(config.version)
 # Apply arguments
 config.build_dir = args.build_dir
 config.dtk_path = args.dtk
+config.objdiff_path = args.objdiff
 config.binutils_path = args.binutils
 config.compilers_path = args.compilers
 config.debug = args.debug
@@ -136,13 +131,15 @@ config.non_matching = args.non_matching
 config.sjiswrap_path = args.sjiswrap
 if not is_windows():
     config.wrapper = args.wrapper
-if args.no_asm:
+# Don't build asm unless we're --non-matching
+if not config.non_matching:
     config.asm_dir = None
 
 # Tool versions
 config.binutils_tag = "2.42-1"
-config.compilers_tag = "20231018"
+config.compilers_tag = "20240706"
-config.dtk_tag = "v0.9.2"
+config.dtk_tag = "v0.9.5"
+config.objdiff_tag = "v2.0.0-beta.5"
 config.sjiswrap_tag = "v1.1.1"
 config.wibo_tag = "0.6.11"
@@ -159,7 +156,6 @@ config.asflags = [
 config.ldflags = [
     "-fp hardware",
     "-nodefaults",
-    "-warn off",
 ]
 
 config.progress_all = False
@@ -272,7 +268,7 @@ cflags_rel = [
     "-sdata 0",
     "-sdata2 0",
     "-str noreuse",
-    "-Cpp_exceptions off"
+    "-Cpp_exceptions off",
 ]
 
 config.linker_version = "GC/1.3.2"
@@ -329,7 +325,9 @@ def Rel(lib_name, objects):
 
 Matching = True # Object matches and should be linked
 NonMatching = False # Object does not match and should not be linked
-Equivalent = config.non_matching # Object should be linked when configured with --non-matching
+Equivalent = (
+    config.non_matching
+) # Object should be linked when configured with --non-matching
 
 config.warn_missing_config = True
 config.warn_missing_source = False
@@ -1343,12 +1341,18 @@ config.libs = [
     ),
 ]
 
+# Optional extra categories for progress tracking
+config.progress_categories = [
+    # ProgressCategory("game", "Game Code"),
+    # ProgressCategory("sdk", "SDK Code"),
+]
+config.progress_each_module = args.verbose
 
 if args.mode == "configure":
     # Write build.ninja and objdiff.json
     generate_build(config)
 elif args.mode == "progress":
     # Print progress and write progress.json
-    config.progress_each_module = args.verbose
     calculate_progress(config)
 else:
     sys.exit("Unknown mode: " + args.mode)
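The `config.progress_categories` block added above ships commented out. A minimal sketch of how a project might fill it in, assuming illustrative category ids ("game", "sdk") and an illustrative object entry; the per-object `progress_category` option is defined in tools/project.py below:

```py
# Illustrative only: the category ids and the object entry are examples,
# not lines from the real configuration.
config.progress_categories = [
    ProgressCategory("game", "Game Code"),
    ProgressCategory("sdk", "SDK Code"),
]

# Individual objects opt into a category through the per-object
# "progress_category" option, e.g. inside a library's objects list:
#     Object(Matching, "Runtime/example.c", progress_category="sdk"),
```

objdiff.json and report.json then group progress under these ids in addition to the built-in "dol" and "modules" categories.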
@@ -24,8 +24,8 @@ include_dirs = [
     os.path.join(root_dir, "extern/musyx/include")
 ]
 
-include_pattern = re.compile(r'^#include\s*[<"](.+?)[>"]$')
+include_pattern = re.compile(r'^#\s*include\s*[<"](.+?)[>"]$')
-guard_pattern = re.compile(r"^#ifndef\s+(.*)$")
+guard_pattern = re.compile(r"^#\s*ifndef\s+(.*)$")
 
 defines = set()
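The only change here is the `\s*` after the `#`, so the context generator now also recognizes preprocessor directives written with whitespace between the hash and the keyword. A quick illustrative check (not taken from the repository):

```py
import re

include_pattern = re.compile(r'^#\s*include\s*[<"](.+?)[>"]$')
guard_pattern = re.compile(r"^#\s*ifndef\s+(.*)$")

# Both spellings are now accepted:
assert include_pattern.match('#include "foo.h"')
assert include_pattern.match('#  include <bar.h>')
assert guard_pattern.match("# ifndef _FOO_H")
```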
@@ -55,6 +55,21 @@ def dtk_url(tag: str) -> str:
     repo = "https://github.com/encounter/decomp-toolkit"
     return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"
 
+
+def objdiff_cli_url(tag: str) -> str:
+    uname = platform.uname()
+    suffix = ""
+    system = uname.system.lower()
+    if system == "darwin":
+        system = "macos"
+    elif system == "windows":
+        suffix = ".exe"
+    arch = uname.machine.lower()
+    if arch == "amd64":
+        arch = "x86_64"
+
+    repo = "https://github.com/encounter/objdiff"
+    return f"{repo}/releases/download/{tag}/objdiff-cli-{system}-{arch}{suffix}"
+
 
 def sjiswrap_url(tag: str) -> str:
     repo = "https://github.com/encounter/sjiswrap"
@@ -70,6 +85,7 @@ TOOLS: Dict[str, Callable[[str], str]] = {
     "binutils": binutils_url,
     "compilers": compilers_url,
     "dtk": dtk_url,
+    "objdiff-cli": objdiff_cli_url,
     "sjiswrap": sjiswrap_url,
     "wibo": wibo_url,
 }
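For reference, with the tag pinned in configure.py above, the new helper resolves to a release asset named after the host platform. Shown here for a Linux x86_64 host; macOS and Windows hosts get the "macos"/"windows" name and the ".exe" suffix handled above:

```py
# On a Linux x86_64 host:
print(objdiff_cli_url("v2.0.0-beta.5"))
# https://github.com/encounter/objdiff/releases/download/v2.0.0-beta.5/objdiff-cli-linux-x86_64
```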
tools/project.py | 623
@@ -17,7 +17,7 @@ import os
 import platform
 import sys
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, List, Optional, Set, Tuple, Union, cast
 
 from . import ninja_syntax
 from .ninja_syntax import serialize_path
@@ -29,24 +29,78 @@ if sys.platform == "cygwin":
         f"\n(Current path: {sys.executable})"
     )
 
+Library = Dict[str, Any]
+
+
 class Object:
     def __init__(self, completed: bool, name: str, **options: Any) -> None:
         self.name = name
-        self.base_name = Path(name).with_suffix("")
         self.completed = completed
         self.options: Dict[str, Any] = {
-            "add_to_all": True,
+            "add_to_all": None,
             "asflags": None,
-            "extra_asflags": None,
+            "asm_dir": None,
             "cflags": None,
+            "extra_asflags": None,
             "extra_cflags": None,
+            "host": None,
+            "lib": None,
             "mw_version": None,
+            "progress_category": None,
             "shift_jis": None,
             "source": name,
+            "src_dir": None,
         }
         self.options.update(options)
 
+        # Internal
+        self.src_path: Optional[Path] = None
+        self.asm_path: Optional[Path] = None
+        self.src_obj_path: Optional[Path] = None
+        self.asm_obj_path: Optional[Path] = None
+        self.host_obj_path: Optional[Path] = None
+        self.ctx_path: Optional[Path] = None
+
+    def resolve(self, config: "ProjectConfig", lib: Library) -> "Object":
+        # Use object options, then library options
+        obj = Object(self.completed, self.name, **lib)
+        for key, value in self.options.items():
+            if value is not None or key not in obj.options:
+                obj.options[key] = value
+
+        # Use default options from config
+        def set_default(key: str, value: Any) -> None:
+            if obj.options[key] is None:
+                obj.options[key] = value
+
+        set_default("add_to_all", True)
+        set_default("asflags", config.asflags)
+        set_default("asm_dir", config.asm_dir)
+        set_default("host", False)
+        set_default("mw_version", config.linker_version)
+        set_default("shift_jis", config.shift_jis)
+        set_default("src_dir", config.src_dir)
+
+        # Resolve paths
+        build_dir = config.out_path()
+        obj.src_path = Path(obj.options["src_dir"]) / obj.options["source"]
+        if obj.options["asm_dir"] is not None:
+            obj.asm_path = (
+                Path(obj.options["asm_dir"]) / obj.options["source"]
+            ).with_suffix(".s")
+        base_name = Path(self.name).with_suffix("")
+        obj.src_obj_path = build_dir / "src" / f"{base_name}.o"
+        obj.asm_obj_path = build_dir / "mod" / f"{base_name}.o"
+        obj.host_obj_path = build_dir / "host" / f"{base_name}.o"
+        obj.ctx_path = build_dir / "src" / f"{base_name}.ctx"
+        return obj
+
+
+class ProgressCategory:
+    def __init__(self, id: str, name: str) -> None:
+        self.id = id
+        self.name = name
+
+
 class ProjectConfig:
     def __init__(self) -> None:
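To summarize the new flow: an `Object` now starts with every option unset, `resolve()` first layers the object's own options over the owning library dict, then falls back to project-wide defaults, and finally derives the source/asm/object/context paths from the object name. A hedged sketch of what that looks like from a configure script (the library name, object name, and flags are illustrative, not entries from the real project):

```py
# Sketch only: assumes it runs as configure.py does, i.e. after
# `from tools.project import *` and `from pathlib import Path`.
Matching = True  # object matches and should be linked

config = ProjectConfig()
config.build_dir = Path("build")
config.src_dir = Path("src")
config.asm_dir = Path("asm")
config.version = "GM8E01_00"
config.linker_version = "GC/1.3.2"

lib: Library = {
    "lib": "Runtime",            # library-level defaults...
    "mw_version": "GC/1.3.2",
    "cflags": ["-O4,p"],
    "objects": [
        # ...per-object options win where they are set
        Object(Matching, "Runtime/example.c", extra_cflags=["-inline off"]),
    ],
}

obj = lib["objects"][0].resolve(config, lib)
# obj.options["mw_version"] == "GC/1.3.2"        (from the library)
# obj.options["extra_cflags"] == ["-inline off"] (from the object)
# obj.src_path     -> src/Runtime/example.c
# obj.src_obj_path -> build/GM8E01_00/src/Runtime/example.o
# obj.ctx_path     -> build/GM8E01_00/src/Runtime/example.ctx
```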
@@ -69,6 +123,8 @@ class ProjectConfig:
         self.wrapper: Optional[Path] = None # If None, download wibo on Linux
         self.sjiswrap_tag: Optional[str] = None # Git tag
         self.sjiswrap_path: Optional[Path] = None # If None, download
+        self.objdiff_tag: Optional[str] = None # Git tag
+        self.objdiff_path: Optional[Path] = None # If None, download
 
         # Project config
         self.non_matching: bool = False
@@ -79,28 +135,35 @@ class ProjectConfig:
         self.generate_map: bool = False # Generate map file(s)
         self.asflags: Optional[List[str]] = None # Assembler flags
         self.ldflags: Optional[List[str]] = None # Linker flags
-        self.libs: Optional[List[Dict[str, Any]]] = None # List of libraries
+        self.libs: Optional[List[Library]] = None # List of libraries
         self.linker_version: Optional[str] = None # mwld version
         self.version: Optional[str] = None # Version name
         self.warn_missing_config: bool = False # Warn on missing unit configuration
         self.warn_missing_source: bool = False # Warn on missing source file
         self.rel_strip_partial: bool = True # Generate PLFs with -strip_partial
-        self.rel_empty_file: Optional[
-            str
-        ] = None # Object name for generating empty RELs
+        self.rel_empty_file: Optional[str] = (
+            None # Object name for generating empty RELs
+        )
         self.shift_jis = (
             True # Convert source files from UTF-8 to Shift JIS automatically
         )
         self.reconfig_deps: Optional[List[Path]] = (
             None # Additional re-configuration dependency files
         )
+        self.custom_build_rules: Optional[List[Dict[str, Any]]] = (
+            None # Custom ninja build rules
+        )
+        self.custom_build_steps: Optional[Dict[str, List[Dict[str, Any]]]] = (
+            None # Custom build steps, types are ["pre-compile", "post-compile", "post-link", "post-build"]
+        )
 
-        # Progress output and progress.json config
+        # Progress output, progress.json and report.json config
         self.progress_all: bool = True # Include combined "all" category
         self.progress_modules: bool = True # Include combined "modules" category
         self.progress_each_module: bool = (
-            True # Include individual modules, disable for large numbers of modules
+            False # Include individual modules, disable for large numbers of modules
         )
+        self.progress_categories: List[ProgressCategory] = [] # Additional categories
 
         # Progress fancy printing
         self.progress_use_fancy: bool = False
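The two new hooks let a project inject its own ninja rules and attach build statements at the four phases listed in the comment above; outputs of a "pre-compile" step become implicit dependencies of the link step later in generate_build_ninja. A minimal sketch, with the rule name, command, and paths invented purely for illustration:

```py
# Illustrative only: "convert_assets" and the paths below are made up.
config.custom_build_rules = [
    {
        "name": "convert_assets",
        "command": "python tools/convert_assets.py $in $out",
        "description": "ASSETS $out",
    },
]
config.custom_build_steps = {
    # Runs before any source file is compiled; its outputs are added to the
    # implicit inputs of the DOL link step.
    "pre-compile": [
        {
            "rule": "convert_assets",
            "inputs": "assets/textures.yml",
            "outputs": "build/assets/textures.bin",
        },
    ],
}
```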
@ -125,12 +188,17 @@ class ProjectConfig:
|
||||||
if getattr(self, attr) is None:
|
if getattr(self, attr) is None:
|
||||||
sys.exit(f"ProjectConfig.{attr} missing")
|
sys.exit(f"ProjectConfig.{attr} missing")
|
||||||
|
|
||||||
def find_object(self, name: str) -> Optional[Tuple[Dict[str, Any], Object]]:
|
# Creates a map of object names to Object instances
|
||||||
|
# Options are fully resolved from the library and object
|
||||||
|
def objects(self) -> Dict[str, Object]:
|
||||||
|
out = {}
|
||||||
for lib in self.libs or {}:
|
for lib in self.libs or {}:
|
||||||
for obj in lib["objects"]:
|
objects: List[Object] = lib["objects"]
|
||||||
if obj.name == name:
|
for obj in objects:
|
||||||
return lib, obj
|
if obj.name in out:
|
||||||
return None
|
sys.exit(f"Duplicate object name {obj.name}")
|
||||||
|
out[obj.name] = obj.resolve(self, lib)
|
||||||
|
return out
|
||||||
|
|
||||||
def out_path(self) -> Path:
|
def out_path(self) -> Path:
|
||||||
return self.build_dir / str(self.version)
|
return self.build_dir / str(self.version)
|
||||||
|
@ -166,7 +234,7 @@ def load_build_config(
|
||||||
f = open(build_config_path, "r", encoding="utf-8")
|
f = open(build_config_path, "r", encoding="utf-8")
|
||||||
build_config: Dict[str, Any] = json.load(f)
|
build_config: Dict[str, Any] = json.load(f)
|
||||||
config_version = build_config.get("version")
|
config_version = build_config.get("version")
|
||||||
if not config_version:
|
if config_version is None:
|
||||||
# Invalid config.json
|
# Invalid config.json
|
||||||
f.close()
|
f.close()
|
||||||
os.remove(build_config_path)
|
os.remove(build_config_path)
|
||||||
|
@ -185,17 +253,19 @@ def load_build_config(
|
||||||
|
|
||||||
# Generate build.ninja and objdiff.json
|
# Generate build.ninja and objdiff.json
|
||||||
def generate_build(config: ProjectConfig) -> None:
|
def generate_build(config: ProjectConfig) -> None:
|
||||||
|
config.validate()
|
||||||
|
objects = config.objects()
|
||||||
build_config = load_build_config(config, config.out_path() / "config.json")
|
build_config = load_build_config(config, config.out_path() / "config.json")
|
||||||
generate_build_ninja(config, build_config)
|
generate_build_ninja(config, objects, build_config)
|
||||||
generate_objdiff_config(config, build_config)
|
generate_objdiff_config(config, objects, build_config)
|
||||||
|
|
||||||
|
|
||||||
# Generate build.ninja
|
# Generate build.ninja
|
||||||
def generate_build_ninja(
|
def generate_build_ninja(
|
||||||
config: ProjectConfig, build_config: Optional[Dict[str, Any]]
|
config: ProjectConfig,
|
||||||
|
objects: Dict[str, Object],
|
||||||
|
build_config: Optional[Dict[str, Any]],
|
||||||
) -> None:
|
) -> None:
|
||||||
config.validate()
|
|
||||||
|
|
||||||
out = io.StringIO()
|
out = io.StringIO()
|
||||||
n = ninja_syntax.Writer(out)
|
n = ninja_syntax.Writer(out)
|
||||||
n.variable("ninja_required_version", "1.3")
|
n.variable("ninja_required_version", "1.3")
|
||||||
|
@ -219,7 +289,7 @@ def generate_build_ninja(
|
||||||
if config.debug:
|
if config.debug:
|
||||||
ldflags += " -g"
|
ldflags += " -g"
|
||||||
n.variable("ldflags", ldflags)
|
n.variable("ldflags", ldflags)
|
||||||
if not config.linker_version:
|
if config.linker_version is None:
|
||||||
sys.exit("ProjectConfig.linker_version missing")
|
sys.exit("ProjectConfig.linker_version missing")
|
||||||
n.variable("mw_version", Path(config.linker_version))
|
n.variable("mw_version", Path(config.linker_version))
|
||||||
n.newline()
|
n.newline()
|
||||||
|
@ -231,6 +301,7 @@ def generate_build_ninja(
|
||||||
|
|
||||||
build_path = config.out_path()
|
build_path = config.out_path()
|
||||||
progress_path = build_path / "progress.json"
|
progress_path = build_path / "progress.json"
|
||||||
|
report_path = build_path / "report.json"
|
||||||
build_tools_path = config.build_dir / "tools"
|
build_tools_path = config.build_dir / "tools"
|
||||||
download_tool = config.tools_dir / "download_tool.py"
|
download_tool = config.tools_dir / "download_tool.py"
|
||||||
n.rule(
|
n.rule(
|
||||||
|
@ -248,17 +319,27 @@ def generate_build_ninja(
|
||||||
deps="gcc",
|
deps="gcc",
|
||||||
)
|
)
|
||||||
|
|
||||||
if config.dtk_path is not None and config.dtk_path.is_file():
|
cargo_rule_written = False
|
||||||
dtk = config.dtk_path
|
|
||||||
elif config.dtk_path is not None:
|
def write_cargo_rule():
|
||||||
dtk = build_tools_path / "release" / f"dtk{EXE}"
|
nonlocal cargo_rule_written
|
||||||
|
if not cargo_rule_written:
|
||||||
|
n.pool("cargo", 1)
|
||||||
n.rule(
|
n.rule(
|
||||||
name="cargo",
|
name="cargo",
|
||||||
command="cargo build --release --manifest-path $in --bin $bin --target-dir $target",
|
command="cargo build --release --manifest-path $in --bin $bin --target-dir $target",
|
||||||
description="CARGO $bin",
|
description="CARGO $bin",
|
||||||
|
pool="cargo",
|
||||||
depfile=Path("$target") / "release" / "$bin.d",
|
depfile=Path("$target") / "release" / "$bin.d",
|
||||||
deps="gcc",
|
deps="gcc",
|
||||||
)
|
)
|
||||||
|
cargo_rule_written = True
|
||||||
|
|
||||||
|
if config.dtk_path is not None and config.dtk_path.is_file():
|
||||||
|
dtk = config.dtk_path
|
||||||
|
elif config.dtk_path is not None:
|
||||||
|
dtk = build_tools_path / "release" / f"dtk{EXE}"
|
||||||
|
write_cargo_rule()
|
||||||
n.build(
|
n.build(
|
||||||
outputs=dtk,
|
outputs=dtk,
|
||||||
rule="cargo",
|
rule="cargo",
|
||||||
|
@ -283,6 +364,35 @@ def generate_build_ninja(
|
||||||
else:
|
else:
|
||||||
sys.exit("ProjectConfig.dtk_tag missing")
|
sys.exit("ProjectConfig.dtk_tag missing")
|
||||||
|
|
||||||
|
if config.objdiff_path is not None and config.objdiff_path.is_file():
|
||||||
|
objdiff = config.objdiff_path
|
||||||
|
elif config.objdiff_path is not None:
|
||||||
|
objdiff = build_tools_path / "release" / f"objdiff-cli{EXE}"
|
||||||
|
write_cargo_rule()
|
||||||
|
n.build(
|
||||||
|
outputs=objdiff,
|
||||||
|
rule="cargo",
|
||||||
|
inputs=config.objdiff_path / "Cargo.toml",
|
||||||
|
implicit=config.objdiff_path / "Cargo.lock",
|
||||||
|
variables={
|
||||||
|
"bin": "objdiff-cli",
|
||||||
|
"target": build_tools_path,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
elif config.objdiff_tag:
|
||||||
|
objdiff = build_tools_path / f"objdiff-cli{EXE}"
|
||||||
|
n.build(
|
||||||
|
outputs=objdiff,
|
||||||
|
rule="download_tool",
|
||||||
|
implicit=download_tool,
|
||||||
|
variables={
|
||||||
|
"tool": "objdiff-cli",
|
||||||
|
"tag": config.objdiff_tag,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
sys.exit("ProjectConfig.objdiff_tag missing")
|
||||||
|
|
||||||
if config.sjiswrap_path:
|
if config.sjiswrap_path:
|
||||||
sjiswrap = config.sjiswrap_path
|
sjiswrap = config.sjiswrap_path
|
||||||
elif config.sjiswrap_tag:
|
elif config.sjiswrap_tag:
|
||||||
|
@ -361,6 +471,17 @@ def generate_build_ninja(
|
||||||
|
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
|
###
|
||||||
|
# Helper rule for downloading all tools
|
||||||
|
###
|
||||||
|
n.comment("Download all tools")
|
||||||
|
n.build(
|
||||||
|
outputs="tools",
|
||||||
|
rule="phony",
|
||||||
|
inputs=[dtk, sjiswrap, wrapper, compilers, binutils, objdiff],
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
###
|
###
|
||||||
# Build rules
|
# Build rules
|
||||||
###
|
###
|
||||||
|
@ -443,6 +564,49 @@ def generate_build_ninja(
|
||||||
)
|
)
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
|
if len(config.custom_build_rules or {}) > 0:
|
||||||
|
n.comment("Custom project build rules (pre/post-processing)")
|
||||||
|
for rule in config.custom_build_rules or {}:
|
||||||
|
n.rule(
|
||||||
|
name=cast(str, rule.get("name")),
|
||||||
|
command=cast(str, rule.get("command")),
|
||||||
|
description=rule.get("description", None),
|
||||||
|
depfile=rule.get("depfile", None),
|
||||||
|
generator=rule.get("generator", False),
|
||||||
|
pool=rule.get("pool", None),
|
||||||
|
restat=rule.get("restat", False),
|
||||||
|
rspfile=rule.get("rspfile", None),
|
||||||
|
rspfile_content=rule.get("rspfile_content", None),
|
||||||
|
deps=rule.get("deps", None),
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
|
||||||
|
def write_custom_step(step: str) -> List[str | Path]:
|
||||||
|
implicit: List[str | Path] = []
|
||||||
|
if config.custom_build_steps and step in config.custom_build_steps:
|
||||||
|
n.comment(f"Custom build steps ({step})")
|
||||||
|
for custom_step in config.custom_build_steps[step]:
|
||||||
|
outputs = cast(List[str | Path], custom_step.get("outputs"))
|
||||||
|
|
||||||
|
if isinstance(outputs, list):
|
||||||
|
implicit.extend(outputs)
|
||||||
|
else:
|
||||||
|
implicit.append(outputs)
|
||||||
|
|
||||||
|
n.build(
|
||||||
|
outputs=outputs,
|
||||||
|
rule=cast(str, custom_step.get("rule")),
|
||||||
|
inputs=custom_step.get("inputs", None),
|
||||||
|
implicit=custom_step.get("implicit", None),
|
||||||
|
order_only=custom_step.get("order_only", None),
|
||||||
|
variables=custom_step.get("variables", None),
|
||||||
|
implicit_outputs=custom_step.get("implicit_outputs", None),
|
||||||
|
pool=custom_step.get("pool", None),
|
||||||
|
dyndep=custom_step.get("dyndep", None),
|
||||||
|
)
|
||||||
|
n.newline()
|
||||||
|
return implicit
|
||||||
|
|
||||||
n.comment("Host build")
|
n.comment("Host build")
|
||||||
n.variable("host_cflags", "-I include -Wno-trigraphs")
|
n.variable("host_cflags", "-I include -Wno-trigraphs")
|
||||||
n.variable(
|
n.variable(
|
||||||
|
@ -461,14 +625,13 @@ def generate_build_ninja(
|
||||||
)
|
)
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
|
# Add all build steps needed before we compile (e.g. processing assets)
|
||||||
|
precompile_implicit = write_custom_step("pre-compile")
|
||||||
|
|
||||||
###
|
###
|
||||||
# Source files
|
# Source files
|
||||||
###
|
###
|
||||||
n.comment("Source files")
|
n.comment("Source files")
|
||||||
build_asm_path = build_path / "mod"
|
|
||||||
build_src_path = build_path / "src"
|
|
||||||
build_host_path = build_path / "host"
|
|
||||||
build_config_path = build_path / "config.json"
|
|
||||||
|
|
||||||
def map_path(path: Path) -> Path:
|
def map_path(path: Path) -> Path:
|
||||||
return path.parent / (path.name + ".MAP")
|
return path.parent / (path.name + ".MAP")
|
||||||
|
@ -511,16 +674,15 @@ def generate_build_ninja(
|
||||||
outputs=elf_path,
|
outputs=elf_path,
|
||||||
rule="link",
|
rule="link",
|
||||||
inputs=self.inputs,
|
inputs=self.inputs,
|
||||||
implicit=[self.ldscript, *mwld_implicit],
|
implicit=[
|
||||||
|
*precompile_implicit,
|
||||||
|
self.ldscript,
|
||||||
|
*mwld_implicit,
|
||||||
|
*postcompile_implicit,
|
||||||
|
],
|
||||||
implicit_outputs=elf_map,
|
implicit_outputs=elf_map,
|
||||||
variables={"ldflags": elf_ldflags},
|
variables={"ldflags": elf_ldflags},
|
||||||
)
|
)
|
||||||
n.build(
|
|
||||||
outputs=dol_path,
|
|
||||||
rule="elf2dol",
|
|
||||||
inputs=elf_path,
|
|
||||||
implicit=dtk,
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
preplf_path = build_path / self.name / f"{self.name}.preplf"
|
preplf_path = build_path / self.name / f"{self.name}.preplf"
|
||||||
plf_path = build_path / self.name / f"{self.name}.plf"
|
plf_path = build_path / self.name / f"{self.name}.plf"
|
||||||
|
@ -565,95 +727,85 @@ def generate_build_ninja(
|
||||||
host_source_inputs: List[Path] = []
|
host_source_inputs: List[Path] = []
|
||||||
source_added: Set[Path] = set()
|
source_added: Set[Path] = set()
|
||||||
|
|
||||||
def c_build(
|
def c_build(obj: Object, src_path: Path) -> Optional[Path]:
|
||||||
obj: Object, options: Dict[str, Any], lib_name: str, src_path: Path
|
cflags_str = make_flags_str(obj.options["cflags"])
|
||||||
) -> Optional[Path]:
|
if obj.options["extra_cflags"] is not None:
|
||||||
cflags_str = make_flags_str(options["cflags"])
|
extra_cflags_str = make_flags_str(obj.options["extra_cflags"])
|
||||||
if options["extra_cflags"] is not None:
|
|
||||||
extra_cflags_str = make_flags_str(options["extra_cflags"])
|
|
||||||
cflags_str += " " + extra_cflags_str
|
cflags_str += " " + extra_cflags_str
|
||||||
used_compiler_versions.add(options["mw_version"])
|
used_compiler_versions.add(obj.options["mw_version"])
|
||||||
|
|
||||||
src_obj_path = build_src_path / f"{obj.base_name}.o"
|
|
||||||
src_base_path = build_src_path / obj.base_name
|
|
||||||
|
|
||||||
# Avoid creating duplicate build rules
|
# Avoid creating duplicate build rules
|
||||||
if src_obj_path in source_added:
|
if obj.src_obj_path is None or obj.src_obj_path in source_added:
|
||||||
return src_obj_path
|
return obj.src_obj_path
|
||||||
source_added.add(src_obj_path)
|
source_added.add(obj.src_obj_path)
|
||||||
|
|
||||||
shift_jis = options["shift_jis"]
|
|
||||||
if shift_jis is None:
|
|
||||||
shift_jis = config.shift_jis
|
|
||||||
|
|
||||||
# Add MWCC build rule
|
# Add MWCC build rule
|
||||||
|
lib_name = obj.options["lib"]
|
||||||
n.comment(f"{obj.name}: {lib_name} (linked {obj.completed})")
|
n.comment(f"{obj.name}: {lib_name} (linked {obj.completed})")
|
||||||
n.build(
|
n.build(
|
||||||
outputs=src_obj_path,
|
outputs=obj.src_obj_path,
|
||||||
rule="mwcc_sjis" if shift_jis else "mwcc",
|
rule="mwcc_sjis" if obj.options["shift_jis"] else "mwcc",
|
||||||
inputs=src_path,
|
inputs=src_path,
|
||||||
variables={
|
variables={
|
||||||
"mw_version": Path(options["mw_version"]),
|
"mw_version": Path(obj.options["mw_version"]),
|
||||||
"cflags": cflags_str,
|
"cflags": cflags_str,
|
||||||
"basedir": os.path.dirname(src_base_path),
|
"basedir": os.path.dirname(obj.src_obj_path),
|
||||||
"basefile": src_base_path,
|
"basefile": obj.src_obj_path.with_suffix(""),
|
||||||
},
|
},
|
||||||
implicit=mwcc_sjis_implicit if shift_jis else mwcc_implicit,
|
implicit=(
|
||||||
|
mwcc_sjis_implicit if obj.options["shift_jis"] else mwcc_implicit
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Add ctx build rule
|
# Add ctx build rule
|
||||||
ctx_path = build_src_path / f"{obj.base_name}.ctx"
|
if obj.ctx_path is not None:
|
||||||
n.build(
|
n.build(
|
||||||
outputs=ctx_path,
|
outputs=obj.ctx_path,
|
||||||
rule="decompctx",
|
rule="decompctx",
|
||||||
inputs=src_path,
|
inputs=src_path,
|
||||||
implicit=decompctx,
|
implicit=decompctx,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Add host build rule
|
# Add host build rule
|
||||||
if options.get("host", False):
|
if obj.options["host"] and obj.host_obj_path is not None:
|
||||||
host_obj_path = build_host_path / f"{obj.base_name}.o"
|
|
||||||
host_base_path = build_host_path / obj.base_name
|
|
||||||
n.build(
|
n.build(
|
||||||
outputs=host_obj_path,
|
outputs=obj.host_obj_path,
|
||||||
rule="host_cc" if src_path.suffix == ".c" else "host_cpp",
|
rule="host_cc" if src_path.suffix == ".c" else "host_cpp",
|
||||||
inputs=src_path,
|
inputs=src_path,
|
||||||
variables={
|
variables={
|
||||||
"basedir": os.path.dirname(host_base_path),
|
"basedir": os.path.dirname(obj.host_obj_path),
|
||||||
"basefile": host_base_path,
|
"basefile": obj.host_obj_path.with_suffix(""),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
if options["add_to_all"]:
|
if obj.options["add_to_all"]:
|
||||||
host_source_inputs.append(host_obj_path)
|
host_source_inputs.append(obj.host_obj_path)
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
if options["add_to_all"]:
|
if obj.options["add_to_all"]:
|
||||||
source_inputs.append(src_obj_path)
|
source_inputs.append(obj.src_obj_path)
|
||||||
|
|
||||||
return src_obj_path
|
return obj.src_obj_path
|
||||||
|
|
||||||
def asm_build(
|
def asm_build(
|
||||||
obj: Object, options: Dict[str, Any], lib_name: str, src_path: Path
|
obj: Object, src_path: Path, obj_path: Optional[Path]
|
||||||
) -> Optional[Path]:
|
) -> Optional[Path]:
|
||||||
asflags = options["asflags"] or config.asflags
|
if obj.options["asflags"] is None:
|
||||||
if asflags is None:
|
|
||||||
sys.exit("ProjectConfig.asflags missing")
|
sys.exit("ProjectConfig.asflags missing")
|
||||||
asflags_str = make_flags_str(asflags)
|
asflags_str = make_flags_str(obj.options["asflags"])
|
||||||
if options["extra_asflags"] is not None:
|
if obj.options["extra_asflags"] is not None:
|
||||||
extra_asflags_str = make_flags_str(options["extra_asflags"])
|
extra_asflags_str = make_flags_str(obj.options["extra_asflags"])
|
||||||
asflags_str += " " + extra_asflags_str
|
asflags_str += " " + extra_asflags_str
|
||||||
|
|
||||||
asm_obj_path = build_asm_path / f"{obj.base_name}.o"
|
|
||||||
|
|
||||||
# Avoid creating duplicate build rules
|
# Avoid creating duplicate build rules
|
||||||
if asm_obj_path in source_added:
|
if obj_path is None or obj_path in source_added:
|
||||||
return asm_obj_path
|
return obj_path
|
||||||
source_added.add(asm_obj_path)
|
source_added.add(obj_path)
|
||||||
|
|
||||||
# Add assembler build rule
|
# Add assembler build rule
|
||||||
|
lib_name = obj.options["lib"]
|
||||||
n.comment(f"{obj.name}: {lib_name} (linked {obj.completed})")
|
n.comment(f"{obj.name}: {lib_name} (linked {obj.completed})")
|
||||||
n.build(
|
n.build(
|
||||||
outputs=asm_obj_path,
|
outputs=obj_path,
|
||||||
rule="as",
|
rule="as",
|
||||||
inputs=src_path,
|
inputs=src_path,
|
||||||
variables={"asflags": asflags_str},
|
variables={"asflags": asflags_str},
|
||||||
|
@ -661,57 +813,40 @@ def generate_build_ninja(
|
||||||
)
|
)
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
if options["add_to_all"]:
|
if obj.options["add_to_all"]:
|
||||||
source_inputs.append(asm_obj_path)
|
source_inputs.append(obj_path)
|
||||||
|
|
||||||
return asm_obj_path
|
return obj_path
|
||||||
|
|
||||||
def add_unit(build_obj, link_step: LinkStep):
|
def add_unit(build_obj, link_step: LinkStep):
|
||||||
obj_path, obj_name = build_obj["object"], build_obj["name"]
|
obj_path, obj_name = build_obj["object"], build_obj["name"]
|
||||||
result = config.find_object(obj_name)
|
obj = objects.get(obj_name)
|
||||||
if not result:
|
if obj is None:
|
||||||
if config.warn_missing_config and not build_obj["autogenerated"]:
|
if config.warn_missing_config and not build_obj["autogenerated"]:
|
||||||
print(f"Missing configuration for {obj_name}")
|
print(f"Missing configuration for {obj_name}")
|
||||||
link_step.add(obj_path)
|
link_step.add(obj_path)
|
||||||
return
|
return
|
||||||
|
|
||||||
lib, obj = result
|
|
||||||
lib_name = lib["lib"]
|
|
||||||
|
|
||||||
# Use object options, then library options
|
|
||||||
options = lib.copy()
|
|
||||||
for key, value in obj.options.items():
|
|
||||||
if value is not None or key not in options:
|
|
||||||
options[key] = value
|
|
||||||
|
|
||||||
unit_src_path = Path(lib.get("src_dir", config.src_dir)) / options["source"]
|
|
||||||
|
|
||||||
unit_asm_path: Optional[Path] = None
|
|
||||||
if config.asm_dir is not None:
|
|
||||||
unit_asm_path = (
|
|
||||||
Path(lib.get("asm_dir", config.asm_dir)) / options["source"]
|
|
||||||
).with_suffix(".s")
|
|
||||||
|
|
||||||
link_built_obj = obj.completed
|
link_built_obj = obj.completed
|
||||||
built_obj_path: Optional[Path] = None
|
built_obj_path: Optional[Path] = None
|
||||||
if unit_src_path.exists():
|
if obj.src_path is not None and obj.src_path.exists():
|
||||||
if unit_src_path.suffix in (".c", ".cp", ".cpp"):
|
if obj.src_path.suffix in (".c", ".cp", ".cpp"):
|
||||||
# Add MWCC & host build rules
|
# Add MWCC & host build rules
|
||||||
built_obj_path = c_build(obj, options, lib_name, unit_src_path)
|
built_obj_path = c_build(obj, obj.src_path)
|
||||||
elif unit_src_path.suffix == ".s":
|
elif obj.src_path.suffix == ".s":
|
||||||
# Add assembler build rule
|
# Add assembler build rule
|
||||||
built_obj_path = asm_build(obj, options, lib_name, unit_src_path)
|
built_obj_path = asm_build(obj, obj.src_path, obj.src_obj_path)
|
||||||
else:
|
else:
|
||||||
sys.exit(f"Unknown source file type {unit_src_path}")
|
sys.exit(f"Unknown source file type {obj.src_path}")
|
||||||
else:
|
else:
|
||||||
if config.warn_missing_source or obj.completed:
|
if config.warn_missing_source or obj.completed:
|
||||||
print(f"Missing source file {unit_src_path}")
|
print(f"Missing source file {obj.src_path}")
|
||||||
link_built_obj = False
|
link_built_obj = False
|
||||||
|
|
||||||
# Assembly overrides
|
# Assembly overrides
|
||||||
if unit_asm_path is not None and unit_asm_path.exists():
|
if obj.asm_path is not None and obj.asm_path.exists():
|
||||||
link_built_obj = True
|
link_built_obj = True
|
||||||
built_obj_path = asm_build(obj, options, lib_name, unit_asm_path)
|
built_obj_path = asm_build(obj, obj.asm_path, obj.asm_obj_path)
|
||||||
|
|
||||||
if link_built_obj and built_obj_path is not None:
|
if link_built_obj and built_obj_path is not None:
|
||||||
# Use the source-built object
|
# Use the source-built object
|
||||||
|
@ -720,7 +855,10 @@ def generate_build_ninja(
|
||||||
# Use the original (extracted) object
|
# Use the original (extracted) object
|
||||||
link_step.add(obj_path)
|
link_step.add(obj_path)
|
||||||
else:
|
else:
|
||||||
sys.exit(f"Missing object for {obj_name}: {unit_src_path} {lib} {obj}")
|
lib_name = obj.options["lib"]
|
||||||
|
sys.exit(
|
||||||
|
f"Missing object for {obj_name}: {obj.src_path} {lib_name} {obj}"
|
||||||
|
)
|
||||||
|
|
||||||
# Add DOL link step
|
# Add DOL link step
|
||||||
link_step = LinkStep(build_config)
|
link_step = LinkStep(build_config)
|
||||||
|
@ -736,7 +874,7 @@ def generate_build_ninja(
|
||||||
add_unit(unit, module_link_step)
|
add_unit(unit, module_link_step)
|
||||||
# Add empty object to empty RELs
|
# Add empty object to empty RELs
|
||||||
if len(module_link_step.inputs) == 0:
|
if len(module_link_step.inputs) == 0:
|
||||||
if not config.rel_empty_file:
|
if config.rel_empty_file is None:
|
||||||
sys.exit("ProjectConfig.rel_empty_file missing")
|
sys.exit("ProjectConfig.rel_empty_file missing")
|
||||||
add_unit(
|
add_unit(
|
||||||
{
|
{
|
||||||
|
@ -760,6 +898,9 @@ def generate_build_ninja(
|
||||||
if config.compilers_path and not os.path.exists(mw_path):
|
if config.compilers_path and not os.path.exists(mw_path):
|
||||||
sys.exit(f"Linker {mw_path} does not exist")
|
sys.exit(f"Linker {mw_path} does not exist")
|
||||||
|
|
||||||
|
# Add all build steps needed before we link and after compiling objects
|
||||||
|
postcompile_implicit = write_custom_step("post-compile")
|
||||||
|
|
||||||
###
|
###
|
||||||
# Link
|
# Link
|
||||||
###
|
###
|
||||||
|
@ -768,6 +909,19 @@ def generate_build_ninja(
|
||||||
link_outputs.append(step.output())
|
link_outputs.append(step.output())
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
|
# Add all build steps needed after linking and before GC/Wii native format generation
|
||||||
|
postlink_implicit = write_custom_step("post-link")
|
||||||
|
|
||||||
|
###
|
||||||
|
# Generate DOL
|
||||||
|
###
|
||||||
|
n.build(
|
||||||
|
outputs=link_steps[0].output(),
|
||||||
|
rule="elf2dol",
|
||||||
|
inputs=link_steps[0].partial_output(),
|
||||||
|
implicit=[*postlink_implicit, dtk],
|
||||||
|
)
|
||||||
|
|
||||||
###
|
###
|
||||||
# Generate RELs
|
# Generate RELs
|
||||||
###
|
###
|
||||||
|
@ -782,7 +936,7 @@ def generate_build_ninja(
|
||||||
rspfile="$rspfile",
|
rspfile="$rspfile",
|
||||||
rspfile_content="$in_newline",
|
rspfile_content="$in_newline",
|
||||||
)
|
)
|
||||||
generated_rels = []
|
generated_rels: List[str] = []
|
||||||
for idx, link in enumerate(build_config["links"]):
|
for idx, link in enumerate(build_config["links"]):
|
||||||
# Map module names to link steps
|
# Map module names to link steps
|
||||||
link_steps_local = list(
|
link_steps_local = list(
|
||||||
|
@ -830,6 +984,9 @@ def generate_build_ninja(
|
||||||
)
|
)
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
|
# Add all build steps needed post-build (re-building archives and such)
|
||||||
|
postbuild_implicit = write_custom_step("post-build")
|
||||||
|
|
||||||
###
|
###
|
||||||
# Helper rule for building all source files
|
# Helper rule for building all source files
|
||||||
###
|
###
|
||||||
|
@ -867,7 +1024,7 @@ def generate_build_ninja(
|
||||||
outputs=ok_path,
|
outputs=ok_path,
|
||||||
rule="check",
|
rule="check",
|
||||||
inputs=config.check_sha_path,
|
inputs=config.check_sha_path,
|
||||||
implicit=[dtk, *link_outputs],
|
implicit=[dtk, *link_outputs, *postbuild_implicit],
|
||||||
)
|
)
|
||||||
n.newline()
|
n.newline()
|
||||||
|
|
||||||
|
@ -886,6 +1043,22 @@ def generate_build_ninja(
|
||||||
implicit=[ok_path, configure_script, python_lib, config.config_path],
|
implicit=[ok_path, configure_script, python_lib, config.config_path],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
###
|
||||||
|
# Generate progress report
|
||||||
|
###
|
||||||
|
n.comment("Generate progress report")
|
||||||
|
n.rule(
|
||||||
|
name="report",
|
||||||
|
command=f"{objdiff} report generate -o $out",
|
||||||
|
description="REPORT",
|
||||||
|
)
|
||||||
|
report_implicit: List[str | Path] = [objdiff, "all_source"]
|
||||||
|
n.build(
|
||||||
|
outputs=report_path,
|
||||||
|
rule="report",
|
||||||
|
implicit=report_implicit,
|
||||||
|
)
|
||||||
|
|
||||||
###
|
###
|
||||||
# Helper tools
|
# Helper tools
|
||||||
###
|
###
|
||||||
|
@ -932,6 +1105,7 @@ def generate_build_ninja(
|
||||||
###
|
###
|
||||||
# Split DOL
|
# Split DOL
|
||||||
###
|
###
|
||||||
|
build_config_path = build_path / "config.json"
|
||||||
n.comment("Split DOL into relocatable objects")
|
n.comment("Split DOL into relocatable objects")
|
||||||
n.rule(
|
n.rule(
|
||||||
name="split",
|
name="split",
|
||||||
|
@ -967,7 +1141,7 @@ def generate_build_ninja(
|
||||||
configure_script,
|
configure_script,
|
||||||
python_lib,
|
python_lib,
|
||||||
python_lib_dir / "ninja_syntax.py",
|
python_lib_dir / "ninja_syntax.py",
|
||||||
*(config.reconfig_deps or [])
|
*(config.reconfig_deps or []),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
n.newline()
|
n.newline()
|
||||||
|
@ -992,13 +1166,15 @@ def generate_build_ninja(
|
||||||
|
|
||||||
# Generate objdiff.json
|
# Generate objdiff.json
|
||||||
def generate_objdiff_config(
|
def generate_objdiff_config(
|
||||||
config: ProjectConfig, build_config: Optional[Dict[str, Any]]
|
config: ProjectConfig,
|
||||||
|
objects: Dict[str, Object],
|
||||||
|
build_config: Optional[Dict[str, Any]],
|
||||||
) -> None:
|
) -> None:
|
||||||
if not build_config:
|
if build_config is None:
|
||||||
return
|
return
|
||||||
|
|
||||||
objdiff_config: Dict[str, Any] = {
|
objdiff_config: Dict[str, Any] = {
|
||||||
"min_version": "1.0.0",
|
"min_version": "2.0.0-beta.5",
|
||||||
"custom_make": "ninja",
|
"custom_make": "ninja",
|
||||||
"build_target": False,
|
"build_target": False,
|
||||||
"watch_patterns": [
|
"watch_patterns": [
|
||||||
|
@ -1014,6 +1190,7 @@ def generate_objdiff_config(
|
||||||
"*.json",
|
"*.json",
|
||||||
],
|
],
|
||||||
"units": [],
|
"units": [],
|
||||||
|
"progress_categories": [],
|
||||||
}
|
}
|
||||||
|
|
||||||
# decomp.me compiler name mapping
|
# decomp.me compiler name mapping
|
||||||
|
@ -1024,6 +1201,7 @@ def generate_objdiff_config(
|
||||||
"GC/1.2.5": "mwcc_233_163",
|
"GC/1.2.5": "mwcc_233_163",
|
||||||
"GC/1.2.5e": "mwcc_233_163e",
|
"GC/1.2.5e": "mwcc_233_163e",
|
||||||
"GC/1.2.5n": "mwcc_233_163n",
|
"GC/1.2.5n": "mwcc_233_163n",
|
||||||
|
"GC/1.3": "mwcc_242_53",
|
||||||
"GC/1.3.2": "mwcc_242_81",
|
"GC/1.3.2": "mwcc_242_81",
|
||||||
"GC/1.3.2r": "mwcc_242_81r",
|
"GC/1.3.2r": "mwcc_242_81r",
|
||||||
"GC/2.0": "mwcc_247_92",
|
"GC/2.0": "mwcc_247_92",
|
||||||
|
@ -1048,44 +1226,30 @@ def generate_objdiff_config(
|
||||||
"Wii/1.7": "mwcc_43_213",
|
"Wii/1.7": "mwcc_43_213",
|
||||||
}
|
}
|
||||||
|
|
||||||
build_path = config.out_path()
|
def add_unit(
|
||||||
|
build_obj: Dict[str, Any], module_name: str, progress_categories: List[str]
|
||||||
def add_unit(build_obj: Dict[str, Any], module_name: str) -> None:
|
) -> None:
|
||||||
if build_obj["autogenerated"]:
|
|
||||||
# Skip autogenerated objects
|
|
||||||
return
|
|
||||||
|
|
||||||
obj_path, obj_name = build_obj["object"], build_obj["name"]
|
obj_path, obj_name = build_obj["object"], build_obj["name"]
|
||||||
base_object = Path(obj_name).with_suffix("")
|
base_object = Path(obj_name).with_suffix("")
|
||||||
unit_config: Dict[str, Any] = {
|
unit_config: Dict[str, Any] = {
|
||||||
"name": Path(module_name) / base_object,
|
"name": Path(module_name) / base_object,
|
||||||
"target_path": obj_path,
|
"target_path": obj_path,
|
||||||
|
"metadata": {
|
||||||
|
"auto_generated": build_obj["autogenerated"],
|
||||||
|
"progress_categories": progress_categories,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
result = config.find_object(obj_name)
|
obj = objects.get(obj_name)
|
||||||
if not result:
|
if obj is None:
|
||||||
objdiff_config["units"].append(unit_config)
|
objdiff_config["units"].append(unit_config)
|
||||||
return
|
return
|
||||||
|
|
||||||
lib, obj = result
|
src_exists = obj.src_path is not None and obj.src_path.exists()
|
||||||
src_dir = Path(lib.get("src_dir", config.src_dir))
|
if src_exists:
|
||||||
|
unit_config["base_path"] = obj.src_obj_path
|
||||||
# Use object options, then library options
|
|
||||||
options = lib.copy()
|
|
||||||
for key, value in obj.options.items():
|
|
||||||
if value is not None or key not in options:
|
|
||||||
options[key] = value
|
|
||||||
|
|
||||||
unit_src_path = src_dir / str(options["source"])
|
|
||||||
|
|
||||||
if not unit_src_path.exists():
|
|
||||||
objdiff_config["units"].append(unit_config)
|
|
||||||
return
|
|
||||||
|
|
||||||
cflags = options["cflags"]
|
|
||||||
src_obj_path = build_path / "src" / f"{obj.base_name}.o"
|
|
||||||
src_ctx_path = build_path / "src" / f"{obj.base_name}.ctx"
|
|
||||||
|
|
||||||
|
cflags = obj.options["cflags"]
|
||||||
reverse_fn_order = False
|
reverse_fn_order = False
|
||||||
if type(cflags) is list:
|
if type(cflags) is list:
|
||||||
for flag in cflags:
|
for flag in cflags:
|
||||||
|
@ -1104,35 +1268,86 @@ def generate_objdiff_config(
|
||||||
cflags = list(filter(keep_flag, cflags))
|
cflags = list(filter(keep_flag, cflags))
|
||||||
|
|
||||||
# Add appropriate lang flag
|
# Add appropriate lang flag
|
||||||
if unit_src_path.suffix in (".cp", ".cpp"):
|
if obj.src_path is not None and not any(
|
||||||
|
flag.startswith("-lang") for flag in cflags
|
||||||
|
):
|
||||||
|
if obj.src_path.suffix in (".cp", ".cpp"):
|
||||||
cflags.insert(0, "-lang=c++")
|
cflags.insert(0, "-lang=c++")
|
||||||
else:
|
else:
|
||||||
cflags.insert(0, "-lang=c")
|
cflags.insert(0, "-lang=c")
|
||||||
|
|
||||||
unit_config["base_path"] = src_obj_path
|
compiler_version = COMPILER_MAP.get(obj.options["mw_version"])
|
||||||
unit_config["reverse_fn_order"] = reverse_fn_order
|
|
||||||
unit_config["complete"] = obj.completed
|
|
||||||
compiler_version = COMPILER_MAP.get(options["mw_version"])
|
|
||||||
if compiler_version is None:
|
if compiler_version is None:
|
||||||
print(f"Missing scratch compiler mapping for {options['mw_version']}")
|
print(f"Missing scratch compiler mapping for {obj.options['mw_version']}")
|
||||||
else:
|
else:
|
||||||
|
cflags_str = make_flags_str(cflags)
|
||||||
|
if obj.options["extra_cflags"] is not None:
|
||||||
|
extra_cflags_str = make_flags_str(obj.options["extra_cflags"])
|
||||||
|
cflags_str += " " + extra_cflags_str
|
||||||
unit_config["scratch"] = {
|
unit_config["scratch"] = {
|
||||||
"platform": "gc_wii",
|
"platform": "gc_wii",
|
||||||
"compiler": compiler_version,
|
"compiler": compiler_version,
|
||||||
"c_flags": make_flags_str(cflags),
|
"c_flags": cflags_str,
|
||||||
"ctx_path": src_ctx_path,
|
}
|
||||||
|
if src_exists:
|
||||||
|
unit_config["scratch"].update(
|
||||||
|
{
|
||||||
|
"ctx_path": obj.ctx_path,
|
||||||
"build_ctx": True,
|
"build_ctx": True,
|
||||||
}
|
}
|
||||||
|
)
|
||||||
|
category_opt: List[str] | str = obj.options["progress_category"]
|
||||||
|
if isinstance(category_opt, list):
|
||||||
|
progress_categories.extend(category_opt)
|
||||||
|
elif category_opt is not None:
|
||||||
|
progress_categories.append(category_opt)
|
||||||
|
unit_config["metadata"].update(
|
||||||
|
{
|
||||||
|
"complete": obj.completed,
|
||||||
|
"reverse_fn_order": reverse_fn_order,
|
||||||
|
"source_path": obj.src_path,
|
||||||
|
"progress_categories": progress_categories,
|
||||||
|
}
|
||||||
|
)
|
||||||
objdiff_config["units"].append(unit_config)
|
objdiff_config["units"].append(unit_config)
|
||||||
|
|
||||||
# Add DOL units
|
# Add DOL units
|
||||||
for unit in build_config["units"]:
|
for unit in build_config["units"]:
|
||||||
add_unit(unit, build_config["name"])
|
progress_categories = []
|
||||||
|
# Only include a "dol" category if there are any modules
|
||||||
|
# Otherwise it's redundant with the global report measures
|
||||||
|
if len(build_config["modules"]) > 0:
|
||||||
|
progress_categories.append("dol")
|
||||||
|
add_unit(unit, build_config["name"], progress_categories)
|
||||||
|
|
||||||
# Add REL units
|
# Add REL units
|
||||||
for module in build_config["modules"]:
|
for module in build_config["modules"]:
|
||||||
for unit in module["units"]:
|
for unit in module["units"]:
|
||||||
add_unit(unit, module["name"])
|
progress_categories = []
|
||||||
|
if config.progress_modules:
|
||||||
|
progress_categories.append("modules")
|
||||||
|
if config.progress_each_module:
|
||||||
|
progress_categories.append(module["name"])
|
||||||
|
add_unit(unit, module["name"], progress_categories)
|
||||||
|
|
||||||
|
# Add progress categories
|
||||||
|
def add_category(id: str, name: str):
|
||||||
|
objdiff_config["progress_categories"].append(
|
||||||
|
{
|
||||||
|
"id": id,
|
||||||
|
"name": name,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(build_config["modules"]) > 0:
|
||||||
|
add_category("dol", "DOL")
|
||||||
|
if config.progress_modules:
|
||||||
|
add_category("modules", "Modules")
|
||||||
|
if config.progress_each_module:
|
||||||
|
for module in build_config["modules"]:
|
||||||
|
add_category(module["name"], module["name"])
|
||||||
|
for category in config.progress_categories:
|
||||||
|
add_category(category.id, category.name)
|
||||||
|
|
||||||
# Write objdiff.json
|
# Write objdiff.json
|
||||||
with open("objdiff.json", "w", encoding="utf-8") as w:
|
with open("objdiff.json", "w", encoding="utf-8") as w:
|
||||||
|
@ -1145,9 +1360,11 @@ def generate_objdiff_config(
|
||||||
|
|
||||||
# Calculate, print and write progress to progress.json
|
# Calculate, print and write progress to progress.json
|
||||||
def calculate_progress(config: ProjectConfig) -> None:
|
def calculate_progress(config: ProjectConfig) -> None:
|
||||||
|
config.validate()
|
||||||
|
objects = config.objects()
|
||||||
out_path = config.out_path()
|
out_path = config.out_path()
|
||||||
build_config = load_build_config(config, out_path / "config.json")
|
build_config = load_build_config(config, out_path / "config.json")
|
||||||
if not build_config:
|
if build_config is None:
|
||||||
return
|
return
|
||||||
|
|
||||||
class ProgressUnit:
|
class ProgressUnit:
|
||||||
|
@ -1179,12 +1396,8 @@ def calculate_progress(config: ProjectConfig) -> None:
|
||||||
# Skip autogenerated objects
|
# Skip autogenerated objects
|
||||||
return
|
return
|
||||||
|
|
||||||
result = config.find_object(build_obj["name"])
|
obj = objects.get(build_obj["name"])
|
||||||
if not result:
|
if obj is None or not obj.completed:
|
||||||
return
|
|
||||||
|
|
||||||
_, obj = result
|
|
||||||
if not obj.completed:
|
|
||||||
return
|
return
|
||||||
|
|
||||||
self.code_progress += build_obj["code_size"]
|
self.code_progress += build_obj["code_size"]
|
||||||
|
@ -1198,26 +1411,52 @@ def calculate_progress(config: ProjectConfig) -> None:
|
||||||
def data_frac(self) -> float:
|
def data_frac(self) -> float:
|
||||||
return self.data_progress / self.data_total
|
return self.data_progress / self.data_total
|
||||||
|
|
||||||
|
progress_units: Dict[str, ProgressUnit] = {}
|
||||||
|
if config.progress_all:
|
||||||
|
progress_units["all"] = ProgressUnit("All")
|
||||||
|
progress_units["dol"] = ProgressUnit("DOL")
|
||||||
|
if len(build_config["modules"]) > 0:
|
||||||
|
if config.progress_modules:
|
||||||
|
progress_units["modules"] = ProgressUnit("Modules")
|
||||||
|
if len(config.progress_categories) > 0:
|
||||||
|
for category in config.progress_categories:
|
||||||
|
progress_units[category.id] = ProgressUnit(category.name)
|
||||||
|
if config.progress_each_module:
|
||||||
|
for module in build_config["modules"]:
|
||||||
|
progress_units[module["name"]] = ProgressUnit(module["name"])
|
||||||
|
|
||||||
|
def add_unit(id: str, unit: Dict[str, Any]) -> None:
|
||||||
|
progress = progress_units.get(id)
|
||||||
|
if progress is not None:
|
||||||
|
progress.add(unit)
|
||||||
|
|
||||||
# Add DOL units
|
# Add DOL units
|
||||||
all_progress = ProgressUnit("All") if config.progress_all else None
|
|
||||||
dol_progress = ProgressUnit("DOL")
|
|
||||||
for unit in build_config["units"]:
|
for unit in build_config["units"]:
|
||||||
if all_progress:
|
add_unit("all", unit)
|
||||||
all_progress.add(unit)
|
add_unit("dol", unit)
|
||||||
dol_progress.add(unit)
|
obj = objects.get(unit["name"])
|
||||||
|
if obj is not None:
|
||||||
|
category_opt = obj.options["progress_category"]
|
||||||
|
if isinstance(category_opt, list):
|
||||||
|
for id in category_opt:
|
||||||
|
add_unit(id, unit)
|
||||||
|
elif category_opt is not None:
|
||||||
|
add_unit(category_opt, unit)
|
||||||
|
|
||||||
# Add REL units
|
# Add REL units
|
||||||
rels_progress = ProgressUnit("Modules") if config.progress_modules else None
|
|
||||||
modules_progress: List[ProgressUnit] = []
|
|
||||||
for module in build_config["modules"]:
|
for module in build_config["modules"]:
|
||||||
progress = ProgressUnit(module["name"])
|
|
||||||
modules_progress.append(progress)
|
|
||||||
for unit in module["units"]:
|
for unit in module["units"]:
|
||||||
if all_progress:
|
add_unit("all", unit)
|
||||||
all_progress.add(unit)
|
add_unit("modules", unit)
|
||||||
if rels_progress:
|
add_unit(module["name"], unit)
|
||||||
rels_progress.add(unit)
|
obj = objects.get(unit["name"])
|
||||||
progress.add(unit)
|
if obj is not None:
|
||||||
|
category_opt = obj.options["progress_category"]
|
||||||
|
if isinstance(category_opt, list):
|
||||||
|
for id in category_opt:
|
||||||
|
add_unit(id, unit)
|
||||||
|
elif category_opt is not None:
|
||||||
|
add_unit(category_opt, unit)
|
||||||
|
|
||||||
# Print human-readable progress
|
# Print human-readable progress
|
||||||
print("Progress:")
|
print("Progress:")
|
||||||
|
@ -1245,14 +1484,7 @@ def calculate_progress(config: ProjectConfig) -> None:
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
if all_progress:
|
for progress in progress_units.values():
|
||||||
print_category(all_progress)
|
|
||||||
print_category(dol_progress)
|
|
||||||
module_count = len(build_config["modules"])
|
|
||||||
if module_count > 0:
|
|
||||||
print_category(rels_progress)
|
|
||||||
if config.progress_each_module:
|
|
||||||
for progress in modules_progress:
|
|
||||||
print_category(progress)
|
print_category(progress)
|
||||||
|
|
||||||
# Generate and write progress.json
|
# Generate and write progress.json
|
||||||
|
@ -1266,14 +1498,7 @@ def calculate_progress(config: ProjectConfig) -> None:
|
||||||
"data/total": unit.data_total,
|
"data/total": unit.data_total,
|
||||||
}
|
}
|
||||||
|
|
||||||
if all_progress:
|
for id, progress in progress_units.items():
|
||||||
add_category("all", all_progress)
|
add_category(id, progress)
|
||||||
add_category("dol", dol_progress)
|
|
||||||
if len(build_config["modules"]) > 0:
|
|
||||||
if rels_progress:
|
|
||||||
add_category("modules", rels_progress)
|
|
||||||
if config.progress_each_module:
|
|
||||||
for progress in modules_progress:
|
|
||||||
add_category(progress.name, progress)
|
|
||||||
with open(out_path / "progress.json", "w", encoding="utf-8") as w:
|
with open(out_path / "progress.json", "w", encoding="utf-8") as w:
|
||||||
json.dump(progress_json, w, indent=4)
|
json.dump(progress_json, w, indent=4)
|
||||||
|
|