Implement mypy fixes across all Python files (#4)

• Type hinting added to every single file
• Path conversion is now handled natively in `ninja_syntax.py`, so `Path` objects (and lists of them) can be passed to it directly almost everywhere
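A minimal sketch of what the second bullet enables (illustration only, not code from this commit; the `tools.ninja_syntax` import path and the file names below are assumed): `Path` objects, plain strings, and `None` entries can be handed straight to `Writer`, because the new `serialize_paths()` helper stringifies and normalizes them before anything is written.

import io
from pathlib import Path

from tools.ninja_syntax import Writer, serialize_paths  # import path assumed

out = io.StringIO()
n = Writer(out)

dtk = Path("build/tools/dtk")  # hypothetical tool path
ldscript = Path("build/GAMEID/ldscript.lcf")  # hypothetical linker script path

# Paths, strings, and None entries go straight through; the path()
# pre-conversion helper that used to live in tools/project.py is gone.
n.build(
    outputs=Path("build/GAMEID/main.dol"),
    rule="elf2dol",
    inputs="build/GAMEID/main.elf",
    implicit=[dtk, ldscript, None],  # None entries are dropped by serialize_paths()
)

# serialize_paths() stringifies Path objects and flips backslashes to forward slashes.
print(serialize_paths([Path("build\\main.elf"), None]))  # ['build/main.elf']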
parent 04c8b45f93
commit 575e3b4a46
@@ -2,6 +2,7 @@ __pycache__
 .idea
 .vscode
 .ninja_*
+.mypy_cache
 *.exe
 build
 build.ninja
@@ -21,6 +21,7 @@
         "build/**/*.MAP": true,
         "build.ninja": true,
         ".ninja_*": true,
-        "objdiff.json": true
+        "objdiff.json": true,
+        ".mypy_cache": true
     }
 }
configure.py (11 lines changed)

@@ -16,6 +16,7 @@ import sys
 import argparse

 from pathlib import Path
+from typing import Dict, List, Any
 from tools.project import (
     Object,
     ProjectConfig,
@@ -27,7 +28,7 @@ from tools.project import (
 # Game versions
 DEFAULT_VERSION = 0
 VERSIONS = [
     "GAMEID",  # 0
 ]

 if len(VERSIONS) > 1:
@@ -150,7 +151,7 @@ cflags_base = [
     "-RTTI off",
     "-fp_contract on",
     "-str reuse",
     "-multibyte",  # For Wii compilers, replace with `-enc SJIS`
     "-i include",
     f"-i build/{config.version}/include",
     f"-DVERSION={version_num}",
@@ -169,7 +170,7 @@ cflags_runtime = [
     "-str reuse,pool,readonly",
     "-gccinc",
     "-common off",
     "-inline auto",
 ]

 # REL flags
@@ -183,7 +184,7 @@ config.linker_version = "GC/1.3.2"


 # Helper function for Dolphin libraries
-def DolphinLib(lib_name, objects):
+def DolphinLib(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
     return {
         "lib": lib_name,
         "mw_version": "GC/1.2.5n",
@@ -194,7 +195,7 @@ def DolphinLib(lib_name, objects):


 # Helper function for REL script objects
-def Rel(lib_name, objects):
+def Rel(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
     return {
         "lib": lib_name,
         "mw_version": "GC/1.3.2",
@@ -13,6 +13,7 @@
 import argparse
 import os
 import re
+from typing import List, Set

 script_dir = os.path.dirname(os.path.realpath(__file__))
 root_dir = os.path.abspath(os.path.join(script_dir, ".."))
@@ -20,54 +21,58 @@ src_dir = os.path.join(root_dir, "src")
 include_dir = os.path.join(root_dir, "include")

 include_pattern = re.compile(r'^#include\s*[<"](.+?)[>"]$')
-guard_pattern = re.compile(r'^#ifndef\s+(.*)$')
+guard_pattern = re.compile(r"^#ifndef\s+(.*)$")

-defines = set()
+defines: Set[str] = set()


 def import_h_file(in_file: str, r_path: str) -> str:
     rel_path = os.path.join(root_dir, r_path, in_file)
     inc_path = os.path.join(include_dir, in_file)
     if os.path.exists(rel_path):
         return import_c_file(rel_path)
     elif os.path.exists(inc_path):
         return import_c_file(inc_path)
     else:
         print("Failed to locate", in_file)
         exit(1)


-def import_c_file(in_file) -> str:
+def import_c_file(in_file: str) -> str:
     in_file = os.path.relpath(in_file, root_dir)
-    out_text = ''
+    out_text = ""

     try:
         with open(in_file, encoding="utf-8") as file:
             out_text += process_file(in_file, list(file))
     except Exception:
         with open(in_file) as file:
             out_text += process_file(in_file, list(file))
     return out_text


-def process_file(in_file: str, lines) -> str:
-    out_text = ''
+def process_file(in_file: str, lines: List[str]) -> str:
+    out_text = ""
     for idx, line in enumerate(lines):
         guard_match = guard_pattern.match(line.strip())
         if idx == 0:
             if guard_match:
                 if guard_match[1] in defines:
                     break
                 defines.add(guard_match[1])
             print("Processing file", in_file)
         include_match = include_pattern.match(line.strip())
         if include_match and not include_match[1].endswith(".s"):
-            out_text += f"/* \"{in_file}\" line {idx} \"{include_match[1]}\" */\n"
+            out_text += f'/* "{in_file}" line {idx} "{include_match[1]}" */\n'
             out_text += import_h_file(include_match[1], os.path.dirname(in_file))
-            out_text += f"/* end \"{include_match[1]}\" */\n"
+            out_text += f'/* end "{include_match[1]}" */\n'
         else:
             out_text += line

     return out_text


-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="""Create a context file which can be used for decomp.me"""
     )
@@ -18,11 +18,11 @@ import shutil
 import stat
 import urllib.request
 import zipfile
+from typing import Callable, Dict
 from pathlib import Path


-def dtk_url(tag):
+def dtk_url(tag: str) -> str:
     uname = platform.uname()
     suffix = ""
     system = uname.system.lower()
@@ -38,21 +38,21 @@ def dtk_url(tag):
     return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"


-def sjiswrap_url(tag):
+def sjiswrap_url(tag: str) -> str:
     repo = "https://github.com/encounter/sjiswrap"
     return f"{repo}/releases/download/{tag}/sjiswrap-windows-x86.exe"


-def wibo_url(tag):
+def wibo_url(tag: str) -> str:
     repo = "https://github.com/decompals/wibo"
     return f"{repo}/releases/download/{tag}/wibo"


-def compilers_url(tag):
+def compilers_url(tag: str) -> str:
     return f"https://files.decomp.dev/compilers_{tag}.zip"


-TOOLS = {
+TOOLS: Dict[str, Callable[[str], str]] = {
     "dtk": dtk_url,
     "sjiswrap": sjiswrap_url,
     "wibo": wibo_url,
@@ -60,7 +60,7 @@ TOOLS = {
 }


-def main():
+def main() -> None:
     parser = argparse.ArgumentParser()
     parser.add_argument("tool", help="Tool name")
     parser.add_argument("output", type=Path, help="output file path")
@@ -21,50 +21,67 @@ use Python.

 import re
 import textwrap
+from typing import Optional, Union, Tuple, Match, Dict, List
+from io import StringIO
+from pathlib import Path


-def escape_path(word):
+NinjaPath = Union[str, Path]
+NinjaPaths = Union[
+    List[str],
+    List[Path],
+    List[NinjaPath],
+    List[Optional[str]],
+    List[Optional[Path]],
+    List[Optional[NinjaPath]],
+]
+NinjaPathOrPaths = Union[NinjaPath, NinjaPaths]
+
+
+def escape_path(word: str) -> str:
     return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")


 class Writer(object):
-    def __init__(self, output, width=78):
+    def __init__(self, output: StringIO, width: int = 78) -> None:
         self.output = output
         self.width = width

-    def newline(self):
+    def newline(self) -> None:
         self.output.write("\n")

-    def comment(self, text):
+    def comment(self, text: str) -> None:
         for line in textwrap.wrap(
             text, self.width - 2, break_long_words=False, break_on_hyphens=False
         ):
             self.output.write("# " + line + "\n")

-    def variable(self, key, value, indent=0):
-        if value is None:
-            return
-        if isinstance(value, list):
-            value = " ".join(filter(None, value))  # Filter out empty strings.
+    def variable(
+        self,
+        key: str,
+        value: Optional[NinjaPathOrPaths],
+        indent: int = 0,
+    ) -> None:
+        value = " ".join(serialize_paths(value))
         self._line("%s = %s" % (key, value), indent)

-    def pool(self, name, depth):
+    def pool(self, name: str, depth: int) -> None:
         self._line("pool %s" % name)
-        self.variable("depth", depth, indent=1)
+        self.variable("depth", str(depth), indent=1)

     def rule(
         self,
-        name,
-        command,
-        description=None,
-        depfile=None,
-        generator=False,
-        pool=None,
-        restat=False,
-        rspfile=None,
-        rspfile_content=None,
-        deps=None,
-    ):
+        name: str,
+        command: str,
+        description: Optional[str] = None,
+        depfile: Optional[NinjaPath] = None,
+        generator: bool = False,
+        pool: Optional[str] = None,
+        restat: bool = False,
+        rspfile: Optional[NinjaPath] = None,
+        rspfile_content: Optional[NinjaPath] = None,
+        deps: Optional[NinjaPathOrPaths] = None,
+    ) -> None:
         self._line("rule %s" % name)
         self.variable("command", command, indent=1)
         if description:
@@ -86,30 +103,37 @@ class Writer(object):

     def build(
         self,
-        outputs,
-        rule,
-        inputs=None,
-        implicit=None,
-        order_only=None,
-        variables=None,
-        implicit_outputs=None,
-        pool=None,
-        dyndep=None,
-    ):
-        outputs = as_list(outputs)
+        outputs: NinjaPathOrPaths,
+        rule: str,
+        inputs: Optional[NinjaPathOrPaths] = None,
+        implicit: Optional[NinjaPathOrPaths] = None,
+        order_only: Optional[NinjaPathOrPaths] = None,
+        variables: Optional[
+            Union[
+                List[Tuple[str, Optional[NinjaPathOrPaths]]],
+                Dict[str, Optional[NinjaPathOrPaths]],
+            ]
+        ] = None,
+        implicit_outputs: Optional[NinjaPathOrPaths] = None,
+        pool: Optional[str] = None,
+        dyndep: Optional[NinjaPath] = None,
+    ) -> List[str]:
+        outputs = serialize_paths(outputs)
         out_outputs = [escape_path(x) for x in outputs]
-        all_inputs = [escape_path(x) for x in as_list(inputs)]
+        all_inputs = [escape_path(x) for x in serialize_paths(inputs)]

         if implicit:
-            implicit = [escape_path(x) for x in as_list(implicit)]
+            implicit = [escape_path(x) for x in serialize_paths(implicit)]
             all_inputs.append("|")
             all_inputs.extend(implicit)
         if order_only:
-            order_only = [escape_path(x) for x in as_list(order_only)]
+            order_only = [escape_path(x) for x in serialize_paths(order_only)]
             all_inputs.append("||")
             all_inputs.extend(order_only)
         if implicit_outputs:
-            implicit_outputs = [escape_path(x) for x in as_list(implicit_outputs)]
+            implicit_outputs = [
+                escape_path(x) for x in serialize_paths(implicit_outputs)
+            ]
             out_outputs.append("|")
             out_outputs.extend(implicit_outputs)

@@ -119,7 +143,7 @@ class Writer(object):
         if pool is not None:
             self._line(" pool = %s" % pool)
         if dyndep is not None:
-            self._line(" dyndep = %s" % dyndep)
+            self._line(" dyndep = %s" % serialize_path(dyndep))

         if variables:
             if isinstance(variables, dict):
@@ -132,16 +156,16 @@ class Writer(object):

         return outputs

-    def include(self, path):
+    def include(self, path: str) -> None:
         self._line("include %s" % path)

-    def subninja(self, path):
+    def subninja(self, path: str) -> None:
         self._line("subninja %s" % path)

-    def default(self, paths):
-        self._line("default %s" % " ".join(as_list(paths)))
+    def default(self, paths: NinjaPathOrPaths) -> None:
+        self._line("default %s" % " ".join(serialize_paths(paths)))

-    def _count_dollars_before_index(self, s, i):
+    def _count_dollars_before_index(self, s: str, i: int) -> int:
         """Returns the number of '$' characters right in front of s[i]."""
         dollar_count = 0
         dollar_index = i - 1
@@ -150,7 +174,7 @@ class Writer(object):
             dollar_index -= 1
         return dollar_count

-    def _line(self, text, indent=0):
+    def _line(self, text: str, indent: int = 0) -> None:
         """Write 'text' word-wrapped at self.width characters."""
         leading_space = " " * indent
         while len(leading_space) + len(text) > self.width:
@@ -187,19 +211,21 @@ class Writer(object):

         self.output.write(leading_space + text + "\n")

-    def close(self):
+    def close(self) -> None:
         self.output.close()


-def as_list(input):
-    if input is None:
-        return []
+def serialize_path(input: Optional[NinjaPath]) -> str:
+    return str(input).replace("\\", "/") if input else ""
+
+
+def serialize_paths(input: Optional[NinjaPathOrPaths]) -> List[str]:
     if isinstance(input, list):
-        return input
-    return [input]
+        return [serialize_path(path) for path in input if path]
+    return [serialize_path(input)] if input else []


-def escape(string):
+def escape(string: str) -> str:
     """Escape a string such that it can be embedded into a Ninja file without
     further interpretation."""
     assert "\n" not in string, "Ninja syntax does not allow newlines"
@@ -207,14 +233,14 @@ def escape(string):
     return string.replace("$", "$$")


-def expand(string, vars, local_vars={}):
+def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str:
     """Expand a string containing $vars as Ninja would.

     Note: doesn't handle the full Ninja variable syntax, but it's enough
     to make configure.py's use of it work.
     """

-    def exp(m):
+    def exp(m: Match[str]) -> str:
         var = m.group(1)
         if var == "$":
             return "$"
tools/project.py (409 lines changed)

@@ -17,6 +17,7 @@ import platform
 import sys
 import math

+from typing import Optional, Union, Tuple, Dict, List, Set, Any
 from pathlib import Path
 from . import ninja_syntax

@@ -28,51 +29,70 @@ if sys.platform == "cygwin":
     )


+class Object:
+    def __init__(self, completed: bool, name: str, **options: Any) -> None:
+        self.name = name
+        self.completed = completed
+        self.options: Dict[str, Any] = {
+            "add_to_all": True,
+            "cflags": None,
+            "extra_cflags": None,
+            "mw_version": None,
+            "shiftjis": True,
+            "source": name,
+        }
+        self.options.update(options)
+
+
 class ProjectConfig:
-    def __init__(self):
+    def __init__(self) -> None:
         # Paths
-        self.build_dir = Path("build")
-        self.src_dir = Path("src")
-        self.tools_dir = Path("tools")
+        self.build_dir: Path = Path("build")
+        self.src_dir: Path = Path("src")
+        self.tools_dir: Path = Path("tools")

         # Tooling
-        self.dtk_tag = None  # Git tag
-        self.build_dtk_path = None  # If None, download
-        self.compilers_tag = None  # 1
-        self.compilers_path = None  # If None, download
-        self.wibo_tag = None  # Git tag
-        self.wrapper = None  # If None, download wibo on Linux
-        self.sjiswrap_tag = None  # Git tag
-        self.sjiswrap_path = None  # If None, download
+        self.dtk_tag: Optional[str] = None  # Git tag
+        self.build_dtk_path: Optional[Path] = None  # If None, download
+        self.compilers_tag: Optional[str] = None  # 1
+        self.compilers_path: Optional[Path] = None  # If None, download
+        self.wibo_tag: Optional[str] = None  # Git tag
+        self.wrapper: Optional[Path] = None  # If None, download wibo on Linux
+        self.sjiswrap_tag: Optional[str] = None  # Git tag
+        self.sjiswrap_path: Optional[Path] = None  # If None, download

         # Project config
-        self.build_rels = True  # Build REL files
-        self.check_sha_path = None  # Path to version.sha1
-        self.config_path = None  # Path to config.yml
-        self.debug = False  # Build with debug info
-        self.generate_map = False  # Generate map file(s)
-        self.ldflags = None  # Linker flags
-        self.libs = None  # List of libraries
-        self.linker_version = None  # mwld version
-        self.version = None  # Version name
-        self.warn_missing_config = False  # Warn on missing unit configuration
-        self.warn_missing_source = False  # Warn on missing source file
-        self.rel_strip_partial = True  # Generate PLFs with -strip_partial
-        self.rel_empty_file = None  # Path to empty.c for generating empty RELs
+        self.build_rels: bool = True  # Build REL files
+        self.check_sha_path: Optional[Path] = None  # Path to version.sha1
+        self.config_path: Optional[Path] = None  # Path to config.yml
+        self.debug: bool = False  # Build with debug info
+        self.generate_map: bool = False  # Generate map file(s)
+        self.ldflags: Optional[List[str]] = None  # Linker flags
+        self.libs: Optional[List[Dict[str, Any]]] = None  # List of libraries
+        self.linker_version: Optional[str] = None  # mwld version
+        self.version: Optional[str] = None  # Version name
+        self.warn_missing_config: bool = False  # Warn on missing unit configuration
+        self.warn_missing_source: bool = False  # Warn on missing source file
+        self.rel_strip_partial: bool = True  # Generate PLFs with -strip_partial
+        self.rel_empty_file: Optional[
+            Path
+        ] = None  # Path to empty.c for generating empty RELs

         # Progress output and progress.json config
-        self.progress_all = True  # Include combined "all" category
-        self.progress_modules = True  # Include combined "modules" category
-        self.progress_each_module = True  # Include individual modules, disable for large numbers of modules
+        self.progress_all: bool = True  # Include combined "all" category
+        self.progress_modules: bool = True  # Include combined "modules" category
+        self.progress_each_module: bool = (
+            True  # Include individual modules, disable for large numbers of modules
+        )

         # Progress fancy printing
-        self.progress_use_fancy = False
-        self.progress_code_fancy_frac = 0
-        self.progress_code_fancy_item = ""
-        self.progress_data_fancy_frac = 0
-        self.progress_data_fancy_item = ""
+        self.progress_use_fancy: bool = False
+        self.progress_code_fancy_frac: int = 0
+        self.progress_code_fancy_item: str = ""
+        self.progress_data_fancy_frac: int = 0
+        self.progress_data_fancy_item: str = ""

-    def validate(self):
+    def validate(self) -> None:
         required_attrs = [
             "build_dir",
             "src_dir",
@@ -88,33 +108,18 @@ class ProjectConfig:
             if getattr(self, attr) is None:
                 sys.exit(f"ProjectConfig.{attr} missing")

-    def find_object(self, name):
-        for lib in self.libs:
+    def find_object(self, name: str) -> Optional[Tuple[Dict[str, Any], Object]]:
+        for lib in self.libs or {}:
             for obj in lib["objects"]:
                 if obj.name == name:
-                    return [lib, obj]
+                    return lib, obj
         return None

-    def out_path(self):
-        return self.build_dir / self.version
+    def out_path(self) -> Path:
+        return self.build_dir / str(self.version)


-class Object:
-    def __init__(self, completed, name, **options):
-        self.name = name
-        self.completed = completed
-        self.options = {
-            "add_to_all": True,
-            "cflags": None,
-            "extra_cflags": None,
-            "mw_version": None,
-            "shiftjis": True,
-            "source": name,
-        }
-        self.options.update(options)
-
-
-def is_windows():
+def is_windows() -> bool:
     return os.name == "nt"

@@ -124,36 +129,18 @@ CHAIN = "cmd /c " if is_windows() else ""
 EXE = ".exe" if is_windows() else ""


-# Replace forward slashes with backslashes on Windows
-def os_str(value):
-    return str(value).replace("/", os.sep)
-
-
-# Replace backslashes with forward slashes on Windows
-def unix_str(value):
-    return str(value).replace(os.sep, "/")
-
-
-# Stringify paths for ninja_syntax
-def path(value):
-    if value is None:
-        return None
-    elif isinstance(value, list):
-        return list(map(os_str, filter(lambda x: x is not None, value)))
-    else:
-        return [os_str(value)]
-
-
 # Load decomp-toolkit generated config.json
-def load_build_config(config, build_config_path):
+def load_build_config(
+    config: ProjectConfig, build_config_path: Path
+) -> Optional[Dict[str, Any]]:
     if not build_config_path.is_file():
         return None

-    def versiontuple(v):
+    def versiontuple(v: str) -> Tuple[int, ...]:
         return tuple(map(int, (v.split("."))))

     f = open(build_config_path, "r", encoding="utf-8")
-    build_config = json.load(f)
+    build_config: Dict[str, Any] = json.load(f)
     config_version = build_config.get("version")
     if not config_version:
         # Invalid config.json
@@ -161,7 +148,7 @@ def load_build_config(config, build_config_path):
         os.remove(build_config_path)
         return None

-    dtk_version = config.dtk_tag[1:]  # Strip v
+    dtk_version = str(config.dtk_tag)[1:]  # Strip v
     if versiontuple(config_version) < versiontuple(dtk_version):
         # Outdated config.json
         f.close()
@@ -173,14 +160,16 @@ def load_build_config(config, build_config_path):


 # Generate build.ninja and objdiff.json
-def generate_build(config):
+def generate_build(config: ProjectConfig) -> None:
     build_config = load_build_config(config, config.out_path() / "config.json")
     generate_build_ninja(config, build_config)
     generate_objdiff_config(config, build_config)


 # Generate build.ninja
-def generate_build_ninja(config, build_config):
+def generate_build_ninja(
+    config: ProjectConfig, build_config: Optional[Dict[str, Any]]
+) -> None:
     config.validate()

     out = io.StringIO()
@@ -188,9 +177,9 @@ def generate_build_ninja(config, build_config):
     n.variable("ninja_required_version", "1.3")
     n.newline()

-    configure_script = os.path.relpath(os.path.abspath(sys.argv[0]))
-    python_lib = os.path.relpath(__file__)
-    python_lib_dir = os.path.dirname(python_lib)
+    configure_script = Path(os.path.relpath(os.path.abspath(sys.argv[0])))
+    python_lib = Path(os.path.relpath(__file__))
+    python_lib_dir = python_lib.parent
     n.comment("The arguments passed to configure.py, for rerunning it.")
     n.variable("configure_args", sys.argv[1:])
     n.variable("python", f'"{sys.executable}"')
@@ -200,7 +189,7 @@ def generate_build_ninja(config, build_config):
     # Variables
     ###
     n.comment("Variables")
-    ldflags = " ".join(config.ldflags)
+    ldflags = " ".join(config.ldflags or [])
     if config.generate_map:
         ldflags += " -mapunused"
     if config.debug:
@@ -229,14 +218,14 @@ def generate_build_ninja(config, build_config):
            name="cargo",
            command="cargo build --release --manifest-path $in --bin $bin --target-dir $target",
            description="CARGO $bin",
-            depfile=path(Path("$target") / "release" / "$bin.d"),
+            depfile=Path("$target") / "release" / "$bin.d",
            deps="gcc",
         )
         n.build(
-            outputs=path(dtk),
+            outputs=dtk,
             rule="cargo",
-            inputs=path(config.build_dtk_path / "Cargo.toml"),
-            implicit=path(config.build_dtk_path / "Cargo.lock"),
+            inputs=config.build_dtk_path / "Cargo.toml",
+            implicit=config.build_dtk_path / "Cargo.lock",
             variables={
                 "bin": "dtk",
                 "target": build_tools_path,
@@ -245,9 +234,9 @@ def generate_build_ninja(config, build_config):
     elif config.dtk_tag:
         dtk = build_tools_path / f"dtk{EXE}"
         n.build(
-            outputs=path(dtk),
+            outputs=dtk,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "dtk",
                 "tag": config.dtk_tag,
@@ -261,9 +250,9 @@ def generate_build_ninja(config, build_config):
     elif config.sjiswrap_tag:
         sjiswrap = build_tools_path / "sjiswrap.exe"
         n.build(
-            outputs=path(sjiswrap),
+            outputs=sjiswrap,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "sjiswrap",
                 "tag": config.sjiswrap_tag,
@@ -274,7 +263,7 @@ def generate_build_ninja(config, build_config):

     # Only add an implicit dependency on wibo if we download it
     wrapper = config.wrapper
-    wrapper_implicit = None
+    wrapper_implicit: Optional[Path] = None
     if (
         config.wibo_tag is not None
         and sys.platform == "linux"
@@ -284,33 +273,35 @@ def generate_build_ninja(config, build_config):
         wrapper = build_tools_path / "wibo"
         wrapper_implicit = wrapper
         n.build(
-            outputs=path(wrapper),
+            outputs=wrapper,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "wibo",
                 "tag": config.wibo_tag,
             },
         )
     if not is_windows() and wrapper is None:
-        wrapper = "wine"
+        wrapper = Path("wine")
     wrapper_cmd = f"{wrapper} " if wrapper else ""

-    compilers_implicit = None
+    compilers_implicit: Optional[Path] = None
     if config.compilers_path:
         compilers = config.compilers_path
     elif config.compilers_tag:
         compilers = config.build_dir / "compilers"
         compilers_implicit = compilers
         n.build(
-            outputs=path(compilers),
+            outputs=compilers,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "compilers",
                 "tag": config.compilers_tag,
             },
         )
+    else:
+        sys.exit("ProjectConfig.compilers_tag missing")

     n.newline()

@@ -322,16 +313,16 @@ def generate_build_ninja(config, build_config):
     # MWCC
     mwcc = compiler_path / "mwcceppc.exe"
     mwcc_cmd = f"{wrapper_cmd}{mwcc} $cflags -MMD -c $in -o $basedir"
-    mwcc_implicit = [compilers_implicit or mwcc, wrapper_implicit]
+    mwcc_implicit: List[Optional[Path]] = [compilers_implicit or mwcc, wrapper_implicit]

     # MWCC with UTF-8 to Shift JIS wrapper
     mwcc_sjis_cmd = f"{wrapper_cmd}{sjiswrap} {mwcc} $cflags -MMD -c $in -o $basedir"
-    mwcc_sjis_implicit = [*mwcc_implicit, sjiswrap]
+    mwcc_sjis_implicit: List[Optional[Path]] = [*mwcc_implicit, sjiswrap]

     # MWLD
     mwld = compiler_path / "mwldeppc.exe"
     mwld_cmd = f"{wrapper_cmd}{mwld} $ldflags -o $out @$out.rsp"
-    mwld_implicit = [compilers_implicit or mwld, wrapper_implicit]
+    mwld_implicit: List[Optional[Path]] = [compilers_implicit or mwld, wrapper_implicit]

     if os.name != "nt":
         transform_dep = config.tools_dir / "transform_dep.py"
@@ -364,7 +355,7 @@ def generate_build_ninja(config, build_config):
         name="makerel",
         command=f"{dtk} rel make -w -c $config @{makerel_rsp}",
         description="REL",
-        rspfile=path(makerel_rsp),
+        rspfile=makerel_rsp,
         rspfile_content="$in_newline",
     )
     n.newline()
@@ -415,33 +406,33 @@ def generate_build_ninja(config, build_config):
     build_host_path = build_path / "host"
     build_config_path = build_path / "config.json"

-    def map_path(path):
+    def map_path(path: Path) -> Path:
         return path.parent / (path.name + ".MAP")

     class LinkStep:
-        def __init__(self, config):
-            self.name = config["name"]
-            self.module_id = config["module_id"]
-            self.ldscript = config["ldscript"]
+        def __init__(self, config: Dict[str, Any]) -> None:
+            self.name: str = config["name"]
+            self.module_id: int = config["module_id"]
+            self.ldscript: Optional[Path] = config["ldscript"]
             self.entry = config["entry"]
-            self.inputs = []
+            self.inputs: List[str] = []

-        def add(self, obj):
+        def add(self, obj: str) -> None:
             self.inputs.append(obj)

-        def output(self):
+        def output(self) -> Path:
             if self.module_id == 0:
                 return build_path / f"{self.name}.dol"
             else:
                 return build_path / self.name / f"{self.name}.rel"

-        def partial_output(self):
+        def partial_output(self) -> Path:
             if self.module_id == 0:
                 return build_path / f"{self.name}.elf"
             else:
                 return build_path / self.name / f"{self.name}.plf"

-        def write(self, n):
+        def write(self, n: ninja_syntax.Writer) -> None:
             n.comment(f"Link {self.name}")
             if self.module_id == 0:
                 elf_path = build_path / f"{self.name}.elf"
@@ -453,18 +444,18 @@ def generate_build_ninja(config, build_config):
                 else:
                     elf_map = None
                 n.build(
-                    outputs=path(elf_path),
+                    outputs=elf_path,
                     rule="link",
-                    inputs=path(self.inputs),
-                    implicit=path([self.ldscript, *mwld_implicit]),
-                    implicit_outputs=path(elf_map),
+                    inputs=self.inputs,
+                    implicit=[self.ldscript, *mwld_implicit],
+                    implicit_outputs=elf_map,
                     variables={"ldflags": elf_ldflags},
                 )
                 n.build(
-                    outputs=path(dol_path),
+                    outputs=dol_path,
                     rule="elf2dol",
-                    inputs=path(elf_path),
-                    implicit=path(dtk),
+                    inputs=elf_path,
+                    implicit=dtk,
                 )
             else:
                 preplf_path = build_path / self.name / f"{self.name}.preplf"
@@ -485,37 +476,37 @@ def generate_build_ninja(config, build_config):
                     preplf_map = None
                     plf_map = None
                 n.build(
-                    outputs=path(preplf_path),
+                    outputs=preplf_path,
                     rule="link",
-                    inputs=path(self.inputs),
-                    implicit=path(mwld_implicit),
-                    implicit_outputs=path(preplf_map),
+                    inputs=self.inputs,
+                    implicit=mwld_implicit,
+                    implicit_outputs=preplf_map,
                     variables={"ldflags": preplf_ldflags},
                 )
                 n.build(
-                    outputs=path(plf_path),
+                    outputs=plf_path,
                     rule="link",
-                    inputs=path(self.inputs),
-                    implicit=path([self.ldscript, preplf_path, *mwld_implicit]),
-                    implicit_outputs=path(plf_map),
+                    inputs=self.inputs,
+                    implicit=[self.ldscript, preplf_path, *mwld_implicit],
+                    implicit_outputs=plf_map,
                     variables={"ldflags": plf_ldflags},
                 )
             n.newline()

     if build_config:
-        link_steps = []
-        used_compiler_versions = set()
-        source_inputs = []
-        host_source_inputs = []
-        source_added = set()
+        link_steps: List[LinkStep] = []
+        used_compiler_versions: Set[str] = set()
+        source_inputs: List[Path] = []
+        host_source_inputs: List[Path] = []
+        source_added: Set[Path] = set()

-        def make_cflags_str(cflags):
+        def make_cflags_str(cflags: Union[str, List[str]]) -> str:
            if isinstance(cflags, list):
                 return " ".join(cflags)
            else:
                 return cflags

-        def add_unit(build_obj, link_step):
+        def add_unit(build_obj: Dict[str, Any], link_step: LinkStep) -> None:
             obj_path, obj_name = build_obj["object"], build_obj["name"]
             result = config.find_object(obj_name)
             if not result:
@@ -531,7 +522,7 @@ def generate_build_ninja(config, build_config):
             options = obj.options
             completed = obj.completed

-            unit_src_path = src_dir / options["source"]
+            unit_src_path = src_dir / str(options["source"])

             if not unit_src_path.exists():
                 if config.warn_missing_source or completed:
@@ -555,30 +546,30 @@ def generate_build_ninja(config, build_config):

             n.comment(f"{obj_name}: {lib_name} (linked {completed})")
             n.build(
-                outputs=path(src_obj_path),
+                outputs=src_obj_path,
                 rule="mwcc_sjis" if options["shiftjis"] else "mwcc",
-                inputs=path(unit_src_path),
+                inputs=unit_src_path,
                 variables={
-                    "mw_version": path(Path(mw_version)),
+                    "mw_version": Path(mw_version),
                     "cflags": cflags_str,
                     "basedir": os.path.dirname(src_base_path),
-                    "basefile": path(src_base_path),
+                    "basefile": src_base_path,
                 },
-                implicit=path(
-                    mwcc_sjis_implicit if options["shiftjis"] else mwcc_implicit
-                ),
+                implicit=mwcc_sjis_implicit
+                if options["shiftjis"]
+                else mwcc_implicit,
             )

             if lib["host"]:
                 host_obj_path = build_host_path / f"{base_object}.o"
                 host_base_path = build_host_path / base_object
                 n.build(
-                    outputs=path(host_obj_path),
+                    outputs=host_obj_path,
                     rule="host_cc" if unit_src_path.suffix == ".c" else "host_cpp",
-                    inputs=path(unit_src_path),
+                    inputs=unit_src_path,
                     variables={
                         "basedir": os.path.dirname(host_base_path),
-                        "basefile": path(host_base_path),
+                        "basefile": host_base_path,
                     },
                 )
                 if options["add_to_all"]:
@@ -626,7 +617,7 @@ def generate_build_ninja(config, build_config):
                 sys.exit(f"Compiler {mw_path} does not exist")

         # Check if linker exists
-        mw_path = compilers / config.linker_version / "mwldeppc.exe"
+        mw_path = compilers / str(config.linker_version) / "mwldeppc.exe"
         if config.compilers_path and not os.path.exists(mw_path):
             sys.exit(f"Linker {mw_path} does not exist")

@@ -640,7 +631,7 @@ def generate_build_ninja(config, build_config):
         ###
         # Generate RELs
         ###
-        generated_rels = []
+        generated_rels: List[str] = []
         for link in build_config["links"]:
             # Map module names to link steps
             link_steps_local = list(
@@ -670,13 +661,11 @@ def generate_build_ninja(config, build_config):
             )
             n.comment("Generate RELs")
             n.build(
-                outputs=path(rel_outputs),
+                outputs=rel_outputs,
                 rule="makerel",
-                inputs=path(
-                    list(map(lambda step: step.partial_output(), link_steps_local))
-                ),
-                implicit=path([dtk, config.config_path]),
-                variables={"config": path(config.config_path)},
+                inputs=list(map(lambda step: step.partial_output(), link_steps_local)),
+                implicit=[dtk, config.config_path],
+                variables={"config": config.config_path},
             )
             n.newline()

@@ -687,7 +676,7 @@ def generate_build_ninja(config, build_config):
         n.build(
             outputs="all_source",
             rule="phony",
-            inputs=path(source_inputs),
+            inputs=source_inputs,
         )
         n.newline()

@@ -698,7 +687,7 @@ def generate_build_ninja(config, build_config):
         n.build(
             outputs="all_source_host",
             rule="phony",
-            inputs=path(host_source_inputs),
+            inputs=host_source_inputs,
         )
         n.newline()

@@ -714,10 +703,10 @@ def generate_build_ninja(config, build_config):
             description="CHECK $in",
         )
         n.build(
-            outputs=path(ok_path),
+            outputs=ok_path,
             rule="check",
-            inputs=path(config.check_sha_path),
-            implicit=path([dtk, *map(lambda step: step.output(), link_steps)]),
+            inputs=config.check_sha_path,
+            implicit=[dtk, *map(lambda step: step.output(), link_steps)],
         )
         n.newline()

@@ -732,9 +721,9 @@ def generate_build_ninja(config, build_config):
             description="PROGRESS",
         )
         n.build(
-            outputs=path(progress_path),
+            outputs=progress_path,
             rule="progress",
-            implicit=path([ok_path, configure_script, python_lib, config.config_path]),
+            implicit=[ok_path, configure_script, python_lib, config.config_path],
         )

         ###
@@ -750,7 +739,7 @@ def generate_build_ninja(config, build_config):
             description=f"DIFF {dol_elf_path}",
         )
         n.build(
-            inputs=path([config.config_path, dol_elf_path]),
+            inputs=[config.config_path, dol_elf_path],
             outputs="dol_diff",
             rule="dol_diff",
         )
@@ -768,10 +757,10 @@ def generate_build_ninja(config, build_config):
             description=f"APPLY {dol_elf_path}",
         )
         n.build(
-            inputs=path([config.config_path, dol_elf_path]),
+            inputs=[config.config_path, dol_elf_path],
             outputs="dol_apply",
             rule="dol_apply",
-            implicit=path([ok_path]),
+            implicit=[ok_path],
         )
         n.build(
             outputs="apply",
@@ -792,11 +781,11 @@ def generate_build_ninja(config, build_config):
         deps="gcc",
     )
     n.build(
-        inputs=path(config.config_path),
-        outputs=path(build_config_path),
+        inputs=config.config_path,
+        outputs=build_config_path,
         rule="split",
-        implicit=path(dtk),
-        variables={"out_dir": path(build_path)},
+        implicit=dtk,
+        variables={"out_dir": build_path},
     )
     n.newline()

@@ -813,14 +802,12 @@ def generate_build_ninja(config, build_config):
     n.build(
         outputs="build.ninja",
         rule="configure",
-        implicit=path(
-            [
-                build_config_path,
-                configure_script,
-                python_lib,
-                Path(python_lib_dir) / "ninja_syntax.py",
-            ]
-        ),
+        implicit=[
+            build_config_path,
+            configure_script,
+            python_lib,
+            python_lib_dir / "ninja_syntax.py",
+        ],
     )
     n.newline()

@@ -829,9 +816,9 @@ def generate_build_ninja(config, build_config):
     ###
     n.comment("Default rule")
     if build_config:
-        n.default(path(progress_path))
+        n.default(progress_path)
     else:
-        n.default(path(build_config_path))
+        n.default(build_config_path)

     # Write build.ninja
     with open("build.ninja", "w", encoding="utf-8") as f:
@@ -840,11 +827,13 @@ def generate_build_ninja(config, build_config):


 # Generate objdiff.json
-def generate_objdiff_config(config, build_config):
+def generate_objdiff_config(
+    config: ProjectConfig, build_config: Optional[Dict[str, Any]]
+) -> None:
     if not build_config:
         return

-    objdiff_config = {
+    objdiff_config: Dict[str, Any] = {
         "min_version": "0.4.3",
         "custom_make": "ninja",
         "build_target": False,
@@ -865,16 +854,16 @@ def generate_objdiff_config(config, build_config):

     build_path = config.out_path()

-    def add_unit(build_obj, module_name):
+    def add_unit(build_obj: Dict[str, Any], module_name: str) -> None:
         if build_obj["autogenerated"]:
             # Skip autogenerated objects
             return

         obj_path, obj_name = build_obj["object"], build_obj["name"]
         base_object = Path(obj_name).with_suffix("")
-        unit_config = {
-            "name": unix_str(Path(module_name) / base_object),
-            "target_path": unix_str(obj_path),
+        unit_config: Dict[str, Any] = {
+            "name": Path(module_name) / base_object,
+            "target_path": obj_path,
         }

         result = config.find_object(obj_name)
@@ -885,7 +874,7 @@ def generate_objdiff_config(config, build_config):
         lib, obj = result
         src_dir = Path(lib.get("src_dir", config.src_dir))

-        unit_src_path = src_dir / obj.options["source"]
+        unit_src_path = src_dir / str(obj.options["source"])

         if not unit_src_path.exists():
             objdiff_config["units"].append(unit_config)
@@ -905,7 +894,7 @@ def generate_objdiff_config(config, build_config):
                     elif value == "nodeferred":
                         reverse_fn_order = False

-        unit_config["base_path"] = unix_str(src_obj_path)
+        unit_config["base_path"] = src_obj_path
         unit_config["reverse_fn_order"] = reverse_fn_order
         unit_config["complete"] = obj.completed
         objdiff_config["units"].append(unit_config)
@@ -921,32 +910,34 @@ def generate_objdiff_config(config, build_config):

     # Write objdiff.json
     with open("objdiff.json", "w", encoding="utf-8") as w:
-        json.dump(objdiff_config, w, indent=4)
+        from .ninja_syntax import serialize_path
+
+        json.dump(objdiff_config, w, indent=4, default=serialize_path)


 # Calculate, print and write progress to progress.json
-def calculate_progress(config):
+def calculate_progress(config: ProjectConfig) -> None:
     out_path = config.out_path()
     build_config = load_build_config(config, out_path / "config.json")
     if not build_config:
         return

     class ProgressUnit:
-        def __init__(self, name):
-            self.name = name
-            self.code_total = 0
-            self.code_fancy_frac = config.progress_code_fancy_frac
-            self.code_fancy_item = config.progress_code_fancy_item
-            self.code_progress = 0
-            self.data_total = 0
-            self.data_fancy_frac = config.progress_data_fancy_frac
-            self.data_fancy_item = config.progress_data_fancy_item
-            self.data_progress = 0
-            self.objects_progress = 0
-            self.objects_total = 0
-            self.objects = set()
+        def __init__(self, name: str) -> None:
+            self.name: str = name
+            self.code_total: int = 0
+            self.code_fancy_frac: int = config.progress_code_fancy_frac
+            self.code_fancy_item: str = config.progress_code_fancy_item
+            self.code_progress: int = 0
+            self.data_total: int = 0
+            self.data_fancy_frac: int = config.progress_data_fancy_frac
+            self.data_fancy_item: str = config.progress_data_fancy_item
+            self.data_progress: int = 0
+            self.objects_progress: int = 0
+            self.objects_total: int = 0
+            self.objects: Set[Object] = set()

-        def add(self, build_obj):
+        def add(self, build_obj: Dict[str, Any]) -> None:
             self.code_total += build_obj["code_size"]
             self.data_total += build_obj["data_size"]

@@ -973,10 +964,10 @@ def calculate_progress(config):
             if include_object:
                 self.objects_progress += 1

-        def code_frac(self):
+        def code_frac(self) -> float:
             return self.code_progress / self.code_total

-        def data_frac(self):
+        def data_frac(self) -> float:
             return self.data_progress / self.data_total

     # Add DOL units
@@ -989,7 +980,7 @@ def calculate_progress(config):

     # Add REL units
     rels_progress = ProgressUnit("Modules") if config.progress_modules else None
-    modules_progress = []
+    modules_progress: List[ProgressUnit] = []
     for module in build_config["modules"]:
         progress = ProgressUnit(module["name"])
         modules_progress.append(progress)
@@ -1003,7 +994,7 @@ def calculate_progress(config):
     # Print human-readable progress
     print("Progress:")

-    def print_category(unit):
+    def print_category(unit: Optional[ProgressUnit]) -> None:
         if unit is None:
             return

@@ -1037,9 +1028,9 @@ def calculate_progress(config):
         print_category(progress)

     # Generate and write progress.json
-    progress_json = {}
+    progress_json: Dict[str, Any] = {}

-    def add_category(category, unit):
+    def add_category(category: str, unit: ProgressUnit) -> None:
         progress_json[category] = {
             "code": unit.code_progress,
             "code/total": unit.code_total,
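The objdiff.json hunk above also shows the trick that lets `unit_config` hold real `Path` values: `json.dump(..., default=serialize_path)` converts them to forward-slash strings only at write time. A standalone sketch of the same idea (the dictionary contents are invented for illustration):

import json
from pathlib import Path


def serialize_path(input):  # same shape as the helper added to ninja_syntax.py
    return str(input).replace("\\", "/") if input else ""


unit = {
    "name": Path("main") / "main_main",  # hypothetical unit name
    "target_path": Path("build/GAMEID/obj/main.o"),  # hypothetical object path
}
# json.dump calls serialize_path for any value it cannot serialize itself,
# so both Path values come out as "main/main_main" and "build/GAMEID/obj/main.o".
print(json.dumps(unit, indent=4, default=serialize_path))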
@@ -25,7 +25,7 @@ def in_wsl() -> bool:
     return "microsoft-standard" in uname().release


-def import_d_file(in_file) -> str:
+def import_d_file(in_file: str) -> str:
     out_text = ""

     with open(in_file) as file:
@@ -60,7 +60,7 @@ def import_d_file(in_file) -> str:
     return out_text


-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="""Transform a .d file from Wine paths to normal paths"""
     )
@@ -51,7 +51,7 @@ if __name__ == "__main__":
     args = parser.parse_args()
     api_key = args.api_key or os.environ.get("PROGRESS_API_KEY")
     if not api_key:
-        raise "API key required"
+        raise KeyError("API key required")
     url = generate_url(args)

     entries = []
@@ -68,9 +68,12 @@ if __name__ == "__main__":
     print("Publishing entry to", url)
     json.dump(entries[0], sys.stdout, indent=4)
     print()
-    r = requests.post(url, json={
-        "api_key": api_key,
-        "entries": entries,
-    })
+    r = requests.post(
+        url,
+        json={
+            "api_key": api_key,
+            "entries": entries,
+        },
+    )
     r.raise_for_status()
     print("Done!")