Implement mypy fixes across all python files (#4)
• Type hinting added to every single file
• Path conversion is now handled natively in `ninja_syntax.py`, so `Path` objects can safely be passed to it almost everywhere directly
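The second point can be illustrated with a small sketch (assuming the module lives at `tools/ninja_syntax.py` as in this template; the `touch` rule and the file names are made up for the example):

```python
from io import StringIO
from pathlib import Path

from tools import ninja_syntax

out = StringIO()
n = ninja_syntax.Writer(out)
n.rule(name="touch", command="touch $out")  # hypothetical rule, only for illustration
n.build(
    outputs=Path("build") / "GAMEID" / "main.dol",  # Path objects are accepted as-is
    rule="touch",
    implicit=[Path("tools") / "dtk", None],  # None entries are silently dropped
)
print(out.getvalue())
```

Before this change, `tools/project.py` wrapped every such argument in its own `path()` helper; that helper is removed in this commit.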
parent 04c8b45f93
commit 575e3b4a46
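The conversion rules the new `serialize_path`/`serialize_paths` helpers apply can be sketched as a quick self-check (import path assumed to be `tools.ninja_syntax`; values shown assume a POSIX host — the helpers stringify, normalise backslashes to `/`, and drop empty entries):

```python
from pathlib import Path

from tools.ninja_syntax import serialize_path, serialize_paths

assert serialize_path(None) == ""
assert serialize_path(Path("build") / "main.dol") == "build/main.dol"
assert serialize_paths(None) == []
assert serialize_paths([Path("tools") / "dtk", None, "flag"]) == ["tools/dtk", "flag"]
```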
.gitignore
@@ -2,6 +2,7 @@ __pycache__
 .idea
 .vscode
 .ninja_*
+.mypy_cache
 *.exe
 build
 build.ninja
.vscode/settings.json
@@ -21,6 +21,7 @@
         "build/**/*.MAP": true,
         "build.ninja": true,
         ".ninja_*": true,
-        "objdiff.json": true
+        "objdiff.json": true,
+        ".mypy_cache": true
     }
 }
configure.py (11 lines changed)
@@ -16,6 +16,7 @@ import sys
 import argparse
 
 from pathlib import Path
+from typing import Dict, List, Any
 from tools.project import (
     Object,
     ProjectConfig,
@@ -27,7 +28,7 @@ from tools.project import (
 # Game versions
 DEFAULT_VERSION = 0
 VERSIONS = [
     "GAMEID", # 0
 ]
 
 if len(VERSIONS) > 1:
@@ -150,7 +151,7 @@ cflags_base = [
     "-RTTI off",
     "-fp_contract on",
     "-str reuse",
     "-multibyte", # For Wii compilers, replace with `-enc SJIS`
     "-i include",
     f"-i build/{config.version}/include",
     f"-DVERSION={version_num}",
@@ -169,7 +170,7 @@ cflags_runtime = [
     "-str reuse,pool,readonly",
     "-gccinc",
     "-common off",
     "-inline auto",
 ]
 
 # REL flags
@@ -183,7 +184,7 @@ config.linker_version = "GC/1.3.2"
 
 
 # Helper function for Dolphin libraries
-def DolphinLib(lib_name, objects):
+def DolphinLib(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
     return {
         "lib": lib_name,
         "mw_version": "GC/1.2.5n",
@@ -194,7 +195,7 @@ def DolphinLib(lib_name, objects):
 
 
 # Helper function for REL script objects
-def Rel(lib_name, objects):
+def Rel(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
     return {
         "lib": lib_name,
         "mw_version": "GC/1.3.2",
tools/decompctx.py
@@ -13,6 +13,7 @@
 import argparse
 import os
 import re
+from typing import List, Set
 
 script_dir = os.path.dirname(os.path.realpath(__file__))
 root_dir = os.path.abspath(os.path.join(script_dir, ".."))
@@ -20,54 +21,58 @@ src_dir = os.path.join(root_dir, "src")
 include_dir = os.path.join(root_dir, "include")
 
 include_pattern = re.compile(r'^#include\s*[<"](.+?)[>"]$')
-guard_pattern = re.compile(r'^#ifndef\s+(.*)$')
+guard_pattern = re.compile(r"^#ifndef\s+(.*)$")
 
-defines = set()
+defines: Set[str] = set()
+
 
 def import_h_file(in_file: str, r_path: str) -> str:
     rel_path = os.path.join(root_dir, r_path, in_file)
     inc_path = os.path.join(include_dir, in_file)
     if os.path.exists(rel_path):
         return import_c_file(rel_path)
     elif os.path.exists(inc_path):
         return import_c_file(inc_path)
     else:
         print("Failed to locate", in_file)
         exit(1)
 
-def import_c_file(in_file) -> str:
+
+def import_c_file(in_file: str) -> str:
     in_file = os.path.relpath(in_file, root_dir)
-    out_text = ''
+    out_text = ""
 
     try:
         with open(in_file, encoding="utf-8") as file:
             out_text += process_file(in_file, list(file))
     except Exception:
         with open(in_file) as file:
             out_text += process_file(in_file, list(file))
     return out_text
 
-def process_file(in_file: str, lines) -> str:
-    out_text = ''
+
+def process_file(in_file: str, lines: List[str]) -> str:
+    out_text = ""
     for idx, line in enumerate(lines):
         guard_match = guard_pattern.match(line.strip())
         if idx == 0:
             if guard_match:
                 if guard_match[1] in defines:
                     break
                 defines.add(guard_match[1])
             print("Processing file", in_file)
         include_match = include_pattern.match(line.strip())
         if include_match and not include_match[1].endswith(".s"):
-            out_text += f"/* \"{in_file}\" line {idx} \"{include_match[1]}\" */\n"
+            out_text += f'/* "{in_file}" line {idx} "{include_match[1]}" */\n'
             out_text += import_h_file(include_match[1], os.path.dirname(in_file))
-            out_text += f"/* end \"{include_match[1]}\" */\n"
+            out_text += f'/* end "{include_match[1]}" */\n'
         else:
             out_text += line
 
     return out_text
 
-def main():
+
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="""Create a context file which can be used for decomp.me"""
     )
tools/download_tool.py
@@ -18,11 +18,11 @@ import shutil
 import stat
 import urllib.request
 import zipfile
 
+from typing import Callable, Dict
 from pathlib import Path
 
 
-def dtk_url(tag):
+def dtk_url(tag: str) -> str:
     uname = platform.uname()
     suffix = ""
     system = uname.system.lower()
@@ -38,21 +38,21 @@ def dtk_url(tag):
     return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"
 
 
-def sjiswrap_url(tag):
+def sjiswrap_url(tag: str) -> str:
     repo = "https://github.com/encounter/sjiswrap"
     return f"{repo}/releases/download/{tag}/sjiswrap-windows-x86.exe"
 
 
-def wibo_url(tag):
+def wibo_url(tag: str) -> str:
     repo = "https://github.com/decompals/wibo"
     return f"{repo}/releases/download/{tag}/wibo"
 
 
-def compilers_url(tag):
+def compilers_url(tag: str) -> str:
     return f"https://files.decomp.dev/compilers_{tag}.zip"
 
 
-TOOLS = {
+TOOLS: Dict[str, Callable[[str], str]] = {
     "dtk": dtk_url,
     "sjiswrap": sjiswrap_url,
     "wibo": wibo_url,
@@ -60,7 +60,7 @@ TOOLS = {
 }
 
 
-def main():
+def main() -> None:
     parser = argparse.ArgumentParser()
     parser.add_argument("tool", help="Tool name")
     parser.add_argument("output", type=Path, help="output file path")
tools/ninja_syntax.py
@@ -21,50 +21,67 @@ use Python.
 
 import re
 import textwrap
+from typing import Optional, Union, Tuple, Match, Dict, List
+from io import StringIO
+from pathlib import Path
 
 
-def escape_path(word):
+NinjaPath = Union[str, Path]
+NinjaPaths = Union[
+    List[str],
+    List[Path],
+    List[NinjaPath],
+    List[Optional[str]],
+    List[Optional[Path]],
+    List[Optional[NinjaPath]],
+]
+NinjaPathOrPaths = Union[NinjaPath, NinjaPaths]
+
+
+def escape_path(word: str) -> str:
     return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
 
 
 class Writer(object):
-    def __init__(self, output, width=78):
+    def __init__(self, output: StringIO, width: int = 78) -> None:
         self.output = output
         self.width = width
 
-    def newline(self):
+    def newline(self) -> None:
         self.output.write("\n")
 
-    def comment(self, text):
+    def comment(self, text: str) -> None:
         for line in textwrap.wrap(
             text, self.width - 2, break_long_words=False, break_on_hyphens=False
         ):
             self.output.write("# " + line + "\n")
 
-    def variable(self, key, value, indent=0):
-        if value is None:
-            return
-        if isinstance(value, list):
-            value = " ".join(filter(None, value)) # Filter out empty strings.
+    def variable(
+        self,
+        key: str,
+        value: Optional[NinjaPathOrPaths],
+        indent: int = 0,
+    ) -> None:
+        value = " ".join(serialize_paths(value))
         self._line("%s = %s" % (key, value), indent)
 
-    def pool(self, name, depth):
+    def pool(self, name: str, depth: int) -> None:
         self._line("pool %s" % name)
-        self.variable("depth", depth, indent=1)
+        self.variable("depth", str(depth), indent=1)
 
     def rule(
         self,
-        name,
-        command,
-        description=None,
-        depfile=None,
-        generator=False,
-        pool=None,
-        restat=False,
-        rspfile=None,
-        rspfile_content=None,
-        deps=None,
-    ):
+        name: str,
+        command: str,
+        description: Optional[str] = None,
+        depfile: Optional[NinjaPath] = None,
+        generator: bool = False,
+        pool: Optional[str] = None,
+        restat: bool = False,
+        rspfile: Optional[NinjaPath] = None,
+        rspfile_content: Optional[NinjaPath] = None,
+        deps: Optional[NinjaPathOrPaths] = None,
+    ) -> None:
         self._line("rule %s" % name)
         self.variable("command", command, indent=1)
         if description:
@@ -86,30 +103,37 @@ class Writer(object):
 
     def build(
         self,
-        outputs,
-        rule,
-        inputs=None,
-        implicit=None,
-        order_only=None,
-        variables=None,
-        implicit_outputs=None,
-        pool=None,
-        dyndep=None,
-    ):
-        outputs = as_list(outputs)
+        outputs: NinjaPathOrPaths,
+        rule: str,
+        inputs: Optional[NinjaPathOrPaths] = None,
+        implicit: Optional[NinjaPathOrPaths] = None,
+        order_only: Optional[NinjaPathOrPaths] = None,
+        variables: Optional[
+            Union[
+                List[Tuple[str, Optional[NinjaPathOrPaths]]],
+                Dict[str, Optional[NinjaPathOrPaths]],
+            ]
+        ] = None,
+        implicit_outputs: Optional[NinjaPathOrPaths] = None,
+        pool: Optional[str] = None,
+        dyndep: Optional[NinjaPath] = None,
+    ) -> List[str]:
+        outputs = serialize_paths(outputs)
         out_outputs = [escape_path(x) for x in outputs]
-        all_inputs = [escape_path(x) for x in as_list(inputs)]
+        all_inputs = [escape_path(x) for x in serialize_paths(inputs)]
 
         if implicit:
-            implicit = [escape_path(x) for x in as_list(implicit)]
+            implicit = [escape_path(x) for x in serialize_paths(implicit)]
             all_inputs.append("|")
             all_inputs.extend(implicit)
         if order_only:
-            order_only = [escape_path(x) for x in as_list(order_only)]
+            order_only = [escape_path(x) for x in serialize_paths(order_only)]
             all_inputs.append("||")
             all_inputs.extend(order_only)
         if implicit_outputs:
-            implicit_outputs = [escape_path(x) for x in as_list(implicit_outputs)]
+            implicit_outputs = [
+                escape_path(x) for x in serialize_paths(implicit_outputs)
+            ]
             out_outputs.append("|")
             out_outputs.extend(implicit_outputs)
 
@@ -119,7 +143,7 @@ class Writer(object):
         if pool is not None:
             self._line(" pool = %s" % pool)
         if dyndep is not None:
-            self._line(" dyndep = %s" % dyndep)
+            self._line(" dyndep = %s" % serialize_path(dyndep))
 
         if variables:
             if isinstance(variables, dict):
@@ -132,16 +156,16 @@ class Writer(object):
 
         return outputs
 
-    def include(self, path):
+    def include(self, path: str) -> None:
         self._line("include %s" % path)
 
-    def subninja(self, path):
+    def subninja(self, path: str) -> None:
         self._line("subninja %s" % path)
 
-    def default(self, paths):
-        self._line("default %s" % " ".join(as_list(paths)))
+    def default(self, paths: NinjaPathOrPaths) -> None:
+        self._line("default %s" % " ".join(serialize_paths(paths)))
 
-    def _count_dollars_before_index(self, s, i):
+    def _count_dollars_before_index(self, s: str, i: int) -> int:
         """Returns the number of '$' characters right in front of s[i]."""
         dollar_count = 0
         dollar_index = i - 1
@@ -150,7 +174,7 @@ class Writer(object):
             dollar_index -= 1
         return dollar_count
 
-    def _line(self, text, indent=0):
+    def _line(self, text: str, indent: int = 0) -> None:
         """Write 'text' word-wrapped at self.width characters."""
         leading_space = " " * indent
         while len(leading_space) + len(text) > self.width:
@@ -187,19 +211,21 @@ class Writer(object):
 
         self.output.write(leading_space + text + "\n")
 
-    def close(self):
+    def close(self) -> None:
         self.output.close()
 
 
-def as_list(input):
-    if input is None:
-        return []
+def serialize_path(input: Optional[NinjaPath]) -> str:
+    return str(input).replace("\\", "/") if input else ""
+
+
+def serialize_paths(input: Optional[NinjaPathOrPaths]) -> List[str]:
     if isinstance(input, list):
-        return input
-    return [input]
+        return [serialize_path(path) for path in input if path]
+    return [serialize_path(input)] if input else []
 
 
-def escape(string):
+def escape(string: str) -> str:
     """Escape a string such that it can be embedded into a Ninja file without
     further interpretation."""
     assert "\n" not in string, "Ninja syntax does not allow newlines"
@@ -207,14 +233,14 @@ def escape(string):
     return string.replace("$", "$$")
 
 
-def expand(string, vars, local_vars={}):
+def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str:
     """Expand a string containing $vars as Ninja would.
 
     Note: doesn't handle the full Ninja variable syntax, but it's enough
     to make configure.py's use of it work.
     """
 
-    def exp(m):
+    def exp(m: Match[str]) -> str:
         var = m.group(1)
         if var == "$":
             return "$"
tools/project.py (409 lines changed)
@@ -17,6 +17,7 @@ import platform
 import sys
 import math
 
+from typing import Optional, Union, Tuple, Dict, List, Set, Any
 from pathlib import Path
 from . import ninja_syntax
 
@@ -28,51 +29,70 @@ if sys.platform == "cygwin":
     )
 
 
+class Object:
+    def __init__(self, completed: bool, name: str, **options: Any) -> None:
+        self.name = name
+        self.completed = completed
+        self.options: Dict[str, Any] = {
+            "add_to_all": True,
+            "cflags": None,
+            "extra_cflags": None,
+            "mw_version": None,
+            "shiftjis": True,
+            "source": name,
+        }
+        self.options.update(options)
+
+
 class ProjectConfig:
-    def __init__(self):
+    def __init__(self) -> None:
         # Paths
-        self.build_dir = Path("build")
-        self.src_dir = Path("src")
-        self.tools_dir = Path("tools")
+        self.build_dir: Path = Path("build")
+        self.src_dir: Path = Path("src")
+        self.tools_dir: Path = Path("tools")
 
         # Tooling
-        self.dtk_tag = None # Git tag
-        self.build_dtk_path = None # If None, download
-        self.compilers_tag = None # 1
-        self.compilers_path = None # If None, download
-        self.wibo_tag = None # Git tag
-        self.wrapper = None # If None, download wibo on Linux
-        self.sjiswrap_tag = None # Git tag
-        self.sjiswrap_path = None # If None, download
+        self.dtk_tag: Optional[str] = None # Git tag
+        self.build_dtk_path: Optional[Path] = None # If None, download
+        self.compilers_tag: Optional[str] = None # 1
+        self.compilers_path: Optional[Path] = None # If None, download
+        self.wibo_tag: Optional[str] = None # Git tag
+        self.wrapper: Optional[Path] = None # If None, download wibo on Linux
+        self.sjiswrap_tag: Optional[str] = None # Git tag
+        self.sjiswrap_path: Optional[Path] = None # If None, download
 
         # Project config
-        self.build_rels = True # Build REL files
-        self.check_sha_path = None # Path to version.sha1
-        self.config_path = None # Path to config.yml
-        self.debug = False # Build with debug info
-        self.generate_map = False # Generate map file(s)
-        self.ldflags = None # Linker flags
-        self.libs = None # List of libraries
-        self.linker_version = None # mwld version
-        self.version = None # Version name
-        self.warn_missing_config = False # Warn on missing unit configuration
-        self.warn_missing_source = False # Warn on missing source file
-        self.rel_strip_partial = True # Generate PLFs with -strip_partial
-        self.rel_empty_file = None # Path to empty.c for generating empty RELs
+        self.build_rels: bool = True # Build REL files
+        self.check_sha_path: Optional[Path] = None # Path to version.sha1
+        self.config_path: Optional[Path] = None # Path to config.yml
+        self.debug: bool = False # Build with debug info
+        self.generate_map: bool = False # Generate map file(s)
+        self.ldflags: Optional[List[str]] = None # Linker flags
+        self.libs: Optional[List[Dict[str, Any]]] = None # List of libraries
+        self.linker_version: Optional[str] = None # mwld version
+        self.version: Optional[str] = None # Version name
+        self.warn_missing_config: bool = False # Warn on missing unit configuration
+        self.warn_missing_source: bool = False # Warn on missing source file
+        self.rel_strip_partial: bool = True # Generate PLFs with -strip_partial
+        self.rel_empty_file: Optional[
+            Path
+        ] = None # Path to empty.c for generating empty RELs
 
         # Progress output and progress.json config
-        self.progress_all = True # Include combined "all" category
-        self.progress_modules = True # Include combined "modules" category
-        self.progress_each_module = True # Include individual modules, disable for large numbers of modules
+        self.progress_all: bool = True # Include combined "all" category
+        self.progress_modules: bool = True # Include combined "modules" category
+        self.progress_each_module: bool = (
+            True # Include individual modules, disable for large numbers of modules
+        )
 
         # Progress fancy printing
-        self.progress_use_fancy = False
-        self.progress_code_fancy_frac = 0
-        self.progress_code_fancy_item = ""
-        self.progress_data_fancy_frac = 0
-        self.progress_data_fancy_item = ""
+        self.progress_use_fancy: bool = False
+        self.progress_code_fancy_frac: int = 0
+        self.progress_code_fancy_item: str = ""
+        self.progress_data_fancy_frac: int = 0
+        self.progress_data_fancy_item: str = ""
 
-    def validate(self):
+    def validate(self) -> None:
         required_attrs = [
             "build_dir",
             "src_dir",
@@ -88,33 +108,18 @@ class ProjectConfig:
             if getattr(self, attr) is None:
                 sys.exit(f"ProjectConfig.{attr} missing")
 
-    def find_object(self, name):
-        for lib in self.libs:
+    def find_object(self, name: str) -> Optional[Tuple[Dict[str, Any], Object]]:
+        for lib in self.libs or {}:
             for obj in lib["objects"]:
                 if obj.name == name:
-                    return [lib, obj]
+                    return lib, obj
         return None
 
-    def out_path(self):
-        return self.build_dir / self.version
+    def out_path(self) -> Path:
+        return self.build_dir / str(self.version)
 
 
-class Object:
-    def __init__(self, completed, name, **options):
-        self.name = name
-        self.completed = completed
-        self.options = {
-            "add_to_all": True,
-            "cflags": None,
-            "extra_cflags": None,
-            "mw_version": None,
-            "shiftjis": True,
-            "source": name,
-        }
-        self.options.update(options)
-
-
-def is_windows():
+def is_windows() -> bool:
     return os.name == "nt"
 
 
@@ -124,36 +129,18 @@ CHAIN = "cmd /c " if is_windows() else ""
 EXE = ".exe" if is_windows() else ""
 
 
-# Replace forward slashes with backslashes on Windows
-def os_str(value):
-    return str(value).replace("/", os.sep)
-
-
-# Replace backslashes with forward slashes on Windows
-def unix_str(value):
-    return str(value).replace(os.sep, "/")
-
-
-# Stringify paths for ninja_syntax
-def path(value):
-    if value is None:
-        return None
-    elif isinstance(value, list):
-        return list(map(os_str, filter(lambda x: x is not None, value)))
-    else:
-        return [os_str(value)]
-
-
 # Load decomp-toolkit generated config.json
-def load_build_config(config, build_config_path):
+def load_build_config(
+    config: ProjectConfig, build_config_path: Path
+) -> Optional[Dict[str, Any]]:
     if not build_config_path.is_file():
         return None
 
-    def versiontuple(v):
+    def versiontuple(v: str) -> Tuple[int, ...]:
         return tuple(map(int, (v.split("."))))
 
     f = open(build_config_path, "r", encoding="utf-8")
-    build_config = json.load(f)
+    build_config: Dict[str, Any] = json.load(f)
     config_version = build_config.get("version")
     if not config_version:
         # Invalid config.json
@@ -161,7 +148,7 @@ def load_build_config(config, build_config_path):
         os.remove(build_config_path)
         return None
 
-    dtk_version = config.dtk_tag[1:] # Strip v
+    dtk_version = str(config.dtk_tag)[1:] # Strip v
     if versiontuple(config_version) < versiontuple(dtk_version):
         # Outdated config.json
         f.close()
@@ -173,14 +160,16 @@ def load_build_config(config, build_config_path):
 
 
 # Generate build.ninja and objdiff.json
-def generate_build(config):
+def generate_build(config: ProjectConfig) -> None:
     build_config = load_build_config(config, config.out_path() / "config.json")
     generate_build_ninja(config, build_config)
     generate_objdiff_config(config, build_config)
 
 
 # Generate build.ninja
-def generate_build_ninja(config, build_config):
+def generate_build_ninja(
+    config: ProjectConfig, build_config: Optional[Dict[str, Any]]
+) -> None:
     config.validate()
 
     out = io.StringIO()
@@ -188,9 +177,9 @@ def generate_build_ninja(config, build_config):
     n.variable("ninja_required_version", "1.3")
     n.newline()
 
-    configure_script = os.path.relpath(os.path.abspath(sys.argv[0]))
-    python_lib = os.path.relpath(__file__)
-    python_lib_dir = os.path.dirname(python_lib)
+    configure_script = Path(os.path.relpath(os.path.abspath(sys.argv[0])))
+    python_lib = Path(os.path.relpath(__file__))
+    python_lib_dir = python_lib.parent
     n.comment("The arguments passed to configure.py, for rerunning it.")
     n.variable("configure_args", sys.argv[1:])
     n.variable("python", f'"{sys.executable}"')
@@ -200,7 +189,7 @@ def generate_build_ninja(config, build_config):
     # Variables
     ###
     n.comment("Variables")
-    ldflags = " ".join(config.ldflags)
+    ldflags = " ".join(config.ldflags or [])
     if config.generate_map:
         ldflags += " -mapunused"
     if config.debug:
@@ -229,14 +218,14 @@ def generate_build_ninja(config, build_config):
             name="cargo",
             command="cargo build --release --manifest-path $in --bin $bin --target-dir $target",
             description="CARGO $bin",
-            depfile=path(Path("$target") / "release" / "$bin.d"),
+            depfile=Path("$target") / "release" / "$bin.d",
            deps="gcc",
         )
         n.build(
-            outputs=path(dtk),
+            outputs=dtk,
             rule="cargo",
-            inputs=path(config.build_dtk_path / "Cargo.toml"),
-            implicit=path(config.build_dtk_path / "Cargo.lock"),
+            inputs=config.build_dtk_path / "Cargo.toml",
+            implicit=config.build_dtk_path / "Cargo.lock",
             variables={
                 "bin": "dtk",
                 "target": build_tools_path,
@@ -245,9 +234,9 @@ def generate_build_ninja(config, build_config):
     elif config.dtk_tag:
         dtk = build_tools_path / f"dtk{EXE}"
         n.build(
-            outputs=path(dtk),
+            outputs=dtk,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "dtk",
                 "tag": config.dtk_tag,
@@ -261,9 +250,9 @@ def generate_build_ninja(config, build_config):
     elif config.sjiswrap_tag:
         sjiswrap = build_tools_path / "sjiswrap.exe"
         n.build(
-            outputs=path(sjiswrap),
+            outputs=sjiswrap,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "sjiswrap",
                 "tag": config.sjiswrap_tag,
@@ -274,7 +263,7 @@ def generate_build_ninja(config, build_config):
 
     # Only add an implicit dependency on wibo if we download it
     wrapper = config.wrapper
-    wrapper_implicit = None
+    wrapper_implicit: Optional[Path] = None
     if (
         config.wibo_tag is not None
         and sys.platform == "linux"
@@ -284,33 +273,35 @@ def generate_build_ninja(config, build_config):
         wrapper = build_tools_path / "wibo"
         wrapper_implicit = wrapper
         n.build(
-            outputs=path(wrapper),
+            outputs=wrapper,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "wibo",
                 "tag": config.wibo_tag,
             },
         )
     if not is_windows() and wrapper is None:
-        wrapper = "wine"
+        wrapper = Path("wine")
     wrapper_cmd = f"{wrapper} " if wrapper else ""
 
-    compilers_implicit = None
+    compilers_implicit: Optional[Path] = None
     if config.compilers_path:
         compilers = config.compilers_path
     elif config.compilers_tag:
         compilers = config.build_dir / "compilers"
         compilers_implicit = compilers
         n.build(
-            outputs=path(compilers),
+            outputs=compilers,
             rule="download_tool",
-            implicit=path(download_tool),
+            implicit=download_tool,
             variables={
                 "tool": "compilers",
                 "tag": config.compilers_tag,
             },
         )
+    else:
+        sys.exit("ProjectConfig.compilers_tag missing")
 
     n.newline()
 
@@ -322,16 +313,16 @@ def generate_build_ninja(config, build_config):
     # MWCC
     mwcc = compiler_path / "mwcceppc.exe"
     mwcc_cmd = f"{wrapper_cmd}{mwcc} $cflags -MMD -c $in -o $basedir"
-    mwcc_implicit = [compilers_implicit or mwcc, wrapper_implicit]
+    mwcc_implicit: List[Optional[Path]] = [compilers_implicit or mwcc, wrapper_implicit]
 
     # MWCC with UTF-8 to Shift JIS wrapper
     mwcc_sjis_cmd = f"{wrapper_cmd}{sjiswrap} {mwcc} $cflags -MMD -c $in -o $basedir"
-    mwcc_sjis_implicit = [*mwcc_implicit, sjiswrap]
+    mwcc_sjis_implicit: List[Optional[Path]] = [*mwcc_implicit, sjiswrap]
 
     # MWLD
     mwld = compiler_path / "mwldeppc.exe"
     mwld_cmd = f"{wrapper_cmd}{mwld} $ldflags -o $out @$out.rsp"
-    mwld_implicit = [compilers_implicit or mwld, wrapper_implicit]
+    mwld_implicit: List[Optional[Path]] = [compilers_implicit or mwld, wrapper_implicit]
 
     if os.name != "nt":
         transform_dep = config.tools_dir / "transform_dep.py"
@@ -364,7 +355,7 @@ def generate_build_ninja(config, build_config):
         name="makerel",
         command=f"{dtk} rel make -w -c $config @{makerel_rsp}",
         description="REL",
-        rspfile=path(makerel_rsp),
+        rspfile=makerel_rsp,
         rspfile_content="$in_newline",
     )
     n.newline()
@@ -415,33 +406,33 @@ def generate_build_ninja(config, build_config):
     build_host_path = build_path / "host"
     build_config_path = build_path / "config.json"
 
-    def map_path(path):
+    def map_path(path: Path) -> Path:
         return path.parent / (path.name + ".MAP")
 
     class LinkStep:
-        def __init__(self, config):
-            self.name = config["name"]
-            self.module_id = config["module_id"]
-            self.ldscript = config["ldscript"]
+        def __init__(self, config: Dict[str, Any]) -> None:
+            self.name: str = config["name"]
+            self.module_id: int = config["module_id"]
+            self.ldscript: Optional[Path] = config["ldscript"]
             self.entry = config["entry"]
-            self.inputs = []
+            self.inputs: List[str] = []
 
-        def add(self, obj):
+        def add(self, obj: str) -> None:
             self.inputs.append(obj)
 
-        def output(self):
+        def output(self) -> Path:
             if self.module_id == 0:
                 return build_path / f"{self.name}.dol"
             else:
                 return build_path / self.name / f"{self.name}.rel"
 
-        def partial_output(self):
+        def partial_output(self) -> Path:
             if self.module_id == 0:
                 return build_path / f"{self.name}.elf"
             else:
                 return build_path / self.name / f"{self.name}.plf"
 
-        def write(self, n):
+        def write(self, n: ninja_syntax.Writer) -> None:
             n.comment(f"Link {self.name}")
             if self.module_id == 0:
                 elf_path = build_path / f"{self.name}.elf"
@@ -453,18 +444,18 @@ def generate_build_ninja(config, build_config):
                 else:
                     elf_map = None
                 n.build(
-                    outputs=path(elf_path),
+                    outputs=elf_path,
                     rule="link",
-                    inputs=path(self.inputs),
-                    implicit=path([self.ldscript, *mwld_implicit]),
-                    implicit_outputs=path(elf_map),
+                    inputs=self.inputs,
+                    implicit=[self.ldscript, *mwld_implicit],
+                    implicit_outputs=elf_map,
                     variables={"ldflags": elf_ldflags},
                 )
                 n.build(
-                    outputs=path(dol_path),
+                    outputs=dol_path,
                     rule="elf2dol",
-                    inputs=path(elf_path),
-                    implicit=path(dtk),
+                    inputs=elf_path,
+                    implicit=dtk,
                 )
             else:
                 preplf_path = build_path / self.name / f"{self.name}.preplf"
@@ -485,37 +476,37 @@ def generate_build_ninja(config, build_config):
                     preplf_map = None
                     plf_map = None
                 n.build(
-                    outputs=path(preplf_path),
+                    outputs=preplf_path,
                     rule="link",
-                    inputs=path(self.inputs),
-                    implicit=path(mwld_implicit),
-                    implicit_outputs=path(preplf_map),
+                    inputs=self.inputs,
+                    implicit=mwld_implicit,
+                    implicit_outputs=preplf_map,
                     variables={"ldflags": preplf_ldflags},
                 )
                 n.build(
-                    outputs=path(plf_path),
+                    outputs=plf_path,
                     rule="link",
-                    inputs=path(self.inputs),
-                    implicit=path([self.ldscript, preplf_path, *mwld_implicit]),
-                    implicit_outputs=path(plf_map),
+                    inputs=self.inputs,
+                    implicit=[self.ldscript, preplf_path, *mwld_implicit],
+                    implicit_outputs=plf_map,
                     variables={"ldflags": plf_ldflags},
                 )
             n.newline()
 
     if build_config:
-        link_steps = []
-        used_compiler_versions = set()
-        source_inputs = []
-        host_source_inputs = []
-        source_added = set()
+        link_steps: List[LinkStep] = []
+        used_compiler_versions: Set[str] = set()
+        source_inputs: List[Path] = []
+        host_source_inputs: List[Path] = []
+        source_added: Set[Path] = set()
 
-        def make_cflags_str(cflags):
+        def make_cflags_str(cflags: Union[str, List[str]]) -> str:
            if isinstance(cflags, list):
                return " ".join(cflags)
            else:
                return cflags
 
-        def add_unit(build_obj, link_step):
+        def add_unit(build_obj: Dict[str, Any], link_step: LinkStep) -> None:
            obj_path, obj_name = build_obj["object"], build_obj["name"]
            result = config.find_object(obj_name)
            if not result:
@@ -531,7 +522,7 @@ def generate_build_ninja(config, build_config):
             options = obj.options
             completed = obj.completed
 
-            unit_src_path = src_dir / options["source"]
+            unit_src_path = src_dir / str(options["source"])
 
             if not unit_src_path.exists():
                 if config.warn_missing_source or completed:
@@ -555,30 +546,30 @@ def generate_build_ninja(config, build_config):
 
             n.comment(f"{obj_name}: {lib_name} (linked {completed})")
             n.build(
-                outputs=path(src_obj_path),
+                outputs=src_obj_path,
                 rule="mwcc_sjis" if options["shiftjis"] else "mwcc",
-                inputs=path(unit_src_path),
+                inputs=unit_src_path,
                 variables={
-                    "mw_version": path(Path(mw_version)),
+                    "mw_version": Path(mw_version),
                     "cflags": cflags_str,
                     "basedir": os.path.dirname(src_base_path),
-                    "basefile": path(src_base_path),
+                    "basefile": src_base_path,
                 },
-                implicit=path(
-                    mwcc_sjis_implicit if options["shiftjis"] else mwcc_implicit
-                ),
+                implicit=mwcc_sjis_implicit
+                if options["shiftjis"]
+                else mwcc_implicit,
             )
 
             if lib["host"]:
                 host_obj_path = build_host_path / f"{base_object}.o"
                 host_base_path = build_host_path / base_object
                 n.build(
-                    outputs=path(host_obj_path),
+                    outputs=host_obj_path,
                     rule="host_cc" if unit_src_path.suffix == ".c" else "host_cpp",
-                    inputs=path(unit_src_path),
+                    inputs=unit_src_path,
                     variables={
                         "basedir": os.path.dirname(host_base_path),
-                        "basefile": path(host_base_path),
+                        "basefile": host_base_path,
                     },
                 )
                 if options["add_to_all"]:
@@ -626,7 +617,7 @@ def generate_build_ninja(config, build_config):
                 sys.exit(f"Compiler {mw_path} does not exist")
 
         # Check if linker exists
-        mw_path = compilers / config.linker_version / "mwldeppc.exe"
+        mw_path = compilers / str(config.linker_version) / "mwldeppc.exe"
         if config.compilers_path and not os.path.exists(mw_path):
             sys.exit(f"Linker {mw_path} does not exist")
 
@@ -640,7 +631,7 @@ def generate_build_ninja(config, build_config):
         ###
         # Generate RELs
        ###
-        generated_rels = []
+        generated_rels: List[str] = []
         for link in build_config["links"]:
             # Map module names to link steps
             link_steps_local = list(
@@ -670,13 +661,11 @@ def generate_build_ninja(config, build_config):
             )
             n.comment("Generate RELs")
             n.build(
-                outputs=path(rel_outputs),
+                outputs=rel_outputs,
                 rule="makerel",
-                inputs=path(
-                    list(map(lambda step: step.partial_output(), link_steps_local))
-                ),
-                implicit=path([dtk, config.config_path]),
-                variables={"config": path(config.config_path)},
+                inputs=list(map(lambda step: step.partial_output(), link_steps_local)),
+                implicit=[dtk, config.config_path],
+                variables={"config": config.config_path},
             )
             n.newline()
 
@@ -687,7 +676,7 @@ def generate_build_ninja(config, build_config):
         n.build(
             outputs="all_source",
             rule="phony",
-            inputs=path(source_inputs),
+            inputs=source_inputs,
         )
         n.newline()
 
@@ -698,7 +687,7 @@ def generate_build_ninja(config, build_config):
         n.build(
             outputs="all_source_host",
             rule="phony",
-            inputs=path(host_source_inputs),
+            inputs=host_source_inputs,
         )
         n.newline()
 
@@ -714,10 +703,10 @@ def generate_build_ninja(config, build_config):
             description="CHECK $in",
         )
         n.build(
-            outputs=path(ok_path),
+            outputs=ok_path,
             rule="check",
-            inputs=path(config.check_sha_path),
-            implicit=path([dtk, *map(lambda step: step.output(), link_steps)]),
+            inputs=config.check_sha_path,
+            implicit=[dtk, *map(lambda step: step.output(), link_steps)],
         )
         n.newline()
 
@@ -732,9 +721,9 @@ def generate_build_ninja(config, build_config):
             description="PROGRESS",
         )
         n.build(
-            outputs=path(progress_path),
+            outputs=progress_path,
             rule="progress",
-            implicit=path([ok_path, configure_script, python_lib, config.config_path]),
+            implicit=[ok_path, configure_script, python_lib, config.config_path],
         )
 
         ###
@@ -750,7 +739,7 @@ def generate_build_ninja(config, build_config):
             description=f"DIFF {dol_elf_path}",
         )
         n.build(
-            inputs=path([config.config_path, dol_elf_path]),
+            inputs=[config.config_path, dol_elf_path],
             outputs="dol_diff",
             rule="dol_diff",
         )
@@ -768,10 +757,10 @@ def generate_build_ninja(config, build_config):
             description=f"APPLY {dol_elf_path}",
        )
         n.build(
-            inputs=path([config.config_path, dol_elf_path]),
+            inputs=[config.config_path, dol_elf_path],
             outputs="dol_apply",
             rule="dol_apply",
-            implicit=path([ok_path]),
+            implicit=[ok_path],
         )
         n.build(
             outputs="apply",
@@ -792,11 +781,11 @@ def generate_build_ninja(config, build_config):
         deps="gcc",
     )
     n.build(
-        inputs=path(config.config_path),
-        outputs=path(build_config_path),
+        inputs=config.config_path,
+        outputs=build_config_path,
         rule="split",
-        implicit=path(dtk),
-        variables={"out_dir": path(build_path)},
+        implicit=dtk,
+        variables={"out_dir": build_path},
     )
     n.newline()
 
@@ -813,14 +802,12 @@ def generate_build_ninja(config, build_config):
     n.build(
         outputs="build.ninja",
         rule="configure",
-        implicit=path(
-            [
-                build_config_path,
-                configure_script,
-                python_lib,
-                Path(python_lib_dir) / "ninja_syntax.py",
-            ]
-        ),
+        implicit=[
+            build_config_path,
+            configure_script,
+            python_lib,
+            python_lib_dir / "ninja_syntax.py",
+        ],
     )
     n.newline()
 
@@ -829,9 +816,9 @@ def generate_build_ninja(config, build_config):
     ###
     n.comment("Default rule")
     if build_config:
-        n.default(path(progress_path))
+        n.default(progress_path)
     else:
-        n.default(path(build_config_path))
+        n.default(build_config_path)
 
     # Write build.ninja
     with open("build.ninja", "w", encoding="utf-8") as f:
@@ -840,11 +827,13 @@ def generate_build_ninja(config, build_config):
 
 
 # Generate objdiff.json
-def generate_objdiff_config(config, build_config):
+def generate_objdiff_config(
+    config: ProjectConfig, build_config: Optional[Dict[str, Any]]
+) -> None:
     if not build_config:
         return
 
-    objdiff_config = {
+    objdiff_config: Dict[str, Any] = {
         "min_version": "0.4.3",
         "custom_make": "ninja",
         "build_target": False,
@@ -865,16 +854,16 @@ def generate_objdiff_config(config, build_config):
 
     build_path = config.out_path()
 
-    def add_unit(build_obj, module_name):
+    def add_unit(build_obj: Dict[str, Any], module_name: str) -> None:
         if build_obj["autogenerated"]:
             # Skip autogenerated objects
             return
 
         obj_path, obj_name = build_obj["object"], build_obj["name"]
         base_object = Path(obj_name).with_suffix("")
-        unit_config = {
-            "name": unix_str(Path(module_name) / base_object),
-            "target_path": unix_str(obj_path),
+        unit_config: Dict[str, Any] = {
+            "name": Path(module_name) / base_object,
+            "target_path": obj_path,
         }
 
         result = config.find_object(obj_name)
@@ -885,7 +874,7 @@ def generate_objdiff_config(config, build_config):
         lib, obj = result
         src_dir = Path(lib.get("src_dir", config.src_dir))
 
-        unit_src_path = src_dir / obj.options["source"]
+        unit_src_path = src_dir / str(obj.options["source"])
 
         if not unit_src_path.exists():
             objdiff_config["units"].append(unit_config)
@@ -905,7 +894,7 @@ def generate_objdiff_config(config, build_config):
             elif value == "nodeferred":
                 reverse_fn_order = False
 
-        unit_config["base_path"] = unix_str(src_obj_path)
+        unit_config["base_path"] = src_obj_path
         unit_config["reverse_fn_order"] = reverse_fn_order
         unit_config["complete"] = obj.completed
         objdiff_config["units"].append(unit_config)
@@ -921,32 +910,34 @@ def generate_objdiff_config(config, build_config):
 
     # Write objdiff.json
     with open("objdiff.json", "w", encoding="utf-8") as w:
-        json.dump(objdiff_config, w, indent=4)
+        from .ninja_syntax import serialize_path
+
+        json.dump(objdiff_config, w, indent=4, default=serialize_path)
 
 
 # Calculate, print and write progress to progress.json
-def calculate_progress(config):
+def calculate_progress(config: ProjectConfig) -> None:
     out_path = config.out_path()
     build_config = load_build_config(config, out_path / "config.json")
     if not build_config:
         return
 
     class ProgressUnit:
-        def __init__(self, name):
-            self.name = name
-            self.code_total = 0
-            self.code_fancy_frac = config.progress_code_fancy_frac
-            self.code_fancy_item = config.progress_code_fancy_item
-            self.code_progress = 0
-            self.data_total = 0
-            self.data_fancy_frac = config.progress_data_fancy_frac
-            self.data_fancy_item = config.progress_data_fancy_item
-            self.data_progress = 0
-            self.objects_progress = 0
-            self.objects_total = 0
-            self.objects = set()
+        def __init__(self, name: str) -> None:
+            self.name: str = name
+            self.code_total: int = 0
+            self.code_fancy_frac: int = config.progress_code_fancy_frac
+            self.code_fancy_item: str = config.progress_code_fancy_item
+            self.code_progress: int = 0
+            self.data_total: int = 0
+            self.data_fancy_frac: int = config.progress_data_fancy_frac
+            self.data_fancy_item: str = config.progress_data_fancy_item
+            self.data_progress: int = 0
+            self.objects_progress: int = 0
+            self.objects_total: int = 0
+            self.objects: Set[Object] = set()
 
-        def add(self, build_obj):
+        def add(self, build_obj: Dict[str, Any]) -> None:
             self.code_total += build_obj["code_size"]
             self.data_total += build_obj["data_size"]
 
@@ -973,10 +964,10 @@ def calculate_progress(config):
             if include_object:
                 self.objects_progress += 1
 
-        def code_frac(self):
+        def code_frac(self) -> float:
             return self.code_progress / self.code_total
 
-        def data_frac(self):
+        def data_frac(self) -> float:
             return self.data_progress / self.data_total
 
     # Add DOL units
@@ -989,7 +980,7 @@ def calculate_progress(config):
 
     # Add REL units
     rels_progress = ProgressUnit("Modules") if config.progress_modules else None
-    modules_progress = []
+    modules_progress: List[ProgressUnit] = []
     for module in build_config["modules"]:
         progress = ProgressUnit(module["name"])
         modules_progress.append(progress)
@@ -1003,7 +994,7 @@ def calculate_progress(config):
     # Print human-readable progress
     print("Progress:")
 
-    def print_category(unit):
+    def print_category(unit: Optional[ProgressUnit]) -> None:
         if unit is None:
             return
 
@@ -1037,9 +1028,9 @@ def calculate_progress(config):
         print_category(progress)
 
     # Generate and write progress.json
-    progress_json = {}
+    progress_json: Dict[str, Any] = {}
 
-    def add_category(category, unit):
+    def add_category(category: str, unit: ProgressUnit) -> None:
         progress_json[category] = {
             "code": unit.code_progress,
             "code/total": unit.code_total,
tools/transform_dep.py
@@ -25,7 +25,7 @@ def in_wsl() -> bool:
     return "microsoft-standard" in uname().release
 
 
-def import_d_file(in_file) -> str:
+def import_d_file(in_file: str) -> str:
     out_text = ""
 
     with open(in_file) as file:
@@ -60,7 +60,7 @@ def import_d_file(in_file) -> str:
     return out_text
 
 
-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
         description="""Transform a .d file from Wine paths to normal paths"""
     )
tools/upload_progress.py
@@ -51,7 +51,7 @@ if __name__ == "__main__":
     args = parser.parse_args()
     api_key = args.api_key or os.environ.get("PROGRESS_API_KEY")
     if not api_key:
-        raise "API key required"
+        raise KeyError("API key required")
     url = generate_url(args)
 
     entries = []
@@ -68,9 +68,12 @@ if __name__ == "__main__":
     print("Publishing entry to", url)
     json.dump(entries[0], sys.stdout, indent=4)
     print()
-    r = requests.post(url, json={
-        "api_key": api_key,
-        "entries": entries,
-    })
+    r = requests.post(
+        url,
+        json={
+            "api_key": api_key,
+            "entries": entries,
+        },
+    )
     r.raise_for_status()
     print("Done!")