Support custom progress categories (#33)

* WIP custom progress categories

* Working progress categories & type fixes
Luke Street 2024-09-04 08:55:19 -06:00 committed by GitHub
parent 4b1995b381
commit af452140cc
2 changed files with 280 additions and 219 deletions
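Example usage (a minimal sketch of the new API; the "game"/"sdk" category IDs mirror the template defaults below, and cflags_runtime is assumed to be defined earlier in configure.py):

from tools.project import *

config = ProjectConfig()

# Declare the categories that should appear in progress reports
config.progress_categories = [
    ProgressCategory("game", "Game Code"),
    ProgressCategory("sdk", "SDK Code"),
]

# Tag a library (or an individual Object) with a category
config.libs = [
    {
        "lib": "Runtime.PPCEABI.H",
        "mw_version": config.linker_version,
        "cflags": cflags_runtime,  # defined earlier in configure.py
        "progress_category": "sdk",  # str | List[str]
        "objects": [
            Object(NonMatching, "Runtime.PPCEABI.H/global_destructor_chain.c"),
        ],
    },
]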

configure.py

@@ -16,14 +16,7 @@ import argparse
import sys
from pathlib import Path
from typing import Any, Dict, List
from tools.project import (
Object,
ProjectConfig,
calculate_progress,
generate_build,
is_windows,
)
from tools.project import *
# Game versions
DEFAULT_VERSION = 0
@@ -138,7 +131,7 @@ if not config.non_matching:
config.binutils_tag = "2.42-1"
config.compilers_tag = "20240706"
config.dtk_tag = "v0.9.4"
config.objdiff_tag = "v2.0.0-beta.3"
config.objdiff_tag = "v2.0.0-beta.5"
config.sjiswrap_tag = "v1.1.1"
config.wibo_tag = "0.6.11"
@@ -218,7 +211,7 @@ def DolphinLib(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
"lib": lib_name,
"mw_version": "GC/1.2.5n",
"cflags": cflags_base,
"host": False,
"progress_category": "sdk",
"objects": objects,
}
@@ -229,7 +222,7 @@ def Rel(lib_name: str, objects: List[Object]) -> Dict[str, Any]:
"lib": lib_name,
"mw_version": "GC/1.3.2",
"cflags": cflags_rel,
"host": True,
"progress_category": "game",
"objects": objects,
}
@@ -245,7 +238,7 @@ config.libs = [
"lib": "Runtime.PPCEABI.H",
"mw_version": config.linker_version,
"cflags": cflags_runtime,
"host": False,
"progress_category": "sdk", # str | List[str]
"objects": [
Object(NonMatching, "Runtime.PPCEABI.H/global_destructor_chain.c"),
Object(NonMatching, "Runtime.PPCEABI.H/__init_cpp_exceptions.cpp"),
@@ -253,12 +246,19 @@ config.libs = [
},
]
# Optional extra categories for progress tracking
# Adjust as desired for your project
config.progress_categories = [
ProgressCategory("game", "Game Code"),
ProgressCategory("sdk", "SDK Code"),
]
config.progress_each_module = args.verbose
if args.mode == "configure":
# Write build.ninja and objdiff.json
generate_build(config)
elif args.mode == "progress":
# Print progress and write progress.json
config.progress_each_module = args.verbose
calculate_progress(config)
else:
sys.exit("Unknown mode: " + args.mode)

tools/project.py

@@ -17,7 +17,7 @@ import os
import platform
import sys
from pathlib import Path
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from typing import Any, Dict, List, Optional, Set, Tuple, Union, cast
from . import ninja_syntax
from .ninja_syntax import serialize_path
@@ -29,24 +29,78 @@ if sys.platform == "cygwin":
f"\n(Current path: {sys.executable})"
)
Library = Dict[str, Any]
class Object:
def __init__(self, completed: bool, name: str, **options: Any) -> None:
self.name = name
self.base_name = Path(name).with_suffix("")
self.completed = completed
self.options: Dict[str, Any] = {
"add_to_all": True,
"add_to_all": None,
"asflags": None,
"extra_asflags": None,
"asm_dir": None,
"cflags": None,
"extra_asflags": None,
"extra_cflags": None,
"host": None,
"lib": None,
"mw_version": None,
"progress_category": None,
"shift_jis": None,
"source": name,
"src_dir": None,
}
self.options.update(options)
# Internal
self.src_path: Optional[Path] = None
self.asm_path: Optional[Path] = None
self.src_obj_path: Optional[Path] = None
self.asm_obj_path: Optional[Path] = None
self.host_obj_path: Optional[Path] = None
self.ctx_path: Optional[Path] = None
def resolve(self, config: "ProjectConfig", lib: Library) -> "Object":
# Use object options, then library options
obj = Object(self.completed, self.name, **lib)
for key, value in self.options.items():
if value is not None or key not in obj.options:
obj.options[key] = value
# Use default options from config
def set_default(key: str, value: Any) -> None:
if obj.options[key] is None:
obj.options[key] = value
set_default("add_to_all", True)
set_default("asflags", config.asflags)
set_default("asm_dir", config.asm_dir)
set_default("host", False)
set_default("mw_version", config.linker_version)
set_default("shift_jis", config.shift_jis)
set_default("src_dir", config.src_dir)
# Resolve paths
build_dir = config.out_path()
obj.src_path = Path(obj.options["src_dir"]) / obj.options["source"]
if obj.options["asm_dir"] is not None:
obj.asm_path = (
Path(obj.options["asm_dir"]) / obj.options["source"]
).with_suffix(".s")
base_name = Path(self.name).with_suffix("")
obj.src_obj_path = build_dir / "src" / f"{base_name}.o"
obj.asm_obj_path = build_dir / "mod" / f"{base_name}.o"
obj.host_obj_path = build_dir / "host" / f"{base_name}.o"
obj.ctx_path = build_dir / "src" / f"{base_name}.ctx"
return obj
class ProgressCategory:
def __init__(self, id: str, name: str) -> None:
self.id = id
self.name = name
class ProjectConfig:
def __init__(self) -> None:
@@ -81,7 +135,7 @@ class ProjectConfig:
self.generate_map: bool = False # Generate map file(s)
self.asflags: Optional[List[str]] = None # Assembler flags
self.ldflags: Optional[List[str]] = None # Linker flags
self.libs: Optional[List[Dict[str, Any]]] = None # List of libraries
self.libs: Optional[List[Library]] = None # List of libraries
self.linker_version: Optional[str] = None # mwld version
self.version: Optional[str] = None # Version name
self.warn_missing_config: bool = False # Warn on missing unit configuration
@@ -103,12 +157,13 @@ class ProjectConfig:
None # Custom build steps, types are ["pre-compile", "post-compile", "post-link", "post-build"]
)
# Progress output and progress.json config
# Progress output, progress.json and report.json config
self.progress_all: bool = True # Include combined "all" category
self.progress_modules: bool = True # Include combined "modules" category
self.progress_each_module: bool = (
True # Include individual modules, disable for large numbers of modules
False # Include individual modules, disable for large numbers of modules
)
self.progress_categories: List[ProgressCategory] = [] # Additional categories
# Progress fancy printing
self.progress_use_fancy: bool = False
@@ -133,12 +188,17 @@ class ProjectConfig:
if getattr(self, attr) is None:
sys.exit(f"ProjectConfig.{attr} missing")
def find_object(self, name: str) -> Optional[Tuple[Dict[str, Any], Object]]:
# Creates a map of object names to Object instances
# Options are fully resolved from the library and object
def objects(self) -> Dict[str, Object]:
out = {}
for lib in self.libs or {}:
for obj in lib["objects"]:
if obj.name == name:
return lib, obj
return None
objects: List[Object] = lib["objects"]
for obj in objects:
if obj.name in out:
sys.exit(f"Duplicate object name {obj.name}")
out[obj.name] = obj.resolve(self, lib)
return out
def out_path(self) -> Path:
return self.build_dir / str(self.version)
@@ -174,7 +234,7 @@ def load_build_config(
f = open(build_config_path, "r", encoding="utf-8")
build_config: Dict[str, Any] = json.load(f)
config_version = build_config.get("version")
if not config_version:
if config_version is None:
# Invalid config.json
f.close()
os.remove(build_config_path)
@@ -193,17 +253,19 @@ def load_build_config(
# Generate build.ninja and objdiff.json
def generate_build(config: ProjectConfig) -> None:
config.validate()
objects = config.objects()
build_config = load_build_config(config, config.out_path() / "config.json")
generate_build_ninja(config, build_config)
generate_objdiff_config(config, build_config)
generate_build_ninja(config, objects, build_config)
generate_objdiff_config(config, objects, build_config)
# Generate build.ninja
def generate_build_ninja(
config: ProjectConfig, build_config: Optional[Dict[str, Any]]
config: ProjectConfig,
objects: Dict[str, Object],
build_config: Optional[Dict[str, Any]],
) -> None:
config.validate()
out = io.StringIO()
n = ninja_syntax.Writer(out)
n.variable("ninja_required_version", "1.3")
@@ -227,7 +289,7 @@ def generate_build_ninja(
if config.debug:
ldflags += " -g"
n.variable("ldflags", ldflags)
if not config.linker_version:
if config.linker_version is None:
sys.exit("ProjectConfig.linker_version missing")
n.variable("mw_version", Path(config.linker_version))
n.newline()
@@ -506,8 +568,8 @@ def generate_build_ninja(
n.comment("Custom project build rules (pre/post-processing)")
for rule in config.custom_build_rules or {}:
n.rule(
name=rule.get("name"),
command=rule.get("command"),
name=cast(str, rule.get("name")),
command=cast(str, rule.get("command")),
description=rule.get("description", None),
depfile=rule.get("depfile", None),
generator=rule.get("generator", False),
@@ -519,12 +581,12 @@ def generate_build_ninja(
)
n.newline()
def write_custom_step(step: str) -> List[str]:
implicit = []
def write_custom_step(step: str) -> List[str | Path]:
implicit: List[str | Path] = []
if config.custom_build_steps and step in config.custom_build_steps:
n.comment(f"Custom build steps ({step})")
for custom_step in config.custom_build_steps[step]:
outputs = custom_step.get("outputs")
outputs = cast(List[str | Path], custom_step.get("outputs"))
if isinstance(outputs, list):
implicit.extend(outputs)
@@ -533,7 +595,7 @@ def generate_build_ninja(
n.build(
outputs=outputs,
rule=custom_step.get("rule"),
rule=cast(str, custom_step.get("rule")),
inputs=custom_step.get("inputs", None),
implicit=custom_step.get("implicit", None),
order_only=custom_step.get("order_only", None),
@@ -570,10 +632,6 @@ def generate_build_ninja(
# Source files
###
n.comment("Source files")
build_asm_path = build_path / "mod"
build_src_path = build_path / "src"
build_host_path = build_path / "host"
build_config_path = build_path / "config.json"
def map_path(path: Path) -> Path:
return path.parent / (path.name + ".MAP")
@@ -669,99 +727,85 @@ def generate_build_ninja(
host_source_inputs: List[Path] = []
source_added: Set[Path] = set()
def c_build(
obj: Object, options: Dict[str, Any], lib_name: str, src_path: Path
) -> Optional[Path]:
cflags_str = make_flags_str(options["cflags"])
if options["extra_cflags"] is not None:
extra_cflags_str = make_flags_str(options["extra_cflags"])
def c_build(obj: Object, src_path: Path) -> Optional[Path]:
cflags_str = make_flags_str(obj.options["cflags"])
if obj.options["extra_cflags"] is not None:
extra_cflags_str = make_flags_str(obj.options["extra_cflags"])
cflags_str += " " + extra_cflags_str
used_compiler_versions.add(options["mw_version"])
src_obj_path = build_src_path / f"{obj.base_name}.o"
src_base_path = build_src_path / obj.base_name
used_compiler_versions.add(obj.options["mw_version"])
# Avoid creating duplicate build rules
if src_obj_path in source_added:
return src_obj_path
source_added.add(src_obj_path)
shift_jis = options["shift_jis"]
if shift_jis is None:
shift_jis = config.shift_jis
if obj.src_obj_path is None or obj.src_obj_path in source_added:
return obj.src_obj_path
source_added.add(obj.src_obj_path)
# Add MWCC build rule
lib_name = obj.options["lib"]
n.comment(f"{obj.name}: {lib_name} (linked {obj.completed})")
n.build(
outputs=src_obj_path,
rule="mwcc_sjis" if shift_jis else "mwcc",
outputs=obj.src_obj_path,
rule="mwcc_sjis" if obj.options["shift_jis"] else "mwcc",
inputs=src_path,
variables={
"mw_version": Path(options["mw_version"]),
"mw_version": Path(obj.options["mw_version"]),
"cflags": cflags_str,
"basedir": os.path.dirname(src_base_path),
"basefile": src_base_path,
"basedir": os.path.dirname(obj.src_obj_path),
"basefile": obj.src_obj_path.with_suffix(""),
},
implicit=mwcc_sjis_implicit if shift_jis else mwcc_implicit,
implicit=(
mwcc_sjis_implicit if obj.options["shift_jis"] else mwcc_implicit
),
)
# Add ctx build rule
ctx_path = build_src_path / f"{obj.base_name}.ctx"
if obj.ctx_path is not None:
n.build(
outputs=ctx_path,
outputs=obj.ctx_path,
rule="decompctx",
inputs=src_path,
implicit=decompctx,
)
# Add host build rule
if options.get("host", False):
host_obj_path = build_host_path / f"{obj.base_name}.o"
host_base_path = build_host_path / obj.base_name
if obj.options["host"] and obj.host_obj_path is not None:
n.build(
outputs=host_obj_path,
outputs=obj.host_obj_path,
rule="host_cc" if src_path.suffix == ".c" else "host_cpp",
inputs=src_path,
variables={
"basedir": os.path.dirname(host_base_path),
"basefile": host_base_path,
"basedir": os.path.dirname(obj.host_obj_path),
"basefile": obj.host_obj_path.with_suffix(""),
},
)
if options["add_to_all"]:
host_source_inputs.append(host_obj_path)
if obj.options["add_to_all"]:
host_source_inputs.append(obj.host_obj_path)
n.newline()
if options["add_to_all"]:
source_inputs.append(src_obj_path)
if obj.options["add_to_all"]:
source_inputs.append(obj.src_obj_path)
return src_obj_path
return obj.src_obj_path
def asm_build(
obj: Object,
options: Dict[str, Any],
lib_name: str,
src_path: Path,
build_path: Path,
obj: Object, src_path: Path, obj_path: Optional[Path]
) -> Optional[Path]:
asflags = options["asflags"] or config.asflags
if asflags is None:
if obj.options["asflags"] is None:
sys.exit("ProjectConfig.asflags missing")
asflags_str = make_flags_str(asflags)
if options["extra_asflags"] is not None:
extra_asflags_str = make_flags_str(options["extra_asflags"])
asflags_str = make_flags_str(obj.options["asflags"])
if obj.options["extra_asflags"] is not None:
extra_asflags_str = make_flags_str(obj.options["extra_asflags"])
asflags_str += " " + extra_asflags_str
asm_obj_path = build_path / f"{obj.base_name}.o"
# Avoid creating duplicate build rules
if asm_obj_path in source_added:
return asm_obj_path
source_added.add(asm_obj_path)
if obj_path is None or obj_path in source_added:
return obj_path
source_added.add(obj_path)
# Add assembler build rule
lib_name = obj.options["lib"]
n.comment(f"{obj.name}: {lib_name} (linked {obj.completed})")
n.build(
outputs=asm_obj_path,
outputs=obj_path,
rule="as",
inputs=src_path,
variables={"asflags": asflags_str},
@@ -769,61 +813,40 @@ def generate_build_ninja(
)
n.newline()
if options["add_to_all"]:
source_inputs.append(asm_obj_path)
if obj.options["add_to_all"]:
source_inputs.append(obj_path)
return asm_obj_path
return obj_path
def add_unit(build_obj, link_step: LinkStep):
obj_path, obj_name = build_obj["object"], build_obj["name"]
result = config.find_object(obj_name)
if not result:
obj = objects.get(obj_name)
if obj is None:
if config.warn_missing_config and not build_obj["autogenerated"]:
print(f"Missing configuration for {obj_name}")
link_step.add(obj_path)
return
lib, obj = result
lib_name = lib["lib"]
# Use object options, then library options
options = lib.copy()
for key, value in obj.options.items():
if value is not None or key not in options:
options[key] = value
unit_src_path = Path(lib.get("src_dir", config.src_dir)) / options["source"]
unit_asm_path: Optional[Path] = None
if config.asm_dir is not None:
unit_asm_path = (
Path(lib.get("asm_dir", config.asm_dir)) / options["source"]
).with_suffix(".s")
link_built_obj = obj.completed
built_obj_path: Optional[Path] = None
if unit_src_path.exists():
if unit_src_path.suffix in (".c", ".cp", ".cpp"):
if obj.src_path is not None and obj.src_path.exists():
if obj.src_path.suffix in (".c", ".cp", ".cpp"):
# Add MWCC & host build rules
built_obj_path = c_build(obj, options, lib_name, unit_src_path)
elif unit_src_path.suffix == ".s":
built_obj_path = c_build(obj, obj.src_path)
elif obj.src_path.suffix == ".s":
# Add assembler build rule
built_obj_path = asm_build(
obj, options, lib_name, unit_src_path, build_src_path
)
built_obj_path = asm_build(obj, obj.src_path, obj.src_obj_path)
else:
sys.exit(f"Unknown source file type {unit_src_path}")
sys.exit(f"Unknown source file type {obj.src_path}")
else:
if config.warn_missing_source or obj.completed:
print(f"Missing source file {unit_src_path}")
print(f"Missing source file {obj.src_path}")
link_built_obj = False
# Assembly overrides
if unit_asm_path is not None and unit_asm_path.exists():
if obj.asm_path is not None and obj.asm_path.exists():
link_built_obj = True
built_obj_path = asm_build(
obj, options, lib_name, unit_asm_path, build_asm_path
)
built_obj_path = asm_build(obj, obj.asm_path, obj.asm_obj_path)
if link_built_obj and built_obj_path is not None:
# Use the source-built object
@@ -832,7 +855,10 @@ def generate_build_ninja(
# Use the original (extracted) object
link_step.add(obj_path)
else:
sys.exit(f"Missing object for {obj_name}: {unit_src_path} {lib} {obj}")
lib_name = obj.options["lib"]
sys.exit(
f"Missing object for {obj_name}: {obj.src_path} {lib_name} {obj}"
)
# Add DOL link step
link_step = LinkStep(build_config)
@@ -848,7 +874,7 @@ def generate_build_ninja(
add_unit(unit, module_link_step)
# Add empty object to empty RELs
if len(module_link_step.inputs) == 0:
if not config.rel_empty_file:
if config.rel_empty_file is None:
sys.exit("ProjectConfig.rel_empty_file missing")
add_unit(
{
@@ -910,7 +936,7 @@ def generate_build_ninja(
rspfile="$rspfile",
rspfile_content="$in_newline",
)
generated_rels = []
generated_rels: List[str] = []
for idx, link in enumerate(build_config["links"]):
# Map module names to link steps
link_steps_local = list(
@@ -1026,10 +1052,11 @@ def generate_build_ninja(
command=f"{objdiff} report generate -o $out",
description="REPORT",
)
report_implicit: List[str | Path] = [objdiff, "all_source"]
n.build(
outputs=report_path,
rule="report",
implicit=[objdiff, "all_source"],
implicit=report_implicit,
)
###
@@ -1078,6 +1105,7 @@ def generate_build_ninja(
###
# Split DOL
###
build_config_path = build_path / "config.json"
n.comment("Split DOL into relocatable objects")
n.rule(
name="split",
@@ -1138,13 +1166,15 @@ def generate_build_ninja(
# Generate objdiff.json
def generate_objdiff_config(
config: ProjectConfig, build_config: Optional[Dict[str, Any]]
config: ProjectConfig,
objects: Dict[str, Object],
build_config: Optional[Dict[str, Any]],
) -> None:
if not build_config:
if build_config is None:
return
objdiff_config: Dict[str, Any] = {
"min_version": "1.0.0",
"min_version": "2.0.0-beta.5",
"custom_make": "ninja",
"build_target": False,
"watch_patterns": [
@@ -1160,6 +1190,7 @@ def generate_objdiff_config(
"*.json",
],
"units": [],
"progress_categories": [],
}
# decomp.me compiler name mapping
@@ -1170,6 +1201,7 @@ def generate_objdiff_config(
"GC/1.2.5": "mwcc_233_163",
"GC/1.2.5e": "mwcc_233_163e",
"GC/1.2.5n": "mwcc_233_163n",
"GC/1.3": "mwcc_242_53",
"GC/1.3.2": "mwcc_242_81",
"GC/1.3.2r": "mwcc_242_81r",
"GC/2.0": "mwcc_247_92",
@@ -1194,44 +1226,25 @@ def generate_objdiff_config(
"Wii/1.7": "mwcc_43_213",
}
build_path = config.out_path()
def add_unit(build_obj: Dict[str, Any], module_name: str) -> None:
if build_obj["autogenerated"]:
# Skip autogenerated objects
return
def add_unit(
build_obj: Dict[str, Any], module_name: str, progress_categories: List[str]
) -> None:
obj_path, obj_name = build_obj["object"], build_obj["name"]
base_object = Path(obj_name).with_suffix("")
unit_config: Dict[str, Any] = {
"name": Path(module_name) / base_object,
"target_path": obj_path,
"metadata": {
"auto_generated": build_obj["autogenerated"],
},
}
result = config.find_object(obj_name)
if not result:
obj = objects.get(obj_name)
if obj is None or not obj.src_path or not obj.src_path.exists():
objdiff_config["units"].append(unit_config)
return
lib, obj = result
src_dir = Path(lib.get("src_dir", config.src_dir))
# Use object options, then library options
options = lib.copy()
for key, value in obj.options.items():
if value is not None or key not in options:
options[key] = value
unit_src_path = src_dir / str(options["source"])
if not unit_src_path.exists():
objdiff_config["units"].append(unit_config)
return
cflags = options["cflags"]
src_obj_path = build_path / "src" / f"{obj.base_name}.o"
src_ctx_path = build_path / "src" / f"{obj.base_name}.ctx"
cflags = obj.options["cflags"]
reverse_fn_order = False
if type(cflags) is list:
for flag in cflags:
@@ -1250,39 +1263,77 @@ def generate_objdiff_config(
cflags = list(filter(keep_flag, cflags))
# Add appropriate lang flag
if unit_src_path.suffix in (".cp", ".cpp"):
if obj.src_path.suffix in (".cp", ".cpp"):
cflags.insert(0, "-lang=c++")
else:
cflags.insert(0, "-lang=c")
unit_config["base_path"] = src_obj_path
unit_config["reverse_fn_order"] = reverse_fn_order
unit_config["complete"] = obj.completed
compiler_version = COMPILER_MAP.get(options["mw_version"])
unit_config["base_path"] = obj.src_obj_path
compiler_version = COMPILER_MAP.get(obj.options["mw_version"])
if compiler_version is None:
print(f"Missing scratch compiler mapping for {options['mw_version']}")
print(f"Missing scratch compiler mapping for {obj.options['mw_version']}")
else:
cflags_str = make_flags_str(cflags)
if options["extra_cflags"] is not None:
extra_cflags_str = make_flags_str(options["extra_cflags"])
if obj.options["extra_cflags"] is not None:
extra_cflags_str = make_flags_str(obj.options["extra_cflags"])
cflags_str += " " + extra_cflags_str
unit_config["scratch"] = {
"platform": "gc_wii",
"compiler": compiler_version,
"c_flags": cflags_str,
"ctx_path": src_ctx_path,
"ctx_path": obj.ctx_path,
"build_ctx": True,
}
category_opt: List[str] | str = obj.options["progress_category"]
if isinstance(category_opt, list):
progress_categories.extend(category_opt)
elif category_opt is not None:
progress_categories.append(category_opt)
unit_config["metadata"].update({
"complete": obj.completed,
"reverse_fn_order": reverse_fn_order,
"source_path": obj.src_path,
"progress_categories": progress_categories,
})
objdiff_config["units"].append(unit_config)
# Add DOL units
for unit in build_config["units"]:
add_unit(unit, build_config["name"])
progress_categories = []
# Only include a "dol" category if there are any modules
# Otherwise it's redundant with the global report measures
if len(build_config["modules"]) > 0:
progress_categories.append("dol")
add_unit(unit, build_config["name"], progress_categories)
# Add REL units
for module in build_config["modules"]:
for unit in module["units"]:
add_unit(unit, module["name"])
progress_categories = []
if config.progress_modules:
progress_categories.append("modules")
if config.progress_each_module:
progress_categories.append(module["name"])
add_unit(unit, module["name"], progress_categories)
# Add progress categories
def add_category(id: str, name: str):
objdiff_config["progress_categories"].append(
{
"id": id,
"name": name,
}
)
if len(build_config["modules"]) > 0:
add_category("dol", "DOL")
if config.progress_modules:
add_category("modules", "Modules")
if config.progress_each_module:
for module in build_config["modules"]:
add_category(module["name"], module["name"])
for category in config.progress_categories:
add_category(category.id, category.name)
# Write objdiff.json
with open("objdiff.json", "w", encoding="utf-8") as w:
@@ -1295,9 +1346,11 @@ def generate_objdiff_config(
# Calculate, print and write progress to progress.json
def calculate_progress(config: ProjectConfig) -> None:
config.validate()
objects = config.objects()
out_path = config.out_path()
build_config = load_build_config(config, out_path / "config.json")
if not build_config:
if build_config is None:
return
class ProgressUnit:
@@ -1329,12 +1382,8 @@ def calculate_progress(config: ProjectConfig) -> None:
# Skip autogenerated objects
return
result = config.find_object(build_obj["name"])
if not result:
return
_, obj = result
if not obj.completed:
obj = objects.get(build_obj["name"])
if obj is None or not obj.completed:
return
self.code_progress += build_obj["code_size"]
@@ -1348,26 +1397,52 @@ def calculate_progress(config: ProjectConfig) -> None:
def data_frac(self) -> float:
return self.data_progress / self.data_total
progress_units: Dict[str, ProgressUnit] = {}
if config.progress_all:
progress_units["all"] = ProgressUnit("All")
progress_units["dol"] = ProgressUnit("DOL")
if len(build_config["modules"]) > 0:
if config.progress_modules:
progress_units["modules"] = ProgressUnit("Modules")
if len(config.progress_categories) > 0:
for category in config.progress_categories:
progress_units[category.id] = ProgressUnit(category.name)
if config.progress_each_module:
for module in build_config["modules"]:
progress_units[module["name"]] = ProgressUnit(module["name"])
def add_unit(id: str, unit: Dict[str, Any]) -> None:
progress = progress_units.get(id)
if progress is not None:
progress.add(unit)
# Add DOL units
all_progress = ProgressUnit("All") if config.progress_all else None
dol_progress = ProgressUnit("DOL")
for unit in build_config["units"]:
if all_progress:
all_progress.add(unit)
dol_progress.add(unit)
add_unit("all", unit)
add_unit("dol", unit)
obj = objects.get(unit["name"])
if obj is not None:
category_opt = obj.options["progress_category"]
if isinstance(category_opt, list):
for id in category_opt:
add_unit(id, unit)
elif category_opt is not None:
add_unit(category_opt, unit)
# Add REL units
rels_progress = ProgressUnit("Modules") if config.progress_modules else None
modules_progress: List[ProgressUnit] = []
for module in build_config["modules"]:
progress = ProgressUnit(module["name"])
modules_progress.append(progress)
for unit in module["units"]:
if all_progress:
all_progress.add(unit)
if rels_progress:
rels_progress.add(unit)
progress.add(unit)
add_unit("all", unit)
add_unit("modules", unit)
add_unit(module["name"], unit)
obj = objects.get(unit["name"])
if obj is not None:
category_opt = obj.options["progress_category"]
if isinstance(category_opt, list):
for id in category_opt:
add_unit(id, unit)
elif category_opt is not None:
add_unit(category_opt, unit)
# Print human-readable progress
print("Progress:")
@@ -1395,14 +1470,7 @@ def calculate_progress(config: ProjectConfig) -> None:
)
)
if all_progress:
print_category(all_progress)
print_category(dol_progress)
module_count = len(build_config["modules"])
if module_count > 0:
print_category(rels_progress)
if config.progress_each_module:
for progress in modules_progress:
for progress in progress_units.values():
print_category(progress)
# Generate and write progress.json
@@ -1416,14 +1484,7 @@ def calculate_progress(config: ProjectConfig) -> None:
"data/total": unit.data_total,
}
if all_progress:
add_category("all", all_progress)
add_category("dol", dol_progress)
if len(build_config["modules"]) > 0:
if rels_progress:
add_category("modules", rels_progress)
if config.progress_each_module:
for progress in modules_progress:
add_category(progress.name, progress)
for id, progress in progress_units.items():
add_category(id, progress)
with open(out_path / "progress.json", "w", encoding="utf-8") as w:
json.dump(progress_json, w, indent=4)
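For reference, with categories configured as above, generate_objdiff_config emits entries of roughly this shape into objdiff.json (a sketch; "dol", "modules", and per-module categories are added automatically depending on the module list and the progress_modules/progress_each_module settings, and the source path is illustrative):

objdiff_config["progress_categories"] = [
    {"id": "game", "name": "Game Code"},
    {"id": "sdk", "name": "SDK Code"},
]

# Each resolved unit records the categories it counts toward
unit_config["metadata"] = {
    "auto_generated": False,
    "complete": True,
    "reverse_fn_order": False,
    "source_path": "src/game/main.cpp",
    "progress_categories": ["game"],
}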