mirror of https://github.com/PrimeDecomp/prime.git
Sync with latest dtk-template
parent 530b4540f0
commit 28f16a7cfc
@@ -6,36 +6,52 @@ on:
jobs:
build:
runs-on: ubuntu-latest
container: ghcr.io/primedecomp/build:main

runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version: [GM8E01_00] # GM8E01_01, GM8E01_48

steps:
# Checkout the repository (shallow clone)
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: recursive

# Set Git config
- name: Git config
run: git config --global --add safe.directory "$GITHUB_WORKSPACE"

# Copy the original files to the workspace
- name: Prepare
run: cp -R /orig .

# Build the project
- name: Build
run: |
python configure.py --map --version ${{matrix.version}} --compilers /compilers
python configure.py --map --version ${{ matrix.version }} \
--binutils /binutils --compilers /compilers
ninja all_source build/${{ matrix.version }}/progress.json

# Upload progress if we're on the main branch
- name: Upload progress
if: github.ref == 'refs/heads/main' && matrix.version == 'GM8E01_00'
if: github.ref == 'refs/heads/main'
continue-on-error: true
env:
PROGRESS_SLUG: prime
PROGRESS_API_KEY: ${{ secrets.PROGRESS_API_KEY }}
run: |
python tools/upload_progress.py -b https://progress.decomp.club/ -p prime -v ${{matrix.version}} \
python tools/upload_progress.py -b https://progress.decomp.club/ \
-p $PROGRESS_SLUG -v ${{ matrix.version }} \
build/${{ matrix.version }}/progress.json

# Upload map files
- name: Upload map
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.version }}_maps
path: build/${{ matrix.version }}/**/*.MAP
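For reference, the two build steps above amount to a configure pass followed by a ninja invocation. A minimal local sketch of the same commands, assuming the /compilers and /binutils directories provided by the CI container exist on your machine (adjust the paths otherwise):

import subprocess

version = "GM8E01_00"  # one of the versions from the matrix above

# Roughly what the "Build" step runs inside the container.
subprocess.run(
    ["python", "configure.py", "--map", "--version", version,
     "--binutils", "/binutils", "--compilers", "/compilers"],  # container paths; adjust locally
    check=True,
)
subprocess.run(["ninja", "all_source", f"build/{version}/progress.json"], check=True)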
@@ -48,5 +48,6 @@
],
"C/C++ Include Guard.Auto Update Path Blocklist": [
"include/zlib"
]
],
"cmake.configureOnOpen": false
}
configure.py (104 changed lines)
@@ -12,10 +12,11 @@
# Append --help to see available options.
###

import sys
import argparse

import sys
from pathlib import Path
from typing import Any, Dict, List

from tools.project import (
Object,
ProjectConfig,
@@ -38,104 +39,127 @@ VERSIONS = [
# "R3MP01_00", # mp-v3.629 Trilogy PAL
]

if len(VERSIONS) > 1:
versions_str = ", ".join(VERSIONS[:-1]) + f" or {VERSIONS[-1]}"
else:
versions_str = VERSIONS[0]

parser = argparse.ArgumentParser()
parser.add_argument(
"mode",
choices=["configure", "progress"],
default="configure",
help="configure or progress (default: configure)",
help="script mode (default: configure)",
nargs="?",
)
parser.add_argument(
"-v",
"--version",
dest="version",
choices=VERSIONS,
type=str.upper,
default=VERSIONS[DEFAULT_VERSION],
help=f"version to build ({versions_str})",
help="version to build",
)
parser.add_argument(
"--build-dir",
dest="build_dir",
metavar="DIR",
type=Path,
default=Path("build"),
help="base build directory (default: build)",
)
parser.add_argument(
"--binutils",
metavar="BINARY",
type=Path,
help="path to binutils (optional)",
)
parser.add_argument(
"--compilers",
dest="compilers",
metavar="DIR",
type=Path,
help="path to compilers (optional)",
)
parser.add_argument(
"--map",
dest="map",
action="store_true",
help="generate map file(s)",
)
parser.add_argument(
"--no-asm",
action="store_true",
help="don't incorporate .s files from asm directory",
)
parser.add_argument(
"--debug",
dest="debug",
action="store_true",
help="build with debug info (non-matching)",
)
if not is_windows():
parser.add_argument(
"--wrapper",
dest="wrapper",
metavar="BINARY",
type=Path,
help="path to wibo or wine (optional)",
)
parser.add_argument(
"--build-dtk",
dest="build_dtk",
"--dtk",
metavar="BINARY | DIR",
type=Path,
help="path to decomp-toolkit source (optional)",
help="path to decomp-toolkit binary or source (optional)",
)
parser.add_argument(
"--sjiswrap",
dest="sjiswrap",
metavar="EXE",
type=Path,
help="path to sjiswrap.exe (optional)",
)
parser.add_argument(
"--verbose",
dest="verbose",
action="store_true",
help="print verbose output",
)
parser.add_argument(
"--non-matching",
dest="non_matching",
action="store_true",
help="builds equivalent (but non-matching) or modded objects",
)
args = parser.parse_args()

config = ProjectConfig()
config.version = args.version
if config.version not in VERSIONS:
sys.exit(f"Invalid version '{config.version}', expected {versions_str}")
config.version = str(args.version)
version_num = VERSIONS.index(config.version)

# Apply arguments
config.build_dir = args.build_dir
config.build_dtk_path = args.build_dtk
config.dtk_path = args.dtk
config.binutils_path = args.binutils
config.compilers_path = args.compilers
config.debug = args.debug
config.generate_map = args.map
config.non_matching = args.non_matching
config.sjiswrap_path = args.sjiswrap
if not is_windows():
config.wrapper = args.wrapper
if args.no_asm:
config.asm_dir = None

# Tool versions
config.binutils_tag = "2.42-1"
config.compilers_tag = "20231018"
config.dtk_tag = "v0.7.6"
config.dtk_tag = "v0.9.2"
config.sjiswrap_tag = "v1.1.1"
config.wibo_tag = "0.6.9"
config.wibo_tag = "0.6.11"

# Project
config.config_path = Path("config") / config.version / "config.yml"
config.check_sha_path = Path("config") / config.version / "build.sha1"
config.asflags = [
"-mgekko",
"--strip-local-absolute",
"-I include",
f"-I build/{config.version}/include",
f"--defsym version={version_num}",
]
config.ldflags = [
"-fp hardware",
"-nodefaults",
"-warn off",
]

config.progress_all = False
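Note how the explicit membership check on config.version is replaced by argparse's choices=VERSIONS, while version_num (the index into VERSIONS) still feeds the --defsym and -DVERSION flags. A small sketch of that mapping, with an illustrative VERSIONS list:

VERSIONS = ["GM8E01_00", "GM8E01_01", "GM8E01_48"]  # illustrative subset

version = "gm8e01_01".upper()           # argparse applies type=str.upper
version_num = VERSIONS.index(version)   # 1

# Mirrors the defines assembled above from version_num.
asflag = f"--defsym version={version_num}"
cflag = f"-DVERSION={version_num}"
print(version_num, asflag, cflag)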
@@ -151,24 +175,29 @@ config.build_rels = False
# Base flags, common to most GC/Wii games.
# Generally leave untouched, with overrides added below.
cflags_base = [
"-proc gekko",
"-nodefaults",
"-Cpp_exceptions off",
"-RTTI off",
"-fp hard",
"-fp_contract on",
"-O4,p",
"-maxerrors 1",
"-proc gekko",
"-align powerpc",
"-enum int",
"-fp hardware",
"-Cpp_exceptions off",
# "-W all",
"-O4,p",
"-inline auto",
"-str reuse",
'-pragma "cats off"',
'-pragma "warn_notinlined off"',
"-maxerrors 1",
"-nosyspath",
"-RTTI off",
"-fp_contract on",
"-str reuse",
"-multibyte",
"-i include",
"-i extern/musyx/include",
"-i libc",
f"-i build/{config.version}/include",
"-DPRIME1",
f"-DVERSION={version_num}",
"-DPRIME1",
"-DNONMATCHING=0",
]
@@ -298,8 +327,9 @@ def Rel(lib_name, objects):
}


Matching = True
NonMatching = False
Matching = True # Object matches and should be linked
NonMatching = False # Object does not match and should not be linked
Equivalent = config.non_matching # Object should be linked when configured with --non-matching

config.warn_missing_config = True
config.warn_missing_source = False
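The three markers control what gets linked: Matching objects are compiled and linked from source, NonMatching objects fall back to the original code, and Equivalent is only True when --non-matching is passed. A hypothetical sketch of how such markers are attached to objects; the Object stand-in and file names below are made up, not the real tools.project API:

Matching = True
NonMatching = False
Equivalent = False  # becomes config.non_matching in configure.py

def Object(completed, name):
    # Simplified stand-in: record whether a unit links from compiled source.
    return {"completed": completed, "name": name}

objects = [
    Object(Matching, "example/Matched.cpp"),       # hypothetical path
    Object(NonMatching, "example/Unmatched.cpp"),  # hypothetical path
    Object(Equivalent, "example/Equivalent.cpp"),  # linked only with --non-matching
]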
ldscript.lcf (44 changed lines)
@@ -1,44 +0,0 @@
MEMORY
{
text : origin = 0x80003100
}

SECTIONS
{
GROUP:
{
.init ALIGN(0x20):{}
extab ALIGN(0x20):{}
extabindex ALIGN(0x20):{}
.text ALIGN(0x20):{TRK_MINNOW_DOLPHIN.a}
.ctors ALIGN(0x20):{}
.dtors ALIGN(0x20):{}
.rodata ALIGN(0x20):{}
.data ALIGN(0x20):{}
.bss ALIGN(0x20):{}
.sdata ALIGN(0x20):{}
.sbss ALIGN(0x20):{}
.sdata2 ALIGN(0x20):{}
.sbss2 ALIGN(0x20):{}
.stack ALIGN(0x100):{}
} > text

/* Stack size upped from the default of 65535 */
_stack_addr = (_f_sbss2 + SIZEOF(.sbss2) + 65535 + 3073 /* Retro Addition */ + 0x7) & ~0x7;
_stack_end = _f_sbss2 + SIZEOF(.sbss2);
_db_stack_addr = (_stack_addr + 0x2000);
_db_stack_end = _stack_addr;
__ArenaLo = (_db_stack_addr + 0x1f) & ~0x1f;
__ArenaHi = 0x81700000;
}

FORCEFILES
{
}

FORCEACTIVE
{
OSInitMessageQueue
OSSendMessage
OSReceiveMessage
}
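The deleted script carried the enlarged stack inline: _stack_addr sits 65535 + 3073 bytes past the end of .sbss2, rounded up to 8 bytes, with the debugger stack and __ArenaLo laid out above it. A quick sketch of that arithmetic with a made-up end address for .sbss2:

f_sbss2_end = 0x8046B2C0  # hypothetical _f_sbss2 + SIZEOF(.sbss2)

stack_addr = (f_sbss2_end + 65535 + 3073 + 0x7) & ~0x7  # top of stack, 8-byte aligned
db_stack_addr = stack_addr + 0x2000                     # debugger stack above it
arena_lo = (db_stack_addr + 0x1F) & ~0x1F               # __ArenaLo, 32-byte aligned

print(hex(stack_addr), hex(db_stack_addr), hex(arena_lo))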
@@ -13,6 +13,7 @@
import argparse
import os
import re
from typing import List

script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.abspath(os.path.join(script_dir, ".."))
@@ -24,36 +25,40 @@ include_dirs = [
]

include_pattern = re.compile(r'^#include\s*[<"](.+?)[>"]$')
guard_pattern = re.compile(r'^#ifndef\s+(.*)$')
guard_pattern = re.compile(r"^#ifndef\s+(.*)$")

defines = set()

def import_h_file(in_file: str, r_path: str) -> str:

def import_h_file(in_file: str, r_path: str, deps: List[str]) -> str:
rel_path = os.path.join(root_dir, r_path, in_file)
if os.path.exists(rel_path):
return import_c_file(rel_path)
return import_c_file(rel_path, deps)
for include_dir in include_dirs:
inc_path = os.path.join(include_dir, in_file)
if os.path.exists(inc_path):
return import_c_file(inc_path)
return import_c_file(inc_path, deps)
else:
print("Failed to locate", in_file)
return ""

def import_c_file(in_file) -> str:

def import_c_file(in_file: str, deps: List[str]) -> str:
in_file = os.path.relpath(in_file, root_dir)
out_text = ''
deps.append(in_file)
out_text = ""

try:
with open(in_file, encoding="utf-8") as file:
out_text += process_file(in_file, list(file))
out_text += process_file(in_file, list(file), deps)
except Exception:
with open(in_file) as file:
out_text += process_file(in_file, list(file))
out_text += process_file(in_file, list(file), deps)
return out_text

def process_file(in_file: str, lines) -> str:
out_text = ''

def process_file(in_file: str, lines: List[str], deps: List[str]) -> str:
out_text = ""
for idx, line in enumerate(lines):
guard_match = guard_pattern.match(line.strip())
if idx == 0:
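The deps list threaded through import_c_file, import_h_file, and process_file now records every file the context generator touches. A tiny self-contained analogue of that pattern (the function and paths are illustrative, not the script's real ones):

from typing import List

def visit(in_file: str, deps: List[str]) -> None:
    # Analogue of the new signature: each visited file is appended to deps
    # before its includes are followed.
    deps.append(in_file)

deps: List[str] = []
visit("src/example.c", deps)    # hypothetical translation unit
visit("include/types.h", deps)  # hypothetical header it pulls in
print(deps)                     # ['src/example.c', 'include/types.h']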
@@ -64,14 +69,19 @@ def process_file(in_file: str, lines) -> str:
print("Processing file", in_file)
include_match = include_pattern.match(line.strip())
if include_match and not include_match[1].endswith(".s"):
out_text += f"/* \"{in_file}\" line {idx} \"{include_match[1]}\" */\n"
out_text += import_h_file(include_match[1], os.path.dirname(in_file))
out_text += f"/* end \"{include_match[1]}\" */\n"
out_text += f'/* "{in_file}" line {idx} "{include_match[1]}" */\n'
out_text += import_h_file(include_match[1], os.path.dirname(in_file), deps)
out_text += f'/* end "{include_match[1]}" */\n'
else:
out_text += line

return out_text


def sanitize_path(path: str) -> str:
return path.replace("\\", "/").replace(" ", "\\ ")


def main():
parser = argparse.ArgumentParser(
description="""Create a context file which can be used for decomp.me"""
@@ -80,13 +90,32 @@ def main():
"c_file",
help="""File from which to create context""",
)
parser.add_argument(
"-o",
"--output",
help="""Output file""",
default="ctx.c",
)
parser.add_argument(
"-d",
"--depfile",
help="""Dependency file""",
)
args = parser.parse_args()

output = import_c_file(args.c_file)
deps = []
output = import_c_file(args.c_file, deps)

with open(os.path.join(root_dir, "ctx.c"), "w", encoding="utf-8") as f:
with open(os.path.join(root_dir, args.output), "w", encoding="utf-8") as f:
f.write(output)

if args.depfile:
with open(os.path.join(root_dir, args.depfile), "w", encoding="utf-8") as f:
f.write(sanitize_path(args.output) + ":")
for dep in deps:
path = sanitize_path(dep)
f.write(f" \\\n\t{path}")


if __name__ == "__main__":
main()
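With -d/--depfile, the collected deps are written out as a Make-style dependency file so the build system can regenerate the context when any inlined header changes. A sketch of the emitted format, reusing sanitize_path from above with hypothetical paths:

def sanitize_path(path: str) -> str:
    return path.replace("\\", "/").replace(" ", "\\ ")

deps = ["src/example.c", "include/types.h"]  # hypothetical dependency list
output = "ctx.c"

with open("ctx.d", "w", encoding="utf-8") as f:  # hypothetical depfile name
    f.write(sanitize_path(output) + ":")
    for dep in deps:
        f.write(f" \\\n\t{sanitize_path(dep)}")

# Resulting ctx.d:
# ctx.c: \
#     src/example.c \
#     include/types.h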
@@ -18,11 +18,29 @@ import shutil
import stat
import urllib.request
import zipfile

from typing import Callable, Dict
from pathlib import Path


def dtk_url(tag):
def binutils_url(tag):
uname = platform.uname()
system = uname.system.lower()
arch = uname.machine.lower()
if system == "darwin":
system = "macos"
arch = "universal"
elif arch == "amd64":
arch = "x86_64"

repo = "https://github.com/encounter/gc-wii-binutils"
return f"{repo}/releases/download/{tag}/{system}-{arch}.zip"


def compilers_url(tag: str) -> str:
return f"https://files.decomp.dev/compilers_{tag}.zip"


def dtk_url(tag: str) -> str:
uname = platform.uname()
suffix = ""
system = uname.system.lower()
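binutils_url derives a per-platform release URL from platform.uname(), collapsing Darwin to a universal macOS build and amd64 to x86_64. A sketch of the strings it produces, with system and arch passed in explicitly for illustration (the tag matches config.binutils_tag above):

def binutils_url(tag: str, system: str, arch: str) -> str:
    # Same mapping as the new helper, minus the platform.uname() probe.
    if system == "darwin":
        system, arch = "macos", "universal"
    elif arch == "amd64":
        arch = "x86_64"
    repo = "https://github.com/encounter/gc-wii-binutils"
    return f"{repo}/releases/download/{tag}/{system}-{arch}.zip"

print(binutils_url("2.42-1", "linux", "x86_64"))
# https://github.com/encounter/gc-wii-binutils/releases/download/2.42-1/linux-x86_64.zip
print(binutils_url("2.42-1", "darwin", "arm64"))
# https://github.com/encounter/gc-wii-binutils/releases/download/2.42-1/macos-universal.zip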
@@ -38,29 +56,26 @@ def dtk_url(tag):
return f"{repo}/releases/download/{tag}/dtk-{system}-{arch}{suffix}"


def sjiswrap_url(tag):
def sjiswrap_url(tag: str) -> str:
repo = "https://github.com/encounter/sjiswrap"
return f"{repo}/releases/download/{tag}/sjiswrap-windows-x86.exe"


def wibo_url(tag):
def wibo_url(tag: str) -> str:
repo = "https://github.com/decompals/wibo"
return f"{repo}/releases/download/{tag}/wibo"


def compilers_url(tag):
return f"https://files.decomp.dev/compilers_{tag}.zip"


TOOLS = {
TOOLS: Dict[str, Callable[[str], str]] = {
"binutils": binutils_url,
"compilers": compilers_url,
"dtk": dtk_url,
"sjiswrap": sjiswrap_url,
"wibo": wibo_url,
"compilers": compilers_url,
}


def main():
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("tool", help="Tool name")
parser.add_argument("output", type=Path, help="output file path")
@@ -77,7 +92,11 @@ def main():
data = io.BytesIO(response.read())
with zipfile.ZipFile(data) as f:
f.extractall(output)
output.touch(mode=0o755)
# Make all files executable
for root, _, files in os.walk(output):
for name in files:
os.chmod(os.path.join(root, name), 0o755)
output.touch(mode=0o755) # Update dir modtime
else:
with open(output, "wb") as f:
shutil.copyfileobj(response, f)
@@ -21,50 +21,67 @@ use Python.

import re
import textwrap
import os
from io import StringIO
from pathlib import Path
from typing import Dict, List, Match, Optional, Tuple, Union

NinjaPath = Union[str, Path]
NinjaPaths = Union[
List[str],
List[Path],
List[NinjaPath],
List[Optional[str]],
List[Optional[Path]],
List[Optional[NinjaPath]],
]
NinjaPathOrPaths = Union[NinjaPath, NinjaPaths]


def escape_path(word):
def escape_path(word: str) -> str:
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")


class Writer(object):
def __init__(self, output, width=78):
def __init__(self, output: StringIO, width: int = 78) -> None:
self.output = output
self.width = width

def newline(self):
def newline(self) -> None:
self.output.write("\n")

def comment(self, text):
def comment(self, text: str) -> None:
for line in textwrap.wrap(
text, self.width - 2, break_long_words=False, break_on_hyphens=False
):
self.output.write("# " + line + "\n")

def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = " ".join(filter(None, value)) # Filter out empty strings.
def variable(
self,
key: str,
value: Optional[NinjaPathOrPaths],
indent: int = 0,
) -> None:
value = " ".join(serialize_paths(value))
self._line("%s = %s" % (key, value), indent)

def pool(self, name, depth):
def pool(self, name: str, depth: int) -> None:
self._line("pool %s" % name)
self.variable("depth", depth, indent=1)
self.variable("depth", str(depth), indent=1)

def rule(
self,
name,
command,
description=None,
depfile=None,
generator=False,
pool=None,
restat=False,
rspfile=None,
rspfile_content=None,
deps=None,
):
name: str,
command: str,
description: Optional[str] = None,
depfile: Optional[NinjaPath] = None,
generator: bool = False,
pool: Optional[str] = None,
restat: bool = False,
rspfile: Optional[NinjaPath] = None,
rspfile_content: Optional[NinjaPath] = None,
deps: Optional[NinjaPathOrPaths] = None,
) -> None:
self._line("rule %s" % name)
self.variable("command", command, indent=1)
if description:
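escape_path and the typed variable() are what keep generated ninja files valid when paths contain spaces or drive colons, and a list value is flattened into a single space-separated assignment. A quick sketch of both behaviors with illustrative values:

def escape_path(word: str) -> str:
    return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")

print(escape_path("C:/Program Files/tool"))  # C$:/Program$ Files/tool

# variable() now joins serialized paths before writing "key = value":
values = ["-i include", "-i libc"]
print("cflags = " + " ".join(values))        # cflags = -i include -i libc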
@@ -86,32 +103,39 @@ class Writer(object):

def build(
self,
outputs,
rule,
inputs=None,
implicit=None,
order_only=None,
variables=None,
implicit_outputs=None,
pool=None,
dyndep=None,
):
outputs = as_list(outputs)
outputs: NinjaPathOrPaths,
rule: str,
inputs: Optional[NinjaPathOrPaths] = None,
implicit: Optional[NinjaPathOrPaths] = None,
order_only: Optional[NinjaPathOrPaths] = None,
variables: Optional[
Union[
List[Tuple[str, Optional[NinjaPathOrPaths]]],
Dict[str, Optional[NinjaPathOrPaths]],
]
] = None,
implicit_outputs: Optional[NinjaPathOrPaths] = None,
pool: Optional[str] = None,
dyndep: Optional[NinjaPath] = None,
) -> List[str]:
outputs = serialize_paths(outputs)
out_outputs = [escape_path(x) for x in outputs]
all_inputs = [escape_path(x) for x in as_list(inputs)]
all_inputs = [escape_path(x) for x in serialize_paths(inputs)]

if implicit:
implicit = [escape_path(x) for x in as_list(implicit)]
implicit = [escape_path(x) for x in serialize_paths(implicit)]
all_inputs.append("|")
all_inputs.extend(implicit)
all_inputs.extend(map(str, implicit))
if order_only:
order_only = [escape_path(x) for x in as_list(order_only)]
order_only = [escape_path(x) for x in serialize_paths(order_only)]
all_inputs.append("||")
all_inputs.extend(order_only)
all_inputs.extend(map(str, order_only))
if implicit_outputs:
implicit_outputs = [escape_path(x) for x in as_list(implicit_outputs)]
implicit_outputs = [
escape_path(x) for x in serialize_paths(implicit_outputs)
]
out_outputs.append("|")
out_outputs.extend(implicit_outputs)
out_outputs.extend(map(str, implicit_outputs))

self._line(
"build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs))
@@ -119,7 +143,7 @@ class Writer(object):
if pool is not None:
self._line(" pool = %s" % pool)
if dyndep is not None:
self._line(" dyndep = %s" % dyndep)
self._line(" dyndep = %s" % serialize_path(dyndep))

if variables:
if isinstance(variables, dict):

@@ -132,16 +156,16 @@ class Writer(object):

return outputs

def include(self, path):
def include(self, path: str) -> None:
self._line("include %s" % path)

def subninja(self, path):
def subninja(self, path: str) -> None:
self._line("subninja %s" % path)

def default(self, paths):
self._line("default %s" % " ".join(as_list(paths)))
def default(self, paths: NinjaPathOrPaths) -> None:
self._line("default %s" % " ".join(serialize_paths(paths)))

def _count_dollars_before_index(self, s, i):
def _count_dollars_before_index(self, s: str, i: int) -> int:
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1

@@ -150,7 +174,7 @@ class Writer(object):
dollar_index -= 1
return dollar_count

def _line(self, text, indent=0):
def _line(self, text: str, indent: int = 0) -> None:
"""Write 'text' word-wrapped at self.width characters."""
leading_space = " " * indent
while len(leading_space) + len(text) > self.width:

@@ -187,19 +211,26 @@ class Writer(object):

self.output.write(leading_space + text + "\n")

def close(self):
def close(self) -> None:
self.output.close()


def as_list(input):
if input is None:
return []
def serialize_path(input: Optional[NinjaPath]) -> str:
if not input:
return ""
if isinstance(input, Path):
return str(input).replace("/", os.sep)
else:
return str(input)


def serialize_paths(input: Optional[NinjaPathOrPaths]) -> List[str]:
if isinstance(input, list):
return input
return [input]
return [serialize_path(path) for path in input if path]
return [serialize_path(input)] if input else []


def escape(string):
def escape(string: str) -> str:
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert "\n" not in string, "Ninja syntax does not allow newlines"

@@ -207,14 +238,14 @@ def escape(string):
return string.replace("$", "$$")


def expand(string, vars, local_vars={}):
def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str:
"""Expand a string containing $vars as Ninja would.

Note: doesn't handle the full Ninja variable syntax, but it's enough
to make configure.py's use of it work.
"""

def exp(m):
def exp(m: Match[str]) -> str:
var = m.group(1)
if var == "$":
return "$"
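serialize_path and serialize_paths replace the old as_list helper: Path objects get native separators, None and empty entries are dropped, and a single value becomes a one-element list. A sketch of the behavior with the same logic and illustrative inputs:

import os
from pathlib import Path
from typing import List, Optional, Union

NinjaPath = Union[str, Path]

def serialize_path(input: Optional[NinjaPath]) -> str:
    if not input:
        return ""
    if isinstance(input, Path):
        return str(input).replace("/", os.sep)
    return str(input)

def serialize_paths(input) -> List[str]:
    if isinstance(input, list):
        return [serialize_path(p) for p in input if p]
    return [serialize_path(input)] if input else []

print(serialize_paths([Path("build/main.elf"), None, "ldscript.lcf"]))
# On Linux: ['build/main.elf', 'ldscript.lcf']
print(serialize_paths(None))  # []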
tools/project.py (787 changed lines; diff suppressed because it is too large)
@@ -25,7 +25,7 @@ def in_wsl() -> bool:
return "microsoft-standard" in uname().release


def import_d_file(in_file) -> str:
def import_d_file(in_file: str) -> str:
out_text = ""

with open(in_file) as file:

@@ -60,7 +60,7 @@ def import_d_file(in_file) -> str:
return out_text


def main():
def main() -> None:
parser = argparse.ArgumentParser(
description="""Transform a .d file from Wine paths to normal paths"""
)
@@ -51,7 +51,7 @@ if __name__ == "__main__":
args = parser.parse_args()
api_key = args.api_key or os.environ.get("PROGRESS_API_KEY")
if not api_key:
raise "API key required"
raise KeyError("API key required")
url = generate_url(args)

entries = []
@@ -68,9 +68,12 @@ if __name__ == "__main__":
print("Publishing entry to", url)
json.dump(entries[0], sys.stdout, indent=4)
print()
r = requests.post(url, json={
r = requests.post(
url,
json={
"api_key": api_key,
"entries": entries,
})
},
)
r.raise_for_status()
print("Done!")
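The reformatted requests.post call submits the same JSON body as before: the API key plus the entries read from progress.json. A sketch of the payload shape with placeholder values (the real entry fields come from the progress report, not from this example):

import json

entries = [{"example": "entry"}]      # placeholder; real entries come from progress.json
payload = {
    "api_key": "<PROGRESS_API_KEY>",  # from --api-key or the PROGRESS_API_KEY env var
    "entries": entries,
}
print(json.dumps(payload, indent=4))
# r = requests.post(url, json=payload); r.raise_for_status()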