#!/usr/bin/env python3
# Copyright 2019 The Dawn Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module to create generators that render multiple Jinja2 templates for GN.

A helper module that can be used to create generator scripts (clients)
that expand one or more Jinja2 templates, with outputs usable from
GN and Ninja build-based systems. See generator_lib.gni as well.

Clients should create a Generator sub-class, then call run_generator()
with a proper derived class instance.

Clients specify a list of FileRender operations, each of which will
output a file into a temporary output directory through Jinja2 expansion.
All temporary output files are then grouped and written into a single JSON
file, which acts as a convenient single GN output target. Use extract_json.py
to extract the output files from the JSON tarball in another GN action.

--depfile can be used to specify an output Ninja dependency file for the
JSON tarball, to ensure it is regenerated any time one of its dependencies
changes.

Finally, --expected-outputs-file can be used to check the list of generated
output files.
"""

import argparse, json, os, re, sys
from collections import namedtuple

# A FileRender represents a single Jinja2 template render operation:
#
#   template: Jinja2 template name, relative to --template-dir path.
#
#   output: Output file path, relative to the temporary output directory.
#
#   params_dicts: iterable of (name:string -> value:string) dictionaries.
#       All of them will be merged before being sent as Jinja2 template
#       expansion parameters.
#
# Example:
#   FileRender('api.c', 'src/project_api.c', [{'PROJECT_VERSION': '1.0.0'}])
#
FileRender = namedtuple('FileRender', ['template', 'output', 'params_dicts'])
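# Note: the params_dicts are merged in order with dict.update(), so keys in
# later dictionaries override earlier ones. For example (hypothetical values):
#   FileRender('api.c', 'src/project_api.c',
#              [{'PROJECT_VERSION': '1.0.0'}, {'PROJECT_VERSION': '2.0.0'}])
# renders the template with PROJECT_VERSION set to '2.0.0'.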


# The interface that must be implemented by generators.
class Generator:
    def get_description(self):
        """Return generator description for --help."""
        return ""

    def add_commandline_arguments(self, parser):
        """Add generator-specific argparse arguments."""
        pass

    def get_file_renders(self, args):
        """Return the list of FileRender objects to process."""
        return []

    def get_dependencies(self, args):
        """Return a list of extra input dependencies."""
        return []
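
# Illustrative client sketch (hypothetical names, assuming this module is
# imported as generator_lib):
#
#   class MyGenerator(generator_lib.Generator):
#       def get_description(self):
#           return 'My code generator.'
#
#       def get_file_renders(self, args):
#           return [
#               generator_lib.FileRender('my_api.h', 'src/my_api.h',
#                                        [{'PROJECT_VERSION': '1.0.0'}]),
#           ]
#
#   if __name__ == '__main__':
#       sys.exit(generator_lib.run_generator(MyGenerator()))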


# Allow custom Jinja2 installation path through an additional python
# path from the arguments if present. This isn't done through the regular
# argparse because PreprocessingLoader uses jinja2 in the global scope before
# "main" gets to run.
#
# NOTE: If this argument appears several times, this only uses the first
#       value, while argparse would typically keep the last one!
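#
# For example, a hypothetical invocation could look like:
#   my_generator.py --jinja2-path third_party/jinja2 --output-dir out/gen
# sys.argv is scanned directly below; sys.argv.index() returns the first
# occurrence of the flag, which is why only the first value is used.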
kJinja2Path = '--jinja2-path'
try:
    jinja2_path_argv_index = sys.argv.index(kJinja2Path)
    # Add parent path for the import to succeed.
    path = os.path.join(sys.argv[jinja2_path_argv_index + 1], os.pardir)
    sys.path.insert(1, path)
except ValueError:
    # --jinja2-path isn't passed, ignore the exception and just import Jinja2
    # assuming it already is in the Python PATH.
    pass

import jinja2


# A custom Jinja2 template loader that removes the extra indentation
# of the template blocks so that the output is correctly indented
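#
# For example, a hypothetical template fragment such as:
#
#   {% if cond %}
#       int x = {{ value }};
#   {% endif %}
#
# is preprocessed so that the body line loses one level (four spaces or one
# tab) of indentation per enclosing block, and the rendered output keeps the
# indentation of the {% if %} line itself.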
class _PreprocessingLoader(jinja2.BaseLoader):
    def __init__(self, path):
        self.path = path

    def get_source(self, environment, template):
        path = os.path.join(self.path, template)
        if not os.path.exists(path):
            raise jinja2.TemplateNotFound(template)
        mtime = os.path.getmtime(path)
        with open(path) as f:
            source = self.preprocess(f.read())
        return source, path, lambda: mtime == os.path.getmtime(path)

    blockstart = re.compile(r'{%-?\s*(if|elif|else|for|block|macro)[^}]*%}')
    blockend = re.compile(r'{%-?\s*(end(if|for|block|macro)|elif|else)[^}]*%}')

    def preprocess(self, source):
        lines = source.split('\n')

        # Compute the current indentation level of the template blocks and
        # remove their indentation
        result = []
        indentation_level = 0

        # Filter lines that are pure comments. line_comment_prefix is not
        # enough because it removes the comment but doesn't completely remove
        # the line, resulting in more verbose output.
        lines = filter(lambda line: not line.strip().startswith('//*'), lines)

        # Remove the indentation that templates have for the Jinja control
        # flow.
        for line in lines:
            # The capture in the regex adds one element per block start or end,
            # so we divide by two. There is also an extra line chunk
            # corresponding to the line end, so we subtract it.
            numends = (len(self.blockend.split(line)) - 1) // 2
            indentation_level -= numends

            result.append(self.remove_indentation(line, indentation_level))

            numstarts = (len(self.blockstart.split(line)) - 1) // 2
            indentation_level += numstarts

        return '\n'.join(result) + '\n'

    def remove_indentation(self, line, n):
        for _ in range(n):
            if line.startswith(' '):
                line = line[4:]
            elif line.startswith('\t'):
                line = line[1:]
            else:
                assert line.strip() == ''
        return line


_FileOutput = namedtuple('FileOutput', ['name', 'content'])


def _do_renders(renders, template_dir):
    loader = _PreprocessingLoader(template_dir)
    env = jinja2.Environment(loader=loader,
                             lstrip_blocks=True,
                             trim_blocks=True,
                             line_comment_prefix='//*')

    def do_assert(expr):
        assert expr
        return ''

    def debug(text):
        print(text)

    base_params = {
        'enumerate': enumerate,
        'format': format,
        'len': len,
        'debug': debug,
        'assert': do_assert,
    }
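
    # These helpers are merged into every render's parameters, so templates
    # can call them directly, e.g. {{ len(my_list) }} or {{ assert(my_list) }}
    # where my_list is a hypothetical value provided through params_dicts.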

    outputs = []
    for render in renders:
        params = {}
        params.update(base_params)
        for param_dict in render.params_dicts:
            params.update(param_dict)
        content = env.get_template(render.template).render(**params)
        outputs.append(_FileOutput(render.output, content))

    return outputs


# Compute the list of imported, non-system Python modules.
# It assumes that any path outside of the root directory is system.
def _compute_python_dependencies(root_dir=None):
    if not root_dir:
        # Assume this script is under generator/ by default.
        root_dir = os.path.join(os.path.dirname(__file__), os.pardir)
    root_dir = os.path.abspath(root_dir)

    module_paths = (module.__file__ for module in sys.modules.values()
                    if module and hasattr(module, '__file__'))

    paths = set()
    for path in module_paths:
        path = os.path.abspath(path)

        if not path.startswith(root_dir):
            continue

        if (path.endswith('.pyc')
                or (path.endswith('c') and not os.path.splitext(path)[1])):
            path = path[:-1]

        paths.add(path)

    return paths


def run_generator(generator):
    parser = argparse.ArgumentParser(
        description=generator.get_description(),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    generator.add_commandline_arguments(parser)
    parser.add_argument('--template-dir',
                        default='templates',
                        type=str,
                        help='Directory with template files.')
    parser.add_argument(
        kJinja2Path,
        default=None,
        type=str,
        help='Additional python path to set before loading Jinja2')
    parser.add_argument(
        '--output-json-tarball',
        default=None,
        type=str,
        help=('Name of the "JSON tarball" to create (tar is too annoying '
              'to use in python).'))
    parser.add_argument(
        '--depfile',
        default=None,
        type=str,
        help='Name of the Ninja depfile to create for the JSON tarball')
    parser.add_argument(
        '--expected-outputs-file',
        default=None,
        type=str,
        help="File to compare outputs with and fail if it doesn't match")
    parser.add_argument(
        '--root-dir',
        default=None,
        type=str,
        help=('Optional source root directory for Python dependency '
              'computations'))
    parser.add_argument(
        '--allowed-output-dirs-file',
        default=None,
        type=str,
        help=("File containing a list of allowed directories where files "
              "can be output."))
    parser.add_argument(
        '--print-cmake-dependencies',
        default=False,
        action="store_true",
        help=("Prints a semi-colon separated list of dependencies to "
              "stdout and exits."))
    parser.add_argument(
        '--print-cmake-outputs',
        default=False,
        action="store_true",
        help=("Prints a semi-colon separated list of outputs to "
              "stdout and exits."))
    parser.add_argument('--output-dir',
                        default=None,
                        type=str,
                        help='Directory where to output generated files.')

    args = parser.parse_args()

    renders = generator.get_file_renders(args)

    # Output the list of dependencies, either for CMake or for the Ninja
    # depfile of the JSON tarball used by GN/Ninja.
    if args.depfile != None or args.print_cmake_dependencies:
        dependencies = generator.get_dependencies(args)
        dependencies += [
            args.template_dir + os.path.sep + render.template
            for render in renders
        ]
        dependencies += _compute_python_dependencies(args.root_dir)

        if args.depfile != None:
            with open(args.depfile, 'w') as f:
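                # Write a Ninja-style depfile: a single
                # "<target>: <dep> <dep> ..." line, e.g. (hypothetical paths):
                #   out/gen.json: templates/api.h generator/generator_lib.py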
                f.write(args.output_json_tarball + ": " +
                        " ".join(dependencies))

        if args.print_cmake_dependencies:
            sys.stdout.write(";".join(dependencies))
            return 0

    # The caller wants to assert that the outputs are what it expects.
    # Load the file and compare with our renders.
    if args.expected_outputs_file != None:
        with open(args.expected_outputs_file) as f:
            expected = set([line.strip() for line in f.readlines()])

        actual = {render.output for render in renders}

        if actual != expected:
            print("Wrong expected outputs, caller expected:\n " +
                  repr(sorted(expected)))
            print("Actual output:\n " + repr(sorted(actual)))
            return 1

    # Print the list of all the outputs for cmake.
    if args.print_cmake_outputs:
        sys.stdout.write(";".join([
            os.path.join(args.output_dir, render.output) for render in renders
        ]))
        return 0

    outputs = _do_renders(renders, args.template_dir)

    # The caller wants to assert that the outputs are only in specific
    # directories.
    if args.allowed_output_dirs_file != None:
        with open(args.allowed_output_dirs_file) as f:
            allowed_dirs = set([line.strip() for line in f.readlines()])

        for directory in allowed_dirs:
            if not directory.endswith('/'):
                print('Allowed directory entry "{}" doesn\'t '
                      'end with /'.format(directory))
                return 1

        def check_in_subdirectory(path, directory):
            return path.startswith(
                directory) and not '/' in path[len(directory):]
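
        # check_in_subdirectory() only accepts direct children of an allowed
        # directory: with hypothetical paths, ('src/api.h', 'src/') passes
        # while ('src/sub/api.h', 'src/') fails because the remainder of the
        # path still contains a '/'.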

        for render in renders:
            if not any(
                    check_in_subdirectory(render.output, directory)
                    for directory in allowed_dirs):
                print('Output file "{}" is not in the allowed directory '
                      'list below:'.format(render.output))
                for directory in sorted(allowed_dirs):
                    print(' "{}"'.format(directory))
                return 1

    # Output the JSON tarball
    if args.output_json_tarball != None:
        json_root = {}
        for output in outputs:
            json_root[output.name] = output.content

        with open(args.output_json_tarball, 'w') as f:
            f.write(json.dumps(json_root))
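
        # The "JSON tarball" is a single JSON object mapping each output path
        # to its content, e.g. (hypothetical) {"src/api.h": "// header..."},
        # which extract_json.py later splits back into individual files.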

    # Output the files directly.
    if args.output_dir != None:
        for output in outputs:
            output_path = os.path.join(args.output_dir, output.name)

            directory = os.path.dirname(output_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

            with open(output_path, 'w') as outfile:
                outfile.write(output.content)