Add fetch_dawn_dependencies

This adds a DAWN_FETCH_DEPENDENCIES CMake option (default: OFF). When it is
turned ON, dependencies are fetched by a basic Python script that reads DEPS
files, instead of requiring every project that depends on Dawn to install
depot_tools.

The script is not equivalent to gclient: it is not aware of advanced
configuration options and always fetches the list of dependencies hardcoded
at its beginning. But for the regular use of Dawn as a dependency that
provides a WebGPU backend (the intended use case of this
DAWN_FETCH_DEPENDENCIES option), it is enough.
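
For reference, the core of what the script does with a DEPS entry looks like
the following sketch (the dependency name, URL variable and revision are made
up for illustration; only the parsing logic mirrors the script):

```python
# A DEPS file defines `vars` and `deps` dictionaries, e.g. (hypothetical values):
vars = {'chromium_git': 'https://chromium.googlesource.com'}
deps = {
    'third_party/some_dep': {
        'url': '{chromium_git}/external/some_dep.git@0123456789abcdef',
    },
}

# The script substitutes the variables and splits "<url>@<commit>":
raw_url = deps['third_party/some_dep']['url']
git_url, git_tag = raw_url.format(**vars).rsplit('@', 1)
print(git_url)  # https://chromium.googlesource.com/external/some_dep.git
print(git_tag)  # 0123456789abcdef
```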

This is an attempt at merging upstream [this
repository](https://github.com/eliemichel/WebGPU-distribution/tree/dawn),
which I currently recommend in [Learn WebGPU for native
C++](https://eliemichel.github.io/LearnWebGPU/getting-started/hello-webgpu.html)
as a thin intermediary that avoids requiring depot_tools (and turns some
options on/off).

By default, the script performs shallow clones, fetching only the single
commit that is needed. This is a great improvement over a naive clone:

                | Regular | Shallow |
----------------|---------|---------|
Downloaded size |  194 MB |  15 MB  |
Stored size     |  294 MB | 103 MB  |
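
Concretely, the shallow path never runs `git clone`; it fetches the single
commit directly. A rough sketch of the approach used by the shallow_clone
helper added below (the function name and destination path here are
illustrative):

```python
import subprocess
from pathlib import Path

def fetch_single_commit(git_url, git_tag, dest):
    """Fetch only `git_tag` from `git_url` into `dest`, without history."""
    dest = Path(dest)
    dest.mkdir(parents=True, exist_ok=True)
    git = lambda *args: subprocess.run(['git', '-C', str(dest), *args], check=True)
    git('init')
    git('remote', 'add', 'origin', git_url)
    # Needs uploadpack.allowReachableSHA1InWant on the server when git_tag
    # is a raw commit hash rather than a branch or tag name.
    git('fetch', 'origin', git_tag, '--depth', '1')
    git('checkout', git_tag)
```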

Change-Id: Iaedb4af78036a1696b68787c36f1d3d70e18ba2c
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/131750
Commit-Queue: Austin Eng <enga@chromium.org>
Reviewed-by: Ben Clayton <bclayton@google.com>
Kokoro: Kokoro <noreply+kokoro@google.com>
Author: Elie Michel
Date: 2023-05-12 20:19:41 +00:00
Committed by: Dawn LUCI CQ
Parent: 2d6690ed9c
Commit: 9ae8ed2f62
3 changed files with 252 additions and 0 deletions


@@ -203,6 +203,8 @@ set_if_not_defined(NODE_API_HEADERS_DIR "${DAWN_THIRD_PARTY_DIR}/node-api-header
set_if_not_defined(WEBGPU_IDL_PATH "${DAWN_THIRD_PARTY_DIR}/gpuweb/webgpu.idl" "Path to the webgpu.idl definition file")
set_if_not_defined(GO_EXECUTABLE "go" "Golang executable for running the IDL generator")
option_if_not_defined(DAWN_FETCH_DEPENDENCIES "Use fetch_dawn_dependencies.py as an alternative to using depot_tools" OFF)
# Much of the backend code is shared among desktop OpenGL and OpenGL ES
if (${DAWN_ENABLE_DESKTOP_GL} OR ${DAWN_ENABLE_OPENGLES})
    set(DAWN_ENABLE_OPENGL ON)


@@ -15,6 +15,27 @@

# Don't build testing in third_party dependencies
set(BUILD_TESTING OFF)

# fetch_dawn_dependencies.py is an alternative to using depot_tools.
# It is particularly useful when building Dawn as a subdirectory in
# a parent project that does not want to use depot_tools.
if (${DAWN_FETCH_DEPENDENCIES})
    find_package(PythonInterp 3 REQUIRED)

    set(EXTRA_FETCH_ARGS)
    if (NOT TARGET gmock AND ${TINT_BUILD_TESTS})
        list(APPEND EXTRA_FETCH_ARGS --use-test-deps)
    endif()

    message(STATUS "Running fetch_dawn_dependencies:")
    execute_process(
        COMMAND
            ${PYTHON_EXECUTABLE}
            "${PROJECT_SOURCE_DIR}/tools/fetch_dawn_dependencies.py"
            --directory ${PROJECT_SOURCE_DIR}
            ${EXTRA_FETCH_ARGS}
    )
endif ()

if (NOT TARGET SPIRV-Headers)
    set(SPIRV_HEADERS_SKIP_EXAMPLES ON CACHE BOOL "" FORCE)
    set(SPIRV_HEADERS_SKIP_INSTALL ON CACHE BOOL "" FORCE)


@@ -0,0 +1,229 @@
# Copyright 2023 The Dawn & Tint Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Helper script to download Dawn's source dependencies without the need to
install depot_tools by manually. This script implements a subset of
`gclient sync`.
This helps embedders, for example through CMake, get all the sources with
a single add_subdirectory call (or FetchContent) instead of more complex setups
Note that this script executes blindly the content of DEPS file, run it only on
a project that you trust not to contain malicious DEPS files.
"""
import os
import sys
import subprocess
import argparse
from pathlib import Path

parser = argparse.ArgumentParser(
    prog='fetch_dawn_dependencies',
    description=__doc__,
)

parser.add_argument('-d',
                    '--directory',
                    type=str,
                    default="",
                    help="""
    Working directory, in which we read and apply DEPS files recursively. If not
    specified, the current working directory is used.
    """)

parser.add_argument('-g',
                    '--git',
                    type=str,
                    default="git",
                    help="""
    Path to the git command used to run all git operations. By default, git is
    retrieved from the PATH.
    You may also use this option to specify extra arguments for all calls to git.
    """)

parser.add_argument('-s',
                    '--shallow',
                    action='store_true',
                    default=True,
                    help="""
    Clone repositories without commit history (only getting data for the
    requested commit).
    NB: The git server hosting the dependencies must have turned on the
    `uploadpack.allowReachableSHA1InWant` option.
    NB2: git submodules may not work as expected (but they are not used by Dawn
    dependencies).
    """)

parser.add_argument('-ns',
                    '--no-shallow',
                    action='store_false',
                    dest='shallow',
                    help="Deactivate shallow cloning.")

parser.add_argument('-t',
                    '--use-test-deps',
                    action='store_true',
                    default=False,
                    help="""
    Fetch dependencies needed for testing.
    """)
def main(args):
    # The dependencies that we need to pull from the DEPS files.
    # Dependencies of dependencies are prefixed by their ancestors.
    required_submodules = [
        'third_party/vulkan-deps',
        'third_party/vulkan-deps/spirv-headers/src',
        'third_party/vulkan-deps/spirv-tools/src',
        'third_party/vulkan-deps/vulkan-headers/src',
        'third_party/vulkan-deps/vulkan-loader/src',
        'third_party/vulkan-deps/vulkan-tools/src',
        'third_party/glfw',
        'third_party/abseil-cpp',
        'third_party/jinja2',
        'third_party/markupsafe',
    ]

    if args.use_test_deps:
        required_submodules += [
            'third_party/googletest',
        ]

    root_dir = Path(args.directory).resolve()
    process_dir(args, root_dir, required_submodules)
def process_dir(args, dir_path, required_submodules):
    """
    Install dependencies for the provided directory by processing the DEPS file
    that it contains (if it exists).
    Recursively install dependencies in sub-directories that are created by
    cloning dependencies.
    """
    deps_path = dir_path / 'DEPS'
    if not deps_path.is_file():
        return

    log(f"Listing dependencies from {dir_path}")
    DEPS = open(deps_path).read()

    ldict = {}
    exec(DEPS, globals(), ldict)
    deps = ldict.get('deps')
    variables = ldict.get('vars', {})
    if deps is None:
        log(f"ERROR: DEPS file '{deps_path}' does not define a 'deps' variable")
        exit(1)

    for submodule in required_submodules:
        if submodule not in deps:
            continue
        submodule_path = dir_path / Path(submodule)

        raw_url = deps[submodule]['url']
        git_url, git_tag = raw_url.format(**variables).rsplit('@', 1)

        # Run git from within the submodule's path (don't use for clone)
        git = lambda *x: subprocess.run([args.git, '-C', submodule_path, *x],
                                        capture_output=True)

        log(f"Fetching dependency '{submodule}'")
        if not submodule_path.is_dir():
            # The module was never cloned: clone it, shallowly or not
            if args.shallow:
                log(f"Shallow cloning '{git_url}' at '{git_tag}' into '{submodule_path}'")
                shallow_clone(git, git_url, git_tag, submodule_path)
            else:
                log(f"Cloning '{git_url}' into '{submodule_path}'")
                subprocess.run([
                    args.git,
                    'clone',
                    '--recurse-submodules',
                    git_url,
                    submodule_path,
                ],
                               capture_output=True)

            log(f"Checking out tag '{git_tag}'")
            git('checkout', git_tag)

        elif (submodule_path / ".git").is_dir():
            # The module was already cloned, but we may need to update it
            proc = git('rev-parse', 'HEAD')
            need_update = proc.stdout.decode().strip() != git_tag

            if need_update:
                # Fetch the target commit only if it is not already available locally
                proc = git('cat-file', '-t', git_tag)
                git_tag_exists = proc.returncode == 0

                if not git_tag_exists:
                    log(f"Updating '{submodule_path}' from '{git_url}'")
                    if args.shallow:
                        git('fetch', 'origin', git_tag, '--depth', '1')
                    else:
                        git('fetch', 'origin')

                log(f"Checking out tag '{git_tag}'")
                git('checkout', git_tag)
        else:
            # The caller may have "flattened" the source tree to get rid of
            # some heavy submodules.
            log(f"(Overridden by a local copy of the submodule)")

        # Recurse into the dependency, which may have its own DEPS file
        required_subsubmodules = [
            m[len(submodule) + 1:] for m in required_submodules
            if m.startswith(submodule + "/")
        ]
        process_dir(args, submodule_path, required_subsubmodules)
def shallow_clone(git, git_url, git_tag, submodule_path):
    """
    Fetching only 1 commit is not exposed in the git clone API, so we decompose
    it manually into git init, git remote add, and git fetch.
    """
    submodule_path.mkdir()
    git('init')
    git('remote', 'add', 'origin', git_url)
    git('fetch', 'origin', git_tag, '--depth', '1')


def log(msg):
    """Just makes it look good in the CMake log flow."""
    print(f"-- -- {msg}")


class Var:
    """
    Mock Var class, which the content of DEPS files assumes to exist when it
    is exec-ed.
    """

    def __init__(self, name):
        self.name = name

    def __add__(self, text):
        return self.name + text

    def __radd__(self, text):
        return text + self.name


if __name__ == "__main__":
    main(parser.parse_args())
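
For clarity, here is a tiny sketch (not part of the patch) of what the Var
mock above does when a DEPS file is exec-ed: it stands in for gclient's
Var() so that evaluating the file does not fail, simply concatenating the
variable's name with neighbouring strings. The DEPS snippet is hypothetical.

```python
# Assumes the Var class defined in the script above is in scope.
url = Var('chromium_git') + '/external/some_dep.git'
print(url)  # chromium_git/external/some_dep.git
```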