Reland "Add deps to enable performing the cmake build of dawn_node on CQ"
This is a reland of ecbdd8fbe7
It prefixes the DEPS variables with "dawn_". Because they are globals,
they could otherwise collide with variables of the same name in other projects.
Original change's description:
> Add deps to enable performing the cmake build of dawn_node on CQ
>
> (and the cmake build of Dawn in general)
>
> Bug: dawn:688
> Change-Id: If7c037a03d237372739aed1f5dc78bffb7975a24
> Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/65603
> Reviewed-by: Ben Clayton <bclayton@google.com>
> Reviewed-by: Corentin Wallez <cwallez@chromium.org>
> Commit-Queue: Austin Eng <enga@chromium.org>
Bug: dawn:688
Change-Id: I81ec3d5298efea54b1417ff58569cf1c615ea372
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/66400
Reviewed-by: Ben Clayton <bclayton@google.com>
Commit-Queue: Austin Eng <enga@chromium.org>
parent 854899edb9
commit 4948c81344
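To see why the "dawn_" prefix matters: gclient matches DEPS variable overrides by name across the projects in a checkout, so a generic name like 'cmake_version' could be claimed by several DEPS files at once, while 'dawn_cmake_version' can only mean Dawn's. Below is a minimal sketch of a .gclient file using the new variables; it is illustrative only and not part of this change, and the values simply mirror the defaults added in DEPS further down.

# Hypothetical .gclient for a standalone Dawn checkout (illustrative).
solutions = [
  {
    "name": "dawn",
    "url": "https://dawn.googlesource.com/dawn.git",
    "deps_file": "DEPS",
    "managed": False,
    "custom_vars": {
      # Matched to Dawn's DEPS vars by name; the prefix keeps them from
      # clashing with similarly purposed variables in other projects.
      "dawn_node": True,
      "dawn_cmake_version": "version:3.13.5",
    },
  },
]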
@@ -137,6 +137,7 @@ set_if_not_defined(DAWN_TINT_DIR "${DAWN_THIRD_PARTY_DIR}/tint" "Directory in wh
set_if_not_defined(NODE_ADDON_API_DIR "${DAWN_THIRD_PARTY_DIR}/node-addon-api" "Directory in which to find node-addon-api")
set_if_not_defined(NODE_API_HEADERS_DIR "${DAWN_THIRD_PARTY_DIR}/node-api-headers" "Directory in which to find node-api-headers")
set_if_not_defined(WEBGPU_IDL_PATH "${DAWN_THIRD_PARTY_DIR}/gpuweb/webgpu.idl" "Path to the webgpu.idl definition file")
set_if_not_defined(GO_EXECUTABLE "go" "Golang executable for running the IDL generator")

# Much of the backend code is shared among desktop OpenGL and OpenGL ES
if (${DAWN_ENABLE_DESKTOP_GL} OR ${DAWN_ENABLE_OPENGLES})
DEPS
@@ -10,6 +10,9 @@ vars = {
  'dawn_standalone': True,
  'dawn_node': False, # Also fetches dependencies required for building NodeJS bindings.
  'dawn_cmake_version': 'version:3.13.5',
  'dawn_cmake_win32_sha1': 'b106d66bcdc8a71ea2cdf5446091327bfdb1bcd7',
  'dawn_go_version': 'version:1.16',
}

deps = {
@@ -155,6 +158,24 @@ deps = {
    'url': '{github_git}/gpuweb/gpuweb.git@67edc187f5305a72456663c34d51153601b79f3b',
    'condition': 'dawn_node',
  },

  'tools/golang': {
    'condition': 'dawn_node',
    'packages': [{
      'package': 'infra/3pp/tools/go/${{platform}}',
      'version': Var('dawn_go_version'),
    }],
    'dep_type': 'cipd',
  },

  'tools/cmake': {
    'condition': 'dawn_node and (host_os == "mac" or host_os == "linux")',
    'packages': [{
      'package': 'infra/3pp/tools/cmake/${{platform}}',
      'version': Var('dawn_cmake_version'),
    }],
    'dep_type': 'cipd',
  },
}

hooks = [
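The 'condition' strings above are evaluated by gclient against the DEPS variables together with built-ins such as host_os. The sketch below is not gclient's real parser; it only demonstrates the intended semantics of the 'tools/cmake' condition using plain Python expression evaluation.

# Rough sketch of how a DEPS 'condition' behaves (gclient has its own parser;
# this only illustrates the semantics with plain Python evaluation).
def eval_condition(condition, variables):
  # Variables such as 'dawn_node' and built-ins such as 'host_os' are visible
  # to the expression; everything else is withheld.
  return bool(eval(condition, {"__builtins__": {}}, dict(variables)))

cmake_condition = 'dawn_node and (host_os == "mac" or host_os == "linux")'
print(eval_condition(cmake_condition, {"dawn_node": True, "host_os": "linux"}))  # True
print(eval_condition(cmake_condition, {"dawn_node": False, "host_os": "linux"}))  # False
print(eval_condition(cmake_condition, {"dawn_node": True, "host_os": "win"}))  # False; the Windows hook below covers this case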
@@ -249,6 +270,30 @@ hooks = [
    'action': ['python', 'build/util/lastchange.py',
               '-o', 'build/util/LASTCHANGE'],
  },
  # TODO(https://crbug.com/1180257): Use CIPD for CMake on Windows.
  {
    'name': 'cmake_win32',
    'pattern': '.',
    'condition': 'dawn_node and host_os == "win"',
    'action': [ 'download_from_google_storage',
                '--no_resume',
                '--platform=win32',
                '--no_auth',
                '--bucket', 'chromium-tools',
                Var('dawn_cmake_win32_sha1'),
                '-o', 'tools/cmake-win32.zip'
    ],
  },
  {
    'name': 'cmake_win32_extract',
    'pattern': '.',
    'condition': 'dawn_node and host_os == "win"',
    'action': [ 'python',
                'scripts/extract.py',
                'tools/cmake-win32.zip',
                'tools/cmake-win32/',
    ],
  },
]

recursedeps = [
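For reference, the two Windows hooks amount to roughly the following when run by hand from a Dawn checkout. This is a sketch only: the arguments are copied from the hook definitions above, it assumes depot_tools' download_from_google_storage is on PATH, and normally gclient runhooks takes care of all of it.

# Manual equivalent of the 'cmake_win32' and 'cmake_win32_extract' hooks above
# (illustrative; gclient runhooks normally drives these).
import subprocess

subprocess.check_call([
    'download_from_google_storage', '--no_resume', '--platform=win32',
    '--no_auth', '--bucket', 'chromium-tools',
    'b106d66bcdc8a71ea2cdf5446091327bfdb1bcd7',  # Var('dawn_cmake_win32_sha1')
    '-o', 'tools/cmake-win32.zip',
])
subprocess.check_call([
    'python', 'scripts/extract.py',
    'tools/cmake-win32.zip', 'tools/cmake-win32/',
])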
scripts/extract.py (new file)
@@ -0,0 +1,182 @@
# Copyright (c) 2015, Google Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""Extracts archives."""

import hashlib
import optparse
import os
import os.path
import tarfile
import shutil
import sys
import zipfile


def CheckedJoin(output, path):
  """
  CheckedJoin returns os.path.join(output, path). It does sanity checks to
  ensure the resulting path is under output, but shouldn't be used on untrusted
  input.
  """
  path = os.path.normpath(path)
  if os.path.isabs(path) or path.startswith('.'):
    raise ValueError(path)
  return os.path.join(output, path)


class FileEntry(object):
  def __init__(self, path, mode, fileobj):
    self.path = path
    self.mode = mode
    self.fileobj = fileobj


class SymlinkEntry(object):
  def __init__(self, path, mode, target):
    self.path = path
    self.mode = mode
    self.target = target


def IterateZip(path):
  """
  IterateZip opens the zip file at path and returns a generator of entry objects
  for each file in it.
  """
  with zipfile.ZipFile(path, 'r') as zip_file:
    for info in zip_file.infolist():
      if info.filename.endswith('/'):
        continue
      yield FileEntry(info.filename, None, zip_file.open(info))


def IterateTar(path, compression):
  """
  IterateTar opens the tar.gz or tar.bz2 file at path and returns a generator of
  entry objects for each file in it.
  """
  with tarfile.open(path, 'r:' + compression) as tar_file:
    for info in tar_file:
      if info.isdir():
        pass
      elif info.issym():
        yield SymlinkEntry(info.name, None, info.linkname)
      elif info.isfile():
        yield FileEntry(info.name, info.mode,
                        tar_file.extractfile(info))
      else:
        raise ValueError('Unknown entry type "%s"' % (info.name, ))


def main(args):
  parser = optparse.OptionParser(usage='Usage: %prog ARCHIVE OUTPUT')
  parser.add_option('--no-prefix',
                    dest='no_prefix',
                    action='store_true',
                    help='Do not remove a prefix from paths in the archive.')
  options, args = parser.parse_args(args)

  if len(args) != 2:
    parser.print_help()
    return 1

  archive, output = args

  if not os.path.exists(archive):
    # Skip archives that weren't downloaded.
    return 0

  with open(archive, 'rb') as f:
    sha256 = hashlib.sha256()
    while True:
      chunk = f.read(1024 * 1024)
      if not chunk:
        break
      sha256.update(chunk)
  digest = sha256.hexdigest()

  stamp_path = os.path.join(output, ".dawn_archive_digest")
  if os.path.exists(stamp_path):
    with open(stamp_path) as f:
      if f.read().strip() == digest:
        print "Already up-to-date."
        return 0

  if archive.endswith('.zip'):
    entries = IterateZip(archive)
  elif archive.endswith('.tar.gz'):
    entries = IterateTar(archive, 'gz')
  elif archive.endswith('.tar.bz2'):
    entries = IterateTar(archive, 'bz2')
  else:
    raise ValueError(archive)

  try:
    if os.path.exists(output):
      print "Removing %s" % (output, )
      shutil.rmtree(output)

    print "Extracting %s to %s" % (archive, output)
    prefix = None
    num_extracted = 0
    for entry in entries:
      # Even on Windows, zip files must always use forward slashes.
      if '\\' in entry.path or entry.path.startswith('/'):
        raise ValueError(entry.path)

      if not options.no_prefix:
        new_prefix, rest = entry.path.split('/', 1)

        # Ensure the archive is consistent.
        if prefix is None:
          prefix = new_prefix
        if prefix != new_prefix:
          raise ValueError((prefix, new_prefix))
      else:
        rest = entry.path

      # Extract the file into the output directory.
      fixed_path = CheckedJoin(output, rest)
      if not os.path.isdir(os.path.dirname(fixed_path)):
        os.makedirs(os.path.dirname(fixed_path))
      if isinstance(entry, FileEntry):
        with open(fixed_path, 'wb') as out:
          shutil.copyfileobj(entry.fileobj, out)
      elif isinstance(entry, SymlinkEntry):
        os.symlink(entry.target, fixed_path)
      else:
        raise TypeError('unknown entry type')

      # Fix up permissions if need be.
      # TODO(davidben): To be extra tidy, this should only track the execute bit
      # as in git.
      if entry.mode is not None:
        os.chmod(fixed_path, entry.mode)

      # Print every 100 files, so bots do not time out on large archives.
      num_extracted += 1
      if num_extracted % 100 == 0:
        print "Extracted %d files..." % (num_extracted, )
  finally:
    entries.close()

  with open(stamp_path, 'w') as f:
    f.write(digest)

  print "Done. Extracted %d files." % (num_extracted, )
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
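A small usage sketch for the script above. The archive name and the top-level prefix inside it are made up for illustration, it assumes scripts/extract.py is importable (e.g. run from the scripts/ directory), and since the script uses Python 2 print statements it has to be driven by the same 'python' interpreter the hooks use.

# Illustrative check of the prefix-stripping and stamp behavior implemented above.
import zipfile
import extract  # scripts/extract.py, assuming scripts/ is on sys.path

with zipfile.ZipFile('cmake-win32.zip', 'w') as z:
  z.writestr('cmake-3.13.5-win32-x86/bin/cmake.exe', b'fake binary')  # made-up prefix

extract.main(['cmake-win32.zip', 'out'])
# The single top-level directory is treated as a prefix and stripped, so the
# file lands at out/bin/cmake.exe; out/.dawn_archive_digest records the zip's
# SHA-256 so rerunning with the same archive exits early ("Already up-to-date.").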
@@ -41,7 +41,7 @@ function(idlgen)
    message(FATAL_ERROR "idlgen() missing IDLS argument(s)")
  endif()
  add_custom_command(
-   COMMAND "go" "run" "main.go"
+   COMMAND ${GO_EXECUTABLE} "run" "main.go"
        "--template" "${IDLGEN_TEMPLATE}"
        "--output" "${IDLGEN_OUTPUT}"
        ${IDLGEN_IDLS}