Consistent formatting for Dawn/Tint.

This CL updates the clang-format files to use a single shared format
between Dawn and Tint. The major changes: the tab width is 4 spaces, lines
are 100 columns, and namespaces are not indented.

Bug: dawn:1339
Change-Id: I4208742c95643998d9fd14e77a9cc558071ded39
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/87603
Commit-Queue: Dan Sinclair <dsinclair@chromium.org>
Reviewed-by: Corentin Wallez <cwallez@chromium.org>
Kokoro: Kokoro <noreply+kokoro@google.com>
Author: dan sinclair
Date: 2022-05-01 14:40:55 +00:00
Committed by: Dawn LUCI CQ
Parent: 73b1d1dafa
Commit: 41e4d9a34c
1827 changed files with 218382 additions and 227741 deletions
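
For reference, a .clang-format sketch consistent with the three settings named in the commit message is shown below. The actual contents of the updated files are not part of this excerpt, so the base style and anything beyond those three keys are assumptions.

# Sketch only - not the literal contents of the updated .clang-format files.
BasedOnStyle: Chromium        # assumed base style
IndentWidth: 4                # "tab width is 4 spaces"
ColumnLimit: 100              # "lines are 100 columns"
NamespaceIndentation: None    # "namespaces are not indented"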


@@ -1 +0,0 @@
filter=-runtime/indentation_namespace


@@ -31,126 +31,117 @@
namespace dawn {
template <typename T>
struct IsDawnBitmask {
    static constexpr bool enable = false;
};

template <typename T, typename Enable = void>
struct LowerBitmask {
    static constexpr bool enable = false;
};

template <typename T>
struct LowerBitmask<T, typename std::enable_if<IsDawnBitmask<T>::enable>::type> {
    static constexpr bool enable = true;
    using type = T;
    constexpr static T Lower(T t) { return t; }
};

template <typename T>
struct BoolConvertible {
    using Integral = typename std::underlying_type<T>::type;

    // NOLINTNEXTLINE(runtime/explicit)
    constexpr BoolConvertible(Integral value) : value(value) {}
    constexpr operator bool() const { return value != 0; }
    constexpr operator T() const { return static_cast<T>(value); }

    Integral value;
};

template <typename T>
struct LowerBitmask<BoolConvertible<T>> {
    static constexpr bool enable = true;
    using type = T;
    static constexpr type Lower(BoolConvertible<T> t) { return t; }
};

template <
    typename T1,
    typename T2,
    typename = typename std::enable_if<LowerBitmask<T1>::enable && LowerBitmask<T2>::enable>::type>
constexpr BoolConvertible<typename LowerBitmask<T1>::type> operator|(T1 left, T2 right) {
    using T = typename LowerBitmask<T1>::type;
    using Integral = typename std::underlying_type<T>::type;
    return static_cast<Integral>(LowerBitmask<T1>::Lower(left)) |
           static_cast<Integral>(LowerBitmask<T2>::Lower(right));
}

template <
    typename T1,
    typename T2,
    typename = typename std::enable_if<LowerBitmask<T1>::enable && LowerBitmask<T2>::enable>::type>
constexpr BoolConvertible<typename LowerBitmask<T1>::type> operator&(T1 left, T2 right) {
    using T = typename LowerBitmask<T1>::type;
    using Integral = typename std::underlying_type<T>::type;
    return static_cast<Integral>(LowerBitmask<T1>::Lower(left)) &
           static_cast<Integral>(LowerBitmask<T2>::Lower(right));
}

template <
    typename T1,
    typename T2,
    typename = typename std::enable_if<LowerBitmask<T1>::enable && LowerBitmask<T2>::enable>::type>
constexpr BoolConvertible<typename LowerBitmask<T1>::type> operator^(T1 left, T2 right) {
    using T = typename LowerBitmask<T1>::type;
    using Integral = typename std::underlying_type<T>::type;
    return static_cast<Integral>(LowerBitmask<T1>::Lower(left)) ^
           static_cast<Integral>(LowerBitmask<T2>::Lower(right));
}

template <typename T1>
constexpr BoolConvertible<typename LowerBitmask<T1>::type> operator~(T1 t) {
    using T = typename LowerBitmask<T1>::type;
    using Integral = typename std::underlying_type<T>::type;
    return ~static_cast<Integral>(LowerBitmask<T1>::Lower(t));
}

template <
    typename T,
    typename T2,
    typename = typename std::enable_if<IsDawnBitmask<T>::enable && LowerBitmask<T2>::enable>::type>
constexpr T& operator&=(T& l, T2 right) {
    T r = LowerBitmask<T2>::Lower(right);
    l = l & r;
    return l;
}

template <
    typename T,
    typename T2,
    typename = typename std::enable_if<IsDawnBitmask<T>::enable && LowerBitmask<T2>::enable>::type>
constexpr T& operator|=(T& l, T2 right) {
    T r = LowerBitmask<T2>::Lower(right);
    l = l | r;
    return l;
}

template <
    typename T,
    typename T2,
    typename = typename std::enable_if<IsDawnBitmask<T>::enable && LowerBitmask<T2>::enable>::type>
constexpr T& operator^=(T& l, T2 right) {
    T r = LowerBitmask<T2>::Lower(right);
    l = l ^ r;
    return l;
}

template <typename T>
constexpr bool HasZeroOrOneBits(T value) {
    using Integral = typename std::underlying_type<T>::type;
    return (static_cast<Integral>(value) & (static_cast<Integral>(value) - 1)) == 0;
}
} // namespace dawn
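
Since this header defines Dawn's enum-class bitmask operators, a standalone usage sketch may help. The Color enum and its IsDawnBitmask specialization below are hypothetical and only illustrate the opt-in pattern; the enum lives in namespace dawn here so the operators above are found by argument-dependent lookup.

#include <cstdint>

// Hypothetical client enum, not part of this CL.
namespace dawn {

enum class Color : uint32_t {
    None = 0,
    Red = 1,
    Green = 2,
    Blue = 4,
};

// Opt the enum into the bitmask operators defined above.
template <>
struct IsDawnBitmask<Color> {
    static constexpr bool enable = true;
};

inline Color Example() {
    Color yellow = Color::Red | Color::Green;  // operator| yields a BoolConvertible<Color>
    if (yellow & Color::Red) {                 // operator bool() makes the result testable
        yellow &= ~Color::Blue;                // compound assignment is also provided
    }
    static_assert(HasZeroOrOneBits(Color::Red), "a single set bit passes the check");
    return yellow;
}

}  // namespace dawn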


@@ -65,7 +65,7 @@ struct DawnWSIContextD3D12 {
#endif
#if defined(DAWN_ENABLE_BACKEND_METAL) && defined(__OBJC__)
#import <Metal/Metal.h>
struct DawnWSIContextMetal {
    id<MTLDevice> device = nil;


@@ -30,81 +30,81 @@ struct ID3D12Resource;
namespace dawn::native::d3d12 {
class D3D11on12ResourceCache;

DAWN_NATIVE_EXPORT Microsoft::WRL::ComPtr<ID3D12Device> GetD3D12Device(WGPUDevice device);
DAWN_NATIVE_EXPORT DawnSwapChainImplementation CreateNativeSwapChainImpl(WGPUDevice device,
                                                                         HWND window);
DAWN_NATIVE_EXPORT WGPUTextureFormat
GetNativeSwapChainPreferredFormat(const DawnSwapChainImplementation* swapChain);

enum MemorySegment {
    Local,
    NonLocal,
};

DAWN_NATIVE_EXPORT uint64_t SetExternalMemoryReservation(WGPUDevice device,
                                                         uint64_t requestedReservationSize,
                                                         MemorySegment memorySegment);

struct DAWN_NATIVE_EXPORT ExternalImageDescriptorDXGISharedHandle : ExternalImageDescriptor {
  public:
    ExternalImageDescriptorDXGISharedHandle();

    // Note: SharedHandle must be a handle to a texture object.
    HANDLE sharedHandle;
};

// Keyed mutex acquire/release uses a fixed key of 0 to match Chromium behavior.
constexpr UINT64 kDXGIKeyedMutexAcquireReleaseKey = 0;

struct DAWN_NATIVE_EXPORT ExternalImageAccessDescriptorDXGIKeyedMutex
    : ExternalImageAccessDescriptor {
  public:
    // TODO(chromium:1241533): Remove deprecated keyed mutex params after removing associated
    // code from Chromium - we use a fixed key of 0 for acquire and release everywhere now.
    uint64_t acquireMutexKey;
    uint64_t releaseMutexKey;
    bool isSwapChainTexture = false;
};

class DAWN_NATIVE_EXPORT ExternalImageDXGI {
  public:
    ~ExternalImageDXGI();

    // Note: SharedHandle must be a handle to a texture object.
    static std::unique_ptr<ExternalImageDXGI> Create(
        WGPUDevice device,
        const ExternalImageDescriptorDXGISharedHandle* descriptor);

    WGPUTexture ProduceTexture(WGPUDevice device,
                               const ExternalImageAccessDescriptorDXGIKeyedMutex* descriptor);

  private:
    ExternalImageDXGI(Microsoft::WRL::ComPtr<ID3D12Resource> d3d12Resource,
                      const WGPUTextureDescriptor* descriptor);

    Microsoft::WRL::ComPtr<ID3D12Resource> mD3D12Resource;

    // Contents of WGPUTextureDescriptor are stored individually since the descriptor
    // could outlive this image.
    WGPUTextureUsageFlags mUsage;
    WGPUTextureUsageFlags mUsageInternal = WGPUTextureUsage_None;
    WGPUTextureDimension mDimension;
    WGPUExtent3D mSize;
    WGPUTextureFormat mFormat;
    uint32_t mMipLevelCount;
    uint32_t mSampleCount;

    std::unique_ptr<D3D11on12ResourceCache> mD3D11on12ResourceCache;
};

struct DAWN_NATIVE_EXPORT AdapterDiscoveryOptions : public AdapterDiscoveryOptionsBase {
    AdapterDiscoveryOptions();
    explicit AdapterDiscoveryOptions(Microsoft::WRL::ComPtr<IDXGIAdapter> adapter);

    Microsoft::WRL::ComPtr<IDXGIAdapter> dxgiAdapter;
};
} // namespace dawn::native::d3d12
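
As a usage illustration only (not code from this CL): a sketch of wiring the Create/ProduceTexture entry points above together. The header path and the helper name are assumptions, and error handling is minimal.

#include <memory>

#include "dawn/native/D3D12Backend.h"  // assumed header path for the declarations above

// Hypothetical helper: import a shared-handle texture and produce a WGPUTexture from it.
// The returned ExternalImageDXGI must outlive any texture produced from it.
std::unique_ptr<dawn::native::d3d12::ExternalImageDXGI> ImportSharedTexture(
    WGPUDevice device,
    HANDLE sharedHandle,
    const WGPUTextureDescriptor* textureDesc,
    WGPUTexture* outTexture) {
    dawn::native::d3d12::ExternalImageDescriptorDXGISharedHandle desc;
    desc.cTextureDescriptor = textureDesc;  // must match the image creation params
    desc.isInitialized = true;
    desc.sharedHandle = sharedHandle;

    auto image = dawn::native::d3d12::ExternalImageDXGI::Create(device, &desc);
    if (image == nullptr) {
        *outTexture = nullptr;
        return nullptr;
    }

    dawn::native::d3d12::ExternalImageAccessDescriptorDXGIKeyedMutex access;
    access.isInitialized = true;
    access.usage = textureDesc->usage;  // reuse the descriptor's usage flags
    *outTexture = image->ProduceTexture(device, &access);
    return image;
}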


@@ -23,237 +23,237 @@
#include "dawn/webgpu.h"
namespace dawn::platform {
class Platform;
} // namespace dawn::platform

namespace wgpu {
struct AdapterProperties;
struct DeviceDescriptor;
} // namespace wgpu

namespace dawn::native {

class InstanceBase;
class AdapterBase;

// An optional parameter of Adapter::CreateDevice() to send additional information when creating
// a Device. For example, we can use it to enable a workaround, optimization or feature.
struct DAWN_NATIVE_EXPORT DawnDeviceDescriptor {
    std::vector<const char*> requiredFeatures;
    std::vector<const char*> forceEnabledToggles;
    std::vector<const char*> forceDisabledToggles;

    const WGPURequiredLimits* requiredLimits = nullptr;
};

// A struct to record the information of a toggle. A toggle is a code path in Dawn device that
// can be manually configured to run or not outside Dawn, including workarounds, special
// features and optimizations.
struct ToggleInfo {
    const char* name;
    const char* description;
    const char* url;
};

// A struct to record the information of a feature. A feature is a GPU feature that is not
// required to be supported by all Dawn backends and can only be used when it is enabled on the
// creation of device.
using FeatureInfo = ToggleInfo;

// An adapter is an object that represents one possibility of creating devices in the system.
// Most of the time it will represent a combination of a physical GPU and an API. Note that the
// same GPU can be represented by multiple adapters but on different APIs.
//
// The underlying Dawn adapter is owned by the Dawn instance so this class is not RAII but just
// a reference to an underlying adapter.
class DAWN_NATIVE_EXPORT Adapter {
  public:
    Adapter();
    // NOLINTNEXTLINE(runtime/explicit)
    Adapter(AdapterBase* impl);
    ~Adapter();

    Adapter(const Adapter& other);
    Adapter& operator=(const Adapter& other);

    // Essentially webgpu.h's wgpuAdapterGetProperties while we don't have WGPUAdapter in
    // dawn.json
    void GetProperties(wgpu::AdapterProperties* properties) const;
    void GetProperties(WGPUAdapterProperties* properties) const;

    std::vector<const char*> GetSupportedExtensions() const;
    std::vector<const char*> GetSupportedFeatures() const;
    WGPUDeviceProperties GetAdapterProperties() const;
    bool GetLimits(WGPUSupportedLimits* limits) const;

    void SetUseTieredLimits(bool useTieredLimits);

    // Check that the Adapter is able to support importing external images. This is necessary
    // to implement the swapchain and interop APIs in Chromium.
    bool SupportsExternalImages() const;

    explicit operator bool() const;

    // Create a device on this adapter. On an error, nullptr is returned.
    WGPUDevice CreateDevice(const DawnDeviceDescriptor* deviceDescriptor);
    WGPUDevice CreateDevice(const wgpu::DeviceDescriptor* deviceDescriptor);
    WGPUDevice CreateDevice(const WGPUDeviceDescriptor* deviceDescriptor = nullptr);

    void RequestDevice(const DawnDeviceDescriptor* descriptor,
                       WGPURequestDeviceCallback callback,
                       void* userdata);
    void RequestDevice(const wgpu::DeviceDescriptor* descriptor,
                       WGPURequestDeviceCallback callback,
                       void* userdata);
    void RequestDevice(const WGPUDeviceDescriptor* descriptor,
                       WGPURequestDeviceCallback callback,
                       void* userdata);

    // Returns the underlying WGPUAdapter object.
    WGPUAdapter Get() const;

    // Reset the backend device object for testing purposes.
    void ResetInternalDeviceForTesting();

  private:
    AdapterBase* mImpl = nullptr;
};

// Base class for options passed to Instance::DiscoverAdapters.
struct DAWN_NATIVE_EXPORT AdapterDiscoveryOptionsBase {
  public:
    const WGPUBackendType backendType;

  protected:
    explicit AdapterDiscoveryOptionsBase(WGPUBackendType type);
};

enum BackendValidationLevel { Full, Partial, Disabled };

// Represents a connection to dawn_native and is used for dependency injection, discovering
// system adapters and injecting custom adapters (like a Swiftshader Vulkan adapter).
//
// This is an RAII class for Dawn instances and also controls the lifetime of all adapters
// for this instance.
class DAWN_NATIVE_EXPORT Instance {
  public:
    explicit Instance(const WGPUInstanceDescriptor* desc = nullptr);
    ~Instance();

    Instance(const Instance& other) = delete;
    Instance& operator=(const Instance& other) = delete;

    // Gather all adapters in the system that can be accessed with no special options. These
    // adapters will later be returned by GetAdapters.
    void DiscoverDefaultAdapters();

    // Adds adapters that can be discovered with the options provided (like a getProcAddress).
    // The backend is chosen based on the type of the options used. Returns true on success.
    bool DiscoverAdapters(const AdapterDiscoveryOptionsBase* options);

    // Returns all the adapters that the instance knows about.
    std::vector<Adapter> GetAdapters() const;

    const ToggleInfo* GetToggleInfo(const char* toggleName);
    const FeatureInfo* GetFeatureInfo(WGPUFeatureName feature);

    // Enables backend validation layers
    void EnableBackendValidation(bool enableBackendValidation);
    void SetBackendValidationLevel(BackendValidationLevel validationLevel);

    // Enable debug capture on Dawn startup
    void EnableBeginCaptureOnStartup(bool beginCaptureOnStartup);

    // TODO(dawn:1374) Deprecate this once it is passed via the descriptor.
    void SetPlatform(dawn::platform::Platform* platform);

    // Returns the underlying WGPUInstance object.
    WGPUInstance Get() const;

  private:
    InstanceBase* mImpl = nullptr;
};

// Backend-agnostic API for dawn_native
DAWN_NATIVE_EXPORT const DawnProcTable& GetProcs();

// Query the names of all the toggles that are enabled in device
DAWN_NATIVE_EXPORT std::vector<const char*> GetTogglesUsed(WGPUDevice device);

// Backdoor to get the number of lazy clears for testing
DAWN_NATIVE_EXPORT size_t GetLazyClearCountForTesting(WGPUDevice device);

// Backdoor to get the number of deprecation warnings for testing
DAWN_NATIVE_EXPORT size_t GetDeprecationWarningCountForTesting(WGPUDevice device);

// Query if texture has been initialized
DAWN_NATIVE_EXPORT bool IsTextureSubresourceInitialized(
    WGPUTexture texture,
    uint32_t baseMipLevel,
    uint32_t levelCount,
    uint32_t baseArrayLayer,
    uint32_t layerCount,
    WGPUTextureAspect aspect = WGPUTextureAspect_All);

// Backdoor to get the order of the ProcMap for testing
DAWN_NATIVE_EXPORT std::vector<const char*> GetProcMapNamesForTesting();

DAWN_NATIVE_EXPORT bool DeviceTick(WGPUDevice device);

// ErrorInjector functions used for testing only. Defined in dawn_native/ErrorInjector.cpp
DAWN_NATIVE_EXPORT void EnableErrorInjector();
DAWN_NATIVE_EXPORT void DisableErrorInjector();
DAWN_NATIVE_EXPORT void ClearErrorInjector();
DAWN_NATIVE_EXPORT uint64_t AcquireErrorInjectorCallCount();
DAWN_NATIVE_EXPORT void InjectErrorAt(uint64_t index);

// The different types of external images
enum ExternalImageType {
    OpaqueFD,
    DmaBuf,
    IOSurface,
    DXGISharedHandle,
    EGLImage,
};

// Common properties of external images
struct DAWN_NATIVE_EXPORT ExternalImageDescriptor {
  public:
    const WGPUTextureDescriptor* cTextureDescriptor;  // Must match image creation params
    bool isInitialized;  // Whether the texture is initialized on import

    ExternalImageType GetType() const;

  protected:
    explicit ExternalImageDescriptor(ExternalImageType type);

  private:
    ExternalImageType mType;
};

struct DAWN_NATIVE_EXPORT ExternalImageAccessDescriptor {
  public:
    bool isInitialized;  // Whether the texture is initialized on import
    WGPUTextureUsageFlags usage;
};

struct DAWN_NATIVE_EXPORT ExternalImageExportInfo {
  public:
    bool isInitialized;  // Whether the texture is initialized after export
    ExternalImageType GetType() const;

  protected:
    explicit ExternalImageExportInfo(ExternalImageType type);

  private:
    ExternalImageType mType;
};

DAWN_NATIVE_EXPORT const char* GetObjectLabelForTesting(void* objectHandle);

DAWN_NATIVE_EXPORT uint64_t GetAllocatedSizeForTesting(WGPUBuffer buffer);

DAWN_NATIVE_EXPORT bool BindGroupLayoutBindingsEqualForTesting(WGPUBindGroupLayout a,
                                                               WGPUBindGroupLayout b);
} // namespace dawn::native
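
A short sketch of how the Instance/Adapter API above is typically driven (illustrative only; the adapter-selection policy here is a placeholder):

#include <vector>

#include "dawn/native/DawnNative.h"

// Enumerate adapters and create a device on the first one found.
WGPUDevice CreateDeviceOnFirstAdapter(dawn::native::Instance* instance) {
    instance->DiscoverDefaultAdapters();
    std::vector<dawn::native::Adapter> adapters = instance->GetAdapters();
    if (adapters.empty()) {
        return nullptr;
    }
    // Calling with no argument selects the WGPUDeviceDescriptor overload, whose
    // parameter defaults to nullptr.
    return adapters[0].CreateDevice();
}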


@@ -29,41 +29,41 @@ struct __IOSurface;
typedef __IOSurface* IOSurfaceRef;
#ifdef __OBJC__
#import <Metal/Metal.h>
#endif // __OBJC__

namespace dawn::native::metal {

struct DAWN_NATIVE_EXPORT AdapterDiscoveryOptions : public AdapterDiscoveryOptionsBase {
    AdapterDiscoveryOptions();
};

struct DAWN_NATIVE_EXPORT ExternalImageDescriptorIOSurface : ExternalImageDescriptor {
  public:
    ExternalImageDescriptorIOSurface();

    IOSurfaceRef ioSurface;

    // This has been deprecated.
    uint32_t plane;
};

DAWN_NATIVE_EXPORT WGPUTexture WrapIOSurface(WGPUDevice device,
                                             const ExternalImageDescriptorIOSurface* descriptor);

// When making Metal interop with other APIs, we need to be careful that QueueSubmit doesn't
// mean that the operations will be visible to other APIs/Metal devices right away. macOS
// does have a global queue of graphics operations, but the command buffers are inserted there
// when they are "scheduled". Submitting other operations before the command buffer is
// scheduled could lead to races in who gets scheduled first and incorrect rendering.
DAWN_NATIVE_EXPORT void WaitForCommandsToBeScheduled(WGPUDevice device);

} // namespace dawn::native::metal

#ifdef __OBJC__
namespace dawn::native::metal {
DAWN_NATIVE_EXPORT id<MTLDevice> GetMetalDevice(WGPUDevice device);
} // namespace dawn::native::metal
#endif // __OBJC__


@@ -19,7 +19,7 @@
#include "dawn/native/DawnNative.h"
namespace dawn::native::null {
DAWN_NATIVE_EXPORT DawnSwapChainImplementation CreateNativeSwapChainImpl();
} // namespace dawn::native::null
#endif // INCLUDE_DAWN_NATIVE_NULLBACKEND_H_


@@ -22,33 +22,34 @@ typedef void* EGLImage;
namespace dawn::native::opengl {
struct DAWN_NATIVE_EXPORT AdapterDiscoveryOptions : public AdapterDiscoveryOptionsBase {
    AdapterDiscoveryOptions();

    void* (*getProc)(const char*);
};

struct DAWN_NATIVE_EXPORT AdapterDiscoveryOptionsES : public AdapterDiscoveryOptionsBase {
    AdapterDiscoveryOptionsES();

    void* (*getProc)(const char*);
};

using PresentCallback = void (*)(void*);

DAWN_NATIVE_EXPORT DawnSwapChainImplementation CreateNativeSwapChainImpl(WGPUDevice device,
                                                                         PresentCallback present,
                                                                         void* presentUserdata);
DAWN_NATIVE_EXPORT WGPUTextureFormat
GetNativeSwapChainPreferredFormat(const DawnSwapChainImplementation* swapChain);

struct DAWN_NATIVE_EXPORT ExternalImageDescriptorEGLImage : ExternalImageDescriptor {
  public:
    ExternalImageDescriptorEGLImage();

    ::EGLImage image;
};

DAWN_NATIVE_EXPORT WGPUTexture
WrapExternalEGLImage(WGPUDevice device, const ExternalImageDescriptorEGLImage* descriptor);
} // namespace dawn::native::opengl


@@ -24,116 +24,116 @@
namespace dawn::native::vulkan {
DAWN_NATIVE_EXPORT VkInstance GetInstance(WGPUDevice device);

DAWN_NATIVE_EXPORT PFN_vkVoidFunction GetInstanceProcAddr(WGPUDevice device, const char* pName);

DAWN_NATIVE_EXPORT DawnSwapChainImplementation CreateNativeSwapChainImpl(WGPUDevice device,
                                                                         ::VkSurfaceKHR surface);
DAWN_NATIVE_EXPORT WGPUTextureFormat
GetNativeSwapChainPreferredFormat(const DawnSwapChainImplementation* swapChain);

struct DAWN_NATIVE_EXPORT AdapterDiscoveryOptions : public AdapterDiscoveryOptionsBase {
    AdapterDiscoveryOptions();

    bool forceSwiftShader = false;
};

struct DAWN_NATIVE_EXPORT ExternalImageDescriptorVk : ExternalImageDescriptor {
  public:
    // The following members may be ignored if |ExternalImageDescriptor::isInitialized| is false
    // since the import does not need to preserve texture contents.

    // See https://www.khronos.org/registry/vulkan/specs/1.1/html/chap7.html. The acquire
    // operation old/new layouts must match exactly the layouts in the release operation. So
    // we may need to issue two barriers releasedOldLayout -> releasedNewLayout ->
    // cTextureDescriptor.usage if the new layout is not compatible with the desired usage.
    // The first barrier is the queue transfer, the second is the layout transition to our
    // desired usage.
    VkImageLayout releasedOldLayout = VK_IMAGE_LAYOUT_GENERAL;
    VkImageLayout releasedNewLayout = VK_IMAGE_LAYOUT_GENERAL;

  protected:
    using ExternalImageDescriptor::ExternalImageDescriptor;
};

struct ExternalImageExportInfoVk : ExternalImageExportInfo {
  public:
    // See comments in |ExternalImageDescriptorVk|
    // Contains the old/new layouts used in the queue release operation.
    VkImageLayout releasedOldLayout;
    VkImageLayout releasedNewLayout;

  protected:
    using ExternalImageExportInfo::ExternalImageExportInfo;
};

// Can't use DAWN_PLATFORM_LINUX since header included in both Dawn and Chrome
#ifdef __linux__

// Common properties of external images represented by FDs. On successful import the file
// descriptor's ownership is transferred to the Dawn implementation and they shouldn't be
// used outside of Dawn again. TODO(enga): Also transfer ownership in the error case so the
// caller can assume the FD is always consumed.
struct DAWN_NATIVE_EXPORT ExternalImageDescriptorFD : ExternalImageDescriptorVk {
  public:
    int memoryFD;              // A file descriptor from an export of the memory of the image
    std::vector<int> waitFDs;  // File descriptors of semaphores which will be waited on

  protected:
    using ExternalImageDescriptorVk::ExternalImageDescriptorVk;
};

// Descriptor for opaque file descriptor image import
struct DAWN_NATIVE_EXPORT ExternalImageDescriptorOpaqueFD : ExternalImageDescriptorFD {
    ExternalImageDescriptorOpaqueFD();

    VkDeviceSize allocationSize;  // Must match VkMemoryAllocateInfo from image creation
    uint32_t memoryTypeIndex;     // Must match VkMemoryAllocateInfo from image creation
};

// Descriptor for dma-buf file descriptor image import
struct DAWN_NATIVE_EXPORT ExternalImageDescriptorDmaBuf : ExternalImageDescriptorFD {
    ExternalImageDescriptorDmaBuf();

    uint32_t stride;       // Stride of the buffer in bytes
    uint64_t drmModifier;  // DRM modifier of the buffer
};

// Info struct that is written to in |ExportVulkanImage|.
struct DAWN_NATIVE_EXPORT ExternalImageExportInfoFD : ExternalImageExportInfoVk {
  public:
    // Contains the exported semaphore handles.
    std::vector<int> semaphoreHandles;

  protected:
    using ExternalImageExportInfoVk::ExternalImageExportInfoVk;
};

struct DAWN_NATIVE_EXPORT ExternalImageExportInfoOpaqueFD : ExternalImageExportInfoFD {
    ExternalImageExportInfoOpaqueFD();
};

struct DAWN_NATIVE_EXPORT ExternalImageExportInfoDmaBuf : ExternalImageExportInfoFD {
    ExternalImageExportInfoDmaBuf();
};

#endif // __linux__

// Imports external memory into a Vulkan image. Internally, this uses external memory /
// semaphore extensions to import the image and wait on the provided synchronization
// primitives before the texture can be used.
// On failure, returns a nullptr.
DAWN_NATIVE_EXPORT WGPUTexture WrapVulkanImage(WGPUDevice device,
                                               const ExternalImageDescriptorVk* descriptor);

// Exports external memory from a Vulkan image. This must be called on wrapped textures
// before they are destroyed. It writes the semaphore to wait on and the old/new image
// layouts to |info|. Pass VK_IMAGE_LAYOUT_UNDEFINED as |desiredLayout| if you don't want to
// perform a layout transition.
DAWN_NATIVE_EXPORT bool ExportVulkanImage(WGPUTexture texture,
                                          VkImageLayout desiredLayout,
                                          ExternalImageExportInfoVk* info);
} // namespace dawn::native::vulkan
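
A hedged sketch of the Linux opaque-FD import path declared above; the header path, helper names, parameter sources, and layout choices are assumptions.

#include <cstdint>

#include "dawn/native/VulkanBackend.h"  // assumed header path for the declarations above

// Hypothetical helper: wrap externally allocated Vulkan memory as a WGPUTexture.
// On success the memoryFD's ownership transfers to Dawn.
WGPUTexture ImportOpaqueFDImage(WGPUDevice device,
                                const WGPUTextureDescriptor* textureDesc,
                                int memoryFD,
                                VkDeviceSize allocationSize,
                                uint32_t memoryTypeIndex) {
    dawn::native::vulkan::ExternalImageDescriptorOpaqueFD desc;
    desc.cTextureDescriptor = textureDesc;
    desc.isInitialized = false;              // no contents to preserve on import
    desc.memoryFD = memoryFD;
    desc.allocationSize = allocationSize;    // must match VkMemoryAllocateInfo
    desc.memoryTypeIndex = memoryTypeIndex;  // must match VkMemoryAllocateInfo
    return dawn::native::vulkan::WrapVulkanImage(device, &desc);
}

// Before destroying the wrapped texture, export it to recover the release layouts and
// semaphore FDs; pass VK_IMAGE_LAYOUT_UNDEFINED instead to skip the layout transition.
bool ReleaseWrappedImage(WGPUTexture texture,
                         dawn::native::vulkan::ExternalImageExportInfoOpaqueFD* info) {
    return dawn::native::vulkan::ExportVulkanImage(texture, VK_IMAGE_LAYOUT_GENERAL, info);
}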


@@ -16,21 +16,21 @@
#define INCLUDE_DAWN_NATIVE_DAWN_NATIVE_EXPORT_H_
#if defined(DAWN_NATIVE_SHARED_LIBRARY)
#if defined(_WIN32)
#if defined(DAWN_NATIVE_IMPLEMENTATION)
#define DAWN_NATIVE_EXPORT __declspec(dllexport)
#else
#define DAWN_NATIVE_EXPORT __declspec(dllimport)
#endif
#else // defined(_WIN32)
#if defined(DAWN_NATIVE_IMPLEMENTATION)
#define DAWN_NATIVE_EXPORT __attribute__((visibility("default")))
#else
#define DAWN_NATIVE_EXPORT
#endif
#endif // defined(_WIN32)
#else // defined(DAWN_NATIVE_SHARED_LIBRARY)
#define DAWN_NATIVE_EXPORT
#endif // defined(DAWN_NATIVE_SHARED_LIBRARY)
#endif // INCLUDE_DAWN_NATIVE_DAWN_NATIVE_EXPORT_H_


@@ -24,91 +24,90 @@
namespace dawn::platform {
enum class TraceCategory {
    General,     // General trace events
    Validation,  // Dawn validation
    Recording,   // Native command recording
    GPUWork,     // Actual GPU work
};

class DAWN_PLATFORM_EXPORT CachingInterface {
  public:
    CachingInterface();
    virtual ~CachingInterface();

    // LoadData has two modes. The first mode is used to get a value which
    // corresponds to the |key|. The |valueOut| is a caller provided buffer
    // allocated to the size |valueSize| which is loaded with data of the
    // size returned. The second mode is used to query for the existence of
    // the |key| where |valueOut| is nullptr and |valueSize| must be 0.
    // The return size is non-zero if the |key| exists.
    virtual size_t LoadData(const WGPUDevice device,
                            const void* key,
                            size_t keySize,
                            void* valueOut,
                            size_t valueSize) = 0;

    // StoreData puts a |value| in the cache which corresponds to the |key|.
    virtual void StoreData(const WGPUDevice device,
                           const void* key,
                           size_t keySize,
                           const void* value,
                           size_t valueSize) = 0;

  private:
    CachingInterface(const CachingInterface&) = delete;
    CachingInterface& operator=(const CachingInterface&) = delete;
};

class DAWN_PLATFORM_EXPORT WaitableEvent {
  public:
    WaitableEvent() = default;
    virtual ~WaitableEvent() = default;
    virtual void Wait() = 0;        // Wait for completion
    virtual bool IsComplete() = 0;  // Non-blocking check if the event is complete
};

using PostWorkerTaskCallback = void (*)(void* userdata);

class DAWN_PLATFORM_EXPORT WorkerTaskPool {
  public:
    WorkerTaskPool() = default;
    virtual ~WorkerTaskPool() = default;
    virtual std::unique_ptr<WaitableEvent> PostWorkerTask(PostWorkerTaskCallback,
                                                          void* userdata) = 0;
};

class DAWN_PLATFORM_EXPORT Platform {
  public:
    Platform();
    virtual ~Platform();

    virtual const unsigned char* GetTraceCategoryEnabledFlag(TraceCategory category);

    virtual double MonotonicallyIncreasingTime();

    virtual uint64_t AddTraceEvent(char phase,
                                   const unsigned char* categoryGroupEnabled,
                                   const char* name,
                                   uint64_t id,
                                   double timestamp,
                                   int numArgs,
                                   const char** argNames,
                                   const unsigned char* argTypes,
                                   const uint64_t* argValues,
                                   unsigned char flags);

    // The |fingerprint| is provided by Dawn to inform the client to discard the Dawn caches
    // when the fingerprint changes. The returned CachingInterface is expected to outlive the
    // device which uses it to persistently cache objects.
    virtual CachingInterface* GetCachingInterface(const void* fingerprint, size_t fingerprintSize);

    virtual std::unique_ptr<WorkerTaskPool> CreateWorkerTaskPool();

  private:
    Platform(const Platform&) = delete;
    Platform& operator=(const Platform&) = delete;
};
} // namespace dawn::platform
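
For orientation, a minimal sketch of an embedder-side Platform implementation against the interface above; the header path and the choice of which hooks to override are assumptions.

#include <cstddef>

#include "dawn/platform/DawnPlatform.h"  // assumed header path for the declarations above

// Hypothetical embedder platform: overrides only two of the virtual hooks and keeps
// the default behavior for the rest.
class MyPlatform : public dawn::platform::Platform {
  public:
    double MonotonicallyIncreasingTime() override {
        return 0.0;  // placeholder; a real embedder returns a monotonically increasing clock value
    }

    dawn::platform::CachingInterface* GetCachingInterface(const void* fingerprint,
                                                          size_t fingerprintSize) override {
        return nullptr;  // no persistent cache in this sketch
    }
};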


@@ -16,21 +16,21 @@
#define INCLUDE_DAWN_PLATFORM_DAWN_PLATFORM_EXPORT_H_
#if defined(DAWN_PLATFORM_SHARED_LIBRARY)
#if defined(_WIN32)
#if defined(DAWN_PLATFORM_IMPLEMENTATION)
#define DAWN_PLATFORM_EXPORT __declspec(dllexport)
#else
#define DAWN_PLATFORM_EXPORT __declspec(dllimport)
#endif
#else // defined(_WIN32)
#if defined(DAWN_PLATFORM_IMPLEMENTATION)
#define DAWN_PLATFORM_EXPORT __attribute__((visibility("default")))
#else
#define DAWN_PLATFORM_EXPORT
#endif
#endif // defined(_WIN32)
#else // defined(DAWN_PLATFORM_SHARED_LIBRARY)
#define DAWN_PLATFORM_EXPORT
#endif // defined(DAWN_PLATFORM_SHARED_LIBRARY)
#endif // INCLUDE_DAWN_PLATFORM_DAWN_PLATFORM_EXPORT_H_


@@ -23,53 +23,52 @@
namespace dawn::wire {
class DAWN_WIRE_EXPORT CommandSerializer {
  public:
    CommandSerializer();
    virtual ~CommandSerializer();
    CommandSerializer(const CommandSerializer& rhs) = delete;
    CommandSerializer& operator=(const CommandSerializer& rhs) = delete;

    // Get space for serializing commands.
    // GetCmdSpace will never be called with a value larger than
    // what GetMaximumAllocationSize returns. Return nullptr to indicate
    // a fatal error.
    virtual void* GetCmdSpace(size_t size) = 0;
    virtual bool Flush() = 0;
    virtual size_t GetMaximumAllocationSize() const = 0;
    virtual void OnSerializeError();
};

class DAWN_WIRE_EXPORT CommandHandler {
  public:
    CommandHandler();
    virtual ~CommandHandler();
    CommandHandler(const CommandHandler& rhs) = delete;
    CommandHandler& operator=(const CommandHandler& rhs) = delete;

    virtual const volatile char* HandleCommands(const volatile char* commands, size_t size) = 0;
};

DAWN_WIRE_EXPORT size_t
SerializedWGPUDevicePropertiesSize(const WGPUDeviceProperties* deviceProperties);

DAWN_WIRE_EXPORT void SerializeWGPUDeviceProperties(const WGPUDeviceProperties* deviceProperties,
                                                    char* serializeBuffer);

DAWN_WIRE_EXPORT bool DeserializeWGPUDeviceProperties(WGPUDeviceProperties* deviceProperties,
                                                      const volatile char* deserializeBuffer,
                                                      size_t deserializeBufferSize);

DAWN_WIRE_EXPORT size_t
SerializedWGPUSupportedLimitsSize(const WGPUSupportedLimits* supportedLimits);

DAWN_WIRE_EXPORT void SerializeWGPUSupportedLimits(const WGPUSupportedLimits* supportedLimits,
                                                   char* serializeBuffer);

DAWN_WIRE_EXPORT bool DeserializeWGPUSupportedLimits(WGPUSupportedLimits* supportedLimits,
                                                     const volatile char* deserializeBuffer,
                                                     size_t deserializeBufferSize);
} // namespace dawn::wire
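
A small sketch of round-tripping device properties through the free functions above; the header path and helper names are assumptions.

#include <vector>

#include "dawn/wire/Wire.h"  // assumed header path for the declarations above

// Serialize WGPUDeviceProperties into a byte buffer sized by the helper above.
std::vector<char> SerializeDeviceProperties(const WGPUDeviceProperties& props) {
    std::vector<char> buffer(dawn::wire::SerializedWGPUDevicePropertiesSize(&props));
    dawn::wire::SerializeWGPUDeviceProperties(&props, buffer.data());
    return buffer;
}

// Deserialize back into a caller-provided struct; returns false on malformed input.
bool DeserializeDeviceProperties(const std::vector<char>& buffer, WGPUDeviceProperties* props) {
    return dawn::wire::DeserializeWGPUDeviceProperties(props, buffer.data(), buffer.size());
}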


@@ -23,160 +23,158 @@
namespace dawn::wire {
namespace client {
class Client;
class MemoryTransferService;
namespace client {
class Client;
class MemoryTransferService;
DAWN_WIRE_EXPORT const DawnProcTable& GetProcs();
} // namespace client
DAWN_WIRE_EXPORT const DawnProcTable& GetProcs();
} // namespace client
struct ReservedTexture {
WGPUTexture texture;
uint32_t id;
uint32_t generation;
uint32_t deviceId;
uint32_t deviceGeneration;
};
struct ReservedTexture {
WGPUTexture texture;
uint32_t id;
uint32_t generation;
uint32_t deviceId;
uint32_t deviceGeneration;
};
struct ReservedSwapChain {
WGPUSwapChain swapchain;
uint32_t id;
uint32_t generation;
uint32_t deviceId;
uint32_t deviceGeneration;
};
struct ReservedSwapChain {
WGPUSwapChain swapchain;
uint32_t id;
uint32_t generation;
uint32_t deviceId;
uint32_t deviceGeneration;
};
struct ReservedDevice {
WGPUDevice device;
uint32_t id;
uint32_t generation;
};
struct ReservedDevice {
WGPUDevice device;
uint32_t id;
uint32_t generation;
};
struct ReservedInstance {
WGPUInstance instance;
uint32_t id;
uint32_t generation;
};
struct ReservedInstance {
WGPUInstance instance;
uint32_t id;
uint32_t generation;
};
struct DAWN_WIRE_EXPORT WireClientDescriptor {
CommandSerializer* serializer;
client::MemoryTransferService* memoryTransferService = nullptr;
};
struct DAWN_WIRE_EXPORT WireClientDescriptor {
CommandSerializer* serializer;
client::MemoryTransferService* memoryTransferService = nullptr;
};
class DAWN_WIRE_EXPORT WireClient : public CommandHandler {
  public:
    explicit WireClient(const WireClientDescriptor& descriptor);
    ~WireClient() override;

    const volatile char* HandleCommands(const volatile char* commands, size_t size) final;

    ReservedTexture ReserveTexture(WGPUDevice device);
    ReservedSwapChain ReserveSwapChain(WGPUDevice device);
    ReservedDevice ReserveDevice();
    ReservedInstance ReserveInstance();

    void ReclaimTextureReservation(const ReservedTexture& reservation);
    void ReclaimSwapChainReservation(const ReservedSwapChain& reservation);
    void ReclaimDeviceReservation(const ReservedDevice& reservation);
    void ReclaimInstanceReservation(const ReservedInstance& reservation);

    // Disconnects the client.
    // Commands allocated after this point will not be sent.
    void Disconnect();

  private:
    std::unique_ptr<client::Client> mImpl;
};

namespace client {
class DAWN_WIRE_EXPORT MemoryTransferService {
  public:
    MemoryTransferService();
    virtual ~MemoryTransferService();

    class ReadHandle;
    class WriteHandle;

    // Create a handle for reading server data.
    // This may fail and return nullptr.
    virtual ReadHandle* CreateReadHandle(size_t) = 0;

    // Create a handle for writing server data.
    // This may fail and return nullptr.
    virtual WriteHandle* CreateWriteHandle(size_t) = 0;

    class DAWN_WIRE_EXPORT ReadHandle {
      public:
        ReadHandle();
        virtual ~ReadHandle();

        // Get the required serialization size for SerializeCreate.
        virtual size_t SerializeCreateSize() = 0;

        // Serialize the handle into |serializePointer| so it can be received by the server.
        virtual void SerializeCreate(void* serializePointer) = 0;

        // Simply return the base address of the allocation (without applying any offset).
        // Returns nullptr if the allocation failed.
        // The data must live at least until the ReadHandle is destructed.
        virtual const void* GetData() = 0;

        // Gets called when a MapReadCallback resolves. Deserializes the data update and
        // applies it to the range (offset, offset + size) of the allocation.
        // There could be nothing to be deserialized (if using shared memory).
        // Needs to check for potential offset/size OOB and overflow.
        virtual bool DeserializeDataUpdate(const void* deserializePointer,
                                           size_t deserializeSize,
                                           size_t offset,
                                           size_t size) = 0;

      private:
        ReadHandle(const ReadHandle&) = delete;
        ReadHandle& operator=(const ReadHandle&) = delete;
    };

    class DAWN_WIRE_EXPORT WriteHandle {
      public:
        WriteHandle();
        virtual ~WriteHandle();

        // Get the required serialization size for SerializeCreate.
        virtual size_t SerializeCreateSize() = 0;

        // Serialize the handle into |serializePointer| so it can be received by the server.
        virtual void SerializeCreate(void* serializePointer) = 0;

        // Simply return the base address of the allocation (without applying any offset).
        // The data returned should be zero-initialized.
        // The data returned must live at least until the WriteHandle is destructed.
        // On failure, the pointer returned should be null.
        virtual void* GetData() = 0;

        // Get the required serialization size for SerializeDataUpdate.
        virtual size_t SizeOfSerializeDataUpdate(size_t offset, size_t size) = 0;

        // Serialize a command to send the modified contents of the subrange
        // (offset, offset + size) of the allocation at buffer unmap.
        // This subrange is always the whole mapped region for now.
        // There could be nothing to be serialized (if using shared memory).
        virtual void SerializeDataUpdate(void* serializePointer, size_t offset, size_t size) = 0;

      private:
        WriteHandle(const WriteHandle&) = delete;
        WriteHandle& operator=(const WriteHandle&) = delete;
    };

  private:
    MemoryTransferService(const MemoryTransferService&) = delete;
    MemoryTransferService& operator=(const MemoryTransferService&) = delete;
};

// Backdoor to get the order of the ProcMap for testing.
DAWN_WIRE_EXPORT std::vector<const char*> GetProcMapNamesForTesting();
}  // namespace client
} // namespace dawn::wire
#endif // INCLUDE_DAWN_WIRE_WIRECLIENT_H_
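
For reference while reviewing the reformatted header above, here is a minimal sketch of how an embedder drives the client side of the wire. It is illustrative only: SetUpClientWire is an invented name, the serializer comes from the embedder's own CommandSerializer implementation, and a real embedder would keep the WireClient alive for the lifetime of the connection rather than on the stack.

#include "dawn/wire/WireClient.h"

// Sketch only; not part of the Dawn sources being reformatted in this change.
void SetUpClientWire(dawn::wire::CommandSerializer* serializer) {
    dawn::wire::WireClientDescriptor clientDesc = {};
    clientDesc.serializer = serializer;
    // memoryTransferService is left as nullptr, so the wire falls back to its
    // built-in (inline) transfer path.

    dawn::wire::WireClient wireClient(clientDesc);

    // Reserve IDs for objects the embedder will create for real on the server side.
    dawn::wire::ReservedDevice reservedDevice = wireClient.ReserveDevice();
    dawn::wire::ReservedTexture reservedTexture = wireClient.ReserveTexture(reservedDevice.device);

    // The id/generation pairs inside the reservations are what the server needs for
    // InjectDevice/InjectTexture. If injection never happens, return the IDs:
    wireClient.ReclaimTextureReservation(reservedTexture);
    wireClient.ReclaimDeviceReservation(reservedDevice);

    // Once the transport is gone, stop sending commands.
    wireClient.Disconnect();
}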

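The client-side MemoryTransferService is the extension point the comments above describe. The sketch below shows the shape of a trivial copy-based implementation, assuming fixed-size heap buffers and no shared memory; the class names are invented, the bounds checks are deliberately simple, and this is not Dawn's actual default service.

#include <cstring>
#include <vector>

#include "dawn/wire/WireClient.h"

namespace {

// Hypothetical copy-based handles: mapped data travels inside the wire commands themselves.
class CopyReadHandle final : public dawn::wire::client::MemoryTransferService::ReadHandle {
  public:
    explicit CopyReadHandle(size_t size) : mData(size) {}

    size_t SerializeCreateSize() override { return 0; }  // nothing extra to send on creation
    void SerializeCreate(void*) override {}
    const void* GetData() override { return mData.data(); }

    bool DeserializeDataUpdate(const void* deserializePointer,
                               size_t deserializeSize,
                               size_t offset,
                               size_t size) override {
        // Reject OOB ranges before copying the server's bytes into the local buffer.
        if (offset > mData.size() || size > mData.size() - offset || deserializeSize < size) {
            return false;
        }
        std::memcpy(mData.data() + offset, deserializePointer, size);
        return true;
    }

  private:
    std::vector<char> mData;
};

class CopyWriteHandle final : public dawn::wire::client::MemoryTransferService::WriteHandle {
  public:
    explicit CopyWriteHandle(size_t size) : mData(size) {}  // zero-initialized, as required

    size_t SerializeCreateSize() override { return 0; }
    void SerializeCreate(void*) override {}
    void* GetData() override { return mData.data(); }

    size_t SizeOfSerializeDataUpdate(size_t, size_t size) override { return size; }
    void SerializeDataUpdate(void* serializePointer, size_t offset, size_t size) override {
        // Copy the modified subrange into the command payload for the server.
        std::memcpy(serializePointer, mData.data() + offset, size);
    }

  private:
    std::vector<char> mData;
};

class CopyMemoryTransferService final : public dawn::wire::client::MemoryTransferService {
  public:
    ReadHandle* CreateReadHandle(size_t size) override { return new CopyReadHandle(size); }
    WriteHandle* CreateWriteHandle(size_t size) override { return new CopyWriteHandle(size); }
};

}  // namespace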
View File

@@ -23,126 +23,126 @@ struct DawnProcTable;
namespace dawn::wire {
namespace server {
class Server;
class MemoryTransferService;
}  // namespace server

struct DAWN_WIRE_EXPORT WireServerDescriptor {
    const DawnProcTable* procs;
    CommandSerializer* serializer;
    server::MemoryTransferService* memoryTransferService = nullptr;
};
class DAWN_WIRE_EXPORT WireServer : public CommandHandler {
  public:
    explicit WireServer(const WireServerDescriptor& descriptor);
    ~WireServer() override;

    const volatile char* HandleCommands(const volatile char* commands, size_t size) final;

    bool InjectTexture(WGPUTexture texture,
                       uint32_t id,
                       uint32_t generation,
                       uint32_t deviceId,
                       uint32_t deviceGeneration);
    bool InjectSwapChain(WGPUSwapChain swapchain,
                         uint32_t id,
                         uint32_t generation,
                         uint32_t deviceId,
                         uint32_t deviceGeneration);
    bool InjectDevice(WGPUDevice device, uint32_t id, uint32_t generation);
    bool InjectInstance(WGPUInstance instance, uint32_t id, uint32_t generation);

    // Look up a device by (id, generation) pair. Returns nullptr if the generation
    // has expired or the id is not found.
    // The Wire does not have destroy hooks to allow an embedder to observe when an object
    // has been destroyed, but in Chrome, we need to know the list of live devices so we
    // can call device.Tick() on all of them periodically to ensure progress on asynchronous
    // work is made. Getting this list can be done by tracking the (id, generation) of
    // previously injected devices, and observing if GetDevice(id, generation) returns non-null.
    WGPUDevice GetDevice(uint32_t id, uint32_t generation);

  private:
    std::unique_ptr<server::Server> mImpl;
};

namespace server {
class DAWN_WIRE_EXPORT MemoryTransferService {
  public:
    MemoryTransferService();
    virtual ~MemoryTransferService();

    class ReadHandle;
    class WriteHandle;

    // Deserialize data to create Read/Write handles. These handles are for the client
    // to Read/Write data.
    virtual bool DeserializeReadHandle(const void* deserializePointer,
                                       size_t deserializeSize,
                                       ReadHandle** readHandle) = 0;
    virtual bool DeserializeWriteHandle(const void* deserializePointer,
                                        size_t deserializeSize,
                                        WriteHandle** writeHandle) = 0;

    class DAWN_WIRE_EXPORT ReadHandle {
      public:
        ReadHandle();
        virtual ~ReadHandle();

        // Return the size of the command serialized if SerializeDataUpdate is called
        // with the same offset/size arguments.
        virtual size_t SizeOfSerializeDataUpdate(size_t offset, size_t size) = 0;

        // Gets called when a MapReadCallback resolves. Serializes the data update for the
        // range (offset, offset + size) into |serializePointer| to the client.
        // There could be nothing to be serialized (if using shared memory).
        virtual void SerializeDataUpdate(const void* data,
                                         size_t offset,
                                         size_t size,
                                         void* serializePointer) = 0;

      private:
        ReadHandle(const ReadHandle&) = delete;
        ReadHandle& operator=(const ReadHandle&) = delete;
    };

    class DAWN_WIRE_EXPORT WriteHandle {
      public:
        WriteHandle();
        virtual ~WriteHandle();

        // Set the target for writes from the client. DeserializeFlush should copy data
        // into the target.
        void SetTarget(void* data);
        // Set the staging data length for OOB checks.
        void SetDataLength(size_t dataLength);

        // This function takes in the serialized result of
        // client::MemoryTransferService::WriteHandle::SerializeDataUpdate.
        // Needs to check for potential offset/size OOB and overflow.
        virtual bool DeserializeDataUpdate(const void* deserializePointer,
                                           size_t deserializeSize,
                                           size_t offset,
                                           size_t size) = 0;

      protected:
        void* mTargetData = nullptr;
        size_t mDataLength = 0;

      private:
        WriteHandle(const WriteHandle&) = delete;
        WriteHandle& operator=(const WriteHandle&) = delete;
    };

  private:
    MemoryTransferService(const MemoryTransferService&) = delete;
    MemoryTransferService& operator=(const MemoryTransferService&) = delete;
};
}  // namespace server
} // namespace dawn::wire
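
On the server side, the injection entry points and GetDevice() above are typically used together. Below is a hedged sketch of the device-ticking pattern the GetDevice() comment describes; TickLiveDevices and injectedDevices are invented names, and the proc-table include path is assumed from this commit's include/ layout.

#include <cstdint>
#include <utility>
#include <vector>

#include "dawn/dawn_proc_table.h"  // assumed path for the full DawnProcTable definition
#include "dawn/wire/WireServer.h"

// Track the (id, generation) of every injected device and periodically tick the
// ones that are still alive so asynchronous work keeps making progress.
void TickLiveDevices(dawn::wire::WireServer* server,
                     const std::vector<std::pair<uint32_t, uint32_t>>& injectedDevices,
                     const DawnProcTable& procs) {
    for (const auto& idAndGeneration : injectedDevices) {
        WGPUDevice device = server->GetDevice(idAndGeneration.first, idAndGeneration.second);
        if (device != nullptr) {
            procs.deviceTick(device);  // assumes the embedder's proc table exposes deviceTick
        }
    }
}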

View File

@@ -16,21 +16,21 @@
#define INCLUDE_DAWN_WIRE_DAWN_WIRE_EXPORT_H_
#if defined(DAWN_WIRE_SHARED_LIBRARY)
#if defined(_WIN32)
#if defined(DAWN_WIRE_IMPLEMENTATION)
#define DAWN_WIRE_EXPORT __declspec(dllexport)
#else
#define DAWN_WIRE_EXPORT __declspec(dllimport)
#endif
#else  // defined(_WIN32)
#if defined(DAWN_WIRE_IMPLEMENTATION)
#define DAWN_WIRE_EXPORT __attribute__((visibility("default")))
#else
#define DAWN_WIRE_EXPORT
#endif
#endif  // defined(_WIN32)
#else  // defined(DAWN_WIRE_SHARED_LIBRARY)
#define DAWN_WIRE_EXPORT
#endif // defined(DAWN_WIRE_SHARED_LIBRARY)
#endif // INCLUDE_DAWN_WIRE_DAWN_WIRE_EXPORT_H_
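
A small illustration of how the macro block above is meant to be consumed; the class name below is invented for the example and the build-flag description is a summary, not a prescription for any particular build system.

#include "dawn/wire/dawn_wire_export.h"

// When dawn_wire is built as a shared library, the build defines
// DAWN_WIRE_SHARED_LIBRARY for every user of these headers and
// DAWN_WIRE_IMPLEMENTATION only while compiling the library itself, so the same
// annotation exports the symbol on one side and imports it on the other.
// In a static build the macro expands to nothing.
class DAWN_WIRE_EXPORT HypotheticalWireHelper {  // illustrative name, not a Dawn type
  public:
    void DoSomething();
};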

View File

@@ -1,2 +0,0 @@
# http://clang.llvm.org/docs/ClangFormatStyleOptions.html
BasedOnStyle: Chromium