blob: 087359686fe6529042581ae3fb1a522c56c8b609 [file] [log] [blame] [edit]
// Copyright 2018 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "dawn/native/Instance.h"
#include <utility>
#include "dawn/common/Assert.h"
#include "dawn/common/GPUInfo.h"
#include "dawn/common/Log.h"
#include "dawn/common/SystemUtils.h"
#include "dawn/native/CallbackTaskManager.h"
#include "dawn/native/ChainUtils_autogen.h"
#include "dawn/native/Device.h"
#include "dawn/native/ErrorData.h"
#include "dawn/native/Surface.h"
#include "dawn/native/Toggles.h"
#include "dawn/native/ValidationUtils_autogen.h"
#include "dawn/platform/DawnPlatform.h"
// For SwiftShader fallback
#if defined(DAWN_ENABLE_BACKEND_VULKAN)
#include "dawn/native/VulkanBackend.h"
#endif // defined(DAWN_ENABLE_BACKEND_VULKAN)
#if defined(DAWN_USE_X11)
#include "dawn/native/XlibXcbFunctions.h"
#endif // defined(DAWN_USE_X11)
#include <optional>
namespace dawn::native {
// Forward definitions of each backend's "Connect" function that creates new BackendConnection.
// Conditionally compiled declarations are used to avoid using static constructors instead.
// Each Connect() returns a heap-allocated BackendConnection owned by the caller, or may
// return nullptr on failure (see EnsureBackendConnection's Register lambda).
#if defined(DAWN_ENABLE_BACKEND_D3D11)
namespace d3d11 {
BackendConnection* Connect(InstanceBase* instance);
}
#endif // defined(DAWN_ENABLE_BACKEND_D3D11)
#if defined(DAWN_ENABLE_BACKEND_D3D12)
namespace d3d12 {
BackendConnection* Connect(InstanceBase* instance);
}
#endif // defined(DAWN_ENABLE_BACKEND_D3D12)
#if defined(DAWN_ENABLE_BACKEND_METAL)
namespace metal {
BackendConnection* Connect(InstanceBase* instance);
}
#endif // defined(DAWN_ENABLE_BACKEND_METAL)
#if defined(DAWN_ENABLE_BACKEND_NULL)
namespace null {
BackendConnection* Connect(InstanceBase* instance);
}
#endif // defined(DAWN_ENABLE_BACKEND_NULL)
#if defined(DAWN_ENABLE_BACKEND_OPENGL)
// The OpenGL connector is shared between desktop GL and GLES; the backendType
// argument selects which flavor to connect.
namespace opengl {
BackendConnection* Connect(InstanceBase* instance, wgpu::BackendType backendType);
}
#endif // defined(DAWN_ENABLE_BACKEND_OPENGL)
#if defined(DAWN_ENABLE_BACKEND_VULKAN)
namespace vulkan {
BackendConnection* Connect(InstanceBase* instance);
}
#endif // defined(DAWN_ENABLE_BACKEND_VULKAN)
namespace {
// Returns the set of backends that were compiled into this build of Dawn.
// Purely a compile-time property; it does not check runtime availability.
BackendsBitset GetEnabledBackends() {
BackendsBitset enabledBackends;
#if defined(DAWN_ENABLE_BACKEND_NULL)
enabledBackends.set(wgpu::BackendType::Null);
#endif // defined(DAWN_ENABLE_BACKEND_NULL)
#if defined(DAWN_ENABLE_BACKEND_D3D11)
enabledBackends.set(wgpu::BackendType::D3D11);
#endif // defined(DAWN_ENABLE_BACKEND_D3D11)
#if defined(DAWN_ENABLE_BACKEND_D3D12)
enabledBackends.set(wgpu::BackendType::D3D12);
#endif // defined(DAWN_ENABLE_BACKEND_D3D12)
#if defined(DAWN_ENABLE_BACKEND_METAL)
enabledBackends.set(wgpu::BackendType::Metal);
#endif // defined(DAWN_ENABLE_BACKEND_METAL)
#if defined(DAWN_ENABLE_BACKEND_VULKAN)
enabledBackends.set(wgpu::BackendType::Vulkan);
#endif // defined(DAWN_ENABLE_BACKEND_VULKAN)
#if defined(DAWN_ENABLE_BACKEND_DESKTOP_GL)
enabledBackends.set(wgpu::BackendType::OpenGL);
#endif // defined(DAWN_ENABLE_BACKEND_DESKTOP_GL)
#if defined(DAWN_ENABLE_BACKEND_OPENGLES)
enabledBackends.set(wgpu::BackendType::OpenGLES);
#endif // defined(DAWN_ENABLE_BACKEND_OPENGLES)
return enabledBackends;
}
// Returns the platform's caching interface, or nullptr when no platform was provided.
dawn::platform::CachingInterface* GetCachingInterface(dawn::platform::Platform* platform) {
    return platform == nullptr ? nullptr : platform->GetCachingInterface();
}
} // anonymous namespace
// C-API entry point for wgpuCreateInstance(): creates the instance and hands ownership
// of one external reference to the caller via Detach().
InstanceBase* APICreateInstance(const InstanceDescriptor* descriptor) {
return InstanceBase::Create(descriptor).Detach();
}
// InstanceBase
// static
// Builds an InstanceBase from an (optional) descriptor. Returns nullptr when
// initialization fails; the error is logged via ConsumedError.
Ref<InstanceBase> InstanceBase::Create(const InstanceDescriptor* descriptor) {
    // Substitute an empty descriptor when the caller passes none.
    static constexpr InstanceDescriptor kEmptyDescriptor = {};
    if (descriptor == nullptr) {
        descriptor = &kEmptyDescriptor;
    }

    // Build the instance-stage toggles state from a chained toggles descriptor, if any.
    const DawnTogglesDescriptor* togglesDesc = nullptr;
    FindInChain(descriptor->nextInChain, &togglesDesc);
    TogglesState toggles =
        TogglesState::CreateFromTogglesDescriptor(togglesDesc, ToggleStage::Instance);
    // AllowUnsafeAPIs is disabled by default; adapters and devices created by this
    // instance inherit the setting unless they override it.
    toggles.Default(Toggle::AllowUnsafeAPIs, false);

    Ref<InstanceBase> instance = AcquireRef(new InstanceBase(toggles));
    if (instance->ConsumedError(instance->Initialize(descriptor))) {
        return nullptr;
    }
    return instance;
}
// The instance-stage toggles state is fixed at construction time.
InstanceBase::InstanceBase(const TogglesState& instanceToggles) : mToggles(instanceToggles) {}
InstanceBase::~InstanceBase() = default;
void InstanceBase::WillDropLastExternalRef() {
// InstanceBase uses RefCountedWithExternalCount to break refcycles.
//
// InstanceBase holds Refs to the PhysicalDeviceBases it has discovered, which hold Refs
// back to the InstanceBase. When the application drops its last external ref, clearing
// mPhysicalDevices severs those back-references and breaks the cycle, preventing a leak.
mPhysicalDevices.clear();
}
// TODO(crbug.com/dawn/832): make the platform an initialization parameter of the instance.
// Validates the descriptor chain, gathers runtime library search paths, and sets up the
// callback task manager and platform. Returns an error on invalid chained structs.
MaybeError InstanceBase::Initialize(const InstanceDescriptor* descriptor) {
    DAWN_TRY(ValidateSTypes(descriptor->nextInChain, {{wgpu::SType::DawnInstanceDescriptor},
                                                      {wgpu::SType::DawnTogglesDescriptor}}));

    const DawnInstanceDescriptor* dawnInstanceDesc = nullptr;
    FindInChain(descriptor->nextInChain, &dawnInstanceDesc);

    // Caller-provided search paths are appended first, so they take priority over the
    // defaults below.
    if (dawnInstanceDesc != nullptr) {
        const uint32_t pathCount = dawnInstanceDesc->additionalRuntimeSearchPathsCount;
        for (uint32_t idx = 0; idx < pathCount; ++idx) {
            mRuntimeSearchPaths.push_back(dawnInstanceDesc->additionalRuntimeSearchPaths[idx]);
        }
    }

    // Default paths to search: next to the shared library, next to the executable, and
    // the empty path (just libvulkan.so, letting the loader search on its own).
    if (auto moduleDir = GetModuleDirectory()) {
        mRuntimeSearchPaths.push_back(std::move(*moduleDir));
    }
    if (auto exeDir = GetExecutableDirectory()) {
        mRuntimeSearchPaths.push_back(std::move(*exeDir));
    }
    mRuntimeSearchPaths.emplace_back();

    mCallbackTaskManager = AcquireRef(new CallbackTaskManager());

    // Initialize the platform to the default for now.
    mDefaultPlatform = std::make_unique<dawn::platform::Platform>();
    SetPlatform(dawnInstanceDesc != nullptr ? dawnInstanceDesc->platform
                                            : mDefaultPlatform.get());

    return {};
}
// C-API entry point: selects an adapter matching `options` and reports it through
// `callback`. On failure the callback receives an error status and a message.
void InstanceBase::APIRequestAdapter(const RequestAdapterOptions* options,
                                     WGPURequestAdapterCallback callback,
                                     void* userdata) {
    // Substitute default options when the caller passes none.
    static constexpr RequestAdapterOptions kDefaultOptions = {};
    auto adapterResult =
        RequestAdapterInternal(options != nullptr ? options : &kDefaultOptions);

    if (!adapterResult.IsError()) {
        Ref<AdapterBase> adapter = adapterResult.AcquireSuccess();
        // TODO(crbug.com/dawn/1122): Call callbacks only on wgpuInstanceProcessEvents
        callback(WGPURequestAdapterStatus_Success, ToAPI(adapter.Detach()), nullptr, userdata);
        return;
    }

    auto error = adapterResult.AcquireError();
    std::string message = error->GetFormattedMessage();
    // TODO(crbug.com/dawn/1122): Call callbacks only on wgpuInstanceProcessEvents
    callback(WGPURequestAdapterStatus_Error, nullptr, message.c_str(), userdata);
}
// Core adapter-selection logic behind APIRequestAdapter. Discovers physical devices
// (all enabled backends, or only the Vulkan SwiftShader fallback when
// options->forceFallbackAdapter is set), then picks one according to the requested
// power preference. Returns a null Ref (not an error) when no suitable adapter exists.
ResultOrError<Ref<AdapterBase>> InstanceBase::RequestAdapterInternal(
const RequestAdapterOptions* options) {
ASSERT(options != nullptr);
if (options->forceFallbackAdapter) {
#if defined(DAWN_ENABLE_BACKEND_VULKAN)
if (GetEnabledBackends()[wgpu::BackendType::Vulkan]) {
// For the fallback path, discover only the SwiftShader (software) Vulkan device.
dawn_native::vulkan::PhysicalDeviceDiscoveryOptions vulkanOptions;
vulkanOptions.forceSwiftShader = true;
MaybeError result = DiscoverPhysicalDevicesInternal(&vulkanOptions);
if (result.IsError()) {
// Discovery failure is non-fatal: warn and report "no adapter".
dawn::WarningLog() << absl::StrFormat(
"Skipping Vulkan Swiftshader adapter because initialization failed: %s",
result.AcquireError()->GetFormattedMessage());
return Ref<AdapterBase>(nullptr);
}
}
#else
// Without the Vulkan backend there is no fallback adapter to offer.
return Ref<AdapterBase>(nullptr);
#endif // defined(DAWN_ENABLE_BACKEND_VULKAN)
} else {
DiscoverDefaultPhysicalDevices();
}
// Map the power preference to a preferred adapter type; Undefined and
// HighPerformance both prefer a discrete GPU.
wgpu::AdapterType preferredType;
switch (options->powerPreference) {
case wgpu::PowerPreference::LowPower:
preferredType = wgpu::AdapterType::IntegratedGPU;
break;
case wgpu::PowerPreference::Undefined:
case wgpu::PowerPreference::HighPerformance:
preferredType = wgpu::AdapterType::DiscreteGPU;
break;
}
// Record an index per adapter type seen, used as fallbacks if no device of the
// preferred type is found.
std::optional<size_t> discreteGPUAdapterIndex;
std::optional<size_t> integratedGPUAdapterIndex;
std::optional<size_t> cpuAdapterIndex;
std::optional<size_t> unknownAdapterIndex;
Ref<PhysicalDeviceBase> selectedPhysicalDevice;
FeatureLevel featureLevel =
options->compatibilityMode ? FeatureLevel::Compatibility : FeatureLevel::Core;
for (size_t i = 0; i < mPhysicalDevices.size(); ++i) {
// Skip devices that cannot support the requested feature level.
if (!mPhysicalDevices[i]->SupportsFeatureLevel(featureLevel)) {
continue;
}
if (options->forceFallbackAdapter) {
// The fallback adapter must be Google SwiftShader specifically.
if (!gpu_info::IsGoogleSwiftshader(mPhysicalDevices[i]->GetVendorId(),
mPhysicalDevices[i]->GetDeviceId())) {
continue;
}
selectedPhysicalDevice = mPhysicalDevices[i];
break;
}
// Exact match on the preferred type wins immediately.
if (mPhysicalDevices[i]->GetAdapterType() == preferredType) {
selectedPhysicalDevice = mPhysicalDevices[i];
break;
}
switch (mPhysicalDevices[i]->GetAdapterType()) {
case wgpu::AdapterType::DiscreteGPU:
discreteGPUAdapterIndex = i;
break;
case wgpu::AdapterType::IntegratedGPU:
integratedGPUAdapterIndex = i;
break;
case wgpu::AdapterType::CPU:
cpuAdapterIndex = i;
break;
case wgpu::AdapterType::Unknown:
unknownAdapterIndex = i;
break;
}
}
// For now, we always prefer the discrete GPU
// Fallback order: discrete > integrated > CPU > unknown.
if (selectedPhysicalDevice == nullptr) {
if (discreteGPUAdapterIndex) {
selectedPhysicalDevice = mPhysicalDevices[*discreteGPUAdapterIndex];
} else if (integratedGPUAdapterIndex) {
selectedPhysicalDevice = mPhysicalDevices[*integratedGPUAdapterIndex];
} else if (cpuAdapterIndex) {
selectedPhysicalDevice = mPhysicalDevices[*cpuAdapterIndex];
} else if (unknownAdapterIndex) {
selectedPhysicalDevice = mPhysicalDevices[*unknownAdapterIndex];
}
}
if (selectedPhysicalDevice == nullptr) {
return Ref<AdapterBase>(nullptr);
}
// Set up toggles state for default adapters, currently adapter don't have a toggles
// descriptor so just inherit from instance toggles.
// TODO(dawn:1495): Handle the adapter toggles descriptor after implemented.
TogglesState adapterToggles = TogglesState(ToggleStage::Adapter);
adapterToggles.InheritFrom(mToggles);
return AcquireRef(
new AdapterBase(std::move(selectedPhysicalDevice), featureLevel, adapterToggles));
}
void InstanceBase::DiscoverDefaultPhysicalDevices() {
for (wgpu::BackendType b : IterateBitSet(GetEnabledBackends())) {
EnsureBackendConnection(b);
}
if (mDiscoveredDefaultAdapters) {
return;
}
// Query and merge all default adapters for all backends
for (std::unique_ptr<BackendConnection>& backend : mBackends) {
std::vector<Ref<PhysicalDeviceBase>> physicalDevices =
backend->DiscoverDefaultPhysicalDevices();
for (Ref<PhysicalDeviceBase>& physicalDevice : physicalDevices) {
ASSERT(physicalDevice->GetBackendType() == backend->GetType());
ASSERT(physicalDevice->GetInstance() == this);
mPhysicalDevices.push_back(std::move(physicalDevice));
}
}
mDiscoveredDefaultAdapters = true;
}
// This is just a wrapper around the real logic that uses Error.h error handling.
bool InstanceBase::DiscoverPhysicalDevices(const PhysicalDeviceDiscoveryOptionsBase* options) {
MaybeError result = DiscoverPhysicalDevicesInternal(options);
if (result.IsError()) {
dawn::WarningLog() << absl::StrFormat(
"Skipping %s adapter because initialization failed: %s", FromAPI(options->backendType),
result.AcquireError()->GetFormattedMessage());
return false;
}
return true;
}
// Returns the instance-stage toggles state captured at construction.
const TogglesState& InstanceBase::GetTogglesState() const {
return mToggles;
}
// Looks up metadata for a toggle by name; see TogglesInfo for lookup semantics.
const ToggleInfo* InstanceBase::GetToggleInfo(const char* toggleName) {
return mTogglesInfo.GetToggleInfo(toggleName);
}
// Maps a toggle name to its Toggle enum value.
Toggle InstanceBase::ToggleNameToEnum(const char* toggleName) {
return mTogglesInfo.ToggleNameToEnum(toggleName);
}
// Looks up metadata for a wgpu::FeatureName.
const FeatureInfo* InstanceBase::GetFeatureInfo(wgpu::FeatureName feature) {
return mFeaturesInfo.GetFeatureInfo(feature);
}
// Returns one adapter per (already-discovered physical device, supported feature
// level) pair. Does not trigger discovery itself.
std::vector<Ref<AdapterBase>> InstanceBase::GetAdapters() const {
    // Adapters currently have no toggles descriptor of their own, so their toggles
    // state is inherited straight from the instance toggles.
    // TODO(dawn:1495): Handle the adapter toggles descriptor after implemented.
    TogglesState inheritedToggles = TogglesState(ToggleStage::Adapter);
    inheritedToggles.InheritFrom(mToggles);

    std::vector<Ref<AdapterBase>> result;
    for (const auto& device : mPhysicalDevices) {
        for (FeatureLevel level : {FeatureLevel::Compatibility, FeatureLevel::Core}) {
            if (!device->SupportsFeatureLevel(level)) {
                continue;
            }
            result.push_back(AcquireRef(new AdapterBase(device, level, inheritedToggles)));
        }
    }
    return result;
}
// Lazily creates (at most once per backend) the BackendConnection for `backendType`
// and registers it in mBackends. Only backends compiled into this build have a switch
// case; requesting any other backend hits UNREACHABLE().
void InstanceBase::EnsureBackendConnection(wgpu::BackendType backendType) {
if (mBackendsConnected[backendType]) {
return;
}
// Registers a successfully created connection; nullptr means the backend failed to
// connect and is silently skipped.
auto Register = [this](BackendConnection* connection, wgpu::BackendType expectedType) {
if (connection != nullptr) {
ASSERT(connection->GetType() == expectedType);
ASSERT(connection->GetInstance() == this);
mBackends.push_back(std::unique_ptr<BackendConnection>(connection));
}
};
switch (backendType) {
#if defined(DAWN_ENABLE_BACKEND_NULL)
case wgpu::BackendType::Null:
Register(null::Connect(this), wgpu::BackendType::Null);
break;
#endif // defined(DAWN_ENABLE_BACKEND_NULL)
#if defined(DAWN_ENABLE_BACKEND_D3D11)
case wgpu::BackendType::D3D11:
Register(d3d11::Connect(this), wgpu::BackendType::D3D11);
break;
#endif // defined(DAWN_ENABLE_BACKEND_D3D11)
#if defined(DAWN_ENABLE_BACKEND_D3D12)
case wgpu::BackendType::D3D12:
Register(d3d12::Connect(this), wgpu::BackendType::D3D12);
break;
#endif // defined(DAWN_ENABLE_BACKEND_D3D12)
#if defined(DAWN_ENABLE_BACKEND_METAL)
case wgpu::BackendType::Metal:
Register(metal::Connect(this), wgpu::BackendType::Metal);
break;
#endif // defined(DAWN_ENABLE_BACKEND_METAL)
#if defined(DAWN_ENABLE_BACKEND_VULKAN)
case wgpu::BackendType::Vulkan:
Register(vulkan::Connect(this), wgpu::BackendType::Vulkan);
break;
#endif // defined(DAWN_ENABLE_BACKEND_VULKAN)
#if defined(DAWN_ENABLE_BACKEND_DESKTOP_GL)
case wgpu::BackendType::OpenGL:
Register(opengl::Connect(this, wgpu::BackendType::OpenGL), wgpu::BackendType::OpenGL);
break;
#endif // defined(DAWN_ENABLE_BACKEND_DESKTOP_GL)
#if defined(DAWN_ENABLE_BACKEND_OPENGLES)
case wgpu::BackendType::OpenGLES:
Register(opengl::Connect(this, wgpu::BackendType::OpenGLES),
wgpu::BackendType::OpenGLES);
break;
#endif // defined(DAWN_ENABLE_BACKEND_OPENGLES)
default:
UNREACHABLE();
}
// Marked connected even if Connect() returned nullptr, so a failed backend is not
// retried on every call.
mBackendsConnected.set(backendType);
}
// Validates the requested backend, ensures its connection exists, then asks every
// matching connection to discover physical devices per `options`, merging the results
// into mPhysicalDevices. Errors if the backend is not compiled in or not available.
MaybeError InstanceBase::DiscoverPhysicalDevicesInternal(
    const PhysicalDeviceDiscoveryOptionsBase* options) {
    wgpu::BackendType backendType = static_cast<wgpu::BackendType>(options->backendType);
    DAWN_TRY(ValidateBackendType(backendType));

    // The backend must have been compiled into this build.
    if (!GetEnabledBackends()[backendType]) {
        return DAWN_VALIDATION_ERROR("%s not supported.", backendType);
    }
    EnsureBackendConnection(backendType);

    bool matchedBackend = false;
    for (std::unique_ptr<BackendConnection>& connection : mBackends) {
        if (connection->GetType() != backendType) {
            continue;
        }
        matchedBackend = true;

        std::vector<Ref<PhysicalDeviceBase>> discovered;
        DAWN_TRY_ASSIGN(discovered, connection->DiscoverPhysicalDevices(options));
        for (Ref<PhysicalDeviceBase>& device : discovered) {
            ASSERT(device->GetBackendType() == connection->GetType());
            ASSERT(device->GetInstance() == this);
            mPhysicalDevices.push_back(std::move(device));
        }
    }
    DAWN_INVALID_IF(!matchedBackend, "%s not available.", backendType);
    return {};
}
// Consumes (logs) the error held by `maybeError`, if any. Returns true when an
// error was present.
bool InstanceBase::ConsumedError(MaybeError maybeError) {
    if (!maybeError.IsError()) {
        return false;
    }
    ConsumeError(maybeError.AcquireError());
    return true;
}
// Backend validation is considered enabled for any level other than Disabled.
bool InstanceBase::IsBackendValidationEnabled() const {
return mBackendValidationLevel != BackendValidationLevel::Disabled;
}
void InstanceBase::SetBackendValidationLevel(BackendValidationLevel level) {
mBackendValidationLevel = level;
}
BackendValidationLevel InstanceBase::GetBackendValidationLevel() const {
return mBackendValidationLevel;
}
// Controls whether a GPU capture should begin at startup (consumed by backends;
// exact semantics depend on the backend's capture tooling).
void InstanceBase::EnableBeginCaptureOnStartup(bool beginCaptureOnStartup) {
mBeginCaptureOnStartup = beginCaptureOnStartup;
}
bool InstanceBase::IsBeginCaptureOnStartupEnabled() const {
return mBeginCaptureOnStartup;
}
// Toggles use of the adapter blocklist (checked by callers via
// IsAdapterBlocklistEnabled).
void InstanceBase::EnableAdapterBlocklist(bool enable) {
mEnableAdapterBlocklist = enable;
}
bool InstanceBase::IsAdapterBlocklistEnabled() const {
return mEnableAdapterBlocklist;
}
// Sets the active platform, substituting the default platform when nullptr is passed,
// and rebuilds the blob cache from the platform's caching interface.
void InstanceBase::SetPlatform(dawn::platform::Platform* platform) {
    if (platform == nullptr) {
        mPlatform = mDefaultPlatform.get();
    } else {
        mPlatform = platform;
    }
    // Build the cache from the *effective* platform (mPlatform), not the raw argument.
    // Previously this used `platform`, so passing nullptr created a cache with a null
    // caching interface even though the default platform had been substituted above.
    mBlobCache = std::make_unique<BlobCache>(GetCachingInterface(mPlatform));
}
// Test-only alias for SetPlatform.
void InstanceBase::SetPlatformForTesting(dawn::platform::Platform* platform) {
SetPlatform(platform);
}
dawn::platform::Platform* InstanceBase::GetPlatform() {
return mPlatform;
}
// Returns the real blob cache when caching is enabled, otherwise the passthrough
// cache member.
BlobCache* InstanceBase::GetBlobCache(bool enabled) {
if (enabled) {
return mBlobCache.get();
}
return &mPassthroughBlobCache;
}
// Device bookkeeping. All access to mDevicesList is serialized by mDevicesListMutex.
uint64_t InstanceBase::GetDeviceCountForTesting() const {
std::lock_guard<std::mutex> lg(mDevicesListMutex);
return mDevicesList.size();
}
void InstanceBase::AddDevice(DeviceBase* device) {
std::lock_guard<std::mutex> lg(mDevicesListMutex);
mDevicesList.insert(device);
}
void InstanceBase::RemoveDevice(DeviceBase* device) {
std::lock_guard<std::mutex> lg(mDevicesListMutex);
mDevicesList.erase(device);
}
// Ticks every known device and flushes pending callback tasks. Returns true when
// more events remain (a device reported pending work, or callback tasks remain
// queued after the flush).
bool InstanceBase::APIProcessEvents() {
    // Snapshot the device list under the lock so APITick() runs without holding it.
    std::vector<Ref<DeviceBase>> devices;
    {
        std::lock_guard<std::mutex> lg(mDevicesListMutex);
        devices.reserve(mDevicesList.size());  // single allocation for the snapshot
        for (auto device : mDevicesList) {
            devices.push_back(device);
        }
    }

    bool hasMoreEvents = false;
    // Take each Ref by const reference: iterating by value copied the Ref and paid
    // a refcount increment/decrement per device (clang-tidy performance-for-range-copy).
    for (const auto& device : devices) {
        // Deliberately not short-circuited: every device must be ticked.
        hasMoreEvents = device->APITick() || hasMoreEvents;
    }

    mCallbackTaskManager->Flush();

    return hasMoreEvents || !mCallbackTaskManager->IsEmpty();
}
// Returns the runtime library search paths assembled in Initialize() (caller-provided
// paths first, then module dir, executable dir, and the empty path).
const std::vector<std::string>& InstanceBase::GetRuntimeSearchPaths() const {
return mRuntimeSearchPaths;
}
const Ref<CallbackTaskManager>& InstanceBase::GetCallbackTaskManager() const {
return mCallbackTaskManager;
}
// Logs the error; the instance has no other error-reporting channel.
void InstanceBase::ConsumeError(std::unique_ptr<ErrorData> error) {
ASSERT(error != nullptr);
dawn::ErrorLog() << error->GetFormattedMessage();
}
// Lazily creates the Xlib/XCB function table on first use. Only meaningful in builds
// with DAWN_USE_X11; calling this in any other build hits UNREACHABLE().
const XlibXcbFunctions* InstanceBase::GetOrCreateXlibXcbFunctions() {
#if defined(DAWN_USE_X11)
if (mXlibXcbFunctions == nullptr) {
mXlibXcbFunctions = std::make_unique<XlibXcbFunctions>();
}
return mXlibXcbFunctions.get();
#else
UNREACHABLE();
#endif // defined(DAWN_USE_X11)
}
// C-API entry point: creates a surface from the descriptor. An invalid descriptor
// yields an error surface (never nullptr); the validation error is logged.
Surface* InstanceBase::APICreateSurface(const SurfaceDescriptor* descriptor) {
    MaybeError validity = ValidateSurfaceDescriptor(this, descriptor);
    if (ConsumedError(std::move(validity))) {
        return Surface::MakeError(this);
    }
    return new Surface(this, descriptor);
}
} // namespace dawn::native