// Copyright 2017 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "dawn_native/Device.h"

#include "common/Log.h"
#include "dawn_native/Adapter.h"
#include "dawn_native/AttachmentState.h"
#include "dawn_native/BindGroup.h"
#include "dawn_native/BindGroupLayout.h"
#include "dawn_native/Buffer.h"
#include "dawn_native/CallbackTaskManager.h"
#include "dawn_native/CommandBuffer.h"
#include "dawn_native/CommandEncoder.h"
#include "dawn_native/CompilationMessages.h"
#include "dawn_native/ComputePipeline.h"
#include "dawn_native/CreatePipelineAsyncTask.h"
#include "dawn_native/DynamicUploader.h"
#include "dawn_native/ErrorData.h"
#include "dawn_native/ErrorScope.h"
#include "dawn_native/ExternalTexture.h"
#include "dawn_native/Instance.h"
#include "dawn_native/InternalPipelineStore.h"
#include "dawn_native/PersistentCache.h"
#include "dawn_native/PipelineLayout.h"
#include "dawn_native/QuerySet.h"
#include "dawn_native/Queue.h"
#include "dawn_native/RenderBundleEncoder.h"
#include "dawn_native/RenderPipeline.h"
#include "dawn_native/Sampler.h"
#include "dawn_native/ShaderModule.h"
#include "dawn_native/Surface.h"
#include "dawn_native/SwapChain.h"
#include "dawn_native/Texture.h"
#include "dawn_native/ValidationUtils_autogen.h"

#include <unordered_set>

namespace dawn_native {

    // DeviceBase sub-structures

    // The caches are unordered_sets of pointers with special hash and compare functions
    // to compare the value of the objects, instead of the pointers.
    template <typename Object>
    using ContentLessObjectCache =
        std::unordered_set<Object*, typename Object::HashFunc, typename Object::EqualityFunc>;
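
    // A rough sketch of how these content-keyed caches are used below (see GetOrCreateSampler
    // and friends): a stack-allocated "blueprint" object is built from the descriptor and looked
    // up by value, so an equivalent cached object is reused instead of creating a new backend
    // object. HashFunc/EqualityFunc hash and compare object contents, not pointer identity.
    //
    //     SamplerBase blueprint(this, descriptor);          // no backend object is created
    //     auto iter = mCaches->samplers.find(&blueprint);   // compares by value
    //     if (iter != mCaches->samplers.end()) {
    //         return *iter;                                 // reuse the cached sampler
    //     }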

    struct DeviceBase::Caches {
        ~Caches() {
            ASSERT(attachmentStates.empty());
            ASSERT(bindGroupLayouts.empty());
            ASSERT(computePipelines.empty());
            ASSERT(pipelineLayouts.empty());
            ASSERT(renderPipelines.empty());
            ASSERT(samplers.empty());
            ASSERT(shaderModules.empty());
        }

        ContentLessObjectCache<AttachmentStateBlueprint> attachmentStates;
        ContentLessObjectCache<BindGroupLayoutBase> bindGroupLayouts;
        ContentLessObjectCache<ComputePipelineBase> computePipelines;
        ContentLessObjectCache<PipelineLayoutBase> pipelineLayouts;
        ContentLessObjectCache<RenderPipelineBase> renderPipelines;
        ContentLessObjectCache<SamplerBase> samplers;
        ContentLessObjectCache<ShaderModuleBase> shaderModules;
    };

    struct DeviceBase::DeprecationWarnings {
        std::unordered_set<std::string> emitted;
        size_t count = 0;
    };

    // DeviceBase

    DeviceBase::DeviceBase(AdapterBase* adapter, const DeviceDescriptor* descriptor)
        : mInstance(adapter->GetInstance()), mAdapter(adapter) {
        if (descriptor != nullptr) {
            ApplyToggleOverrides(descriptor);
            ApplyExtensions(descriptor);
        }

        mFormatTable = BuildFormatTable(this);
        SetDefaultToggles();
    }

    DeviceBase::~DeviceBase() = default;

    MaybeError DeviceBase::Initialize(QueueBase* defaultQueue) {
        mQueue = AcquireRef(defaultQueue);

#if defined(DAWN_ENABLE_ASSERTS)
        mUncapturedErrorCallback = [](WGPUErrorType, char const*, void*) {
            static bool calledOnce = false;
            if (!calledOnce) {
                calledOnce = true;
                dawn::WarningLog() << "No Dawn device uncaptured error callback was set. This is "
                                      "probably not intended. If you really want to ignore errors "
                                      "and suppress this message, set the callback to null.";
            }
        };

        mDeviceLostCallback = [](char const*, void*) {
            static bool calledOnce = false;
            if (!calledOnce) {
                calledOnce = true;
                dawn::WarningLog() << "No Dawn device lost callback was set. This is probably not "
                                      "intended. If you really want to ignore device lost "
                                      "and suppress this message, set the callback to null.";
            }
        };
#endif  // DAWN_ENABLE_ASSERTS

        mCaches = std::make_unique<DeviceBase::Caches>();
        mErrorScopeStack = std::make_unique<ErrorScopeStack>();
        mDynamicUploader = std::make_unique<DynamicUploader>(this);
        mCallbackTaskManager = std::make_unique<CallbackTaskManager>();
        mDeprecationWarnings = std::make_unique<DeprecationWarnings>();
        mInternalPipelineStore = std::make_unique<InternalPipelineStore>();
        mPersistentCache = std::make_unique<PersistentCache>(this);

        // From this point on the backend can start doing reentrant calls, so the device is marked
        // as alive.
        mState = State::Alive;

        DAWN_TRY_ASSIGN(mEmptyBindGroupLayout, CreateEmptyBindGroupLayout());

        return {};
    }

    void DeviceBase::ShutDownBase() {
        // Skip handling device facilities if they haven't even been created (or their creation
        // failed).
        if (mState != State::BeingCreated) {
            // Call all the callbacks immediately as the device is about to shut down.
            auto callbackTasks = mCallbackTaskManager->AcquireCallbackTasks();
            for (std::unique_ptr<CallbackTask>& callbackTask : callbackTasks) {
                callbackTask->HandleShutDown();
            }
        }

        // Disconnect the device, depending on which state we are currently in.
        switch (mState) {
            case State::BeingCreated:
                // The GPU timeline was never started so we don't have to wait.
                break;

            case State::Alive:
                // Alive is the only state which can have GPU work happening. Wait for all of it to
                // complete before proceeding with destruction.
                // Ignore errors so that we can continue with destruction.
                IgnoreErrors(WaitForIdleForDestruction());
                AssumeCommandsComplete();
                break;

            case State::BeingDisconnected:
                // Getting disconnected is a transient state happening in a single API call so there
                // is always an external reference keeping the Device alive, which means the
                // destructor cannot run while BeingDisconnected.
                UNREACHABLE();
                break;

            case State::Disconnected:
                break;
        }
        ASSERT(mCompletedSerial == mLastSubmittedSerial);
        ASSERT(mFutureSerial <= mCompletedSerial);

        if (mState != State::BeingCreated) {
            // The GPU timeline is finished.
            // Tick the queue-related tasks since they should be complete. This must be done before
            // ShutDownImpl() because it may relinquish resources that will be freed by backends in
            // the ShutDownImpl() call.
            mQueue->Tick(GetCompletedCommandSerial());
            // Call TickImpl one last time to clean up resources.
            // Ignore errors so that we can continue with destruction.
            IgnoreErrors(TickImpl());
        }

        // At this point GPU operations are always finished, so we are in the disconnected state.
        mState = State::Disconnected;

        mDynamicUploader = nullptr;
        mCallbackTaskManager = nullptr;
        mPersistentCache = nullptr;

        mEmptyBindGroupLayout = nullptr;

        mInternalPipelineStore = nullptr;

        AssumeCommandsComplete();
        // Tell the backend that it can free all the objects now that the GPU timeline is empty.
        ShutDownImpl();

        mCaches = nullptr;
    }
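
    // A rough summary of the device lifecycle handled above and in HandleError() below: a device
    // starts in BeingCreated, becomes Alive at the end of Initialize(), may transiently be
    // BeingDisconnected while an internal error or device loss is processed, and ends up
    // Disconnected once the GPU timeline is drained. ShutDownBase() asserts that all serials have
    // completed before asking the backend to free its objects in ShutDownImpl().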

    void DeviceBase::HandleError(InternalErrorType type, const char* message) {
        if (type == InternalErrorType::DeviceLost) {
            // A real device loss happened. Set the state to disconnected as the device cannot be
            // used.
            mState = State::Disconnected;
        } else if (type == InternalErrorType::Internal) {
            // If we receive an internal error, assume the backend can't recover and proceed with
            // device destruction. We first wait for all previous commands to be completed so that
            // backend objects can be freed immediately, before handling the loss.

            // Move away from the Alive state so that the application cannot use this device
            // anymore.
            // TODO(cwallez@chromium.org): Do we need atomics for this to become visible to other
            // threads in a multithreaded scenario?
            mState = State::BeingDisconnected;

            // Ignore errors so that we can continue with destruction.
            // Assume all commands are complete after WaitForIdleForDestruction (because they were).
            IgnoreErrors(WaitForIdleForDestruction());
            IgnoreErrors(TickImpl());
            AssumeCommandsComplete();
            ASSERT(mFutureSerial <= mCompletedSerial);
            mState = State::Disconnected;

            // Now everything is as if the device was lost.
            type = InternalErrorType::DeviceLost;
        }

        if (type == InternalErrorType::DeviceLost) {
            // The device was lost, call the application callback.
            if (mDeviceLostCallback != nullptr) {
                mDeviceLostCallback(message, mDeviceLostUserdata);
                mDeviceLostCallback = nullptr;
            }

            mQueue->HandleDeviceLoss();
            auto callbackTasks = mCallbackTaskManager->AcquireCallbackTasks();
            for (std::unique_ptr<CallbackTask>& callbackTask : callbackTasks) {
                callbackTask->HandleDeviceLoss();
            }

            // Still forward device loss errors to the error scopes so they all reject.
            mErrorScopeStack->HandleError(ToWGPUErrorType(type), message);
        } else {
            // Pass the error to the error scope stack and call the uncaptured error callback
            // if it isn't handled. DeviceLost is not handled here because it should be
            // handled by the lost callback.
            bool captured = mErrorScopeStack->HandleError(ToWGPUErrorType(type), message);
            if (!captured && mUncapturedErrorCallback != nullptr) {
                mUncapturedErrorCallback(static_cast<WGPUErrorType>(ToWGPUErrorType(type)), message,
                                         mUncapturedErrorUserdata);
            }
        }
    }

    void DeviceBase::APIInjectError(wgpu::ErrorType type, const char* message) {
        if (ConsumedError(ValidateErrorType(type))) {
            return;
        }

        // This method should only be used to make error scopes reject. For DeviceLost there is
        // the LoseForTesting function that can be used instead.
        if (type != wgpu::ErrorType::Validation && type != wgpu::ErrorType::OutOfMemory) {
            HandleError(InternalErrorType::Validation,
                        "Invalid injected error, must be Validation or OutOfMemory");
            return;
        }

        HandleError(FromWGPUErrorType(type), message);
    }

    void DeviceBase::ConsumeError(std::unique_ptr<ErrorData> error) {
        ASSERT(error != nullptr);
        std::ostringstream ss;
        ss << error->GetMessage();
        for (const auto& callsite : error->GetBacktrace()) {
            ss << "\n    at " << callsite.function << " (" << callsite.file << ":" << callsite.line
               << ")";
        }
        HandleError(error->GetType(), ss.str().c_str());
    }

    void DeviceBase::APISetUncapturedErrorCallback(wgpu::ErrorCallback callback, void* userdata) {
        mUncapturedErrorCallback = callback;
        mUncapturedErrorUserdata = userdata;
    }

    void DeviceBase::APISetDeviceLostCallback(wgpu::DeviceLostCallback callback, void* userdata) {
        mDeviceLostCallback = callback;
        mDeviceLostUserdata = userdata;
    }
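
    // A minimal usage sketch from the application side, assuming the wgpu:: C++ wrapper API;
    // the callback signatures mirror the WGPU* C types used above:
    //
    //     device.SetUncapturedErrorCallback(
    //         [](WGPUErrorType type, char const* message, void*) {
    //             dawn::ErrorLog() << "Uncaptured error: " << message;
    //         },
    //         nullptr);
    //     device.SetDeviceLostCallback(
    //         [](char const* message, void*) { dawn::ErrorLog() << "Device lost: " << message; },
    //         nullptr);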

    void DeviceBase::APIPushErrorScope(wgpu::ErrorFilter filter) {
        if (ConsumedError(ValidateErrorFilter(filter))) {
            return;
        }
        mErrorScopeStack->Push(filter);
    }

    bool DeviceBase::APIPopErrorScope(wgpu::ErrorCallback callback, void* userdata) {
        if (mErrorScopeStack->Empty()) {
            return false;
        }
        ErrorScope scope = mErrorScopeStack->Pop();
        if (callback != nullptr) {
            callback(static_cast<WGPUErrorType>(scope.GetErrorType()), scope.GetErrorMessage(),
                     userdata);
        }

        return true;
    }
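
    // Rough sketch of how an application exercises these entry points through the wgpu:: C++
    // wrapper (PushErrorScope/PopErrorScope map to the API* methods above):
    //
    //     device.PushErrorScope(wgpu::ErrorFilter::Validation);
    //     // ... issue calls whose validation errors should be captured by this scope ...
    //     device.PopErrorScope(
    //         [](WGPUErrorType type, char const* message, void*) {
    //             if (type != WGPUErrorType_NoError) {
    //                 dawn::WarningLog() << "Captured: " << message;
    //             }
    //         },
    //         nullptr);
    //
    // APIPopErrorScope returns false (and does not invoke the callback) when the scope stack is
    // empty.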

    PersistentCache* DeviceBase::GetPersistentCache() {
        ASSERT(mPersistentCache.get() != nullptr);
        return mPersistentCache.get();
    }

    MaybeError DeviceBase::ValidateObject(const ObjectBase* object) const {
        ASSERT(object != nullptr);
        if (DAWN_UNLIKELY(object->GetDevice() != this)) {
            return DAWN_VALIDATION_ERROR("Object from a different device.");
        }
        if (DAWN_UNLIKELY(object->IsError())) {
            return DAWN_VALIDATION_ERROR("Object is an error.");
        }
        return {};
    }
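
    // Validation helpers throughout the frontend typically chain this as
    // DAWN_TRY(device->ValidateObject(object)) so that cross-device and error objects are
    // rejected before any descriptor-specific checks run.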

    MaybeError DeviceBase::ValidateIsAlive() const {
        if (DAWN_LIKELY(mState == State::Alive)) {
            return {};
        }
        return DAWN_VALIDATION_ERROR("Device is lost");
    }

    void DeviceBase::APILoseForTesting() {
        if (mState != State::Alive) {
            return;
        }

        HandleError(InternalErrorType::Internal, "Device lost for testing");
    }

    DeviceBase::State DeviceBase::GetState() const {
        return mState;
    }

    bool DeviceBase::IsLost() const {
        ASSERT(mState != State::BeingCreated);
        return mState != State::Alive;
    }

    AdapterBase* DeviceBase::GetAdapter() const {
        return mAdapter;
    }

    dawn_platform::Platform* DeviceBase::GetPlatform() const {
        return GetAdapter()->GetInstance()->GetPlatform();
    }

    ExecutionSerial DeviceBase::GetCompletedCommandSerial() const {
        return mCompletedSerial;
    }

    ExecutionSerial DeviceBase::GetLastSubmittedCommandSerial() const {
        return mLastSubmittedSerial;
    }

    ExecutionSerial DeviceBase::GetFutureSerial() const {
        return mFutureSerial;
    }

    InternalPipelineStore* DeviceBase::GetInternalPipelineStore() {
        return mInternalPipelineStore.get();
    }

    void DeviceBase::IncrementLastSubmittedCommandSerial() {
        mLastSubmittedSerial++;
    }

    void DeviceBase::AssumeCommandsComplete() {
        ExecutionSerial maxSerial =
            ExecutionSerial(std::max(mLastSubmittedSerial + ExecutionSerial(1), mFutureSerial));
        mLastSubmittedSerial = maxSerial;
        mCompletedSerial = maxSerial;
    }
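
    // Serial bookkeeping in a nutshell: mLastSubmittedSerial advances on every submit,
    // mCompletedSerial trails it as the GPU finishes work, and mFutureSerial can be pushed ahead
    // by CPU-side trackers that still need a future Tick. For example, after three submits with
    // the last one still in flight the serials could be (completed=2, lastSubmitted=3, future=3);
    // AssumeCommandsComplete() would then jump both completed and lastSubmitted to 4 so every
    // pending callback fires.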

    bool DeviceBase::IsDeviceIdle() {
        ExecutionSerial maxSerial = std::max(mLastSubmittedSerial, mFutureSerial);
        if (mCompletedSerial == maxSerial) {
            return true;
        }
        return false;
    }

    ExecutionSerial DeviceBase::GetPendingCommandSerial() const {
        return mLastSubmittedSerial + ExecutionSerial(1);
    }

    void DeviceBase::AddFutureSerial(ExecutionSerial serial) {
        if (serial > mFutureSerial) {
            mFutureSerial = serial;
        }
    }

    MaybeError DeviceBase::CheckPassedSerials() {
        ExecutionSerial completedSerial;
        DAWN_TRY_ASSIGN(completedSerial, CheckAndUpdateCompletedSerials());

        ASSERT(completedSerial <= mLastSubmittedSerial);
        // completedSerial should not be less than mCompletedSerial unless it is 0.
        // It can be 0 when there are no fences to check.
        ASSERT(completedSerial >= mCompletedSerial || completedSerial == ExecutionSerial(0));

        if (completedSerial > mCompletedSerial) {
            mCompletedSerial = completedSerial;
        }

        return {};
    }

    ResultOrError<const Format*> DeviceBase::GetInternalFormat(wgpu::TextureFormat format) const {
        size_t index = ComputeFormatIndex(format);
        if (index >= mFormatTable.size()) {
            return DAWN_VALIDATION_ERROR("Unknown texture format");
        }

        const Format* internalFormat = &mFormatTable[index];
        if (!internalFormat->isSupported) {
            return DAWN_VALIDATION_ERROR("Unsupported texture format");
        }

        return internalFormat;
    }

    const Format& DeviceBase::GetValidInternalFormat(wgpu::TextureFormat format) const {
        size_t index = ComputeFormatIndex(format);
        ASSERT(index < mFormatTable.size());
        ASSERT(mFormatTable[index].isSupported);
        return mFormatTable[index];
    }

    ResultOrError<Ref<BindGroupLayoutBase>> DeviceBase::GetOrCreateBindGroupLayout(
        const BindGroupLayoutDescriptor* descriptor) {
        BindGroupLayoutBase blueprint(this, descriptor);

        const size_t blueprintHash = blueprint.ComputeContentHash();
        blueprint.SetContentHash(blueprintHash);

        Ref<BindGroupLayoutBase> result;
        auto iter = mCaches->bindGroupLayouts.find(&blueprint);
        if (iter != mCaches->bindGroupLayouts.end()) {
            result = *iter;
        } else {
            DAWN_TRY_ASSIGN(result, CreateBindGroupLayoutImpl(descriptor));
            result->SetIsCachedReference();
            result->SetContentHash(blueprintHash);
            mCaches->bindGroupLayouts.insert(result.Get());
        }

        return std::move(result);
    }

    void DeviceBase::UncacheBindGroupLayout(BindGroupLayoutBase* obj) {
        ASSERT(obj->IsCachedReference());
        size_t removedCount = mCaches->bindGroupLayouts.erase(obj);
        ASSERT(removedCount == 1);
    }

    // Private function used at initialization
    ResultOrError<Ref<BindGroupLayoutBase>> DeviceBase::CreateEmptyBindGroupLayout() {
        BindGroupLayoutDescriptor desc = {};
        desc.entryCount = 0;
        desc.entries = nullptr;

        return GetOrCreateBindGroupLayout(&desc);
    }

    BindGroupLayoutBase* DeviceBase::GetEmptyBindGroupLayout() {
        ASSERT(mEmptyBindGroupLayout != nullptr);
        return mEmptyBindGroupLayout.Get();
    }

    std::pair<Ref<ComputePipelineBase>, size_t> DeviceBase::GetCachedComputePipeline(
        const ComputePipelineDescriptor* descriptor) {
        ComputePipelineBase blueprint(this, descriptor);

        const size_t blueprintHash = blueprint.ComputeContentHash();
        blueprint.SetContentHash(blueprintHash);

        Ref<ComputePipelineBase> result;
        auto iter = mCaches->computePipelines.find(&blueprint);
        if (iter != mCaches->computePipelines.end()) {
            result = *iter;
        }

        return std::make_pair(result, blueprintHash);
    }

    Ref<ComputePipelineBase> DeviceBase::AddOrGetCachedPipeline(
        Ref<ComputePipelineBase> computePipeline,
        size_t blueprintHash) {
        computePipeline->SetContentHash(blueprintHash);
        auto insertion = mCaches->computePipelines.insert(computePipeline.Get());
        if (insertion.second) {
            computePipeline->SetIsCachedReference();
            return computePipeline;
        } else {
            return *(insertion.first);
        }
    }
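
    // Compute pipelines go through the cache in two phases because the backend object may be
    // created asynchronously: GetCachedComputePipeline() only performs the lookup, and
    // AddOrGetCachedPipeline() inserts the freshly created pipeline afterwards. If an identical
    // pipeline was cached in the meantime, the insertion loses the race and the already-cached
    // pipeline is returned instead.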

    void DeviceBase::UncacheComputePipeline(ComputePipelineBase* obj) {
        ASSERT(obj->IsCachedReference());
        size_t removedCount = mCaches->computePipelines.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<Ref<PipelineLayoutBase>> DeviceBase::GetOrCreatePipelineLayout(
        const PipelineLayoutDescriptor* descriptor) {
        PipelineLayoutBase blueprint(this, descriptor);

        const size_t blueprintHash = blueprint.ComputeContentHash();
        blueprint.SetContentHash(blueprintHash);

        Ref<PipelineLayoutBase> result;
        auto iter = mCaches->pipelineLayouts.find(&blueprint);
        if (iter != mCaches->pipelineLayouts.end()) {
            result = *iter;
        } else {
            DAWN_TRY_ASSIGN(result, CreatePipelineLayoutImpl(descriptor));
            result->SetIsCachedReference();
            result->SetContentHash(blueprintHash);
            mCaches->pipelineLayouts.insert(result.Get());
        }

        return std::move(result);
    }

    void DeviceBase::UncachePipelineLayout(PipelineLayoutBase* obj) {
        ASSERT(obj->IsCachedReference());
        size_t removedCount = mCaches->pipelineLayouts.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<Ref<RenderPipelineBase>> DeviceBase::GetOrCreateRenderPipeline(
        const RenderPipelineDescriptor* descriptor) {
        RenderPipelineBase blueprint(this, descriptor);

        const size_t blueprintHash = blueprint.ComputeContentHash();
        blueprint.SetContentHash(blueprintHash);

        Ref<RenderPipelineBase> result;
        auto iter = mCaches->renderPipelines.find(&blueprint);
        if (iter != mCaches->renderPipelines.end()) {
            result = *iter;
        } else {
            DAWN_TRY_ASSIGN(result, CreateRenderPipelineImpl(descriptor));
            result->SetIsCachedReference();
            result->SetContentHash(blueprintHash);
            mCaches->renderPipelines.insert(result.Get());
        }

        return std::move(result);
    }

    void DeviceBase::UncacheRenderPipeline(RenderPipelineBase* obj) {
        ASSERT(obj->IsCachedReference());
        size_t removedCount = mCaches->renderPipelines.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<Ref<SamplerBase>> DeviceBase::GetOrCreateSampler(
        const SamplerDescriptor* descriptor) {
        SamplerBase blueprint(this, descriptor);

        const size_t blueprintHash = blueprint.ComputeContentHash();
        blueprint.SetContentHash(blueprintHash);

        Ref<SamplerBase> result;
        auto iter = mCaches->samplers.find(&blueprint);
        if (iter != mCaches->samplers.end()) {
            result = *iter;
        } else {
            DAWN_TRY_ASSIGN(result, CreateSamplerImpl(descriptor));
            result->SetIsCachedReference();
            result->SetContentHash(blueprintHash);
            mCaches->samplers.insert(result.Get());
        }

        return std::move(result);
    }

    void DeviceBase::UncacheSampler(SamplerBase* obj) {
        ASSERT(obj->IsCachedReference());
        size_t removedCount = mCaches->samplers.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<Ref<ShaderModuleBase>> DeviceBase::GetOrCreateShaderModule(
        const ShaderModuleDescriptor* descriptor,
        ShaderModuleParseResult* parseResult) {
        ASSERT(parseResult != nullptr);

        ShaderModuleBase blueprint(this, descriptor);

        const size_t blueprintHash = blueprint.ComputeContentHash();
        blueprint.SetContentHash(blueprintHash);

        Ref<ShaderModuleBase> result;
        auto iter = mCaches->shaderModules.find(&blueprint);
        if (iter != mCaches->shaderModules.end()) {
            result = *iter;
        } else {
            if (!parseResult->HasParsedShader()) {
                // We skip the parse on creation if validation isn't enabled, which lets us quickly
                // look up in the cache without validating and parsing. We need the parsed module
                // now, so call validate. Most of |ValidateShaderModuleDescriptor| is parsing, but
                // we can consider splitting it if additional validation is added.
                ASSERT(!IsValidationEnabled());
                DAWN_TRY(ValidateShaderModuleDescriptor(this, descriptor, parseResult));
            }
            DAWN_TRY_ASSIGN(result, CreateShaderModuleImpl(descriptor, parseResult));
            result->SetIsCachedReference();
            result->SetContentHash(blueprintHash);
            mCaches->shaderModules.insert(result.Get());
        }

        return std::move(result);
    }

    void DeviceBase::UncacheShaderModule(ShaderModuleBase* obj) {
        ASSERT(obj->IsCachedReference());
        size_t removedCount = mCaches->shaderModules.erase(obj);
        ASSERT(removedCount == 1);
    }

    Ref<AttachmentState> DeviceBase::GetOrCreateAttachmentState(
        AttachmentStateBlueprint* blueprint) {
        auto iter = mCaches->attachmentStates.find(blueprint);
        if (iter != mCaches->attachmentStates.end()) {
            return static_cast<AttachmentState*>(*iter);
        }

        Ref<AttachmentState> attachmentState = AcquireRef(new AttachmentState(this, *blueprint));
        attachmentState->SetIsCachedReference();
        attachmentState->SetContentHash(attachmentState->ComputeContentHash());
        mCaches->attachmentStates.insert(attachmentState.Get());
        return attachmentState;
    }

    Ref<AttachmentState> DeviceBase::GetOrCreateAttachmentState(
        const RenderBundleEncoderDescriptor* descriptor) {
        AttachmentStateBlueprint blueprint(descriptor);
        return GetOrCreateAttachmentState(&blueprint);
    }

    Ref<AttachmentState> DeviceBase::GetOrCreateAttachmentState(
        const RenderPipelineDescriptor* descriptor) {
        AttachmentStateBlueprint blueprint(descriptor);
        return GetOrCreateAttachmentState(&blueprint);
    }

    Ref<AttachmentState> DeviceBase::GetOrCreateAttachmentState(
        const RenderPassDescriptor* descriptor) {
        AttachmentStateBlueprint blueprint(descriptor);
        return GetOrCreateAttachmentState(&blueprint);
    }

    void DeviceBase::UncacheAttachmentState(AttachmentState* obj) {
        ASSERT(obj->IsCachedReference());
        size_t removedCount = mCaches->attachmentStates.erase(obj);
        ASSERT(removedCount == 1);
    }

    // Object creation API methods

    BindGroupBase* DeviceBase::APICreateBindGroup(const BindGroupDescriptor* descriptor) {
        Ref<BindGroupBase> result;
        if (ConsumedError(CreateBindGroup(descriptor), &result)) {
            return BindGroupBase::MakeError(this);
        }
        return result.Detach();
    }
    BindGroupLayoutBase* DeviceBase::APICreateBindGroupLayout(
        const BindGroupLayoutDescriptor* descriptor) {
        Ref<BindGroupLayoutBase> result;
        if (ConsumedError(CreateBindGroupLayout(descriptor), &result)) {
            return BindGroupLayoutBase::MakeError(this);
        }
        return result.Detach();
    }
    BufferBase* DeviceBase::APICreateBuffer(const BufferDescriptor* descriptor) {
        Ref<BufferBase> result = nullptr;
        if (ConsumedError(CreateBuffer(descriptor), &result)) {
            ASSERT(result == nullptr);
            return BufferBase::MakeError(this, descriptor);
        }
        return result.Detach();
    }
    CommandEncoder* DeviceBase::APICreateCommandEncoder(
        const CommandEncoderDescriptor* descriptor) {
        return new CommandEncoder(this, descriptor);
    }
    ComputePipelineBase* DeviceBase::APICreateComputePipeline(
        const ComputePipelineDescriptor* descriptor) {
        Ref<ComputePipelineBase> result;
        if (ConsumedError(CreateComputePipeline(descriptor), &result)) {
            return ComputePipelineBase::MakeError(this);
        }
        return result.Detach();
    }
    void DeviceBase::APICreateComputePipelineAsync(const ComputePipelineDescriptor* descriptor,
                                                   WGPUCreateComputePipelineAsyncCallback callback,
                                                   void* userdata) {
        MaybeError maybeResult = CreateComputePipelineAsync(descriptor, callback, userdata);

        // Call the callback directly when a validation error has been found in front-end
        // validation. If there is no error, then CreateComputePipelineAsync will call the
        // callback.
        if (maybeResult.IsError()) {
            std::unique_ptr<ErrorData> error = maybeResult.AcquireError();
            callback(WGPUCreatePipelineAsyncStatus_Error, nullptr, error->GetMessage().c_str(),
                     userdata);
        }
    }
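
    // A minimal sketch of a caller of the async entry point through the wgpu:: C++ wrapper; the
    // lambda-to-callback plumbing is an assumption of the sketch, but the status/pipeline/message
    // parameters match the WGPUCreateComputePipelineAsyncCallback type used above:
    //
    //     device.CreateComputePipelineAsync(
    //         &descriptor,
    //         [](WGPUCreatePipelineAsyncStatus status, WGPUComputePipeline pipeline,
    //            char const* message, void* userdata) {
    //             if (status == WGPUCreatePipelineAsyncStatus_Success) {
    //                 // Take ownership of |pipeline|; otherwise inspect |message|.
    //             }
    //         },
    //         nullptr);
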
    PipelineLayoutBase* DeviceBase::APICreatePipelineLayout(
        const PipelineLayoutDescriptor* descriptor) {
        Ref<PipelineLayoutBase> result;
        if (ConsumedError(CreatePipelineLayout(descriptor), &result)) {
            return PipelineLayoutBase::MakeError(this);
        }
        return result.Detach();
    }
    QuerySetBase* DeviceBase::APICreateQuerySet(const QuerySetDescriptor* descriptor) {
        Ref<QuerySetBase> result;
        if (ConsumedError(CreateQuerySet(descriptor), &result)) {
            return QuerySetBase::MakeError(this);
        }
        return result.Detach();
    }
    SamplerBase* DeviceBase::APICreateSampler(const SamplerDescriptor* descriptor) {
        Ref<SamplerBase> result;
        if (ConsumedError(CreateSampler(descriptor), &result)) {
            return SamplerBase::MakeError(this);
        }
        return result.Detach();
    }
    void DeviceBase::APICreateRenderPipelineAsync(const RenderPipelineDescriptor* descriptor,
                                                  WGPUCreateRenderPipelineAsyncCallback callback,
                                                  void* userdata) {
        ResultOrError<Ref<RenderPipelineBase>> maybeResult =
            CreateRenderPipeline(descriptor);
        if (maybeResult.IsError()) {
            std::unique_ptr<ErrorData> error = maybeResult.AcquireError();
            callback(WGPUCreatePipelineAsyncStatus_Error, nullptr, error->GetMessage().c_str(),
                     userdata);
            return;
        }

        Ref<RenderPipelineBase> result = maybeResult.AcquireSuccess();
        std::unique_ptr<CreateRenderPipelineAsyncCallbackTask> callbackTask =
            std::make_unique<CreateRenderPipelineAsyncCallbackTask>(std::move(result), "", callback,
                                                                    userdata);
        mCallbackTaskManager->AddCallbackTask(std::move(callbackTask));
    }
    RenderBundleEncoder* DeviceBase::APICreateRenderBundleEncoder(
        const RenderBundleEncoderDescriptor* descriptor) {
        Ref<RenderBundleEncoder> result;
        if (ConsumedError(CreateRenderBundleEncoder(descriptor), &result)) {
            return RenderBundleEncoder::MakeError(this);
        }
        return result.Detach();
    }
    RenderPipelineBase* DeviceBase::APICreateRenderPipeline(
        const RenderPipelineDescriptor* descriptor) {
        Ref<RenderPipelineBase> result;
        if (ConsumedError(CreateRenderPipeline(descriptor), &result)) {
            return RenderPipelineBase::MakeError(this);
        }
        return result.Detach();
    }
    RenderPipelineBase* DeviceBase::APICreateRenderPipeline2(
        const RenderPipelineDescriptor* descriptor) {
        EmitDeprecationWarning(
            "CreateRenderPipeline2() has been deprecated. Please begin using "
            "CreateRenderPipeline() instead.");
        return APICreateRenderPipeline(descriptor);
    }
    ShaderModuleBase* DeviceBase::APICreateShaderModule(const ShaderModuleDescriptor* descriptor) {
        Ref<ShaderModuleBase> result;
        ShaderModuleParseResult parseResult = {};
        if (ConsumedError(CreateShaderModule(descriptor, &parseResult), &result)) {
            return ShaderModuleBase::MakeError(this, std::move(parseResult.compilationMessages));
        }
        return result.Detach();
    }
    SwapChainBase* DeviceBase::APICreateSwapChain(Surface* surface,
                                                  const SwapChainDescriptor* descriptor) {
        Ref<SwapChainBase> result;
        if (ConsumedError(CreateSwapChain(surface, descriptor), &result)) {
            return SwapChainBase::MakeError(this);
        }
        return result.Detach();
    }
    TextureBase* DeviceBase::APICreateTexture(const TextureDescriptor* descriptor) {
        Ref<TextureBase> result;
        if (ConsumedError(CreateTexture(descriptor), &result)) {
            return TextureBase::MakeError(this);
        }
        return result.Detach();
    }

    // For Dawn Wire

    BufferBase* DeviceBase::APICreateErrorBuffer() {
        BufferDescriptor desc = {};
        return BufferBase::MakeError(this, &desc);
    }

    // Other Device API methods

    // Returns true if future ticking is needed.
    bool DeviceBase::APITick() {
        if (ConsumedError(Tick())) {
            return false;
        }
        return !IsDeviceIdle();
    }

    MaybeError DeviceBase::Tick() {
        DAWN_TRY(ValidateIsAlive());

        // To avoid ticking excessively, we only want to tick when:
        // 1. the last submitted serial has moved beyond the completed serial
        // 2. or the completed serial has not reached the future serial set by the trackers
        if (mLastSubmittedSerial > mCompletedSerial || mCompletedSerial < mFutureSerial) {
            DAWN_TRY(CheckPassedSerials());
            DAWN_TRY(TickImpl());

            // If there is no more GPU work in flight, we need to move the serials forward so that
            // CPU operations waiting on GPU completion can know they don't have to wait.
            // AssumeCommandsComplete will assign the max serial we must tick to in order to
            // fire the awaiting callbacks.
            if (mCompletedSerial == mLastSubmittedSerial) {
                AssumeCommandsComplete();
            }

            // TODO(cwallez@chromium.org): decouple TickImpl from updating the serial so that we can
            // tick the dynamic uploader before the backend resource allocators. This would allow
            // reclaiming resources one tick earlier.
            mDynamicUploader->Deallocate(mCompletedSerial);
            mQueue->Tick(mCompletedSerial);
        }

        // We have to check mCallbackTaskManager in every Tick because it is not related to any
        // global serials.
        if (!mCallbackTaskManager->IsEmpty()) {
            // If a user calls Queue::Submit inside the callback, then the device will be ticked,
            // which in turn ticks the tracker, causing reentrance and deadlock here. To prevent
            // such reentrant calls, we remove all the callback tasks from mCallbackTaskManager,
            // update mCallbackTaskManager, then call all the callbacks.
            auto callbackTasks = mCallbackTaskManager->AcquireCallbackTasks();
            for (std::unique_ptr<CallbackTask>& callbackTask : callbackTasks) {
                callbackTask->Finish();
            }
        }

        return {};
    }
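
    // In a native application the tick entry point is typically driven from the main loop; a
    // rough sketch with the wgpu:: C++ wrapper (the per-frame and termination logic is
    // hypothetical):
    //
    //     while (!done) {
    //         RenderFrame(device);   // hypothetical per-frame work that submits commands
    //         device.Tick();         // pumps callbacks, map requests, and resource cleanup
    //     }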

    QueueBase* DeviceBase::APIGetQueue() {
        // Backends provide the primary queue during initialization.
        ASSERT(mQueue != nullptr);

        // Returns a new reference to the queue.
        mQueue->Reference();
        return mQueue.Get();
    }

    ExternalTextureBase* DeviceBase::APICreateExternalTexture(
        const ExternalTextureDescriptor* descriptor) {
        Ref<ExternalTextureBase> result = nullptr;
        if (ConsumedError(CreateExternalTexture(descriptor), &result)) {
            return ExternalTextureBase::MakeError(this);
        }

        return result.Detach();
    }

    void DeviceBase::ApplyExtensions(const DeviceDescriptor* deviceDescriptor) {
        ASSERT(deviceDescriptor);
        ASSERT(GetAdapter()->SupportsAllRequestedExtensions(deviceDescriptor->requiredExtensions));

        mEnabledExtensions = GetAdapter()->GetInstance()->ExtensionNamesToExtensionsSet(
            deviceDescriptor->requiredExtensions);
    }

    std::vector<const char*> DeviceBase::GetEnabledExtensions() const {
        return mEnabledExtensions.GetEnabledExtensionNames();
    }

    bool DeviceBase::IsExtensionEnabled(Extension extension) const {
        return mEnabledExtensions.IsEnabled(extension);
    }

    bool DeviceBase::IsValidationEnabled() const {
        return !IsToggleEnabled(Toggle::SkipValidation);
    }

    bool DeviceBase::IsRobustnessEnabled() const {
        return !IsToggleEnabled(Toggle::DisableRobustness);
    }

    size_t DeviceBase::GetLazyClearCountForTesting() {
        return mLazyClearCountForTesting;
    }

    void DeviceBase::IncrementLazyClearCountForTesting() {
        ++mLazyClearCountForTesting;
    }

    size_t DeviceBase::GetDeprecationWarningCountForTesting() {
        return mDeprecationWarnings->count;
    }

    void DeviceBase::EmitDeprecationWarning(const char* warning) {
        mDeprecationWarnings->count++;
        if (mDeprecationWarnings->emitted.insert(warning).second) {
            dawn::WarningLog() << warning;
        }
    }

    QueueBase* DeviceBase::GetQueue() const {
        return mQueue.Get();
    }

    // Implementation details of object creation

    ResultOrError<Ref<BindGroupBase>> DeviceBase::CreateBindGroup(
        const BindGroupDescriptor* descriptor) {
        DAWN_TRY(ValidateIsAlive());
        if (IsValidationEnabled()) {
            DAWN_TRY(ValidateBindGroupDescriptor(this, descriptor));
        }
        return CreateBindGroupImpl(descriptor);
    }

    ResultOrError<Ref<BindGroupLayoutBase>> DeviceBase::CreateBindGroupLayout(
        const BindGroupLayoutDescriptor* descriptor) {
        DAWN_TRY(ValidateIsAlive());
        if (IsValidationEnabled()) {
            DAWN_TRY(ValidateBindGroupLayoutDescriptor(this, descriptor));
        }
        return GetOrCreateBindGroupLayout(descriptor);
    }

    ResultOrError<Ref<BufferBase>> DeviceBase::CreateBuffer(const BufferDescriptor* descriptor) {
        DAWN_TRY(ValidateIsAlive());
        if (IsValidationEnabled()) {
            DAWN_TRY(ValidateBufferDescriptor(this, descriptor));
        }

        Ref<BufferBase> buffer;
        DAWN_TRY_ASSIGN(buffer, CreateBufferImpl(descriptor));

        if (descriptor->mappedAtCreation) {
            DAWN_TRY(buffer->MapAtCreation());
        }

        return std::move(buffer);
    }
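
    // A short usage sketch of the mappedAtCreation path handled above, through the wgpu:: C++
    // wrapper (the buffer contents and size are hypothetical):
    //
    //     wgpu::BufferDescriptor desc;
    //     desc.size = 4 * sizeof(float);
    //     desc.usage = wgpu::BufferUsage::Vertex;
    //     desc.mappedAtCreation = true;
    //     wgpu::Buffer buffer = device.CreateBuffer(&desc);
    //     memcpy(buffer.GetMappedRange(), initialData, desc.size);  // CPU-visible right away
    //     buffer.Unmap();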
980
Corentin Wallez0af4a832021-04-19 08:52:35 +0000981 ResultOrError<Ref<ComputePipelineBase>> DeviceBase::CreateComputePipeline(
Corentin Wallez8e335a52018-08-27 23:12:56 +0200982 const ComputePipelineDescriptor* descriptor) {
Natasha Lee0ecc48e2020-01-15 19:02:13 +0000983 DAWN_TRY(ValidateIsAlive());
Austin Eng4d156092019-11-21 00:48:39 +0000984 if (IsValidationEnabled()) {
985 DAWN_TRY(ValidateComputePipelineDescriptor(this, descriptor));
986 }
Austin Engf6eb8902019-11-22 17:02:22 +0000987
Jiawei Shaoc243f672021-04-08 01:22:22 +0000988 // Ref will keep the pipeline layout alive until the end of the function where
989 // the pipeline will take another reference.
990 Ref<PipelineLayoutBase> layoutRef;
991 ComputePipelineDescriptor appliedDescriptor;
992 DAWN_TRY_ASSIGN(layoutRef, ValidateAndGetComputePipelineDescriptorWithDefaults(
993 *descriptor, &appliedDescriptor));
Austin Engf6eb8902019-11-22 17:02:22 +0000994
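        // Check the per-device cache first: GetCachedComputePipeline() returns the cached
        // pipeline (if any) together with the blueprint hash that is later used to insert a
        // newly created pipeline into the cache.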
Jiawei Shaoc243f672021-04-08 01:22:22 +0000995 auto pipelineAndBlueprintFromCache = GetCachedComputePipeline(&appliedDescriptor);
996 if (pipelineAndBlueprintFromCache.first.Get() != nullptr) {
997 return std::move(pipelineAndBlueprintFromCache.first);
Austin Engf6eb8902019-11-22 17:02:22 +0000998 }
Jiawei Shaoc243f672021-04-08 01:22:22 +0000999
1000 Ref<ComputePipelineBase> backendObj;
1001 DAWN_TRY_ASSIGN(backendObj, CreateComputePipelineImpl(&appliedDescriptor));
1002 size_t blueprintHash = pipelineAndBlueprintFromCache.second;
1003 return AddOrGetCachedPipeline(backendObj, blueprintHash);
1004 }
1005
Corentin Wallez0af4a832021-04-19 08:52:35 +00001006 MaybeError DeviceBase::CreateComputePipelineAsync(
Jiawei Shaoc243f672021-04-08 01:22:22 +00001007 const ComputePipelineDescriptor* descriptor,
1008 WGPUCreateComputePipelineAsyncCallback callback,
1009 void* userdata) {
1010 DAWN_TRY(ValidateIsAlive());
1011 if (IsValidationEnabled()) {
1012 DAWN_TRY(ValidateComputePipelineDescriptor(this, descriptor));
1013 }
1014
1015 // Ref will keep the pipeline layout alive until the end of the function where
1016 // the pipeline will take another reference.
1017 Ref<PipelineLayoutBase> layoutRef;
1018 ComputePipelineDescriptor appliedDescriptor;
1019 DAWN_TRY_ASSIGN(layoutRef, ValidateAndGetComputePipelineDescriptorWithDefaults(
1020 *descriptor, &appliedDescriptor));
1021
1022 // Call the callback directly when we can get a cached compute pipeline object.
1023 auto pipelineAndBlueprintFromCache = GetCachedComputePipeline(&appliedDescriptor);
1024 if (pipelineAndBlueprintFromCache.first.Get() != nullptr) {
1025 Ref<ComputePipelineBase> result = std::move(pipelineAndBlueprintFromCache.first);
1026 callback(WGPUCreatePipelineAsyncStatus_Success,
1027 reinterpret_cast<WGPUComputePipeline>(result.Detach()), "", userdata);
1028 } else {
1029            // Otherwise the pipeline object is created in CreateComputePipelineAsyncImpl(),
1030            // where it may be created asynchronously; the result will be saved to
1031            // mCreatePipelineAsyncTracker.
1032 const size_t blueprintHash = pipelineAndBlueprintFromCache.second;
1033 CreateComputePipelineAsyncImpl(&appliedDescriptor, blueprintHash, callback, userdata);
1034 }
1035
1036 return {};
1037 }
1038
1039 ResultOrError<Ref<PipelineLayoutBase>>
1040 DeviceBase::ValidateAndGetComputePipelineDescriptorWithDefaults(
1041 const ComputePipelineDescriptor& descriptor,
1042 ComputePipelineDescriptor* outDescriptor) {
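        // Copy the descriptor and, when no layout was provided, substitute a default pipeline
        // layout derived from the compute stage. The returned Ref keeps that default layout
        // alive until the caller takes its own reference.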
1043 Ref<PipelineLayoutBase> layoutRef;
1044 *outDescriptor = descriptor;
1045 if (outDescriptor->layout == nullptr) {
1046 DAWN_TRY_ASSIGN(layoutRef, PipelineLayoutBase::CreateDefault(
1047 this, {{SingleShaderStage::Compute,
1048 outDescriptor->computeStage.module,
1049 outDescriptor->computeStage.entryPoint}}));
1050 outDescriptor->layout = layoutRef.Get();
1051 }
1052
1053 return layoutRef;
1054 }
1055
1056        // TODO(jiawei.shao@intel.com): override this function with an async version on the
1057        // backends that support creating compute pipelines asynchronously.
1058 void DeviceBase::CreateComputePipelineAsyncImpl(const ComputePipelineDescriptor* descriptor,
1059 size_t blueprintHash,
1060 WGPUCreateComputePipelineAsyncCallback callback,
1061 void* userdata) {
1062 Ref<ComputePipelineBase> result;
1063 std::string errorMessage;
1064
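        // This base implementation is still synchronous: the pipeline is created on the calling
        // thread and only the callback is deferred, by queueing it on mCallbackTaskManager.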
1065 auto resultOrError = CreateComputePipelineImpl(descriptor);
1066 if (resultOrError.IsError()) {
1067 std::unique_ptr<ErrorData> error = resultOrError.AcquireError();
1068 errorMessage = error->GetMessage();
1069 } else {
1070 result = AddOrGetCachedPipeline(resultOrError.AcquireSuccess(), blueprintHash);
1071 }
1072
Jiawei Shaoc74af7032021-05-02 03:22:30 +00001073 std::unique_ptr<CreateComputePipelineAsyncCallbackTask> callbackTask =
1074 std::make_unique<CreateComputePipelineAsyncCallbackTask>(
1075 std::move(result), errorMessage, callback, userdata);
1076 mCallbackTaskManager->AddCallbackTask(std::move(callbackTask));
Corentin Wallez8e335a52018-08-27 23:12:56 +02001077 }
1078
Corentin Wallez0af4a832021-04-19 08:52:35 +00001079 ResultOrError<Ref<PipelineLayoutBase>> DeviceBase::CreatePipelineLayout(
Corentin Wallez36afbb62018-07-25 17:03:23 +02001080 const PipelineLayoutDescriptor* descriptor) {
Natasha Lee0ecc48e2020-01-15 19:02:13 +00001081 DAWN_TRY(ValidateIsAlive());
Austin Eng4d156092019-11-21 00:48:39 +00001082 if (IsValidationEnabled()) {
1083 DAWN_TRY(ValidatePipelineLayoutDescriptor(this, descriptor));
1084 }
Corentin Wallez50f99582021-03-31 18:36:32 +00001085 return GetOrCreatePipelineLayout(descriptor);
Corentin Wallez52f23832018-07-16 17:40:08 +02001086 }
1087
Corentin Wallez0af4a832021-04-19 08:52:35 +00001088 ResultOrError<Ref<ExternalTextureBase>> DeviceBase::CreateExternalTexture(
Brandon Jones0e92e9b2021-04-01 20:46:42 +00001089 const ExternalTextureDescriptor* descriptor) {
1090 if (IsValidationEnabled()) {
1091 DAWN_TRY(ValidateExternalTextureDescriptor(this, descriptor));
1092 }
1093
1094 return ExternalTextureBase::Create(this, descriptor);
1095 }
1096
Corentin Wallez0af4a832021-04-19 08:52:35 +00001097 ResultOrError<Ref<QuerySetBase>> DeviceBase::CreateQuerySet(
Corentin Wallez50f99582021-03-31 18:36:32 +00001098 const QuerySetDescriptor* descriptor) {
Hao Lib6eff5a2020-06-11 00:34:14 +00001099 DAWN_TRY(ValidateIsAlive());
1100 if (IsValidationEnabled()) {
1101 DAWN_TRY(ValidateQuerySetDescriptor(this, descriptor));
1102 }
Corentin Wallez50f99582021-03-31 18:36:32 +00001103 return CreateQuerySetImpl(descriptor);
Hao Lib6eff5a2020-06-11 00:34:14 +00001104 }
1105
Corentin Wallez0af4a832021-04-19 08:52:35 +00001106 ResultOrError<Ref<RenderBundleEncoder>> DeviceBase::CreateRenderBundleEncoder(
Austin Eng8a488c12019-08-13 22:12:54 +00001107 const RenderBundleEncoderDescriptor* descriptor) {
Natasha Lee0ecc48e2020-01-15 19:02:13 +00001108 DAWN_TRY(ValidateIsAlive());
Austin Eng4d156092019-11-21 00:48:39 +00001109 if (IsValidationEnabled()) {
1110 DAWN_TRY(ValidateRenderBundleEncoderDescriptor(this, descriptor));
1111 }
Corentin Wallez50f99582021-03-31 18:36:32 +00001112 return RenderBundleEncoder::Create(this, descriptor);
Austin Eng8a488c12019-08-13 22:12:54 +00001113 }
1114
Corentin Wallez0af4a832021-04-19 08:52:35 +00001115 ResultOrError<Ref<RenderPipelineBase>> DeviceBase::CreateRenderPipeline(
Brandon Jones41c87d92021-05-21 05:01:38 +00001116 const RenderPipelineDescriptor* descriptor) {
Brandon Jones7f77cfd2021-03-31 08:56:02 +00001117 DAWN_TRY(ValidateIsAlive());
1118 if (IsValidationEnabled()) {
1119 DAWN_TRY(ValidateRenderPipelineDescriptor(this, descriptor));
1120 }
1121
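        // When no layout is provided, build a default pipeline layout from the pipeline's
        // shader stages and point a copy of the descriptor at it before going through the
        // render pipeline cache.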
Brandon Jones7f77cfd2021-03-31 08:56:02 +00001122 if (descriptor->layout == nullptr) {
Brandon Jones41c87d92021-05-21 05:01:38 +00001123 RenderPipelineDescriptor descriptorWithDefaultLayout = *descriptor;
Brandon Jones3ceb6542021-04-01 01:28:52 +00001124
Brandon Jones7f77cfd2021-03-31 08:56:02 +00001125 // Ref will keep the pipeline layout alive until the end of the function where
1126 // the pipeline will take another reference.
Brandon Jones3ceb6542021-04-01 01:28:52 +00001127 Ref<PipelineLayoutBase> layoutRef;
Brandon Jones7f77cfd2021-03-31 08:56:02 +00001128 DAWN_TRY_ASSIGN(layoutRef,
1129 PipelineLayoutBase::CreateDefault(this, GetStages(descriptor)));
Brandon Jones3ceb6542021-04-01 01:28:52 +00001130 descriptorWithDefaultLayout.layout = layoutRef.Get();
Brandon Jones7f77cfd2021-03-31 08:56:02 +00001131
Brandon Jones3ceb6542021-04-01 01:28:52 +00001132 return GetOrCreateRenderPipeline(&descriptorWithDefaultLayout);
1133 } else {
1134 return GetOrCreateRenderPipeline(descriptor);
1135 }
Brandon Jones0702b702021-03-11 21:19:00 +00001136 }
1137
Corentin Wallez0af4a832021-04-19 08:52:35 +00001138 ResultOrError<Ref<SamplerBase>> DeviceBase::CreateSampler(
Corentin Wallez50f99582021-03-31 18:36:32 +00001139 const SamplerDescriptor* descriptor) {
shrekshaob3177d42021-01-26 02:22:58 +00001140 const SamplerDescriptor defaultDescriptor = {};
Natasha Lee0ecc48e2020-01-15 19:02:13 +00001141 DAWN_TRY(ValidateIsAlive());
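        // A nullptr descriptor requests a sampler with all-default values.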
shrekshaob3177d42021-01-26 02:22:58 +00001142 descriptor = descriptor != nullptr ? descriptor : &defaultDescriptor;
Austin Eng4d156092019-11-21 00:48:39 +00001143 if (IsValidationEnabled()) {
1144 DAWN_TRY(ValidateSamplerDescriptor(this, descriptor));
1145 }
Corentin Wallez50f99582021-03-31 18:36:32 +00001146 return GetOrCreateSampler(descriptor);
Corentin Wallez52f23832018-07-16 17:40:08 +02001147 }
1148
Corentin Wallez0af4a832021-04-19 08:52:35 +00001149 ResultOrError<Ref<ShaderModuleBase>> DeviceBase::CreateShaderModule(
Brandon Jones47b6b682021-04-08 04:25:11 +00001150 const ShaderModuleDescriptor* descriptor,
1151 ShaderModuleParseResult* parseResult) {
Natasha Lee0ecc48e2020-01-15 19:02:13 +00001152 DAWN_TRY(ValidateIsAlive());
Austin Eng0d948f72020-12-07 18:12:13 +00001153
Corentin Wallez0af4a832021-04-19 08:52:35 +00001154        // CreateShaderModule can be called from inside dawn_native. In that case the error is
1155        // handled internally in Dawn and the caller does not need the parse results, since
1156        // there should be no validation errors.
1157 ShaderModuleParseResult ignoredResults;
1158 if (parseResult == nullptr) {
1159 parseResult = &ignoredResults;
1160 }
1161
Austin Eng4d156092019-11-21 00:48:39 +00001162 if (IsValidationEnabled()) {
Brandon Jones47b6b682021-04-08 04:25:11 +00001163 DAWN_TRY(ValidateShaderModuleDescriptor(this, descriptor, parseResult));
Austin Eng4d156092019-11-21 00:48:39 +00001164 }
Austin Eng0d948f72020-12-07 18:12:13 +00001165
Brandon Jones47b6b682021-04-08 04:25:11 +00001166 return GetOrCreateShaderModule(descriptor, parseResult);
Corentin Wallezdf671032018-08-20 17:01:20 +02001167 }
1168
Corentin Wallez0af4a832021-04-19 08:52:35 +00001169 ResultOrError<Ref<SwapChainBase>> DeviceBase::CreateSwapChain(
Corentin Wallez50f99582021-03-31 18:36:32 +00001170 Surface* surface,
1171 const SwapChainDescriptor* descriptor) {
Natasha Lee0ecc48e2020-01-15 19:02:13 +00001172 DAWN_TRY(ValidateIsAlive());
Austin Eng4d156092019-11-21 00:48:39 +00001173 if (IsValidationEnabled()) {
Corentin Wallezd87e6762020-01-23 17:20:38 +00001174 DAWN_TRY(ValidateSwapChainDescriptor(this, surface, descriptor));
Austin Eng4d156092019-11-21 00:48:39 +00001175 }
Corentin Wallezd87e6762020-01-23 17:20:38 +00001176
Corentin Wallez25eeaa32020-10-27 11:31:26 +00001177 // TODO(dawn:269): Remove this code path once implementation-based swapchains are removed.
Corentin Wallezd87e6762020-01-23 17:20:38 +00001178 if (surface == nullptr) {
Corentin Wallez50f99582021-03-31 18:36:32 +00001179 return CreateSwapChainImpl(descriptor);
Corentin Wallezd87e6762020-01-23 17:20:38 +00001180 } else {
1181 ASSERT(descriptor->implementation == 0);
Corentin Wallezd26ee852020-01-25 10:05:40 +00001182
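            // For surface-based swapchains, the swapchain previously attached to the surface
            // (if any) is detached and the newly created one is attached in its place.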
1183 NewSwapChainBase* previousSwapChain = surface->GetAttachedSwapChain();
Corentin Wallez50f99582021-03-31 18:36:32 +00001184 ResultOrError<Ref<NewSwapChainBase>> maybeNewSwapChain =
Corentin Wallez25eeaa32020-10-27 11:31:26 +00001185 CreateSwapChainImpl(surface, previousSwapChain, descriptor);
Corentin Wallezd26ee852020-01-25 10:05:40 +00001186
1187 if (previousSwapChain != nullptr) {
Corentin Wallez25eeaa32020-10-27 11:31:26 +00001188 previousSwapChain->DetachFromSurface();
Corentin Wallezd26ee852020-01-25 10:05:40 +00001189 }
Corentin Wallezd26ee852020-01-25 10:05:40 +00001190
Corentin Wallez50f99582021-03-31 18:36:32 +00001191 Ref<NewSwapChainBase> newSwapChain;
Corentin Wallez25eeaa32020-10-27 11:31:26 +00001192 DAWN_TRY_ASSIGN(newSwapChain, std::move(maybeNewSwapChain));
1193
1194 newSwapChain->SetIsAttached();
Corentin Wallez50f99582021-03-31 18:36:32 +00001195 surface->SetAttachedSwapChain(newSwapChain.Get());
1196 return newSwapChain;
Corentin Wallezd87e6762020-01-23 17:20:38 +00001197 }
Corentin Wallez7be2a712019-02-15 11:15:58 +00001198 }
1199
Corentin Wallez0af4a832021-04-19 08:52:35 +00001200 ResultOrError<Ref<TextureBase>> DeviceBase::CreateTexture(const TextureDescriptor* descriptor) {
Natasha Lee0ecc48e2020-01-15 19:02:13 +00001201 DAWN_TRY(ValidateIsAlive());
Austin Eng4d156092019-11-21 00:48:39 +00001202 if (IsValidationEnabled()) {
Brandon Jones76e5a9f2021-05-13 17:51:23 +00001203 DAWN_TRY(ValidateTextureDescriptor(this, descriptor));
Austin Eng4d156092019-11-21 00:48:39 +00001204 }
Brandon Jones76e5a9f2021-05-13 17:51:23 +00001205 return CreateTextureImpl(descriptor);
Jiawei Shao425428f2018-08-27 08:44:48 +08001206 }
1207
Corentin Wallez0af4a832021-04-19 08:52:35 +00001208 ResultOrError<Ref<TextureViewBase>> DeviceBase::CreateTextureView(
Corentin Wallez50f99582021-03-31 18:36:32 +00001209 TextureBase* texture,
1210 const TextureViewDescriptor* descriptor) {
Natasha Lee0ecc48e2020-01-15 19:02:13 +00001211 DAWN_TRY(ValidateIsAlive());
Kai Ninomiya93196db2019-08-26 22:51:19 +00001212 DAWN_TRY(ValidateObject(texture));
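        // Fill in any missing view descriptor fields from the texture's own properties before
        // validating the result.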
1213 TextureViewDescriptor desc = GetTextureViewDescriptorWithDefaults(texture, descriptor);
Austin Eng4d156092019-11-21 00:48:39 +00001214 if (IsValidationEnabled()) {
Yunchao He1e45c5e2021-04-08 18:32:17 +00001215 DAWN_TRY(ValidateTextureViewDescriptor(this, texture, &desc));
Austin Eng4d156092019-11-21 00:48:39 +00001216 }
Corentin Wallez50f99582021-03-31 18:36:32 +00001217 return CreateTextureViewImpl(texture, &desc);
Jiawei Shao6329e5a2018-10-12 08:32:58 +00001218 }
1219
Corentin Wallez52f23832018-07-16 17:40:08 +02001220 // Other implementation details
1221
Bryan Bernhart450e2122019-09-18 22:06:41 +00001222 DynamicUploader* DeviceBase::GetDynamicUploader() const {
Bryan Bernhart67a73bd2019-02-15 21:18:40 +00001223 return mDynamicUploader.get();
1224 }
1225
Corentin Wallez022d0742020-04-06 16:55:22 +00001226 // The Toggle device facility
1227
1228 std::vector<const char*> DeviceBase::GetTogglesUsed() const {
1229 return mEnabledToggles.GetContainedToggleNames();
1230 }
1231
1232 bool DeviceBase::IsToggleEnabled(Toggle toggle) const {
1233 return mEnabledToggles.Has(toggle);
1234 }
1235
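    // SetToggle() leaves a toggle untouched when it was explicitly overridden through the
    // device descriptor, while ForceSetToggle() always applies the value and may log a warning
    // when doing so changes the toggle's current setting.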
Jiawei Shao15d4c2e2019-04-26 07:52:57 +00001236 void DeviceBase::SetToggle(Toggle toggle, bool isEnabled) {
Corentin Wallez022d0742020-04-06 16:55:22 +00001237 if (!mOverridenToggles.Has(toggle)) {
1238 mEnabledToggles.Set(toggle, isEnabled);
1239 }
1240 }
1241
1242 void DeviceBase::ForceSetToggle(Toggle toggle, bool isEnabled) {
1243 if (!mOverridenToggles.Has(toggle) && mEnabledToggles.Has(toggle) != isEnabled) {
1244 dawn::WarningLog() << "Forcing toggle \"" << ToggleEnumToName(toggle) << "\" to "
Jiawei Shaob6e141a2020-11-20 08:27:29 +00001245                            << isEnabled << " when it was overridden to be " << !isEnabled;
Corentin Wallez022d0742020-04-06 16:55:22 +00001246 }
Corentin Wallezcb84c792020-04-06 08:32:01 +00001247 mEnabledToggles.Set(toggle, isEnabled);
Jiawei Shao15d4c2e2019-04-26 07:52:57 +00001248 }
1249
Corentin Wallez022d0742020-04-06 16:55:22 +00001250 void DeviceBase::SetDefaultToggles() {
1251 SetToggle(Toggle::LazyClearResourceOnFirstUse, true);
Corentin Wallez07987ed2021-02-01 16:22:08 +00001252 SetToggle(Toggle::DisallowUnsafeAPIs, true);
Corentin Wallez022d0742020-04-06 16:55:22 +00001253 }
1254
1255 void DeviceBase::ApplyToggleOverrides(const DeviceDescriptor* deviceDescriptor) {
1256 ASSERT(deviceDescriptor);
1257
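        // Toggles listed in forceEnabledToggles / forceDisabledToggles are applied and recorded
        // in mOverridenToggles so that later SetToggle() calls cannot silently change them;
        // unrecognized toggle names are ignored.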
1258 for (const char* toggleName : deviceDescriptor->forceEnabledToggles) {
1259 Toggle toggle = GetAdapter()->GetInstance()->ToggleNameToEnum(toggleName);
1260 if (toggle != Toggle::InvalidEnum) {
1261 mEnabledToggles.Set(toggle, true);
1262 mOverridenToggles.Set(toggle, true);
1263 }
1264 }
1265 for (const char* toggleName : deviceDescriptor->forceDisabledToggles) {
1266 Toggle toggle = GetAdapter()->GetInstance()->ToggleNameToEnum(toggleName);
1267 if (toggle != Toggle::InvalidEnum) {
1268 mEnabledToggles.Set(toggle, false);
1269 mOverridenToggles.Set(toggle, true);
1270 }
1271 }
1272 }
1273
Corentin Wallezb2ea1912020-07-07 11:21:51 +00001274} // namespace dawn_native