// Copyright 2024 The Dawn & Tint Authors
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "dawn/native/SharedResourceMemory.h"
#include <utility>
#include "dawn/native/Buffer.h"
#include "dawn/native/ChainUtils.h"
#include "dawn/native/Device.h"
#include "dawn/native/Queue.h"
#include "dawn/native/Texture.h"
namespace dawn::native {
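
// SharedResource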
SharedResource::~SharedResource() = default;

SharedResourceMemoryContents* SharedResource::GetSharedResourceMemoryContents() const {
return mSharedResourceMemoryContents.Get();
}
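
// SharedResourceMemory

// Error-tag constructor: an error SharedResourceMemory still allocates a contents object so
// that GetContents() and the contents accessors remain usable.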
SharedResourceMemory::SharedResourceMemory(DeviceBase* device,
ObjectBase::ErrorTag tag,
const char* label)
: ApiObjectBase(device, tag, label),
mContents(new SharedResourceMemoryContents(GetWeakRef(this))) {}

SharedResourceMemory::~SharedResourceMemory() = default;

void SharedResourceMemory::DestroyImpl() {}
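
// Access-state queries on the contents shared by the memory and the resources created from
// it. The state itself is driven by SharedResourceMemory::BeginAccess and EndAccess below.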
bool SharedResourceMemoryContents::HasWriteAccess() const {
return mSharedResourceAccessState == SharedResourceAccessState::Write;
}

bool SharedResourceMemoryContents::HasExclusiveReadAccess() const {
return mSharedResourceAccessState == SharedResourceAccessState::ExclusiveRead;
}

int SharedResourceMemoryContents::GetReadAccessCount() const {
return mReadAccessCount;
}
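
// Allocates the contents object that tracks the access state and pending fences for
// resources created from this memory. Called once the memory is successfully created.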
void SharedResourceMemory::Initialize() {
DAWN_ASSERT(!IsError());
mContents = CreateContents();
}

Ref<SharedResourceMemoryContents> SharedResourceMemory::CreateContents() {
return AcquireRef(new SharedResourceMemoryContents(GetWeakRef(this)));
}

SharedResourceMemoryContents* SharedResourceMemory::GetContents() const {
return mContents.Get();
}
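
// Checks that |resource| was created from this memory: its contents must exist and must
// point back to this SharedResourceMemory.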
MaybeError SharedResourceMemory::ValidateResourceCreatedFromSelf(SharedResource* resource) {
auto* contents = resource->GetSharedResourceMemoryContents();
DAWN_INVALID_IF(contents == nullptr, "%s was not created from %s.", resource, this);
auto* sharedResourceMemory =
resource->GetSharedResourceMemoryContents()->GetSharedResourceMemory().Promote().Get();
DAWN_INVALID_IF(sharedResourceMemory != this, "%s created from %s cannot be used with %s.",
resource, sharedResourceMemory, this);
return {};
}
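
// wgpu-facing BeginAccess entry points. Errors are routed through the device via
// ConsumedError and reported to the caller as a boolean result.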
bool SharedResourceMemory::APIBeginAccess(
TextureBase* texture,
const SharedTextureMemoryBeginAccessDescriptor* descriptor) {
return !GetDevice()->ConsumedError(BeginAccess(texture, descriptor),
"calling %s.BeginAccess(%s).", this, texture);
}

bool SharedResourceMemory::APIBeginAccess(
BufferBase* buffer,
const SharedBufferMemoryBeginAccessDescriptor* descriptor) {
return !GetDevice()->ConsumedError(BeginAccess(buffer, descriptor),
"calling %s.BeginAccess(%s).", this, buffer);
}
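
// Validates a begin-access request against the current access state and then updates it:
// - A read-only texture with concurrentRead set takes simultaneous read access and
//   increments the read count.
// - A read-only texture without concurrentRead takes exclusive read access.
// - A writable texture, and any buffer, takes exclusive write access.
// On success the descriptor's fences are recorded as pending fences on the contents, the
// resource is marked as accessed, and its initialization state is set from the descriptor.
// The matching EndAccess call hands fences back to the caller through the end-access state.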
template <typename Resource, typename BeginAccessDescriptor>
MaybeError SharedResourceMemory::BeginAccess(Resource* resource,
const BeginAccessDescriptor* rawDescriptor) {
DAWN_TRY(GetDevice()->ValidateIsAlive());
DAWN_TRY(GetDevice()->ValidateObject(resource));
UnpackedPtr<BeginAccessDescriptor> descriptor;
DAWN_TRY_ASSIGN(descriptor, ValidateAndUnpack(rawDescriptor));
for (size_t i = 0; i < descriptor->fenceCount; ++i) {
DAWN_TRY(GetDevice()->ValidateObject(descriptor->fences[i]));
}
DAWN_TRY(ValidateResourceCreatedFromSelf(resource));
DAWN_INVALID_IF(resource->IsDestroyed(), "%s has been destroyed.", resource);
DAWN_INVALID_IF(resource->HasAccess(), "%s is already used to access %s.", resource, this);
if constexpr (std::is_same_v<Resource, TextureBase>) {
DAWN_INVALID_IF(resource->GetFormat().IsMultiPlanar() && !descriptor->initialized,
"%s with multiplanar format (%s) must be initialized.", resource,
resource->GetFormat().format);
DAWN_INVALID_IF(mContents->HasWriteAccess(), "%s is currently accessed for writing.", this);
DAWN_INVALID_IF(mContents->HasExclusiveReadAccess(),
"%s is currently accessed for exclusive reading.", this);
if (static_cast<TextureBase*>(resource)->IsReadOnly()) {
if (descriptor->concurrentRead) {
DAWN_ASSERT(!mExclusiveAccess);
DAWN_INVALID_IF(!descriptor->initialized, "Concurrent reading of an uninitialized %s.",
resource);
++mContents->mReadAccessCount;
mContents->mSharedResourceAccessState = SharedResourceAccessState::SimultaneousRead;
} else {
DAWN_INVALID_IF(
mContents->mReadAccessCount != 0,
"Exclusive read access used while %s is currently accessed for reading.", this);
mContents->mSharedResourceAccessState = SharedResourceAccessState::ExclusiveRead;
mExclusiveAccess = resource;
}
} else {
DAWN_INVALID_IF(descriptor->concurrentRead, "Concurrent reading of read-write %s.",
resource);
DAWN_INVALID_IF(mContents->mReadAccessCount != 0,
"Read-write access used while %s is currently accessed for reading.",
this);
mContents->mSharedResourceAccessState = SharedResourceAccessState::Write;
mExclusiveAccess = resource;
}
} else if constexpr (std::is_same_v<Resource, BufferBase>) {
DAWN_INVALID_IF(mExclusiveAccess != nullptr,
"Cannot begin access with %s on %s which is currently accessed by %s.",
resource, this, mExclusiveAccess.Get());
mContents->mSharedResourceAccessState = SharedResourceAccessState::Write;
mExclusiveAccess = resource;
}
DAWN_TRY(BeginAccessImpl(resource, descriptor));
for (size_t i = 0; i < descriptor->fenceCount; ++i) {
mContents->mPendingFences->push_back(
{descriptor->fences[i], descriptor->signaledValues[i]});
}
DAWN_ASSERT(!resource->IsError());
resource->SetHasAccess(true);
resource->SetInitialized(descriptor->initialized);
return {};
}
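
// EndAccess entry points return whether the access was actually released. Errors are
// reported on the device, but when the access does end the output state is still written
// (see the comment in EndAccess below).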
bool SharedResourceMemory::APIEndAccess(TextureBase* texture,
SharedTextureMemoryEndAccessState* state) {
bool didEnd = false;
[[maybe_unused]] bool hadError = GetDevice()->ConsumedError(
EndAccess(texture, state, &didEnd), "calling %s.EndAccess(%s).", this, texture);
return didEnd;
}

bool SharedResourceMemory::APIEndAccess(BufferBase* buffer,
SharedBufferMemoryEndAccessState* state) {
bool didEnd = false;
[[maybe_unused]] bool hadError = GetDevice()->ConsumedError(
EndAccess(buffer, state, &didEnd), "calling %s.EndAccess(%s).", this, buffer);
return didEnd;
}
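
// Default implementations of the backend hooks. A concrete shared-memory backend is
// expected to override the overloads matching the resource type it supports, so reaching
// the other overload indicates a bug.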
MaybeError SharedResourceMemory::BeginAccessImpl(
TextureBase* texture,
const UnpackedPtr<SharedTextureMemoryBeginAccessDescriptor>& descriptor) {
DAWN_UNREACHABLE();
}

MaybeError SharedResourceMemory::BeginAccessImpl(
BufferBase* buffer,
const UnpackedPtr<SharedBufferMemoryBeginAccessDescriptor>& descriptor) {
DAWN_UNREACHABLE();
}

ResultOrError<FenceAndSignalValue> SharedResourceMemory::EndAccessImpl(
TextureBase* texture,
UnpackedPtr<SharedTextureMemoryEndAccessState>& state) {
DAWN_UNREACHABLE();
}

ResultOrError<FenceAndSignalValue> SharedResourceMemory::EndAccessImpl(
BufferBase* buffer,
UnpackedPtr<SharedBufferMemoryEndAccessState>& state) {
DAWN_UNREACHABLE();
}

bool SharedResourceMemory::APIIsDeviceLost() const {
return GetDevice()->IsLost();
}
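
// Releases the access held by |resource|: updates the access state, acquires the pending
// fences, and writes them to the output state together with any fence produced by
// EndAccessInternal. The output state is written even if EndAccessInternal fails.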
template <typename Resource, typename EndAccessState>
MaybeError SharedResourceMemory::EndAccess(Resource* resource,
EndAccessState* state,
bool* didEnd) {
DAWN_TRY(GetDevice()->ValidateObject(resource));
DAWN_TRY(ValidateResourceCreatedFromSelf(resource));
DAWN_INVALID_IF(!resource->HasAccess(), "%s is not currently being accessed.", resource);
if constexpr (std::is_same_v<Resource, TextureBase>) {
if (static_cast<TextureBase*>(resource)->IsReadOnly()) {
DAWN_ASSERT(!mContents->HasWriteAccess());
if (mContents->HasExclusiveReadAccess()) {
DAWN_ASSERT(mContents->mReadAccessCount == 0);
mContents->mSharedResourceAccessState = SharedResourceAccessState::NotAccessed;
mExclusiveAccess = nullptr;
} else {
DAWN_ASSERT(mContents->mSharedResourceAccessState ==
SharedResourceAccessState::SimultaneousRead);
DAWN_ASSERT(mExclusiveAccess == nullptr);
--mContents->mReadAccessCount;
if (mContents->mReadAccessCount == 0) {
mContents->mSharedResourceAccessState = SharedResourceAccessState::NotAccessed;
}
}
} else {
DAWN_ASSERT(mContents->mSharedResourceAccessState == SharedResourceAccessState::Write);
DAWN_ASSERT(mContents->mReadAccessCount == 0);
mContents->mSharedResourceAccessState = SharedResourceAccessState::NotAccessed;
mExclusiveAccess = nullptr;
}
} else if constexpr (std::is_same_v<Resource, BufferBase>) {
DAWN_INVALID_IF(
static_cast<BufferBase*>(resource)->APIGetMapState() != wgpu::BufferMapState::Unmapped,
"%s is currently mapped or pending map.", resource);
DAWN_INVALID_IF(mExclusiveAccess != resource,
"Cannot end access with %s on %s which is currently accessed by %s.",
resource, this, mExclusiveAccess.Get());
mContents->mSharedResourceAccessState = SharedResourceAccessState::NotAccessed;
mExclusiveAccess = nullptr;
}
PendingFenceList fenceList;
mContents->AcquirePendingFences(&fenceList);
DAWN_ASSERT(!resource->IsError());
resource->SetHasAccess(false);
*didEnd = true;
// Call the error-generating part of the EndAccess implementation. This is separated out because
// writing the output state must happen regardless of whether or not EndAccessInternal
// succeeds.
MaybeError err;
{
ResultOrError<FenceAndSignalValue> result = EndAccessInternal(resource, state);
if (result.IsSuccess()) {
fenceList->push_back(result.AcquireSuccess());
} else {
err = result.AcquireError();
}
}
// Copy the fences to the output state.
if (size_t fenceCount = fenceList->size()) {
auto* fences = new SharedFenceBase*[fenceCount];
uint64_t* signaledValues = new uint64_t[fenceCount];
for (size_t i = 0; i < fenceCount; ++i) {
fences[i] = ReturnToAPI(std::move(fenceList[i].object));
signaledValues[i] = fenceList[i].signaledValue;
}
state->fenceCount = fenceCount;
state->fences = fences;
state->signaledValues = signaledValues;
} else {
state->fenceCount = 0;
state->fences = nullptr;
state->signaledValues = nullptr;
}
state->initialized = resource->IsInitialized();
return err;
}
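
// Validates and unpacks the chained end-access state, flushes any commands recorded up to
// the last usage serial, and defers to EndAccessImpl to produce the fence to export.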
template <typename Resource, typename EndAccessState>
ResultOrError<FenceAndSignalValue> SharedResourceMemory::EndAccessInternal(
Resource* resource,
EndAccessState* rawState) {
UnpackedPtr<EndAccessState> state;
DAWN_TRY_ASSIGN(state, ValidateAndUnpack(rawState));
// Ensure that commands are submitted before exporting fences with the last usage serial.
DAWN_TRY(GetDevice()->GetQueue()->EnsureCommandsFlushed(mContents->GetLastUsageSerial()));
return EndAccessImpl(resource, state);
}

// SharedResourceMemoryContents
SharedResourceMemoryContents::SharedResourceMemoryContents(
WeakRef<SharedResourceMemory> sharedResourceMemory)
: mSharedResourceMemory(std::move(sharedResourceMemory)) {}

const WeakRef<SharedResourceMemory>& SharedResourceMemoryContents::GetSharedResourceMemory() const {
return mSharedResourceMemory;
}
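
// Transfers the fences accumulated by BeginAccess into |fences| and clears the pending list.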
void SharedResourceMemoryContents::AcquirePendingFences(PendingFenceList* fences) {
*fences = mPendingFences;
mPendingFences->clear();
}
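
// The last usage serial records the most recent queue serial on which resources created
// from this memory were used; EndAccessInternal flushes commands up to it before exporting
// fences.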
void SharedResourceMemoryContents::SetLastUsageSerial(ExecutionSerial lastUsageSerial) {
mLastUsageSerial = lastUsageSerial;
}

ExecutionSerial SharedResourceMemoryContents::GetLastUsageSerial() const {
return mLastUsageSerial;
}

} // namespace dawn::native