// Copyright 2019 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "dawn_native/vulkan/UtilsVulkan.h"

#include "common/Assert.h"
#include "dawn_native/EnumMaskIterator.h"
#include "dawn_native/Format.h"
#include "dawn_native/Pipeline.h"
#include "dawn_native/ShaderModule.h"
#include "dawn_native/vulkan/DeviceVk.h"
#include "dawn_native/vulkan/Forward.h"
#include "dawn_native/vulkan/TextureVk.h"
#include "dawn_native/vulkan/VulkanError.h"

namespace dawn_native { namespace vulkan {

    VkCompareOp ToVulkanCompareOp(wgpu::CompareFunction op) {
        switch (op) {
            case wgpu::CompareFunction::Never:
                return VK_COMPARE_OP_NEVER;
            case wgpu::CompareFunction::Less:
                return VK_COMPARE_OP_LESS;
            case wgpu::CompareFunction::LessEqual:
                return VK_COMPARE_OP_LESS_OR_EQUAL;
            case wgpu::CompareFunction::Greater:
                return VK_COMPARE_OP_GREATER;
            case wgpu::CompareFunction::GreaterEqual:
                return VK_COMPARE_OP_GREATER_OR_EQUAL;
            case wgpu::CompareFunction::Equal:
                return VK_COMPARE_OP_EQUAL;
            case wgpu::CompareFunction::NotEqual:
                return VK_COMPARE_OP_NOT_EQUAL;
            case wgpu::CompareFunction::Always:
                return VK_COMPARE_OP_ALWAYS;

            case wgpu::CompareFunction::Undefined:
                break;
        }
        UNREACHABLE();
    }

    // Convert Dawn texture aspects to Vulkan texture aspect flags
    VkImageAspectFlags VulkanAspectMask(const Aspect& aspects) {
        VkImageAspectFlags flags = 0;
        for (Aspect aspect : IterateEnumMask(aspects)) {
            switch (aspect) {
                case Aspect::Color:
                    flags |= VK_IMAGE_ASPECT_COLOR_BIT;
                    break;
                case Aspect::Depth:
                    flags |= VK_IMAGE_ASPECT_DEPTH_BIT;
                    break;
                case Aspect::Stencil:
                    flags |= VK_IMAGE_ASPECT_STENCIL_BIT;
                    break;

                case Aspect::CombinedDepthStencil:
                    flags |= VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
                    break;

                case Aspect::Plane0:
                case Aspect::Plane1:
                case Aspect::None:
                    UNREACHABLE();
            }
        }
        return flags;
    }

    // The Vulkan spec requires that the source/destination region described by each element of
    // pRegions be entirely contained within srcImage/dstImage. The image size here refers to the
    // virtual size, while Dawn validates the texture copy extent against the physical size, so
    // we need to recompute the copy extent to ensure it fits within the virtual size of the
    // subresource.
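    // As an illustration only (numbers not from the original source): for a block-compressed
    // format with 4x4 blocks and a 12x12 base size, mip level 2 has a virtual size of 3x3 while
    // its physical size is rounded up to 4x4. A copy extent validated against the 4x4 physical
    // size must be clamped to 3x3 here so the Vulkan copy region stays inside the subresource.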
    Extent3D ComputeTextureCopyExtent(const TextureCopy& textureCopy, const Extent3D& copySize) {
        Extent3D validTextureCopyExtent = copySize;
        const TextureBase* texture = textureCopy.texture.Get();
        Extent3D virtualSizeAtLevel = texture->GetMipLevelVirtualSize(textureCopy.mipLevel);
        ASSERT(textureCopy.origin.x <= virtualSizeAtLevel.width);
        ASSERT(textureCopy.origin.y <= virtualSizeAtLevel.height);
        if (copySize.width > virtualSizeAtLevel.width - textureCopy.origin.x) {
            ASSERT(texture->GetFormat().isCompressed);
            validTextureCopyExtent.width = virtualSizeAtLevel.width - textureCopy.origin.x;
        }
        if (copySize.height > virtualSizeAtLevel.height - textureCopy.origin.y) {
            ASSERT(texture->GetFormat().isCompressed);
            validTextureCopyExtent.height = virtualSizeAtLevel.height - textureCopy.origin.y;
        }

        return validTextureCopyExtent;
    }

    VkBufferImageCopy ComputeBufferImageCopyRegion(const BufferCopy& bufferCopy,
                                                   const TextureCopy& textureCopy,
                                                   const Extent3D& copySize) {
        TextureDataLayout passDataLayout;
        passDataLayout.offset = bufferCopy.offset;
        passDataLayout.rowsPerImage = bufferCopy.rowsPerImage;
        passDataLayout.bytesPerRow = bufferCopy.bytesPerRow;
        return ComputeBufferImageCopyRegion(passDataLayout, textureCopy, copySize);
    }

    VkBufferImageCopy ComputeBufferImageCopyRegion(const TextureDataLayout& dataLayout,
                                                   const TextureCopy& textureCopy,
                                                   const Extent3D& copySize) {
        const Texture* texture = ToBackend(textureCopy.texture.Get());

        VkBufferImageCopy region;

        region.bufferOffset = dataLayout.offset;
        // In Vulkan the row length is in texels while it is in bytes for Dawn
        const TexelBlockInfo& blockInfo =
            texture->GetFormat().GetAspectInfo(textureCopy.aspect).block;
        ASSERT(dataLayout.bytesPerRow % blockInfo.byteSize == 0);
        region.bufferRowLength = dataLayout.bytesPerRow / blockInfo.byteSize * blockInfo.width;
        region.bufferImageHeight = dataLayout.rowsPerImage * blockInfo.height;
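        // Illustrative numbers (not from the original source): for BC1 (8-byte 4x4 blocks) with
        // bytesPerRow = 256 and rowsPerImage = 32 block rows, this gives
        // bufferRowLength = 256 / 8 * 4 = 128 texels and bufferImageHeight = 32 * 4 = 128 texels.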

        region.imageSubresource.aspectMask = VulkanAspectMask(textureCopy.aspect);
        region.imageSubresource.mipLevel = textureCopy.mipLevel;

        switch (textureCopy.texture->GetDimension()) {
            case wgpu::TextureDimension::e2D: {
                region.imageOffset.x = textureCopy.origin.x;
                region.imageOffset.y = textureCopy.origin.y;
                region.imageOffset.z = 0;

                region.imageSubresource.baseArrayLayer = textureCopy.origin.z;
                region.imageSubresource.layerCount = copySize.depthOrArrayLayers;

                Extent3D imageExtent = ComputeTextureCopyExtent(textureCopy, copySize);
                region.imageExtent.width = imageExtent.width;
                region.imageExtent.height = imageExtent.height;
                region.imageExtent.depth = 1;
                break;
            }

            case wgpu::TextureDimension::e3D: {
                region.imageOffset.x = textureCopy.origin.x;
                region.imageOffset.y = textureCopy.origin.y;
                region.imageOffset.z = textureCopy.origin.z;

                region.imageSubresource.baseArrayLayer = 0;
                region.imageSubresource.layerCount = 1;

                Extent3D imageExtent = ComputeTextureCopyExtent(textureCopy, copySize);
                region.imageExtent.width = imageExtent.width;
                region.imageExtent.height = imageExtent.height;
                region.imageExtent.depth = imageExtent.depthOrArrayLayers;
                break;
            }

            case wgpu::TextureDimension::e1D:
                UNREACHABLE();
        }

        return region;
    }

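    // Attaches a human-readable name to a Vulkan object through VK_EXT_debug_utils (when the
    // instance extension is available) so the object can be identified in validation messages
    // and debugging tools. The name is the prefix alone, or "prefix_label" when a non-empty
    // label is given and the UseUserDefinedLabelsInBackend toggle is enabled.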
    void SetDebugName(Device* device,
                      VkObjectType objectType,
                      uint64_t objectHandle,
                      const char* prefix,
                      std::string label) {
        if (!objectHandle) {
            return;
        }

        if (device->GetGlobalInfo().HasExt(InstanceExt::DebugUtils)) {
            VkDebugUtilsObjectNameInfoEXT objectNameInfo;
            objectNameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
            objectNameInfo.pNext = nullptr;
            objectNameInfo.objectType = objectType;
            objectNameInfo.objectHandle = objectHandle;

            if (label.empty() || !device->IsToggleEnabled(Toggle::UseUserDefinedLabelsInBackend)) {
                objectNameInfo.pObjectName = prefix;
                device->fn.SetDebugUtilsObjectNameEXT(device->GetVkDevice(), &objectNameInfo);
                return;
            }

            std::string objectName = prefix;
            objectName += "_";
            objectName += label;
            objectNameInfo.pObjectName = objectName.c_str();
            device->fn.SetDebugUtilsObjectNameEXT(device->GetVkDevice(), &objectNameInfo);
        }
    }

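    // Builds the VkSpecializationInfo used to set overridable constants on a pipeline stage.
    // Returns nullptr when the stage overrides no constants. The caller-provided vectors back
    // the memory that the returned struct points at, so they must remain alive until the
    // pipeline has been created.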
    VkSpecializationInfo* GetVkSpecializationInfo(
        const ProgrammableStage& programmableStage,
        VkSpecializationInfo* specializationInfo,
        std::vector<OverridableConstantScalar>* specializationDataEntries,
        std::vector<VkSpecializationMapEntry>* specializationMapEntries) {
        ASSERT(specializationInfo);
        ASSERT(specializationDataEntries);
        ASSERT(specializationMapEntries);

        if (programmableStage.constants.size() == 0) {
            return nullptr;
        }

        const EntryPointMetadata& entryPointMetaData =
            programmableStage.module->GetEntryPoint(programmableStage.entryPoint);

        for (const auto& pipelineConstant : programmableStage.constants) {
            const std::string& identifier = pipelineConstant.first;
            double value = pipelineConstant.second;

            // This is already validated so `identifier` must exist
            const auto& moduleConstant = entryPointMetaData.overridableConstants.at(identifier);

            specializationMapEntries->push_back(
                VkSpecializationMapEntry{moduleConstant.id,
                                         static_cast<uint32_t>(specializationDataEntries->size() *
                                                               sizeof(OverridableConstantScalar)),
                                         sizeof(OverridableConstantScalar)});

            OverridableConstantScalar entry{};
            switch (moduleConstant.type) {
                case EntryPointMetadata::OverridableConstant::Type::Boolean:
                    entry.b = static_cast<int32_t>(value);
                    break;
                case EntryPointMetadata::OverridableConstant::Type::Float32:
                    entry.f32 = static_cast<float>(value);
                    break;
                case EntryPointMetadata::OverridableConstant::Type::Int32:
                    entry.i32 = static_cast<int32_t>(value);
                    break;
                case EntryPointMetadata::OverridableConstant::Type::Uint32:
                    entry.u32 = static_cast<uint32_t>(value);
                    break;
                default:
                    UNREACHABLE();
            }
            specializationDataEntries->push_back(entry);
        }

        specializationInfo->mapEntryCount =
            static_cast<uint32_t>(specializationMapEntries->size());
        specializationInfo->pMapEntries = specializationMapEntries->data();
        specializationInfo->dataSize =
            specializationDataEntries->size() * sizeof(OverridableConstantScalar);
        specializationInfo->pData = specializationDataEntries->data();

        return specializationInfo;
    }

}}  // namespace dawn_native::vulkan