// Copyright 2020 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "tests/DawnTest.h"

#include "dawn_native/Device.h"
#include "dawn_native/Toggles.h"
#include "dawn_native/d3d12/BindGroupLayoutD3D12.h"
#include "dawn_native/d3d12/DeviceD3D12.h"
#include "dawn_native/d3d12/ShaderVisibleDescriptorAllocatorD3D12.h"
#include "dawn_native/d3d12/StagingDescriptorAllocatorD3D12.h"
#include "utils/ComboRenderPipelineDescriptor.h"
#include "utils/WGPUHelpers.h"

constexpr uint32_t kRTSize = 4;

// Pooling tests are required to advance the GPU completed serial to reuse heaps.
// This requires Tick() to be called at least |kFrameDepth| times. This constant
// should be updated if the internals of Tick() change.
constexpr uint32_t kFrameDepth = 2;

using namespace dawn_native::d3d12;

class D3D12DescriptorHeapTests : public DawnTest {
  protected:
    void SetUp() override {
        DawnTest::SetUp();
        DAWN_TEST_UNSUPPORTED_IF(UsesWire());
        mD3DDevice = reinterpret_cast<Device*>(device.Get());

        mSimpleVSModule = utils::CreateShaderModule(device, R"(
            [[stage(vertex)]] fn main(
                [[builtin(vertex_index)]] VertexIndex : u32
            ) -> [[builtin(position)]] vec4<f32> {
                var pos = array<vec2<f32>, 3>(
                    vec2<f32>(-1.0,  1.0),
                    vec2<f32>( 1.0,  1.0),
                    vec2<f32>(-1.0, -1.0)
                );
                return vec4<f32>(pos[VertexIndex], 0.0, 1.0);
            })");

        mSimpleFSModule = utils::CreateShaderModule(device, R"(
            struct U {
                color : vec4<f32>;
            };
            [[group(0), binding(0)]] var<uniform> colorBuffer : U;

            [[stage(fragment)]] fn main() -> [[location(0)]] vec4<f32> {
                return colorBuffer.color;
            })");
    }

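    // Creates a render pass like utils::CreateBasicRenderPass, but with a caller-specified
    // color attachment format.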
    utils::BasicRenderPass MakeRenderPass(uint32_t width,
                                          uint32_t height,
                                          wgpu::TextureFormat format) {
        DAWN_ASSERT(width > 0 && height > 0);

        wgpu::TextureDescriptor descriptor;
        descriptor.dimension = wgpu::TextureDimension::e2D;
        descriptor.size.width = width;
        descriptor.size.height = height;
        descriptor.size.depthOrArrayLayers = 1;
        descriptor.sampleCount = 1;
        descriptor.format = format;
        descriptor.mipLevelCount = 1;
        descriptor.usage = wgpu::TextureUsage::RenderAttachment | wgpu::TextureUsage::CopySrc;
        wgpu::Texture color = device.CreateTexture(&descriptor);

        return utils::BasicRenderPass(width, height, color);
    }

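    // Converts a 24-bit index into a solid RGBA color (alpha = 1); callers pass distinct
    // indices so each bindgroup's uniform data is unique.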
    std::array<float, 4> GetSolidColor(uint32_t n) const {
        ASSERT(n >> 24 == 0);
        float b = (n & 0xFF) / 255.0f;
        float g = ((n >> 8) & 0xFF) / 255.0f;
        float r = ((n >> 16) & 0xFF) / 255.0f;
        return {r, g, b, 1};
    }

    Device* mD3DDevice = nullptr;

    wgpu::ShaderModule mSimpleVSModule;
    wgpu::ShaderModule mSimpleFSModule;
};

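// Wraps a sampler-type StagingDescriptorAllocator so tests can directly allocate and
// deallocate CPU descriptor heap allocations.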
class DummyStagingDescriptorAllocator {
  public:
    DummyStagingDescriptorAllocator(Device* device,
                                    uint32_t descriptorCount,
                                    uint32_t allocationsPerHeap)
        : mAllocator(device,
                     descriptorCount,
                     allocationsPerHeap * descriptorCount,
                     D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER) {
    }

    CPUDescriptorHeapAllocation AllocateCPUDescriptors() {
        dawn_native::ResultOrError<CPUDescriptorHeapAllocation> result =
            mAllocator.AllocateCPUDescriptors();
        return (result.IsSuccess()) ? result.AcquireSuccess() : CPUDescriptorHeapAllocation{};
    }

    void Deallocate(CPUDescriptorHeapAllocation& allocation) {
        mAllocator.Deallocate(&allocation);
    }

  private:
    StagingDescriptorAllocator mAllocator;
};

// Verify the shader visible view heap switches over within a single submit.
TEST_P(D3D12DescriptorHeapTests, SwitchOverViewHeap) {
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    utils::ComboRenderPipelineDescriptor renderPipelineDescriptor;

    // Fill in a view heap with "view only" bindgroups (1x view per group) by creating a
    // view bindgroup each draw. After HEAP_SIZE + 1 draws, the heaps must switch over.
    renderPipelineDescriptor.vertex.module = mSimpleVSModule;
    renderPipelineDescriptor.cFragment.module = mSimpleFSModule;

    wgpu::RenderPipeline renderPipeline = device.CreateRenderPipeline(&renderPipelineDescriptor);
    utils::BasicRenderPass renderPass = utils::CreateBasicRenderPass(device, kRTSize, kRTSize);

    Device* d3dDevice = reinterpret_cast<Device*>(device.Get());
    ShaderVisibleDescriptorAllocator* allocator =
        d3dDevice->GetViewShaderVisibleDescriptorAllocator();
    const uint64_t heapSize = allocator->GetShaderVisibleHeapSizeForTesting();

    const HeapVersionID heapSerial = allocator->GetShaderVisibleHeapSerialForTesting();

    wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
    {
        wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

        pass.SetPipeline(renderPipeline);

        std::array<float, 4> redColor = {1, 0, 0, 1};
        wgpu::Buffer uniformBuffer = utils::CreateBufferFromData(
            device, &redColor, sizeof(redColor), wgpu::BufferUsage::Uniform);

        for (uint32_t i = 0; i < heapSize + 1; ++i) {
            pass.SetBindGroup(0, utils::MakeBindGroup(device, renderPipeline.GetBindGroupLayout(0),
                                                      {{0, uniformBuffer, 0, sizeof(redColor)}}));
            pass.Draw(3);
        }

        pass.EndPass();
    }

    wgpu::CommandBuffer commands = encoder.Finish();
    queue.Submit(1, &commands);

    EXPECT_EQ(allocator->GetShaderVisibleHeapSerialForTesting(), heapSerial + HeapVersionID(1));
}

// Verify the shader visible sampler heap does not switch over within a single submit.
TEST_P(D3D12DescriptorHeapTests, NoSwitchOverSamplerHeap) {
    utils::ComboRenderPipelineDescriptor renderPipelineDescriptor;

    // Fill in a sampler heap with "sampler only" bindgroups (1x sampler per group) by creating a
    // sampler bindgroup each draw. After HEAP_SIZE + 1 draws, the heaps WILL NOT switch over
    // because the sampler heap allocations are de-duplicated.
    renderPipelineDescriptor.vertex.module = utils::CreateShaderModule(device, R"(
        [[stage(vertex)]] fn main() -> [[builtin(position)]] vec4<f32> {
            return vec4<f32>(0.0, 0.0, 0.0, 1.0);
        })");

    renderPipelineDescriptor.cFragment.module = utils::CreateShaderModule(device, R"(
        [[group(0), binding(0)]] var sampler0 : sampler;
        [[stage(fragment)]] fn main() -> [[location(0)]] vec4<f32> {
            _ = sampler0;
            return vec4<f32>(0.0, 0.0, 0.0, 0.0);
        })");

    wgpu::RenderPipeline renderPipeline = device.CreateRenderPipeline(&renderPipelineDescriptor);
    utils::BasicRenderPass renderPass = utils::CreateBasicRenderPass(device, kRTSize, kRTSize);

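    // A single sampler is reused for every bindgroup; because sampler descriptor allocations
    // are de-duplicated, the heap never fills and no switch-over occurs.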
    wgpu::Sampler sampler = device.CreateSampler();

    Device* d3dDevice = reinterpret_cast<Device*>(device.Get());
    ShaderVisibleDescriptorAllocator* allocator =
        d3dDevice->GetSamplerShaderVisibleDescriptorAllocator();
    const uint64_t samplerHeapSize = allocator->GetShaderVisibleHeapSizeForTesting();

    const HeapVersionID heapSerial = allocator->GetShaderVisibleHeapSerialForTesting();

    wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
    {
        wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

        pass.SetPipeline(renderPipeline);

        for (uint32_t i = 0; i < samplerHeapSize + 1; ++i) {
            pass.SetBindGroup(0, utils::MakeBindGroup(device, renderPipeline.GetBindGroupLayout(0),
                                                      {{0, sampler}}));
            pass.Draw(3);
        }

        pass.EndPass();
    }

    wgpu::CommandBuffer commands = encoder.Finish();
    queue.Submit(1, &commands);

    EXPECT_EQ(allocator->GetShaderVisibleHeapSerialForTesting(), heapSerial);
}

// Verify shader-visible heaps can be recycled for multiple submits.
TEST_P(D3D12DescriptorHeapTests, PoolHeapsInMultipleSubmits) {
    // Use small heaps to count only pool-allocated switches.
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    ShaderVisibleDescriptorAllocator* allocator =
        mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator();

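    // Seed the tracking list with the currently active heap so it is included in the
    // uniqueness checks below.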
    std::list<ComPtr<ID3D12DescriptorHeap>> heaps = {allocator->GetShaderVisibleHeap()};

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 0u);

    // Allocate + increment internal serials up to |kFrameDepth| and ensure heaps are always
    // unique.
    for (uint32_t i = 0; i < kFrameDepth; i++) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(std::find(heaps.begin(), heaps.end(), heap) == heaps.end());
        heaps.push_back(heap);
        // CheckPassedSerials() will update the last internally completed serial.
        EXPECT_TRUE(mD3DDevice->CheckPassedSerials().IsSuccess());
        // NextSerial() will increment the last internally submitted serial.
        EXPECT_TRUE(mD3DDevice->NextSerial().IsSuccess());
    }

    // Repeat up to |kFrameDepth| again but ensure heaps are the same in the expected order
    // (oldest heaps are recycled first). The "+ 1" is so we also include the very first heap in
    // the check.
    for (uint32_t i = 0; i < kFrameDepth + 1; i++) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(heaps.front() == heap);
        heaps.pop_front();
        EXPECT_TRUE(mD3DDevice->CheckPassedSerials().IsSuccess());
        EXPECT_TRUE(mD3DDevice->NextSerial().IsSuccess());
    }

    EXPECT_TRUE(heaps.empty());
    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), kFrameDepth);
}

// Verify shader-visible heaps do not recycle in a pending submit.
TEST_P(D3D12DescriptorHeapTests, PoolHeapsInPendingSubmit) {
    // Use small heaps to count only pool-allocated switches.
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    constexpr uint32_t kNumOfSwitches = 5;

    ShaderVisibleDescriptorAllocator* allocator =
        mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator();

    const HeapVersionID heapSerial = allocator->GetShaderVisibleHeapSerialForTesting();

    std::set<ComPtr<ID3D12DescriptorHeap>> heaps = {allocator->GetShaderVisibleHeap()};

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 0u);

    // Switch-over |kNumOfSwitches| times and ensure heaps are always unique.
    for (uint32_t i = 0; i < kNumOfSwitches; i++) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(std::find(heaps.begin(), heaps.end(), heap) == heaps.end());
        heaps.insert(heap);
    }

    // After |kNumOfSwitches|, no heaps are recycled.
    EXPECT_EQ(allocator->GetShaderVisibleHeapSerialForTesting(),
              heapSerial + HeapVersionID(kNumOfSwitches));
    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), kNumOfSwitches);
}

// Verify switching shader-visible heaps do not recycle in a pending submit but do so
// once no longer pending.
TEST_P(D3D12DescriptorHeapTests, PoolHeapsInPendingAndMultipleSubmits) {
    // Use small heaps to count only pool-allocated switches.
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    constexpr uint32_t kNumOfSwitches = 5;

    ShaderVisibleDescriptorAllocator* allocator =
        mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator();
    const HeapVersionID heapSerial = allocator->GetShaderVisibleHeapSerialForTesting();

    std::set<ComPtr<ID3D12DescriptorHeap>> heaps = {allocator->GetShaderVisibleHeap()};

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 0u);

    // Switch-over |kNumOfSwitches| times to create a pool of unique heaps.
    for (uint32_t i = 0; i < kNumOfSwitches; i++) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(std::find(heaps.begin(), heaps.end(), heap) == heaps.end());
        heaps.insert(heap);
    }

    EXPECT_EQ(allocator->GetShaderVisibleHeapSerialForTesting(),
              heapSerial + HeapVersionID(kNumOfSwitches));
    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), kNumOfSwitches);

    // Ensure switched-over heaps can be recycled by advancing the GPU by at least |kFrameDepth|.
    for (uint32_t i = 0; i < kFrameDepth; i++) {
        mD3DDevice->APITick();
    }

    // Switch-over |kNumOfSwitches| times again, reusing the same heaps.
    for (uint32_t i = 0; i < kNumOfSwitches; i++) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(std::find(heaps.begin(), heaps.end(), heap) != heaps.end());
        heaps.erase(heap);
    }

    // After switching-over |kNumOfSwitches| x 2, ensure no additional heaps exist.
    EXPECT_EQ(allocator->GetShaderVisibleHeapSerialForTesting(),
              heapSerial + HeapVersionID(kNumOfSwitches * 2));
    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), kNumOfSwitches);
}

// Verify shader-visible heaps do not recycle in multiple submits.
TEST_P(D3D12DescriptorHeapTests, GrowHeapsInMultipleSubmits) {
    ShaderVisibleDescriptorAllocator* allocator =
        mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator();

    const HeapVersionID heapSerial = allocator->GetShaderVisibleHeapSerialForTesting();

    std::set<ComPtr<ID3D12DescriptorHeap>> heaps = {allocator->GetShaderVisibleHeap()};

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 0u);

    // Growth: Allocate + Tick() and ensure heaps are always unique.
    while (allocator->GetShaderVisiblePoolSizeForTesting() == 0) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(std::find(heaps.begin(), heaps.end(), heap) == heaps.end());
        heaps.insert(heap);
        mD3DDevice->APITick();
    }

    // Verify the number of switches equals the number of heaps allocated (minus the initial).
    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 1u);
    EXPECT_EQ(allocator->GetShaderVisibleHeapSerialForTesting(),
              heapSerial + HeapVersionID(heaps.size() - 1));
}

// Verify shader-visible heaps do not recycle in a pending submit.
TEST_P(D3D12DescriptorHeapTests, GrowHeapsInPendingSubmit) {
    ShaderVisibleDescriptorAllocator* allocator =
        mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator();

    const HeapVersionID heapSerial = allocator->GetShaderVisibleHeapSerialForTesting();

    std::set<ComPtr<ID3D12DescriptorHeap>> heaps = {allocator->GetShaderVisibleHeap()};

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 0u);

    // Growth: Allocate new heaps.
    while (allocator->GetShaderVisiblePoolSizeForTesting() == 0) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(std::find(heaps.begin(), heaps.end(), heap) == heaps.end());
        heaps.insert(heap);
    }

    // Verify the number of switches equals the number of heaps allocated (minus the initial).
    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 1u);
    EXPECT_EQ(allocator->GetShaderVisibleHeapSerialForTesting(),
              heapSerial + HeapVersionID(heaps.size() - 1));
}

// Verify switching shader-visible heaps do not recycle in a pending submit but do so
// once no longer pending.
// Switches over many times until |kNumOfPooledHeaps| heaps are pool-allocated.
TEST_P(D3D12DescriptorHeapTests, GrowAndPoolHeapsInPendingAndMultipleSubmits) {
    ShaderVisibleDescriptorAllocator* allocator =
        mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator();

    std::set<ComPtr<ID3D12DescriptorHeap>> heaps = {allocator->GetShaderVisibleHeap()};

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), 0u);

    uint32_t kNumOfPooledHeaps = 5;
    while (allocator->GetShaderVisiblePoolSizeForTesting() < kNumOfPooledHeaps) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_TRUE(std::find(heaps.begin(), heaps.end(), heap) == heaps.end());
        heaps.insert(heap);
    }

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), kNumOfPooledHeaps);

    // Ensure switched-over heaps can be recycled by advancing the GPU by at least |kFrameDepth|.
    for (uint32_t i = 0; i < kFrameDepth; i++) {
        mD3DDevice->APITick();
    }

    // Switch-over the pool-allocated heaps.
    for (uint32_t i = 0; i < kNumOfPooledHeaps; i++) {
        EXPECT_TRUE(allocator->AllocateAndSwitchShaderVisibleHeap().IsSuccess());
        ComPtr<ID3D12DescriptorHeap> heap = allocator->GetShaderVisibleHeap();
        EXPECT_FALSE(std::find(heaps.begin(), heaps.end(), heap) == heaps.end());
    }

    EXPECT_EQ(allocator->GetShaderVisiblePoolSizeForTesting(), kNumOfPooledHeaps);
}

// Verify encoding multiple heaps' worth of bindgroups.
// Shader-visible heaps will switch out |kNumOfHeaps| times.
TEST_P(D3D12DescriptorHeapTests, EncodeManyUBO) {
    // This test draws a solid color triangle |numOfEncodedBindGroups| times. Each draw uses a
    // new bindgroup that has its own UBO with a "color value" in the range
    // [1... numOfEncodedBindGroups]. After all draws, the result is the arithmetic sum of the
    // sequence once the draws are accumulated into the framebuffer by additive blending. By
    // checking for this sum, we ensure each bindgroup was encoded correctly.
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    utils::BasicRenderPass renderPass =
        MakeRenderPass(kRTSize, kRTSize, wgpu::TextureFormat::R16Float);

    utils::ComboRenderPipelineDescriptor pipelineDescriptor;
    pipelineDescriptor.vertex.module = mSimpleVSModule;

    pipelineDescriptor.cFragment.module = utils::CreateShaderModule(device, R"(
        struct U {
            heapSize : f32;
        };
        [[group(0), binding(0)]] var<uniform> buffer0 : U;

        [[stage(fragment)]] fn main() -> [[location(0)]] vec4<f32> {
            return vec4<f32>(buffer0.heapSize, 0.0, 0.0, 1.0);
        })");

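    // Additive blending accumulates the color written by every draw into the render target.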
    wgpu::BlendState blend;
    blend.color.operation = wgpu::BlendOperation::Add;
    blend.color.srcFactor = wgpu::BlendFactor::One;
    blend.color.dstFactor = wgpu::BlendFactor::One;
    blend.alpha.operation = wgpu::BlendOperation::Add;
    blend.alpha.srcFactor = wgpu::BlendFactor::One;
    blend.alpha.dstFactor = wgpu::BlendFactor::One;

    pipelineDescriptor.cTargets[0].format = wgpu::TextureFormat::R16Float;
    pipelineDescriptor.cTargets[0].blend = &blend;

    wgpu::RenderPipeline renderPipeline = device.CreateRenderPipeline(&pipelineDescriptor);

    const uint32_t heapSize =
        mD3DDevice->GetViewShaderVisibleDescriptorAllocator()->GetShaderVisibleHeapSizeForTesting();

    constexpr uint32_t kNumOfHeaps = 2;

    const uint32_t numOfEncodedBindGroups = kNumOfHeaps * heapSize;

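    // Create one bindgroup per draw, each with its own UBO holding the 1-based draw index as
    // its color value.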
    std::vector<wgpu::BindGroup> bindGroups;
    for (uint32_t i = 0; i < numOfEncodedBindGroups; i++) {
        const float color = i + 1;
        wgpu::Buffer uniformBuffer =
            utils::CreateBufferFromData(device, &color, sizeof(color), wgpu::BufferUsage::Uniform);
        bindGroups.push_back(utils::MakeBindGroup(device, renderPipeline.GetBindGroupLayout(0),
                                                  {{0, uniformBuffer}}));
    }

    wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
    {
        wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

        pass.SetPipeline(renderPipeline);

        for (uint32_t i = 0; i < numOfEncodedBindGroups; ++i) {
            pass.SetBindGroup(0, bindGroups[i]);
            pass.Draw(3);
        }

        pass.EndPass();
    }

    wgpu::CommandBuffer commands = encoder.Finish();
    queue.Submit(1, &commands);

    float colorSum = numOfEncodedBindGroups * (numOfEncodedBindGroups + 1) / 2;
    EXPECT_PIXEL_FLOAT16_EQ(colorSum, renderPass.color, 0, 0);
}

// Verify encoding one bindgroup then a heap's worth in different submits.
// Shader-visible heaps should switch out once upon encoding 1 + |heapSize| descriptors.
// The first descriptor's memory will be reused when the second submit encodes |heapSize|
// descriptors.
TEST_P(D3D12DescriptorHeapTests, EncodeUBOOverflowMultipleSubmit) {
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    // TODO(crbug.com/dawn/742): Test output is wrong with D3D12 + WARP.
    DAWN_SUPPRESS_TEST_IF(IsD3D12() && IsWARP());

    utils::BasicRenderPass renderPass = utils::CreateBasicRenderPass(device, kRTSize, kRTSize);

    utils::ComboRenderPipelineDescriptor pipelineDescriptor;
    pipelineDescriptor.vertex.module = mSimpleVSModule;
    pipelineDescriptor.cFragment.module = mSimpleFSModule;
    pipelineDescriptor.cTargets[0].format = renderPass.colorFormat;

    wgpu::RenderPipeline renderPipeline = device.CreateRenderPipeline(&pipelineDescriptor);

    // Encode the first descriptor and submit.
    {
        std::array<float, 4> greenColor = {0, 1, 0, 1};
        wgpu::Buffer uniformBuffer = utils::CreateBufferFromData(
            device, &greenColor, sizeof(greenColor), wgpu::BufferUsage::Uniform);

        wgpu::BindGroup bindGroup = utils::MakeBindGroup(
            device, renderPipeline.GetBindGroupLayout(0), {{0, uniformBuffer}});

        wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
        {
            wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

            pass.SetPipeline(renderPipeline);
            pass.SetBindGroup(0, bindGroup);
            pass.Draw(3);
            pass.EndPass();
        }

        wgpu::CommandBuffer commands = encoder.Finish();
        queue.Submit(1, &commands);
    }

    EXPECT_PIXEL_RGBA8_EQ(RGBA8::kGreen, renderPass.color, 0, 0);

    // Encode a heap's worth of descriptors.
    {
        const uint32_t heapSize = mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator()
                                      ->GetShaderVisibleHeapSizeForTesting();

        std::vector<wgpu::BindGroup> bindGroups;
        for (uint32_t i = 0; i < heapSize - 1; i++) {
            std::array<float, 4> fillColor = GetSolidColor(i + 1);  // Avoid black
            wgpu::Buffer uniformBuffer = utils::CreateBufferFromData(
                device, &fillColor, sizeof(fillColor), wgpu::BufferUsage::Uniform);

            bindGroups.push_back(utils::MakeBindGroup(device, renderPipeline.GetBindGroupLayout(0),
                                                      {{0, uniformBuffer}}));
        }

        std::array<float, 4> redColor = {1, 0, 0, 1};
        wgpu::Buffer lastUniformBuffer = utils::CreateBufferFromData(
            device, &redColor, sizeof(redColor), wgpu::BufferUsage::Uniform);

        bindGroups.push_back(utils::MakeBindGroup(device, renderPipeline.GetBindGroupLayout(0),
                                                  {{0, lastUniformBuffer, 0, sizeof(redColor)}}));

        wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
        {
            wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

            pass.SetPipeline(renderPipeline);

            for (uint32_t i = 0; i < heapSize; ++i) {
                pass.SetBindGroup(0, bindGroups[i]);
                pass.Draw(3);
            }

            pass.EndPass();
        }

        wgpu::CommandBuffer commands = encoder.Finish();
        queue.Submit(1, &commands);
    }

    EXPECT_PIXEL_RGBA8_EQ(RGBA8::kRed, renderPass.color, 0, 0);
}

// Verify encoding a heap's worth of bindgroups plus one more, then reuse the first
// bindgroup in the same submit.
// Shader-visible heaps should switch out once then re-encode the first descriptor at a new offset
// in the heap.
TEST_P(D3D12DescriptorHeapTests, EncodeReuseUBOOverflow) {
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    utils::BasicRenderPass renderPass = utils::CreateBasicRenderPass(device, kRTSize, kRTSize);

    utils::ComboRenderPipelineDescriptor pipelineDescriptor;
    pipelineDescriptor.vertex.module = mSimpleVSModule;
    pipelineDescriptor.cFragment.module = mSimpleFSModule;
    pipelineDescriptor.cTargets[0].format = renderPass.colorFormat;

    wgpu::RenderPipeline pipeline = device.CreateRenderPipeline(&pipelineDescriptor);

    std::array<float, 4> redColor = {1, 0, 0, 1};
    wgpu::Buffer firstUniformBuffer = utils::CreateBufferFromData(
        device, &redColor, sizeof(redColor), wgpu::BufferUsage::Uniform);

    std::vector<wgpu::BindGroup> bindGroups = {utils::MakeBindGroup(
        device, pipeline.GetBindGroupLayout(0), {{0, firstUniformBuffer, 0, sizeof(redColor)}})};

    const uint32_t heapSize =
        mD3DDevice->GetViewShaderVisibleDescriptorAllocator()->GetShaderVisibleHeapSizeForTesting();

    for (uint32_t i = 0; i < heapSize; i++) {
        const std::array<float, 4>& fillColor = GetSolidColor(i + 1);  // Avoid black
        wgpu::Buffer uniformBuffer = utils::CreateBufferFromData(
            device, &fillColor, sizeof(fillColor), wgpu::BufferUsage::Uniform);
        bindGroups.push_back(utils::MakeBindGroup(device, pipeline.GetBindGroupLayout(0),
                                                  {{0, uniformBuffer, 0, sizeof(fillColor)}}));
    }

    wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
    {
        wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

        pass.SetPipeline(pipeline);

        // Encode a heap's worth of descriptors plus one more.
        for (uint32_t i = 0; i < heapSize + 1; ++i) {
            pass.SetBindGroup(0, bindGroups[i]);
            pass.Draw(3);
        }

        // Re-encode the first bindgroup again.
        pass.SetBindGroup(0, bindGroups[0]);
        pass.Draw(3);

        pass.EndPass();
    }

    wgpu::CommandBuffer commands = encoder.Finish();
    queue.Submit(1, &commands);

    // Make sure the first bindgroup was encoded correctly.
    EXPECT_PIXEL_RGBA8_EQ(RGBA8::kRed, renderPass.color, 0, 0);
}

// Verify encoding a heap's worth of bindgroups plus one more in the first submit, then reuse the
// first bindgroup again in the second submit.
// Shader-visible heaps should switch out once then re-encode the
// first descriptor at the same offset in the heap.
TEST_P(D3D12DescriptorHeapTests, EncodeReuseUBOMultipleSubmits) {
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    utils::BasicRenderPass renderPass = utils::CreateBasicRenderPass(device, kRTSize, kRTSize);

    utils::ComboRenderPipelineDescriptor pipelineDescriptor;
    pipelineDescriptor.vertex.module = mSimpleVSModule;
    pipelineDescriptor.cFragment.module = mSimpleFSModule;
    pipelineDescriptor.cTargets[0].format = renderPass.colorFormat;

    wgpu::RenderPipeline pipeline = device.CreateRenderPipeline(&pipelineDescriptor);

    // Encode a heap's worth of descriptors plus one more.
    std::array<float, 4> redColor = {1, 0, 0, 1};

    wgpu::Buffer firstUniformBuffer = utils::CreateBufferFromData(
        device, &redColor, sizeof(redColor), wgpu::BufferUsage::Uniform);

    std::vector<wgpu::BindGroup> bindGroups = {utils::MakeBindGroup(
        device, pipeline.GetBindGroupLayout(0), {{0, firstUniformBuffer, 0, sizeof(redColor)}})};

    const uint32_t heapSize =
        mD3DDevice->GetViewShaderVisibleDescriptorAllocator()->GetShaderVisibleHeapSizeForTesting();

    for (uint32_t i = 0; i < heapSize; i++) {
        std::array<float, 4> fillColor = GetSolidColor(i + 1);  // Avoid black
        wgpu::Buffer uniformBuffer = utils::CreateBufferFromData(
            device, &fillColor, sizeof(fillColor), wgpu::BufferUsage::Uniform);

        bindGroups.push_back(utils::MakeBindGroup(device, pipeline.GetBindGroupLayout(0),
                                                  {{0, uniformBuffer, 0, sizeof(fillColor)}}));
    }

    {
        wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
        {
            wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

            pass.SetPipeline(pipeline);

            for (uint32_t i = 0; i < heapSize + 1; ++i) {
                pass.SetBindGroup(0, bindGroups[i]);
                pass.Draw(3);
            }

            pass.EndPass();
        }

        wgpu::CommandBuffer commands = encoder.Finish();
        queue.Submit(1, &commands);
    }

    // Re-encode the first bindgroup again.
    {
        std::array<float, 4> greenColor = {0, 1, 0, 1};
        queue.WriteBuffer(firstUniformBuffer, 0, &greenColor, sizeof(greenColor));

        wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
        {
            wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo);

            pass.SetPipeline(pipeline);

            pass.SetBindGroup(0, bindGroups[0]);
            pass.Draw(3);

            pass.EndPass();
        }

        wgpu::CommandBuffer commands = encoder.Finish();
        queue.Submit(1, &commands);
    }

    // Make sure the first bindgroup was re-encoded correctly.
    EXPECT_PIXEL_RGBA8_EQ(RGBA8::kGreen, renderPass.color, 0, 0);
}

// Verify encoding many heaps' worth of sampler and UBO bindgroups.
// Shader-visible heaps should switch out |kNumOfViewHeaps| times.
TEST_P(D3D12DescriptorHeapTests, EncodeManyUBOAndSamplers) {
    DAWN_TEST_UNSUPPORTED_IF(!mD3DDevice->IsToggleEnabled(
        dawn_native::Toggle::UseD3D12SmallShaderVisibleHeapForTesting));

    // Create a solid filled texture.
    wgpu::TextureDescriptor descriptor;
    descriptor.dimension = wgpu::TextureDimension::e2D;
    descriptor.size.width = kRTSize;
    descriptor.size.height = kRTSize;
    descriptor.size.depthOrArrayLayers = 1;
    descriptor.sampleCount = 1;
    descriptor.format = wgpu::TextureFormat::RGBA8Unorm;
    descriptor.mipLevelCount = 1;
    descriptor.usage = wgpu::TextureUsage::TextureBinding | wgpu::TextureUsage::RenderAttachment |
                       wgpu::TextureUsage::CopySrc;
    wgpu::Texture texture = device.CreateTexture(&descriptor);
    wgpu::TextureView textureView = texture.CreateView();

    {
        utils::BasicRenderPass renderPass = utils::BasicRenderPass(kRTSize, kRTSize, texture);

        utils::ComboRenderPassDescriptor renderPassDesc({textureView});
        renderPassDesc.cColorAttachments[0].loadOp = wgpu::LoadOp::Clear;
        renderPassDesc.cColorAttachments[0].clearColor = {0.0f, 1.0f, 0.0f, 1.0f};
        renderPass.renderPassInfo.cColorAttachments[0].view = textureView;

        wgpu::CommandEncoder encoder = device.CreateCommandEncoder();
        auto pass = encoder.BeginRenderPass(&renderPassDesc);
        pass.EndPass();

        wgpu::CommandBuffer commandBuffer = encoder.Finish();
        queue.Submit(1, &commandBuffer);

        RGBA8 filled(0, 255, 0, 255);
        EXPECT_PIXEL_RGBA8_EQ(filled, renderPass.color, 0, 0);
    }

    {
        utils::ComboRenderPipelineDescriptor pipelineDescriptor;

        pipelineDescriptor.vertex.module = utils::CreateShaderModule(device, R"(
            struct U {
                transform : mat2x2<f32>;
            };
            [[group(0), binding(0)]] var<uniform> buffer0 : U;

            [[stage(vertex)]] fn main(
                [[builtin(vertex_index)]] VertexIndex : u32
            ) -> [[builtin(position)]] vec4<f32> {
                var pos = array<vec2<f32>, 3>(
                    vec2<f32>(-1.0,  1.0),
                    vec2<f32>( 1.0,  1.0),
                    vec2<f32>(-1.0, -1.0)
                );
                return vec4<f32>(buffer0.transform * (pos[VertexIndex]), 0.0, 1.0);
            })");
        pipelineDescriptor.cFragment.module = utils::CreateShaderModule(device, R"(
            struct U {
                color : vec4<f32>;
            };
            [[group(0), binding(1)]] var sampler0 : sampler;
            [[group(0), binding(2)]] var texture0 : texture_2d<f32>;
            [[group(0), binding(3)]] var<uniform> buffer0 : U;

            [[stage(fragment)]] fn main(
                [[builtin(position)]] FragCoord : vec4<f32>
            ) -> [[location(0)]] vec4<f32> {
                return textureSample(texture0, sampler0, FragCoord.xy) + buffer0.color;
            })");

        utils::BasicRenderPass renderPass = utils::CreateBasicRenderPass(device, kRTSize, kRTSize);
        pipelineDescriptor.cTargets[0].format = renderPass.colorFormat;

        wgpu::RenderPipeline pipeline = device.CreateRenderPipeline(&pipelineDescriptor);

        // Encode a heap's worth of descriptors |kNumOfViewHeaps| times.
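        // The 2x2 identity transform leaves the triangle's vertex positions unchanged.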
Ben Clayton | e4f4a37 | 2021-06-21 07:55:57 +0000 | [diff] [blame] | 815 | constexpr float transform[] = {1.f, 0.f, 0.f, 1.f}; |
Bryan Bernhart | 0463899 | 2020-03-25 18:31:08 +0000 | [diff] [blame] | 816 | wgpu::Buffer transformBuffer = utils::CreateBufferFromData( |
| 817 | device, &transform, sizeof(transform), wgpu::BufferUsage::Uniform); |
| 818 | |
| 819 | wgpu::SamplerDescriptor samplerDescriptor; |
| 820 | wgpu::Sampler sampler = device.CreateSampler(&samplerDescriptor); |
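| | // A single default sampler is shared by every bind group created below. |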
| 821 | |
Bryan Bernhart | 303a3da | 2020-04-30 23:19:16 +0000 | [diff] [blame] | 822 | ShaderVisibleDescriptorAllocator* viewAllocator = |
| 823 | mD3DDevice->GetViewShaderVisibleDescriptorAllocator(); |
Bryan Bernhart | b46d002 | 2020-04-23 20:36:22 +0000 | [diff] [blame] | 824 | |
Bryan Bernhart | 303a3da | 2020-04-30 23:19:16 +0000 | [diff] [blame] | 825 | ShaderVisibleDescriptorAllocator* samplerAllocator = |
| 826 | mD3DDevice->GetSamplerShaderVisibleDescriptorAllocator(); |
Bryan Bernhart | b46d002 | 2020-04-23 20:36:22 +0000 | [diff] [blame] | 827 | |
Corentin Wallez | cac14e0 | 2020-09-28 16:05:24 +0000 | [diff] [blame] | 828 | const HeapVersionID viewHeapSerial = viewAllocator->GetShaderVisibleHeapSerialForTesting(); |
| 829 | const HeapVersionID samplerHeapSerial = |
| 830 | samplerAllocator->GetShaderVisibleHeapSerialForTesting(); |
Bryan Bernhart | 303a3da | 2020-04-30 23:19:16 +0000 | [diff] [blame] | 831 | |
| 832 | const uint32_t viewHeapSize = viewAllocator->GetShaderVisibleHeapSizeForTesting(); |
Bryan Bernhart | b46d002 | 2020-04-23 20:36:22 +0000 | [diff] [blame] | 833 | |
| 834 | // The "small" view heap is always 2x the sampler heap size and encodes 3x the descriptors |
| 835 | // per group. This means the number of heap switches is determined by the total number of |
| 836 | // views to encode. Compute the number of bind groups to encode from the number of views |
| 837 | // required to fill |kNumOfViewHeaps| heaps. |
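| | // (For example, if the small view heap held 1024 descriptors, this would encode |
| | // (1024 * 5) / 3 = 1706 bind groups.) |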
| 838 | constexpr uint32_t kViewsPerBindGroup = 3; |
| 839 | constexpr uint32_t kNumOfViewHeaps = 5; |
| 840 | |
| 841 | const uint32_t numOfEncodedBindGroups = |
| 842 | (viewHeapSize * kNumOfViewHeaps) / kViewsPerBindGroup; |
| 843 | |
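| | // Create one bind group per encoded group; each differs only in its fill-color uniform buffer. |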
Bryan Bernhart | 0463899 | 2020-03-25 18:31:08 +0000 | [diff] [blame] | 844 | std::vector<wgpu::BindGroup> bindGroups; |
Bryan Bernhart | b46d002 | 2020-04-23 20:36:22 +0000 | [diff] [blame] | 845 | for (uint32_t i = 0; i < numOfEncodedBindGroups - 1; i++) { |
Bryan Bernhart | 0463899 | 2020-03-25 18:31:08 +0000 | [diff] [blame] | 846 | std::array<float, 4> fillColor = GetSolidColor(i + 1); // Avoid black |
| 847 | wgpu::Buffer uniformBuffer = utils::CreateBufferFromData( |
| 848 | device, &fillColor, sizeof(fillColor), wgpu::BufferUsage::Uniform); |
| 849 | |
Idan Raiter | f434fdc | 2020-06-19 21:39:23 +0000 | [diff] [blame] | 850 | bindGroups.push_back(utils::MakeBindGroup(device, pipeline.GetBindGroupLayout(0), |
| 851 | {{0, transformBuffer, 0, sizeof(transform)}, |
| 852 | {1, sampler}, |
| 853 | {2, textureView}, |
| 854 | {3, uniformBuffer, 0, sizeof(fillColor)}})); |
Bryan Bernhart | 0463899 | 2020-03-25 18:31:08 +0000 | [diff] [blame] | 855 | } |
| 856 | |
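| | // The last bind group uses a known red color so the final draw produces a deterministic result. |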
| 857 | std::array<float, 4> redColor = {1, 0, 0, 1}; |
| 858 | wgpu::Buffer lastUniformBuffer = utils::CreateBufferFromData( |
| 859 | device, &redColor, sizeof(redColor), wgpu::BufferUsage::Uniform); |
| 860 | |
| 861 | bindGroups.push_back(utils::MakeBindGroup(device, pipeline.GetBindGroupLayout(0), |
| 862 | {{0, transformBuffer, 0, sizeof(transform)}, |
| 863 | {1, sampler}, |
| 864 | {2, textureView}, |
| 865 | {3, lastUniformBuffer, 0, sizeof(redColor)}})); |
| 866 | |
| 867 | wgpu::CommandEncoder encoder = device.CreateCommandEncoder(); |
| 868 | wgpu::RenderPassEncoder pass = encoder.BeginRenderPass(&renderPass.renderPassInfo); |
| 869 | |
| 870 | pass.SetPipeline(pipeline); |
| 871 | |
Bryan Bernhart | b46d002 | 2020-04-23 20:36:22 +0000 | [diff] [blame] | 872 | for (uint32_t i = 0; i < numOfEncodedBindGroups; ++i) { |
| 873 | pass.SetBindGroup(0, bindGroups[i]); |
Corentin Wallez | 67b1ad7 | 2020-03-31 16:21:35 +0000 | [diff] [blame] | 874 | pass.Draw(3); |
Bryan Bernhart | 0463899 | 2020-03-25 18:31:08 +0000 | [diff] [blame] | 875 | } |
| 876 | |
| 877 | pass.EndPass(); |
| 878 | |
| 879 | wgpu::CommandBuffer commands = encoder.Finish(); |
| 880 | queue.Submit(1, &commands); |
| 881 | |
| 882 | // The final accumulated color is the sampled texture color plus the UBO color. |
| 883 | RGBA8 filled(255, 255, 0, 255); |
| 884 | RGBA8 notFilled(0, 0, 0, 0); |
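| | // The triangle covers pixel (0, 0) but does not cover pixel (kRTSize - 1, 0). |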
| 885 | EXPECT_PIXEL_RGBA8_EQ(filled, renderPass.color, 0, 0); |
| 886 | EXPECT_PIXEL_RGBA8_EQ(notFilled, renderPass.color, kRTSize - 1, 0); |
Bryan Bernhart | b46d002 | 2020-04-23 20:36:22 +0000 | [diff] [blame] | 887 | |
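| | // Encoding |kNumOfViewHeaps| heaps worth of views should have grown the view heap pool and |
| | // advanced its serial by |kNumOfViewHeaps|, while the single shared sampler never forces a |
| | // sampler heap switch. |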
Bryan Bernhart | 303a3da | 2020-04-30 23:19:16 +0000 | [diff] [blame] | 888 | EXPECT_EQ(viewAllocator->GetShaderVisiblePoolSizeForTesting(), kNumOfViewHeaps); |
| 889 | EXPECT_EQ(viewAllocator->GetShaderVisibleHeapSerialForTesting(), |
Corentin Wallez | cac14e0 | 2020-09-28 16:05:24 +0000 | [diff] [blame] | 890 | viewHeapSerial + HeapVersionID(kNumOfViewHeaps)); |
Bryan Bernhart | 303a3da | 2020-04-30 23:19:16 +0000 | [diff] [blame] | 891 | |
Bryan Bernhart | e25ee25 | 2020-05-18 23:25:31 +0000 | [diff] [blame] | 892 | EXPECT_EQ(samplerAllocator->GetShaderVisiblePoolSizeForTesting(), 0u); |
| 893 | EXPECT_EQ(samplerAllocator->GetShaderVisibleHeapSerialForTesting(), samplerHeapSerial); |
Bryan Bernhart | 0463899 | 2020-03-25 18:31:08 +0000 | [diff] [blame] | 894 | } |
| 895 | } |
| 896 | |
Bryan Bernhart | cb859a2 | 2020-04-06 22:07:42 +0000 | [diff] [blame] | 897 | // Verify a single allocate/deallocate. |
| 898 | // One non-shader visible heap will be created. |
| 899 | TEST_P(D3D12DescriptorHeapTests, Single) { |
| 900 | constexpr uint32_t kDescriptorCount = 4; |
| 901 | constexpr uint32_t kAllocationsPerHeap = 3; |
Bryan Bernhart | 4f86505 | 2020-04-10 18:43:22 +0000 | [diff] [blame] | 902 | DummyStagingDescriptorAllocator allocator(mD3DDevice, kDescriptorCount, kAllocationsPerHeap); |
Bryan Bernhart | cb859a2 | 2020-04-06 22:07:42 +0000 | [diff] [blame] | 903 | |
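| | // The only allocation should come from the first heap, at a non-null CPU handle. |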
| 904 | CPUDescriptorHeapAllocation allocation = allocator.AllocateCPUDescriptors(); |
| 905 | EXPECT_EQ(allocation.GetHeapIndex(), 0u); |
| 906 | EXPECT_NE(allocation.OffsetFrom(0, 0).ptr, 0u); |
| 907 | |
| 908 | allocator.Deallocate(allocation); |
| 909 | EXPECT_FALSE(allocation.IsValid()); |
| 910 | } |
| 911 | |
| 912 | // Verify allocating many times causes the pool to increase in size. |
| 913 | // Creates |kNumOfHeaps| non-shader visible heaps. |
| 914 | TEST_P(D3D12DescriptorHeapTests, Sequential) { |
| 915 | constexpr uint32_t kDescriptorCount = 4; |
| 916 | constexpr uint32_t kAllocationsPerHeap = 3; |
Bryan Bernhart | 4f86505 | 2020-04-10 18:43:22 +0000 | [diff] [blame] | 917 | DummyStagingDescriptorAllocator allocator(mD3DDevice, kDescriptorCount, kAllocationsPerHeap); |
Bryan Bernhart | cb859a2 | 2020-04-06 22:07:42 +0000 | [diff] [blame] | 918 | |
| 919 | // Allocate |kNumOfHeaps| heaps worth. |
| 920 | constexpr uint32_t kNumOfHeaps = 2; |
| 921 | |
| 922 | std::set<uint32_t> allocatedHeaps; |
| 923 | |
| 924 | std::vector<CPUDescriptorHeapAllocation> allocations; |
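| | // Each heap holds |kAllocationsPerHeap| allocations, so allocation i should land in heap |
| | // i / kAllocationsPerHeap. |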
| 925 | for (uint32_t i = 0; i < kAllocationsPerHeap * kNumOfHeaps; i++) { |
| 926 | CPUDescriptorHeapAllocation allocation = allocator.AllocateCPUDescriptors(); |
| 927 | EXPECT_EQ(allocation.GetHeapIndex(), i / kAllocationsPerHeap); |
| 928 | EXPECT_NE(allocation.OffsetFrom(0, 0).ptr, 0u); |
| 929 | allocations.push_back(allocation); |
| 930 | allocatedHeaps.insert(allocation.GetHeapIndex()); |
| 931 | } |
| 932 | |
| 933 | EXPECT_EQ(allocatedHeaps.size(), kNumOfHeaps); |
| 934 | |
| 935 | // Deallocate all. |
| 936 | for (CPUDescriptorHeapAllocation& allocation : allocations) { |
| 937 | allocator.Deallocate(allocation); |
| 938 | EXPECT_FALSE(allocation.IsValid()); |
| 939 | } |
| 940 | } |
| 941 | |
| 942 | // Verify that re-allocating a number of allocations that fits in the pool reuses all heaps. |
| 943 | // Creates and reuses |kNumofHeaps| non-shader visible heaps. |
| 944 | TEST_P(D3D12DescriptorHeapTests, ReuseFreedHeaps) { |
| 945 | constexpr uint32_t kDescriptorCount = 4; |
| 946 | constexpr uint32_t kAllocationsPerHeap = 25; |
Bryan Bernhart | 4f86505 | 2020-04-10 18:43:22 +0000 | [diff] [blame] | 947 | DummyStagingDescriptorAllocator allocator(mD3DDevice, kDescriptorCount, kAllocationsPerHeap); |
Bryan Bernhart | cb859a2 | 2020-04-06 22:07:42 +0000 | [diff] [blame] | 948 | |
| 949 | constexpr uint32_t kNumofHeaps = 10; |
| 950 | |
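| | // Track each allocation and its CPU descriptor pointer so heap reuse can be verified after |
| | // everything is freed. |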
| 951 | std::list<CPUDescriptorHeapAllocation> allocations; |
| 952 | std::set<size_t> allocationPtrs; |
| 953 | |
| 954 | // Allocate |kNumofHeaps| heaps worth. |
| 955 | for (uint32_t i = 0; i < kAllocationsPerHeap * kNumofHeaps; i++) { |
| 956 | CPUDescriptorHeapAllocation allocation = allocator.AllocateCPUDescriptors(); |
| 957 | allocations.push_back(allocation); |
| 958 | EXPECT_TRUE(allocationPtrs.insert(allocation.OffsetFrom(0, 0).ptr).second); |
| 959 | } |
| 960 | |
| 961 | // Deallocate all. |
| 962 | for (CPUDescriptorHeapAllocation& allocation : allocations) { |
| 963 | allocator.Deallocate(allocation); |
| 964 | EXPECT_FALSE(allocation.IsValid()); |
| 965 | } |
| 966 | |
| 967 | allocations.clear(); |
| 968 | |
| 969 | // Re-allocate all again. |
| 970 | std::set<size_t> reallocatedPtrs; |
| 971 | for (uint32_t i = 0; i < kAllocationsPerHeap * kNumofHeaps; i++) { |
| 972 | CPUDescriptorHeapAllocation allocation = allocator.AllocateCPUDescriptors(); |
| 973 | allocations.push_back(allocation); |
| 974 | EXPECT_TRUE(reallocatedPtrs.insert(allocation.OffsetFrom(0, 0).ptr).second); |
| 975 | EXPECT_TRUE(allocationPtrs.find(allocation.OffsetFrom(0, 0).ptr) != |
| 976 |             allocationPtrs.end()); |
| 977 | } |
| 978 | |
| 979 | // Deallocate all again. |
| 980 | for (CPUDescriptorHeapAllocation& allocation : allocations) { |
| 981 | allocator.Deallocate(allocation); |
| 982 | EXPECT_FALSE(allocation.IsValid()); |
| 983 | } |
| 984 | } |
| 985 | |
| 986 | // Verify allocating then deallocating many times. |
| 987 | TEST_P(D3D12DescriptorHeapTests, AllocateDeallocateMany) { |
| 988 | constexpr uint32_t kDescriptorCount = 4; |
| 989 | constexpr uint32_t kAllocationsPerHeap = 25; |
Bryan Bernhart | 4f86505 | 2020-04-10 18:43:22 +0000 | [diff] [blame] | 990 | DummyStagingDescriptorAllocator allocator(mD3DDevice, kDescriptorCount, kAllocationsPerHeap); |
Bryan Bernhart | cb859a2 | 2020-04-06 22:07:42 +0000 | [diff] [blame] | 991 | |
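| | // |list3| and |list5| hold every 3rd and every 5th allocation, respectively, so that |
| | // deallocation can be interleaved with further allocation. |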
| 992 | std::list<CPUDescriptorHeapAllocation> list3; |
| 993 | std::list<CPUDescriptorHeapAllocation> list5; |
| 994 | std::list<CPUDescriptorHeapAllocation> allocations; |
| 995 | |
| 996 | constexpr uint32_t kNumofHeaps = 2; |
| 997 | |
| 998 | // Allocate |kNumofHeaps| heaps worth. |
| 999 | for (uint32_t i = 0; i < kAllocationsPerHeap * kNumofHeaps; i++) { |
| 1000 | CPUDescriptorHeapAllocation allocation = allocator.AllocateCPUDescriptors(); |
| 1001 | EXPECT_NE(allocation.OffsetFrom(0, 0).ptr, 0u); |
| 1002 | if (i % 3 == 0) { |
| 1003 | list3.push_back(allocation); |
| 1004 | } else { |
| 1005 | allocations.push_back(allocation); |
| 1006 | } |
| 1007 | } |
| 1008 | |
| 1009 | // Deallocate every 3rd allocation. |
| 1010 | for (auto it = list3.begin(); it != list3.end(); it = list3.erase(it)) { |
| 1011 | allocator.Deallocate(*it); |
| 1012 | } |
| 1013 | |
| 1014 | // Allocate again. |
| 1015 | for (uint32_t i = 0; i < kAllocationsPerHeap * kNumofHeaps; i++) { |
| 1016 | CPUDescriptorHeapAllocation allocation = allocator.AllocateCPUDescriptors(); |
| 1017 | EXPECT_NE(allocation.OffsetFrom(0, 0).ptr, 0u); |
| 1018 | if (i % 5 == 0) { |
| 1019 | list5.push_back(allocation); |
| 1020 | } else { |
| 1021 | allocations.push_back(allocation); |
| 1022 | } |
| 1023 | } |
| 1024 | |
| 1025 | // Deallocate every 5th allocation. |
| 1026 | for (auto it = list5.begin(); it != list5.end(); it = list5.erase(it)) { |
| 1027 | allocator.Deallocate(*it); |
| 1028 | } |
| 1029 | |
| 1030 | // Allocate again. |
| 1031 | for (uint32_t i = 0; i < kAllocationsPerHeap * kNumofHeaps; i++) { |
| 1032 | CPUDescriptorHeapAllocation allocation = allocator.AllocateCPUDescriptors(); |
| 1033 | EXPECT_NE(allocation.OffsetFrom(0, 0).ptr, 0u); |
| 1034 | allocations.push_back(allocation); |
| 1035 | } |
| 1036 | |
| 1037 | // Deallocate remaining. |
| 1038 | for (CPUDescriptorHeapAllocation& allocation : allocations) { |
| 1039 | allocator.Deallocate(allocation); |
| 1040 | EXPECT_FALSE(allocation.IsValid()); |
| 1041 | } |
| 1042 | } |
| 1043 | |
Bryan Bernhart | 0463899 | 2020-03-25 18:31:08 +0000 | [diff] [blame] | 1044 | DAWN_INSTANTIATE_TEST(D3D12DescriptorHeapTests, |
| 1045 | D3D12Backend(), |
| 1046 | D3D12Backend({"use_d3d12_small_shader_visible_heap"})); |