Instantiates a VkImageMemoryBarrier.
@brief Transitions the image layout from undefined to transfer-dst. Used during CopyBufferToImage.
@brief Transitions the image layout from undefined to transfer-src. Used during CopyImageToBuffer.
@brief Transitions the image layout from undefined to shader-read. Used during CopyImageToBuffer.
@brief Transitions the image layout from transfer-dst to shader-read. Used in combination with CopyBufferToImage.
Transitions the image layout from transfer-src to shader-read. Used in combination with CopyImageToBuffer.
Transitions the image layout from undefined to depth-attachment. Used when creating the depth render resource.
Transitions the image layout from undefined to general.
Uses a custom command buffer.
Instantiates a VkImageMemoryBarrier.
@brief Transitions the image layout from undefined to transfer-dst. Used during CopyBufferToImage.
@brief Transitions the image layout from undefined to transfer-src. Used during CopyImageToBuffer.
@brief Transitions the image layout from undefined to shader-read. Used during CopyImageToBuffer.
@brief Transitions the image layout from transfer-dst to shader-read. Used in combination with CopyBufferToImage.
Transitions the image layout from transfer-src to shader-read. Used in combination with CopyImageToBuffer.
Transitions the image layout from undefined to depth-attachment. Used when creating the depth render resource.
Transitions the image layout from undefined to general.
Uses a custom command buffer.
115 {
117
// NOTE(review): the numeric prefixes on these lines are the original file's
// line numbers fused in by extraction.  The enclosing function signature and
// the CustomGraphicCmd lambda header (which provides `commandBuffer`, used
// below) are missing from this view.

// Pipeline stages the barrier synchronizes: work reaching sourceStage before
// the barrier must complete before work at destinationStage may proceed.
// Filled in by the transition dispatch below.
118 VkPipelineStageFlags sourceStage;
119 VkPipelineStageFlags destinationStage;
120

// Describe the layout transition.  VK_QUEUE_FAMILY_IGNORED on both sides
// means no queue-family ownership transfer is performed.
124 VkImageMemoryBarrier barrier{};
125 barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
126 barrier.oldLayout = oldLayout;
127 barrier.newLayout = newLayout;
128 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
129 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
// Default to the color aspect; corrected later for depth/stencil layouts.
131 barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
132 barrier.subresourceRange.baseMipLevel = 0;
// NOTE(review): original line 133 is missing from this extraction — it
// presumably set subresourceRange.levelCount (likely to m_MipLevels); confirm
// against the original source.
134 barrier.subresourceRange.baseArrayLayer = 0;
// Cover every array layer (e.g. 6 for a cube image, per m_Layers).
135 barrier.subresourceRange.layerCount =
m_Layers;
// Access masks default to none; the transition dispatch below overrides them.
136 barrier.srcAccessMask = 0;
137 barrier.dstAccessMask = 0;
138
143 if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
144 {
145 barrier.srcAccessMask = 0;
146 barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
147
148
149 sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
150
151
152 destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
153 }
154
159 else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
160 {
161 barrier.srcAccessMask = 0;
162 barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
163
164
165 sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
166
167
168 destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
169 }
170
175 else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
176 {
177 barrier.srcAccessMask = 0;
178 barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
179
180
181 sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
182
183
184 destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
185 }
186
191 else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
192 {
193 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
194 barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
195
196
197 sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
198
199
200 destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
201 }
202
207 else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
208 {
209 barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
210 barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
211
212
213 sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
214
215
216 destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
217 }
218
223 else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
224 {
225 barrier.srcAccessMask = 0;
226 barrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
227
228 sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
229 destinationStage = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
230 }
231
236 else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_GENERAL)
237 {
238 barrier.srcAccessMask = 0;
239 barrier.dstAccessMask = VK_IMAGE_ASPECT_NONE;
240
241 sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
242 destinationStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
243 }
244 else
245 {
246 SPICES_CORE_WARN("Unsupported layout transition!");
247 }
248
// Choose the image aspect for the barrier: depth (plus stencil for combined
// depth-stencil formats) when transitioning to a depth-stencil attachment,
// color otherwise.  This overrides the default aspect chosen earlier.
249 if (newLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
250 {
251 barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
252
// Combined depth-stencil formats also carry a stencil aspect.
253 if (format == VK_FORMAT_D32_SFLOAT_S8_UINT || format == VK_FORMAT_D24_UNORM_S8_UINT)
254 {
255 barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
256 }
257 }
258 else
259 {
260 barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
261 }
262
// Record the barrier into the command buffer: no dependency flags, no global
// memory barriers, no buffer barriers, exactly one image barrier.
267 vkCmdPipelineBarrier(
268 commandBuffer,
269 sourceStage,
270 destinationStage,
271 0,
272 0,
273 nullptr,
274 0,
275 nullptr,
276 1,
277 &barrier
278 );
// NOTE(review): `});` closes a lambda whose opening — and the enclosing
// function signature — are not visible in this extraction; the numeric
// prefixes are original line numbers fused in by extraction.
279 });
280 }
#define SPICES_PROFILE_ZONE
static void CustomGraphicCmd(VulkanState &vulkanState, T func)
Creates a new command buffer, records the custom commands into it, submits it to the graphics queue, and executes it immediately.
uint32_t m_MipLevels
Number of image mipmap levels.
VkImage m_Image
The VkImage wrapped by this class.
uint32_t m_Layers
Number of image layers (6 for a texture cube).
VulkanState & m_VulkanState
The global VulkanState, referenced from VulkanRenderBackend.