Adds support for clearing attachments to the render graph. Clearing attachments requires that the command buffer has rendering active (`vkCmdBeginRendering`); a minimal sketch of the underlying Vulkan calls follows this description.

**What works**

Clear attachments work with dynamic rendering and the render graph:

- `GPUVulkanTest.framebuffer_clear_color_single_attachment`
- `GPUVulkanTest.framebuffer_clear_color_multiple_attachments`
- `GPUVulkanTest.framebuffer_clear_multiple_color_multiple_attachments`
- `GPUVulkanTest.framebuffer_scissor_test`
- `GPUVulkanTest.framebuffer_clear_depth`

**What still needs to be addressed**

- While a command buffer is rendering it cannot issue any pipeline barriers. Clearing attachments doesn't need them, but once drawing is addressed we might need to register drawing resource dependencies in the dependency list of the begin-rendering node. This will be solved when drawing is implemented. [#121648]
- The begin-rendering node is large, as it also has to store data for framebuffers with 8 color attachments. This might become an overhead; it could be solved by splitting the data of larger nodes into two lists. This will be addressed once we can draw to the screen and collect data on the topic. [#121649]

Pull Request: https://projects.blender.org/blender/blender/pulls/121073
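For context, this is a minimal sketch of the raw Vulkan calls such a clear ends up recording. It uses plain Vulkan rather than the render-graph node API, and `vk_command_buffer`, `vk_image_view`, and `extent` are placeholder handles:

```cpp
/* Sketch only: `vk_command_buffer`, `vk_image_view` and `extent` stand in for the handles the
 * render graph resolves when the nodes are flushed to a command buffer. */

/* Dynamic rendering: keep existing attachment content; the clear is recorded explicitly below. */
VkRenderingAttachmentInfo color_attachment = {VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO};
color_attachment.imageView = vk_image_view;
color_attachment.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;

VkRenderingInfo rendering_info = {VK_STRUCTURE_TYPE_RENDERING_INFO};
rendering_info.renderArea = {{0, 0}, extent};
rendering_info.layerCount = 1;
rendering_info.colorAttachmentCount = 1;
rendering_info.pColorAttachments = &color_attachment;

/* vkCmdClearAttachments is only valid while rendering is active. */
vkCmdBeginRendering(vk_command_buffer, &rendering_info);

VkClearAttachment clear_attachment = {};
clear_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
clear_attachment.colorAttachment = 0;
clear_attachment.clearValue.color = {{0.0f, 0.0f, 0.0f, 1.0f}};

VkClearRect clear_rect = {};
clear_rect.rect = {{0, 0}, extent};
clear_rect.baseArrayLayer = 0;
clear_rect.layerCount = 1;

vkCmdClearAttachments(vk_command_buffer, 1, &clear_attachment, 1, &clear_rect);
vkCmdEndRendering(vk_command_buffer);
```

`vkCmdClearAttachments` is only valid between `vkCmdBeginRendering` and `vkCmdEndRendering`, which is why the render graph groups the clear with a begin-rendering node.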
/* SPDX-FileCopyrightText: 2022 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */

/** \file
 * \ingroup gpu
 */

#include "vk_context.hh"
#include "vk_debug.hh"

#include "vk_backend.hh"
#include "vk_framebuffer.hh"
#include "vk_immediate.hh"
#include "vk_memory.hh"
#include "vk_shader.hh"
#include "vk_shader_interface.hh"
#include "vk_state_manager.hh"
#include "vk_texture.hh"

#include "GHOST_C-api.h"

namespace blender::gpu {

VKContext::VKContext(void *ghost_window,
                     void *ghost_context,
                     render_graph::VKResourceStateTracker &resources)
    : render_graph(std::make_unique<render_graph::VKCommandBufferWrapper>(), resources)
{
  ghost_window_ = ghost_window;
  ghost_context_ = ghost_context;

  state_manager = new VKStateManager();
  imm = new VKImmediate();

  /* For off-screen contexts. Default frame-buffer is empty. */
  VKFrameBuffer *framebuffer = new VKFrameBuffer("back_left");
  back_left = framebuffer;
  active_fb = framebuffer;
}

VKContext::~VKContext()
{
  if (surface_texture_) {
    GPU_texture_free(surface_texture_);
    surface_texture_ = nullptr;
  }
  VKBackend::get().device_.context_unregister(*this);

  delete imm;
  imm = nullptr;
}

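/**
 * Update the back buffer state of this context.
 *
 * Initializes the command buffers, descriptor pools and dummy resources on first use, and
 * recreates the `back_left` surface texture when the swap-chain format or extent has changed.
 */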
void VKContext::sync_backbuffer()
{
  if (ghost_context_) {
    VKDevice &device = VKBackend::get().device_;
    if (!command_buffers_.is_initialized()) {
      command_buffers_.init(device);
      descriptor_pools_.init(device);
      device.init_dummy_buffer(*this);
      device.init_dummy_color_attachment();
    }
    descriptor_pools_.reset();
  }

  if (ghost_window_) {
    GHOST_VulkanSwapChainData swap_chain_data = {};
    GHOST_GetVulkanSwapChainFormat((GHOST_WindowHandle)ghost_window_, &swap_chain_data);

    const bool reset_framebuffer = swap_chain_format_ != swap_chain_data.format ||
                                   vk_extent_.width != swap_chain_data.extent.width ||
                                   vk_extent_.height != swap_chain_data.extent.height;
    if (reset_framebuffer) {
      if (has_active_framebuffer()) {
        deactivate_framebuffer();
      }
      if (surface_texture_) {
        GPU_texture_free(surface_texture_);
        surface_texture_ = nullptr;
      }
      surface_texture_ = GPU_texture_create_2d("back-left",
                                               swap_chain_data.extent.width,
                                               swap_chain_data.extent.height,
                                               1,
                                               to_gpu_format(swap_chain_data.format),
                                               GPU_TEXTURE_USAGE_ATTACHMENT,
                                               nullptr);

      back_left->attachment_set(GPU_FB_COLOR_ATTACHMENT0,
                                GPU_ATTACHMENT_TEXTURE(surface_texture_));

      back_left->bind(false);

      swap_chain_format_ = swap_chain_data.format;
      vk_extent_ = swap_chain_data.extent;
    }
  }
}

void VKContext::activate()
{
  /* Make sure no other context is already bound to this thread. */
  BLI_assert(is_active_ == false);

  is_active_ = true;

  sync_backbuffer();

  immActivate();
}

void VKContext::deactivate()
{
  immDeactivate();
  is_active_ = false;
}

void VKContext::begin_frame() {}

void VKContext::end_frame()
{
  VKDevice &device = VKBackend::get().device_get();
  device.destroy_discarded_resources();
}

void VKContext::flush()
{
  command_buffers_.submit();
}

void VKContext::finish()
{
  command_buffers_.finish();
}

void VKContext::memory_statistics_get(int *r_total_mem_kb, int *r_free_mem_kb)
{
  const VKDevice &device = VKBackend::get().device_get();
  device.memory_statistics_get(r_total_mem_kb, r_free_mem_kb);
}

/* -------------------------------------------------------------------- */
/** \name State manager
 * \{ */

VKStateManager &VKContext::state_manager_get() const
{
  return *static_cast<VKStateManager *>(state_manager);
}

void VKContext::debug_unbind_all_ubo()
{
  state_manager_get().uniform_buffer_unbind_all();
}

void VKContext::debug_unbind_all_ssbo()
{
  state_manager_get().storage_buffer_unbind_all();
}

/** \} */

/* -------------------------------------------------------------------- */
/** \name Frame-buffer
 * \{ */

void VKContext::activate_framebuffer(VKFrameBuffer &framebuffer)
{
  if (has_active_framebuffer()) {
    deactivate_framebuffer();
  }

  BLI_assert(active_fb == nullptr);
  active_fb = &framebuffer;
  framebuffer.update_size();
  framebuffer.update_srgb();
  if (use_render_graph) {
    framebuffer.rendering_reset();
  }
  else {
    command_buffers_get().begin_render_pass(framebuffer);
  }
}

VKFrameBuffer *VKContext::active_framebuffer_get() const
{
  return unwrap(active_fb);
}

bool VKContext::has_active_framebuffer() const
{
  return active_framebuffer_get() != nullptr;
}

void VKContext::deactivate_framebuffer()
{
  VKFrameBuffer *framebuffer = active_framebuffer_get();
  BLI_assert(framebuffer != nullptr);
  if (use_render_graph) {
    framebuffer->rendering_end(*this);
  }
  else {
    command_buffers_get().end_render_pass(*framebuffer);
  }
  active_fb = nullptr;
}

void VKContext::rendering_end()
{
  VKFrameBuffer *framebuffer = active_framebuffer_get();
  if (framebuffer) {
    framebuffer->rendering_end(*this);
  }
}

/** \} */

/* -------------------------------------------------------------------- */
/** \name Compute pipeline
 * \{ */

void VKContext::bind_compute_pipeline()
{
  VKShader *shader = unwrap(this->shader);
  BLI_assert(shader);
  VKPipeline &pipeline = shader->pipeline_get();
  pipeline.bind(*this, VK_PIPELINE_BIND_POINT_COMPUTE);
  shader->push_constants.update(*this);
  if (shader->has_descriptor_set()) {
    descriptor_set_.bind(*this, shader->vk_pipeline_layout_get(), VK_PIPELINE_BIND_POINT_COMPUTE);
  }
}

void VKContext::update_pipeline_data(render_graph::VKPipelineData &pipeline_data)
{
  VKShader &vk_shader = unwrap(*shader);
  pipeline_data.vk_pipeline_layout = vk_shader.vk_pipeline_layout_get();
  pipeline_data.vk_pipeline = vk_shader.ensure_and_get_compute_pipeline();

  /* Update descriptor set. */
  pipeline_data.vk_descriptor_set = VK_NULL_HANDLE;
  if (vk_shader.has_descriptor_set()) {
    descriptor_set_.update(*this);
    pipeline_data.vk_descriptor_set = descriptor_set_get().active_descriptor_set()->vk_handle();
  }

  /* Update push constants. */
  pipeline_data.push_constants_data = nullptr;
  pipeline_data.push_constants_size = 0;
  const VKPushConstants::Layout &push_constants_layout =
      vk_shader.interface_get().push_constants_layout_get();
  if (push_constants_layout.storage_type_get() == VKPushConstants::StorageType::PUSH_CONSTANTS) {
    vk_shader.push_constants.update(*this);
    pipeline_data.push_constants_size = push_constants_layout.size_in_bytes();
    pipeline_data.push_constants_data = vk_shader.push_constants.data();
  }
}

render_graph::VKResourceAccessInfo &VKContext::update_and_get_access_info()
{
  access_info_.reset();
  state_manager_get().apply_bindings(*this, access_info_);
  return access_info_;
}

/** \} */

/* -------------------------------------------------------------------- */
/** \name Graphics pipeline
 * \{ */

void VKContext::bind_graphics_pipeline(const GPUPrimType prim_type,
                                       const VKVertexAttributeObject &vertex_attribute_object)
{
  VKShader *shader = unwrap(this->shader);
  BLI_assert(shader);
  BLI_assert_msg(
      prim_type != GPU_PRIM_POINTS || shader->interface_get().is_point_shader(),
      "GPU_PRIM_POINTS is used with a shader that doesn't set point size before "
      "drawing fragments. Calling code should be adapted to use a shader that sets the "
      "gl_PointSize before entering the fragment stage. For example `GPU_SHADER_3D_POINT_*`.");

  shader->update_graphics_pipeline(*this, prim_type, vertex_attribute_object);

  VKPipeline &pipeline = shader->pipeline_get();
  pipeline.bind(*this, VK_PIPELINE_BIND_POINT_GRAPHICS);
  shader->push_constants.update(*this);
  if (shader->has_descriptor_set()) {
    descriptor_set_.bind(*this, shader->vk_pipeline_layout_get(), VK_PIPELINE_BIND_POINT_GRAPHICS);
  }
}

/** \} */

/* -------------------------------------------------------------------- */
/** \name Swap buffers
 * \{ */

void VKContext::swap_buffers_pre_callback(const GHOST_VulkanSwapChainData *swap_chain_data)
{
  VKContext *context = VKContext::get();
  BLI_assert(context);
  context->swap_buffers_pre_handler(*swap_chain_data);
}

void VKContext::swap_buffers_post_callback()
{
  VKContext *context = VKContext::get();
  BLI_assert(context);
  context->swap_buffers_post_handler();
}

void VKContext::swap_buffers_pre_handler(const GHOST_VulkanSwapChainData &swap_chain_data)
{
  /*
   * Ensure no graphics/compute commands are scheduled. They could use the back buffer, whose
   * layout is altered here.
   */
  command_buffers_get().submit();

  VKFrameBuffer &framebuffer = *unwrap(back_left);

  VKTexture wrapper("display_texture");
  wrapper.init(swap_chain_data.image,
               VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
               to_gpu_format(swap_chain_data.format));
  wrapper.layout_ensure(*this, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
  framebuffer.color_attachment_layout_ensure(*this, 0, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
  VKTexture *color_attachment = unwrap(unwrap(framebuffer.color_tex(0)));

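  /* Blit the back-left color attachment into the swap-chain image. The source Y offsets are
   * inverted so the image is flipped vertically: the GPU module uses a bottom-left origin while
   * the Vulkan swap-chain image has a top-left origin. */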
  VkImageBlit image_blit = {};
  image_blit.srcOffsets[0] = {0, color_attachment->height_get() - 1, 0};
  image_blit.srcOffsets[1] = {color_attachment->width_get(), 0, 1};
  image_blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  image_blit.srcSubresource.mipLevel = 0;
  image_blit.srcSubresource.baseArrayLayer = 0;
  image_blit.srcSubresource.layerCount = 1;

  image_blit.dstOffsets[0] = {0, 0, 0};
  image_blit.dstOffsets[1] = {
      int32_t(swap_chain_data.extent.width), int32_t(swap_chain_data.extent.height), 1};
  image_blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  image_blit.dstSubresource.mipLevel = 0;
  image_blit.dstSubresource.baseArrayLayer = 0;
  image_blit.dstSubresource.layerCount = 1;

  command_buffers_get().blit(wrapper,
                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                             *color_attachment,
                             VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                             Span<VkImageBlit>(&image_blit, 1));
  wrapper.layout_ensure(*this, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
  command_buffers_get().submit();
}

void VKContext::swap_buffers_post_handler()
{
  sync_backbuffer();
}

/** \} */

}  // namespace blender::gpu