/* SPDX-FileCopyrightText: 2023 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */
|
/** \file
 * \ingroup gpu
 */
#include "vk_buffer.hh"
|
2023-05-04 10:06:48 +02:00
|
|
|
#include "vk_backend.hh"
|
2023-03-24 07:47:50 +01:00
|
|
|
#include "vk_context.hh"
|
2023-02-21 15:03:12 +01:00
|
|
|
|
|
|
|
|
namespace blender::gpu {
|
|
|
|
|
|
|
|
|
|
VKBuffer::~VKBuffer()
{
  /* Release the backing device memory when this buffer still owns an allocation. */
  if (!is_allocated()) {
    return;
  }
  free();
}

/* Return true when this buffer owns device memory. The VMA allocation handle doubles as the
 * "has backing memory" flag: it is VK_NULL_HANDLE until `create` succeeds and reset by `free`/
 * `free_immediately`. */
bool VKBuffer::is_allocated() const
{
  return allocation_ != VK_NULL_HANDLE;
}

bool VKBuffer::create(size_t size_in_bytes,
|
2023-11-24 13:52:48 +01:00
|
|
|
VkBufferUsageFlags buffer_usage,
|
2025-01-14 17:41:35 +01:00
|
|
|
VkMemoryPropertyFlags required_flags,
|
2025-01-24 11:54:59 +01:00
|
|
|
VkMemoryPropertyFlags preferred_flags,
|
|
|
|
|
VmaAllocationCreateFlags allocation_flags)
|
2023-02-21 15:03:12 +01:00
|
|
|
{
|
|
|
|
|
BLI_assert(!is_allocated());
|
2023-05-30 13:50:35 +02:00
|
|
|
BLI_assert(vk_buffer_ == VK_NULL_HANDLE);
|
|
|
|
|
BLI_assert(mapped_memory_ == nullptr);
|
2023-02-21 15:03:12 +01:00
|
|
|
|
|
|
|
|
size_in_bytes_ = size_in_bytes;
|
2025-03-13 15:05:16 +01:00
|
|
|
alloc_size_in_bytes_ = ceil_to_multiple_ul(max_ulul(size_in_bytes_, 16), 16);
|
2024-06-20 11:34:19 +02:00
|
|
|
VKDevice &device = VKBackend::get().device;
|
2023-02-21 15:03:12 +01:00
|
|
|
|
2023-05-04 10:06:48 +02:00
|
|
|
VmaAllocator allocator = device.mem_allocator_get();
|
2023-02-21 15:03:12 +01:00
|
|
|
VkBufferCreateInfo create_info = {};
|
|
|
|
|
create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
|
|
|
|
|
create_info.flags = 0;
|
2023-05-09 10:20:54 +02:00
|
|
|
/*
|
|
|
|
|
* Vulkan doesn't allow empty buffers but some areas (DrawManager Instance data, PyGPU) create
|
|
|
|
|
* them.
|
|
|
|
|
*/
|
2025-03-13 15:05:16 +01:00
|
|
|
create_info.size = alloc_size_in_bytes_;
|
2023-02-21 15:03:12 +01:00
|
|
|
create_info.usage = buffer_usage;
|
|
|
|
|
/* We use the same command queue for the compute and graphics pipeline, so it is safe to use
|
|
|
|
|
* exclusive resource handling. */
|
|
|
|
|
create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
|
|
|
|
|
create_info.queueFamilyIndexCount = 1;
|
2023-11-16 15:03:47 +01:00
|
|
|
const uint32_t queue_family_indices[1] = {device.queue_family_get()};
|
|
|
|
|
create_info.pQueueFamilyIndices = queue_family_indices;
|
2023-02-21 15:03:12 +01:00
|
|
|
|
|
|
|
|
VmaAllocationCreateInfo vma_create_info = {};
|
2025-01-24 11:54:59 +01:00
|
|
|
vma_create_info.flags = allocation_flags;
|
2023-02-21 15:03:12 +01:00
|
|
|
vma_create_info.priority = 1.0f;
|
2025-01-14 17:41:35 +01:00
|
|
|
vma_create_info.requiredFlags = required_flags;
|
|
|
|
|
vma_create_info.preferredFlags = preferred_flags;
|
2023-02-21 15:03:12 +01:00
|
|
|
vma_create_info.usage = VMA_MEMORY_USAGE_AUTO;
|
|
|
|
|
|
|
|
|
|
VkResult result = vmaCreateBuffer(
|
|
|
|
|
allocator, &create_info, &vma_create_info, &vk_buffer_, &allocation_, nullptr);
|
2023-03-09 09:27:42 +01:00
|
|
|
if (result != VK_SUCCESS) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
2024-06-14 09:44:59 +02:00
|
|
|
device.resources.add_buffer(vk_buffer_);
|
2024-04-19 12:08:57 +02:00
|
|
|
|
2024-12-16 10:09:33 +01:00
|
|
|
vmaGetAllocationMemoryProperties(allocator, allocation_, &vk_memory_property_flags_);
|
|
|
|
|
|
|
|
|
|
if (vk_memory_property_flags_ & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
|
2023-11-24 13:52:48 +01:00
|
|
|
return map();
|
|
|
|
|
}
|
|
|
|
|
return true;
|
2023-02-21 15:03:12 +01:00
|
|
|
}
|
|
|
|
|
|
2024-10-01 14:22:56 +02:00
|
|
|
/* Copy `data` over the full logical size of the buffer through the persistent host mapping.
 * The buffer must be mapped (host visible memory); see `update_sub_immediately`. */
void VKBuffer::update_immediately(const void *data) const
{
  update_sub_immediately(0, size_in_bytes_, data);
}

void VKBuffer::update_sub_immediately(size_t start_offset,
|
|
|
|
|
size_t data_size,
|
|
|
|
|
const void *data) const
|
2023-02-21 15:03:12 +01:00
|
|
|
{
|
2023-03-09 09:27:42 +01:00
|
|
|
BLI_assert_msg(is_mapped(), "Cannot update a non-mapped buffer.");
|
2025-03-11 10:14:00 +01:00
|
|
|
memcpy(static_cast<uint8_t *>(mapped_memory_) + start_offset, data, data_size);
|
2023-11-08 09:44:22 +01:00
|
|
|
}
|
2023-04-25 15:48:52 +02:00
|
|
|
|
2024-10-01 14:22:56 +02:00
|
|
|
/* Schedule a buffer update via the render graph. The whole logical size is updated, so the
 * data must honor the `vkCmdUpdateBuffer` limits checked by the assert below. */
void VKBuffer::update_render_graph(VKContext &context, void *data) const
{
  /* `vkCmdUpdateBuffer` only supports sizes up to 64KiB that are a multiple of 4. */
  BLI_assert(size_in_bytes_ <= 65536 && size_in_bytes_ % 4 == 0);

  render_graph::VKUpdateBufferNode::CreateInfo node_info = {};
  node_info.dst_buffer = vk_buffer_;
  node_info.data = data;
  node_info.data_size = size_in_bytes_;
  context.render_graph().add_node(node_info);
}

void VKBuffer::flush() const
|
|
|
|
|
{
|
2024-06-20 11:34:19 +02:00
|
|
|
const VKDevice &device = VKBackend::get().device;
|
2023-05-04 10:06:48 +02:00
|
|
|
VmaAllocator allocator = device.mem_allocator_get();
|
2024-08-29 08:55:02 +02:00
|
|
|
vmaFlushAllocation(allocator, allocation_, 0, max_ulul(size_in_bytes(), 1));
|
2023-03-09 09:27:42 +01:00
|
|
|
}
|
|
|
|
|
|
2023-03-17 13:48:39 +01:00
|
|
|
/* Schedule filling the whole buffer with a repeated 32-bit `clear_value` via the render
 * graph. The rounded-up allocation size is used, which satisfies the multiple-of-4
 * requirement of `vkCmdFillBuffer`. */
void VKBuffer::clear(VKContext &context, uint32_t clear_value)
{
  render_graph::VKFillBufferNode::CreateInfo node_info = {};
  node_info.vk_buffer = vk_buffer_;
  node_info.size = alloc_size_in_bytes_;
  node_info.data = clear_value;
  context.render_graph().add_node(node_info);
}

/* Submit all pending GPU work without waiting for it, recording the timeline value that
 * `read_async` later waits on. Must not be called while a previous async flush is pending. */
void VKBuffer::async_flush_to_host(VKContext &context)
{
  BLI_assert(async_timeline_ == 0);
  context.rendering_end();
  const RenderGraphFlushFlags flags = RenderGraphFlushFlags::SUBMIT |
                                      RenderGraphFlushFlags::RENEW_RENDER_GRAPH;
  async_timeline_ = context.flush_render_graph(flags);
}

/* Read the buffer contents into `data`, waiting on a previously started `async_flush_to_host`.
 * When no async flush is pending one is started here, making this a synchronous read.
 * `data` must be able to hold `size_in_bytes_` bytes; the buffer must be mapped. */
void VKBuffer::read_async(VKContext &context, void *data)
{
  BLI_assert_msg(is_mapped(), "Cannot read a non-mapped buffer.");
  /* Callers may have scheduled the flush themselves; only submit when needed. */
  if (async_timeline_ == 0) {
    async_flush_to_host(context);
  }
  VKDevice &device = VKBackend::get().device;
  /* Block until the GPU has finished the submitted work. */
  device.wait_for_timeline(async_timeline_);
  /* Reset so a new async flush can be started. */
  async_timeline_ = 0;
  memcpy(data, mapped_memory_, size_in_bytes_);
}

/* Synchronously read the buffer contents into `data`: submit all pending GPU work, wait for
 * completion, then copy from the persistent host mapping. `data` must be able to hold
 * `size_in_bytes_` bytes. Must not be mixed with a pending `async_flush_to_host`. */
void VKBuffer::read(VKContext &context, void *data) const
{
  BLI_assert_msg(is_mapped(), "Cannot read a non-mapped buffer.");
  BLI_assert(async_timeline_ == 0);
  context.rendering_end();
  /* Submit and block until the GPU work that produces the buffer contents has finished. */
  context.flush_render_graph(RenderGraphFlushFlags::SUBMIT |
                             RenderGraphFlushFlags::WAIT_FOR_COMPLETION |
                             RenderGraphFlushFlags::RENEW_RENDER_GRAPH);
  memcpy(data, mapped_memory_, size_in_bytes_);
}

/* Return the host pointer of the persistently mapped buffer memory.
 * Only valid for host visible buffers; asserts when the buffer is not mapped. */
void *VKBuffer::mapped_memory_get() const
{
  BLI_assert_msg(is_mapped(), "Cannot access a non-mapped buffer.");
  return mapped_memory_;
}

/* Return true when the buffer memory is currently mapped into host address space.
 * The mapped pointer itself serves as the state flag (set by `map`, cleared by `unmap`). */
bool VKBuffer::is_mapped() const
{
  return mapped_memory_ != nullptr;
}

bool VKBuffer::map()
|
2023-02-21 15:03:12 +01:00
|
|
|
{
|
2023-03-09 09:27:42 +01:00
|
|
|
BLI_assert(!is_mapped());
|
2024-06-20 11:34:19 +02:00
|
|
|
const VKDevice &device = VKBackend::get().device;
|
2023-05-04 10:06:48 +02:00
|
|
|
VmaAllocator allocator = device.mem_allocator_get();
|
2023-03-09 09:27:42 +01:00
|
|
|
VkResult result = vmaMapMemory(allocator, allocation_, &mapped_memory_);
|
2023-02-21 15:03:12 +01:00
|
|
|
return result == VK_SUCCESS;
|
|
|
|
|
}
|
|
|
|
|
|
2023-05-04 10:06:48 +02:00
|
|
|
void VKBuffer::unmap()
|
2023-02-21 15:03:12 +01:00
|
|
|
{
|
2023-03-09 09:27:42 +01:00
|
|
|
BLI_assert(is_mapped());
|
2024-06-20 11:34:19 +02:00
|
|
|
const VKDevice &device = VKBackend::get().device;
|
2023-05-04 10:06:48 +02:00
|
|
|
VmaAllocator allocator = device.mem_allocator_get();
|
2023-02-21 15:03:12 +01:00
|
|
|
vmaUnmapMemory(allocator, allocation_);
|
2023-03-09 09:27:42 +01:00
|
|
|
mapped_memory_ = nullptr;
|
2023-02-21 15:03:12 +01:00
|
|
|
}
|
|
|
|
|
|
2023-05-04 10:06:48 +02:00
|
|
|
bool VKBuffer::free()
|
2023-02-21 15:03:12 +01:00
|
|
|
{
|
2023-03-09 09:27:42 +01:00
|
|
|
if (is_mapped()) {
|
2023-05-04 10:06:48 +02:00
|
|
|
unmap();
|
2023-03-09 09:27:42 +01:00
|
|
|
}
|
|
|
|
|
|
2025-01-27 08:55:23 +01:00
|
|
|
VKDiscardPool::discard_pool_get().discard_buffer(vk_buffer_, allocation_);
|
2024-08-19 15:37:48 +02:00
|
|
|
|
2023-08-24 10:23:21 +02:00
|
|
|
allocation_ = VK_NULL_HANDLE;
|
|
|
|
|
vk_buffer_ = VK_NULL_HANDLE;
|
2024-08-19 15:37:48 +02:00
|
|
|
|
2023-02-21 15:03:12 +01:00
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2024-09-27 15:01:10 +02:00
|
|
|
/* Destroy the buffer and its allocation right away, without going through the discard pool.
 * Caller must guarantee no GPU work still references the buffer. */
void VKBuffer::free_immediately(VKDevice &device)
{
  BLI_assert(vk_buffer_ != VK_NULL_HANDLE);
  BLI_assert(allocation_ != VK_NULL_HANDLE);
  /* Host mapping must be released before the allocation is destroyed. */
  if (is_mapped()) {
    unmap();
  }
  /* Unregister before destruction so the device resource tracking stays consistent. */
  device.resources.remove_buffer(vk_buffer_);
  vmaDestroyBuffer(device.mem_allocator_get(), vk_buffer_, allocation_);
  allocation_ = VK_NULL_HANDLE;
  vk_buffer_ = VK_NULL_HANDLE;
}

2023-02-21 15:03:12 +01:00
|
|
|
} // namespace blender::gpu
|