/* SPDX-FileCopyrightText: 2021 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */

/** \file
 * \ingroup gpu
 *
 * Descriptor type used to define shader structure, resources and interfaces.
 */
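
/* Illustrative note (hypothetical shader name): create infos are declared in the
 * `*_infos_list.hh` files included by `gpu_shader_create_info_init()` below and registered by
 * name. They can then be looked up with the accessor defined at the end of this file, e.g.
 *
 *   const GPUShaderCreateInfo *info = gpu_shader_create_info_get("my_example_shader");
 */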

#include <iostream>

#include "BLI_map.hh"
#include "BLI_set.hh"
#include "BLI_string_ref.hh"

#include "BKE_global.hh"

#include "GPU_capabilities.hh"
#include "GPU_context.hh"
#include "GPU_platform.hh"
#include "GPU_shader.hh"

#include "gpu_shader_create_info.hh"
#include "gpu_shader_create_info_private.hh"
#include "gpu_shader_dependency_private.hh"

#undef GPU_SHADER_NAMED_INTERFACE_INFO
#undef GPU_SHADER_INTERFACE_INFO
#undef GPU_SHADER_CREATE_INFO
#undef GPU_SHADER_NAMED_INTERFACE_END
#undef GPU_SHADER_INTERFACE_END
#undef GPU_SHADER_CREATE_END

namespace blender::gpu::shader {

using CreateInfoDictionary = Map<StringRef, ShaderCreateInfo *>;
using InterfaceDictionary = Map<StringRef, StageInterfaceInfo *>;

static CreateInfoDictionary *g_create_infos = nullptr;
static InterfaceDictionary *g_interfaces = nullptr;

/* -------------------------------------------------------------------- */
/** \name Check Backend Support
 *
 * \{ */
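
/* Illustrative case rejected below: a stage interface that has an instance name and whose
 * members mix interpolation qualifiers (e.g. one member declared with Interpolation::FLAT and
 * another with Interpolation::SMOOTH). Vulkan applies a single interpolation mode to such a
 * block, so mixed interfaces are reported as incompatible. */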

static bool is_vulkan_compatible_interface(const StageInterfaceInfo &iface)
{
  if (iface.instance_name.is_empty()) {
    return true;
  }

  bool use_flat = false;
  bool use_smooth = false;
  bool use_noperspective = false;
  for (const StageInterfaceInfo::InOut &attr : iface.inouts) {
    switch (attr.interp) {
      case Interpolation::FLAT:
        use_flat = true;
        break;
      case Interpolation::SMOOTH:
        use_smooth = true;
        break;
      case Interpolation::NO_PERSPECTIVE:
        use_noperspective = true;
        break;
    }
  }
  int num_used_interpolation_types = (use_flat ? 1 : 0) + (use_smooth ? 1 : 0) +
                                     (use_noperspective ? 1 : 0);

#if 0
  if (num_used_interpolation_types > 1) {
    std::cout << "'" << iface.name << "' uses multiple interpolation types\n";
  }
#endif

  return num_used_interpolation_types <= 1;
}

bool ShaderCreateInfo::is_vulkan_compatible() const
{
  /* Vulkan doesn't support setting an interpolation mode per attribute in a struct. */
  for (const StageInterfaceInfo *iface : vertex_out_interfaces_) {
    if (!is_vulkan_compatible_interface(*iface)) {
      return false;
    }
  }
  for (const StageInterfaceInfo *iface : geometry_out_interfaces_) {
    if (!is_vulkan_compatible_interface(*iface)) {
      return false;
    }
  }

  return true;
}

/** \} */
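
/**
 * Append one `#define CREATE_INFO_<name>` guard per create info: this info and, recursively,
 * every info listed in its `additional_infos_`. Illustrative output for a hypothetical info
 * named `my_example_shader` that additionally includes `my_example_lib`:
 *
 *   #define CREATE_INFO_my_example_shader
 *   #define CREATE_INFO_my_example_lib
 *
 * Infos whose name starts with "MA" or "WO" (material/world shaders) are grouped under a single
 * `CREATE_INFO_Material` guard instead, as done below.
 */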
void ShaderCreateInfo::resource_guard_defines(std::string &defines) const
{
  if (name_.startswith("MA") || name_.startswith("WO")) {
    defines += "#define CREATE_INFO_Material\n";
  }
  else {
    defines += "#define CREATE_INFO_" + name_ + "\n";
  }
  for (const auto &info_name : additional_infos_) {
    const ShaderCreateInfo &info = *reinterpret_cast<const ShaderCreateInfo *>(
        gpu_shader_create_info_get(info_name.c_str()));

    info.resource_guard_defines(defines);
  }
}
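
/**
 * Merge every info listed in `additional_infos_` into this one: vertex inputs, interfaces,
 * resources, constants, sources and entry points are pulled in with duplicate and overlap
 * checks, then binding slots are assigned when `auto_resource_location_` is set. When
 * `recursive` is true, dependencies are finalized first. As a sketch: an info amended with
 * `additional_info("gpu_print")` (as done in `gpu_shader_create_info_init()` below) ends up with
 * `gpu_print`'s resources and defines as if they were declared on the info itself.
 */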
void ShaderCreateInfo::finalize(const bool recursive)
{
  if (finalized_) {
    return;
  }
  finalized_ = true;

  Set<StringRefNull> deps_merged;

  validate_vertex_attributes();

  for (auto &info_name : additional_infos_) {
    /* Fetch create info. */
    const ShaderCreateInfo &info = *reinterpret_cast<const ShaderCreateInfo *>(
        gpu_shader_create_info_get(info_name.c_str()));

    if (recursive) {
      const_cast<ShaderCreateInfo &>(info).finalize(recursive);
    }
    else {
      BLI_assert(info.finalized_);
    }

    interface_names_size_ += info.interface_names_size_;

    /* NOTE: EEVEE Materials can result in nested includes. To avoid duplicate
     * shader resources, we need to avoid inserting duplicates.
     * TODO: Optimize create info preparation to include each individual "additional_info"
     * only a single time. */
    vertex_inputs_.extend_non_duplicates(info.vertex_inputs_);
    fragment_outputs_.extend_non_duplicates(info.fragment_outputs_);
    vertex_out_interfaces_.extend_non_duplicates(info.vertex_out_interfaces_);
    geometry_out_interfaces_.extend_non_duplicates(info.geometry_out_interfaces_);
    subpass_inputs_.extend_non_duplicates(info.subpass_inputs_);
    specialization_constants_.extend_non_duplicates(info.specialization_constants_);
    compilation_constants_.extend_non_duplicates(info.compilation_constants_);

    shared_variables_.extend(info.shared_variables_);

    validate_vertex_attributes(&info);

    /* Insert with duplicate check. */
    push_constants_.extend_non_duplicates(info.push_constants_);
    defines_.extend_non_duplicates(info.defines_);
    batch_resources_.extend_non_duplicates(info.batch_resources_);
    pass_resources_.extend_non_duplicates(info.pass_resources_);
    geometry_resources_.extend_non_duplicates(info.geometry_resources_);
    typedef_sources_.extend_non_duplicates(info.typedef_sources_);

    /* API-specific parameters.
     * We will only copy API-specific parameters if they are otherwise unassigned. */
#ifdef WITH_METAL_BACKEND
    if (mtl_max_threads_per_threadgroup_ == 0) {
      mtl_max_threads_per_threadgroup_ = info.mtl_max_threads_per_threadgroup_;
    }
#endif

    if (info.early_fragment_test_) {
      early_fragment_test_ = true;
      depth_write_ = DepthWrite::UNCHANGED;
    }
    /* Modify depth write if has been changed from default.
     * `UNCHANGED` implies gl_FragDepth is not used at all. */
    if (info.depth_write_ != DepthWrite::UNCHANGED) {
      depth_write_ = info.depth_write_;
    }

    /* Inherit builtin bits from additional info. */
    builtins_ |= info.builtins_;

    validate_merge(info);

    auto assert_no_overlap = [&](const bool test, const StringRefNull error) {
      if (!test) {
        std::cout << name_ << ": Validation failed while merging " << info.name_ << " : ";
        std::cout << error << std::endl;
        BLI_assert(0);
      }
    };

    if (!deps_merged.add(info.name_)) {
      assert_no_overlap(false, "additional info already merged via another info");
    }

    if (info.compute_layout_.local_size_x != -1) {
      assert_no_overlap(compute_layout_.local_size_x == -1, "Compute layout already defined");
      compute_layout_ = info.compute_layout_;
    }

    if (!info.vertex_source_.is_empty()) {
      assert_no_overlap(vertex_source_.is_empty(), "Vertex source already existing");
      vertex_source_ = info.vertex_source_;
    }
    if (!info.geometry_source_.is_empty()) {
      assert_no_overlap(geometry_source_.is_empty(), "Geometry source already existing");
      geometry_source_ = info.geometry_source_;
      geometry_layout_ = info.geometry_layout_;
    }
    if (!info.fragment_source_.is_empty()) {
      assert_no_overlap(fragment_source_.is_empty(), "Fragment source already existing");
      fragment_source_ = info.fragment_source_;
    }
    if (!info.compute_source_.is_empty()) {
      assert_no_overlap(compute_source_.is_empty(), "Compute source already existing");
      compute_source_ = info.compute_source_;
    }

    if (info.vertex_entry_fn_ != "main") {
      assert_no_overlap(vertex_entry_fn_ == "main", "Vertex function already existing");
      vertex_entry_fn_ = info.vertex_entry_fn_;
    }
    if (info.geometry_entry_fn_ != "main") {
      assert_no_overlap(geometry_entry_fn_ == "main", "Geometry function already existing");
      geometry_entry_fn_ = info.geometry_entry_fn_;
    }
    if (info.fragment_entry_fn_ != "main") {
      assert_no_overlap(fragment_entry_fn_ == "main", "Fragment function already existing");
      fragment_entry_fn_ = info.fragment_entry_fn_;
    }
    if (info.compute_entry_fn_ != "main") {
      assert_no_overlap(compute_entry_fn_ == "main", "Compute function already existing");
      compute_entry_fn_ = info.compute_entry_fn_;
    }
  }

  if (!geometry_source_.is_empty() && bool(builtins_ & BuiltinBits::LAYER)) {
    std::cout << name_
              << ": Validation failed. BuiltinBits::LAYER shouldn't be used with geometry shaders."
              << std::endl;
    BLI_assert(0);
  }

  if (auto_resource_location_) {
    int images = 0, samplers = 0, ubos = 0, ssbos = 0;

    auto set_resource_slot = [&](Resource &res) {
      switch (res.bind_type) {
        case Resource::BindType::UNIFORM_BUFFER:
          res.slot = ubos++;
          break;
        case Resource::BindType::STORAGE_BUFFER:
          res.slot = ssbos++;
          break;
        case Resource::BindType::SAMPLER:
          res.slot = samplers++;
          break;
        case Resource::BindType::IMAGE:
          res.slot = images++;
          break;
      }
    };

    for (auto &res : batch_resources_) {
      set_resource_slot(res);
    }
    for (auto &res : pass_resources_) {
      set_resource_slot(res);
    }
    for (auto &res : geometry_resources_) {
      set_resource_slot(res);
    }
  }
}
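
/**
 * Return a human readable description of every configuration problem found, or an empty string
 * when the create info is valid. The heavier checks (interface instance names, Vulkan
 * compatibility, duplicate constants and shared variables) are only run when `G_DEBUG_GPU` is
 * enabled (`--debug-gpu`).
 */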
std::string ShaderCreateInfo::check_error() const
{
  std::string error;

  /* At least a vertex shader and a fragment shader are required, or only a compute shader. */
  if (this->compute_source_.is_empty()) {
    if (this->vertex_source_.is_empty()) {
      error += "Missing vertex shader in " + this->name_ + ".\n";
    }
    if (this->fragment_source_.is_empty()) {
      error += "Missing fragment shader in " + this->name_ + ".\n";
    }
  }
  else {
    if (!this->vertex_source_.is_empty()) {
      error += "Compute shader has vertex_source_ shader attached in " + this->name_ + ".\n";
    }
    if (!this->geometry_source_.is_empty()) {
      error += "Compute shader has geometry_source_ shader attached in " + this->name_ + ".\n";
    }
    if (!this->fragment_source_.is_empty()) {
      error += "Compute shader has fragment_source_ shader attached in " + this->name_ + ".\n";
    }
  }

  if (!this->geometry_source_.is_empty()) {
    if (bool(this->builtins_ & BuiltinBits::BARYCENTRIC_COORD)) {
      error += "Shader " + this->name_ +
               " has geometry stage and uses barycentric coordinates. This is not allowed as "
               "fallback injects a geometry stage.\n";
    }
    if (bool(this->builtins_ & BuiltinBits::VIEWPORT_INDEX)) {
      error += "Shader " + this->name_ +
               " has geometry stage and uses multi-viewport. This is not allowed as "
               "fallback injects a geometry stage.\n";
    }
    if (bool(this->builtins_ & BuiltinBits::LAYER)) {
      error += "Shader " + this->name_ +
               " has geometry stage and uses layer output. This is not allowed as "
               "fallback injects a geometry stage.\n";
    }
  }

  if ((G.debug & G_DEBUG_GPU) == 0) {
    return error;
  }

  if (bool(this->builtins_ &
           (BuiltinBits::BARYCENTRIC_COORD | BuiltinBits::VIEWPORT_INDEX | BuiltinBits::LAYER)))
  {
    for (const StageInterfaceInfo *interface : this->vertex_out_interfaces_) {
      if (interface->instance_name.is_empty()) {
        error += "Shader " + this->name_ + " uses interface " + interface->name +
                 " that doesn't contain an instance name, but is required for the fallback "
                 "geometry shader.\n";
      }
    }
  }

  if (!this->is_vulkan_compatible()) {
    error += this->name_ +
             " contains a stage interface using an instance name and mixed interpolation modes. "
             "This is not compatible with Vulkan and needs to be adjusted.\n";
  }

  /* Validate specialization constants. */
  for (int i = 0; i < specialization_constants_.size(); i++) {
    for (int j = i + 1; j < specialization_constants_.size(); j++) {
      if (specialization_constants_[i].name == specialization_constants_[j].name) {
        error += this->name_ + " contains two specialization constants with the name: " +
                 std::string(specialization_constants_[i].name);
      }
    }
  }

  /* Validate compilation constants. */
  for (int i = 0; i < compilation_constants_.size(); i++) {
    for (int j = i + 1; j < compilation_constants_.size(); j++) {
      if (compilation_constants_[i].name == compilation_constants_[j].name) {
        error += this->name_ + " contains two compilation constants with the name: " +
                 std::string(compilation_constants_[i].name);
      }
    }
  }

  /* Validate shared variables. */
  for (int i = 0; i < shared_variables_.size(); i++) {
    for (int j = i + 1; j < shared_variables_.size(); j++) {
      if (shared_variables_[i].name == shared_variables_[j].name) {
        error += this->name_ + " contains two shared variables with the name: " +
                 std::string(shared_variables_[i].name);
      }
    }
  }

  return error;
}
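
/**
 * Detect resources that end up sharing the same explicit binding slot after `other_info` has
 * been merged in `finalize()`. Skipped when `auto_resource_location_` is used, since slots are
 * then assigned automatically.
 */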
void ShaderCreateInfo::validate_merge(const ShaderCreateInfo &other_info)
{
  if (!auto_resource_location_) {
    /* Check same bind-points usage in OGL. */
    Set<int> images, samplers, ubos, ssbos;

    auto register_resource = [&](const Resource &res) -> bool {
      switch (res.bind_type) {
        case Resource::BindType::UNIFORM_BUFFER:
          return ubos.add(res.slot);
        case Resource::BindType::STORAGE_BUFFER:
          return ssbos.add(res.slot);
        case Resource::BindType::SAMPLER:
          return samplers.add(res.slot);
        case Resource::BindType::IMAGE:
          return images.add(res.slot);
        default:
          return false;
      }
    };

    auto print_error_msg = [&](const Resource &res, const Vector<Resource> &resources) {
      auto print_resource_name = [&](const Resource &res) {
        switch (res.bind_type) {
          case Resource::BindType::UNIFORM_BUFFER:
            std::cout << "Uniform Buffer " << res.uniformbuf.name;
            break;
          case Resource::BindType::STORAGE_BUFFER:
            std::cout << "Storage Buffer " << res.storagebuf.name;
            break;
          case Resource::BindType::SAMPLER:
            std::cout << "Sampler " << res.sampler.name;
            break;
          case Resource::BindType::IMAGE:
            std::cout << "Image " << res.image.name;
            break;
          default:
            std::cout << "Unknown Type";
            break;
        }
      };

      for (const Resource &_res : resources) {
        if (&res != &_res && res.bind_type == _res.bind_type && res.slot == _res.slot) {
          std::cout << name_ << ": Validation failed : Overlapping ";
          print_resource_name(res);
          std::cout << " and ";
          print_resource_name(_res);
          std::cout << " at (" << res.slot << ") while merging " << other_info.name_ << std::endl;
        }
      }
    };

    for (auto &res : batch_resources_) {
      if (register_resource(res) == false) {
        print_error_msg(res, resources_get_all_());
      }
    }

    for (auto &res : pass_resources_) {
      if (register_resource(res) == false) {
        print_error_msg(res, resources_get_all_());
      }
    }

    for (auto &res : geometry_resources_) {
      if (register_resource(res) == false) {
        print_error_msg(res, resources_get_all_());
      }
    }
  }
}
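
/**
 * Check that vertex attribute locations are valid and do not overlap. Each attribute occupies a
 * single location, except `Type::float4x4_t` which occupies four consecutive ones: e.g. a
 * (hypothetical) mat4 attribute at index 4 claims locations 4-7, so another attribute bound to
 * index 5 would be reported as overlapping.
 */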
void ShaderCreateInfo::validate_vertex_attributes(const ShaderCreateInfo *other_info)
{
  uint32_t attr_bits = 0;
  for (auto &attr : vertex_inputs_) {
    if (attr.type == Type::float3x3_t) {
      std::cout << name_ << ": \"" << attr.name
                << "\" : Type::float3x3_t unsupported as vertex attribute." << std::endl;
      BLI_assert(0);
    }
    if (attr.index >= 16 || attr.index < 0) {
      std::cout << name_ << ": Invalid index for attribute \"" << attr.name << "\"" << std::endl;
      BLI_assert(0);
    }
    uint32_t attr_new = 0;
    if (attr.type == Type::float4x4_t) {
      for (int i = 0; i < 4; i++) {
        attr_new |= 1 << (attr.index + i);
      }
    }
    else {
      attr_new |= 1 << attr.index;
    }

    if ((attr_bits & attr_new) != 0) {
      std::cout << name_ << ": Attribute \"" << attr.name
                << "\" overlaps one or more indices from another attribute."
                   " Note that mat4 takes up 4 indices.";
      if (other_info) {
        std::cout << " While merging " << other_info->name_ << std::endl;
      }
      std::cout << std::endl;
      BLI_assert(0);
    }
    attr_bits |= attr_new;
  }
}

}  // namespace blender::gpu::shader

using namespace blender::gpu::shader;

#ifdef _MSC_VER
/* Disable optimization for this function with MSVC. It does not cope well with the large number
 * of shader infos declared inside a single function (whether they share a basic block or not
 * does not change anything). Since this function is only called once to register the shaders,
 * whether it is optimized does not matter: it is not on any hot code path. */
#  pragma optimize("", off)
#endif
void gpu_shader_create_info_init()
{
  g_create_infos = new CreateInfoDictionary();
  g_interfaces = new InterfaceDictionary();

#define GPU_SHADER_NAMED_INTERFACE_INFO(_interface, _inst_name) \
  StageInterfaceInfo *ptr_##_interface = new StageInterfaceInfo(#_interface, #_inst_name); \
  StageInterfaceInfo &_interface = *ptr_##_interface; \
  g_interfaces->add_new(#_interface, ptr_##_interface); \
  _interface

#define GPU_SHADER_INTERFACE_INFO(_interface) \
  StageInterfaceInfo *ptr_##_interface = new StageInterfaceInfo(#_interface); \
  StageInterfaceInfo &_interface = *ptr_##_interface; \
  g_interfaces->add_new(#_interface, ptr_##_interface); \
  _interface

#define GPU_SHADER_CREATE_INFO(_info) \
  ShaderCreateInfo *ptr_##_info = new ShaderCreateInfo(#_info); \
  ShaderCreateInfo &_info = *ptr_##_info; \
  g_create_infos->add_new(#_info, ptr_##_info); \
  _info

#define GPU_SHADER_NAMED_INTERFACE_END(_inst_name) ;
#define GPU_SHADER_INTERFACE_END() ;
#define GPU_SHADER_CREATE_END() ;

  /* Declare, register and construct the infos. */
#include "glsl_compositor_infos_list.hh"
#include "glsl_draw_infos_list.hh"
#include "glsl_gpu_infos_list.hh"
#include "glsl_ocio_infos_list.hh"
#ifdef WITH_OPENSUBDIV
#  include "glsl_osd_infos_list.hh"
#endif

  if (GPU_stencil_clasify_buffer_workaround()) {
    /* WORKAROUND: Adding a dummy buffer that isn't used fixes a bug inside the Qualcomm driver. */
    eevee_deferred_tile_classify.storage_buf(
        12, Qualifier::read_write, "uint", "dummy_workaround_buf[]");
  }

  for (ShaderCreateInfo *info : g_create_infos->values()) {
    info->is_generated_ = false;

    info->builtins_ |= gpu_shader_dependency_get_builtins(info->vertex_source_);
    info->builtins_ |= gpu_shader_dependency_get_builtins(info->fragment_source_);
    info->builtins_ |= gpu_shader_dependency_get_builtins(info->geometry_source_);
    info->builtins_ |= gpu_shader_dependency_get_builtins(info->compute_source_);

#if GPU_SHADER_PRINTF_ENABLE
    const bool is_material_shader = info->name_.startswith("eevee_surf_");
    if ((info->builtins_ & BuiltinBits::USE_PRINTF) == BuiltinBits::USE_PRINTF ||
        (gpu_shader_dependency_force_gpu_print_injection() && is_material_shader))
    {
      info->additional_info("gpu_print");
    }
#endif

#ifndef NDEBUG
    /* Automatically amend the create info for ease of use of the debug feature. */
    if ((info->builtins_ & BuiltinBits::USE_DEBUG_DRAW) == BuiltinBits::USE_DEBUG_DRAW) {
      info->additional_info("draw_debug_draw");
    }
#endif
  }

  for (ShaderCreateInfo *info : g_create_infos->values()) {
    info->finalize(true);
  }

  /* TEST */
  // gpu_shader_create_info_compile(nullptr);
}
#ifdef _MSC_VER
#  pragma optimize("", on)
#endif

void gpu_shader_create_info_exit()
{
  for (auto *value : g_create_infos->values()) {
    delete value;
  }
  delete g_create_infos;

  for (auto *value : g_interfaces->values()) {
    delete value;
  }
  delete g_interfaces;
}
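
/**
 * Compile every create info flagged for static compilation (optionally restricted to names
 * starting with `name_starts_with_filter`) in a single batch, print a summary, and return true
 * when every requested shader compiled successfully.
 */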
bool gpu_shader_create_info_compile(const char *name_starts_with_filter)
{
  using namespace blender;
  using namespace blender::gpu;
  int success = 0;
  int skipped_filter = 0;
  int skipped = 0;
  int total = 0;

  Vector<const GPUShaderCreateInfo *> infos;

  for (ShaderCreateInfo *info : g_create_infos->values()) {
    info->finalize();
    if (info->do_static_compilation_) {
      if (name_starts_with_filter &&
          !info->name_.startswith(blender::StringRefNull(name_starts_with_filter)))
      {
        skipped_filter++;
        continue;
      }
      if ((info->metal_backend_only_ && GPU_backend_get_type() != GPU_BACKEND_METAL) ||
          (GPU_geometry_shader_support() == false && info->geometry_source_ != nullptr))
      {
        skipped++;
        continue;
      }
      total++;

      infos.append(reinterpret_cast<const GPUShaderCreateInfo *>(info));
    }
  }

  BatchHandle batch = GPU_shader_batch_create_from_infos(infos);
  Vector<blender::gpu::Shader *> result = GPU_shader_batch_finalize(batch);

  for (int i : result.index_range()) {
    const ShaderCreateInfo *info = reinterpret_cast<const ShaderCreateInfo *>(infos[i]);
    if (result[i] == nullptr) {
      std::cerr << "Compilation " << info->name_.c_str() << " Failed\n";
    }
    else {
      success++;
#if 0 /* TODO(fclem): This is too verbose for now. Make it a cmake option. */
      /* Test if any resource is optimized out and print a warning if that's the case. */
      /* TODO(fclem): Limit this to OpenGL backend. */
      const ShaderInterface *interface = shader->interface;

      blender::Vector<ShaderCreateInfo::Resource> all_resources = info->resources_get_all_();

      for (ShaderCreateInfo::Resource &res : all_resources) {
        blender::StringRefNull name = "";
        const ShaderInput *input = nullptr;

        switch (res.bind_type) {
          case ShaderCreateInfo::Resource::BindType::UNIFORM_BUFFER:
            input = interface->ubo_get(res.slot);
            name = res.uniformbuf.name;
            break;
          case ShaderCreateInfo::Resource::BindType::STORAGE_BUFFER:
            input = interface->ssbo_get(res.slot);
            name = res.storagebuf.name;
            break;
          case ShaderCreateInfo::Resource::BindType::SAMPLER:
            input = interface->texture_get(res.slot);
            name = res.sampler.name;
            break;
          case ShaderCreateInfo::Resource::BindType::IMAGE:
            input = interface->texture_get(res.slot);
            name = res.image.name;
            break;
        }

        if (input == nullptr) {
          std::cerr << "Error: " << info->name_;
          std::cerr << ": Resource « " << name << " » not found in the shader interface\n";
        }
        else if (input->location == -1) {
          std::cerr << "Warning: " << info->name_;
          std::cerr << ": Resource « " << name << " » is optimized out\n";
        }
      }
#endif
      GPU_shader_free(result[i]);
    }
  }

  printf("Shader Test compilation result: %d / %d passed", success, total);
  if (skipped_filter > 0) {
    printf(" (skipped %d when filtering)", skipped_filter);
  }
  if (skipped > 0) {
    printf(" (skipped %d for compatibility reasons)", skipped);
  }
  printf("\n");
  return success == total;
}

const GPUShaderCreateInfo *gpu_shader_create_info_get(const char *info_name)
{
  if (g_create_infos->contains(info_name) == false) {
    printf("Error: Cannot find shader create info named \"%s\"\n", info_name);
    return nullptr;
  }
  ShaderCreateInfo *info = g_create_infos->lookup(info_name);
  return reinterpret_cast<const GPUShaderCreateInfo *>(info);
}