diff --git a/source/blender/compositor/realtime_compositor/CMakeLists.txt b/source/blender/compositor/realtime_compositor/CMakeLists.txt
index bc457ad92a4..3b9fea035d9 100644
--- a/source/blender/compositor/realtime_compositor/CMakeLists.txt
+++ b/source/blender/compositor/realtime_compositor/CMakeLists.txt
@@ -132,6 +132,7 @@ set(GLSL_SRC
   shaders/compositor_normalize.glsl
   shaders/compositor_parallel_reduction.glsl
   shaders/compositor_plane_deform.glsl
+  shaders/compositor_plane_deform_motion_blur.glsl
   shaders/compositor_projector_lens_distortion.glsl
   shaders/compositor_read_pass.glsl
   shaders/compositor_realize_on_domain.glsl
@@ -231,6 +232,7 @@ set(SRC_SHADER_CREATE_INFOS
   shaders/infos/compositor_normalize_info.hh
   shaders/infos/compositor_parallel_reduction_info.hh
   shaders/infos/compositor_plane_deform_info.hh
+  shaders/infos/compositor_plane_deform_motion_blur_info.hh
   shaders/infos/compositor_projector_lens_distortion_info.hh
   shaders/infos/compositor_read_pass_info.hh
   shaders/infos/compositor_realize_on_domain_info.hh
diff --git a/source/blender/compositor/realtime_compositor/shaders/compositor_plane_deform_motion_blur.glsl b/source/blender/compositor/realtime_compositor/shaders/compositor_plane_deform_motion_blur.glsl
new file mode 100644
index 00000000000..777f8598857
--- /dev/null
+++ b/source/blender/compositor/realtime_compositor/shaders/compositor_plane_deform_motion_blur.glsl
@@ -0,0 +1,31 @@
+void main()
+{
+  ivec2 texel = ivec2(gl_GlobalInvocationID.xy);
+
+  /* Add 0.5 to evaluate the sampler at the center of the pixel and divide by the size to get the
+   * coordinates into the sampler's expected [0, 1] range. We choose the maximum between both
+   * output sizes because one of the outputs might be a dummy 1x1 image. */
+  ivec2 output_size = max(imageSize(output_img), imageSize(mask_img));
+  vec2 coordinates = (vec2(texel) + vec2(0.5)) / vec2(output_size);
+
+  vec4 accumulated_color = vec4(0.0);
+  for (int i = 0; i < number_of_motion_blur_samples; i++) {
+    mat3 homography_matrix = mat3(homography_matrices[i]);
+
+    vec3 transformed_coordinates = homography_matrix * vec3(coordinates, 1.0);
+    vec2 projected_coordinates = transformed_coordinates.xy / transformed_coordinates.z;
+
+    /* The derivatives of the projected coordinates with respect to x and y are the first and
+     * second columns respectively, divided by the z projection factor as can be shown by
+     * differentiating the above matrix multiplication with respect to x and y. */
+    vec2 x_gradient = homography_matrix[0].xy / transformed_coordinates.z;
+    vec2 y_gradient = homography_matrix[1].xy / transformed_coordinates.z;
+
+    accumulated_color += textureGrad(input_tx, projected_coordinates, x_gradient, y_gradient);
+  }
+
+  accumulated_color /= number_of_motion_blur_samples;
+
+  imageStore(output_img, texel, accumulated_color);
+  imageStore(mask_img, texel, accumulated_color.aaaa);
+}
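The gradient expressions used for textureGrad in the shader above follow from differentiating the projective mapping; the derivation below is illustrative only and uses my own notation, it is not part of the patch. Writing the homography as H = (h_ij), the transformed point as t = H (x, y, 1)^T and the projected point as p = (t_1, t_2) / t_3, the quotient rule gives

  \frac{\partial p}{\partial x} = \frac{1}{t_3}\begin{pmatrix} h_{11} \\ h_{21} \end{pmatrix} - \frac{h_{31}}{t_3}\, p \;\approx\; \frac{1}{t_3}\begin{pmatrix} h_{11} \\ h_{21} \end{pmatrix}

that is, the first two entries of the first column of H divided by the projection factor t_3, which is what the shader passes as x_gradient (and analogously the second column for y_gradient, since GLSL matrix indexing is column major). As I read it, the term proportional to h_{31} is dropped; that only influences how the mip level and anisotropy are chosen, not the sampled position.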
diff --git a/source/blender/compositor/realtime_compositor/shaders/infos/compositor_plane_deform_motion_blur_info.hh b/source/blender/compositor/realtime_compositor/shaders/infos/compositor_plane_deform_motion_blur_info.hh
new file mode 100644
index 00000000000..822dd260f34
--- /dev/null
+++ b/source/blender/compositor/realtime_compositor/shaders/infos/compositor_plane_deform_motion_blur_info.hh
@@ -0,0 +1,13 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+
+#include "gpu_shader_create_info.hh"
+
+GPU_SHADER_CREATE_INFO(compositor_plane_deform_motion_blur)
+    .local_group_size(16, 16)
+    .push_constant(Type::INT, "number_of_motion_blur_samples")
+    .uniform_buf(0, "mat4", "homography_matrices[64]")
+    .sampler(0, ImageType::FLOAT_2D, "input_tx")
+    .image(0, GPU_RGBA16F, Qualifier::WRITE, ImageType::FLOAT_2D, "output_img")
+    .image(1, GPU_R16F, Qualifier::WRITE, ImageType::FLOAT_2D, "mask_img")
+    .compute_source("compositor_plane_deform_motion_blur.glsl")
+    .do_static_compilation(true);
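A note on the uniform buffer declared above (my reading, not stated in the patch): the homographies are 3x3, yet the UBO is a fixed array of 64 mat4 elements. Under the std140 layout a mat3 array element carries a padded column per matrix and would not match a tightly packed CPU matrix, while a mat4 element has the same plain 64-byte footprint as blender::float4x4, so the operation can upload its matrices without repacking; the fixed count of 64 is presumably sized to the node's maximum motion blur sample count. A minimal sketch of that layout assumption:

  /* Sketch only, not part of the patch: the std140 stride of a mat4 array element is
   * 4 columns x 16 bytes, matching the tightly packed blender::float4x4, so the data
   * of an Array<float4x4> can be passed to GPU_uniformbuf_create_ex() directly. */
  #include "BLI_math_matrix_types.hh"

  static_assert(sizeof(blender::float4x4) == 4 * 4 * sizeof(float));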
diff --git a/source/blender/nodes/composite/nodes/node_composite_planetrackdeform.cc b/source/blender/nodes/composite/nodes/node_composite_planetrackdeform.cc
index 6eb3cd48fea..cd9dc7f666e 100644
--- a/source/blender/nodes/composite/nodes/node_composite_planetrackdeform.cc
+++ b/source/blender/nodes/composite/nodes/node_composite_planetrackdeform.cc
@@ -5,6 +5,10 @@
  * \ingroup cmpnodes
  */
 
+#include "BLI_array.hh"
+#include "BLI_math_matrix_types.hh"
+#include "BLI_math_vector_types.hh"
+
 #include "BLT_translation.h"
 
 #include "DNA_movieclip_types.h"
@@ -12,6 +16,7 @@
 #include "BKE_context.h"
 #include "BKE_lib_id.h"
+#include "BKE_movieclip.h"
 #include "BKE_tracking.h"
 
 #include "RNA_access.h"
@@ -20,15 +25,22 @@
 #include "UI_interface.h"
 #include "UI_resources.h"
 
+#include "GPU_shader.h"
+#include "GPU_texture.h"
+#include "GPU_uniform_buffer.h"
+
 #include "COM_node_operation.hh"
+#include "COM_utilities.hh"
 
 #include "node_composite_util.hh"
 
 namespace blender::nodes::node_composite_planetrackdeform_cc {
 
+NODE_STORAGE_FUNCS(NodePlaneTrackDeformData)
+
 static void cmp_node_planetrackdeform_declare(NodeDeclarationBuilder &b)
 {
-  b.add_input<decl::Color>(N_("Image"));
+  b.add_input<decl::Color>(N_("Image")).compositor_skip_realization();
   b.add_output<decl::Color>(N_("Image"));
   b.add_output<decl::Float>(N_("Plane"));
 }
@@ -116,9 +128,144 @@ class PlaneTrackDeformOperation : public NodeOperation {
   void execute() override
   {
-    get_input("Image").pass_through(get_result("Image"));
-    get_result("Plane").allocate_invalid();
-    context().set_info_message("Viewport compositor setup not fully supported");
+    MovieTrackingPlaneTrack *plane_track = get_plane_track();
+
+    Result &input_image = get_input("Image");
+    Result &output_image = get_result("Image");
+    Result &output_mask = get_result("Plane");
+    if (input_image.is_single_value() || !plane_track) {
+      if (output_image.should_compute()) {
+        input_image.pass_through(output_image);
+      }
+      if (output_mask.should_compute()) {
+        output_mask.allocate_single_value();
+        output_mask.set_float_value(1.0f);
+      }
+      return;
+    }
+
+    const Array<float4x4> homography_matrices = compute_homography_matrices(plane_track);
+
+    GPUShader *shader = shader_manager().get("compositor_plane_deform_motion_blur");
+    GPU_shader_bind(shader);
+
+    GPU_shader_uniform_1i(shader, "number_of_motion_blur_samples", homography_matrices.size());
+
+    GPUUniformBuf *matrices_buffer = GPU_uniformbuf_create_ex(
+        homography_matrices.size() * sizeof(float4x4),
+        homography_matrices.data(),
+        "Plane Track Deform Homography Matrices");
+    const int ubo_location = GPU_shader_get_ubo_binding(shader, "homography_matrices");
+    GPU_uniformbuf_bind(matrices_buffer, ubo_location);
+
+    GPU_texture_mipmap_mode(input_image.texture(), true, true);
+    GPU_texture_anisotropic_filter(input_image.texture(), true);
+    GPU_texture_extend_mode(input_image.texture(), GPU_SAMPLER_EXTEND_MODE_CLAMP_TO_BORDER);
+    input_image.bind_as_texture(shader, "input_tx");
+
+    const Domain domain = compute_domain();
+    output_image.allocate_texture(domain);
+    output_image.bind_as_image(shader, "output_img");
+
+    output_mask.allocate_texture(domain);
+    output_mask.bind_as_image(shader, "mask_img");
+
+    compute_dispatch_threads_at_least(shader, domain.size);
+
+    input_image.unbind_as_texture();
+    output_image.unbind_as_image();
+    output_mask.unbind_as_image();
+    GPU_shader_unbind();
+
+    GPU_uniformbuf_unbind(matrices_buffer);
+    GPU_uniformbuf_free(matrices_buffer);
+  }
+
+  Domain compute_domain() override
+  {
+    MovieTrackingPlaneTrack *plane_track = get_plane_track();
+
+    Result &input_image = get_input("Image");
+    if (input_image.is_single_value() || !plane_track) {
+      return input_image.domain();
+    }
+
+    return Domain(get_movie_clip_size());
+  }
+
+  Array<float4x4> compute_homography_matrices(MovieTrackingPlaneTrack *plane_track)
+  {
+    /* We evaluate at the frames in the range [frame - shutter, frame + shutter]. If motion blur
+     * is disabled or the number of motion blur samples is 1, we just evaluate at the current
+     * frame. */
+    const int samples = use_motion_blur() ? node_storage(bnode()).motion_blur_samples : 1;
+    const float shutter = samples != 1 ? node_storage(bnode()).motion_blur_shutter : 0.0f;
+    const float start_frame = context().get_frame_number() - shutter;
+    const float frame_step = (shutter * 2.0f) / samples;
+
+    Array<float4x4> matrices(samples);
+    for (int i = 0; i < samples; i++) {
+      const float frame = start_frame + frame_step * i;
+      const float clip_frame = BKE_movieclip_remap_scene_to_clip_frame(get_movie_clip(), frame);
+
+      float corners[4][2];
+      BKE_tracking_plane_marker_get_subframe_corners(plane_track, clip_frame, corners);
+
+      /* Compute a 2D projection matrix that projects from the corners of the image in normalized
+       * coordinates into the corners of the tracking plane. */
+      float3x3 homography_matrix;
+      float identity_corners[4][2] = {{0.0f, 0.0f}, {1.0f, 0.0f}, {1.0f, 1.0f}, {0.0f, 1.0f}};
+      BKE_tracking_homography_between_two_quads(
+          corners, identity_corners, homography_matrix.ptr());
+
+      /* Store in a 4x4 matrix due to the alignment requirements of GPU uniform buffers. */
+      matrices[i] = float4x4(homography_matrix);
+    }
+
+    return matrices;
+  }
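For concreteness, a worked example of the sampling loop above (illustration only, values chosen by me): at scene frame 10 with motion_blur_shutter = 0.5 and motion_blur_samples = 4,

  const float start_frame = 10.0f - 0.5f;      /* 9.5 */
  const float frame_step = (0.5f * 2.0f) / 4;  /* 0.25 */
  /* i = 0..3 evaluates the plane corners at scene frames 9.5, 9.75, 10.0 and 10.25,
   * each remapped to clip time before querying the plane marker, so the samples span
   * [frame - shutter, frame + shutter) with the upper bound excluded. */

so one homography is built per sub-frame and the shader averages the correspondingly warped samples.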
+
+  MovieTrackingPlaneTrack *get_plane_track()
+  {
+    MovieClip *movie_clip = get_movie_clip();
+
+    if (!movie_clip) {
+      return nullptr;
+    }
+
+    MovieTrackingObject *tracking_object = BKE_tracking_object_get_named(
+        &movie_clip->tracking, node_storage(bnode()).tracking_object);
+
+    if (!tracking_object) {
+      return nullptr;
+    }
+
+    return BKE_tracking_object_find_plane_track_with_name(tracking_object,
+                                                          node_storage(bnode()).plane_track_name);
+  }
+
+  int2 get_movie_clip_size()
+  {
+    MovieClipUser user = *DNA_struct_default_get(MovieClipUser);
+    BKE_movieclip_user_set_frame(&user, context().get_frame_number());
+
+    int2 size;
+    BKE_movieclip_get_size(get_movie_clip(), &user, &size.x, &size.y);
+    return size;
+  }
+
+  bool use_motion_blur()
+  {
+    return get_flags() & CMP_NODE_PLANE_TRACK_DEFORM_FLAG_MOTION_BLUR;
+  }
+
+  CMPNodePlaneTrackDeformFlags get_flags()
+  {
+    return static_cast<CMPNodePlaneTrackDeformFlags>(node_storage(bnode()).flag);
+  }
+
+  MovieClip *get_movie_clip()
+  {
+    return reinterpret_cast<MovieClip *>(bnode().id);
   }
 };
@@ -142,8 +289,6 @@ void register_node_type_cmp_planetrackdeform()
   node_type_storage(
       &ntype, "NodePlaneTrackDeformData", node_free_standard_storage, node_copy_standard_storage);
   ntype.get_compositor_operation = file_ns::get_compositor_operation;
-  ntype.realtime_compositor_unsupported_message = N_(
-      "Node not supported in the Viewport compositor");
 
   nodeRegisterType(&ntype);
 }