Overlay-Next: Motion Path

Ref #102179

Fixes discrepancies on Mac.

Pull Request: https://projects.blender.org/blender/blender/pulls/127623
Clément Foucault
2024-09-16 11:22:54 +02:00
committed by Clément Foucault
parent 8345ebc704
commit 11b07cf309
7 changed files with 469 additions and 1 deletion

View File

@@ -309,6 +309,7 @@ set(SRC
engines/overlay/overlay_next_mesh.hh
engines/overlay/overlay_next_metaball.hh
engines/overlay/overlay_next_mode_transfer.hh
engines/overlay/overlay_next_motion_path.hh
engines/overlay/overlay_next_origin.hh
engines/overlay/overlay_next_outline.hh
engines/overlay/overlay_next_paint.hh
@@ -734,6 +735,7 @@ set(GLSL_SRC
engines/overlay/shaders/overlay_image_vert.glsl
engines/overlay/shaders/overlay_motion_path_line_frag.glsl
engines/overlay/shaders/overlay_motion_path_line_geom.glsl
engines/overlay/shaders/overlay_motion_path_line_next_vert.glsl
engines/overlay/shaders/overlay_motion_path_line_vert.glsl
engines/overlay/shaders/overlay_motion_path_line_vert_no_geom.glsl
engines/overlay/shaders/overlay_motion_path_point_vert.glsl

View File

@@ -115,6 +115,7 @@ void Instance::begin_sync()
resources.begin_sync();
background.begin_sync(resources, state);
motion_paths.begin_sync(resources, state);
origins.begin_sync(state);
outline.begin_sync(resources, state);
@@ -285,6 +286,7 @@ void Instance::object_sync(ObjectRef &ob_ref, Manager &manager)
layer.particles.object_sync(manager, ob_ref, resources, state);
layer.relations.object_sync(ob_ref, resources, state);
motion_paths.object_sync(ob_ref, resources, state);
origins.object_sync(ob_ref, resources, state);
if (object_is_selected(ob_ref) && !in_edit_paint_mode) {
@@ -491,6 +493,7 @@ void Instance::draw(Manager &manager)
overlay_fb_draw(infront, resources.overlay_in_front_fb);
draw_layer(infront, resources.overlay_line_in_front_fb);
motion_paths.draw_color_only(resources.overlay_color_only_fb, manager, view);
xray_fade.draw(resources.overlay_color_only_fb, manager, view);
grid.draw(resources.overlay_color_only_fb, manager, view);

View File

@@ -30,6 +30,7 @@
#include "overlay_next_mesh.hh"
#include "overlay_next_metaball.hh"
#include "overlay_next_mode_transfer.hh"
#include "overlay_next_motion_path.hh"
#include "overlay_next_origin.hh"
#include "overlay_next_outline.hh"
#include "overlay_next_paint.hh"
@@ -66,6 +67,7 @@ class Instance {
Background background;
Origins origins;
Outline outline;
MotionPath motion_paths;
struct OverlayLayer {
const SelectionType selection_type_;

View File

@@ -0,0 +1,278 @@
/* SPDX-FileCopyrightText: 2023 Blender Authors
*
* SPDX-License-Identifier: GPL-2.0-or-later */
/** \file
* \ingroup overlay
*/
#pragma once
#include "BKE_paint.hh"
#include "DEG_depsgraph_query.hh"
#include "draw_manager_text.hh"
#include "overlay_next_armature.hh"
#include "overlay_next_private.hh"
namespace blender::draw::overlay {
class MotionPath {
private:
PassSimple motion_path_ps_ = {"motion_path_ps_"};
PassSimple::Sub *line_ps_ = nullptr;
PassSimple::Sub *vert_ps_ = nullptr;
bool enabled = false;
public:
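/* Setup the line and point sub-passes. The overlay is skipped for selection passes and
 * when motion path overlays are hidden in the viewport. */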
void begin_sync(Resources &res, const State &state)
{
enabled = state.v3d && !(state.overlay.flag & V3D_OVERLAY_HIDE_MOTION_PATHS) &&
(res.selection_type == SelectionType::DISABLED);
if (!enabled) {
/* Not used. But release the data. */
motion_path_ps_.init();
return;
}
{
PassSimple &pass = motion_path_ps_;
pass.init();
pass.state_set(DRW_STATE_WRITE_COLOR, state.clipping_plane_count);
{
PassSimple::Sub &sub = pass.sub("Lines");
sub.shader_set(res.shaders.motion_path_line.get());
sub.bind_ubo("globalsBlock", &res.globals_buf);
line_ps_ = &sub;
}
{
PassSimple::Sub &sub = pass.sub("Points");
sub.shader_set(res.shaders.motion_path_vert.get());
sub.bind_ubo("globalsBlock", &res.globals_buf);
vert_ps_ = &sub;
}
}
}
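/* Gather motion paths from the evaluated object: one per pose bone when in pose mode,
 * plus the object-level path if present. */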
void object_sync(const ObjectRef &ob_ref, Resources & /*res*/, const State &state)
{
if (!enabled) {
return;
}
const Object *object = ob_ref.object;
if (object->type == OB_ARMATURE) {
if (Armatures::is_pose_mode(object, state)) {
for (bPoseChannel *pchan : ListBaseWrapper<bPoseChannel>(&object->pose->chanbase)) {
if (pchan->mpath) {
motion_path_sync(state, object, pchan, object->pose->avs, pchan->mpath);
}
}
}
}
if (object->mpath) {
motion_path_sync(state, object, nullptr, object->avs, object->mpath);
}
}
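/* Submit the accumulated line and point sub-passes to the color-only overlay framebuffer. */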
void draw_color_only(Framebuffer &framebuffer, Manager &manager, View &view)
{
if (!enabled) {
return;
}
GPU_framebuffer_bind(framebuffer);
manager.submit(motion_path_ps_, view);
}
private:
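/* Register the draw calls for a single motion path in the line and point sub-passes,
 * and queue frame/keyframe numbers in the text cache when requested. */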
void motion_path_sync(const State &state,
const Object *ob,
const bPoseChannel *pchan,
const bAnimVizSettings &avs,
bMotionPath *mpath)
{
/* Avoid zero-size allocations. The current motion path evaluation code should already
 * sanitize this (see animviz_verify_motionpaths()), but we might encounter an older
 * file where this was still possible. */
if (mpath->length == 0) {
return;
}
const bool show_keyframes = (avs.path_viewflag & MOTIONPATH_VIEW_KFRAS);
const bool show_keyframes_number = (avs.path_viewflag & MOTIONPATH_VIEW_KFNOS);
const bool show_frame_number = (avs.path_viewflag & MOTIONPATH_VIEW_FNUMS);
const bool show_lines = (mpath->flag & MOTIONPATH_FLAG_LINES);
const bool custom_color = (mpath->flag & MOTIONPATH_FLAG_CUSTOM);
const bool selected = (pchan) ? (pchan->bone->flag & BONE_SELECTED) :
(ob->base_flag & BASE_SELECTED);
const float3 color_pre = custom_color ? float3(mpath->color) : float3(-1.0f);
const float3 color_post = custom_color ? float3(mpath->color_post) : float3(-1.0f);
int stride = max_ii(avs.path_step, 1);
int current_frame = state.cfra;
IndexRange frame_range;
{
int start, end;
if (avs.path_type == MOTIONPATH_TYPE_ACFRA) {
start = current_frame - avs.path_bc;
end = current_frame + avs.path_ac;
}
else {
start = avs.path_sf;
end = avs.path_ef;
}
if (start > end) {
std::swap(start, end);
}
start = math::clamp(start, mpath->start_frame, mpath->end_frame);
end = math::clamp(end, mpath->start_frame, mpath->end_frame);
frame_range = IndexRange::from_begin_end_inclusive(start, end);
}
if (frame_range.is_empty()) {
return;
}
int start_index = frame_range.start() - mpath->start_frame;
Object *camera_eval = nullptr;
if ((eMotionPath_BakeFlag(avs.path_bakeflag) & MOTIONPATH_BAKE_CAMERA_SPACE) &&
state.v3d->camera)
{
camera_eval = DEG_get_evaluated_object(state.depsgraph, state.v3d->camera);
}
/* Draw curve-line of path. */
if (show_lines) {
const int4 motion_path_settings(
current_frame, int(frame_range.start()), int(frame_range.last()), mpath->start_frame);
auto &sub = *line_ps_;
sub.push_constant("mpathLineSettings", motion_path_settings);
sub.push_constant("lineThickness", mpath->line_thickness);
sub.push_constant("selected", selected);
sub.push_constant("customColorPre", color_pre);
sub.push_constant("customColorPost", color_post);
sub.push_constant("camera_space_matrix",
camera_eval ? camera_eval->object_to_world() : float4x4::identity());
gpu::Batch *geom = mpath_batch_points_get(mpath);
/* Only draw the required range. */
sub.draw_expand(geom, GPU_PRIM_TRIS, 2, 1, frame_range.size() - 1, start_index);
}
/* Draw points. */
{
int pt_size = max_ii(mpath->line_thickness - 1, 1);
const int4 motion_path_settings = {pt_size, current_frame, mpath->start_frame, stride};
auto &sub = *vert_ps_;
sub.push_constant("mpathPointSettings", motion_path_settings);
sub.push_constant("showKeyFrames", show_keyframes);
sub.push_constant("customColorPre", color_pre);
sub.push_constant("customColorPost", color_post);
sub.push_constant("camera_space_matrix",
camera_eval ? camera_eval->object_to_world() : float4x4::identity());
gpu::Batch *geom = mpath_batch_points_get(mpath);
/* Only draw the required range. */
sub.draw(geom, 1, frame_range.size(), start_index);
}
/* Draw frame numbers at each frame-step value. */
if (show_frame_number || (show_keyframes_number && show_keyframes)) {
uchar4 col, col_kf;
/* Color Management: Exception here as text is drawn directly in sRGB space. */
UI_GetThemeColor3ubv(TH_TEXT_HI, col);
UI_GetThemeColor3ubv(TH_VERTEX_SELECT, col_kf);
col.w = col_kf.w = 255;
Span<bMotionPathVert> mpv(mpath->points, mpath->length);
for (int i = 0; i < frame_range.size(); i += stride) {
const bMotionPathVert &mpv_curr = mpv[start_index + i];
int frame = frame_range.start() + i;
bool is_keyframe = (mpv_curr.flag & MOTIONPATH_VERT_KEY) != 0;
float3 vert_coordinate(mpv_curr.co);
if (camera_eval) {
/* Projecting the point into world space from the camera's POV. */
vert_coordinate = math::transform_point(camera_eval->object_to_world(), vert_coordinate);
}
if ((show_keyframes && show_keyframes_number && is_keyframe) ||
(show_frame_number && (i == 0)))
{
char numstr[32];
size_t numstr_len = SNPRINTF_RLEN(numstr, " %d", frame);
DRW_text_cache_add(state.dt,
vert_coordinate,
numstr,
numstr_len,
0,
0,
DRW_TEXT_CACHE_GLOBALSPACE,
(is_keyframe) ? col_kf : col);
}
else if (show_frame_number) {
const bMotionPathVert &mpv_prev = mpv[start_index + i - stride];
const bMotionPathVert &mpv_next = mpv[start_index + i + stride];
/* Only draw the frame number if consecutive highlighted points
 * don't fall on the same location. */
if (!math::is_equal(float3(mpv_curr.co), float3(mpv_prev.co)) ||
!math::is_equal(float3(mpv_curr.co), float3(mpv_next.co)))
{
char numstr[32];
size_t numstr_len = SNPRINTF_RLEN(numstr, " %d", frame);
DRW_text_cache_add(state.dt,
vert_coordinate,
numstr,
numstr_len,
0,
0,
DRW_TEXT_CACHE_GLOBALSPACE,
col);
}
}
}
}
}
/* Just convert the CPU cache to GPU cache. */
/* TODO(fclem) This should go into a draw_cache_impl_motionpath. */
blender::gpu::VertBuf *mpath_vbo_get(bMotionPath *mpath)
{
if (!mpath->points_vbo) {
GPUVertFormat format = {0};
/* Match structure of #bMotionPathVert. */
GPU_vertformat_attr_add(&format, "pos", GPU_COMP_F32, 3, GPU_FETCH_FLOAT);
GPU_vertformat_attr_add(&format, "flag", GPU_COMP_I32, 1, GPU_FETCH_INT);
mpath->points_vbo = GPU_vertbuf_create_with_format(format);
GPU_vertbuf_data_alloc(*mpath->points_vbo, mpath->length);
/* meh... a useless `memcpy`. */
mpath->points_vbo->data<bMotionPathVert>().copy_from({mpath->points, mpath->length});
}
return mpath->points_vbo;
}
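/* Lazily create the point-list batch wrapping the cached vertex buffer. */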
blender::gpu::Batch *mpath_batch_points_get(bMotionPath *mpath)
{
if (!mpath->batch_points) {
mpath->batch_points = GPU_batch_create(GPU_PRIM_POINTS, mpath_vbo_get(mpath), nullptr);
}
return mpath->batch_points;
}
};
} // namespace blender::draw::overlay

View File

@@ -218,6 +218,8 @@ class ShaderModule {
ShaderPtr legacy_curve_edit_normals = shader("overlay_edit_curve_normals");
ShaderPtr legacy_curve_edit_handles = shader("overlay_edit_curve_handle_next");
ShaderPtr legacy_curve_edit_points;
ShaderPtr motion_path_line = shader("overlay_motion_path_line_next");
ShaderPtr motion_path_vert = shader("overlay_motion_path_point");
ShaderPtr mesh_analysis;
ShaderPtr mesh_edit_depth;
ShaderPtr mesh_edit_edge = shader("overlay_edit_mesh_edge_next");

View File

@@ -220,6 +220,22 @@ GPU_SHADER_CREATE_INFO(overlay_motion_path_line_no_geom)
.fragment_source("overlay_motion_path_line_frag.glsl")
.additional_info("draw_view", "draw_globals");
GPU_SHADER_CREATE_INFO(overlay_motion_path_line_next)
.do_static_compilation(true)
.storage_buf(0, Qualifier::READ, "float", "pos[]", Frequency::GEOMETRY)
.push_constant(Type::IVEC2, "gpu_attr_0")
.push_constant(Type::IVEC4, "mpathLineSettings")
.push_constant(Type::BOOL, "selected")
.push_constant(Type::VEC3, "customColorPre")
.push_constant(Type::VEC3, "customColorPost")
.push_constant(Type::INT, "lineThickness") /* In pixels. */
.push_constant(Type::MAT4, "camera_space_matrix")
.vertex_out(overlay_motion_path_line_iface)
.fragment_out(0, Type::VEC4, "fragColor")
.vertex_source("overlay_motion_path_line_next_vert.glsl")
.fragment_source("overlay_motion_path_line_frag.glsl")
.additional_info("draw_view", "gpu_index_load", "draw_globals");
GPU_SHADER_CREATE_INFO(overlay_motion_path_line_clipped)
.do_static_compilation(true)
.additional_info("overlay_motion_path_line", "drw_clipped");
@@ -235,7 +251,7 @@ GPU_SHADER_CREATE_INFO(overlay_motion_path_point)
.do_static_compilation(true)
.typedef_source("overlay_shader_shared.h")
.vertex_in(0, Type::VEC3, "pos")
.vertex_in(1, Type::UINT, "flag")
.vertex_in(1, Type::INT, "flag")
.push_constant(Type::IVEC4, "mpathPointSettings")
.push_constant(Type::BOOL, "showKeyFrames")
.push_constant(Type::VEC3, "customColorPre")

View File

@@ -0,0 +1,165 @@
/* SPDX-FileCopyrightText: 2018-2023 Blender Authors
*
* SPDX-License-Identifier: GPL-2.0-or-later */
#pragma BLENDER_REQUIRE(common_view_clipping_lib.glsl)
#pragma BLENDER_REQUIRE(draw_view_lib.glsl)
#pragma BLENDER_REQUIRE(gpu_shader_utildefines_lib.glsl)
#pragma BLENDER_REQUIRE(gpu_shader_attribute_load_lib.glsl)
#pragma BLENDER_REQUIRE(gpu_shader_index_load_lib.glsl)
#pragma BLENDER_REQUIRE(gpu_shader_math_vector_lib.glsl)
#pragma BLENDER_REQUIRE(gpu_shader_math_matrix_lib.glsl)
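/* Geometry-shader-less version of the motion path line drawing: vertex positions are
 * pulled from a storage buffer and each segment of the path is expanded into a
 * screen-aligned quad (two triangles) directly in the vertex stage. */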
struct VertIn {
vec3 P;
uint vert_id;
};
VertIn input_assembly(uint in_vertex_id)
{
uint v_i = gpu_index_load(in_vertex_id);
VertIn vert_in;
vert_in.P = gpu_attr_load_float3(pos, gpu_attr_0, v_i);
vert_in.vert_id = v_i;
return vert_in;
}
struct VertOut {
vec3 ws_P;
vec4 hs_P;
vec2 ss_P;
vec4 color;
};
#define frameCurrent mpathLineSettings.x
#define frameStart mpathLineSettings.y
#define frameEnd mpathLineSettings.z
#define cacheStart mpathLineSettings.w
VertOut vertex_main(VertIn vert_in)
{
VertOut vert_out;
/* Optionally transform from view space to world space for screen space motion paths. */
vert_out.ws_P = transform_point(camera_space_matrix, vert_in.P);
vert_out.hs_P = drw_point_world_to_homogenous(vert_out.ws_P);
vert_out.ss_P = drw_ndc_to_screen(drw_perspective_divide(vert_out.hs_P)).xy * sizeViewport.xy;
int frame = int(vert_in.vert_id) + cacheStart;
vec3 blend_base = (abs(frame - frameCurrent) == 0) ?
colorCurrentFrame.rgb :
colorBackground.rgb; /* "bleed" CFRAME color to ease color blending */
bool use_custom_color = customColorPre.x >= 0.0;
if (frame < frameCurrent) {
vert_out.color.rgb = use_custom_color ? customColorPre : colorBeforeFrame.rgb;
}
else if (frame > frameCurrent) {
vert_out.color.rgb = use_custom_color ? customColorPost : colorAfterFrame.rgb;
}
else /* if (frame == frameCurrent) */ {
vert_out.color.rgb = use_custom_color ? colorCurrentFrame.rgb : blend_base;
}
vert_out.color.a = 1.0;
return vert_out;
}
struct GeomOut {
vec4 gpu_position;
vec4 color;
vec3 ws_P;
};
void strip_EmitVertex(const uint strip_index,
uint out_vertex_id,
uint out_primitive_id,
GeomOut geom_out)
{
bool is_odd_primitive = (out_primitive_id & 1u) != 0u;
/* Maps triangle list primitives to triangle strip indices. */
uint out_strip_index = (is_odd_primitive ? (2u - out_vertex_id) : out_vertex_id) +
out_primitive_id;
if (out_strip_index != strip_index) {
return;
}
interp.color = geom_out.color;
gl_Position = geom_out.gpu_position;
view_clipping_distances(geom_out.ws_P);
}
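/* Equivalent of the legacy geometry shader body: offset both segment end points along the
 * screen-space edge normal to build a quad whose width follows `lineThickness`. */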
void geometry_main(VertOut geom_in[2],
uint out_vertex_id,
uint out_primitive_id,
uint out_invocation_id)
{
vec2 ss_P0 = geom_in[0].ss_P;
vec2 ss_P1 = geom_in[1].ss_P;
vec2 edge_dir = orthogonal(normalize(ss_P1 - ss_P0 + 1e-8)) * sizeViewportInv;
bool is_persp = (drw_view.winmat[3][3] == 0.0);
float line_size = float(lineThickness) * sizePixel;
GeomOut geom_out;
vec2 t0 = edge_dir * (line_size * (is_persp ? geom_in[0].hs_P.w : 1.0));
geom_out.gpu_position = geom_in[0].hs_P + vec4(t0, 0.0, 0.0);
geom_out.color = geom_in[0].color;
geom_out.ws_P = geom_in[0].ws_P;
strip_EmitVertex(0, out_vertex_id, out_primitive_id, geom_out);
geom_out.gpu_position = geom_in[0].hs_P - vec4(t0, 0.0, 0.0);
strip_EmitVertex(1, out_vertex_id, out_primitive_id, geom_out);
vec2 t1 = edge_dir * (line_size * (is_persp ? geom_in[1].hs_P.w : 1.0));
geom_out.gpu_position = geom_in[1].hs_P + vec4(t1, 0.0, 0.0);
geom_out.ws_P = geom_in[1].ws_P;
geom_out.color = geom_in[1].color;
strip_EmitVertex(2, out_vertex_id, out_primitive_id, geom_out);
geom_out.gpu_position = geom_in[1].hs_P - vec4(t1, 0.0, 0.0);
strip_EmitVertex(3, out_vertex_id, out_primitive_id, geom_out);
}
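/* Each input primitive (one point of the path) emits 6 vertices forming the 2 triangles of
 * one quad between this point and the next. Recover which output vertex and primitive this
 * invocation maps to from gl_VertexID, then run the vertex and geometry stages manually. */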
void main()
{
/* Point list primitive. */
const uint input_primitive_vertex_count = 1u;
/* Triangle list primitive. */
const uint output_primitive_vertex_count = 3u;
const uint output_primitive_count = 2u;
const uint output_invocation_count = 1u;
const uint output_vertex_count_per_invocation = output_primitive_count *
output_primitive_vertex_count;
const uint output_vertex_count_per_input_primitive = output_vertex_count_per_invocation *
output_invocation_count;
uint in_primitive_id = uint(gl_VertexID) / output_vertex_count_per_input_primitive;
uint in_primitive_first_vertex = in_primitive_id * input_primitive_vertex_count;
uint out_vertex_id = uint(gl_VertexID) % output_primitive_vertex_count;
uint out_primitive_id = (uint(gl_VertexID) / output_primitive_vertex_count) %
output_primitive_count;
uint out_invocation_id = (uint(gl_VertexID) / output_vertex_count_per_invocation) %
output_invocation_count;
/* Read current and next point. */
VertIn vert_in[2];
vert_in[0] = input_assembly(in_primitive_first_vertex + 0u);
vert_in[1] = input_assembly(in_primitive_first_vertex + 1u);
VertOut vert_out[2];
vert_out[0] = vertex_main(vert_in[0]);
vert_out[1] = vertex_main(vert_in[1]);
/* Discard by default. */
gl_Position = vec4(NAN_FLT);
geometry_main(vert_out, out_vertex_id, out_primitive_id, out_invocation_id);
}