test2/source/blender/draw/engines/eevee/shaders/eevee_velocity_lib.glsl

/* SPDX-FileCopyrightText: 2022 Blender Authors
*
* SPDX-License-Identifier: GPL-2.0-or-later */
#pragma once
#include "infos/eevee_velocity_infos.hh"
SHADER_LIBRARY_CREATE_INFO(eevee_velocity_camera)
#include "draw_view_lib.glsl"
#include "gpu_shader_math_matrix_transform_lib.glsl"
float4 velocity_pack(float4 data)
{
return data * 0.01f;
}
float4 velocity_unpack(float4 data)
{
return data * 100.0f;
}
#ifdef VELOCITY_CAMERA
/**
 * Given a triple of positions, compute the previous and next motion vectors.
* Returns uv space motion vectors in pairs (motion_prev.xy, motion_next.xy).
*/
float4 velocity_surface(float3 P_prv, float3 P, float3 P_nxt)
{
/* NOTE: We use CameraData matrices instead of drw_view().persmat to avoid adding the TAA jitter
* to the velocity. */
float2 prev_uv = project_point(camera_prev.persmat, P_prv).xy;
float2 curr_uv = project_point(camera_curr.persmat, P).xy;
float2 next_uv = project_point(camera_next.persmat, P_nxt).xy;
/* Fix issue with perspective division. */
if (any(isnan(prev_uv))) {
prev_uv = curr_uv;
}
if (any(isnan(next_uv))) {
next_uv = curr_uv;
}
/* NOTE: We output both vectors in the same direction so we can reuse the same vector
* with RGRG swizzle in viewport. */
float4 motion = float4(prev_uv - curr_uv, curr_uv - next_uv);
/* Convert NDC velocity to UV velocity */
motion *= 0.5f;
return motion;
}
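/* Illustrative usage sketch (not part of this library): a surface shader could
 * compute and store its motion vectors with the helpers above, e.g.
 *
 *   float4 motion = velocity_surface(P_prev, P, P_next);
 *   out_velocity = velocity_pack(motion);
 *
 * where `P_prev` / `P_next` are the world positions at the previous / next motion
 * step and `out_velocity` is a hypothetical output of the velocity pass. */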
/**
* Given a view space view vector \a vV, compute the previous and next motion vectors for
* background pixels.
* Returns uv space motion vectors in pairs (motion_prev.xy, motion_next.xy).
*/
float4 velocity_background(float3 vV)
{
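  /* The background is treated as infinitely far away, so only a direction is
   * re-projected: camera rotation and projection changes produce motion,
   * camera translation does not. */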
float3 V = transform_direction(camera_curr.viewinv, vV);
/* NOTE: We use CameraData matrices instead of drw_view().winmat to avoid adding the TAA jitter
* to the velocity. */
float2 prev_uv =
project_point(camera_prev.winmat, transform_direction(camera_prev.viewmat, V)).xy;
float2 curr_uv =
project_point(camera_curr.winmat, transform_direction(camera_curr.viewmat, V)).xy;
float2 next_uv =
project_point(camera_next.winmat, transform_direction(camera_next.viewmat, V)).xy;
/* NOTE: We output both vectors in the same direction so we can reuse the same vector
* with RGRG swizzle in viewport. */
float4 motion = float4(prev_uv - curr_uv, curr_uv - next_uv);
/* Convert NDC velocity to UV velocity */
motion *= 0.5f;
return motion;
}
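/**
 * Resolve the motion vector for a single pixel. Pixels tagged with VELOCITY_INVALID
 * (no velocity was written for them) fall back to camera-only motion: background
 * pixels use the view direction, other pixels are treated as static geometry.
 * Returns uv space motion vectors in pairs (motion_prev.xy, motion_next.xy).
 */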
float4 velocity_resolve(float4 vector, float2 uv, float depth)
{
if (vector.x == VELOCITY_INVALID) {
bool is_background = (depth == 1.0f);
if (is_background) {
/* NOTE: Use view vector to avoid imprecision if camera is far from origin. */
float3 vV = -drw_view_incident_vector(drw_point_screen_to_view(float3(uv, 1.0f)));
return velocity_background(vV);
}
else {
/* Static geometry. No translation in world space. */
float3 P = drw_point_screen_to_world(float3(uv, depth));
return velocity_surface(P, P, P);
}
}
return velocity_unpack(vector);
}
/**
 * Load and resolve the correct velocity, as some pixels might still not have
 * valid motion data for performance reasons.
* Returns motion vector in render UV space.
*/
float4 velocity_resolve(sampler2D vector_tx, int2 texel, float depth)
{
float2 uv = (float2(texel) + 0.5f) / float2(textureSize(vector_tx, 0).xy);
float4 vector = texelFetch(vector_tx, texel, 0);
return velocity_resolve(vector, uv, depth);
}
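/* Illustrative usage sketch (not part of this library): a full-screen resolve pass
 * could fetch motion for the current fragment like this, assuming a scene depth
 * texture named `depth_tx` and a velocity texture named `velocity_tx`:
 *
 *   int2 texel = int2(gl_FragCoord.xy);
 *   float depth = texelFetch(depth_tx, texel, 0).r;
 *   float4 motion = velocity_resolve(velocity_tx, texel, depth);
 */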
#endif
#ifdef MAT_VELOCITY
/**
 * Given the current local space position and vertex index, fetch the previous and
 * next local space positions from the deformation motion steps. Outputs fall back
 * to the current position when no motion data is available.
*/
void velocity_local_pos_get(float3 lP, int vert_id, out float3 lP_prev, out float3 lP_next)
{
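  /* Per-object velocity data is found through an indirection table indexed by the
   * draw resource ID. */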
VelocityIndex vel = velocity_indirection_buf[drw_resource_id()];
lP_next = lP_prev = lP;
if (vel.geo.do_deform) {
if (vel.geo.ofs[STEP_PREVIOUS] != -1) {
lP_prev = velocity_geo_prev_buf[vel.geo.ofs[STEP_PREVIOUS] + vert_id].xyz;
}
if (vel.geo.ofs[STEP_NEXT] != -1) {
lP_next = velocity_geo_next_buf[vel.geo.ofs[STEP_NEXT] + vert_id].xyz;
}
}
}
/**
 * Given a triple of positions, compute the previous and next motion vectors.
* Returns a tuple of world space motion deltas.
* WARNING: The returned motion_next is invalid when rendering the viewport.
*/
void velocity_vertex(
float3 lP_prev, float3 lP, float3 lP_next, out float3 motion_prev, out float3 motion_next)
{
VelocityIndex vel = velocity_indirection_buf[drw_resource_id()];
float4x4 obmat_prev = velocity_obj_prev_buf[vel.obj.ofs[STEP_PREVIOUS]];
float4x4 obmat_next = velocity_obj_next_buf[vel.obj.ofs[STEP_NEXT]];
float3 P_prev = transform_point(obmat_prev, lP_prev);
float3 P_next = transform_point(obmat_next, lP_next);
float3 P = transform_point(drw_modelmat(), lP);
motion_prev = P_prev - P;
motion_next = P_next - P;
}
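/* Illustrative usage sketch (not part of this library): a material vertex shader
 * could combine both helpers to output the motion deltas used by the velocity pass:
 *
 *   float3 lP_prev, lP_next;
 *   velocity_local_pos_get(lP, vert_id, lP_prev, lP_next);
 *   velocity_vertex(lP_prev, lP, lP_next, motion_prev, motion_next);
 *
 * where `lP` is the local space vertex position, `vert_id` the vertex index, and
 * `motion_prev` / `motion_next` hypothetical vertex shader outputs. */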
#endif