Fix #110281: Keying node despill is wrong on GPU

The Keying node produces wrong despilling on the GPU evaluator.

That's because the despill amount could be negative, which was not
accounted for.

Additionally, the saturation indices were changed to match the CPU
implementation, just as was done in bdb042c243.
This commit is contained in:
Omar Emara
2023-07-20 09:44:58 +03:00
parent 246292f486
commit 6400190836

View File

@@ -1,6 +1,15 @@
#pragma BLENDER_REQUIRE(common_math_lib.glsl)
#pragma BLENDER_REQUIRE(gpu_shader_compositor_texture_utilities.glsl)
/* Return the indices of the components of v ordered as (index of the maximum
 * component, larger of the two remaining indices, smaller of the two remaining
 * indices). Ties break toward the z component, matching the CPU saturation
 * index computation. */
ivec3 compute_saturation_indices(vec3 v)
{
int index_of_max;
if (v.x > v.y) {
index_of_max = (v.x > v.z) ? 0 : 2;
}
else {
index_of_max = (v.y > v.z) ? 1 : 2;
}
/* The two indices other than the maximum, computed cyclically. */
ivec2 rest = (ivec2(index_of_max) + ivec2(1, 2)) % ivec2(3);
return ivec3(index_of_max, max(rest.x, rest.y), min(rest.x, rest.y));
}
void main()
{
/* Texel this compute invocation writes, derived from the global invocation ID. */
ivec2 texel = ivec2(gl_GlobalInvocationID.xy);
@@ -13,9 +22,9 @@ void main()
/* Premultiply the color by the computed matte. */
color *= matte;
/* Color despill. */
/* Removed in this commit: indices came from argmax(), which did not match the
 * CPU implementation's saturation-index ordering. */
ivec3 key_argmax = argmax(key.rgb);
float weighted_average = mix(color[key_argmax.y], color[key_argmax.z], despill_balance);
color[key_argmax.x] -= (color[key_argmax.x] - weighted_average) * despill_factor;
/* Added in this commit: indices now match the CPU implementation (see commit
 * message and bdb042c243). */
ivec3 indices = compute_saturation_indices(key.rgb);
float weighted_average = mix(color[indices.y], color[indices.z], despill_balance);
/* Clamp with max(0.0, ...) so a negative despill amount never brightens the
 * channel — the bug this commit fixes (#110281). */
color[indices.x] -= max(0.0, (color[indices.x] - weighted_average) * despill_factor);
imageStore(output_img, texel, color);
}