EEVEE Next: Overscan support

Add overscan support for both viewport and final renders.

Pull Request: https://projects.blender.org/blender/blender/pulls/110313
This commit is contained in:
Miguel Pozo
2023-08-03 16:19:08 +02:00
parent eabff37483
commit 567a2e5a6f
9 changed files with 111 additions and 43 deletions

View File

@@ -858,11 +858,20 @@ class RENDER_PT_eevee_next_film(RenderButtonsPanel, Panel):
scene = context.scene
rd = scene.render
props = scene.eevee
col = layout.column()
col.prop(rd, "filter_size")
col.prop(rd, "film_transparent", text="Transparent")
col = layout.column(align=False, heading="Overscan")
row = col.row(align=True)
sub = row.row(align=True)
sub.prop(props, "use_overscan", text="")
sub = sub.row(align=True)
sub.active = props.use_overscan
sub.prop(props, "overscan_size", text="")
def draw_curves_settings(self, context):
layout = self.layout

View File

@@ -15,7 +15,9 @@
#include "BKE_camera.h"
#include "DEG_depsgraph_query.h"
#include "ED_view3d.h"
#include "RE_pipeline.h"
#include "render_types.h"
#include "eevee_camera.hh"
#include "eevee_instance.hh"
@@ -70,6 +72,12 @@ void Camera::init()
/* Light-probe baking. */
data.type = CAMERA_PERSP;
}
float overscan = 0.0f;
if ((inst_.scene->eevee.flag & SCE_EEVEE_OVERSCAN) && (inst_.drw_view || inst_.render)) {
overscan = inst_.scene->eevee.overscan / 100.0f;
}
overscan_changed_ = assign_if_different(overscan_, overscan);
}
void Camera::sync()
@@ -78,17 +86,15 @@ void Camera::sync()
CameraData &data = data_;
data.uv_scale = float2(1.0f);
data.uv_bias = float2(0.0f);
if (inst_.is_baking()) {
/* Any view so that shadows and light culling works during irradiance bake. */
draw::View &view = inst_.irradiance_cache.bake.view_z_;
data.viewmat = view.viewmat();
data.viewinv = view.viewinv();
data.winmat = view.winmat();
data.wininv = view.wininv();
data.persmat = data.winmat * data.viewmat;
data.persinv = math::invert(data.persmat);
data.uv_scale = float2(1.0f);
data.uv_bias = float2(0.0f);
data.type = CAMERA_ORTHO;
/* \note: Follow camera parameters where distances are positive in front of the camera. */
@@ -101,37 +107,47 @@ void Camera::sync()
else if (inst_.drw_view) {
DRW_view_viewmat_get(inst_.drw_view, data.viewmat.ptr(), false);
DRW_view_viewmat_get(inst_.drw_view, data.viewinv.ptr(), true);
DRW_view_winmat_get(inst_.drw_view, data.winmat.ptr(), false);
DRW_view_winmat_get(inst_.drw_view, data.wininv.ptr(), true);
DRW_view_persmat_get(inst_.drw_view, data.persmat.ptr(), false);
DRW_view_persmat_get(inst_.drw_view, data.persinv.ptr(), true);
if (overscan_ == 0.0f) {
DRW_view_winmat_get(inst_.drw_view, data.winmat.ptr(), false);
}
else {
rctf viewplane;
float clip_start;
float clip_end;
bool is_ortho = ED_view3d_viewplane_get(inst_.depsgraph,
inst_.v3d,
inst_.rv3d,
UNPACK2(inst_.film.display_extent_get()),
&viewplane,
&clip_start,
&clip_end,
nullptr);
RE_GetWindowMatrixWithOverscan(
is_ortho, clip_start, clip_end, viewplane, overscan_, data.winmat.ptr());
}
/* TODO(fclem): Derive from rv3d instead. */
data.uv_scale = float2(1.0f);
data.uv_bias = float2(0.0f);
}
else if (inst_.render) {
/* TODO(@fclem): Over-scan. */
// RE_GetCameraWindowWithOverscan(inst_.render->re, g_data->overscan, data.winmat);
RE_GetCameraWindow(inst_.render->re, camera_eval, data.winmat.ptr());
RE_GetCameraModelMatrix(inst_.render->re, camera_eval, data.viewinv.ptr());
invert_m4_m4(data.viewmat.ptr(), data.viewinv.ptr());
invert_m4_m4(data.wininv.ptr(), data.winmat.ptr());
mul_m4_m4m4(data.persmat.ptr(), data.winmat.ptr(), data.viewmat.ptr());
invert_m4_m4(data.persinv.ptr(), data.persmat.ptr());
data.uv_scale = float2(1.0f);
data.uv_bias = float2(0.0f);
data.viewmat = math::invert(data.viewinv);
RE_GetCameraWindow(inst_.render->re, camera_eval, data.winmat.ptr());
if (overscan_ != 0.0f) {
RE_GetCameraWindowWithOverscan(inst_.render->re, overscan_, data.winmat.ptr());
}
}
else {
data.viewmat = float4x4::identity();
data.viewinv = float4x4::identity();
data.winmat = math::projection::perspective(-0.1f, 0.1f, -0.1f, 0.1f, 0.1f, 1.0f);
data.wininv = math::invert(data.winmat);
data.persmat = data.winmat * data.viewmat;
data.persinv = math::invert(data.persmat);
data.uv_scale = float2(1.0f);
data.uv_bias = float2(0.0f);
}
data.wininv = math::invert(data.winmat);
data.persmat = data.winmat * data.viewmat;
data.persinv = math::invert(data.persmat);
if (camera_eval && camera_eval->type == OB_CAMERA) {
const ::Camera *cam = reinterpret_cast<const ::Camera *>(camera_eval->data);
data.clip_near = cam->clip_start;

View File

@@ -100,6 +100,9 @@ class Camera {
float radius;
} bound_sphere;
float overscan_;
bool overscan_changed_;
public:
Camera(Instance &inst) : inst_(inst){};
~Camera(){};
@@ -147,6 +150,14 @@ class Camera {
{
return bound_sphere.radius;
}
/** Overscan as a fraction of the render extent (scene percentage / 100 — see Camera::init);
 * 0.0 when overscan is disabled. */
float overscan() const
{
return overscan_;
}
/** True when the overscan value differs from the previous Camera::init()
 * (set via assign_if_different; consumers use it to reset temporal effects). */
bool overscan_changed() const
{
return overscan_changed_;
}
private:
void update_bounds();

View File

@@ -248,16 +248,26 @@ void Film::init(const int2 &extent, const rcti *output_rect)
output_rect = &fallback_rect;
}
display_offset = int2(output_rect->xmin, output_rect->ymin);
FilmData data = data_;
data.extent = int2(BLI_rcti_size_x(output_rect), BLI_rcti_size_y(output_rect));
data.offset = int2(output_rect->xmin, output_rect->ymin);
data.offset = display_offset;
data.extent_inv = 1.0f / float2(data.extent);
/* Disable filtering if sample count is 1. */
data.filter_radius = (sampling.sample_count() == 1) ? 0.0f :
clamp_f(scene.r.gauss, 0.0f, 100.0f);
/* TODO(fclem): parameter hidden in experimental.
* We need to figure out LOD bias first in order to preserve texture crispiness. */
data.scaling_factor = 1;
data.render_extent = math::divide_ceil(extent, int2(data.scaling_factor));
if (inst_.camera.overscan() != 0.0f) {
int2 overscan = int2(inst_.camera.overscan() * math::max(UNPACK2(data.render_extent)));
data.render_extent += overscan * 2;
data.offset += overscan;
}
/* Disable filtering if sample count is 1. */
data.filter_radius = (sampling.sample_count() == 1) ? 0.0f :
clamp_f(scene.r.gauss, 0.0f, 100.0f);
data.cryptomatte_samples_len = inst_.view_layer->cryptomatte_levels;
data.background_opacity = (scene.r.alphamode == R_ALPHAPREMUL) ? 0.0f : 1.0f;
@@ -355,9 +365,6 @@ void Film::init(const int2 &extent, const rcti *output_rect)
data_.cryptomatte_material_id = cryptomatte_index_get(EEVEE_RENDER_PASS_CRYPTOMATTE_MATERIAL);
}
{
/* TODO(@fclem): Over-scans. */
data_.render_extent = math::divide_ceil(extent, int2(data_.scaling_factor));
int2 weight_extent = inst_.camera.is_panoramic() ? data_.extent : int2(data_.scaling_factor);
eGPUTextureFormat color_format = GPU_RGBA16F;
@@ -629,7 +636,7 @@ void Film::accumulate(const DRWView *view, GPUTexture *combined_final_tx)
float4 clear_color = {0.0f, 0.0f, 0.0f, 0.0f};
GPU_framebuffer_clear_color(dfbl->default_fb, clear_color);
}
GPU_framebuffer_viewport_set(dfbl->default_fb, UNPACK2(data_.offset), UNPACK2(data_.extent));
GPU_framebuffer_viewport_set(dfbl->default_fb, UNPACK2(display_offset), UNPACK2(data_.extent));
}
update_sample_table();
@@ -661,7 +668,7 @@ void Film::display()
DefaultFramebufferList *dfbl = DRW_viewport_framebuffer_list_get();
GPU_framebuffer_bind(dfbl->default_fb);
GPU_framebuffer_viewport_set(dfbl->default_fb, UNPACK2(data_.offset), UNPACK2(data_.extent));
GPU_framebuffer_viewport_set(dfbl->default_fb, UNPACK2(display_offset), UNPACK2(data_.extent));
combined_final_tx_ = inst_.render_buffers.combined_tx;

View File

@@ -64,6 +64,7 @@ class Film {
PassSimple cryptomatte_post_ps_ = {"Film.Cryptomatte.Post"};
FilmDataBuf data_;
int2 display_offset;
eViewLayerEEVEEPassType enabled_passes_ = eViewLayerEEVEEPassType(0);
@@ -94,6 +95,12 @@ class Film {
return data_.render_extent;
}
/** Returns the final render output resolution (`data_.extent`).
 * Unlike render_extent_get(), this does NOT include the overscan padding added in Film::init(). */
int2 display_extent_get() const
{
return data_.extent;
}
float2 pixel_jitter_get() const;
float background_opacity_get() const

View File

@@ -125,7 +125,7 @@ void MotionBlurModule::sync()
{
/* Disable motion blur in viewport when changing camera projection type.
* Avoids really high velocities. */
if (inst_.velocity.camera_changed_projection()) {
if (inst_.velocity.camera_changed_projection() || inst_.camera.overscan_changed()) {
motion_blur_fx_enabled_ = false;
}

View File

@@ -4,7 +4,7 @@
void main()
{
ivec2 texel_film = ivec2(gl_FragCoord.xy) - film_buf.offset;
ivec2 texel_film = ivec2(gl_FragCoord.xy);
float out_depth;
if (film_buf.display_only) {

View File

@@ -468,6 +468,13 @@ void RE_GetCameraModelMatrix(const struct Render *re,
const struct Object *camera,
float r_modelmat[4][4]);
void RE_GetWindowMatrixWithOverscan(bool is_ortho,
float clip_start,
float clip_end,
rctf viewplane,
float overscan,
float r_winmat[4][4]);
struct Scene *RE_GetScene(struct Render *re);
void RE_SetScene(struct Render *re, struct Scene *sce);

View File

@@ -187,12 +187,28 @@ void RE_GetCameraWindow(Render *re, const Object *camera, float r_winmat[4][4])
}
/* Compute the window (projection) matrix of `re`'s camera expanded by `overscan`,
 * writing the result into `r_winmat`. Forwards to #RE_GetWindowMatrixWithOverscan
 * using the render's current view-plane and clip range. */
void RE_GetCameraWindowWithOverscan(const Render *re, float overscan, float r_winmat[4][4])
{
  /* A non-zero w-w component in the window matrix indicates an orthographic projection. */
  const bool is_ortho = re->winmat[3][3] != 0.0f;
  RE_GetWindowMatrixWithOverscan(
      is_ortho, re->clip_start, re->clip_end, re->viewplane, overscan, r_winmat);
}
/* Return `camera`'s model (object-to-world) matrix in `r_modelmat`,
 * multi-view aware: uses `re->viewname` to pick the per-view matrix. */
void RE_GetCameraModelMatrix(const Render *re, const Object *camera, float r_modelmat[4][4])
{
BKE_camera_multiview_model_matrix(&re->r, camera, re->viewname, r_modelmat);
}
void RE_GetWindowMatrixWithOverscan(bool is_ortho,
float clip_start,
float clip_end,
rctf viewplane,
float overscan,
float r_winmat[4][4])
{
CameraParams params;
params.is_ortho = re->winmat[3][3] != 0.0f;
params.clip_start = re->clip_start;
params.clip_end = re->clip_end;
params.viewplane = re->viewplane;
params.is_ortho = is_ortho;
params.clip_start = clip_start;
params.clip_end = clip_end;
params.viewplane = viewplane;
overscan *= max_ff(BLI_rctf_size_x(&params.viewplane), BLI_rctf_size_y(&params.viewplane));
@@ -204,11 +220,6 @@ void RE_GetCameraWindowWithOverscan(const Render *re, float overscan, float r_wi
copy_m4_m4(r_winmat, params.winmat);
}
/* NOTE(review): this appears to be the pre-move copy of RE_GetCameraModelMatrix shown by the
 * diff (the relocated definition appears earlier) — confirm only one copy exists in the file.
 * Returns `camera`'s multi-view model matrix in `r_modelmat` using `re->viewname`. */
void RE_GetCameraModelMatrix(const Render *re, const Object *camera, float r_modelmat[4][4])
{
BKE_camera_multiview_model_matrix(&re->r, camera, re->viewname, r_modelmat);
}
void RE_GetViewPlane(Render *re, rctf *r_viewplane, rcti *r_disprect)
{
*r_viewplane = re->viewplane;