EEVEE-Next: Add camera module

This commit is contained in:
Clément Foucault
2022-05-14 20:29:28 +02:00
parent 46114f0a36
commit 3ccdc362da
7 changed files with 456 additions and 6 deletions

View File

@@ -133,6 +133,7 @@ set(SRC
engines/eevee/eevee_subsurface.c
engines/eevee/eevee_temporal_sampling.c
engines/eevee/eevee_volumes.c
engines/eevee_next/eevee_camera.cc
engines/eevee_next/eevee_engine.cc
engines/eevee_next/eevee_instance.cc
engines/eevee_next/eevee_material.cc
@@ -352,6 +353,7 @@ set(GLSL_SRC
engines/eevee/shaders/world_vert.glsl
engines/eevee_next/shaders/eevee_attributes_lib.glsl
engines/eevee_next/shaders/eevee_camera_lib.glsl
engines/eevee_next/shaders/eevee_geom_curves_vert.glsl
engines/eevee_next/shaders/eevee_geom_gpencil_vert.glsl
engines/eevee_next/shaders/eevee_geom_mesh_vert.glsl

View File

@@ -0,0 +1,146 @@
/* SPDX-License-Identifier: GPL-2.0-or-later
* Copyright 2021 Blender Foundation.
*/
/** \file
* \ingroup eevee
*/
#include <array>
#include "DRW_render.h"
#include "DNA_camera_types.h"
#include "DNA_view3d_types.h"
#include "BKE_camera.h"
#include "DEG_depsgraph_query.h"
#include "RE_pipeline.h"
#include "eevee_camera.hh"
#include "eevee_instance.hh"
namespace blender::eevee {
/* -------------------------------------------------------------------- */
/** \name Camera
* \{ */
/**
 * Initialize the camera type for this sample/frame.
 * Swaps the double-buffered data so the previously synced state is kept for
 * change detection, then resolves the projection type from (in priority order)
 * the evaluated camera object, the viewport draw view, or a perspective
 * fallback used for light-probe baking.
 */
void Camera::init()
{
  const Object *camera_eval = inst_.camera_eval_object;

  synced_ = false;

  /* Keep last frame's data in `previous()` for change detection in sync(). */
  data_.swap();
  CameraData &data = data_.current();

  if (camera_eval) {
    const ::Camera *cam = reinterpret_cast<const ::Camera *>(camera_eval->data);
    switch (cam->type) {
      default:
      case CAM_PERSP:
        data.type = CAMERA_PERSP;
        /* Missing `break` made CAM_PERSP fall through and be overwritten below. */
        break;
      case CAM_ORTHO:
        data.type = CAMERA_ORTHO;
        break;
#if 0 /* TODO(fclem): Make fisheye properties inside blender. */
      case CAM_PANO: {
        switch (cam->panorama_type) {
          default:
          case CAM_PANO_EQUIRECTANGULAR:
            data.type = CAMERA_PANO_EQUIRECT;
            break;
          case CAM_PANO_FISHEYE_EQUIDISTANT:
            data.type = CAMERA_PANO_EQUIDISTANT;
            break;
          case CAM_PANO_FISHEYE_EQUISOLID:
            data.type = CAMERA_PANO_EQUISOLID;
            break;
          case CAM_PANO_MIRRORBALL:
            data.type = CAMERA_PANO_MIRROR;
            break;
        }
        break;
      }
#endif
    }
  }
  else if (inst_.drw_view) {
    data.type = DRW_view_is_persp_get(inst_.drw_view) ? CAMERA_PERSP : CAMERA_ORTHO;
  }
  else {
    /* Lightprobe baking. */
    data.type = CAMERA_PERSP;
  }
}
/**
 * Fill the current CameraData (matrices, UV mapping, clip distances, filter size)
 * from either the viewport draw view, the render engine camera, or a fallback
 * frustum, then upload it to the GPU via push_update().
 */
void Camera::sync()
{
  const Object *camera_eval = inst_.camera_eval_object;

  CameraData &data = data_.current();

  /* Film pixel filter radius, taken from the scene render settings. */
  data.filter_size = inst_.scene->r.gauss;

  if (inst_.drw_view) {
    /* Viewport drawing: copy every matrix straight from the draw-manager view. */
    DRW_view_viewmat_get(inst_.drw_view, data.viewmat.ptr(), false);
    DRW_view_viewmat_get(inst_.drw_view, data.viewinv.ptr(), true);
    DRW_view_winmat_get(inst_.drw_view, data.winmat.ptr(), false);
    DRW_view_winmat_get(inst_.drw_view, data.wininv.ptr(), true);
    DRW_view_persmat_get(inst_.drw_view, data.persmat.ptr(), false);
    DRW_view_persmat_get(inst_.drw_view, data.persinv.ptr(), true);
    /* Camera texture coordinate factors (uv_scale / uv_bias). */
    DRW_view_camtexco_get(inst_.drw_view, data.uv_scale);
  }
  else if (inst_.render) {
    /* Final render: query window & model matrices from the render engine,
     * then derive the rest by inversion (persmat = winmat * viewmat). */
    /* TODO(fclem) Overscan */
    // RE_GetCameraWindowWithOverscan(inst_.render->re, g_data->overscan, data.winmat);
    RE_GetCameraWindow(inst_.render->re, camera_eval, data.winmat.ptr());
    RE_GetCameraModelMatrix(inst_.render->re, camera_eval, data.viewinv.ptr());
    invert_m4_m4(data.viewmat.ptr(), data.viewinv.ptr());
    invert_m4_m4(data.wininv.ptr(), data.winmat.ptr());
    mul_m4_m4m4(data.persmat.ptr(), data.winmat.ptr(), data.viewmat.ptr());
    invert_m4_m4(data.persinv.ptr(), data.persmat.ptr());
    /* No camera texture cropping for final render. */
    data.uv_scale = float2(1.0f);
    data.uv_bias = float2(0.0f);
  }
  else {
    /* Fallback (e.g. light-probe baking): identity view with a small
     * symmetric 0.1..1.0 frustum. */
    data.viewmat = float4x4::identity();
    data.viewinv = float4x4::identity();
    perspective_m4(data.winmat.ptr(), -0.1f, 0.1f, -0.1f, 0.1f, 0.1f, 1.0f);
    data.wininv = data.winmat.inverted();
    data.persmat = data.winmat * data.viewmat;
    data.persinv = data.persmat.inverted();
  }

  if (camera_eval) {
    /* Clip distances come from the camera data-block. */
    const ::Camera *cam = reinterpret_cast<const ::Camera *>(camera_eval->data);
    data.clip_near = cam->clip_start;
    data.clip_far = cam->clip_end;
#if 0 /* TODO(fclem): Make fisheye properties inside blender. */
    data.fisheye_fov = cam->fisheye_fov;
    data.fisheye_lens = cam->fisheye_lens;
    data.equirect_bias.x = -cam->longitude_min + M_PI_2;
    data.equirect_bias.y = -cam->latitude_min + M_PI_2;
    data.equirect_scale.x = cam->longitude_min - cam->longitude_max;
    data.equirect_scale.y = cam->latitude_min - cam->latitude_max;
    /* Combine with uv_scale/bias to avoid doing extra computation. */
    data.equirect_bias += data.uv_bias * data.equirect_scale;
    data.equirect_scale *= data.uv_scale;
    data.equirect_scale_inv = 1.0f / data.equirect_scale;
#endif
  }
  else if (inst_.drw_view) {
    data.clip_near = DRW_view_near_distance_get(inst_.drw_view);
    data.clip_far = DRW_view_far_distance_get(inst_.drw_view);
    /* Negative values flag the panorama parameters as unused. */
    data.fisheye_fov = data.fisheye_lens = -1.0f;
    data.equirect_bias = float2(0.0f);
    data.equirect_scale = float2(0.0f);
  }
  /* NOTE(review): in the baking fallback (no camera_eval, no drw_view) neither
   * branch above runs, so clip_near/clip_far keep their previous values —
   * confirm this is intended. */

  /* Upload to the GPU uniform buffer. */
  data_.current().push_update();

  synced_ = true;

  /* Detect changes in parameters. */
  if (data_.current() != data_.previous()) {
    /* Sampling reset is not hooked up yet. */
    // inst_.sampling.reset();
  }
}
/** \} */
} // namespace blender::eevee

View File

@@ -1,11 +1,14 @@
/* SPDX-License-Identifier: GPL-2.0-or-later
* Copyright 2021 Blender Foundation.
*/
* Copyright 2021 Blender Foundation. */
#pragma once
/** \file
* \ingroup eevee
*/
#include "eevee_shader_shared.hh"
namespace blender::eevee {
class Instance;
@@ -43,4 +46,85 @@ static const float cubeface_mat[6][4][4] = {
{0.0f, 0.0f, 0.0f, 1.0f}},
};
/**
 * Build the projection matrix used for one cube-map face:
 * a symmetric frustum with a 90 degree field of view.
 */
inline void cubeface_winmat_get(float4x4 &winmat, float near, float far)
{
  /* A half-extent equal to `near` at the near plane yields exactly 90 degrees. */
  const float side = near;
  perspective_m4(winmat.ptr(), -side, side, -side, side, near, far);
}
/* -------------------------------------------------------------------- */
/** \name CameraData operators
* \{ */
/** Equality used for change detection: compares every parameter that affects rendering. */
inline bool operator==(const CameraData &a, const CameraData &b)
{
  /* The combined projection matrix is compared first (near-exact epsilon). */
  if (!compare_m4m4(a.persmat.ptr(), b.persmat.ptr(), FLT_MIN)) {
    return false;
  }
  const bool uv_equal = (a.uv_scale == b.uv_scale) && (a.uv_bias == b.uv_bias);
  const bool pano_equal = (a.equirect_scale == b.equirect_scale) &&
                          (a.equirect_bias == b.equirect_bias) &&
                          (a.fisheye_fov == b.fisheye_fov) && (a.fisheye_lens == b.fisheye_lens);
  return uv_equal && pano_equal && (a.filter_size == b.filter_size) && (a.type == b.type);
}
/** Inequality: simple negation of the equality operator above. */
inline bool operator!=(const CameraData &a, const CameraData &b)
{
  const bool equal = (a == b);
  return !equal;
}
/** \} */
/* -------------------------------------------------------------------- */
/** \name Camera
* \{ */
/**
* Point of view in the scene. Can be init from viewport or camera object.
*/
/**
 * Point of view in the scene. Can be initialized from the viewport or a camera object.
 */
class Camera {
 private:
  /** Owning instance, used to reach scene / render state during init() and sync(). */
  Instance &inst_;
  /** Double buffered to detect changes and have history for re-projection. */
  SwapChain<CameraDataBuf, 2> data_;
  /** Detects wrong usage: data is only valid after sync(). */
  bool synced_ = false;

 public:
  Camera(Instance &inst) : inst_(inst) {}
  ~Camera() = default;

  void init();
  void sync();

  /**
   * Getters
   **/
  /** Synced camera parameters. Asserts if accessed before sync(). */
  const CameraData &data_get() const
  {
    BLI_assert(synced_);
    return data_.current();
  }
  /** GPU uniform buffer holding the current CameraData. */
  const GPUUniformBuf *ubo_get() const
  {
    return data_.current();
  }
  bool is_panoramic() const
  {
    return eevee::is_panoramic(data_.current().type);
  }
  bool is_orthographic() const
  {
    return data_.current().type == CAMERA_ORTHO;
  }
  /** Camera position, read from the translation row of the inverted view matrix. */
  const float3 &position() const
  {
    return *reinterpret_cast<const float3 *>(data_.current().viewinv[3]);
  }
  /** Camera forward axis, read from the Z row of the inverted view matrix. */
  const float3 &forward() const
  {
    return *reinterpret_cast<const float3 *>(data_.current().viewinv[2]);
  }
};
/** \} */
} // namespace blender::eevee

View File

@@ -41,9 +41,10 @@ void Instance::init(const int2 &output_res,
const View3D *v3d_,
const RegionView3D *rv3d_)
{
UNUSED_VARS(light_probe_, camera_object_, output_rect);
UNUSED_VARS(light_probe_, output_rect);
render = render_;
depsgraph = depsgraph_;
camera_orig_object = camera_object_;
render_layer = render_layer_;
drw_view = drw_view_;
v3d = v3d_;
@@ -58,9 +59,9 @@ void Instance::update_eval_members()
{
scene = DEG_get_evaluated_scene(depsgraph);
view_layer = DEG_get_evaluated_view_layer(depsgraph);
// camera_eval_object = (camera_orig_object) ?
// DEG_get_evaluated_object(depsgraph, camera_orig_object) :
// nullptr;
camera_eval_object = (camera_orig_object) ?
DEG_get_evaluated_object(depsgraph, camera_orig_object) :
nullptr;
}
/** \} */

View File

@@ -42,6 +42,8 @@ class Instance {
/** Evaluated IDs. */
Scene *scene;
ViewLayer *view_layer;
Object *camera_eval_object;
Object *camera_orig_object;
/** Only available when rendering for final render. */
const RenderLayer *render_layer;
RenderEngine *render;

View File

@@ -19,6 +19,7 @@
namespace blender::eevee {
using draw::Framebuffer;
using draw::SwapChain;
using draw::Texture;
using draw::TextureFromPool;
@@ -26,6 +27,52 @@ using draw::TextureFromPool;
/* Minimum guaranteed UBO size (16 KiB). Parenthesized so the shift is not
 * broken by operator precedence at expansion sites (e.g. `MACRO * 2`). */
#define UBO_MIN_MAX_SUPPORTED_SIZE (1 << 14)
/* -------------------------------------------------------------------- */
/** \name Camera
* \{ */
/** Camera projection type. Values are fixed: they are shared with shader code. */
enum eCameraType : uint32_t {
  CAMERA_PERSP = 0u,
  CAMERA_ORTHO = 1u,
  CAMERA_PANO_EQUIRECT = 2u,
  CAMERA_PANO_EQUISOLID = 3u,
  CAMERA_PANO_EQUIDISTANT = 4u,
  CAMERA_PANO_MIRROR = 5u
};

/** True for any panoramic projection (everything except perspective & orthographic). */
static inline bool is_panoramic(eCameraType type)
{
  return (type != CAMERA_PERSP) && (type != CAMERA_ORTHO);
}
/**
 * Per-camera parameters uploaded to the GPU.
 * NOTE(review): field order appears to be shared with shader code — keep the
 * 16-byte alignment asserted below and do not reorder; confirm against the
 * GLSL side before changing.
 */
struct CameraData {
  /* View Matrices of the camera, not from any view! */
  float4x4 persmat; /* winmat * viewmat (see Camera::sync). */
  float4x4 persinv;
  float4x4 viewmat;
  float4x4 viewinv;
  float4x4 winmat;
  float4x4 wininv;
  /** Camera UV scale and bias. Also known as viewcamtexcofac. */
  float2 uv_scale;
  float2 uv_bias;
  /** Panorama parameters. Set to -1 / zero when no panoramic camera is used. */
  float2 equirect_scale;
  float2 equirect_scale_inv; /* 1 / equirect_scale. */
  float2 equirect_bias;
  float fisheye_fov;
  float fisheye_lens;
  /** Clipping distances. */
  float clip_near;
  float clip_far;
  /** Film pixel filter radius. */
  float filter_size;
  /** Projection type (eCameraType). */
  eCameraType type;
};
BLI_STATIC_ASSERT_ALIGN(CameraData, 16)
/** \} */
/* -------------------------------------------------------------------- */
/** \name Ray-Tracing
* \{ */
@@ -83,5 +130,7 @@ float4 utility_tx_sample(sampler2DArray util_tx, float2 uv, float layer)
#ifdef __cplusplus
using CameraDataBuf = draw::UniformBuffer<CameraData>;
} // namespace blender::eevee
#endif

View File

@@ -0,0 +1,166 @@
/**
* Camera projection / uv functions and utils.
**/
#pragma BLENDER_REQUIRE(common_math_lib.glsl)
/* -------------------------------------------------------------------- */
/** \name Panoramic Projections
*
* Adapted from Cycles to match EEVEE's coordinate system.
* \{ */
/* Map a view-space direction to equirectangular panorama UVs
 * (longitude/latitude remapped by the camera's equirect scale/bias). */
vec2 camera_equirectangular_from_direction(CameraData cam, vec3 dir)
{
  float longitude = atan(-dir.z, dir.x);
  float latitude = acos(dir.y / length(dir));
  vec2 angles = vec2(longitude, latitude);
  return (angles - cam.equirect_bias) * cam.equirect_scale_inv;
}
/* Inverse of camera_equirectangular_from_direction: UV to unit direction. */
vec3 camera_equirectangular_to_direction(CameraData cam, vec2 uv)
{
  vec2 angles = uv * cam.equirect_scale + cam.equirect_bias;
  float phi = angles.x;
  float theta = angles.y;
  float sin_theta = sin(theta);
  /* Spherical to cartesian, matching the coordinate convention above. */
  return vec3(sin_theta * cos(phi), cos(theta), -sin_theta * sin(phi));
}
/* Map a view-space direction to fisheye UVs (polar angle scaled by FOV). */
vec2 camera_fisheye_from_direction(CameraData cam, vec3 dir)
{
  float radius = atan(length(dir.xy), -dir.z) / cam.fisheye_fov;
  float azimuth = atan(dir.y, dir.x);
  vec2 uv = radius * vec2(cos(azimuth), sin(azimuth)) + 0.5;
  return (uv - cam.uv_bias) / cam.uv_scale;
}
/* Inverse fisheye mapping: UV to direction.
 * Returns vec3(0) for UVs outside the fisheye disk. */
vec3 camera_fisheye_to_direction(CameraData cam, vec2 uv)
{
  uv = uv * cam.uv_scale + cam.uv_bias;
  uv = (uv - 0.5) * 2.0;
  float radius = length(uv);
  if (radius > 1.0) {
    /* Outside the image circle. */
    return vec3(0.0);
  }
  float azimuth = safe_acos(uv.x * safe_rcp(radius));
  float polar = radius * cam.fisheye_fov * 0.5;
  /* acos() only covers the upper half; flip for the lower half of the disk. */
  azimuth = (uv.y < 0.0) ? -azimuth : azimuth;
  return vec3(cos(azimuth) * sin(polar), sin(azimuth) * sin(polar), -cos(polar));
}
/* Map a view-space direction to mirror-ball UVs. */
vec2 camera_mirror_ball_from_direction(CameraData cam, vec3 dir)
{
  dir = normalize(dir);
  dir.z -= 1.0;
  float fac = safe_rcp(2.0 * safe_sqrt(-0.5 * dir.z));
  dir *= fac;
  vec2 uv = dir.xy * 0.5 + 0.5;
  return (uv - cam.uv_bias) / cam.uv_scale;
}
/* Inverse mirror-ball mapping: UV to direction.
 * Returns vec3(0) for UVs outside the unit disk. */
vec3 camera_mirror_ball_to_direction(CameraData cam, vec2 uv)
{
  uv = uv * cam.uv_scale + cam.uv_bias;
  vec3 normal;
  normal.xy = uv * 2.0 - 1.0;
  if (len_squared(normal.xy) > 1.0) {
    return vec3(0.0);
  }
  /* Reconstruct the sphere normal, then reflect the view axis around it. */
  normal.z = -safe_sqrt(1.0 - sqr(normal.x) - sqr(normal.y));
  const vec3 incident = vec3(0.0, 0.0, 1.0);
  return reflect(incident, normal);
}
/** \} */
/* -------------------------------------------------------------------- */
/** \name Regular projections
* \{ */
/* Unproject a screen UV to a view-space position on the Z = 0 NDC plane. */
vec3 camera_view_from_uv(mat4 projmat, vec2 uv)
{
  vec2 ndc = uv * 2.0 - 1.0;
  return project_point(projmat, vec3(ndc, 0.0));
}
/* Project a view-space position to screen UVs using the given projection matrix. */
vec2 camera_uv_from_view(mat4 projmat, bool is_persp, vec3 vV)
{
  vec4 clip = projmat * vec4(vV, 1.0);
  if (is_persp && clip.w <= 0.0) {
    /* Return invalid coordinates for points behind the camera.
     * This can happen with panoramic projections. */
    return vec2(-1.0);
  }
  vec2 ndc = clip.xy / clip.w;
  return ndc * 0.5 + 0.5;
}
/** \} */
/* -------------------------------------------------------------------- */
/** \name General functions handling all projections
* \{ */
/* Unproject a screen UV according to the camera's projection type.
 * Panoramic cameras return a direction, regular cameras a view-space position. */
vec3 camera_view_from_uv(CameraData cam, vec2 uv)
{
  switch (cam.type) {
    case CAMERA_PANO_EQUIRECT:
      return camera_equirectangular_to_direction(cam, uv);
    case CAMERA_PANO_EQUIDISTANT:
      /* ATTR_FALLTHROUGH; */
    case CAMERA_PANO_EQUISOLID:
      return camera_fisheye_to_direction(cam, uv);
    case CAMERA_PANO_MIRROR:
      return camera_mirror_ball_to_direction(cam, uv);
    default:
    case CAMERA_ORTHO:
    case CAMERA_PERSP:
      return camera_view_from_uv(cam.wininv, uv);
  }
}
/* Project a view-space position/direction to screen UVs for any projection type. */
vec2 camera_uv_from_view(CameraData cam, vec3 vV)
{
  if (cam.type == CAMERA_PERSP) {
    return camera_uv_from_view(cam.winmat, true, vV);
  }
  if (cam.type == CAMERA_PANO_EQUIRECT) {
    return camera_equirectangular_from_direction(cam, vV);
  }
  if (cam.type == CAMERA_PANO_EQUISOLID || cam.type == CAMERA_PANO_EQUIDISTANT) {
    return camera_fisheye_from_direction(cam, vV);
  }
  if (cam.type == CAMERA_PANO_MIRROR) {
    return camera_mirror_ball_from_direction(cam, vV);
  }
  /* CAMERA_ORTHO and any unknown type. */
  return camera_uv_from_view(cam.winmat, false, vV);
}
/* Project a world-space position to screen UVs for any projection type.
 * Regular cameras use the combined persmat directly on the world position;
 * panoramic cameras need the view-space direction first. */
vec2 camera_uv_from_world(CameraData cam, vec3 V)
{
  vec3 vV;
  switch (cam.type) {
    default:
    case CAMERA_ORTHO:
      return camera_uv_from_view(cam.persmat, false, V);
    case CAMERA_PERSP:
      return camera_uv_from_view(cam.persmat, true, V);
    case CAMERA_PANO_EQUIRECT:
      vV = transform_point(cam.viewmat, V);
      return camera_equirectangular_from_direction(cam, vV);
    case CAMERA_PANO_EQUISOLID:
      /* ATTR_FALLTHROUGH; */
    case CAMERA_PANO_EQUIDISTANT:
      vV = transform_point(cam.viewmat, V);
      return camera_fisheye_from_direction(cam, vV);
    case CAMERA_PANO_MIRROR:
      vV = transform_point(cam.viewmat, V);
      return camera_mirror_ball_from_direction(cam, vV);
  }
}
/** \} */