Commit 6c6d1a9b63 allowed several units to be used for OSL camera
shader parameters. Only meters ('m') were supported as a distance
unit, because other distance units cannot be converted automatically
into the shader's unit.

This commit allows using 'mm' in addition to 'm', and makes the
Blender property use the 'DISTANCE_CAMERA' subtype. The assumption is
that someone specifying 'mm' wants the value specifically for camera
parameters, so it can be used as is, without any conversion in the
shader.

The camera templates were updated to use a focal length in mm.
Pull Request: https://projects.blender.org/blender/blender/pulls/147347
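
For illustration, a minimal pinhole camera using the new 'mm' unit
metadata might look like the sketch below. The shader name is
hypothetical; the camera_shader_raster_position() call and the
"cam:sensor_size" attribute are taken from the template that follows.

shader pinhole_camera(float focal_length = 50.0 [[ float min = 0.0, string unit = "mm" ]],
                      output point position = 0.0,
                      output vector direction = 0.0,
                      output color throughput = 1.0)
{
  /* The property value arrives in millimeters, as entered, with no conversion. */
  point Pcam = camera_shader_raster_position() - vector(0.5);

  vector sensor_size;
  getattribute("cam:sensor_size", sensor_size);

  Pcam = Pcam * sensor_size / focal_length;
  direction = normalize(vector(Pcam.x, Pcam.y, 1.0));
}

The full template below uses the same metadata on its focal_length
parameter, together with distortion, swirl, and depth of field options.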
/* An advanced perspective camera, implementing several examples
 * of things you can do with custom cameras. */

float invertDistortionModel(float x, float y, float k1, float k2, float k3)
{
  // Solves u = d*(1 + k1*d^2 + k2*d^4 + k3*d^6) for d.
  // Returns stretch factor.
  float u = sqrt(x * x + y * y);
  float d = u;
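  // Newton's method: repeatedly update d -= f(d) / f'(d), where f(d) is the
  // residual u - d*(1 + k1*d^2 + k2*d^4 + k3*d^6) and fp below is f'(d).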
  for (int i = 0; i < 50; i++) {
    float d2 = d * d;
    float f = u - d * (1 + d2 * (k1 + d2 * (k2 + d2 * k3)));
    float fp = -(1 + d2 * (3 * k1 + d2 * (5 * k2 + d2 * 7 * k3)));
    float diff = f / fp;
    d -= diff;
    if (fabs(diff) < 1e-7) {
      // Iteration is converging, return result.
      return d / u;
    }
    if (d > 100.0) {
      // Iteration is diverging, give up.
      return -1.0;
    }
  }
  // Reached iteration limit, give up.
  return -1.0;
}

shader camera(float focal_length = 50.0 [[ float min = 0.0,
                                            string unit = "mm",
                                            float sensitivity = 0.2 ]],
              int do_distortion = 0 [[ string widget = "checkBox"]],
              int do_swirl = 0 [[ string widget = "checkBox"]],
              int do_dof = 0 [[ string widget = "checkBox"]],
              float distortion_k1 = -0.2,
              float distortion_k2 = 0.0,
              float distortion_k3 = 0.0,
              float swirl_scale = 100.0,
              float swirl_amplitude = 0.01,
              float swirl_W = 0.0 [[ float sensitivity = 1]],
              output point position = 0.0,
              output vector direction = 0.0,
              output color throughput = 1.0)
{
  point Pcam = camera_shader_raster_position() - vector(0.5);

  if (do_distortion) {
    float distort = invertDistortionModel(
        Pcam.x, Pcam.y, distortion_k1, distortion_k2, distortion_k3);
    if (distort < 0.0) {
      // Distortion model failed, skip the path.
      throughput = color(0.0);
      return;
    }
    Pcam *= distort;
  }

  vector sensor_size;
  getattribute("cam:sensor_size", sensor_size);

  Pcam = Pcam * sensor_size / focal_length;

  if (do_swirl) {
    Pcam += swirl_amplitude * noise("perlin", Pcam * swirl_scale, swirl_W);
  }

  direction = normalize(vector(Pcam.x, Pcam.y, 1.0));

  if (do_dof) {
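    // Depth of field: place the ray origin on the aperture and re-aim it so
    // that rays from across the aperture converge on the focal plane.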
    float focal_distance;
    getattribute("cam:focal_distance", focal_distance);
    getattribute("cam:aperture_position", position);
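    // focal_length is given in millimeters; the 1e-3 factor converts it to
    // meters here.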
    position *= focal_length * 1e-3;

    point Pfocus = direction * focal_distance / direction.z;
    direction = normalize(Pfocus - position);
  }
}