Video: HDR video input/output support
HDR video files are now properly read into Blender and can be rendered
out of Blender.

HDR video reading / decoding:
- Two flavors of HDR are recognized, based on color-related video
  metadata: "PQ" (Rec.2100 Perceptual Quantizer, aka SMPTE 2084) and
  "HLG" (Rec.2100 Hybrid Log-Gamma, aka ARIB STD-B67). Both are
  effectively read into floating-point images, and their color space
  transformations are done through OpenColorIO.
- The OCIO config shipped with Blender has been extended to contain the
  Rec.2100-PQ and Rec.2100-HLG color spaces.
- Note that if you already had an HDR video in the sequencer or in a
  movie clip, it looked "incorrect" previously and will continue to look
  incorrect, since it already has the "wrong" color space assigned to it.
  Either re-add it (which should assign the correct color space), or
  manually change the color space to the PQ or HLG one as needed (see
  the sketch after this list).
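
A minimal Python sketch of the re-add / fix-up workflow described above
(the strip name and file path are placeholders, not part of this change):

    import bpy

    scene = bpy.context.scene
    ed = scene.sequence_editor_create()
    # Re-adding the movie lets Blender pick the color space from the
    # video's color metadata (placeholder path).
    strip = ed.strips.new_movie(name='hdr_clip', filepath='/path/to/clip.mov',
                                channel=1, frame_start=1)
    print(strip.colorspace_settings.name)  # e.g. 'Rec.2100-PQ' or 'Rec.2100-HLG'
    # Or fix up a strip that was added before this change:
    strip.colorspace_settings.name = 'Rec.2100-PQ'  # or 'Rec.2100-HLG'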

HDR video writing / encoding:
- For H.265 and AV1, the video encoding options now display an HDR mode.
  As with reading, there are PQ and HLG HDR mode options (see the sketch
  after this list).
- Reference white is assumed to be 100 nits.
- YUV uses the "full" ("PC/JPEG") color range.
- No mastering display metadata is written into the video file, since
  that information is generally not known inside Blender.
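
A minimal Python sketch of enabling HDR output with these options (the
container, codec and output path are example choices; the enum
identifiers are the ones defined by this change):

    import bpy

    scene = bpy.context.scene
    scene.render.filepath = '//out/hdr_'                 # example output path
    scene.render.image_settings.file_format = 'FFMPEG'
    scene.render.image_settings.color_mode = 'RGB'       # HDR is not available for 'BW'
    scene.render.image_settings.color_depth = '10'       # or '12'
    scene.render.ffmpeg.format = 'QUICKTIME'             # example container
    scene.render.ffmpeg.codec = 'H265'                   # or 'AV1'
    scene.render.ffmpeg.video_hdr = 'REQ2100_PQ'         # or 'REQ2100_HLG'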
More details and screenshots in the PR.
Co-authored-by: Sergey Sharybin <sergey@blender.org>
Pull Request: https://projects.blender.org/blender/blender/pulls/120033
Committed by: Aras Pranckevicius
Parent: 7c7c68fd7a
Commit: d89c9c5155
@@ -461,6 +461,38 @@ colorspaces:
        - !<FileTransform> {src: pbrNeutral.cube, interpolation: tetrahedral}
        - !<ColorSpaceTransform> {src: Linear Rec.709, dst: sRGB}

  - !<ColorSpace>
    name: Rec.2100-PQ
    aliases: [rec2100_pq_display, Rec.2100-PQ - Display, pq_rec2020_display]
    family: Display
    equalitygroup: ""
    bitdepth: 32f
    description: Rec.2100-PQ 10000 nits peak display with reference white at 100 nits
    isdata: false
    categories: [file-io]
    encoding: hdr-video
    allocation: uniform
    from_scene_reference: !<GroupTransform>
      children:
        - !<ColorSpaceTransform> {src: Linear CIE-XYZ E, dst: Linear CIE-XYZ D65}
        - !<BuiltinTransform> {style: DISPLAY - CIE-XYZ-D65_to_REC.2100-PQ}

  - !<ColorSpace>
    name: Rec.2100-HLG
    aliases: [rec2100_hlg_display, Rec.2100-HLG - Display, hlg_rec2020_display]
    family: Display
    equalitygroup: ""
    bitdepth: 32f
    description: Rec.2100-HLG 1000 nits peak display with reference white at 100 nits
    isdata: false
    categories: [file-io]
    encoding: hdr-video
    allocation: uniform
    from_scene_reference: !<GroupTransform>
      children:
        - !<ColorSpaceTransform> {src: Linear CIE-XYZ E, dst: Linear CIE-XYZ D65}
        - !<BuiltinTransform> {style: DISPLAY - CIE-XYZ-D65_to_REC.2100-HLG-1000nit}

looks:
  - !<Look>
    name: Very High Contrast
@@ -528,13 +528,21 @@ class RENDER_PT_encoding_video(RenderOutputButtonsPanel, Panel):
        if needs_codec and ffmpeg.codec == 'NONE':
            return

        image_settings = context.scene.render.image_settings

        # Color depth. List of codecs needs to be in sync with
        # `IMB_ffmpeg_valid_bit_depths` in source code.
        use_bpp = needs_codec and ffmpeg.codec in {'H264', 'H265', 'AV1', 'PRORES', 'FFV1'}
        if use_bpp:
            image_settings = context.scene.render.image_settings
            layout.prop(image_settings, "color_depth", expand=True)

        # HDR options.
        use_hdr = needs_codec and ffmpeg.codec in {
            'H265', 'AV1'} and image_settings.color_depth in {
            '10', '12'} and image_settings.color_mode != 'BW'
        if use_hdr:
            layout.prop(ffmpeg, "video_hdr")

        if ffmpeg.codec == 'DNXHD':
            layout.prop(ffmpeg, "use_lossless_output")
@@ -47,10 +47,22 @@ void IMB_colormanagegent_copy_settings(ImBuf *ibuf_src, ImBuf *ibuf_dst);
void IMB_colormanagement_assign_float_colorspace(ImBuf *ibuf, const char *name);
void IMB_colormanagement_assign_byte_colorspace(ImBuf *ibuf, const char *name);

const char *IMB_colormanagement_get_float_colorspace(ImBuf *ibuf);
const char *IMB_colormanagement_get_rect_colorspace(ImBuf *ibuf);
const char *IMB_colormanagement_get_float_colorspace(const ImBuf *ibuf);
const char *IMB_colormanagement_get_rect_colorspace(const ImBuf *ibuf);
const char *IMB_colormanagement_space_from_filepath_rules(const char *filepath);

/* Get colorspace name used for Rec.2100 PQ Display conversion.
 *
 * Searches for one of the color spaces or aliases: Rec.2100-PQ, Rec.2100-PQ - Display, rec2100_pq,
 * rec2100_pq_display. If none found returns nullptr. */
const char *IMB_colormanagement_get_rec2100_pq_display_colorspace();

/* Get colorspace name used for Rec.2100 HLG Display conversion.
 *
 * Searches for one of the color spaces or aliases: Rec.2100-HLG, Rec.2100-HLG - Display,
 * rec2100_hlg, rec2100_hlg_display. If none found returns nullptr. */
const char *IMB_colormanagement_get_rec2100_hlg_display_colorspace();

const ColorSpace *IMB_colormanagement_space_get_named(const char *name);
bool IMB_colormanagement_space_is_data(const ColorSpace *colorspace);
bool IMB_colormanagement_space_is_scene_linear(const ColorSpace *colorspace);
@@ -1140,7 +1140,7 @@ void IMB_colormanagement_assign_byte_colorspace(ImBuf *ibuf, const char *name)
  }
}

const char *IMB_colormanagement_get_float_colorspace(ImBuf *ibuf)
const char *IMB_colormanagement_get_float_colorspace(const ImBuf *ibuf)
{
  if (ibuf->float_buffer.colorspace) {
    return ibuf->float_buffer.colorspace->name().c_str();
@@ -1149,7 +1149,7 @@ const char *IMB_colormanagement_get_float_colorspace(ImBuf *ibuf)
  return IMB_colormanagement_role_colorspace_name_get(COLOR_ROLE_SCENE_LINEAR);
}

const char *IMB_colormanagement_get_rect_colorspace(ImBuf *ibuf)
const char *IMB_colormanagement_get_rect_colorspace(const ImBuf *ibuf)
{
  if (ibuf->byte_buffer.colorspace) {
    return ibuf->byte_buffer.colorspace->name().c_str();
@@ -1163,6 +1163,35 @@ const char *IMB_colormanagement_space_from_filepath_rules(const char *filepath)
  return g_config->get_color_space_from_filepath(filepath);
}

static const char *get_first_resolved_colorspace_name(const blender::Span<const char *> names)
{
  for (const char *name : names) {
    const ColorSpace *colorspace = IMB_colormanagement_space_get_named(name);
    if (colorspace) {
      return colorspace->name().c_str();
    }
  }
  return nullptr;
}

const char *IMB_colormanagement_get_rec2100_pq_display_colorspace()
{
  return get_first_resolved_colorspace_name({"Rec.2100-PQ",
                                             "Rec.2100-PQ - Display",
                                             "rec2100_pq",
                                             "rec2100_pq_display",
                                             "pq_rec2020_display"});
}

const char *IMB_colormanagement_get_rec2100_hlg_display_colorspace()
{
  return get_first_resolved_colorspace_name({"Rec.2100-HLG",
                                             "Rec.2100-HLG - Display",
                                             "rec2100_hlg",
                                             "rec2100_hlg_display",
                                             "hlg_rec2020_display"});
}

const ColorSpace *IMB_colormanagement_space_get_named(const char *name)
{
  return g_config->get_color_space(name);
@@ -16,6 +16,7 @@
#include <sys/types.h>

#include "BLI_path_utils.hh"
#include "BLI_span.hh"
#include "BLI_string.h"
#include "BLI_task.hh"
#include "BLI_threads.h"
@@ -55,6 +56,8 @@ extern "C" {
static void free_anim_ffmpeg(MovieReader *anim);
#endif

static bool anim_getnew(MovieReader *anim);

void MOV_close(MovieReader *anim)
{
  if (anim == nullptr) {
@@ -98,6 +101,45 @@ IDProperty *MOV_load_metadata(MovieReader *anim)
  return anim->metadata;
}

static void probe_video_colorspace(MovieReader *anim, char r_colorspace_name[IM_MAX_SPACE])
{
  /* Use default role as fallback (i.e. it is an unknown combination of colorspace and primaries)
   */
  BLI_strncpy(r_colorspace_name,
              IMB_colormanagement_role_colorspace_name_get(COLOR_ROLE_DEFAULT_BYTE),
              IM_MAX_SPACE);

  if (anim->state == MovieReader::State::Uninitialized) {
    if (!anim_getnew(anim)) {
      return;
    }
  }

  const AVColorTransferCharacteristic color_trc = anim->pCodecCtx->color_trc;
  const AVColorSpace colorspace = anim->pCodecCtx->colorspace;
  const AVColorPrimaries color_primaries = anim->pCodecCtx->color_primaries;

  if (color_trc == AVCOL_TRC_ARIB_STD_B67 && color_primaries == AVCOL_PRI_BT2020 &&
      colorspace == AVCOL_SPC_BT2020_NCL)
  {
    const char *hlg_name = IMB_colormanagement_get_rec2100_hlg_display_colorspace();
    if (hlg_name) {
      BLI_strncpy(r_colorspace_name, hlg_name, IM_MAX_SPACE);
    }
    return;
  }

  if (color_trc == AVCOL_TRC_SMPTEST2084 && color_primaries == AVCOL_PRI_BT2020 &&
      colorspace == AVCOL_SPC_BT2020_NCL)
  {
    const char *pq_name = IMB_colormanagement_get_rec2100_pq_display_colorspace();
    if (pq_name) {
      BLI_strncpy(r_colorspace_name, pq_name, IM_MAX_SPACE);
    }
    return;
  }
}

MovieReader *MOV_open_file(const char *filepath,
                           const int ib_flags,
                           const int streamindex,
@@ -110,20 +152,27 @@ MovieReader *MOV_open_file(const char *filepath,

  anim = MEM_new<MovieReader>("anim struct");
  if (anim != nullptr) {
    /* Initialize colorspace to default if not yet set. */
    const char *default_colorspace = IMB_colormanagement_role_colorspace_name_get(
        COLOR_ROLE_DEFAULT_BYTE);
    if (colorspace && colorspace[0] == '\0') {
      BLI_strncpy(colorspace, default_colorspace, IM_MAX_SPACE);
    }

    /* Inherit colorspace from argument if provided. */
    STRNCPY(anim->colorspace, colorspace ? colorspace : default_colorspace);

    STRNCPY(anim->filepath, filepath);
    anim->ib_flags = ib_flags;
    anim->streamindex = streamindex;
    anim->keep_original_colorspace = keep_original_colorspace;

    if (colorspace && colorspace[0] != '\0') {
      /* Use colorspace from argument, if provided. */
      STRNCPY(anim->colorspace, colorspace);
    }
    else {
      /* Try to initialize colorspace from the FFmpeg stream by interpreting color information from
       * it. */
      char file_colorspace[IM_MAX_SPACE];
      probe_video_colorspace(anim, file_colorspace);
      STRNCPY(anim->colorspace, file_colorspace);
      if (colorspace) {
        /* Copy the used colorspace into output argument. */
        BLI_strncpy(colorspace, file_colorspace, IM_MAX_SPACE);
      }
    }
  }
  return anim;
}
@@ -39,6 +39,8 @@
# include "MOV_enums.hh"
# include "MOV_util.hh"

# include "IMB_colormanagement.hh"

# include "ffmpeg_swscale.hh"
# include "movie_util.hh"

@@ -200,15 +202,167 @@ static bool write_video_frame(MovieWriter *context, AVFrame *frame, ReportList *
  return success;
}

/* read and encode a frame of video from the buffer */
static AVFrame *generate_video_frame(MovieWriter *context, const ImBuf *image)
/* Allocate new ImBuf of the size of the given input which only contains float buffer with pixels
 * from the input.
 *
 * For the float image buffers it is similar to IMB_dupImBuf() but it ensures that the byte buffer
 * is not allocated.
 *
 * For the byte image buffers it is similar to IMB_dupImBuf() followed by IMB_float_from_byte(),
 * but without temporary allocation, and result containing only single float buffer.
 *
 * No color space conversion is performed. The result float buffer might be in a non-linear space
 * denoted by the float_buffer.colorspace. */
static ImBuf *alloc_imbuf_for_hdr_transform(const ImBuf *input_ibuf)
{
  if (!input_ibuf) {
    return nullptr;
  }

  /* Allocate new image buffer without float buffer just yet.
   * This allows to properly initialize the number of channels used in the buffer. */
  /* TODO(sergey): Make it a reusable function.
   * This is a common pattern used in few areas with the goal to bypass the hardcoded number of
   * channels used by IMB_allocImBuf(). */
  ImBuf *result_ibuf = IMB_allocImBuf(input_ibuf->x, input_ibuf->y, input_ibuf->planes, 0);
  result_ibuf->channels = input_ibuf->float_buffer.data ? input_ibuf->channels : 4;

  /* Allocate float buffer with the proper number of channels. */
  const size_t num_pixels = IMB_get_pixel_count(input_ibuf);
  float *buffer = MEM_malloc_arrayN<float>(num_pixels * result_ibuf->channels, "movie hdr image");
  IMB_assign_float_buffer(result_ibuf, buffer, IB_TAKE_OWNERSHIP);

  /* Transfer flags related to color space conversion from the original image buffer. */
  result_ibuf->flags |= (input_ibuf->flags & IB_alphamode_channel_packed);

  if (input_ibuf->float_buffer.data) {
    /* Simple case: copy pixels from the source image as-is, without any conversion.
     * The result has the same colorspace as the input. */
    memcpy(result_ibuf->float_buffer.data,
           input_ibuf->float_buffer.data,
           num_pixels * input_ibuf->channels * sizeof(float));
    result_ibuf->float_buffer.colorspace = input_ibuf->float_buffer.colorspace;
  }
  else {
    /* Convert byte buffer to float buffer.
     * The exact profile is not important here: it should match for the source and destination so
     * that the function only does alpha and byte->float conversions. */
    const bool predivide = IMB_alpha_affects_rgb(input_ibuf);
    IMB_buffer_float_from_byte(buffer,
                               input_ibuf->byte_buffer.data,
                               IB_PROFILE_SRGB,
                               IB_PROFILE_SRGB,
                               predivide,
                               input_ibuf->x,
                               input_ibuf->y,
                               result_ibuf->x,
                               input_ibuf->x);
  }

  return result_ibuf;
}

static ImBuf *do_pq_transform(const ImBuf *input_ibuf)
{
  ImBuf *ibuf = alloc_imbuf_for_hdr_transform(input_ibuf);
  if (!ibuf) {
    /* Error in input or allocation has failed. */
    return nullptr;
  }

  /* Get `Rec.2100-PQ Display` or its alias from the OpenColorIO configuration. */
  const char *rec2100_pq_colorspace = IMB_colormanagement_get_rec2100_pq_display_colorspace();
  if (!rec2100_pq_colorspace) {
    /* TODO(sergey): Error reporting if the colorspace is not found. */
    return ibuf;
  }

  /* Convert from the current floating point buffer colorspace to Rec.2100-PQ. */
  IMB_colormanagement_transform_float(ibuf->float_buffer.data,
                                      ibuf->x,
                                      ibuf->y,
                                      ibuf->channels,
                                      IMB_colormanagement_get_float_colorspace(input_ibuf),
                                      rec2100_pq_colorspace,
                                      IMB_alpha_affects_rgb(ibuf));

  return ibuf;
}

static ImBuf *do_hlg_transform(const ImBuf *input_ibuf)
{
  ImBuf *ibuf = alloc_imbuf_for_hdr_transform(input_ibuf);
  if (!ibuf) {
    /* Error in input or allocation has failed. */
    return nullptr;
  }

  /* Get `Rec.2100-HLG Display` or its alias from the OpenColorIO configuration.
   * The color space is supposed to be Rec.2100-HLG, 1000 nit. */
  const char *rec2100_hlg_colorspace = IMB_colormanagement_get_rec2100_hlg_display_colorspace();
  if (!rec2100_hlg_colorspace) {
    /* TODO(sergey): Error reporting if the colorspace is not found. */
    return ibuf;
  }

  /* Convert from the current floating point buffer colorspace to Rec.2100-HLG, 1000 nit. */
  IMB_colormanagement_transform_float(ibuf->float_buffer.data,
                                      ibuf->x,
                                      ibuf->y,
                                      ibuf->channels,
                                      IMB_colormanagement_get_float_colorspace(input_ibuf),
                                      rec2100_hlg_colorspace,
                                      IMB_alpha_affects_rgb(ibuf));

  return ibuf;
}

static const ImBuf *do_hdr_transform_if_needed(MovieWriter *context, const ImBuf *input_ibuf)
{
  if (!input_ibuf) {
    return nullptr;
  }

  if (!context || !context->video_codec) {
    return input_ibuf;
  }

  const AVCodecContext &codec = *context->video_codec;

  const AVColorTransferCharacteristic color_trc = codec.color_trc;
  const AVColorSpace colorspace = codec.colorspace;
  const AVColorPrimaries color_primaries = codec.color_primaries;

  if (color_trc == AVCOL_TRC_SMPTEST2084 && color_primaries == AVCOL_PRI_BT2020 &&
      colorspace == AVCOL_SPC_BT2020_NCL)
  {
    return do_pq_transform(input_ibuf);
  }

  if (color_trc == AVCOL_TRC_ARIB_STD_B67 && color_primaries == AVCOL_PRI_BT2020 &&
      colorspace == AVCOL_SPC_BT2020_NCL)
  {
    return do_hlg_transform(input_ibuf);
  }

  return input_ibuf;
}

/* read and encode a frame of video from the buffer */
static AVFrame *generate_video_frame(MovieWriter *context, const ImBuf *input_ibuf)
{
  const ImBuf *image = do_hdr_transform_if_needed(context, input_ibuf);

  const uint8_t *pixels = image->byte_buffer.data;
  const float *pixels_fl = image->float_buffer.data;

  /* Use float input if needed. */
  const bool use_float = context->img_convert_frame != nullptr &&
                         context->img_convert_frame->format != AV_PIX_FMT_RGBA;
  if ((!use_float && (pixels == nullptr)) || (use_float && (pixels_fl == nullptr))) {
    if (image != input_ibuf) {
      IMB_freeImBuf(const_cast<ImBuf *>(image));
    }
    return nullptr;
  }

@@ -291,6 +445,10 @@ static AVFrame *generate_video_frame(MovieWriter *context, const ImBuf *image)
    ffmpeg_sws_scale_frame(context->img_convert_ctx, context->current_frame, rgb_frame);
  }

  if (image != input_ibuf) {
    IMB_freeImBuf(const_cast<ImBuf *>(image));
  }

  return context->current_frame;
}

@@ -770,6 +928,15 @@ static AVStream *alloc_video_stream(MovieWriter *context,
  const bool is_10_bpp = rd->im_format.depth == R_IMF_CHAN_DEPTH_10;
  const bool is_12_bpp = rd->im_format.depth == R_IMF_CHAN_DEPTH_12;
  const bool is_16_bpp = rd->im_format.depth == R_IMF_CHAN_DEPTH_16;

  eFFMpegVideoHdr hdr = eFFMpegVideoHdr(rd->ffcodecdata.video_hdr);
  /* Never use HDR for non-10/12 bpp or grayscale outputs. */
  if ((!is_10_bpp && !is_12_bpp) || rd->im_format.planes == R_IMF_PLANES_BW) {
    hdr = FFM_VIDEO_HDR_NONE;
  }
  const bool is_hdr_pq = hdr == FFM_VIDEO_HDR_REC2100_PQ;
  const bool is_hdr_hlg = hdr == FFM_VIDEO_HDR_REC2100_HLG;

  if (is_10_bpp) {
    c->pix_fmt = AV_PIX_FMT_YUV420P10LE;
  }
@@ -916,7 +1083,21 @@ static AVStream *alloc_video_stream(MovieWriter *context,
  /* If output pixel format is not RGB(A), setup colorspace metadata. */
  const AVPixFmtDescriptor *pix_fmt_desc = av_pix_fmt_desc_get(c->pix_fmt);
  const bool set_bt709 = (pix_fmt_desc->flags & AV_PIX_FMT_FLAG_RGB) == 0;
  if (set_bt709) {
  if (is_hdr_pq) {
    /* TODO(sergey): Consider making the range an option to cover more use-cases. */
    c->color_range = AVCOL_RANGE_JPEG;
    c->color_primaries = AVCOL_PRI_BT2020;
    c->color_trc = AVCOL_TRC_SMPTEST2084;
    c->colorspace = AVCOL_SPC_BT2020_NCL;
  }
  else if (is_hdr_hlg) {
    /* TODO(sergey): Consider making the range an option to cover more use-cases. */
    c->color_range = AVCOL_RANGE_JPEG;
    c->color_primaries = AVCOL_PRI_BT2020;
    c->color_trc = AVCOL_TRC_ARIB_STD_B67;
    c->colorspace = AVCOL_SPC_BT2020_NCL;
  }
  else if (set_bt709) {
    c->color_range = AVCOL_RANGE_MPEG;
    c->color_primaries = AVCOL_PRI_BT709;
    c->color_trc = AVCOL_TRC_BT709;
@@ -966,22 +1147,40 @@ static AVStream *alloc_video_stream(MovieWriter *context,
    context->img_convert_ctx = nullptr;
  }
  else {
    /* Output pixel format is different, allocate frame for conversion. */
    AVPixelFormat src_format = is_10_bpp || is_12_bpp || is_16_bpp ? AV_PIX_FMT_GBRAPF32LE :
                                                                     AV_PIX_FMT_RGBA;
    /* Output pixel format is different, allocate frame for conversion.
     * Setup RGB->YUV conversion with proper coefficients (depending on whether it is SDR BT.709,
     * or HDR BT.2020). */
    const AVPixelFormat src_format = is_10_bpp || is_12_bpp || is_16_bpp ? AV_PIX_FMT_GBRAPF32LE :
                                                                           AV_PIX_FMT_RGBA;
    context->img_convert_frame = alloc_frame(src_format, c->width, c->height);
    /* Setup BT.709 coefficients for RGB->YUV conversion, if needed. */
    context->img_convert_ctx = ffmpeg_sws_get_context(c->width,
                                                      c->height,
                                                      src_format,
                                                      false,
                                                      -1,
                                                      c->width,
                                                      c->height,
                                                      c->pix_fmt,
                                                      false,
                                                      set_bt709 ? AVCOL_SPC_BT709 : -1,
                                                      SWS_BICUBIC);
    if (is_hdr_pq || is_hdr_hlg) {
      /* Special conversion for the Rec.2100 PQ and HLG output: the result color space is BT.2020,
       * and also use full range. */
      context->img_convert_ctx = ffmpeg_sws_get_context(c->width,
                                                        c->height,
                                                        src_format,
                                                        true,
                                                        -1,
                                                        c->width,
                                                        c->height,
                                                        c->pix_fmt,
                                                        true,
                                                        AVCOL_SPC_BT2020_NCL,
                                                        SWS_BICUBIC);
    }
    else {
      context->img_convert_ctx = ffmpeg_sws_get_context(c->width,
                                                        c->height,
                                                        src_format,
                                                        false,
                                                        -1,
                                                        c->width,
                                                        c->height,
                                                        c->pix_fmt,
                                                        false,
                                                        set_bt709 ? AVCOL_SPC_BT709 : -1,
                                                        SWS_BICUBIC);
    }
  }

  avcodec_parameters_from_context(st->codecpar, c);
@@ -155,6 +155,12 @@ typedef enum IMB_Ffmpeg_Codec_ID {
  FFMPEG_CODEC_ID_OPUS = 86076,
} IMB_Ffmpeg_Codec_ID;

typedef enum eFFMpegVideoHdr {
  FFM_VIDEO_HDR_NONE = 0,
  FFM_VIDEO_HDR_REC2100_HLG = 1,
  FFM_VIDEO_HDR_REC2100_PQ = 2,
} eFFMpegVideoHdr;

typedef struct FFMpegCodecData {
  int type;
  int codec; /* Use `codec_id_get()` instead! IMB_Ffmpeg_Codec_ID */
@@ -178,7 +184,7 @@ typedef struct FFMpegCodecData {
  int rc_buffer_size;
  int mux_packet_size;
  int mux_rate;
  char _pad0[4];
  int video_hdr; /* eFFMpegVideoHdr */

#ifdef __cplusplus
  IMB_Ffmpeg_Codec_ID codec_id_get() const

@@ -6481,6 +6481,21 @@ static void rna_def_scene_ffmpeg_settings(BlenderRNA *brna)
      {0, nullptr, 0, nullptr, nullptr},
  };

  static const EnumPropertyItem ffmpeg_hdr_items[] = {
      {FFM_VIDEO_HDR_NONE, "NONE", 0, "None", "No High Dynamic Range"},
      {FFM_VIDEO_HDR_REC2100_PQ,
       "REQ2100_PQ",
       0,
       "Rec.2100 PQ",
       "Rec.2100 color space with Perceptual Quantizer HDR encoding"},
      {FFM_VIDEO_HDR_REC2100_HLG,
       "REQ2100_HLG",
       0,
       "Rec.2100 HLG",
       "Rec.2100 color space with Hybrid-Log Gamma HDR encoding"},
      {0, nullptr, 0, nullptr, nullptr},
  };

  static const EnumPropertyItem ffmpeg_audio_codec_items[] = {
      {FFMPEG_CODEC_ID_NONE,
       "NONE",
@@ -6543,6 +6558,14 @@ static void rna_def_scene_ffmpeg_settings(BlenderRNA *brna)
  RNA_def_property_ui_text(prop, "Bitrate", "Video bitrate (kbit/s)");
  RNA_def_property_update(prop, NC_SCENE | ND_RENDER_OPTIONS, nullptr);

  prop = RNA_def_property(srna, "video_hdr", PROP_ENUM, PROP_NONE);
  RNA_def_property_enum_sdna(prop, nullptr, "video_hdr");
  RNA_def_property_clear_flag(prop, PROP_ANIMATABLE);
  RNA_def_property_enum_items(prop, ffmpeg_hdr_items);
  RNA_def_property_enum_default(prop, FFM_VIDEO_HDR_NONE);
  RNA_def_property_ui_text(prop, "HDR", "High Dynamic Range options");
  RNA_def_property_update(prop, NC_SCENE | ND_RENDER_OPTIONS, nullptr);

  prop = RNA_def_property(srna, "minrate", PROP_INT, PROP_NONE);
  RNA_def_property_int_sdna(prop, nullptr, "rc_min_rate");
  RNA_def_property_clear_flag(prop, PROP_ANIMATABLE);
BIN  tests/files/sequence_editing/ffmpeg/hdr_input_hlg_12bit.blend  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/hdr_input_pq_12bit.blend  (new file, stored with Git LFS, binary file not shown)
@@ -0,0 +1,14 @@
.blend files used to generate:
- hdr_simple_export_hlg_12bit.mov
- hdr_simple_export_pq_12bit.mov

Step 1:
Open and render hdr_simple_still_test_file.blend
It will generate the hdr_simple_still_test_file.exr file.

Step 2:
Open and render hdr_simple_export_hlg_12bit.blend and hdr_simple_export_pq_12bit.blend.
These files generate videos in the out/ folder.

Step 3:
Copy files from the out/ folder to their desired final destination.
BIN  tests/files/sequence_editing/ffmpeg/media/generate/hdr_simple_export_hlg_12bit.blend  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/media/generate/hdr_simple_export_pq_12bit.blend  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/media/generate/hdr_simple_still_test_file.blend  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/media/hdr_simple_export_hlg_12bit.mov  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/media/hdr_simple_export_pq_12bit.mov  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/media/hdr_simple_still_test_file.exr  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/reference/hdr_input_hlg_12bit.png  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/ffmpeg/reference/hdr_input_pq_12bit.png  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/video_output/reference/video_output_hlg_12bit_mov.png  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/video_output/reference/video_output_pq_12bit_mov.png  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/video_output/video_output_hlg_12bit_mov.blend  (new file, stored with Git LFS, binary file not shown)
BIN  tests/files/sequence_editing/video_output/video_output_pq_12bit_mov.blend  (new file, stored with Git LFS, binary file not shown)
@@ -1261,6 +1261,10 @@ if(TEST_SRC_DIR_EXISTS)
      ffmpeg
    )

    set(video_output_tests
      video_output
    )

    foreach(render_test ${render_tests})
      add_render_test(
        sequencer_render_${render_test}
@@ -1269,6 +1273,22 @@ if(TEST_SRC_DIR_EXISTS)
        --outdir "${TEST_OUT_DIR}/sequence_editing"
      )
    endforeach()

    foreach(video_output_test ${video_output_tests})
      add_render_test(
        sequencer_render_${video_output_test}
        ${CMAKE_CURRENT_LIST_DIR}/sequencer_video_output_tests.py
        --testdir "${TEST_SRC_DIR}/sequence_editing/${video_output_test}"
        --outdir "${TEST_OUT_DIR}/sequence_editing"
      )
    endforeach()

    add_blender_test(
      sequencer_input_colorspace
      --python ${CMAKE_CURRENT_LIST_DIR}/sequencer_input_colorspace.py
      --
      --testdir "${TEST_SRC_DIR}/sequence_editing"
    )
  endif()

# ------------------------------------------------------------------------------
@@ -550,6 +550,7 @@ class Report:
            remaining_filepaths.pop(0)
            file_crashed = False
            for test in self._get_filepath_tests(filepath):
                self.postprocess_test(blender, test)
                if not os.path.exists(test.tmp_out_img) or os.path.getsize(test.tmp_out_img) == 0:
                    if crash:
                        # In case of crash, stop after missing files and re-render remaining
@@ -589,6 +590,14 @@ class Report:

        return test_results

    def postprocess_test(self, blender, test):
        """
        Post-process a test result after Blender has run.
        For example, this is where a video output is converted to a still image suitable for image diffing.
        """

        pass

    def _run_all_tests(self, dirname, dirpath, blender, arguments_cb, batch, fail_silently):
        passed_tests = []
        failed_tests = []
tests/python/sequencer_input_colorspace.py  (new file, 58 lines)
@@ -0,0 +1,58 @@
# SPDX-FileCopyrightText: 2015-2025 Blender Authors
#
# SPDX-License-Identifier: Apache-2.0

# ./blender.bin --background --factory-startup \
#     --python tests/python/sequencer_input_colorspace.py -- --testdir tests/files/sequence_editing/

import bpy

import argparse
import sys
import unittest

from pathlib import Path

TEST_DIR: Path


class MovieInputTest(unittest.TestCase):
    def get_movie_colorspace(self, filepath: Path):
        scene = bpy.context.scene
        ed = scene.sequence_editor_create()
        strip = ed.strips.new_movie(name='input', filepath=str(filepath), channel=1, frame_start=1)
        colorspace = strip.colorspace_settings.name
        ed.strips.remove(strip)
        return colorspace


class FFmpegHDRColorspace(MovieInputTest):
    def test_pq(self):
        prefix = TEST_DIR / Path("ffmpeg") / "media"

        self.assertEqual(self.get_movie_colorspace(prefix / "hdr_simple_export_pq_12bit.mov"), "Rec.2100-PQ")

    def test_hlg(self):
        prefix = TEST_DIR / Path("ffmpeg") / "media"

        self.assertEqual(self.get_movie_colorspace(prefix / "hdr_simple_export_hlg_12bit.mov"), "Rec.2100-HLG")


def main():
    global TEST_DIR

    argv = [sys.argv[0]]
    if '--' in sys.argv:
        argv += sys.argv[sys.argv.index('--') + 1:]

    parser = argparse.ArgumentParser()
    parser.add_argument('--testdir', required=True, type=Path)

    args, remaining = parser.parse_known_args(argv)

    TEST_DIR = args.testdir
    unittest.main(argv=remaining)


if __name__ == "__main__":
    main()
@@ -9,6 +9,13 @@ import sys
from pathlib import Path


BLOCKLIST = [
    "hdr_simple_export_hlg_12bit.blend",
    "hdr_simple_export_pq_12bit.blend",
    "hdr_simple_still_test_file.blend",
]


def get_arguments(filepath, output_filepath):
    dirname = os.path.dirname(filepath)
    basedir = os.path.dirname(dirname)
@@ -21,8 +28,9 @@ def get_arguments(filepath, output_filepath):
        "--debug-exit-on-error",
        filepath,
        "-o", output_filepath,
        "-F", "PNG",
        "-f", "1",
        "-F", "PNG"]
    ]

    return args

@@ -44,7 +52,7 @@ def main():
    args = parser.parse_args()

    from modules import render_report
    report = render_report.Report("Sequencer", args.outdir, args.oiiotool)
    report = render_report.Report("Sequencer", args.outdir, args.oiiotool, blocklist=BLOCKLIST)
    report.set_pixelated(True)
    # Default error tolerances are quite large, lower them.
    report.set_fail_threshold(2.0 / 255.0)
tests/python/sequencer_video_output_tests.py  (new file, 123 lines)
@@ -0,0 +1,123 @@
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2015-2025 Blender Authors
#
# SPDX-License-Identifier: Apache-2.0

import argparse
import sys
import subprocess

from pathlib import Path
from modules import render_report


def get_movie_file_suffix(filepath):
    """
    Get suffix used for the video output.
    The script does not have access to the .blend file content, so deduce it from the .blend filename.
    """

    return Path(filepath).stem.split("_")[-1]


def get_arguments(filepath, output_filepath):
    suffix = get_movie_file_suffix(filepath)

    args = [
        "--background",
        "--factory-startup",
        "--enable-autoexec",
        "--debug-memory",
        "--debug-exit-on-error",
        filepath,
        "-o", f"{output_filepath}.{suffix}",
        "-a",
    ]

    return args


def create_argparse():
    parser = argparse.ArgumentParser(
        description="Run test script for each blend file in TESTDIR, comparing the render result with known output."
    )
    parser.add_argument("--blender", required=True)
    parser.add_argument("--testdir", required=True)
    parser.add_argument("--outdir", required=True)
    parser.add_argument("--oiiotool", required=True)
    parser.add_argument("--batch", default=False, action="store_true")
    return parser


class VideoOutputReport(render_report.Report):
    def postprocess_test(self, blender, test):
        suffix = get_movie_file_suffix(test.filepath)

        video_file = Path(f"{test.tmp_out_img_base}.{suffix}").as_posix()

        # If oiiotool supported FFmpeg video input, this could be used instead.
        """
        command = (
            self.oiiotool,
            "-i", video_file,
            "-o", test.tmp_out_img,
        )

        try:
            subprocess.check_output(command)
        except subprocess.CalledProcessError as e:
            pass
        """

        # Blender's render pipeline always appends a frame suffix unless # is present in the file path.
        # Here we need the file name to match exactly, so we trick Blender by going 0001 -> #### mask,
        # allowing the render pipeline to expand it back to 0001.
        out_filepath = test.tmp_out_img.replace("0001", "####")

        python_expr = (
            f"""
import bpy
scene = bpy.context.scene
scene.render.resolution_x = 1920
scene.render.resolution_y = 1080
scene.render.resolution_percentage = 25
ed = scene.sequence_editor_create()
strip = ed.strips.new_movie(name='input', filepath='{video_file}', channel=1, frame_start=1)
strip.colorspace_settings.name = 'Non-Color'
""")

        command = (
            blender,
            "--background",
            "--factory-startup",
            "--enable-autoexec",
            "--python-expr", python_expr,
            "-o", out_filepath,
            "-F", "PNG",
            "-f", "1",
        )

        try:
            subprocess.check_output(command)
        except subprocess.CalledProcessError as e:
            pass


def main():
    parser = create_argparse()
    args = parser.parse_args()

    report = VideoOutputReport("Sequencer", args.outdir, args.oiiotool)
    report.set_pixelated(True)
    # Default error tolerances are quite large, lower them.
    report.set_fail_threshold(2.0 / 255.0)
    report.set_fail_percent(0.01)
    report.set_reference_dir("reference")

    ok = report.run(args.testdir, args.blender, get_arguments, batch=args.batch)

    sys.exit(not ok)


if __name__ == "__main__":
    main()