#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2015-2022 Blender Authors
#
# SPDX-License-Identifier: Apache-2.0
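"""Sequencer render regression tests.

Renders frame 1 of every .blend file in --testdir as a PNG and compares the
result against known-good images in a "reference" directory, using oiiotool
for the comparison via the shared render_report module.

Example invocation (script name and paths are illustrative only):
    python3 sequencer_render_tests.py --blender /path/to/blender \
        --testdir /path/to/tests/sequencer --outdir /tmp/sequencer_out \
        --oiiotool /path/to/oiiotool
"""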
import argparse
import os
import sys
from pathlib import Path
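# Test files skipped by this runner; the list is passed to render_report.Report() below.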
BLOCKLIST = [
    "hdr_simple_export_hlg_12bit.blend",
    "hdr_simple_export_pq_12bit.blend",
    "hdr_simple_still_test_file.blend",
]
def get_arguments(filepath, output_filepath):
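    """Return the Blender command line arguments used to render one test file."""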
    dirname = os.path.dirname(filepath)
    basedir = os.path.dirname(dirname)

    args = [
        "--background",
        "--factory-startup",
        "--enable-autoexec",
        "--debug-memory",
        "--debug-exit-on-error",
        filepath,
        "-o", output_filepath,
        "-F", "PNG",
        "-f", "1",
    ]

    return args
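# For reference, the resulting Blender invocation looks roughly like this (binary and paths illustrative):
#   blender --background --factory-startup --enable-autoexec --debug-memory \
#       --debug-exit-on-error /path/to/test.blend -o /path/to/output -F PNG -f 1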
def create_argparse():
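    """Create the command line parser for this test runner."""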
    parser = argparse.ArgumentParser(
        description="Run test script for each blend file in TESTDIR, comparing the render result with known output."
    )
    parser.add_argument("--blender", required=True)
    parser.add_argument("--testdir", required=True)
    parser.add_argument("--outdir", required=True)
    parser.add_argument("--oiiotool", required=True)
    parser.add_argument("--batch", default=False, action="store_true")
    return parser
def main():
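    """Run the Sequencer render report over all test files and exit non-zero on failure."""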
    parser = create_argparse()
    args = parser.parse_args()

    from modules import render_report

    report = render_report.Report("Sequencer", args.outdir, args.oiiotool, blocklist=BLOCKLIST)
    report.set_pixelated(True)
    # Default error tolerances are quite large, lower them.
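    # 2.0 / 255.0 allows roughly a two-step difference per 8-bit channel before a pixel counts as failing.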
    report.set_fail_threshold(2.0 / 255.0)
    report.set_fail_percent(0.01)
    report.set_reference_dir("reference")

    ok = report.run(args.testdir, args.blender, get_arguments, batch=args.batch)
    # Exit status is 0 when all comparisons pass and 1 otherwise.
    sys.exit(not ok)
if __name__ == "__main__":
    main()