2023-08-16 00:20:26 +10:00
|
|
|
# SPDX-FileCopyrightText: 2018-2023 Blender Authors
|
2023-06-15 13:09:04 +10:00
|
|
|
#
|
2022-02-11 09:07:11 +11:00
|
|
|
# SPDX-License-Identifier: Apache-2.0
|
2022-02-09 23:25:53 +11:00
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
Compare renders or screenshots against reference versions and generate
|
|
|
|
|
a HTML report showing the differences, for regression testing.
|
|
|
|
|
"""
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
import glob
|
|
|
|
|
import os
|
|
|
|
|
import pathlib
|
|
|
|
|
import shutil
|
|
|
|
|
import subprocess
|
|
|
|
|
import time
|
2025-02-21 19:14:17 +01:00
|
|
|
import multiprocessing
|
2018-02-14 17:33:06 +01:00
|
|
|
|
2019-05-09 13:45:46 +02:00
|
|
|
from . import global_report
|
2023-02-20 19:04:34 -08:00
|
|
|
from .colored_print import (print_message, use_message_colors)
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
|
2025-01-30 04:15:27 +01:00
|
|
|
def blend_list(dirpath, blocklist):
    """Yield the paths of all .blend files found under *dirpath*.

    A file is skipped when its name matches (via ``re.match``) any regular
    expression in *blocklist*.
    """
    import re

    for root, _dirs, names in os.walk(dirpath):
        for name in names:
            if not name.lower().endswith(".blend"):
                continue
            if any(re.match(pattern, name) for pattern in blocklist):
                continue
            yield os.path.join(root, name)
|
2018-02-14 17:33:06 +01:00
|
|
|
|
2018-07-03 06:47:49 +02:00
|
|
|
|
2018-02-14 17:33:06 +01:00
|
|
|
def test_get_name(filepath):
|
|
|
|
|
filename = os.path.basename(filepath)
|
|
|
|
|
return os.path.splitext(filename)[0]
|
|
|
|
|
|
2018-07-03 06:47:49 +02:00
|
|
|
|
2025-02-20 17:18:59 +01:00
|
|
|
def test_get_images(output_dir, filepath, testname, reference_dir, reference_override_dir):
|
2018-02-14 17:33:06 +01:00
|
|
|
dirpath = os.path.dirname(filepath)
|
|
|
|
|
|
2018-05-18 16:40:41 +02:00
|
|
|
old_dirpath = os.path.join(dirpath, reference_dir)
|
2018-02-14 17:33:06 +01:00
|
|
|
old_img = os.path.join(old_dirpath, testname + ".png")
|
2022-08-01 10:57:18 +02:00
|
|
|
if reference_override_dir:
|
|
|
|
|
override_dirpath = os.path.join(dirpath, reference_override_dir)
|
|
|
|
|
override_img = os.path.join(override_dirpath, testname + ".png")
|
|
|
|
|
if os.path.exists(override_img):
|
|
|
|
|
old_dirpath = override_dirpath
|
|
|
|
|
old_img = override_img
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
ref_dirpath = os.path.join(output_dir, os.path.basename(dirpath), "ref")
|
|
|
|
|
ref_img = os.path.join(ref_dirpath, testname + ".png")
|
2019-05-09 13:45:46 +02:00
|
|
|
os.makedirs(ref_dirpath, exist_ok=True)
|
2018-02-14 17:33:06 +01:00
|
|
|
if os.path.exists(old_img):
|
|
|
|
|
shutil.copy(old_img, ref_img)
|
|
|
|
|
|
|
|
|
|
new_dirpath = os.path.join(output_dir, os.path.basename(dirpath))
|
2019-05-09 13:45:46 +02:00
|
|
|
os.makedirs(new_dirpath, exist_ok=True)
|
2018-02-14 17:33:06 +01:00
|
|
|
new_img = os.path.join(new_dirpath, testname + ".png")
|
|
|
|
|
|
|
|
|
|
diff_dirpath = os.path.join(output_dir, os.path.basename(dirpath), "diff")
|
2019-05-09 13:45:46 +02:00
|
|
|
os.makedirs(diff_dirpath, exist_ok=True)
|
2024-01-25 10:04:16 +01:00
|
|
|
diff_color_img = os.path.join(diff_dirpath, testname + ".diff_color.png")
|
|
|
|
|
diff_alpha_img = os.path.join(diff_dirpath, testname + ".diff_alpha.png")
|
2018-02-14 17:33:06 +01:00
|
|
|
|
2024-01-25 10:04:16 +01:00
|
|
|
return old_img, ref_img, new_img, diff_color_img, diff_alpha_img
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
|
2025-02-20 17:18:59 +01:00
|
|
|
class TestResult:
    """State of a single render test: its name, image paths and error status."""

    def __init__(self, report, filepath, name):
        out_dir = report.output_dir
        self.filepath = filepath
        self.name = name
        # None means passed; otherwise "CRASH", "NO OUTPUT" or "VERIFY".
        self.error = None
        # Blender writes the render to tmp_<name>0001.png in the output dir.
        self.tmp_out_img_base = os.path.join(out_dir, "tmp_" + name)
        self.tmp_out_img = self.tmp_out_img_base + '0001.png'
        images = test_get_images(
            out_dir, filepath, name, report.reference_dir, report.reference_override_dir)
        (self.old_img, self.ref_img, self.new_img,
         self.diff_color_img, self.diff_alpha_img) = images
|
|
|
|
|
|
|
|
|
|
|
2025-02-21 19:14:17 +01:00
|
|
|
def diff_output(test, oiiotool, fail_threshold, fail_percent, verbose, update):
    """Compare a test's temporary render against its reference with oiiotool.

    Copies the temporary output to the report's "new" image, runs an
    oiiotool ``--diff`` with the given thresholds, optionally updates the
    reference images when *update* is set, and generates color/alpha diff
    images.  Sets ``test.error`` to "VERIFY" on mismatch (or missing
    reference without update), None otherwise, and returns *test*.

    Runs in a multiprocessing worker, so it is a module-level function.
    """
    # Create reference render directory.
    old_dirpath = os.path.dirname(test.old_img)
    os.makedirs(old_dirpath, exist_ok=True)

    # Copy temporary to new image.
    if os.path.exists(test.new_img):
        os.remove(test.new_img)
    if os.path.exists(test.tmp_out_img):
        shutil.copy(test.tmp_out_img, test.new_img)

    if os.path.exists(test.ref_img):
        # Diff images test with threshold.
        command = (
            oiiotool,
            test.ref_img,
            test.tmp_out_img,
            "--fail", str(fail_threshold),
            "--failpercent", str(fail_percent),
            "--diff",
        )
        try:
            subprocess.check_output(command)
            failed = False
        except subprocess.CalledProcessError as e:
            if verbose:
                print_message(e.output.decode("utf-8", 'ignore'))
            failed = e.returncode != 0
    else:
        # No reference image: flag for verification unless we may create it.
        if not update:
            test.error = "VERIFY"
            return test

        failed = True

    if failed and update:
        # Update reference image if requested.
        shutil.copy(test.new_img, test.ref_img)
        shutil.copy(test.new_img, test.old_img)
        failed = False

    # Generate color diff image.
    command = (
        oiiotool,
        test.ref_img,
        "--ch", "R,G,B",
        test.tmp_out_img,
        "--ch", "R,G,B",
        "--sub",
        "--abs",
        "--mulc", "16",
        "-o", test.diff_color_img,
    )
    try:
        subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        if verbose:
            print_message(e.output.decode("utf-8", 'ignore'))

    # Generate alpha diff image.
    command = (
        oiiotool,
        test.ref_img,
        "--ch", "A",
        test.tmp_out_img,
        "--ch", "A",
        "--sub",
        "--abs",
        "--mulc", "16",
        "-o", test.diff_alpha_img,
    )
    try:
        subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        # Fix: was `self.verbose`, but this is a module-level function with no
        # `self` in scope — it raised NameError whenever this command failed.
        if verbose:
            msg = e.output.decode("utf-8", 'ignore')
            for line in msg.splitlines():
                # Ignore warnings for images without alpha channel.
                if "--ch: Unknown channel name" not in line:
                    print_message(line)

    if failed:
        test.error = "VERIFY"
    else:
        test.error = None

    return test
|
|
|
|
|
|
|
|
|
|
|
2018-02-14 17:33:06 +01:00
|
|
|
class Report:
    """Generates a HTML report for a directory of render/screenshot tests.

    Accumulates per-test HTML rows, writes them as intermediate ``.data``
    files and renders ``report.html`` (plus ``compare.html`` when a
    comparison engine is configured).
    """
    __slots__ = (
        # Report identity and output locations.
        'title',
        'engine_name',
        'output_dir',
        'global_dir',
        # Where reference (ground truth) images are found, relative to each
        # test file's directory.
        'reference_dir',
        'reference_override_dir',
        # Path to the oiiotool executable used for image comparison.
        'oiiotool',
        # Rendering/comparison settings.
        'pixelated',
        'fail_threshold',
        'fail_percent',
        'verbose',
        'update',
        # Accumulated HTML table rows.
        'failed_tests',
        'passed_tests',
        'compare_tests',
        'compare_engine',
        # Regular expressions of .blend filenames to skip.
        'blocklist',
    )
|
2018-02-14 17:33:06 +01:00
|
|
|
|
2025-01-30 04:15:27 +01:00
|
|
|
def __init__(self, title, output_dir, oiiotool, variation=None, blocklist=[]):
|
2018-02-14 17:33:06 +01:00
|
|
|
self.title = title
|
2025-01-31 22:31:43 +01:00
|
|
|
|
|
|
|
|
# Normalize the path to avoid output_dir and global_dir being the same when a directory
|
|
|
|
|
# ends with a trailing slash.
|
|
|
|
|
self.output_dir = os.path.normpath(output_dir)
|
|
|
|
|
self.global_dir = os.path.dirname(self.output_dir)
|
|
|
|
|
|
2018-05-18 16:40:41 +02:00
|
|
|
self.reference_dir = 'reference_renders'
|
2022-08-01 10:57:18 +02:00
|
|
|
self.reference_override_dir = None
|
2024-01-25 10:04:16 +01:00
|
|
|
self.oiiotool = oiiotool
|
2020-10-28 16:19:03 +01:00
|
|
|
self.compare_engine = None
|
2020-08-05 11:20:25 +02:00
|
|
|
self.fail_threshold = 0.016
|
|
|
|
|
self.fail_percent = 1
|
2024-01-25 18:55:28 +01:00
|
|
|
self.engine_name = self.title.lower().replace(" ", "_")
|
2024-07-18 17:31:52 +02:00
|
|
|
self.blocklist = [] if os.getenv('BLENDER_TEST_IGNORE_BLOCKLIST') is not None else blocklist
|
2020-10-28 16:19:03 +01:00
|
|
|
|
2025-01-30 04:15:27 +01:00
|
|
|
if variation:
|
|
|
|
|
self.title = self._engine_title(title, variation)
|
|
|
|
|
self.output_dir = self._engine_path(self.output_dir, variation.lower())
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
self.pixelated = False
|
|
|
|
|
self.verbose = os.environ.get("BLENDER_VERBOSE") is not None
|
|
|
|
|
self.update = os.getenv('BLENDER_TEST_UPDATE') is not None
|
|
|
|
|
|
|
|
|
|
if os.environ.get("BLENDER_TEST_COLOR") is not None:
|
2023-02-20 19:04:34 -08:00
|
|
|
use_message_colors()
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
self.failed_tests = ""
|
|
|
|
|
self.passed_tests = ""
|
2018-05-18 17:52:46 +02:00
|
|
|
self.compare_tests = ""
|
2018-02-14 17:33:06 +01:00
|
|
|
|
2019-05-09 13:45:46 +02:00
|
|
|
os.makedirs(output_dir, exist_ok=True)
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
    def set_pixelated(self, pixelated):
        """Enable/disable nearest-neighbor scaling of report images (crisp pixels)."""
        self.pixelated = pixelated
|
|
|
|
|
|
2020-08-05 11:20:25 +02:00
|
|
|
    def set_fail_threshold(self, threshold):
        """Set the per-pixel difference threshold passed to oiiotool --fail."""
        self.fail_threshold = threshold
|
|
|
|
|
|
2022-11-10 10:29:00 +01:00
|
|
|
    def set_fail_percent(self, percent):
        """Set the percentage of failing pixels tolerated (oiiotool --failpercent)."""
        self.fail_percent = percent
|
|
|
|
|
|
2018-05-18 16:40:41 +02:00
|
|
|
    def set_reference_dir(self, reference_dir):
        """Set the reference image directory name (relative to each test file)."""
        self.reference_dir = reference_dir
|
|
|
|
|
|
2022-08-01 10:57:18 +02:00
|
|
|
    def set_reference_override_dir(self, reference_override_dir):
        """Set a directory whose reference images take precedence when present."""
        self.reference_override_dir = reference_override_dir
|
|
|
|
|
|
2025-01-30 04:15:27 +01:00
|
|
|
    def set_compare_engine(self, other_engine, other_variation=None):
        """Enable side-by-side comparison against another engine (and variation)."""
        self.compare_engine = (other_engine, other_variation)
|
2018-05-18 17:52:46 +02:00
|
|
|
|
2024-01-25 18:55:28 +01:00
|
|
|
    def set_engine_name(self, engine_name):
        """Override the ctest-facing engine name (shown in the update command hint)."""
        self.engine_name = engine_name
|
|
|
|
|
|
2024-01-29 15:39:14 +01:00
|
|
|
    def run(self, dirpath, blender, arguments_cb, batch=False, fail_silently=False):
        """Run all tests under *dirpath* with *blender* and write the report.

        :param arguments_cb: Callback building per-file command-line arguments.
        :param batch: Chain multiple test files into one Blender invocation.
        :param fail_silently: Record non-crash failures without failing the run.
        :return: True when all tests passed (or failed only silently).
        """
        # Run tests and output report.
        dirname = os.path.basename(dirpath)
        ok = self._run_all_tests(dirname, dirpath, blender, arguments_cb, batch, fail_silently)
        self._write_data(dirname)
        self._write_html()
        if self.compare_engine:
            self._write_html(comparison=True)
        return ok
|
|
|
|
|
|
2018-05-18 17:52:46 +02:00
|
|
|
def _write_data(self, dirname):
|
2018-02-14 17:33:06 +01:00
|
|
|
# Write intermediate data for single test.
|
|
|
|
|
outdir = os.path.join(self.output_dir, dirname)
|
2019-05-09 13:45:46 +02:00
|
|
|
os.makedirs(outdir, exist_ok=True)
|
2018-02-14 17:33:06 +01:00
|
|
|
|
|
|
|
|
filepath = os.path.join(outdir, "failed.data")
|
|
|
|
|
pathlib.Path(filepath).write_text(self.failed_tests)
|
|
|
|
|
|
|
|
|
|
filepath = os.path.join(outdir, "passed.data")
|
|
|
|
|
pathlib.Path(filepath).write_text(self.passed_tests)
|
|
|
|
|
|
2020-10-28 16:19:03 +01:00
|
|
|
if self.compare_engine:
|
2018-05-18 17:52:46 +02:00
|
|
|
filepath = os.path.join(outdir, "compare.data")
|
|
|
|
|
pathlib.Path(filepath).write_text(self.compare_tests)
|
|
|
|
|
|
2019-06-25 19:48:14 +02:00
|
|
|
def _navigation_item(self, title, href, active):
|
|
|
|
|
if active:
|
|
|
|
|
return """<li class="breadcrumb-item active" aria-current="page">%s</li>""" % title
|
|
|
|
|
else:
|
|
|
|
|
return """<li class="breadcrumb-item"><a href="%s">%s</a></li>""" % (href, title)
|
|
|
|
|
|
2025-01-30 04:15:27 +01:00
|
|
|
def _engine_title(self, engine, variation):
|
|
|
|
|
if variation:
|
|
|
|
|
return engine.title() + ' ' + variation
|
2020-10-28 16:19:03 +01:00
|
|
|
else:
|
|
|
|
|
return engine.title()
|
|
|
|
|
|
2025-01-30 04:15:27 +01:00
|
|
|
def _engine_path(self, path, variation):
|
|
|
|
|
if variation:
|
2025-02-03 04:16:27 +01:00
|
|
|
variation = variation.replace(' ', '_')
|
2025-01-30 04:15:27 +01:00
|
|
|
return os.path.join(path, variation.lower())
|
2020-10-28 16:19:03 +01:00
|
|
|
else:
|
|
|
|
|
return path
|
|
|
|
|
|
2019-06-25 19:48:14 +02:00
|
|
|
def _navigation_html(self, comparison):
|
|
|
|
|
html = """<nav aria-label="breadcrumb"><ol class="breadcrumb">"""
|
2020-10-28 16:19:03 +01:00
|
|
|
base_path = os.path.relpath(self.global_dir, self.output_dir)
|
|
|
|
|
global_report_path = os.path.join(base_path, "report.html")
|
|
|
|
|
html += self._navigation_item("Test Reports", global_report_path, False)
|
2019-06-25 19:48:14 +02:00
|
|
|
html += self._navigation_item(self.title, "report.html", not comparison)
|
2020-10-28 16:19:03 +01:00
|
|
|
if self.compare_engine:
|
|
|
|
|
compare_title = "Compare with %s" % self._engine_title(*self.compare_engine)
|
2019-06-25 19:48:14 +02:00
|
|
|
html += self._navigation_item(compare_title, "compare.html", comparison)
|
|
|
|
|
html += """</ol></nav>"""
|
|
|
|
|
|
|
|
|
|
return html
|
|
|
|
|
|
2018-07-03 06:58:34 +02:00
|
|
|
    def _write_html(self, comparison=False):
        """Assemble and write the full HTML report page.

        Reads the intermediate ``failed.data``/``passed.data`` (or
        ``compare.data`` for *comparison*) files from every test directory,
        wraps them in the page template and writes ``report.html`` or
        ``compare.html`` into the output directory.  Also registers the
        result with the global report index.
        """
        # Gather intermediate data for all tests.
        if comparison:
            failed_data = []
            passed_data = sorted(glob.glob(os.path.join(self.output_dir, "*/compare.data")))
        else:
            failed_data = sorted(glob.glob(os.path.join(self.output_dir, "*/failed.data")))
            passed_data = sorted(glob.glob(os.path.join(self.output_dir, "*/passed.data")))

        failed_tests = ""
        passed_tests = ""

        for filename in failed_data:
            filepath = os.path.join(self.output_dir, filename)
            failed_tests += pathlib.Path(filepath).read_text()
        for filename in passed_data:
            filepath = os.path.join(self.output_dir, filename)
            passed_tests += pathlib.Path(filepath).read_text()

        # Failing rows first so they are visible at the top of the table.
        tests_html = failed_tests + passed_tests

        # Write html for all tests.
        if self.pixelated:
            image_rendering = 'pixelated'
        else:
            image_rendering = 'auto'

        # Navigation
        menu = self._navigation_html(comparison)

        failed = len(failed_tests) > 0
        if failed:
            # Banner explaining how to regenerate reference images.
            message = """<div class="alert alert-danger" role="alert">"""
            message += """<p>Run this command to regenerate reference (ground truth) images:</p>"""
            message += """<p><tt>BLENDER_TEST_UPDATE=1 ctest -R %s</tt></p>""" % self.engine_name
            message += """<p>This then happens for new and failing tests; reference images of """ \
                       """passing test cases will not be updated. Be sure to commit the new reference """ \
                       """images to the tests/data git submodule afterwards.</p>"""
            message += """</div>"""
        else:
            message = ""

        if comparison:
            title = self.title + " Test Compare"
            engine_self = self.title
            engine_other = self._engine_title(*self.compare_engine)
            columns_html = "<tr><th>Name</th><th>%s</th><th>%s</th>" % (engine_self, engine_other)
        else:
            title = self.title + " Test Report"
            columns_html = "<tr><th>Name</th><th>New</th><th>Reference</th><th>Diff Color</th><th>Diff Alpha</th>"

        html = f"""
<html>
<head>
    <title>{title}</title>
    <style>
        div.page_container {{
            text-align: center;
        }}
        div.page_container div {{
            text-align: left;
        }}
        div.page_content {{
            display: inline-block;
        }}
        img {{ image-rendering: {image_rendering}; width: 256px; background-color: #000; }}
        img.render {{
            background-color: #fff;
            background-image:
              -moz-linear-gradient(45deg, #eee 25%, transparent 25%),
              -moz-linear-gradient(-45deg, #eee 25%, transparent 25%),
              -moz-linear-gradient(45deg, transparent 75%, #eee 75%),
              -moz-linear-gradient(-45deg, transparent 75%, #eee 75%);
            background-image:
              -webkit-gradient(linear, 0 100%, 100% 0, color-stop(.25, #eee), color-stop(.25, transparent)),
              -webkit-gradient(linear, 0 0, 100% 100%, color-stop(.25, #eee), color-stop(.25, transparent)),
              -webkit-gradient(linear, 0 100%, 100% 0, color-stop(.75, transparent), color-stop(.75, #eee)),
              -webkit-gradient(linear, 0 0, 100% 100%, color-stop(.75, transparent), color-stop(.75, #eee));

            -moz-background-size:50px 50px;
            background-size:50px 50px;
            -webkit-background-size:50px 51px; /* Override value for silly webkit. */

            background-position:0 0, 25px 0, 25px -25px, 0px 25px;
        }}
        table td:first-child {{ width: 256px; }}
        p {{ margin-bottom: 0.5rem; }}
    </style>
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T" crossorigin="anonymous">
</head>
<body>
    <div class="page_container"><div class="page_content">
        <br/>
        <h1>{title}</h1>
        {menu}
        {message}
        <table class="table table-striped">
            <thead class="thead-dark">
                {columns_html}
            </thead>
            {tests_html}
        </table>
        <br/>
    </div></div>
</body>
</html>
"""

        filename = "report.html" if not comparison else "compare.html"
        filepath = os.path.join(self.output_dir, filename)
        pathlib.Path(filepath).write_text(html)

        print_message("Report saved to: " + pathlib.Path(filepath).as_uri())

        # Update global report
        if not comparison:
            global_failed = failed if not comparison else None
            global_report.add(self.global_dir, "Render", self.title, filepath, global_failed)
|
2019-05-09 13:45:46 +02:00
|
|
|
|
2018-02-14 17:33:06 +01:00
|
|
|
def _relative_url(self, filepath):
|
|
|
|
|
relpath = os.path.relpath(filepath, self.output_dir)
|
|
|
|
|
return pathlib.Path(relpath).as_posix()
|
|
|
|
|
|
2025-02-20 17:18:59 +01:00
|
|
|
    def _write_test_html(self, test_category, test_result):
        """Append the HTML table row(s) for one finished test.

        Adds a row to ``failed_tests`` or ``passed_tests`` and, when a
        comparison engine is configured, a matching row to ``compare_tests``.
        """
        name = test_result.name.replace('_', ' ')

        status = test_result.error if test_result.error else ""
        # Highlight failing rows with Bootstrap's danger styling.
        tr_style = """ class="table-danger" """ if test_result.error else ""

        new_url = self._relative_url(test_result.new_img)
        ref_url = self._relative_url(test_result.ref_img)
        diff_color_url = self._relative_url(test_result.diff_color_img)
        diff_alpha_url = self._relative_url(test_result.diff_alpha_img)

        # Hovering over the new/reference images swaps them for easy comparison.
        test_html = f"""
            <tr{tr_style}>
                <td><b>{name}</b><br/>{test_category}<br/>{status}</td>
                <td><img src="{new_url}" onmouseover="this.src='{ref_url}';" onmouseout="this.src='{new_url}';" class="render"></td>
                <td><img src="{ref_url}" onmouseover="this.src='{new_url}';" onmouseout="this.src='{ref_url}';" class="render"></td>
                <td><img src="{diff_color_url}"></td>
                <td><img src="{diff_alpha_url}"></td>
            </tr>"""

        if test_result.error:
            self.failed_tests += test_html
        else:
            self.passed_tests += test_html

        if self.compare_engine:
            # Reference column points at the other engine's "new" image.
            base_path = os.path.relpath(self.global_dir, self.output_dir)
            ref_url = os.path.join(base_path, self._engine_path(*self.compare_engine), new_url)

            test_html = """
            <tr{tr_style}>
                <td><b>{name}</b><br/>{testname}<br/>{status}</td>
                <td><img src="{new_url}" onmouseover="this.src='{ref_url}';" onmouseout="this.src='{new_url}';" class="render"></td>
                <td><img src="{ref_url}" onmouseover="this.src='{new_url}';" onmouseout="this.src='{ref_url}';" class="render"></td>
            </tr>""" . format(tr_style=tr_style,
                              name=name,
                              testname=test_result.name,
                              status=status,
                              new_url=new_url,
                              ref_url=ref_url)

            self.compare_tests += test_html
|
|
|
|
|
|
2024-08-14 17:00:48 +02:00
|
|
|
def _get_render_arguments(self, arguments_cb, filepath, base_output_filepath):
|
|
|
|
|
# Each render test can override this method to provide extra functionality.
|
|
|
|
|
# See Cycles render tests for an example.
|
|
|
|
|
# Do not delete.
|
|
|
|
|
return arguments_cb(filepath, base_output_filepath)
|
|
|
|
|
|
2025-01-30 04:15:27 +01:00
|
|
|
def _get_arguments_suffix(self):
|
|
|
|
|
# Get command line arguments that need to be provided after all file-specific ones.
|
|
|
|
|
# For example the Cycles render device argument needs to be added at the end of
|
|
|
|
|
# the argument list, otherwise tests can't be batched together.
|
|
|
|
|
#
|
|
|
|
|
# Each render test is supposed to override this method.
|
|
|
|
|
return []
|
|
|
|
|
|
2025-02-20 17:18:59 +01:00
|
|
|
def _get_filepath_tests(self, filepath):
|
|
|
|
|
list_filepath = filepath.replace('.blend', '_permutations.txt')
|
|
|
|
|
if os.path.exists(list_filepath):
|
|
|
|
|
with open(list_filepath, 'r') as file:
|
|
|
|
|
return [TestResult(self, filepath, testname.rstrip('\n')) for testname in file]
|
|
|
|
|
else:
|
|
|
|
|
testname = test_get_name(filepath)
|
|
|
|
|
return [TestResult(self, filepath, testname)]
|
|
|
|
|
|
2019-05-10 23:00:35 +02:00
|
|
|
    def _run_tests(self, filepaths, blender, arguments_cb, batch):
        """Render *filepaths* with Blender and diff the results.

        Runs multiple tests in a single Blender process since startup can be
        a significant factor. In case of crashes, re-runs the remaining tests
        in a fresh process. Image diffs are computed in parallel via a
        multiprocessing pool. Returns a list of TestResult objects.
        """
        verbose = os.environ.get("BLENDER_VERBOSE") is not None

        # Copy so we can pop entries as they complete without mutating the input.
        remaining_filepaths = filepaths[:]
        test_results = []

        while len(remaining_filepaths) > 0:
            command = [blender]
            running_tests = []

            # Construct output filepaths and command to run
            for filepath in remaining_filepaths:
                running_tests.append(filepath)

                testname = test_get_name(filepath)
                print_message(testname, 'SUCCESS', 'RUN')

                base_output_filepath = os.path.join(self.output_dir, "tmp_" + testname)
                # Blender's frame-numbered output filename for frame 1.
                output_filepath = base_output_filepath + '0001.png'
                if os.path.exists(output_filepath):
                    os.remove(output_filepath)

                command.extend(self._get_render_arguments(arguments_cb, filepath, base_output_filepath))

                # Only chain multiple commands for batch
                if not batch:
                    break

            command.extend(self._get_arguments_suffix())

            # Run process
            crash = False
            output = None
            try:
                completed_process = subprocess.run(command, stdout=subprocess.PIPE)
                if completed_process.returncode != 0:
                    crash = True
                output = completed_process.stdout
            except Exception:
                # Failure to launch Blender at all also counts as a crash.
                crash = True

            if verbose:
                print(" ".join(command))
            if (verbose or crash) and output:
                print(output.decode("utf-8", 'ignore'))

            tests_to_check = []

            # Detect missing filepaths and consider those errors
            for filepath in running_tests:
                remaining_filepaths.pop(0)
                file_crashed = False
                for test in self._get_filepath_tests(filepath):
                    if not os.path.exists(test.tmp_out_img) or os.path.getsize(test.tmp_out_img) == 0:
                        if crash:
                            # In case of crash, stop after missing files and re-render remaining
                            test.error = "CRASH"
                            test_results.append(test)
                            file_crashed = True
                            break
                        else:
                            test.error = "NO OUTPUT"
                            test_results.append(test)
                    else:
                        tests_to_check.append(test)
                if file_crashed:
                    break

            # Diff the rendered images in parallel across all cores.
            pool = multiprocessing.Pool(multiprocessing.cpu_count())
            test_results.extend(pool.starmap(diff_output,
                                             [(test, self.oiiotool, self.fail_threshold, self.fail_percent, self.verbose, self.update)
                                              for test in tests_to_check]))
            pool.close()

            # Report per-test status and clean up temporary render output.
            for test in test_results:
                if test.error == "CRASH":
                    print_message("Crash running Blender")
                    print_message(test.name, 'FAILURE', 'FAILED')
                elif test.error == "NO OUTPUT":
                    print_message("No render result file found")
                    print_message(test.tmp_out_img, 'FAILURE', 'FAILED')
                elif test.error == "VERIFY":
                    print_message("Render result is different from reference image")
                    print_message(test.name, 'FAILURE', 'FAILED')
                else:
                    print_message(test.name, 'SUCCESS', 'OK')

                if os.path.exists(test.tmp_out_img):
                    os.remove(test.tmp_out_img)

        return test_results
|
2019-05-10 23:00:35 +02:00
|
|
|
|
2024-01-29 15:39:14 +01:00
|
|
|
    def _run_all_tests(self, dirname, dirpath, blender, arguments_cb, batch, fail_silently):
        """Run every test under *dirpath* and record results as HTML rows.

        Returns True when no hard failures occurred (silently failed tests
        do not fail the run).
        """
        passed_tests = []
        failed_tests = []
        silently_failed_tests = []
        all_files = list(blend_list(dirpath, self.blocklist))
        all_files.sort()
        # Check without the blocklist: an entirely empty directory is an error
        # even when the blocklist filtered everything out.
        if not list(blend_list(dirpath, [])):
            print_message("No .blend files found in '{}'!".format(dirpath), 'FAILURE', 'FAILED')
            return False

        print_message("Running {} tests from 1 test case." .
                      format(len(all_files)),
                      'SUCCESS', "==========")
        time_start = time.time()
        test_results = self._run_tests(all_files, blender, arguments_cb, batch)
        for test in test_results:
            if test.error:
                # Fatal setup errors abort the whole run.
                if test.error == "NO_ENGINE":
                    return False
                elif test.error == "NO_START":
                    return False

                if fail_silently and test.error != 'CRASH':
                    silently_failed_tests.append(test.name)
                else:
                    failed_tests.append(test.name)
            else:
                passed_tests.append(test.name)
            self._write_test_html(dirname, test)
        time_end = time.time()
        elapsed_ms = int((time_end - time_start) * 1000)
        print_message("")
        print_message("{} tests from 1 test case ran. ({} ms total)" .
                      format(len(all_files), elapsed_ms),
                      'SUCCESS', "==========")
        print_message("{} tests." .
                      format(len(passed_tests)),
                      'SUCCESS', 'PASSED')
        all_failed_tests = silently_failed_tests + failed_tests
        if all_failed_tests:
            print_message("{} tests, listed below:" .
                          format(len(all_failed_tests)),
                          'FAILURE', 'FAILED')
            all_failed_tests.sort()
            for test in all_failed_tests:
                print_message("{}" . format(test), 'FAILURE', "FAILED")

        # Only hard failures fail the run; silent failures were tolerated.
        return not bool(failed_tests)
|