Add 'system python' validation for some py scripts.
The goal of this test is to try to import some critical py scripts with the system python of the building machine. The main target is to ensure that these py scripts remain usable by all buildbot machines, as some of them are using fairly outdated python versions. Current status: * Scripts in `build_files` and `docs` are checked. * Some python scripts in `build_files` were 'reverted' to be compatible with the oldest currently required python version (3.6). * A few scripts are excluded from the test, mostly because they use Blender's `bpy` module, which means they are only intended to be run with Blender's python anyway. * The test is only enabled for Linux buildbots currently, as they use the oldest Python by far. Notes: * Some more scripts are likely to be moved around in the future. * Whether these tests need to be enabled on windows or macos platforms remains an open question. Pull Request: https://projects.blender.org/blender/blender/pulls/130746
This commit is contained in:
committed by
Bastien Montagne
parent
2040b8e864
commit
2c9ab53273
@@ -878,6 +878,24 @@ mark_as_advanced(WITH_TESTS_SINGLE_BINARY)
|
|||||||
set(TEST_PYTHON_EXE "" CACHE PATH "Python executable to run unit tests")
|
set(TEST_PYTHON_EXE "" CACHE PATH "Python executable to run unit tests")
|
||||||
mark_as_advanced(TEST_PYTHON_EXE)
|
mark_as_advanced(TEST_PYTHON_EXE)
|
||||||
|
|
||||||
|
|
||||||
|
# System python tests.
|
||||||
|
option(WITH_SYSTEM_PYTHON_TESTS "\
|
||||||
|
Enable tests validating some build-related scripts against the 'system' version of Python \
|
||||||
|
(buildbots currently can use significantly older versions of Python than Blender's)"
|
||||||
|
OFF
|
||||||
|
)
|
||||||
|
mark_as_advanced(WITH_SYSTEM_PYTHON_TESTS)
|
||||||
|
# We could use `find_package (Python3 COMPONENTS Interpreter)` to set that value automatically.
|
||||||
|
# However, on some buildbots this will give the default python version of the current virtual environment,
|
||||||
|
# which may differ from the OS default python version.
|
||||||
|
# And it would set that global 'python3 exec path' CMake value for all CMake scripts, which could have
|
||||||
|
# unexpected and dangerous side effects.
|
||||||
|
# So this has to be set explicitly for all builders.
|
||||||
|
set(TEST_SYSTEM_PYTHON_EXE "" CACHE PATH "Python executable used to run 'system python' tests")
|
||||||
|
mark_as_advanced(TEST_SYSTEM_PYTHON_EXE)
|
||||||
|
|
||||||
|
|
||||||
# Documentation
|
# Documentation
|
||||||
if(UNIX AND NOT APPLE)
|
if(UNIX AND NOT APPLE)
|
||||||
option(WITH_DOC_MANPAGE "Create a manual page (Unix manpage)" OFF)
|
option(WITH_DOC_MANPAGE "Create a manual page (Unix manpage)" OFF)
|
||||||
|
|||||||
@@ -18,3 +18,8 @@ set(HIPRT_COMPILER_PARALLEL_JOBS 6 CACHE STRING "" FORCE)
|
|||||||
set(SYCL_OFFLINE_COMPILER_PARALLEL_JOBS 6 CACHE STRING "" FORCE)
|
set(SYCL_OFFLINE_COMPILER_PARALLEL_JOBS 6 CACHE STRING "" FORCE)
|
||||||
|
|
||||||
set(WITH_LINUX_OFFICIAL_RELEASE_TESTS ON CACHE BOOL "" FORCE)
|
set(WITH_LINUX_OFFICIAL_RELEASE_TESTS ON CACHE BOOL "" FORCE)
|
||||||
|
|
||||||
|
# Validate that some python scripts in our `build_files` and `docs` directories
|
||||||
|
# can be used with the builder's system python.
|
||||||
|
set(WITH_SYSTEM_PYTHON_TESTS ON CACHE BOOL "" FORCE)
|
||||||
|
set(TEST_SYSTEM_PYTHON_EXE "/usr/bin/python3.6" CACHE PATH "" FORCE)
|
||||||
|
|||||||
@@ -8,6 +8,10 @@
|
|||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
from typing import (
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
cmakelists_file = sys.argv[-1]
|
cmakelists_file = sys.argv[-1]
|
||||||
|
|
||||||
|
|
||||||
@@ -22,7 +26,7 @@ def count_backslashes_before_pos(file_data: str, pos: int) -> int:
|
|||||||
return slash_count
|
return slash_count
|
||||||
|
|
||||||
|
|
||||||
def extract_cmake_string_at_pos(file_data: str, pos_beg: int) -> str | None:
|
def extract_cmake_string_at_pos(file_data: str, pos_beg: int) -> Union[str, None]:
|
||||||
assert file_data[pos_beg - 1] == '"'
|
assert file_data[pos_beg - 1] == '"'
|
||||||
|
|
||||||
pos = pos_beg
|
pos = pos_beg
|
||||||
|
|||||||
@@ -27,7 +27,11 @@ __all__ = (
|
|||||||
"init",
|
"init",
|
||||||
)
|
)
|
||||||
|
|
||||||
from collections.abc import (
|
from typing import (
|
||||||
|
List,
|
||||||
|
Tuple,
|
||||||
|
Union,
|
||||||
|
# Proxies for `collections.abc`
|
||||||
Callable,
|
Callable,
|
||||||
Iterator,
|
Iterator,
|
||||||
)
|
)
|
||||||
@@ -82,7 +86,7 @@ def init(cmake_path: str) -> bool:
|
|||||||
|
|
||||||
def source_list(
|
def source_list(
|
||||||
path: str,
|
path: str,
|
||||||
filename_check: Callable[[str], bool] | None = None,
|
filename_check: Union[Callable[[str], bool], None] = None,
|
||||||
) -> Iterator[str]:
|
) -> Iterator[str]:
|
||||||
for dirpath, dirnames, filenames in os.walk(path):
|
for dirpath, dirnames, filenames in os.walk(path):
|
||||||
# skip '.git'
|
# skip '.git'
|
||||||
@@ -129,8 +133,8 @@ def is_project_file(filename: str) -> bool:
|
|||||||
|
|
||||||
|
|
||||||
def cmake_advanced_info() -> (
|
def cmake_advanced_info() -> (
|
||||||
tuple[list[str], list[tuple[str, str]]] |
|
Union[Tuple[List[str], List[Tuple[str, str]]],
|
||||||
tuple[None, None]
|
Tuple[None, None]]
|
||||||
):
|
):
|
||||||
""" Extract includes and defines from cmake.
|
""" Extract includes and defines from cmake.
|
||||||
"""
|
"""
|
||||||
@@ -212,12 +216,15 @@ def cmake_advanced_info() -> (
|
|||||||
return includes, defines
|
return includes, defines
|
||||||
|
|
||||||
|
|
||||||
def cmake_cache_var(var: str) -> str | None:
|
def cmake_cache_var(var: str) -> Union[str, None]:
|
||||||
|
def l_strip_gen(cache_file):
|
||||||
|
for l in cache_file:
|
||||||
|
yield l.strip()
|
||||||
|
|
||||||
with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8') as cache_file:
|
with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8') as cache_file:
|
||||||
lines = [
|
lines = [
|
||||||
l_strip for l in cache_file
|
l_strip for l_strip in l_strip_gen(cache_file)
|
||||||
if (l_strip := l.strip())
|
if l_strip and not l_strip.startswith(("//", "#"))
|
||||||
if not l_strip.startswith(("//", "#"))
|
|
||||||
]
|
]
|
||||||
|
|
||||||
for l in lines:
|
for l in lines:
|
||||||
@@ -226,7 +233,7 @@ def cmake_cache_var(var: str) -> str | None:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def cmake_compiler_defines() -> list[str] | None:
|
def cmake_compiler_defines() -> Union[List[str], None]:
|
||||||
compiler = cmake_cache_var("CMAKE_C_COMPILER") # could do CXX too
|
compiler = cmake_cache_var("CMAKE_C_COMPILER") # could do CXX too
|
||||||
|
|
||||||
if compiler is None:
|
if compiler is None:
|
||||||
@@ -248,5 +255,5 @@ def cmake_compiler_defines() -> list[str] | None:
|
|||||||
return lines
|
return lines
|
||||||
|
|
||||||
|
|
||||||
def project_name_get() -> str | None:
|
def project_name_get() -> Union[str, None]:
|
||||||
return cmake_cache_var("CMAKE_PROJECT_NAME")
|
return cmake_cache_var("CMAKE_PROJECT_NAME")
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
"build_info",
|
"build_info",
|
||||||
"SOURCE_DIR",
|
"SOURCE_DIR",
|
||||||
@@ -23,8 +24,10 @@ import subprocess
|
|||||||
from typing import (
|
from typing import (
|
||||||
Any,
|
Any,
|
||||||
IO,
|
IO,
|
||||||
)
|
List,
|
||||||
from collections.abc import (
|
Tuple,
|
||||||
|
Union,
|
||||||
|
# Proxies for `collections.abc`
|
||||||
Callable,
|
Callable,
|
||||||
Iterator,
|
Iterator,
|
||||||
Sequence,
|
Sequence,
|
||||||
@@ -56,7 +59,7 @@ def is_c_any(filename: str) -> bool:
|
|||||||
CMAKE_DIR = "."
|
CMAKE_DIR = "."
|
||||||
|
|
||||||
|
|
||||||
def cmake_cache_var_iter() -> Iterator[tuple[str, str, str]]:
|
def cmake_cache_var_iter() -> Iterator[Tuple[str, str, str]]:
|
||||||
import re
|
import re
|
||||||
re_cache = re.compile(r'([A-Za-z0-9_\-]+)?:?([A-Za-z0-9_\-]+)?=(.*)$')
|
re_cache = re.compile(r'([A-Za-z0-9_\-]+)?:?([A-Za-z0-9_\-]+)?=(.*)$')
|
||||||
with open(join(CMAKE_DIR, "CMakeCache.txt"), 'r', encoding='utf-8') as cache_file:
|
with open(join(CMAKE_DIR, "CMakeCache.txt"), 'r', encoding='utf-8') as cache_file:
|
||||||
@@ -67,7 +70,7 @@ def cmake_cache_var_iter() -> Iterator[tuple[str, str, str]]:
|
|||||||
yield (var, type_ or "", val)
|
yield (var, type_ or "", val)
|
||||||
|
|
||||||
|
|
||||||
def cmake_cache_var(var: str) -> str | None:
|
def cmake_cache_var(var: str) -> Union[str, None]:
|
||||||
for var_iter, _type_iter, value_iter in cmake_cache_var_iter():
|
for var_iter, _type_iter, value_iter in cmake_cache_var_iter():
|
||||||
if var == var_iter:
|
if var == var_iter:
|
||||||
return value_iter
|
return value_iter
|
||||||
@@ -82,7 +85,7 @@ def cmake_cache_var_or_exit(var: str) -> str:
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
def do_ignore(filepath: str, ignore_prefix_list: Sequence[str] | None) -> bool:
|
def do_ignore(filepath: str, ignore_prefix_list: Union[Sequence[str], None]) -> bool:
|
||||||
if ignore_prefix_list is None:
|
if ignore_prefix_list is None:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -90,7 +93,7 @@ def do_ignore(filepath: str, ignore_prefix_list: Sequence[str] | None) -> bool:
|
|||||||
return any([relpath.startswith(prefix) for prefix in ignore_prefix_list])
|
return any([relpath.startswith(prefix) for prefix in ignore_prefix_list])
|
||||||
|
|
||||||
|
|
||||||
def makefile_log() -> list[str]:
|
def makefile_log() -> List[str]:
|
||||||
import subprocess
|
import subprocess
|
||||||
import time
|
import time
|
||||||
|
|
||||||
@@ -130,8 +133,8 @@ def makefile_log() -> list[str]:
|
|||||||
def build_info(
|
def build_info(
|
||||||
use_c: bool = True,
|
use_c: bool = True,
|
||||||
use_cxx: bool = True,
|
use_cxx: bool = True,
|
||||||
ignore_prefix_list: list[str] | None = None,
|
ignore_prefix_list: Union[List[str], None] = None,
|
||||||
) -> list[tuple[str, list[str], list[str]]]:
|
) -> List[Tuple[str, List[str], List[str]]]:
|
||||||
makelog = makefile_log()
|
makelog = makefile_log()
|
||||||
|
|
||||||
source = []
|
source = []
|
||||||
@@ -149,7 +152,7 @@ def build_info(
|
|||||||
print("parsing make log ...")
|
print("parsing make log ...")
|
||||||
|
|
||||||
for line in makelog:
|
for line in makelog:
|
||||||
args_orig: str | list[str] = line.split()
|
args_orig: Union[str, List[str]] = line.split()
|
||||||
args = [fake_compiler if c in compilers else c for c in args_orig]
|
args = [fake_compiler if c in compilers else c for c in args_orig]
|
||||||
if args == args_orig:
|
if args == args_orig:
|
||||||
# No compilers in the command, skip.
|
# No compilers in the command, skip.
|
||||||
@@ -216,7 +219,7 @@ def build_defines_as_source() -> str:
|
|||||||
return stdout.read().strip().decode('ascii')
|
return stdout.read().strip().decode('ascii')
|
||||||
|
|
||||||
|
|
||||||
def build_defines_as_args() -> list[str]:
|
def build_defines_as_args() -> List[str]:
|
||||||
return [
|
return [
|
||||||
("-D" + "=".join(l.split(maxsplit=2)[1:]))
|
("-D" + "=".join(l.split(maxsplit=2)[1:]))
|
||||||
for l in build_defines_as_source().split("\n")
|
for l in build_defines_as_source().split("\n")
|
||||||
@@ -224,7 +227,7 @@ def build_defines_as_args() -> list[str]:
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def process_make_non_blocking(proc: subprocess.Popen[Any]) -> subprocess.Popen[Any]:
|
def process_make_non_blocking(proc: subprocess.Popen) -> subprocess.Popen:
|
||||||
import fcntl
|
import fcntl
|
||||||
for fh in (proc.stderr, proc.stdout):
|
for fh in (proc.stderr, proc.stdout):
|
||||||
if fh is None:
|
if fh is None:
|
||||||
@@ -238,11 +241,11 @@ def process_make_non_blocking(proc: subprocess.Popen[Any]) -> subprocess.Popen[A
|
|||||||
# could be moved elsewhere!, this just happens to be used by scripts that also
|
# could be moved elsewhere!, this just happens to be used by scripts that also
|
||||||
# use this module.
|
# use this module.
|
||||||
def queue_processes(
|
def queue_processes(
|
||||||
process_funcs: Sequence[tuple[Callable[..., subprocess.Popen[Any]], tuple[Any, ...]]],
|
process_funcs: Sequence[Tuple[Callable[..., subprocess.Popen], Tuple[Any, ...]]],
|
||||||
*,
|
*,
|
||||||
job_total: int = -1,
|
job_total: int = -1,
|
||||||
sleep: float = 0.1,
|
sleep: float = 0.1,
|
||||||
process_finalize: Callable[[subprocess.Popen[Any], bytes, bytes], int | None] | None = None,
|
process_finalize: Union[Callable[[subprocess.Popen, bytes, bytes], Union[int, None]], None] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
""" Takes a list of function arg pairs, each function must return a process
|
""" Takes a list of function arg pairs, each function must return a process
|
||||||
"""
|
"""
|
||||||
@@ -266,18 +269,21 @@ def queue_processes(
|
|||||||
|
|
||||||
if process_finalize is not None:
|
if process_finalize is not None:
|
||||||
def poll_and_finalize(
|
def poll_and_finalize(
|
||||||
p: subprocess.Popen[Any],
|
p: subprocess.Popen,
|
||||||
stdout: list[bytes],
|
stdout: List[bytes],
|
||||||
stderr: list[bytes],
|
stderr: List[bytes],
|
||||||
) -> int | None:
|
) -> Union[int, None]:
|
||||||
assert p.stdout is not None
|
assert p.stdout is not None
|
||||||
if data := p.stdout.read():
|
data = p.stdout.read()
|
||||||
|
if data:
|
||||||
stdout.append(data)
|
stdout.append(data)
|
||||||
assert p.stderr is not None
|
assert p.stderr is not None
|
||||||
if data := p.stderr.read():
|
data = p.stderr.read()
|
||||||
|
if data:
|
||||||
stderr.append(data)
|
stderr.append(data)
|
||||||
|
|
||||||
if (returncode := p.poll()) is not None:
|
returncode = p.poll()
|
||||||
|
if returncode is not None:
|
||||||
data_stdout, data_stderr = p.communicate()
|
data_stdout, data_stderr = p.communicate()
|
||||||
if data_stdout:
|
if data_stdout:
|
||||||
stdout.append(data_stdout)
|
stdout.append(data_stdout)
|
||||||
@@ -287,13 +293,13 @@ def queue_processes(
|
|||||||
return returncode
|
return returncode
|
||||||
else:
|
else:
|
||||||
def poll_and_finalize(
|
def poll_and_finalize(
|
||||||
p: subprocess.Popen[Any],
|
p: subprocess.Popen,
|
||||||
stdout: list[bytes],
|
stdout: List[bytes],
|
||||||
stderr: list[bytes],
|
stderr: List[bytes],
|
||||||
) -> int | None:
|
) -> Union[int, None]:
|
||||||
return p.poll()
|
return p.poll()
|
||||||
|
|
||||||
processes: list[tuple[subprocess.Popen[Any], list[bytes], list[bytes]]] = []
|
processes: List[Tuple[subprocess.Popen, List[bytes], List[bytes]]] = []
|
||||||
for func, args in process_funcs:
|
for func, args in process_funcs:
|
||||||
# wait until a thread is free
|
# wait until a thread is free
|
||||||
while 1:
|
while 1:
|
||||||
|
|||||||
@@ -37,7 +37,9 @@ import string
|
|||||||
import setuptools
|
import setuptools
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from collections.abc import (
|
from typing import (
|
||||||
|
Tuple,
|
||||||
|
# Proxies for `collections.abc`
|
||||||
Iterator,
|
Iterator,
|
||||||
Sequence,
|
Sequence,
|
||||||
)
|
)
|
||||||
@@ -95,7 +97,7 @@ def find_dominating_file(
|
|||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
# CMake Cache Access
|
# CMake Cache Access
|
||||||
|
|
||||||
def cmake_cache_var_iter(filepath_cmake_cache: str) -> Iterator[tuple[str, str, str]]:
|
def cmake_cache_var_iter(filepath_cmake_cache: str) -> Iterator[Tuple[str, str, str]]:
|
||||||
re_cache = re.compile(r"([A-Za-z0-9_\-]+)?:?([A-Za-z0-9_\-]+)?=(.*)$")
|
re_cache = re.compile(r"([A-Za-z0-9_\-]+)?:?([A-Za-z0-9_\-]+)?=(.*)$")
|
||||||
with open(filepath_cmake_cache, "r", encoding="utf-8") as cache_file:
|
with open(filepath_cmake_cache, "r", encoding="utf-8") as cache_file:
|
||||||
for l in cache_file:
|
for l in cache_file:
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ from pathlib import Path
|
|||||||
from typing import (
|
from typing import (
|
||||||
TextIO,
|
TextIO,
|
||||||
Any,
|
Any,
|
||||||
)
|
Union,
|
||||||
from collections.abc import (
|
# Proxies for `collections.abc`
|
||||||
Iterable,
|
Iterable,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -95,7 +95,7 @@ def manifest_path(tarball: Path) -> Path:
|
|||||||
return without_suffix.with_name(f"{name}-manifest.txt")
|
return without_suffix.with_name(f"{name}-manifest.txt")
|
||||||
|
|
||||||
|
|
||||||
def packages_path(current_directory: Path, cli_args: Any) -> Path | None:
|
def packages_path(current_directory: Path, cli_args: Any) -> Union[Path, None]:
|
||||||
if not cli_args.include_packages:
|
if not cli_args.include_packages:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -116,7 +116,7 @@ def create_manifest(
|
|||||||
version: make_utils.BlenderVersion,
|
version: make_utils.BlenderVersion,
|
||||||
outpath: Path,
|
outpath: Path,
|
||||||
blender_srcdir: Path,
|
blender_srcdir: Path,
|
||||||
packages_dir: Path | None,
|
packages_dir: Union[Path, None],
|
||||||
) -> None:
|
) -> None:
|
||||||
print(f'Building manifest of files: "{outpath}"...', end="", flush=True)
|
print(f'Building manifest of files: "{outpath}"...', end="", flush=True)
|
||||||
with outpath.open("w", encoding="utf-8") as outfile:
|
with outpath.open("w", encoding="utf-8") as outfile:
|
||||||
@@ -164,7 +164,7 @@ def create_tarball(
|
|||||||
tarball: Path,
|
tarball: Path,
|
||||||
manifest: Path,
|
manifest: Path,
|
||||||
blender_srcdir: Path,
|
blender_srcdir: Path,
|
||||||
packages_dir: Path | None,
|
packages_dir: Union[Path, None],
|
||||||
) -> None:
|
) -> None:
|
||||||
print(f'Creating archive: "{tarball}" ...', end="", flush=True)
|
print(f'Creating archive: "{tarball}" ...', end="", flush=True)
|
||||||
|
|
||||||
@@ -238,7 +238,7 @@ def git_ls_files(directory: Path = Path(".")) -> Iterable[Path]:
|
|||||||
yield path
|
yield path
|
||||||
|
|
||||||
|
|
||||||
def git_command(*cli_args: bytes | str | Path) -> Iterable[str]:
|
def git_command(*cli_args: Union[bytes, str, Path]) -> Iterable[str]:
|
||||||
"""Generator, yields lines of output from a Git command."""
|
"""Generator, yields lines of output from a Git command."""
|
||||||
command = ("git", *cli_args)
|
command = ("git", *cli_args)
|
||||||
|
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ import time
|
|||||||
|
|
||||||
from typing import (
|
from typing import (
|
||||||
TextIO,
|
TextIO,
|
||||||
|
Dict,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -31,7 +32,7 @@ def man_format(data: str) -> str:
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
def blender_extract_info(blender_bin: str) -> dict[str, str]:
|
def blender_extract_info(blender_bin: str) -> Dict[str, str]:
|
||||||
blender_env = {
|
blender_env = {
|
||||||
"ASAN_OPTIONS": (
|
"ASAN_OPTIONS": (
|
||||||
os.environ.get("ASAN_OPTIONS", "") +
|
os.environ.get("ASAN_OPTIONS", "") +
|
||||||
|
|||||||
@@ -21,6 +21,13 @@ file(MAKE_DIRECTORY ${TEST_OUT_DIR}/blendfile_io)
|
|||||||
# message(FATAL_ERROR "CMake test directory not found!")
|
# message(FATAL_ERROR "CMake test directory not found!")
|
||||||
# endif()
|
# endif()
|
||||||
|
|
||||||
|
if(WITH_SYSTEM_PYTHON_TESTS)
|
||||||
|
if(NOT EXISTS "${TEST_SYSTEM_PYTHON_EXE}")
|
||||||
|
message(ERROR "'System Python' tests requested but no valid system python path, set TEST_SYSTEM_PYTHON_EXE.")
|
||||||
|
set(WITH_SYSTEM_PYTHON_TESTS OFF)
|
||||||
|
endif()
|
||||||
|
endif()
|
||||||
|
|
||||||
# Run Blender command with parameters.
|
# Run Blender command with parameters.
|
||||||
function(add_blender_test_impl testname envvars_list exe)
|
function(add_blender_test_impl testname envvars_list exe)
|
||||||
add_test(
|
add_test(
|
||||||
@@ -116,6 +123,29 @@ function(add_render_test testname testscript)
|
|||||||
add_python_test(${testname} ${testscript} ${_args})
|
add_python_test(${testname} ${testscript} ${_args})
|
||||||
endfunction()
|
endfunction()
|
||||||
|
|
||||||
|
# Run Python script outside Blender, using system default Python3 interpreter,
|
||||||
|
# NOT the one specified in `TEST_PYTHON_EXE`.
|
||||||
|
function(add_system_python_test testname testscript)
|
||||||
|
if(NOT WITH_SYSTEM_PYTHON_TESTS)
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
add_test(
|
||||||
|
NAME ${testname}
|
||||||
|
COMMAND ${TEST_SYSTEM_PYTHON_EXE} ${testscript} ${ARGN}
|
||||||
|
WORKING_DIRECTORY $<TARGET_FILE_DIR:blender>
|
||||||
|
)
|
||||||
|
blender_test_set_envvars("${testname}" "")
|
||||||
|
endfunction()
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
# TESTS USING SYSTEM PYTHON
|
||||||
|
add_system_python_test(
|
||||||
|
script_validate_on_system_python
|
||||||
|
${CMAKE_CURRENT_LIST_DIR}/system_python/load_tool_scripts.py
|
||||||
|
--root-dir ${CMAKE_SOURCE_DIR}
|
||||||
|
)
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
# GENERAL PYTHON CORRECTNESS TESTS
|
# GENERAL PYTHON CORRECTNESS TESTS
|
||||||
add_blender_test(
|
add_blender_test(
|
||||||
|
|||||||
134
tests/python/system_python/load_tool_scripts.py
Normal file
134
tests/python/system_python/load_tool_scripts.py
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
# SPDX-FileCopyrightText: 2024 Blender Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||||
|
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import pathlib
|
||||||
|
import importlib.util
|
||||||
|
import contextlib
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
|
||||||
|
# For some reason, these two modules do not work with the 'file-path-based' import method used by this script.
|
||||||
|
# So 'pre-load' them here instead.
|
||||||
|
# They are _not used_ directly by this script.
|
||||||
|
import multiprocessing
|
||||||
|
import typing
|
||||||
|
|
||||||
|
|
||||||
|
# Allow-list of directories, relative to the given `--root-dir` argument.
# Only `.py` files found (recursively) under these directories are imported.
INCLUDED_DIRS = {
    "build_files",
    # Used to generate the manual and API documentation.
    "doc",
}

# Block-list of paths to python modules, relative to the given `--root-dir` argument.
# These files are skipped entirely by the import test.
EXCLUDED_FILE_PATHS = {
    # Require `bpy` module.
    "doc/python_api/sphinx_doc_gen.py",

    # XXX These scripts execute on import! bad, need to be fixed or removed.
    # FIXME: Should be reasonably trivial to fix/cleanup for most of them.
    "doc/python_api/conf.py",
}
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def add_to_sys_path(syspath):
    """Temporarily prepend *syspath* to ``sys.path``.

    ``sys.modules`` is also swapped for a copy for the duration of the
    ``with`` body, so modules imported inside it do not permanently pollute
    the interpreter-wide module cache.
    """
    saved_path = sys.path
    saved_modules = sys.modules
    # Operate on copies so the originals can be restored verbatim.
    sys.modules = dict(saved_modules)
    sys.path = [str(syspath)] + list(saved_path)
    try:
        yield
    finally:
        sys.path = saved_path
        sys.modules = saved_modules
|
||||||
|
|
||||||
|
|
||||||
|
def import_module(file_path):
    """Import the python module stored at *file_path*.

    :arg file_path: Path to a ``.py`` file. A package ``__init__.py`` is
        accepted and imported under its directory name.
    :return: ``...`` if *file_path* is not a python module, ``True`` if it is
        a package, ``False`` otherwise.
    """
    file_path = pathlib.Path(file_path)
    if file_path.suffix != ".py":
        return ...
    file_name = file_path.name
    if not file_name:
        return ...
    is_package = file_name == "__init__.py"
    if is_package:
        # A package is imported under the name of its containing directory.
        assert (len(file_path.parts) >= 2)
        module_name = file_path.parts[-2]
    else:
        module_name = file_path.stem

    with add_to_sys_path(file_path.parent):
        # NOTE: `submodule_search_locations` expects a list of path strings,
        # not a single `Path` object.
        spec = importlib.util.spec_from_file_location(
            module_name, file_path, submodule_search_locations=[str(file_path.parent)])
        module = importlib.util.module_from_spec(spec)
        sys.modules[module_name] = module
        spec.loader.exec_module(module)

    # Return only the 'is a package' flag, matching the documented contract.
    # The caller (`import_modules`) relies on the truthiness of this value to
    # decide whether to recurse into sub-directories; returning the previous
    # `(module_name, is_package)` tuple (always truthy) made every directory
    # containing at least one module look like a package, which disabled
    # recursion into its sub-directories.
    return is_package
|
||||||
|
|
||||||
|
|
||||||
|
def import_modules(root_dir):
    """Try to import every non-excluded `.py` file found under the
    allow-listed directories of `root_dir`, printing a report for each
    failed import, and raising an exception at the end if any failed.
    """
    # Log which interpreter actually runs the test (the whole point of this
    # script is to validate scripts against a specific 'system' python).
    print("+++", sys.executable)

    included_directories = [os.path.join(root_dir, p) for p in INCLUDED_DIRS]
    excluded_file_paths = {os.path.join(root_dir, p) for p in EXCLUDED_FILE_PATHS}

    has_failures = False
    # Breadth-first walk over the allow-listed directory trees.
    directories = included_directories[:]
    while directories:
        path = directories.pop(0)
        sub_directories = []
        is_package = False
        with os.scandir(path) as it:
            for entry in it:
                if entry.is_dir():
                    # Skip hidden directories (e.g. `.git`).
                    if entry.name.startswith('.'):
                        continue
                    sub_directories.append(entry.path)
                    continue
                if not entry.is_file():
                    continue
                if not entry.name.endswith(".py"):
                    continue
                if entry.path in excluded_file_paths:
                    continue
                try:
                    # NOTE(review): only the truthiness of this return value is
                    # used below — confirm `import_module` returns a falsy value
                    # for regular (non-package) modules, otherwise sub-directory
                    # recursion is skipped after the first imported file.
                    is_current_package = import_module(entry.path)
                    is_package = is_package or is_current_package
                except SystemExit as e:
                    # Some scripts call `sys.exit` at import time; count it as
                    # a failure rather than letting it abort this test runner.
                    has_failures = True
                    print(f"+++ Failed to import {entry.path} (module called `sys.exit`), {e}")
                except Exception as e:
                    has_failures = True
                    print(f"+++ Failed to import {entry.path} ({e.__class__}), {e}")
                    traceback.print_tb(e.__traceback__)
                    print("\n\n")
        # Do not attempt to import individual modules of a package. For now assume that if the top-level package can
        # be imported, it is good enough. This may have to be revisited at some point though. Currently there are
        # no packages in target directories anyway.
        if not is_package:
            directories += sub_directories

    if has_failures:
        raise Exception("Some module imports failed")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point: parse the mandatory `--root-dir <path>` argument and run
    the system-python import checks.

    :raises Exception: When the `--root-dir` argument is missing or malformed.
    """
    # NOTE: `sys` is already imported at module level; the previous local
    # `import sys` here was redundant. Checking `len(sys.argv)` first avoids
    # a confusing `IndexError` when the script is run without arguments.
    if len(sys.argv) >= 3 and sys.argv[1] == "--root-dir":
        import_modules(root_dir=sys.argv[2])
    else:
        raise Exception("Missing --root-dir parameter")


if __name__ == "__main__":
    main()
|
||||||
Reference in New Issue
Block a user