USD: Animated camera property import

Imports the following animated data from UsdGeomCameras:
- Focal length
- DOF distance
- DOF fstop
- Clip start, Clip end
- Tilt shift x, Tilt shift y
- Aperture sizes (with caveats)

Implementation-wise, it's more complicated than I'd like, due to needing
to read in both non-animated and animated data for each property, and
because I've tried to reduce the duplication of the various transforms we
have to do on each value — e.g. scaling values by "tenths of scene units",
or extracting the USD clipping range from a single GfVec2f into 2
separate properties, and 2 separate fcurves, in Blender. The current
approach was the best I could come up with so far.

Aperture sizes remain problematic for import, both with and without
animation data, because Blender selects the largest sensor dimension to
base downstream calculations on, while USD has no concept that strictly
dictates which dimension to use. Additionally, changing the sensor size
will impact the Tilt values as well. This means that if the Aperture
sizes are animated, we must also animate the tilt values, which leads to
more fcurves being created than perhaps expected.

The `projection` attribute (perspective and orthographic) remains
unchanged (non animated only) due to differences in how USD<>Blender
interoperate with the Orthographic projection method. Note: Blender only
exports perspective cameras due to the same reason.

Pull Request: https://projects.blender.org/blender/blender/pulls/137487
This commit is contained in:
Jesse Yurkovich
2025-04-25 19:57:12 +02:00
committed by Jesse Yurkovich
parent 90644b30b2
commit ba60868477
7 changed files with 374 additions and 76 deletions

View File

@@ -5,7 +5,11 @@
#include "usd_armature_utils.hh"
#include "usd_utils.hh"
#include "ANIM_action.hh"
#include "ANIM_fcurve.hh"
#include "BKE_armature.hh"
#include "BKE_fcurve.hh"
#include "BKE_modifier.hh"
#include "BLI_listbase.h"
@@ -19,6 +23,29 @@
namespace blender::io::usd {
/* Utility: create a new fcurve, sized for `sample_count` keys, and add it as a
 * channel to the given channel-bag. */
FCurve *create_fcurve(blender::animrig::Channelbag &channelbag,
                      const blender::animrig::FCurveDescriptor &fcurve_descriptor,
                      const int sample_count)
{
  FCurve *fcu = channelbag.fcurve_create_unique(nullptr, fcurve_descriptor);
  BLI_assert_msg(fcu != nullptr,
                 "The same F-Curve is being created twice, this is unexpected.");

  /* Pre-size the bezier key array so callers can fill samples by index. */
  BKE_fcurve_bezt_resize(fcu, sample_count);
  return fcu;
}
/* Utility: fill in a single fcurve sample at the provided index.
 *
 * The key uses linear interpolation and auto handles; handle recalculation is
 * left to the caller (see `BKE_fcurve_handles_recalc` usage). */
void set_fcurve_sample(FCurve *fcu, uint sample_index, const float frame, const float value)
{
  /* `sample_index` is unsigned, so only the upper bound needs checking; a
   * `>= 0` comparison would always be true and trips -Wtype-limits style
   * compiler warnings. */
  BLI_assert(sample_index < fcu->totvert);

  BezTriple &bez = fcu->bezt[sample_index];
  bez.vec[1][0] = frame; /* Key time. */
  bez.vec[1][1] = value; /* Key value. */
  bez.ipo = BEZT_IPO_LIN;
  bez.f1 = bez.f2 = bez.f3 = SELECT;
  bez.h1 = bez.h2 = HD_AUTO;
}
/* Recursively invoke the 'visitor' function on the given bone and its children. */
static void visit_bones(const Bone *bone, FunctionRef<void(const Bone *)> visitor)
{

View File

@@ -16,14 +16,28 @@
struct Bone;
struct Depsgraph;
struct FCurve;
struct ModifierData;
struct Object;
namespace blender::animrig {
class Channelbag;
struct FCurveDescriptor;
} // namespace blender::animrig
namespace blender::io::usd {
/* Custom Blender Primvar name used for storing armature bone lengths. */
inline const pxr::TfToken BlenderBoneLengths("blender:bone_lengths", pxr::TfToken::Immortal);
/* Utility: create new fcurve and add it as a channel to a group. */
FCurve *create_fcurve(blender::animrig::Channelbag &channelbag,
const blender::animrig::FCurveDescriptor &fcurve_descriptor,
const int sample_count);
/* Utility: fill in a single fcurve sample at the provided index. */
void set_fcurve_sample(FCurve *fcu, uint sample_index, const float frame, const float value);
/**
* Recursively invoke the given function on the given armature object's bones.
* This function is a no-op if the object isn't an armature.

View File

@@ -6,84 +6,335 @@
* Adapted from the Blender Alembic importer implementation. */
#include "usd_reader_camera.hh"
#include "usd_armature_utils.hh"
#include "DNA_camera_types.h"
#include "DNA_object_types.h"
#include "ANIM_action.hh"
#include "ANIM_animdata.hh"
#include "BLI_math_base.h"
#include "BKE_camera.h"
#include "BKE_fcurve.hh"
#include "BKE_object.hh"
#include "DNA_camera_types.h"
#include "DNA_object_types.h"
#include <pxr/usd/usdGeom/camera.h>
#include <array>
#include <optional>
namespace blender::io::usd {
namespace {
template<typename T> struct SampleData {
float frame;
T value;
};
template<typename T> struct AttributeData {
std::optional<T> initial_value = std::nullopt;
Vector<SampleData<T>> samples;
void reset()
{
initial_value = std::nullopt;
samples.clear();
}
};
/**
 * Read the attribute's value at `initial_time` and, if the attribute is time
 * varying, all of its time samples, into `data`.
 *
 * \return true when either an initial value or at least one time sample was
 *         successfully read.
 */
template<typename T>
bool read_attribute_values(const pxr::UsdAttribute &attr,
                           const pxr::UsdTimeCode initial_time,
                           AttributeData<T> &data)
{
  data.reset(); /* Clear any prior data. */

  T value{};
  if (attr.Get(&value, initial_time)) {
    data.initial_value = value;
  }
  else {
    data.initial_value = std::nullopt;
  }

  if (attr.ValueMightBeTimeVarying()) {
    std::vector<double> times;
    attr.GetTimeSamples(&times);

    data.samples.resize(times.size());
    /* Cast the count once to avoid signed/unsigned comparison in the loop. */
    const int64_t sample_count = int64_t(times.size());
    for (int64_t i = 0; i < sample_count; i++) {
      data.samples[i].frame = float(times[i]);
      /* Start from a known value so a failed Get() cannot leave the sample
       * uninitialized (resize() does not value-initialize trivial types). */
      data.samples[i].value = T{};
      attr.Get(&data.samples[i].value, times[i]);
    }
  }

  return data.initial_value.has_value() || !data.samples.is_empty();
}
/* Import USD aperture (sensor size) and aperture-offset (lens shift) data into
 * the given Camera, creating fcurves in `channelbag` for any attribute that is
 * animated.
 *
 * `tenth_unit_to_millimeters` converts USD camera units (tenths of a scene
 * unit) into Blender's millimeters. */
void read_aperture_data(Camera *camera,
                        const pxr::UsdAttribute &usd_horiz_aperture,
                        const pxr::UsdAttribute &usd_vert_aperture,
                        const pxr::UsdAttribute &usd_horiz_offset,
                        const pxr::UsdAttribute &usd_vert_offset,
                        const pxr::UsdTimeCode initial_time,
                        const double tenth_unit_to_millimeters,
                        animrig::Channelbag &channelbag)
{
  /* If the Aperture values are changing, that effects the sensor_fit and shift_x|y values as
   * well. We need to put animation data on all of them. */
  if (usd_horiz_aperture.ValueMightBeTimeVarying() || usd_vert_aperture.ValueMightBeTimeVarying())
  {
    /* Sample at the union of all 4 attributes' key times so that all 5 fcurves
     * share one consistent set of keys. */
    std::vector<double> times;
    pxr::UsdAttribute::GetUnionedTimeSamples(
        {usd_horiz_aperture, usd_vert_aperture, usd_horiz_offset, usd_vert_offset}, &times);

    std::array<FCurve *, 5> curves = {
        create_fcurve(channelbag, {"sensor_width", 0}, times.size()),
        create_fcurve(channelbag, {"sensor_height", 0}, times.size()),
        create_fcurve(channelbag, {"sensor_fit", 0}, times.size()),
        create_fcurve(channelbag, {"shift_x", 0}, times.size()),
        create_fcurve(channelbag, {"shift_y", 0}, times.size())};
    for (int64_t i = 0; i < times.size(); i++) {
      const double time = times[i];
      float horiz_aperture, vert_aperture;
      float shift_x, shift_y;
      usd_horiz_aperture.Get(&horiz_aperture, time);
      usd_vert_aperture.Get(&vert_aperture, time);
      usd_horiz_offset.Get(&shift_x, time);
      usd_vert_offset.Get(&shift_y, time);

      const float sensor_x = horiz_aperture * tenth_unit_to_millimeters;
      const float sensor_y = vert_aperture * tenth_unit_to_millimeters;
      /* Blender bases downstream calculations on the largest sensor dimension,
       * so the fit mode and normalization size follow the larger aperture. */
      const char sensor_fit = horiz_aperture >= vert_aperture ? CAMERA_SENSOR_FIT_HOR :
                                                                CAMERA_SENSOR_FIT_VERT;
      const float sensor_size = sensor_x >= sensor_y ? sensor_x : sensor_y;
      /* Blender shift values are expressed as a fraction of the sensor size. */
      shift_x = (shift_x * tenth_unit_to_millimeters) / sensor_size;
      shift_y = (shift_y * tenth_unit_to_millimeters) / sensor_size;

      set_fcurve_sample(curves[0], i, float(time), sensor_x);
      set_fcurve_sample(curves[1], i, float(time), sensor_y);
      set_fcurve_sample(curves[2], i, float(time), sensor_fit);
      set_fcurve_sample(curves[3], i, float(time), shift_x);
      set_fcurve_sample(curves[4], i, float(time), shift_y);
    }
  }
  else if (usd_horiz_offset.ValueMightBeTimeVarying() || usd_vert_offset.ValueMightBeTimeVarying())
  {
    /* Only the shift_x|y values are changing. Load in the initial values for aperture and
     * sensor_fit and use those when setting the shift_x|y curves. */
    float horiz_aperture, vert_aperture;
    usd_horiz_aperture.Get(&horiz_aperture, initial_time);
    usd_vert_aperture.Get(&vert_aperture, initial_time);

    camera->sensor_x = horiz_aperture * tenth_unit_to_millimeters;
    camera->sensor_y = vert_aperture * tenth_unit_to_millimeters;
    camera->sensor_fit = camera->sensor_x >= camera->sensor_y ? CAMERA_SENSOR_FIT_HOR :
                                                                CAMERA_SENSOR_FIT_VERT;
    const float sensor_size = camera->sensor_x >= camera->sensor_y ? camera->sensor_x :
                                                                     camera->sensor_y;
    /* Each offset attribute may have its own, differing, set of key times. */
    std::vector<double> times;
    if (usd_horiz_offset.GetTimeSamples(&times)) {
      FCurve *fcu = create_fcurve(channelbag, {"shift_x", 0}, times.size());
      for (int64_t i = 0; i < times.size(); i++) {
        const double time = times[i];
        float shift;
        usd_horiz_offset.Get(&shift, time);
        shift = (shift * tenth_unit_to_millimeters) / sensor_size;
        set_fcurve_sample(fcu, i, float(time), shift);
      }
    }
    if (usd_vert_offset.GetTimeSamples(&times)) {
      FCurve *fcu = create_fcurve(channelbag, {"shift_y", 0}, times.size());
      for (int64_t i = 0; i < times.size(); i++) {
        const double time = times[i];
        float shift;
        usd_vert_offset.Get(&shift, time);
        shift = (shift * tenth_unit_to_millimeters) / sensor_size;
        set_fcurve_sample(fcu, i, float(time), shift);
      }
    }
  }
  else {
    /* No animation data. Write the static values directly onto the camera. */
    float horiz_aperture, vert_aperture;
    float shift_x, shift_y;
    usd_horiz_aperture.Get(&horiz_aperture, initial_time);
    usd_vert_aperture.Get(&vert_aperture, initial_time);
    usd_horiz_offset.Get(&shift_x, initial_time);
    usd_vert_offset.Get(&shift_y, initial_time);

    camera->sensor_x = horiz_aperture * tenth_unit_to_millimeters;
    camera->sensor_y = vert_aperture * tenth_unit_to_millimeters;
    camera->sensor_fit = camera->sensor_x >= camera->sensor_y ? CAMERA_SENSOR_FIT_HOR :
                                                                CAMERA_SENSOR_FIT_VERT;
    const float sensor_size = camera->sensor_x >= camera->sensor_y ? camera->sensor_x :
                                                                     camera->sensor_y;
    camera->shiftx = (shift_x * tenth_unit_to_millimeters) / sensor_size;
    camera->shifty = (shift_y * tenth_unit_to_millimeters) / sensor_size;
  }
}
} // namespace
/* Create the Blender camera object for this reader. */
void USDCameraReader::create_object(Main *bmain)
{
  /* NOTE(review): the near-duplicate pairs of statements below look like the
   * removed (`bcam`) and added (`camera`) sides of a rendered diff fused
   * together without +/- markers — only one statement of each pair should
   * exist in the real file. Confirm against the repository before editing. */
  Camera *bcam = BKE_camera_add(bmain, name_.c_str());
  Camera *camera = BKE_camera_add(bmain, name_.c_str());

  object_ = BKE_object_add_only_object(bmain, OB_CAMERA, name_.c_str());
  object_->data = bcam;
  object_->data = camera;
}
void USDCameraReader::read_object_data(Main *bmain, const double motionSampleTime)
{
Camera *bcam = (Camera *)object_->data;
pxr::UsdAttribute usd_focal_length = cam_prim_.GetFocalLengthAttr();
pxr::UsdAttribute usd_focus_dist = cam_prim_.GetFocusDistanceAttr();
pxr::UsdAttribute usd_fstop = cam_prim_.GetFStopAttr();
pxr::UsdAttribute usd_clipping_range = cam_prim_.GetClippingRangeAttr();
pxr::UsdAttribute usd_horiz_aperture = cam_prim_.GetHorizontalApertureAttr();
pxr::UsdAttribute usd_vert_aperture = cam_prim_.GetVerticalApertureAttr();
pxr::UsdAttribute usd_horiz_offset = cam_prim_.GetHorizontalApertureOffsetAttr();
pxr::UsdAttribute usd_vert_offset = cam_prim_.GetVerticalApertureOffsetAttr();
pxr::VtValue val;
cam_prim_.GetFocalLengthAttr().Get(&val, motionSampleTime);
pxr::VtValue verApOffset;
cam_prim_.GetVerticalApertureOffsetAttr().Get(&verApOffset, motionSampleTime);
pxr::VtValue horApOffset;
cam_prim_.GetHorizontalApertureOffsetAttr().Get(&horApOffset, motionSampleTime);
pxr::VtValue clippingRangeVal;
cam_prim_.GetClippingRangeAttr().Get(&clippingRangeVal, motionSampleTime);
pxr::VtValue focalDistanceVal;
cam_prim_.GetFocusDistanceAttr().Get(&focalDistanceVal, motionSampleTime);
pxr::VtValue fstopVal;
cam_prim_.GetFStopAttr().Get(&fstopVal, motionSampleTime);
pxr::VtValue projectionVal;
cam_prim_.GetProjectionAttr().Get(&projectionVal, motionSampleTime);
pxr::VtValue verAp;
cam_prim_.GetVerticalApertureAttr().Get(&verAp, motionSampleTime);
pxr::VtValue horAp;
cam_prim_.GetHorizontalApertureAttr().Get(&horAp, motionSampleTime);
/* If any of the camera attributes are time varying, then prepare the animation data. */
const bool is_time_varying = usd_focal_length.ValueMightBeTimeVarying() ||
usd_focus_dist.ValueMightBeTimeVarying() ||
usd_fstop.ValueMightBeTimeVarying() ||
usd_clipping_range.ValueMightBeTimeVarying() ||
usd_horiz_aperture.ValueMightBeTimeVarying() ||
usd_vert_aperture.ValueMightBeTimeVarying() ||
usd_horiz_offset.ValueMightBeTimeVarying() ||
usd_vert_offset.ValueMightBeTimeVarying();
Camera *camera = (Camera *)object_->data;
bAction *action = nullptr;
if (is_time_varying) {
action = blender::animrig::id_action_ensure(bmain, &camera->id);
}
animrig::Channelbag empty{};
animrig::Channelbag &channelbag = is_time_varying ?
animrig::action_channelbag_ensure(*action, camera->id) :
empty;
/*
* For USD, these camera properties are in tenths of a world unit.
* In USD, some camera properties are in tenths of a world unit.
* https://graphics.pixar.com/usd/release/api/class_usd_geom_camera.html#UsdGeom_CameraUnits
*
* tenth_unit_to_meters = stage_meters_per_unit / 10
* tenth_unit_to_millimeters = 1000 * unit_to_tenth_unit
* tenth_unit_to_millimeters = 1000 * tenth_unit_to_meters
* = 100 * stage_meters_per_unit
*/
const double tenth_unit_to_millimeters = 100.0 * settings_->stage_meters_per_unit;
bcam->lens = val.Get<float>() * tenth_unit_to_millimeters;
bcam->sensor_x = horAp.Get<float>() * tenth_unit_to_millimeters;
bcam->sensor_y = verAp.Get<float>() * tenth_unit_to_millimeters;
auto scale_default = [](std::optional<float> input, double scale, float default_value) {
return input.has_value() ? input.value() * scale : default_value;
};
bcam->sensor_fit = bcam->sensor_x >= bcam->sensor_y ? CAMERA_SENSOR_FIT_HOR :
CAMERA_SENSOR_FIT_VERT;
AttributeData<float> data;
if (read_attribute_values(usd_focal_length, motionSampleTime, data)) {
camera->lens = scale_default(data.initial_value, tenth_unit_to_millimeters, camera->lens);
float sensor_size = bcam->sensor_x >= bcam->sensor_y ? bcam->sensor_x : bcam->sensor_y;
bcam->shiftx = (horApOffset.Get<float>() * tenth_unit_to_millimeters) / sensor_size;
bcam->shifty = (verApOffset.Get<float>() * tenth_unit_to_millimeters) / sensor_size;
if (!data.samples.is_empty()) {
FCurve *fcu = create_fcurve(channelbag, {"lens", 0}, data.samples.size());
for (int64_t i = 0; i < data.samples.size(); i++) {
const SampleData<float> &sample = data.samples[i];
set_fcurve_sample(fcu, i, sample.frame, sample.value * tenth_unit_to_millimeters);
}
}
}
bcam->type = (projectionVal.Get<pxr::TfToken>().GetString() == "perspective") ? CAM_PERSP :
CAM_ORTHO;
if (read_attribute_values(usd_focus_dist, motionSampleTime, data)) {
camera->dof.focus_distance = scale_default(
data.initial_value, this->settings_->scene_scale, camera->dof.focus_distance);
/* Call UncheckedGet() to silence compiler warnings.
* Clamp to 1e-6 matching range defined in RNA. */
bcam->clip_start = max_ff(
1e-6f, clippingRangeVal.UncheckedGet<pxr::GfVec2f>()[0] * settings_->scene_scale);
bcam->clip_end = clippingRangeVal.UncheckedGet<pxr::GfVec2f>()[1] * settings_->scene_scale;
if (!data.samples.is_empty()) {
FCurve *fcu = create_fcurve(channelbag, {"dof.focus_distance", 0}, data.samples.size());
for (int64_t i = 0; i < data.samples.size(); i++) {
const SampleData<float> &sample = data.samples[i];
set_fcurve_sample(fcu, i, sample.frame, sample.value * this->settings_->scene_scale);
}
}
}
bcam->dof.focus_distance = focalDistanceVal.Get<float>() * settings_->scene_scale;
bcam->dof.aperture_fstop = float(fstopVal.Get<float>());
if (read_attribute_values(usd_fstop, motionSampleTime, data)) {
camera->dof.aperture_fstop = scale_default(data.initial_value, 1, camera->dof.aperture_fstop);
if (bcam->type == CAM_ORTHO) {
bcam->ortho_scale = max_ff(verAp.Get<float>(), horAp.Get<float>());
if (!data.samples.is_empty()) {
FCurve *fcu = create_fcurve(channelbag, {"dof.aperture_fstop", 0}, data.samples.size());
for (int64_t i = 0; i < data.samples.size(); i++) {
const SampleData<float> &sample = data.samples[i];
set_fcurve_sample(fcu, i, sample.frame, sample.value);
}
}
}
AttributeData<pxr::GfVec2f> clip_data;
if (read_attribute_values(usd_clipping_range, motionSampleTime, clip_data)) {
auto clamp_clip = [this](pxr::GfVec2f value) {
/* Clamp the value for clip-start, matching the range defined in RNA. */
return pxr::GfVec2f(max_ff(1e-6f, value[0] * settings_->scene_scale),
value[1] * settings_->scene_scale);
};
pxr::GfVec2f clip_range = clip_data.initial_value.has_value() ?
clamp_clip(clip_data.initial_value.value()) :
pxr::GfVec2f(camera->clip_start, camera->clip_end);
camera->clip_start = clip_range[0];
camera->clip_end = clip_range[1];
if (!clip_data.samples.is_empty()) {
std::array<FCurve *, 2> curves = {
create_fcurve(channelbag, {"clip_start", 0}, clip_data.samples.size()),
create_fcurve(channelbag, {"clip_end", 0}, clip_data.samples.size())};
for (int64_t i = 0; i < clip_data.samples.size(); i++) {
const SampleData<pxr::GfVec2f> &sample = clip_data.samples[i];
clip_range = clamp_clip(sample.value);
set_fcurve_sample(curves[0], i, sample.frame, clip_range[0]);
set_fcurve_sample(curves[1], i, sample.frame, clip_range[1]);
}
}
}
/* Aperture data impacts sensor size, sensor fit, and shift values simultaneously. */
read_aperture_data(camera,
usd_horiz_aperture,
usd_vert_aperture,
usd_horiz_offset,
usd_vert_offset,
motionSampleTime,
tenth_unit_to_millimeters,
channelbag);
/* USD Orthographic cameras have very limited support. Support a basic, non-animated, translation
* between USD and Blender. */
pxr::TfToken projection;
cam_prim_.GetProjectionAttr().Get(&projection, motionSampleTime);
camera->type = (projection.GetString() == "perspective") ? CAM_PERSP : CAM_ORTHO;
if (camera->type == CAM_ORTHO) {
float horiz_aperture, vert_aperture;
usd_horiz_aperture.Get(&horiz_aperture, motionSampleTime);
usd_vert_aperture.Get(&vert_aperture, motionSampleTime);
camera->ortho_scale = max_ff(vert_aperture, horiz_aperture);
}
/* Enable depth of field when needed. */
const bool requires_dof = usd_focus_dist.IsAuthored() || usd_fstop.IsAuthored();
camera->dof.flag |= requires_dof ? CAM_DOF_ENABLED : 0;
/* Recalculate any animation curve handles. */
for (FCurve *fcu : channelbag.fcurves()) {
BKE_fcurve_handles_recalc(fcu);
}
USDXformReader::read_object_data(bmain, motionSampleTime);

View File

@@ -71,32 +71,6 @@ void resize_fcurve(FCurve *fcu, uint bezt_count)
BKE_fcurve_bezt_resize(fcu, bezt_count);
}
/* Utility: create curve at the given array index and add it as a channel to a group. */
/* NOTE(review): this local helper appears to duplicate the shared
 * `create_fcurve` utility elsewhere in the USD importer — presumably one of
 * the two should be removed; confirm against the rest of the file. */
FCurve *create_fcurve(blender::animrig::Channelbag &channelbag,
                      const blender::animrig::FCurveDescriptor &fcurve_descriptor,
                      const int totvert)
{
  /* The same descriptor must not be used twice within one channel-bag (see
   * the assert below). */
  FCurve *fcurve = channelbag.fcurve_create_unique(nullptr, fcurve_descriptor);
  BLI_assert_msg(fcurve, "The same F-Curve is being created twice, this is unexpected.");
  /* Allocate `totvert` bezier keys up front; callers fill them in by index. */
  BKE_fcurve_bezt_resize(fcurve, totvert);
  return fcurve;
}
/* Utility: add curve sample.
 * Writes one key directly into the pre-sized `bezt` array; `bezt_index` must
 * be within the size the fcurve was created with. */
void add_bezt(FCurve *fcu,
              uint bezt_index,
              const float frame,
              const float value,
              const eBezTriple_Interpolation ipo = BEZT_IPO_LIN)
{
  BezTriple &bez = fcu->bezt[bezt_index];
  bez.vec[1][0] = frame; /* Key time. */
  bez.vec[1][1] = value; /* Key value. */
  bez.ipo = ipo; /* use default interpolation mode here... */
  bez.f1 = bez.f2 = bez.f3 = SELECT;
  bez.h1 = bez.h2 = HD_AUTO; /* Auto handles. */
}
/**
* Import a USD skeleton animation as an action on the given armature object.
* This assumes bones have already been created on the armature.
@@ -114,6 +88,8 @@ void import_skeleton_curves(Main *bmain,
const blender::Map<pxr::TfToken, std::string> &joint_to_bone_map,
ReportList *reports)
{
using namespace blender::io::usd;
if (!(bmain && arm_obj && skel_query)) {
return;
}
@@ -285,7 +261,7 @@ void import_skeleton_curves(Main *bmain,
break;
}
if (FCurve *fcu = fcurves[k]) {
add_bezt(fcu, bezt_index, frame, t[j]);
set_fcurve_sample(fcu, bezt_index, frame, t[j]);
}
}
@@ -297,10 +273,10 @@ void import_skeleton_curves(Main *bmain,
}
if (FCurve *fcu = fcurves[k]) {
if (j == 0) {
add_bezt(fcu, bezt_index, frame, re);
set_fcurve_sample(fcu, bezt_index, frame, re);
}
else {
add_bezt(fcu, bezt_index, frame, im[j - 1]);
set_fcurve_sample(fcu, bezt_index, frame, im[j - 1]);
}
}
}
@@ -312,7 +288,7 @@ void import_skeleton_curves(Main *bmain,
break;
}
if (FCurve *fcu = fcurves[k]) {
add_bezt(fcu, bezt_index, frame, s[j]);
set_fcurve_sample(fcu, bezt_index, frame, s[j]);
}
}
}
@@ -657,7 +633,7 @@ void import_blendshapes(Main *bmain,
Span<float> weights = Span(usd_weights.cdata(), usd_weights.size());
for (int wi = 0; wi < weights.size(); ++wi) {
if (curves[wi] != nullptr) {
add_bezt(curves[wi], bezt_index, frame, weights[wi]);
set_fcurve_sample(curves[wi], bezt_index, frame, weights[wi]);
}
}

View File

@@ -1136,6 +1136,7 @@ if(WITH_USD AND TEST_SRC_DIR_EXISTS)
--python ${CMAKE_CURRENT_LIST_DIR}/bl_usd_import_test.py
--
--testdir "${TEST_SRC_DIR}/usd"
--outdir "${TEST_OUT_DIR}/io_usd"
)
endif()

View File

@@ -1847,6 +1847,34 @@ class USDImportTest(AbstractUSDTest):
bpy.utils.unregister_class(ImportMtlxTextureUSDHook)
class USDImportComparisonTest(unittest.TestCase):
    """Comparison tests: import every USD file under ``testdir/compare`` into
    an empty .blend and check the result against stored references via the
    ``io_report`` module.

    NOTE: the original source had its indentation stripped (diff rendering);
    this reconstructs the conventional Python structure.
    """

    @classmethod
    def setUpClass(cls):
        # Paths come from the module-level argparse results.
        cls.testdir = args.testdir
        cls.output_dir = args.outdir

    def test_import_usd(self):
        comparisondir = self.testdir.joinpath("compare")
        input_files = sorted(pathlib.Path(comparisondir).glob("*.usd*"))

        self.passed_tests = []
        self.failed_tests = []
        self.updated_tests = []

        from modules import io_report
        report = io_report.Report(
            "USD Import", self.output_dir, comparisondir, comparisondir.joinpath("reference"))

        for input_file in input_files:
            with self.subTest(pathlib.Path(input_file).stem):
                # Start from a clean file so imports do not accumulate.
                bpy.ops.wm.open_mainfile(filepath=str(self.testdir / "empty.blend"))
                ok = report.import_and_check(
                    input_file, lambda filepath, params: bpy.ops.wm.usd_import(
                        filepath=str(input_file), import_subdiv=True, **params))
                if not ok:
                    self.fail(f"{input_file.stem} import result does not match expectations")

        report.finish("io_usd_import")
class GetPrimMapUsdImportHook(bpy.types.USDHook):
bl_idname = "get_prim_map_usd_import_hook"
bl_label = "Get Prim Map Usd Import Hook"
@@ -1962,6 +1990,7 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument('--testdir', required=True, type=pathlib.Path)
parser.add_argument('--outdir', required=True, type=pathlib.Path)
args, remaining = parser.parse_known_args(argv)
unittest.main(argv=remaining, verbosity=0)