diff --git a/intern/cycles/bvh/hiprt.cpp b/intern/cycles/bvh/hiprt.cpp
index 363d53b3a8a..5aa57a6d40f 100644
--- a/intern/cycles/bvh/hiprt.cpp
+++ b/intern/cycles/bvh/hiprt.cpp
@@ -30,12 +30,10 @@ BVHHIPRT::BVHHIPRT(const BVHParams &params,
 
 BVHHIPRT::~BVHHIPRT()
 {
-  HIPRTDevice *hiprt_device = static_cast<HIPRTDevice *>(device);
-  hiprtContext hiprt_context = hiprt_device->get_hiprt_context();
   custom_primitive_bound.free();
   triangle_index.free();
   vertex_data.free();
-  hiprtDestroyGeometry(hiprt_context, hiprt_geom);
+  device->release_bvh(this);
 }
 
 CCL_NAMESPACE_END
diff --git a/intern/cycles/device/hiprt/device_impl.cpp b/intern/cycles/device/hiprt/device_impl.cpp
index 7ffa202b408..c251a8d09cf 100644
--- a/intern/cycles/device/hiprt/device_impl.cpp
+++ b/intern/cycles/device/hiprt/device_impl.cpp
@@ -114,6 +114,7 @@ HIPRTDevice::HIPRTDevice(const DeviceInfo &info,
 HIPRTDevice::~HIPRTDevice()
 {
   HIPContextScope scope(this);
+  free_bvh_memory_delayed();
   user_instance_id.free();
   prim_visibility.free();
   hiprt_blas_ptr.free();
@@ -1150,12 +1151,33 @@ hiprtScene HIPRTDevice::build_tlas(BVHHIPRT *bvh,
   return scene;
 }
 
+void HIPRTDevice::free_bvh_memory_delayed()
+{
+  thread_scoped_lock lock(hiprt_mutex);
+  if (stale_bvh.size()) {
+    for (int bvh_index = 0; bvh_index < stale_bvh.size(); bvh_index++) {
+      hiprtGeometry hiprt_geom = stale_bvh[bvh_index];
+      hiprtDestroyGeometry(hiprt_context, hiprt_geom);
+      hiprt_geom = nullptr;
+    }
+    stale_bvh.clear();
+  }
+}
+
+void HIPRTDevice::release_bvh(BVH *bvh)
+{
+  BVHHIPRT *current_bvh = static_cast<BVHHIPRT *>(bvh);
+  thread_scoped_lock lock(hiprt_mutex);
+  /* Tracks BLAS pointers whose BVH destructors have been called. */
+  stale_bvh.push_back(current_bvh->hiprt_geom);
+}
+
 void HIPRTDevice::build_bvh(BVH *bvh, Progress &progress, bool refit)
 {
   if (have_error()) {
     return;
   }
-
+  free_bvh_memory_delayed();
   progress.set_substatus("Building HIPRT acceleration structure");
 
   hiprtBuildOptions options;
@@ -1173,6 +1195,7 @@ void HIPRTDevice::build_bvh(BVH *bvh, Progress &progress, bool refit)
 
   if (scene) {
     hiprtDestroyScene(hiprt_context, scene);
+    scene = nullptr;
   }
   scene = build_tlas(bvh_rt, bvh_rt->objects, options, refit);
 }
diff --git a/intern/cycles/device/hiprt/device_impl.h b/intern/cycles/device/hiprt/device_impl.h
index a6465d787dd..02f44b65485 100644
--- a/intern/cycles/device/hiprt/device_impl.h
+++ b/intern/cycles/device/hiprt/device_impl.h
@@ -46,6 +46,8 @@ class HIPRTDevice : public HIPDevice {
 
   void build_bvh(BVH *bvh, Progress &progress, bool refit) override;
 
+  void release_bvh(BVH *bvh) override;
+
   hiprtContext get_hiprt_context()
   {
     return hiprt_context;
@@ -65,7 +67,7 @@ class HIPRTDevice : public HIPDevice {
                         const vector<Object *> &objects,
                         hiprtBuildOptions options,
                         bool refit);
-
+  void free_bvh_memory_delayed();
   hiprtContext hiprt_context;
   hiprtScene scene;
   hiprtFuncTable functions_table;
@@ -74,6 +76,13 @@ class HIPRTDevice : public HIPDevice {
   size_t scratch_buffer_size;
   device_vector<char> scratch_buffer;
 
+  /* This vector tracks the hiprt_geom members of BVHHIPRT so that device memory
+   * can be managed/released in HIPRTDevice.
+   * Even if synchronization occurs before memory release, a GPU job may still
+   * launch between synchronization and release, potentially causing the GPU
+   * to access unmapped memory. */
+  vector<hiprtGeometry> stale_bvh;
+
   /* Is this scene using motion blur? Note there might exist motion data even if
    * motion blur is disabled, for render passes. */
   bool use_motion_blur = false;
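
For context on the pattern this patch applies: instead of destroying hiprtGeometry handles directly in the BVHHIPRT destructor (where a still-running GPU job could reference them), destruction only records the handle under a mutex, and the device frees the recorded handles at known-safe points (before the next BVH build and in the device destructor). The standalone sketch below illustrates that deferred-release idea only; all names and types in it are hypothetical stand-ins, not the Cycles or HIPRT API.

// Minimal sketch of deferring GPU resource destruction to safe points.
// Hypothetical names; "delete" stands in for hiprtDestroyGeometry().
#include <cstdio>
#include <mutex>
#include <vector>

struct GeometryHandle {
  int id;
};

class Device {
 public:
  /* May be called from any thread (e.g. a BVH destructor): only record the handle. */
  void release_geometry(GeometryHandle *geom)
  {
    std::lock_guard<std::mutex> lock(mutex_);
    stale_.push_back(geom);
  }

  /* Called where no GPU work referencing the handles can be in flight,
   * e.g. right before rebuilding acceleration structures. */
  void free_stale_geometry()
  {
    std::lock_guard<std::mutex> lock(mutex_);
    for (GeometryHandle *geom : stale_) {
      std::printf("destroying geometry %d\n", geom->id);
      delete geom;
    }
    stale_.clear();
  }

  ~Device()
  {
    /* Ensure anything still queued is freed before the device goes away. */
    free_stale_geometry();
  }

 private:
  std::mutex mutex_;
  std::vector<GeometryHandle *> stale_;
};

int main()
{
  Device device;
  device.release_geometry(new GeometryHandle{1});
  device.release_geometry(new GeometryHandle{2});
  device.free_stale_geometry(); /* safe point, analogous to build_bvh() */
  return 0;
}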