/* SPDX-FileCopyrightText: 2023 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */

#include "BLI_cache_mutex.hh"
#include "BLI_task.hh"

namespace blender {
|
|
|
|
|
|
|
|
|
|
void CacheMutex::ensure(const FunctionRef<void()> compute_cache)
|
|
|
|
|
{
|
|
|
|
|
if (cache_valid_.load(std::memory_order_acquire)) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
std::scoped_lock lock{mutex_};
|
|
|
|
|
/* Double checked lock. */
|
|
|
|
|
if (cache_valid_.load(std::memory_order_relaxed)) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
/* Use task isolation because a mutex is locked and the cache computation might use
|
|
|
|
|
* multi-threading. */
|
|
|
|
|
threading::isolate_task(compute_cache);
|
|
|
|
|
|
|
|
|
|
cache_valid_.store(true, std::memory_order_release);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
} // namespace blender
|