Update refs to sgct and ghoul from master

This commit is contained in:
Gene Payne
2024-03-28 09:52:17 -06:00
34 changed files with 266 additions and 103 deletions

View File

@@ -4,6 +4,10 @@
asset.require("./base_blank")
-- We load the SDSS dataset first as that is the one that takes the longest, so the
-- earlier we start, the sooner the loading is done
asset.require("scene/digitaluniverse/sdss")
-- Specifying which other assets should be loaded in this scene
asset.require("scene/solarsystem/sun/sun")
asset.require("scene/solarsystem/sun/glare")
@@ -61,7 +65,6 @@ asset.require("scene/digitaluniverse/openclusters")
asset.require("scene/digitaluniverse/planetarynebulae")
asset.require("scene/digitaluniverse/pulsars")
asset.require("scene/digitaluniverse/quasars")
asset.require("scene/digitaluniverse/sdss")
asset.require("scene/digitaluniverse/starlabels")
asset.require("scene/digitaluniverse/starorbits")
asset.require("scene/digitaluniverse/stars")

View File

@@ -1,3 +1,4 @@
asset.require("./sdss")
asset.require("./2dF")
asset.require("./2mass")
asset.require("./6dF")
@@ -26,7 +27,6 @@ asset.require("./openclusters")
asset.require("./planetarynebulae")
asset.require("./pulsars")
asset.require("./quasars")
asset.require("./sdss")
asset.require("./starlabels")
asset.require("./starorbits")
asset.require("./stars")

View File

@@ -48,19 +48,6 @@ class LabelsComponent : public properties::PropertyOwner, public Fadeable {
public:
explicit LabelsComponent(const ghoul::Dictionary& dictionary);
/**
* Create a labels component from an already loaded dataset. That dataset should have
* a comment per point to be used for the labels.
*
* \param dictionary A dictionary with the other information used for constructing
* the dataset
* \param dataset The dataset to create the labelset from, including xyz position and
* a string to be used for the text.
* \param unit The unit to use when interpreting the point information in the dataset
*/
explicit LabelsComponent(const ghoul::Dictionary& dictionary,
const dataloader::Dataset& dataset, DistanceUnit unit);
~LabelsComponent() override = default;
dataloader::Labelset& labelSet();
@@ -68,6 +55,18 @@ public:
void initialize();
/**
* Create the labels from an already loaded dataset. That dataset should have a comment
* per point to be used for the labels.
*
* This function should be called before the labels are initialized
*
* \param dataset The dataset to create the labelset from, including xyz position and
* a string to be used for the text.
* \param unit The unit to use when interpreting the point information in the dataset
*/
void loadLabelsFromDataset(const dataloader::Dataset& dataset, DistanceUnit unit);
void loadLabels();
bool isReady() const;

View File

@@ -27,6 +27,7 @@
#include <ghoul/logging/log.h>
#include <ghoul/misc/profiling.h>
#include <chrono>
#include <mutex>
#include <string_view>
@@ -124,7 +125,7 @@ private:
/// A mutex to ensure thread-safety since the logging and the removal of expired
/// entries can occur on different threads
mutable std::mutex _mutex;
mutable TracyLockable(std::mutex, _mutex);
};
} // namespace openspace

View File

@@ -586,6 +586,8 @@ RenderablePointCloud::RenderablePointCloud(const ghoul::Dictionary& dictionary)
, _colorSettings(dictionary)
, _sizeSettings(dictionary)
{
ZoneScoped;
const Parameters p = codegen::bake<Parameters>(dictionary);
addProperty(Fadeable::_opacity);
@@ -697,38 +699,19 @@ RenderablePointCloud::RenderablePointCloud(const ghoul::Dictionary& dictionary)
});
}
if (_hasDataFile) {
bool useCaching = p.useCaching.value_or(true);
if (useCaching) {
_dataset = dataloader::data::loadFileWithCache(_dataFile, _dataMapping);
}
else {
_dataset = dataloader::data::loadFile(_dataFile, _dataMapping);
}
_nDataPoints = static_cast<unsigned int>(_dataset.entries.size());
_useCaching = p.useCaching.value_or(true);
// If no scale exponent was specified, compute one that will at least show the
// points based on the scale of the positions in the dataset
if (!p.sizeSettings.has_value() || !p.sizeSettings->scaleExponent.has_value()) {
double dist = _dataset.maxPositionComponent * toMeter(_unit);
if (dist > 0.0) {
float exponent = static_cast<float>(std::log10(dist));
// Reduce the actually used exponent a little bit, as just using the
// logarithm as is leads to very large points
_sizeSettings.scaleExponent = 0.9f * exponent;
}
}
// If no scale exponent was specified, compute one that will at least show the
// points based on the scale of the positions in the dataset
if (!p.sizeSettings.has_value() || !p.sizeSettings->scaleExponent.has_value()) {
_shouldComputeScaleExponent = true;
}
if (p.labels.has_value()) {
if (!p.labels->hasKey("File") && _hasDataFile) {
// Load the labelset from the dataset if no label file was included
_labels = std::make_unique<LabelsComponent>(*p.labels, _dataset, _unit);
_createLabelsFromDataset = true;
}
else {
_labels = std::make_unique<LabelsComponent>(*p.labels);
}
_labels = std::make_unique<LabelsComponent>(*p.labels);
_hasLabels = true;
addPropertySubOwner(_labels.get());
// Fading of the labels should depend on the fading of the renderable
@@ -764,11 +747,36 @@ void RenderablePointCloud::initialize() {
break;
}
if (_hasDataFile) {
if (_useCaching) {
_dataset = dataloader::data::loadFileWithCache(_dataFile, _dataMapping);
}
else {
_dataset = dataloader::data::loadFile(_dataFile, _dataMapping);
}
_nDataPoints = static_cast<unsigned int>(_dataset.entries.size());
// If no scale exponent was specified, compute one that will at least show the
// points based on the scale of the positions in the dataset
if (_shouldComputeScaleExponent) {
double dist = _dataset.maxPositionComponent * toMeter(_unit);
if (dist > 0.0) {
float exponent = static_cast<float>(std::log10(dist));
// Reduce the actually used exponent a little bit, as just using the
// logarithm as is leads to very large points
_sizeSettings.scaleExponent = 0.9f * exponent;
}
}
}
if (_hasDataFile && _hasColorMapFile) {
_colorSettings.colorMapping->initialize(_dataset);
}
if (_hasLabels) {
if (_createLabelsFromDataset) {
_labels->loadLabelsFromDataset(_dataset, _unit);
}
_labels->initialize();
}
}

View File

@@ -227,6 +227,10 @@ protected:
DistanceUnit _unit = DistanceUnit::Parsec;
bool _useCaching = true;
bool _shouldComputeScaleExponent = false;
bool _createLabelsFromDataset = false;
dataloader::Dataset _dataset;
dataloader::DataMapping _dataMapping;

View File

@@ -782,6 +782,8 @@ RenderableGalaxy::Result RenderableGalaxy::loadPointFile() {
RenderableGalaxy::Result RenderableGalaxy::loadCachedFile(
const std::filesystem::path& file)
{
ZoneScoped;
std::ifstream fileStream = std::ifstream(file, std::ifstream::binary);
if (!fileStream.good()) {
LERROR(std::format("Error opening file '{}' for loading cache file", file));

View File

@@ -221,6 +221,8 @@ void MemoryAwareTileCache::TextureContainer::reset() {
ghoul::opengl::Texture::FilterMode::AnisotropicMipMap;
for (size_t i = 0; i < _numTextures; i++) {
ZoneScopedN("Texture");
using namespace ghoul::opengl;
std::unique_ptr<Texture> tex = std::make_unique<Texture>(
@@ -235,8 +237,8 @@ void MemoryAwareTileCache::TextureContainer::reset() {
);
tex->setDataOwnership(Texture::TakeOwnership::Yes);
tex->uploadTexture();
tex->setFilter(mode);
tex->uploadTexture();
_textures.push_back(std::move(tex));
}

View File

@@ -167,6 +167,8 @@ void Connection::handleJson(const nlohmann::json& json) {
auto topicIt = _topics.find(topicId);
if (topicIt == _topics.end()) {
ZoneScopedN("New Topic");
// The topic id is not registered: Initialize a new topic.
auto typeJson = json.find(MessageKeyType);
if (typeJson == json.end() || !typeJson->is_string()) {
@@ -174,6 +176,7 @@ void Connection::handleJson(const nlohmann::json& json) {
return;
}
const std::string type = *typeJson;
ZoneText(type.c_str(), type.size());
if (!isAuthorized() && (type != "authorize")) {
LERROR("Connection is not authorized");
@@ -188,6 +191,8 @@ void Connection::handleJson(const nlohmann::json& json) {
}
}
else {
ZoneScopedN("Existing Topic");
if (!isAuthorized()) {
LERROR("Connection is not authorized");
return;

View File

@@ -52,14 +52,18 @@ namespace {
namespace openspace {
void GetPropertyTopic::handleJson(const nlohmann::json& json) {
ZoneScoped;
const std::string requestedKey = json.at("property").get<std::string>();
ZoneText(requestedKey.c_str(), requestedKey.size());
LDEBUG("Getting property '" + requestedKey + "'...");
nlohmann::json response;
if (requestedKey == AllPropertiesValue) {
response = allProperties();
}
else if (requestedKey == AllNodesValue) {
response = wrappedPayload(sceneGraph()->allSceneGraphNodes());
const std::vector<SceneGraphNode*>& nodes = sceneGraph()->allSceneGraphNodes();
response = wrappedPayload(nodes);
}
else if (requestedKey == AllScreenSpaceRenderablesValue) {
response = wrappedPayload({

View File

@@ -253,6 +253,8 @@ namespace openspace {
void WwtDataHandler::loadImages(const std::string& root,
const std::filesystem::path& directory)
{
ZoneScoped;
// Steps to download new images
// 1. Create the target directory if it doesn't already exist
// 2. If the 'root' has an associated hash file, download and compare it with the

View File

@@ -79,22 +79,22 @@ glm::uvec3 RawVolumeReader<VoxelType>::indexToCoords(size_t linear) const {
template <typename VoxelType>
std::unique_ptr<RawVolume<VoxelType>> RawVolumeReader<VoxelType>::read(bool invertZ) {
glm::uvec3 dims = dimensions();
auto volume = std::make_unique<RawVolume<VoxelType>>(dims);
std::ifstream file(_path, std::ios::binary);
char* buffer = reinterpret_cast<char*>(volume->data());
ZoneScoped;
std::ifstream file = std::ifstream(_path, std::ios::binary);
if (file.fail()) {
throw ghoul::FileNotFoundError("Volume file not found");
}
size_t length = static_cast<size_t>(dims.x) *
static_cast<size_t>(dims.y) *
static_cast<size_t>(dims.z) *
sizeof(VoxelType);
glm::uvec3 dims = dimensions();
auto volume = std::make_unique<RawVolume<VoxelType>>(dims);
file.read(buffer, length);
char* buffer = reinterpret_cast<char*>(volume->data());
size_t length = glm::compMul(dims) * sizeof(VoxelType);
{
ZoneScopedN("read");
file.read(buffer, length);
}
if (file.fail()) {
throw ghoul::RuntimeError("Error reading volume file");

View File

@@ -40,7 +40,7 @@
#include <string_view>
namespace {
constexpr int8_t DataCacheFileVersion = 11;
constexpr int8_t DataCacheFileVersion = 13;
constexpr int8_t LabelCacheFileVersion = 11;
constexpr int8_t ColorCacheFileVersion = 11;
@@ -75,6 +75,8 @@ namespace {
std::is_same_v<T, openspace::dataloader::ColorMap>
);
ZoneScoped;
std::string info;
if (specs.has_value()) {
info = openspace::dataloader::generateHashString(*specs);
@@ -93,12 +95,13 @@ namespace {
std::optional<T> dataset = loadCacheFunction(cached);
if (dataset.has_value()) {
// We could load the cache file and we are now done with this
return *dataset;
return std::move(*dataset);
}
else {
FileSys.cacheManager()->removeCacheFile(cached);
}
}
LINFOC("DataLoader", std::format("Loading file '{}'", filePath));
T dataset = loadFunction(filePath, specs);
@@ -116,6 +119,8 @@ namespace openspace::dataloader {
namespace data {
Dataset loadFile(std::filesystem::path path, std::optional<DataMapping> specs) {
ZoneScoped;
ghoul_assert(std::filesystem::exists(path), "File must exist");
const std::ifstream file = std::ifstream(path);
@@ -143,6 +148,8 @@ Dataset loadFile(std::filesystem::path path, std::optional<DataMapping> specs) {
}
std::optional<Dataset> loadCachedFile(const std::filesystem::path& path) {
ZoneScoped;
std::ifstream file = std::ifstream(path, std::ios::binary);
if (!file.good()) {
return std::nullopt;
@@ -163,6 +170,8 @@ std::optional<Dataset> loadCachedFile(const std::filesystem::path& path) {
file.read(reinterpret_cast<char*>(&nVariables), sizeof(uint16_t));
result.variables.resize(nVariables);
for (int i = 0; i < nVariables; i += 1) {
ZoneScopedN("Variable");
Dataset::Variable var;
int16_t idx = 0;
@@ -183,6 +192,8 @@ std::optional<Dataset> loadCachedFile(const std::filesystem::path& path) {
file.read(reinterpret_cast<char*>(&nTextures), sizeof(uint16_t));
result.textures.resize(nTextures);
for (int i = 0; i < nTextures; i += 1) {
ZoneScopedN("Texture");
Dataset::Texture tex;
int16_t idx = 0;
@@ -214,27 +225,68 @@ std::optional<Dataset> loadCachedFile(const std::filesystem::path& path) {
result.entries.reserve(nEntries);
for (uint64_t i = 0; i < nEntries; i += 1) {
Dataset::Entry e;
file.read(reinterpret_cast<char*>(&e.position.x), sizeof(float));
file.read(reinterpret_cast<char*>(&e.position.y), sizeof(float));
file.read(reinterpret_cast<char*>(&e.position.z), sizeof(float));
uint16_t nValues = 0;
file.read(reinterpret_cast<char*>(&nValues), sizeof(uint16_t));
e.data.resize(nValues);
file.read(reinterpret_cast<char*>(e.data.data()), nValues * sizeof(float));
file.read(reinterpret_cast<char*>(&e.position.x), 3 * sizeof(float));
// For now we just store the length of the comment. Since the comments are stored
// in one block after the data entries, we can use the length later to extract the
// contents of this entries comment out of the big block
uint16_t len = 0;
file.read(reinterpret_cast<char*>(&len), sizeof(uint16_t));
if (len > 0) {
// If there is a comment, we already allocate the space for it here. This way
// we don't need to separately store the length of it, but can use the size of
// the vector instead
std::string comment;
comment.resize(len);
file.read(comment.data(), len);
e.comment = std::move(comment);
}
result.entries.push_back(std::move(e));
}
//
// Read the data values next
uint16_t nValues = 0;
file.read(reinterpret_cast<char*>(&nValues), sizeof(uint16_t));
std::vector<float> entriesBuffer;
entriesBuffer.resize(nEntries * nValues);
file.read(
reinterpret_cast<char*>(entriesBuffer.data()),
nEntries * nValues * sizeof(float)
);
//
// Read comments in one block and then assign them to the data entries
uint64_t totalCommentLength = 0;
file.read(reinterpret_cast<char*>(&totalCommentLength), sizeof(uint64_t));
std::vector<char> commentBuffer;
commentBuffer.resize(totalCommentLength);
file.read(commentBuffer.data(), totalCommentLength);
//
// Now we have the comments and the data values, we need to implant them into the
// data entries
// commentIdx is the running index into the total comment buffer
int commentIdx = 0;
int valuesIdx = 0;
for (Dataset::Entry& e : result.entries) {
e.data.resize(nValues);
std::memcpy(e.data.data(), entriesBuffer.data() + valuesIdx, nValues);
valuesIdx += nValues;
if (e.comment.has_value()) {
ghoul_assert(commentIdx < commentBuffer.size(), "Index too large");
// If we have a comment, we need to extract its length's worth of characters
// from the buffer
std::memcpy(e.comment->data(), &commentBuffer[commentIdx], e.comment->size());
// and then advance the index
commentIdx += e.comment->size();
}
}
//
// Read max data point variable
float max = 0.f;
@@ -245,6 +297,8 @@ std::optional<Dataset> loadCachedFile(const std::filesystem::path& path) {
}
void saveCachedFile(const Dataset& dataset, const std::filesystem::path& path) {
ZoneScoped;
std::ofstream file = std::ofstream(path, std::ofstream::binary);
file.write(reinterpret_cast<const char*>(&DataCacheFileVersion), sizeof(int8_t));
@@ -297,18 +351,19 @@ void saveCachedFile(const Dataset& dataset, const std::filesystem::path& path) {
checkSize<uint64_t>(dataset.entries.size(), "Too many entries");
uint64_t nEntries = static_cast<uint64_t>(dataset.entries.size());
file.write(reinterpret_cast<const char*>(&nEntries), sizeof(uint64_t));
for (const Dataset::Entry& e : dataset.entries) {
file.write(reinterpret_cast<const char*>(&e.position.x), sizeof(float));
file.write(reinterpret_cast<const char*>(&e.position.y), sizeof(float));
file.write(reinterpret_cast<const char*>(&e.position.z), sizeof(float));
checkSize<uint16_t>(e.data.size(), "Too many data variables");
uint16_t nValues = static_cast<uint16_t>(e.data.size());
file.write(reinterpret_cast<const char*>(&nValues), sizeof(uint16_t));
file.write(
reinterpret_cast<const char*>(e.data.data()),
e.data.size() * sizeof(float)
);
// We assume the number of values for each dataset to be the same, so we can store
// them upfront
uint16_t nValues = dataset.entries.empty() ? 0 : dataset.entries[0].data.size();
checkSize<uint16_t>(nValues, "Too many data variables");
std::vector<float> valuesBuffer;
valuesBuffer.reserve(dataset.entries.size() * nValues);
uint64_t totalCommentLength = 0;
for (const Dataset::Entry& e : dataset.entries) {
file.write(reinterpret_cast<const char*>(&e.position.x), 3 * sizeof(float));
valuesBuffer.insert(valuesBuffer.end(), e.data.begin(), e.data.end());
if (e.comment.has_value()) {
checkSize<uint16_t>(e.comment->size(), "Comment too long");
@@ -317,6 +372,22 @@ void saveCachedFile(const Dataset& dataset, const std::filesystem::path& path) {
static_cast<uint16_t>(e.comment->size()) :
0;
file.write(reinterpret_cast<const char*>(&commentLen), sizeof(uint16_t));
totalCommentLength += commentLen;
}
// Write all of the data values next
file.write(reinterpret_cast<const char*>(&nValues), sizeof(uint16_t));
file.write(
reinterpret_cast<const char*>(valuesBuffer.data()),
valuesBuffer.size() * sizeof(float)
);
//
// Write all of the comments next. We don't have to store the individual comment
// lengths as the data values written before already have those stored. And since we
// are reading the comments in the same order as the dataset entries, we're good
file.write(reinterpret_cast<const char*>(&totalCommentLength), sizeof(uint64_t));
for (const Dataset::Entry& e : dataset.entries) {
if (e.comment.has_value()) {
file.write(e.comment->data(), e.comment->size());
}
@@ -345,6 +416,8 @@ Dataset loadFileWithCache(std::filesystem::path path, std::optional<DataMapping>
namespace label {
Labelset loadFile(std::filesystem::path path, std::optional<DataMapping>) {
ZoneScoped;
ghoul_assert(std::filesystem::exists(path), "File must exist");
const std::ifstream file = std::ifstream(path);

View File

@@ -151,6 +151,8 @@ documentation::Documentation DataMapping::Documentation() {
}
DataMapping DataMapping::createFromDictionary(const ghoul::Dictionary& dictionary) {
ZoneScoped;
const Parameters p = codegen::bake<Parameters>(dictionary);
DataMapping result;

View File

@@ -757,11 +757,10 @@ void OpenSpaceEngine::loadAssets() {
std::unique_ptr<SceneInitializer> sceneInitializer;
if (global::configuration->useMultithreadedInitialization) {
const unsigned int nAvailableThreads = std::min(
std::thread::hardware_concurrency() - 1,
const unsigned int nThreads = std::max(
std::thread::hardware_concurrency() / 4,
4u
);
const unsigned int nThreads = nAvailableThreads == 0 ? 2 : nAvailableThreads;
sceneInitializer = std::make_unique<MultiThreadedSceneInitializer>(nThreads);
}
else {

View File

@@ -163,6 +163,8 @@ std::string PropertyOwner::propertyGroupName(const std::string& groupID) const {
}
void PropertyOwner::addProperty(Property* prop) {
ZoneScoped;
ghoul_precondition(prop != nullptr, "prop must not be nullptr");
if (prop->identifier().empty()) {
@@ -207,6 +209,8 @@ void PropertyOwner::addProperty(Property& prop) {
}
void PropertyOwner::addPropertySubOwner(openspace::properties::PropertyOwner* owner) {
ZoneScoped;
ghoul_precondition(owner != nullptr, "owner must not be nullptr");
ghoul_precondition(
!owner->identifier().empty(),

View File

@@ -330,6 +330,8 @@ ghoul::opengl::Texture* ColorMappingComponent::texture() const {
}
void ColorMappingComponent::initialize(const dataloader::Dataset& dataset) {
ZoneScoped;
_colorMap = dataloader::color::loadFileWithCache(colorMapFile.value());
initializeParameterData(dataset);

View File

@@ -223,21 +223,6 @@ LabelsComponent::LabelsComponent(const ghoul::Dictionary& dictionary)
_transformationMatrix = p.transformationMatrix.value_or(_transformationMatrix);
}
LabelsComponent::LabelsComponent(const ghoul::Dictionary& dictionary,
const dataloader::Dataset& dataset,
DistanceUnit unit)
: LabelsComponent(dictionary)
{
// The unit should match the one in the dataset, not the one that was included in the
// asset (if any)
_unit = unit;
// Load the labelset directly based on the dataset, and keep track of that it has
// already been loaded this way
_labelset = dataloader::label::loadFromDataset(dataset);
_createdFromDataset = true;
}
dataloader::Labelset& LabelsComponent::labelSet() {
return _labelset;
}
@@ -247,6 +232,8 @@ const dataloader::Labelset& LabelsComponent::labelSet() const {
}
void LabelsComponent::initialize() {
ZoneScoped;
_font = global::fontManager->font(
"Mono",
_fontSize,
@@ -257,14 +244,34 @@ void LabelsComponent::initialize() {
loadLabels();
}
void LabelsComponent::loadLabelsFromDataset(const dataloader::Dataset& dataset,
DistanceUnit unit)
{
ZoneScoped;
LINFO("Loading labels from dataset");
// The unit should match the one in the dataset, not the one that was included in the
// asset (if any)
_unit = unit;
// Load the labelset directly based on the dataset, and keep track of that it has
// already been loaded this way
_labelset = dataloader::label::loadFromDataset(dataset);
_createdFromDataset = true;
}
void LabelsComponent::loadLabels() {
LINFO(std::format("Loading label file '{}'", _labelFile));
ZoneScoped;
if (_createdFromDataset) {
// The labelset should already have been loaded
return;
}
LINFO(std::format("Loading label file '{}'", _labelFile));
if (_useCache) {
_labelset = dataloader::label::loadFileWithCache(_labelFile);
}

View File

@@ -118,6 +118,8 @@ documentation::Documentation Renderable::Documentation() {
ghoul::mm_unique_ptr<Renderable> Renderable::createFromDictionary(
const ghoul::Dictionary& dictionary)
{
ZoneScoped;
if (!dictionary.hasKey(KeyType)) {
throw ghoul::RuntimeError("Tried to create Renderable but no 'Type' was found");
}

View File

@@ -918,6 +918,7 @@ Asset* AssetManager::retrieveAsset(const std::filesystem::path& path,
void AssetManager::callOnInitialize(Asset* asset) const {
ZoneScoped;
ZoneText(asset->path().string().c_str(), asset->path().string().length());
ghoul_precondition(asset, "Asset must not be nullptr");
auto it = _onInitializeFunctionRefs.find(asset);

View File

@@ -54,6 +54,8 @@ documentation::Documentation Rotation::Documentation() {
ghoul::mm_unique_ptr<Rotation> Rotation::createFromDictionary(
const ghoul::Dictionary& dictionary)
{
ZoneScoped;
const Parameters p = codegen::bake<Parameters>(dictionary);
Rotation* result = FactoryManager::ref().factory<Rotation>()->create(

View File

@@ -53,6 +53,8 @@ documentation::Documentation Scale::Documentation() {
ghoul::mm_unique_ptr<Scale> Scale::createFromDictionary(
const ghoul::Dictionary& dictionary)
{
ZoneScoped;
const Parameters p = codegen::bake<Parameters>(dictionary);
Scale* result = FactoryManager::ref().factory<Scale>()->create(

View File

@@ -200,6 +200,8 @@ void Scene::updateNodeRegistry() {
}
void Scene::sortTopologically() {
ZoneScoped;
_topologicallySortedNodes.insert(
_topologicallySortedNodes.end(),
std::make_move_iterator(_circularNodes.begin()),
@@ -310,7 +312,7 @@ void Scene::update(const UpdateData& data) {
void Scene::render(const RenderData& data, RendererTasks& tasks) {
ZoneScoped;
ZoneName(
ZoneText(
renderBinToString(data.renderBinMask),
strlen(renderBinToString(data.renderBinMask))
);
@@ -366,6 +368,8 @@ const std::vector<SceneGraphNode*>& Scene::allSceneGraphNodes() const {
}
SceneGraphNode* Scene::loadNode(const ghoul::Dictionary& nodeDictionary) {
ZoneScoped;
// First interpret the dictionary
std::vector<std::string> dependencyNames;

View File

@@ -619,6 +619,8 @@ namespace {
* Loads the SceneGraphNode described in the table and adds it to the SceneGraph.
*/
[[codegen::luawrap]] void addSceneGraphNode(ghoul::Dictionary node) {
ZoneScoped;
using namespace openspace;
try {
SceneGraphNode* n = global::renderEngine->scene()->loadNode(node);

View File

@@ -330,6 +330,8 @@ int SceneGraphNode::nextIndex = 0;
ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
const ghoul::Dictionary& dictionary)
{
ZoneScoped;
const Parameters p = codegen::bake<Parameters>(dictionary);
SceneGraphNode* n = global::memoryManager->PersistentMemory.alloc<SceneGraphNode>();
@@ -342,6 +344,8 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
result->setIdentifier(p.identifier);
if (p.gui.has_value()) {
ZoneScopedN("GUI");
if (p.gui->name.has_value()) {
result->setGuiName(*p.gui->name);
result->_guiDisplayName = result->guiName();
@@ -375,6 +379,8 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
result->_reachFactor = p.reachFactor.value_or(result->_reachFactor);
if (p.transform.has_value()) {
ZoneScopedN("Transform");
if (p.transform->translation.has_value()) {
result->_transform.translation = Translation::createFromDictionary(
*p.transform->translation
@@ -409,6 +415,8 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
if (p.timeFrame.has_value()) {
ZoneScopedN("TimeFrame");
result->_timeFrame = TimeFrame::createFromDictionary(*p.timeFrame);
LDEBUG(std::format(
@@ -419,17 +427,21 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
// We initialize the renderable last as it probably has the most dependencies
if (p.renderable.has_value()) {
ZoneScopedN("Renderable");
result->_renderable = Renderable::createFromDictionary(*p.renderable);
ghoul_assert(result->_renderable, "Failed to create Renderable");
result->_renderable->_parent = result.get();
result->addPropertySubOwner(result->_renderable.get());
LDEBUG(std::format(
"Successfully created renderable for '{}'", result->identifier()
));
//LDEBUG(std::format(
// "Successfully created renderable for '{}'", result->identifier()
//));
}
// Extracting the actions from the dictionary
if (p.onApproach.has_value()) {
ZoneScopedN("OnApproach");
if (std::holds_alternative<std::string>(*p.onApproach)) {
result->_onApproachAction = { std::get<std::string>(*p.onApproach) };
}
@@ -439,6 +451,8 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
}
if (p.onReach.has_value()) {
ZoneScopedN("OnReach");
if (std::holds_alternative<std::string>(*p.onReach)) {
result->_onReachAction = { std::get<std::string>(*p.onReach) };
}
@@ -448,6 +462,8 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
}
if (p.onRecede.has_value()) {
ZoneScopedN("OnRecede");
if (std::holds_alternative<std::string>(*p.onRecede)) {
result->_onRecedeAction = { std::get<std::string>(*p.onRecede) };
}
@@ -457,6 +473,8 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
}
if (p.onExit.has_value()) {
ZoneScopedN("OnExit");
if (std::holds_alternative<std::string>(*p.onExit)) {
result->_onExitAction = { std::get<std::string>(*p.onExit) };
}
@@ -466,6 +484,8 @@ ghoul::mm_unique_ptr<SceneGraphNode> SceneGraphNode::createFromDictionary(
}
if (p.tag.has_value()) {
ZoneScopedN("Tag");
if (std::holds_alternative<std::string>(*p.tag)) {
result->addTag(std::get<std::string>(*p.tag));
}
@@ -611,6 +631,7 @@ void SceneGraphNode::initialize() {
void SceneGraphNode::initializeGL() {
ZoneScoped;
ZoneName(identifier().c_str(), identifier().size());
TracyGpuZone("initializeGL")
LDEBUG(std::format("Initializing GL: {}", identifier()));

View File

@@ -51,7 +51,11 @@ MultiThreadedSceneInitializer::MultiThreadedSceneInitializer(unsigned int nThrea
{}
void MultiThreadedSceneInitializer::initializeNode(SceneGraphNode* node) {
ZoneScoped;
auto initFunction = [this, node]() {
ZoneScopedN("MultiThreadedInit");
LoadingScreen* loadingScreen = global::openSpaceEngine->loadingScreen();
LoadingScreen::ProgressInfo progressInfo;

View File

@@ -53,6 +53,8 @@ documentation::Documentation TimeFrame::Documentation() {
ghoul::mm_unique_ptr<TimeFrame> TimeFrame::createFromDictionary(
const ghoul::Dictionary& dict)
{
ZoneScoped;
const Parameters p = codegen::bake<Parameters>(dict);
TimeFrame* result = FactoryManager::ref().factory<TimeFrame>()->create(p.type, dict);

View File

@@ -52,6 +52,8 @@ documentation::Documentation Translation::Documentation() {
ghoul::mm_unique_ptr<Translation> Translation::createFromDictionary(
const ghoul::Dictionary& dictionary)
{
ZoneScoped;
const Parameters p = codegen::bake<Parameters>(dictionary);
Translation* result = FactoryManager::ref().factory<Translation>()->create(

View File

@@ -165,6 +165,7 @@ bool ScriptEngine::hasLibrary(const std::string& name) {
bool ScriptEngine::runScript(const std::string& script, const ScriptCallback& callback) {
ZoneScoped;
ZoneText(script.c_str(), script.size());
ghoul_assert(!script.empty(), "Script must not be empty");
@@ -177,7 +178,7 @@ bool ScriptEngine::runScript(const std::string& script, const ScriptCallback& ca
if (callback) {
ghoul::Dictionary returnValue =
ghoul::lua::loadArrayDictionaryFromString(script, _state);
callback(returnValue);
callback(std::move(returnValue));
}
else {
ghoul::lua::runScript(_state, script);

View File

@@ -66,6 +66,7 @@ void OpenSpaceModule::initialize(const ghoul::Dictionary& configuration) {
void OpenSpaceModule::initializeGL() {
ZoneScoped;
ZoneName(identifier().c_str(), identifier().size());
TracyGpuZone("initializeGL")
internalInitializeGL();
}

View File

@@ -49,6 +49,8 @@ void ScreenLog::removeExpiredEntries() {
}
void ScreenLog::log(LogLevel level, std::string_view category, std::string_view message) {
ZoneScoped;
const std::lock_guard guard(_mutex);
if (level >= _logLevel) {
_entries.push_back({