From cdf98f78899f5040361c17cf7498c2bdd0b9c270 Mon Sep 17 00:00:00 2001
From: Alexander Bock
Date: Thu, 28 Mar 2024 14:30:12 +0100
Subject: [PATCH] Optimizing the data loading. Adding more Tracy macros

The data cache now stores all data values in one contiguous block after the
entries instead of interleaving them with each entry, so the cache can be read
and written with a few large I/O calls instead of one small call per entry
(data cache version 12 -> 13). Tracy profiling zones are added to the server
topic handling, the WWT image loading, and the script engine.
---
 modules/server/src/connection.cpp              |  5 ++
 .../server/src/topics/getpropertytopic.cpp     |  6 +-
 modules/skybrowser/src/wwtdatahandler.cpp      |  2 +
 src/data/dataloader.cpp                        | 70 ++++++++++++-------
 src/engine/openspaceengine.cpp                 |  5 +-
 src/scripting/scriptengine.cpp                 |  3 +-
 support/coding/codegen                         |  2 +-
 7 files changed, 61 insertions(+), 32 deletions(-)

diff --git a/modules/server/src/connection.cpp b/modules/server/src/connection.cpp
index 6007ee706c..74b367af8c 100644
--- a/modules/server/src/connection.cpp
+++ b/modules/server/src/connection.cpp
@@ -167,6 +167,8 @@ void Connection::handleJson(const nlohmann::json& json) {
 
     auto topicIt = _topics.find(topicId);
     if (topicIt == _topics.end()) {
+        ZoneScopedN("New Topic");
+
         // The topic id is not registered: Initialize a new topic.
         auto typeJson = json.find(MessageKeyType);
         if (typeJson == json.end() || !typeJson->is_string()) {
@@ -174,6 +176,7 @@
             return;
         }
         const std::string type = *typeJson;
+        ZoneText(type.c_str(), type.size());
 
         if (!isAuthorized() && (type != "authorize")) {
             LERROR("Connection is not authorized");
@@ -188,6 +191,8 @@
         }
     }
     else {
+        ZoneScopedN("Existing Topic");
+
         if (!isAuthorized()) {
             LERROR("Connection is not authorized");
             return;
diff --git a/modules/server/src/topics/getpropertytopic.cpp b/modules/server/src/topics/getpropertytopic.cpp
index 4c0f037ba6..9089704baf 100644
--- a/modules/server/src/topics/getpropertytopic.cpp
+++ b/modules/server/src/topics/getpropertytopic.cpp
@@ -52,14 +52,18 @@ namespace {
 namespace openspace {
 
 void GetPropertyTopic::handleJson(const nlohmann::json& json) {
+    ZoneScoped;
+
     const std::string requestedKey = json.at("property").get<std::string>();
+    ZoneText(requestedKey.c_str(), requestedKey.size());
     LDEBUG("Getting property '" + requestedKey + "'...");
     nlohmann::json response;
     if (requestedKey == AllPropertiesValue) {
         response = allProperties();
     }
     else if (requestedKey == AllNodesValue) {
-        response = wrappedPayload(sceneGraph()->allSceneGraphNodes());
+        const std::vector<SceneGraphNode*>& nodes = sceneGraph()->allSceneGraphNodes();
+        response = wrappedPayload(nodes);
     }
     else if (requestedKey == AllScreenSpaceRenderablesValue) {
         response = wrappedPayload({
diff --git a/modules/skybrowser/src/wwtdatahandler.cpp b/modules/skybrowser/src/wwtdatahandler.cpp
index 7da2fe075b..3789c998a2 100644
--- a/modules/skybrowser/src/wwtdatahandler.cpp
+++ b/modules/skybrowser/src/wwtdatahandler.cpp
@@ -253,6 +253,8 @@ namespace openspace {
 void WwtDataHandler::loadImages(const std::string& root,
                                 const std::filesystem::path& directory)
 {
+    ZoneScoped;
+
     // Steps to download new images
     // 1. Create the target directory if it doesn't already exist
    // 2. If the 'root' has an associated hash file, download and compare it with the
diff --git a/src/data/dataloader.cpp b/src/data/dataloader.cpp
index 9402acc9bc..bf3c16bae7 100644
--- a/src/data/dataloader.cpp
+++ b/src/data/dataloader.cpp
@@ -40,7 +40,7 @@
 #include <vector>
 
 namespace {
-    constexpr int8_t DataCacheFileVersion = 12;
+    constexpr int8_t DataCacheFileVersion = 13;
     constexpr int8_t LabelCacheFileVersion = 11;
     constexpr int8_t ColorCacheFileVersion = 11;
@@ -224,16 +224,9 @@ std::optional<Dataset> loadCachedFile(const std::filesystem::path& path) {
     file.read(reinterpret_cast<char*>(&nEntries), sizeof(uint64_t));
     result.entries.reserve(nEntries);
     for (uint64_t i = 0; i < nEntries; i += 1) {
-        ZoneScopedN("Dataset");
-
         Dataset::Entry e;
         file.read(reinterpret_cast<char*>(&e.position.x), 3 * sizeof(float));
 
-        uint16_t nValues = 0;
-        file.read(reinterpret_cast<char*>(&nValues), sizeof(uint16_t));
-        e.data.resize(nValues);
-        file.read(reinterpret_cast<char*>(e.data.data()), nValues * sizeof(float));
-
         // For now we just store the length of the comment. Since the comments are stored
         // in one block after the data entries, we can use the length later to extract the
         // contents of this entries comment out of the big block
@@ -251,6 +244,17 @@
         result.entries.push_back(std::move(e));
     }
 
+    //
+    // Read the data values next
+    uint16_t nValues = 0;
+    file.read(reinterpret_cast<char*>(&nValues), sizeof(uint16_t));
+    std::vector<float> entriesBuffer;
+    entriesBuffer.resize(nEntries * nValues);
+    file.read(
+        reinterpret_cast<char*>(entriesBuffer.data()),
+        nEntries * nValues * sizeof(float)
+    );
+
     //
     // Read comments in one block and then assign them to the data entries
     uint64_t totalCommentLength = 0;
@@ -258,18 +262,28 @@
     std::vector<char> commentBuffer;
     commentBuffer.resize(totalCommentLength);
     file.read(commentBuffer.data(), totalCommentLength);
-    // idx is the running index into the total comment buffer
-    int idx = 0;
+
+    //
+    // Now that we have the comments and the data values, we need to implant them into
+    // the data entries
+
+    // commentIdx is the running index into the total comment buffer
+    int commentIdx = 0;
+    int valuesIdx = 0;
     for (Dataset::Entry& e : result.entries) {
+        e.data.resize(nValues);
+        std::memcpy(e.data.data(), entriesBuffer.data() + valuesIdx, nValues * sizeof(float));
+        valuesIdx += nValues;
+
         if (e.comment.has_value()) {
-            ghoul_assert(idx < commentBuffer.size(), "Index too large");
+            ghoul_assert(commentIdx < commentBuffer.size(), "Index too large");
 
             // If we have a comment, we need to extract its length's worth of characters
             // from the buffer
-            std::memcpy(e.comment->data(), &commentBuffer[idx], e.comment->size());
+            std::memcpy(e.comment->data(), &commentBuffer[commentIdx], e.comment->size());
 
             // and then advance the index
-            idx += e.comment->size();
+            commentIdx += e.comment->size();
         }
     }
@@ -337,19 +351,19 @@ void saveCachedFile(const Dataset& dataset, const std::filesystem::path& path) {
     checkSize(dataset.entries.size(), "Too many entries");
     uint64_t nEntries = static_cast<uint64_t>(dataset.entries.size());
     file.write(reinterpret_cast<const char*>(&nEntries), sizeof(uint64_t));
+
+    // We assume the number of values for each dataset to be the same, so we can store
+    // them upfront
+    uint16_t nValues = dataset.entries.empty() ?
+        0 : static_cast<uint16_t>(dataset.entries[0].data.size());
+    checkSize(nValues, "Too many data variables");
+    std::vector<float> valuesBuffer;
+    valuesBuffer.reserve(dataset.entries.size() * nValues);
+
     uint64_t totalCommentLength = 0;
     for (const Dataset::Entry& e : dataset.entries) {
-        file.write(reinterpret_cast<const char*>(&e.position.x), sizeof(float));
-        file.write(reinterpret_cast<const char*>(&e.position.y), sizeof(float));
-        file.write(reinterpret_cast<const char*>(&e.position.z), sizeof(float));
+        file.write(reinterpret_cast<const char*>(&e.position.x), 3 * sizeof(float));
 
-        checkSize(e.data.size(), "Too many data variables");
-        uint16_t nValues = static_cast<uint16_t>(e.data.size());
-        file.write(reinterpret_cast<const char*>(&nValues), sizeof(uint16_t));
-        file.write(
-            reinterpret_cast<const char*>(e.data.data()),
-            e.data.size() * sizeof(float)
-        );
+        valuesBuffer.insert(valuesBuffer.end(), e.data.begin(), e.data.end());
 
         if (e.comment.has_value()) {
             checkSize(e.comment->size(), "Comment too long");
@@ -359,11 +373,15 @@ void saveCachedFile(const Dataset& dataset, const std::filesystem::path& path) {
             0;
         file.write(reinterpret_cast<const char*>(&commentLen), sizeof(uint16_t));
         totalCommentLength += commentLen;
-
-        //if (e.comment.has_value()) {
-        //    file.write(e.comment->data(), e.comment->size());
-        //}
     }
 
+    // Write all of the data values next
+    file.write(reinterpret_cast<const char*>(&nValues), sizeof(uint16_t));
+    file.write(
+        reinterpret_cast<const char*>(valuesBuffer.data()),
+        valuesBuffer.size() * sizeof(float)
+    );
+
     //
     // Write all of the comments next. We don't have to store the individual comment
     // lengths as the data values written before already have those stored. And since we
diff --git a/src/engine/openspaceengine.cpp b/src/engine/openspaceengine.cpp
index cb64544433..54d74d0f65 100644
--- a/src/engine/openspaceengine.cpp
+++ b/src/engine/openspaceengine.cpp
@@ -757,11 +757,10 @@ void OpenSpaceEngine::loadAssets() {
 
     std::unique_ptr<SceneInitializer> sceneInitializer;
     if (global::configuration->useMultithreadedInitialization) {
-        const unsigned int nAvailableThreads = std::max(
-            std::thread::hardware_concurrency() / 2,
+        const unsigned int nThreads = std::max(
+            std::thread::hardware_concurrency() / 4,
             4u
         );
-        const unsigned int nThreads = nAvailableThreads == 0 ? 2 : nAvailableThreads;
         sceneInitializer = std::make_unique<MultiThreadedSceneInitializer>(nThreads);
     }
     else {
diff --git a/src/scripting/scriptengine.cpp b/src/scripting/scriptengine.cpp
index 1fbadf409f..f7361e9705 100644
--- a/src/scripting/scriptengine.cpp
+++ b/src/scripting/scriptengine.cpp
@@ -165,6 +165,7 @@ bool ScriptEngine::hasLibrary(const std::string& name) {
 
 bool ScriptEngine::runScript(const std::string& script, const ScriptCallback& callback) {
     ZoneScoped;
+    ZoneText(script.c_str(), script.size());
 
     ghoul_assert(!script.empty(), "Script must not be empty");
 
@@ -177,7 +178,7 @@ bool ScriptEngine::runScript(const std::string& script, const ScriptCallback& ca
         if (callback) {
             ghoul::Dictionary returnValue =
                 ghoul::lua::loadArrayDictionaryFromString(script, _state);
-            callback(returnValue);
+            callback(std::move(returnValue));
         }
         else {
             ghoul::lua::runScript(_state, script);
diff --git a/support/coding/codegen b/support/coding/codegen
index ef31f904b6..e8fe5144e0 160000
--- a/support/coding/codegen
+++ b/support/coding/codegen
@@ -1 +1 @@
-Subproject commit ef31f904b6ee02c5d1cc4c4bbddef821dbb505c8
+Subproject commit e8fe5144e0903e85ca931b6c59992119579aeb69
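
For reference, the dataloader change above switches the cache from an interleaved
per-entry layout (each entry carrying its own value count and values) to a blocked
layout (one shared value count, all values in a single contiguous run). The
standalone sketch below illustrates the resulting write/read pattern. It is a
minimal illustration under the assumption that every entry holds the same number
of values; the names Entry, writeCache, and readCache are hypothetical, not
OpenSpace API.

// Minimal sketch of the blocked cache layout; illustration only, not
// OpenSpace code. Streams are assumed to be opened in binary mode.
#include <cstdint>
#include <fstream>
#include <vector>

struct Entry {
    float position[3];
    std::vector<float> data;  // assumed to hold nValues floats in every entry
};

void writeCache(std::ofstream& file, const std::vector<Entry>& entries) {
    const uint64_t nEntries = entries.size();
    file.write(reinterpret_cast<const char*>(&nEntries), sizeof(uint64_t));

    // One shared value count up front instead of one count per entry
    const uint16_t nValues =
        entries.empty() ? 0 : static_cast<uint16_t>(entries[0].data.size());

    // Positions still go out per entry; the values are collected into one
    // buffer so they can be written with a single call afterwards
    std::vector<float> values;
    values.reserve(nEntries * nValues);
    for (const Entry& e : entries) {
        file.write(reinterpret_cast<const char*>(e.position), 3 * sizeof(float));
        values.insert(values.end(), e.data.begin(), e.data.end());
    }

    file.write(reinterpret_cast<const char*>(&nValues), sizeof(uint16_t));
    file.write(
        reinterpret_cast<const char*>(values.data()),
        values.size() * sizeof(float)
    );
}

std::vector<Entry> readCache(std::ifstream& file) {
    uint64_t nEntries = 0;
    file.read(reinterpret_cast<char*>(&nEntries), sizeof(uint64_t));

    std::vector<Entry> entries(nEntries);
    for (Entry& e : entries) {
        file.read(reinterpret_cast<char*>(e.position), 3 * sizeof(float));
    }

    // One bulk read replaces nEntries small reads of nValues floats each
    uint16_t nValues = 0;
    file.read(reinterpret_cast<char*>(&nValues), sizeof(uint16_t));
    std::vector<float> values(nEntries * nValues);
    file.read(
        reinterpret_cast<char*>(values.data()),
        values.size() * sizeof(float)
    );

    // Distribute the contiguous buffer back onto the individual entries
    for (uint64_t i = 0; i < nEntries; i++) {
        entries[i].data.assign(
            values.data() + i * nValues,
            values.data() + (i + 1) * nValues
        );
    }
    return entries;
}

The speedup comes from replacing the per-entry read/write calls in the hot loop
with one bulk transfer per block, at the cost of requiring a uniform value count
across entries; since the on-disk format changes, DataCacheFileVersion is bumped
from 12 to 13, presumably so that stale caches are regenerated.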