Added SceneGraphNode and functions to extract information from json-files

This commit is contained in:
Emilie
2020-05-20 17:25:01 +02:00
parent f7c6d9c544
commit 47892926bd
5 changed files with 505 additions and 262 deletions
@@ -21,7 +21,7 @@
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE *
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
****************************************************************************************/
//including our own h file
//including our own h file
#include <modules/fieldlinessequence/rendering/renderablestreamnodes.h>
//includes from fieldlinessequence, might not need all of them
@@ -41,7 +41,7 @@
#include <ghoul/logging/consolelog.h>
#include <ghoul/logging/visualstudiooutputlog.h>
#include <ghoul/opengl/programobject.h>
#include <ghoul/opengl/textureunit.h>
#include <fstream>
@@ -57,8 +57,8 @@ namespace {
//gl variables for shaders, probably needed some of them at least
constexpr const GLuint VaPosition = 0; // MUST CORRESPOND TO THE SHADER PROGRAM
constexpr const GLuint VaColor = 1; // MUST CORRESPOND TO THE SHADER PROGRAM
constexpr const GLuint VaMasking = 2; // MUST CORRESPOND TO THE SHADER PROGRAM
constexpr const GLuint VaColor = 1; // MUST CORRESPOND TO THE SHADER PROGRAM
constexpr const GLuint VaMasking = 2; // MUST CORRESPOND TO THE SHADER PROGRAM
// ----- KEYS POSSIBLE IN MODFILE. EXPECTED DATA TYPE OF VALUE IN [BRACKETS] ----- //
@@ -87,7 +87,7 @@ namespace {
constexpr const char* ValueInputFileTypeJson = "json";
constexpr const char* ValueInputFileTypeOsfls = "osfls";
//properties::PropertyOwner _pStreamGroup;
// Size of simulated flow particles
constexpr openspace::properties::Property::PropertyInfo StreamColorInfo = {
@@ -101,28 +101,33 @@ namespace {
"Toggles the rendering of moving particles along the lines. Can, for example, "
"illustrate magnetic flow."
};
constexpr openspace::properties::Property::PropertyInfo NodeSizeInfo = {
"Node size",
"Size of nodes",
"Change the size of the nodes"
};
enum class SourceFileType : int {
Json = 0,
Cdf,
Osfls,
Invalid
};
// File formats a stream-node data source can be provided in, as declared in the
// asset/modfile. Invalid is used when the declared type is not recognized.
enum class SourceFileType : int {
Json = 0,
Cdf,
Osfls,
Invalid
};
// Parses `input` as a float; on parse failure logs a warning and returns
// `backupValue` instead.
// NOTE(review): std::stof can also throw std::out_of_range (value outside
// float's range), which is NOT caught here and would propagate to the caller
// -- confirm whether that is intended.
float stringToFloat(const std::string input, const float backupValue = 0.f) {
float tmp;
try {
tmp = std::stof(input);
}
catch (const std::invalid_argument& ia) {
// Not a parsable number at all -> fall back to the supplied default
LWARNING(fmt::format(
"Invalid argument: {}. '{}' is NOT a valid number", ia.what(), input
));
return backupValue;
}
return tmp;
}
} //namespace
// Parses `input` as a float; on any conversion failure logs a warning and
// returns `backupValue` instead.
//
// \param input        the string to convert
// \param backupValue  value returned when `input` cannot be converted
// \return the parsed float, or `backupValue` on failure
float stringToFloat(const std::string& input, const float backupValue = 0.f) {
    try {
        return std::stof(input);
    }
    catch (const std::invalid_argument& ia) {
        LWARNING(fmt::format(
            "Invalid argument: {}. '{}' is NOT a valid number", ia.what(), input
        ));
        return backupValue;
    }
    catch (const std::out_of_range& oor) {
        // std::stof also throws out_of_range for values that do not fit in a
        // float (e.g. "1e999"); previously this escaped the function entirely
        LWARNING(fmt::format(
            "Out of range: {}. '{}' does not fit in a float", oor.what(), input
        ));
        return backupValue;
    }
}
} //namespace
namespace openspace {
using namespace properties;
@@ -134,15 +139,18 @@ namespace openspace {
glm::vec4(0.f),
glm::vec4(1.f))
, _pStreamsEnabled(StreamsenabledInfo, true)
, _pStreamGroup({"Streams"})
, _pStreamGroup({ "Streams" })
, _pNodeSize(NodeSizeInfo, 1)
{
{
_dictionary = std::make_unique<ghoul::Dictionary>(dictionary);
}
}
void RenderableStreamNodes::initializeGL() {
// EXTRACT MANDATORY INFORMATION FROM DICTIONARY
auto vec = LoadJsonfile();
//LDEBUG("testar json");
//log(ghoul::logging::LogLevel::Debug, _loggerCat, "testar json");
SourceFileType sourceFileType = SourceFileType::Invalid;
if (!extractMandatoryInfoFromDictionary(sourceFileType)) {
return;
@@ -150,7 +158,7 @@ namespace openspace {
// EXTRACT OPTIONAL INFORMATION FROM DICTIONARY
std::string outputFolderPath;
extractOptionalInfoFromDictionary(outputFolderPath);
//extractOptionalInfoFromDictionary(outputFolderPath);
// EXTRACT SOURCE FILE TYPE SPECIFIC INFORMATION FROM DICTIONARY & GET STATES FROM
// SOURCE
@@ -168,200 +176,217 @@ namespace openspace {
setupProperties();
// Setup shader program
_shaderProgram = global::renderEngine.buildRenderProgram(
"Streamnodes",
absPath("${MODULE_FIELDLINESSEQUENCE}/shaders/streamnodes_vs.glsl"),
absPath("${MODULE_FIELDLINESSEQUENCE}/shaders/streamnodes_fs.glsl")
);
glGenVertexArrays(1, &_vertexArrayObject);
glGenBuffers(1, &_vertexPositionBuffer);
glGenBuffers(1, &_vertexColorBuffer);
glGenBuffers(1, &_vertexMaskingBuffer);
// Setup shader program
_shaderProgram = global::renderEngine.buildRenderProgram(
"Streamnodes",
absPath("${MODULE_FIELDLINESSEQUENCE}/shaders/streamnodes_vs.glsl"),
absPath("${MODULE_FIELDLINESSEQUENCE}/shaders/streamnodes_fs.glsl")
);
glGenVertexArrays(1, &_vertexArrayObject);
glGenBuffers(1, &_vertexPositionBuffer);
glGenBuffers(1, &_vertexColorBuffer);
// Probably not needed, seems to be needed for additive blending
setRenderBin(Renderable::RenderBin::Overlay);
}
/**
* Extracts the general information (from the lua modfile) that is mandatory for the class
* to function; such as the file type and the location of the source files.
* Returns false if it fails to extract mandatory information!
*/
bool RenderableStreamNodes::extractMandatoryInfoFromDictionary(
SourceFileType& sourceFileType)
{
_dictionary->getValue(SceneGraphNode::KeyIdentifier, _identifier);
// ------------------- EXTRACT MANDATORY VALUES FROM DICTIONARY ------------------- //
std::string inputFileTypeString;
if(!_dictionary->getValue(KeyInputFileType, inputFileTypeString)){
LERROR(fmt::format("{}: The field {} is missing", _identifier, KeyInputFileType));
// Probably not needed, seems to be needed for additive blending
setRenderBin(Renderable::RenderBin::Overlay);
}
else{
// Verify that the input type is correct
if (inputFileTypeString == ValueInputFileTypeJson) {
sourceFileType = SourceFileType::Json;
/**
* Extracts the general information (from the lua modfile) that is mandatory for the class
* to function; such as the file type and the location of the source files.
* Returns false if it fails to extract mandatory information!
*/
bool RenderableStreamNodes::extractMandatoryInfoFromDictionary(
SourceFileType& sourceFileType)
{
_dictionary->getValue(SceneGraphNode::KeyIdentifier, _identifier);
// ------------------- EXTRACT MANDATORY VALUES FROM DICTIONARY ------------------- //
std::string inputFileTypeString;
if (!_dictionary->getValue(KeyInputFileType, inputFileTypeString)) {
LERROR(fmt::format("{}: The field {} is missing", _identifier, KeyInputFileType));
}
else {
// Verify that the input type is correct
if (inputFileTypeString == ValueInputFileTypeJson) {
sourceFileType = SourceFileType::Json;
}
else {
LERROR(fmt::format(
"{}: {} is not a recognized {}",
_identifier, inputFileTypeString, KeyInputFileType
));
sourceFileType = SourceFileType::Invalid;
return false;
}
}
std::string sourceFolderPath;
if (!_dictionary->getValue(KeySourceFolder, sourceFolderPath)) {
LERROR(fmt::format("{}: The field {} is missing", _identifier, KeySourceFolder));
return false;
}
// Ensure that the source folder exists and then extract
// the files with the same extension as <inputFileTypeString>
ghoul::filesystem::Directory sourceFolder(sourceFolderPath);
if (FileSys.directoryExists(sourceFolder)) {
// Extract all file paths from the provided folder
_sourceFiles = sourceFolder.readFiles(
ghoul::filesystem::Directory::Recursive::No,
ghoul::filesystem::Directory::Sort::Yes
);
// Ensure that there are available and valid source files left
if (_sourceFiles.empty()) {
LERROR(fmt::format(
"{}: {} contains no {} files",
_identifier, sourceFolderPath, inputFileTypeString
));
return false;
}
}
else {
LERROR(fmt::format(
"{}: {} is not a recognized {}",
_identifier, inputFileTypeString, KeyInputFileType
));
sourceFileType = SourceFileType::Invalid;
return false;
}
}
std::string sourceFolderPath;
if (!_dictionary->getValue(KeySourceFolder, sourceFolderPath)) {
LERROR(fmt::format("{}: The field {} is missing", _identifier, KeySourceFolder));
return false;
}
// Ensure that the source folder exists and then extract
// the files with the same extension as <inputFileTypeString>
ghoul::filesystem::Directory sourceFolder(sourceFolderPath);
if (FileSys.directoryExists(sourceFolder)) {
// Extract all file paths from the provided folder
_sourceFiles = sourceFolder.readFiles(
ghoul::filesystem::Directory::Recursive::No,
ghoul::filesystem::Directory::Sort::Yes
);
// Ensure that there are available and valid source files left
if (_sourceFiles.empty()) {
LERROR(fmt::format(
"{}: {} contains no {} files",
_identifier, sourceFolderPath, inputFileTypeString
"{}: FieldlinesSequence {} is not a valid directory",
_identifier,
sourceFolderPath
));
return false;
}
return true;
}
else {
LERROR(fmt::format(
"{}: FieldlinesSequence {} is not a valid directory",
_identifier,
sourceFolderPath
));
return false;
}
return true;
}
//void RenderableStreamNodes::extractOptionalInfoFromDictionary(
// std::string& outputFolderPath)
//{
// ------------------- EXTRACT OPTIONAL VALUES FROM DICTIONARY ------------------- //
// bool streamsEnabled;
//if (_dictionary->getValue(KeyStreamsEnabled, streamsEnabledValue)) {
//_pStreamsEnabled = streamsEnabledValue;
//void RenderableStreamNodes::extractOptionalInfoFromDictionary(
// std::string& outputFolderPath)
//{
// ------------------- EXTRACT OPTIONAL VALUES FROM DICTIONARY ------------------- //
// bool streamsEnabled;
//if (_dictionary->getValue(KeyStreamsEnabled, streamsEnabledValue)) {
//_pStreamsEnabled = streamsEnabledValue;
//}
//}
//}
bool RenderableStreamNodes::extractJsonInfoFromDictionary(fls::Model& model){
std::string modelStr;
if (_dictionary->getValue(KeyJsonSimulationModel, modelStr)) {
std::transform(
modelStr.begin(),
modelStr.end(),
modelStr.begin(),
[](char c) { return static_cast<char>(::tolower(c)); }
);
model = fls::stringToModel(modelStr);
}
else {
LERROR(fmt::format(
"{}: Must specify '{}'", _identifier, KeyJsonSimulationModel
));
return false;
bool RenderableStreamNodes::extractJsonInfoFromDictionary(fls::Model& model) {
std::string modelStr;
if (_dictionary->getValue(KeyJsonSimulationModel, modelStr)) {
std::transform(
modelStr.begin(),
modelStr.end(),
modelStr.begin(),
[](char c) { return static_cast<char>(::tolower(c)); }
);
model = fls::stringToModel(modelStr);
}
else {
LERROR(fmt::format(
"{}: Must specify '{}'", _identifier, KeyJsonSimulationModel
));
return false;
}
float scaleFactor;
if (_dictionary->getValue(KeyJsonScalingFactor, scaleFactor)) {
_scalingFactor = scaleFactor;
}
else {
LWARNING(fmt::format(
"{}: Does not provide scalingFactor. Assumes coordinates are in meters",
_identifier
));
}
return true;
}
float scaleFactor;
if (_dictionary->getValue(KeyJsonScalingFactor, scaleFactor)) {
_scalingFactor = scaleFactor;
// Registers this renderable's properties (and property groups) so that they are
// exposed to the rest of the engine / user interface.
void RenderableStreamNodes::setupProperties() {
// ----------------------------- Add Property Groups ----------------------------- //
addPropertySubOwner(_pStreamGroup);
// ------------------------- Add Properties to the groups ------------------------- //
_pStreamGroup.addProperty(_pStreamColor);
_pStreamGroup.addProperty(_pNodeSize);
// -------------- Add non-grouped properties (enablers and buttons) -------------- //
addProperty(_pStreamsEnabled);
}
else {
LWARNING(fmt::format(
"{}: Does not provide scalingFactor. Assumes coordinates are in meters",
_identifier
));
// Releases the GL resources (VAO, vertex buffers, shader program) created in
// initializeGL(). Handles are zeroed so a double-deinit is harmless.
// NOTE(review): if a _vertexMaskingBuffer is generated during initialization it
// is not deleted here -- confirm whether it leaks.
void RenderableStreamNodes::deinitializeGL() {
glDeleteVertexArrays(1, &_vertexArrayObject);
_vertexArrayObject = 0;
glDeleteBuffers(1, &_vertexPositionBuffer);
_vertexPositionBuffer = 0;
glDeleteBuffers(1, &_vertexColorBuffer);
_vertexColorBuffer = 0;
// Unregister and drop the shader program; the render engine owns the removal
if (_shaderProgram) {
global::renderEngine.removeRenderProgram(_shaderProgram.get());
_shaderProgram = nullptr;
}
}
return true;
}
void RenderableStreamNodes::setupProperties() {
// -------------- Add non-grouped properties (enablers and buttons) -------------- //
addPropertySubOwner(_pStreamGroup);
_pStreamGroup.addProperty(_pStreamColor);
_pStreamGroup.addProperty(_pStreamsEnabled);
}
void RenderableStreamNodes::deinitializeGL() {
if (_shaderProgram) {
global::renderEngine.removeRenderProgram(_shaderProgram.get());
_shaderProgram = nullptr;
bool RenderableStreamNodes::isReady() const {
return _shaderProgram != nullptr;
}
}
// The renderable is ready to be drawn once its shader program has been built
bool RenderableStreamNodes::isReady() const {
    return static_cast<bool>(_shaderProgram);
}
// Extract J2000 time from file names
// Requires files to be named as such: 'YYYY-MM-DDTHH-MM-SS-XXX.osfls'
void RenderableStreamNodes::extractTriggerTimesFromFileNames() {
// number of characters in filename (excluding '.osfls')
constexpr const int FilenameSize = 23;
// size(".osfls")
constexpr const int ExtSize = 6;
// Extract J2000 time from file names
// Requires files to be named as such: 'YYYY-MM-DDTHH-MM-SS-XXX.osfls'
void RenderableStreamNodes::extractTriggerTimesFromFileNames() {
// number of characters in filename (excluding '.osfls')
constexpr const int FilenameSize = 23;
// size(".osfls")
constexpr const int ExtSize = 6;
for (const std::string& filePath : _sourceFiles) {
const size_t strLength = filePath.size();
// Extract the filename from the path (without extension)
std::string timeString = filePath.substr(
strLength - FilenameSize - ExtSize,
FilenameSize - 1
);
// Ensure the separators are correct
timeString.replace(4, 1, "-");
timeString.replace(7, 1, "-");
timeString.replace(13, 1, ":");
timeString.replace(16, 1, ":");
timeString.replace(19, 1, ".");
const double triggerTime = Time::convertTime(timeString);
_startTimes.push_back(triggerTime);
for (const std::string& filePath : _sourceFiles) {
const size_t strLength = filePath.size();
// Extract the filename from the path (without extension)
std::string timeString = filePath.substr(
strLength - FilenameSize - ExtSize,
FilenameSize - 1
);
// Ensure the separators are correct
timeString.replace(4, 1, "-");
timeString.replace(7, 1, "-");
timeString.replace(13, 1, ":");
timeString.replace(16, 1, ":");
timeString.replace(19, 1, ".");
const double triggerTime = Time::convertTime(timeString);
_startTimes.push_back(triggerTime);
}
}
}
void RenderableStreamNodes::render(const RenderData& data, RendererTasks&) {
if (_activeTriggerTimeIndex != -1) {
_shaderProgram->activate();
void RenderableStreamNodes::render(const RenderData& data, RendererTasks&) {
if (_activeTriggerTimeIndex != -1) {
_shaderProgram->activate();
// Calculate Model View MatrixProjection
const glm::dmat4 rotMat = glm::dmat4(data.modelTransform.rotation);
const glm::dmat4 modelMat =
glm::translate(glm::dmat4(1.0), data.modelTransform.translation) *
rotMat *
glm::dmat4(glm::scale(glm::dmat4(1), glm::dvec3(data.modelTransform.scale)));
const glm::dmat4 modelViewMat = data.camera.combinedViewMatrix() * modelMat;
// Calculate Model View MatrixProjection
const glm::dmat4 rotMat = glm::dmat4(data.modelTransform.rotation);
const glm::dmat4 modelMat =
glm::translate(glm::dmat4(1.0), data.modelTransform.translation) *
rotMat *
glm::dmat4(glm::scale(glm::dmat4(1), glm::dvec3(data.modelTransform.scale)));
const glm::dmat4 modelViewMat = data.camera.combinedViewMatrix() * modelMat;
_shaderProgram->setUniform("modelViewProjection",
data.camera.sgctInternal.projectionMatrix() * glm::mat4(modelViewMat));
_shaderProgram->setUniform("modelViewProjection",
data.camera.sgctInternal.projectionMatrix() * glm::mat4(modelViewMat));
_shaderProgram->setUniform("lineColor", _pStreamColor);
// Flow/Particles
_shaderProgram->setUniform("usingParticles", _pStreamsEnabled);
// Flow/Particles
_shaderProgram->setUniform("streamColor", _pStreamColor);
_shaderProgram->setUniform("usingParticles", _pStreamsEnabled);
_shaderProgram->setUniform("nodeSize", 1);
// how do we set uniform the _fs?
_shaderProgram->setUniform("usingAdditiveBlending", false);
glBindVertexArray(_vertexArrayObject);
}
}
}
void RenderableStreamNodes::update(const UpdateData& data) {
if (_shaderProgram->isDirty()) {
_shaderProgram->rebuildFromFile();
void RenderableStreamNodes::update(const UpdateData& data) {
if (_shaderProgram->isDirty()) {
_shaderProgram->rebuildFromFile();
}
}
}
@@ -370,14 +395,77 @@ void RenderableStreamNodes::update(const UpdateData& data) {
return std::vector<std::string>();
}
*/
std::string data;
std::ifstream streamdata("data.json", std::ifstream::binary);
json jsonobj;
streamdata >> jsonobj;
//'YYYY-MM-DDTHH-MM-SS-XXX.osfls'
std::string filename = "C:/Users/Viktor/Desktop/EmilieOpenSpace/OpenSpace/sync/http/bastille_day_streamnodes/1/newdata.json";
double d = 3.14;
std::ofstream(filename, std::ios::binary).write(reinterpret_cast<char*>(&d), sizeof d)
<< 123 << "abc";
std::ifstream streamdata("C:/Users/Viktor/Desktop/EmilieOpenSpace/OpenSpace/sync/http/bastille_day_streamnodes/1/datawithoutprettyprint.json");
if (!streamdata.is_open())
{
LDEBUG("did not read the data.json file");
}
json jsonobj = json::parse(streamdata);
//json jsonobj;
//streamdata >> jsonobj;
log(ghoul::logging::LogLevel::Debug, _loggerCat, "testar json");
//printDebug(jsonobj["stream0"]);
LDEBUG(jsonobj["stream0"]);
log(ghoul::logging::LogLevel::Debug, _loggerCat, jsonobj["stream0"]);
//LDEBUG(jsonobj["stream0"]);
std::ofstream o("C:/Users/Viktor/Desktop/EmilieOpenSpace/OpenSpace/sync/http/bastille_day_streamnodes/1/newdata2.json");
o << jsonobj << std::endl;
const char* sNode = "node0";
const char* sStream = "stream0";
const char* sData = "data";
const json& jTmp = *(jsonobj.begin()); // First node in the file
const char* sTime = "time";
//double testtime = jTmp[sTime];
std::string testtime = jsonobj["time"];
//double testtime = Time::now();
//const json::value_type& variableNameVec = jTmp[sStream][sNode][sData];
//const size_t nVariables = variableNameVec.size();
size_t lineStartIdx = 0;
//Loop through all the nodes
const int numberofStreams = 4;
const int coordToMeters = 1;
for (int i = 0; i < numberofStreams; i++) {
for (json::iterator lineIter = jsonobj["stream" + std::to_string(i)].begin(); lineIter != jsonobj["stream" + std::to_string(i)].end(); ++lineIter) {
LDEBUG("testar debuggen");
log(ghoul::logging::LogLevel::Debug, _loggerCat, lineIter.key());
LDEBUG("Phi value: " + (*lineIter)["Phi"].get<std::string>());
LDEBUG("Theta value: " + (*lineIter)["Theta"].get<std::string>());
LDEBUG("R value: " + (*lineIter)["R"].get<std::string>());
LDEBUG("Flux value: " + (*lineIter)["Flux"].get<std::string>());
//_vertexPositions.push_back(
// coordToMeters * glm::vec3(
// variables[xIdx],
// variables[yIdx],
// variables[zIdx]
// )
//);
//for (json::iterator lineIter2 = lineIter.begin(); lineIter2 != lineIter.end(); ++lineIter2) {
}
}
//log(ghoul::logging::LogLevel::Debug, _loggerCat, lineIter.value());
// }
// for (auto& el : jsonobj.items())
// {
// LDEBUG(el.key());
// }
log(ghoul::logging::LogLevel::Debug, _loggerCat, testtime);
//openspace::printDebug("testar json"):
//for
//LWARNING(fmt::format("Testar json", data));
@@ -385,9 +473,94 @@ void RenderableStreamNodes::update(const UpdateData& data) {
return std::vector<std::string>();
}
/*
bool FieldlinesState::loadStateFromJson(const std::string& pathToJsonFile,
fls::Model Model, float coordToMeters)
{
bool RenderableStreamNodes::loadJsonStatesIntoRAM(const std::string& outputFolder) {
return true;
}
// --------------------- ENSURE FILE IS VALID, THEN PARSE IT --------------------- //
std::ifstream ifs(pathToJsonFile);
if (!ifs.is_open()) {
LERROR(fmt::format("FAILED TO OPEN FILE: {}", pathToJsonFile));
return false;
}
json jFile;
ifs >> jFile;
// -------------------------------------------------------------------------------- //
_model = Model;
const char* sData = "data";
const char* sTrace = "trace";
// ----- EXTRACT THE EXTRA QUANTITY NAMES & TRIGGER TIME (same for all lines) ----- //
{
const char* sTime = "time";
const json& jTmp = *(jFile.begin()); // First field line in the file
_triggerTime = Time::convertTime(jTmp[sTime]);
const char* sColumns = "columns";
const json::value_type& variableNameVec = jTmp[sTrace][sColumns];
const size_t nVariables = variableNameVec.size();
const size_t nPosComponents = 3; // x,y,z
if (nVariables < nPosComponents) {
LERROR(
pathToJsonFile + ": Each field '" + sColumns +
"' must contain the variables: 'x', 'y' and 'z' (order is important)."
);
return false;
}
for (size_t i = nPosComponents; i < nVariables; ++i) {
_extraQuantityNames.push_back(variableNameVec[i]);
}
}
const size_t nExtras = _extraQuantityNames.size();
_extraQuantities.resize(nExtras);
size_t lineStartIdx = 0;
// Loop through all fieldlines
for (json::iterator lineIter = jFile.begin(); lineIter != jFile.end(); ++lineIter) {
// The 'data' field in the 'trace' variable contains all vertex positions and the
// extra quantities. Each element is an array related to one vertex point.
const std::vector<std::vector<float>>& jData = (*lineIter)[sTrace][sData];
const size_t nPoints = jData.size();
for (size_t j = 0; j < nPoints; ++j) {
const std::vector<float>& variables = jData[j];
// Expects the x, y and z variables to be stored first!
const size_t xIdx = 0;
const size_t yIdx = 1;
const size_t zIdx = 2;
_vertexPositions.push_back(
coordToMeters * glm::vec3(
variables[xIdx],
variables[yIdx],
variables[zIdx]
)
);
// Add the extra quantites. Stored in the same array as the x,y,z variables.
// Hence index of the first extra quantity = 3
for (size_t xtraIdx = 3, k = 0; k < nExtras; ++k, ++xtraIdx) {
_extraQuantities[k].push_back(variables[xtraIdx]);
}
}
_lineCount.push_back(static_cast<GLsizei>(nPoints));
_lineStart.push_back(static_cast<GLsizei>(lineStartIdx));
lineStartIdx += nPoints;
}
return true;
}
*/
// TODO(review): stub -- JSON states are not actually loaded into RAM yet; this
// always reports success and `outputFolder` is currently unused.
bool RenderableStreamNodes::loadJsonStatesIntoRAM(const std::string& outputFolder) {
return true;
}
} // namespace openspace