diff --git a/include/openspace/util/spicemanager.h b/include/openspace/util/spicemanager.h
index 68e057dd25..158ee9562e 100644
--- a/include/openspace/util/spicemanager.h
+++ b/include/openspace/util/spicemanager.h
@@ -461,6 +461,32 @@ public:
         AberrationCorrection aberrationCorrection, double ephemerisTime,
         double& lightTime) const;
 
+    /**
+     * Returns the position of a \p target body relative to an \p observer in a
+     * specific \p referenceFrame, optionally corrected for light time (planetary
+     * aberration) and stellar aberration (\p aberrationCorrection).
+     * \param target The target body name or the target body's NAIF ID
+     * \param observer The observing body name or the observing body's NAIF ID
+     * \param referenceFrame The reference frame of the output position vector
+     * \param aberrationCorrection The aberration correction used for the position
+     * calculation
+     * \param ephemerisTime The time at which the position is to be queried
+     * \return The position of the \p target relative to the \p observer in the specified
+     * \p referenceFrame
+     * \throws SpiceException If \p target or \p observer do not name a valid NAIF
+     * object, if \p referenceFrame does not name a valid reference frame, if there is
+     * not sufficient data available to compute the position, or if neither the target
+     * nor the observer has coverage.
+     * \pre \p target must not be empty.
+     * \pre \p observer must not be empty.
+     * \pre \p referenceFrame must not be empty.
+     * \sa http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkpos_c.html
+     * \sa http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/req/naif_ids.html
+     */
+    glm::dvec3 targetPosition(const std::string& target,
+        const std::string& observer, const std::string& referenceFrame,
+        AberrationCorrection aberrationCorrection, double ephemerisTime) const;
+
     /**
      * This method returns the transformation matrix that defines the transformation from
      * the reference frame \p from to the reference frame \p to.
As both reference frames diff --git a/modules/newhorizons/newhorizonsmodule.cpp b/modules/newhorizons/newhorizonsmodule.cpp index 476e948ad1..12eaf6f95c 100644 --- a/modules/newhorizons/newhorizonsmodule.cpp +++ b/modules/newhorizons/newhorizonsmodule.cpp @@ -72,6 +72,7 @@ void NewHorizonsModule::internalInitialize() { std::vector NewHorizonsModule::documentations() const { return { + RenderableFov::Documentation(), RenderableModelProjection::Documentation(), RenderablePlanetProjection::Documentation(), ProjectionComponent::Documentation() diff --git a/modules/newhorizons/rendering/renderablecrawlingline.cpp b/modules/newhorizons/rendering/renderablecrawlingline.cpp index ebbc4183c1..c6712731e1 100644 --- a/modules/newhorizons/rendering/renderablecrawlingline.cpp +++ b/modules/newhorizons/rendering/renderablecrawlingline.cpp @@ -24,26 +24,84 @@ #include +#include +#include #include #include #include #include namespace { - const std::string _loggerCat = "RenderableCrawlingLine"; - const char* KeySource = "Source"; const char* KeyTarget = "Target"; const char* KeyInstrument = "Instrument"; - const char* KeyReferenceFrame = "Frame"; - const char* KeyColor = "RGB"; + const char* KeyColor = "Color"; + const char* KeyColorStart = "Start"; + const char* KeyColorEnd = "End"; - static const int SourcePosition = 0; - static const int TargetPosition = 1; -} + struct VBOData { + float position[3]; + float color[4]; + }; + +} // namespace namespace openspace { +documentation::Documentation RenderableCrawlingLine::Documentation() { + using namespace documentation; + return { + "RenderableCrawlingLine", + "newhorizons_renderable_crawlingline", + { + { + KeySource, + new StringVerifier, + "Denotes the SPICE name of the source of the renderable crawling line, " + "for example, the space craft", + Optional::No + }, + { + KeyTarget, + new StringVerifier, + "Denotes the SPICE name of the target of the crawling line", + Optional::Yes + }, + { + KeyInstrument, + new StringVerifier, + "Denotes the SPICE name of the instrument that is used to render the " + "crawling line", + Optional::No + }, + { + KeyColor, + new TableVerifier({ + { + { + KeyColorStart, + new DoubleVector4Verifier, + "The color at the start of the line", + Optional::No + }, + { + KeyColorEnd, + new DoubleVector4Verifier, + "The color at the end of the line", + Optional::No + } + }, + Exhaustive::Yes + }), + "Specifies the colors that are used for the crawling line. One value " + "determines the starting color of the line, the second value is the " + "color at the end of the line.", + Optional::No + } + } + }; +} + RenderableCrawlingLine::RenderableCrawlingLine(const ghoul::Dictionary& dictionary) : Renderable(dictionary) , _program(nullptr) @@ -53,54 +111,60 @@ RenderableCrawlingLine::RenderableCrawlingLine(const ghoul::Dictionary& dictiona , _frameCounter(0) , _drawLine(false) { - dictionary.getValue(KeySource, _source); - dictionary.getValue(KeyTarget, _target); - dictionary.getValue(KeyInstrument, _instrumentName); - dictionary.getValue(KeyReferenceFrame, _referenceFrame); + documentation::testSpecificationAndThrow( + Documentation(), + dictionary, + "RenderableCrawlingLine" + ); + _source = dictionary.value(KeySource); + _target = dictionary.value(KeyTarget); + _instrumentName = dictionary.value(KeyInstrument); - if (dictionary.hasKeyAndValue(KeyColor)) { - dictionary.getValue(KeyColor, _lineColor); - } - else { - _lineColor = glm::vec3(1); - } + _lineColorBegin = dictionary.value( + std::string(KeyColor) + "." 
+ KeyColorStart + ); + + _lineColorEnd = dictionary.value( + std::string(KeyColor) + "." + KeyColorEnd + ); } bool RenderableCrawlingLine::isReady() const { - bool ready = true; - ready &= !_source.empty(); - ready &= !_target.empty(); - ready &= !_instrumentName.empty(); - ready &= (_program != nullptr); - return ready; + return (_program != nullptr); } bool RenderableCrawlingLine::initialize() { - bool completeSuccess = true; - RenderEngine& renderEngine = OsEng.renderEngine(); - _program = renderEngine.buildRenderProgram("RenderableCrawlingLine", + _program = renderEngine.buildRenderProgram( + "RenderableCrawlingLine", "${MODULE_NEWHORIZONS}/shaders/crawlingline_vs.glsl", - "${MODULE_NEWHORIZONS}/shaders/crawlingline_fs.glsl"); - - - if (!_program) - return false; + "${MODULE_NEWHORIZONS}/shaders/crawlingline_fs.glsl" + ); glGenVertexArrays(1, &_vao); glGenBuffers(1, &_vbo); glBindVertexArray(_vao); glBindBuffer(GL_ARRAY_BUFFER, _vbo); - glBufferData(GL_ARRAY_BUFFER, 2 * sizeof(psc), NULL, GL_DYNAMIC_DRAW); + glBufferData(GL_ARRAY_BUFFER, 2 * sizeof(VBOData), NULL, GL_DYNAMIC_DRAW); glEnableVertexAttribArray(0); - glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, (void*)0); + glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, sizeof(VBOData), (void*)0); + + glEnableVertexAttribArray(1); + glVertexAttribPointer( + 1, + 4, + GL_FLOAT, + GL_FALSE, + sizeof(VBOData), + reinterpret_cast(offsetof(VBOData, color)) + ); glBindVertexArray(0); - return completeSuccess; + return true; } bool RenderableCrawlingLine::deinitialize(){ @@ -119,73 +183,107 @@ bool RenderableCrawlingLine::deinitialize(){ } void RenderableCrawlingLine::render(const RenderData& data) { - if (_drawLine) { - _program->activate(); - _frameCounter++; - // fetch data - psc currentPosition = data.position; - psc campos = data.camera.position(); - glm::mat4 camrot = glm::mat4(data.camera.viewRotationMatrix()); - - glm::mat4 transform = glm::mat4(1); - - // setup the data to the shader - _program->setUniform("ViewProjection", data.camera.viewProjectionMatrix()); - _program->setUniform("ModelTransform", transform); - - int frame = _frameCounter % 60; - float fadingFactor = static_cast(sin((frame * 3.14159) / 60)); - float alpha = 0.6f + fadingFactor*0.4f; - - glLineWidth(2.f); - - _program->setUniform("_alpha", alpha); - _program->setUniform("color", _lineColor); - setPscUniforms(*_program.get(), data.camera, data.position); - - glBindVertexArray(_vao); - glBindBuffer(GL_ARRAY_BUFFER, _vbo); - glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(psc) * 2, _positions); - - glEnableVertexAttribArray(0); - glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0); - - glDrawArrays(GL_LINES, 0, 2); - glBindVertexArray(0); - - _program->deactivate(); + if (!_drawLine) { + return; } + + _program->activate(); + _frameCounter++; + + glm::dmat4 modelTransform = + glm::translate(glm::dmat4(1.0), data.modelTransform.translation) * // Translation + glm::dmat4(data.modelTransform.rotation) * // Spice rotation + glm::scale(glm::dmat4(1.0), glm::dvec3(data.modelTransform.scale)); + + glm::dmat4 modelViewProjectionTransform = + data.camera.projectionMatrix() * + glm::mat4(data.camera.combinedViewMatrix() * + modelTransform + ) + ; + //glm::dmat4 modelViewTransform = data.camera.combinedViewMatrix() * modelTransform; + + // setup the data to the shader + //_program->setUniform("modelViewTransform", glm::mat4(modelViewTransform)); + //_program->setUniform("projectionTransform", data.camera.projectionMatrix()); + _program->setUniform("modelViewProjection", 
modelViewProjectionTransform); + + int frame = _frameCounter % 60; + float fadingFactor = static_cast(sin((frame * 3.14159) / 60)); + float alpha = 0.6f + fadingFactor*0.4f; + + glLineWidth(2.f); + + _program->setUniform("_alpha", alpha); + //_program->setUniform("color", _lineColor); + //setPscUniforms(*_program.get(), data.camera, data.position); + + glBindVertexArray(_vao); + + glDrawArrays(GL_LINES, 0, 2); + glBindVertexArray(0); + + _program->deactivate(); } void RenderableCrawlingLine::update(const UpdateData& data) { - if (_program->isDirty()) + if (_program->isDirty()) { _program->rebuildFromFile(); - glm::dmat3 transformMatrix = SpiceManager::ref().positionTransformMatrix(_source, _referenceFrame, data.time); - - glm::mat4 tmp = glm::mat4(1); - for (int i = 0; i < 3; i++) { - for (int j = 0; j < 3; j++){ - tmp[i][j] = static_cast(transformMatrix[i][j]); - } } - _positions[SourcePosition] = PowerScaledCoordinate::CreatePowerScaledCoordinate(0, 0, 0); + glm::dmat3 transformMatrix = SpiceManager::ref().positionTransformMatrix( + _source, + //"ECLIPJ2000", + "GALACTIC", + data.time + ); + + glm::dmat3 tm = SpiceManager::ref().frameTransformationMatrix(_instrumentName, "ECLIPJ2000", data.time); + + //_positions[SourcePosition] = { 0.f, 0.f, 0.f, 0.f }; glm::dvec3 boresight; - try { + //try { SpiceManager::FieldOfViewResult res = SpiceManager::ref().fieldOfView(_source); boresight = res.boresightVector; - - } - catch (const SpiceManager::SpiceException& e) { - LERROR(e.what()); - } + + //} + //catch (const SpiceManager::SpiceException& e) { + //LERROR(e.what()); + //} glm::vec4 target(boresight[0], boresight[1], boresight[2], 12); - target = tmp * target; + //target = glm::dmat4(tm) * target; - _positions[TargetPosition] = target; + //_positions[TargetPosition] = target; + //_positions[TargetPosition] = { + // target.x * pow(10, target.w), + // target.y * pow(10, target.w), + // target.z * pow(10, target.w), + // 0 + //}; + + VBOData vboData[2] = { + { + { 0.f, 0.f, 0.f }, + _lineColorBegin.r, _lineColorBegin.g, _lineColorBegin.b, _lineColorBegin.a + }, + { + { target.x * pow(10, target.w), target.y * pow(10, target.w), target.z * pow(10, target.w) }, + { _lineColorEnd.r, _lineColorEnd.g, _lineColorEnd.b, _lineColorEnd.a } + } + }; + + + glBindBuffer(GL_ARRAY_BUFFER, _vbo); + glBufferSubData( + GL_ARRAY_BUFFER, + 0, + 2 * sizeof(VBOData), + vboData + ); + //glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(psc) * 2, _positions); if (ImageSequencer::ref().isReady()) { _imageSequenceTime = ImageSequencer::ref().instrumentActiveTime(_instrumentName); @@ -193,5 +291,4 @@ void RenderableCrawlingLine::update(const UpdateData& data) { } } - -} +} // namespace openspace diff --git a/modules/newhorizons/rendering/renderablecrawlingline.h b/modules/newhorizons/rendering/renderablecrawlingline.h index 06753a995c..1f350834e5 100644 --- a/modules/newhorizons/rendering/renderablecrawlingline.h +++ b/modules/newhorizons/rendering/renderablecrawlingline.h @@ -27,8 +27,12 @@ #include +#include + namespace openspace { +namespace documentation { struct Documentation; } + class RenderableCrawlingLine : public Renderable { public: RenderableCrawlingLine(const ghoul::Dictionary& dictionary); @@ -41,6 +45,8 @@ public: void render(const RenderData& data) override; void update(const UpdateData& data) override; + static documentation::Documentation Documentation(); + private: std::unique_ptr _program; @@ -48,9 +54,10 @@ private: std::string _source; std::string _target; std::string _referenceFrame; - glm::vec3 
_lineColor; + + glm::vec4 _lineColorBegin; + glm::vec4 _lineColorEnd; - psc _positions[2]; int _frameCounter; bool _drawLine; diff --git a/modules/newhorizons/rendering/renderablefov.cpp b/modules/newhorizons/rendering/renderablefov.cpp index 4b8623989b..036e0d5156 100644 --- a/modules/newhorizons/rendering/renderablefov.cpp +++ b/modules/newhorizons/rendering/renderablefov.cpp @@ -26,6 +26,8 @@ #include +#include +#include #include #include @@ -36,294 +38,798 @@ #include namespace { - const std::string _loggerCat = "RenderableFov"; + const char* KeyBody = "Body"; + const char* KeyFrame = "Frame"; + const char* KeyColor = "RGB"; - const char* keyBody = "Body"; - const char* keyFrame = "Frame"; - const char* keyPathModule = "ModulePath"; - const char* keyColor = "RGB"; - const char* keyInstrument = "Instrument.Name"; - const char* keyInstrumentMethod = "Instrument.Method"; - const char* keyInstrumentAberration = "Instrument.Aberration"; - const char* keyPotentialTargets = "PotentialTargets"; - const char* keyFrameConversions = "FrameConversions"; + const char* KeyInstrument = "Instrument"; + const char* KeyInstrumentName = "Name"; + const char* KeyInstrumentAberration = "Aberration"; + + const char* KeyPotentialTargets = "PotentialTargets"; + const char* KeyFrameConversions = "FrameConversions"; const int InterpolationSteps = 10; - const int Stride = 8; -} +} // namespace namespace openspace { +documentation::Documentation RenderableFov::Documentation() { + using namespace documentation; + return { + "RenderableFieldOfView", + "newhorizons_renderable_fieldofview", + { + { + KeyBody, + new StringVerifier, + "The SPICE name of the source body for which the field of view should be " + "rendered.", + Optional::No + }, + { + KeyFrame, + new StringVerifier, + "The SPICE name of the source body's frame in which the field of view " + "should be rendered.", + Optional::No + }, + { + KeyInstrument, + new TableVerifier({ + { + KeyInstrumentName, + new StringVerifier, + "The SPICE name of the instrument that is rendered", + Optional::No + }, + { + KeyInstrumentAberration, + new StringInListVerifier({ + // Taken from SpiceManager::AberrationCorrection + "NONE", + "LT", "LT+S", + "CN", "CN+S", + "XLT", "XLT+S", + "XCN", "XCN+S" + }), + "The aberration correction that is used for this field of view. 
" + "The default is 'NONE'.", + Optional::Yes + } + }), + "A table describing the instrument whose field of view should be " + "rendered.", + Optional::No + }, + { + KeyPotentialTargets, + new StringListVerifier, + "A list of potential targets (specified as SPICE names) that the field " + "of view should be tested against.", + Optional::No + }, + { + KeyFrameConversions, + new TableVerifier({ + { + DocumentationEntry::Wildcard, + new StringVerifier + } + }), + "A list of frame conversions that should be registered with the " + "SpiceManager.", + Optional::Yes + } + } + }; +} + + RenderableFov::RenderableFov(const ghoul::Dictionary& dictionary) : Renderable(dictionary) , _lineWidth("lineWidth", "Line Width", 1.f, 1.f, 20.f) , _drawSolid("solidDraw", "Draw as Quads", false) + , _colors({ + { + "colors.defaultStart", + "Start of default color", + glm::vec4(0.4f) + }, + { + "colors.defaultEnd", + "End of default color", + glm::vec4(0.85f, 0.85f, 0.85f, 1.f) + }, + { + "colors.active", + "Active Color", + glm::vec4(0.f, 1.f, 0.f, 1.f) + }, + { + "colors.targetInFieldOfView", + "Target-in-field-of-view Color", + glm::vec4(0.f, 0.5f, 0.7f, 1.f) + }, + { + "colors.intersectionStart", + "Start of the intersection", + glm::vec4(1.f, 0.89f, 0.f, 1.f) + }, + { + "colors.intersectionEnd", + "End of the intersection", + glm::vec4(1.f, 0.29f, 0.f, 1.f) + }, + { + "colors.square", + "Orthogonal Square", + glm::vec4(0.85f, 0.85f, 0.85f, 1.f) + } + }) , _programObject(nullptr) - , _texture(nullptr) , _drawFOV(false) - , _mode(GL_LINES) - //, _interceptTag{false, false, false, false, false, false, false, false} - , _withinFOV(false) - , _vBoundsSize(0) - , _vPlaneSize(40) { - bool success = dictionary.getValue(keyBody, _spacecraft); - ghoul_assert(success, ""); + documentation::testSpecificationAndThrow( + Documentation(), + dictionary, + "RenderableFov" + ); + + _instrument.spacecraft = dictionary.value(KeyBody); + _instrument.referenceFrame = dictionary.value(KeyFrame); + + _instrument.name = dictionary.value( + std::string(KeyInstrument) + "." + KeyInstrumentName + ); - success = dictionary.getValue(keyFrame, _frame); - ghoul_assert(success, ""); - - success = dictionary.getValue(keyInstrument, _instrumentID); - ghoul_assert(success, ""); - -// success = dictionary.getValue(keyInstrumentMethod, _method); -// ghoul_assert(success, ""); - - std::string a = "NONE"; - success = dictionary.getValue(keyInstrumentAberration, a); - a = SpiceManager::AberrationCorrection(a); - ghoul_assert(success, ""); - - ghoul::Dictionary potentialTargets; - success = dictionary.getValue(keyPotentialTargets, potentialTargets); - ghoul_assert(success, ""); - - _potentialTargets.resize(potentialTargets.size()); - for (int i = 0; i < potentialTargets.size(); ++i) { - std::string target; - potentialTargets.getValue(std::to_string(i + 1), target); - _potentialTargets[i] = target; + std::string ia = std::string(KeyInstrument) + "." 
+        KeyInstrumentAberration;
+    if (dictionary.hasKey(ia)) {
+        std::string ac = dictionary.value<std::string>(ia);
+        _instrument.aberrationCorrection = SpiceManager::AberrationCorrection(ac);
     }
 
-    ghoul::Dictionary frameConversions;
-    success = dictionary.getValue(keyFrameConversions, frameConversions);
-    if (success) {
-        for (const std::string& key : frameConversions.keys()) {
+    ghoul::Dictionary pt = dictionary.value<ghoul::Dictionary>(KeyPotentialTargets);
+    _instrument.potentialTargets.reserve(pt.size());
+    for (int i = 1; i <= pt.size(); ++i) {
+        std::string target = pt.value<std::string>(std::to_string(i));
+        _instrument.potentialTargets.push_back(target);
+    }
+
+    if (dictionary.hasKey(KeyFrameConversions)) {
+        ghoul::Dictionary fc = dictionary.value<ghoul::Dictionary>(KeyFrameConversions);
+        for (const std::string& key : fc.keys()) {
             openspace::SpiceManager::ref().addFrame(
                 key,
-                frameConversions.value(key)
+                fc.value<std::string>(key)
             );
         }
     }
 
     addProperty(_lineWidth);
     addProperty(_drawSolid);
-}
-void RenderableFov::allocateData() {
-    // fetch data for specific instrument (shape, boresight, bounds etc)
-    try {
-        SpiceManager::FieldOfViewResult res = SpiceManager::ref().fieldOfView(_instrumentID);
-
-        _bounds = std::move(res.bounds);
-        _boresight = std::move(res.boresightVector);
-
-        _projectionBounds.resize(_bounds.size());
-        int initBoundPoints = 2 * (_bounds.size() + 1);
-        _fovBounds.resize(initBoundPoints * Stride);
-        _vBoundsSize = static_cast<int>(_fovBounds.size());
-        // allocate second vbo data
-        _fovPlane.resize(_vPlaneSize);
-
-    }
-    catch (const SpiceManager::SpiceException& e) {
-        LERROR(e.what());
-    }
+    addProperty(_colors.defaultStart);
+    addProperty(_colors.defaultEnd);
+    addProperty(_colors.active);
+    addProperty(_colors.targetInFieldOfView);
+    addProperty(_colors.intersectionStart);
+    addProperty(_colors.intersectionEnd);
+    addProperty(_colors.square);
 }
 
 bool RenderableFov::initialize() {
-    bool completeSuccess = true;
-    if (_programObject == nullptr) {
+    RenderEngine& renderEngine = OsEng.renderEngine();
+    _programObject = renderEngine.buildRenderProgram(
+        "FovProgram",
+        "${MODULE_NEWHORIZONS}/shaders/fov_vs.glsl",
+        "${MODULE_NEWHORIZONS}/shaders/fov_fs.glsl"
+    );
 
-        RenderEngine& renderEngine = OsEng.renderEngine();
-        _programObject = renderEngine.buildRenderProgram("FovProgram",
-            "${MODULE_NEWHORIZONS}/shaders/fov_vs.glsl",
-            "${MODULE_NEWHORIZONS}/shaders/fov_fs.glsl");
+    // Fetch information about the specific instrument
+    SpiceManager::FieldOfViewResult res = SpiceManager::ref().fieldOfView(_instrument.name);
 
-        if (!_programObject)
-            return false;
+    // Right now, we can only deal with rectangles or polygons. Circles and ellipses only
+    // return one or two bound vectors that have to be used to construct an approximation
+    const bool supportedShape =
+        res.shape == SpiceManager::FieldOfViewResult::Shape::Polygon ||
+        res.shape == SpiceManager::FieldOfViewResult::Shape::Rectangle;
+    if (!supportedShape) {
+        LWARNINGC("RenderableFov", "'" << _instrument.name << "' has unsupported shape");
+        return false;
     }
-    allocateData();
-    sendToGPU();
-    return completeSuccess;
+
+    _instrument.bounds = std::move(res.bounds);
+    _instrument.boresight = std::move(res.boresightVector);
+
+    // These vectors hold the data that we will want to render.
We need to subdivide the + // range as an intersection test between the corners and the object might fail for + // sufficiently small objects: + // + // x---------------------x Field of view + // | | + // | | + // | ***** | + // | * * | + // x-----*-------*-------x + // * * + // ***** Target object + // + + // The orthogonal plane shows the footprint of the instrument on the surface of the + // object. Since it should follow the potential curvature, we might need to + // interpolate, hence the extra storage + _orthogonalPlane.data.resize(_instrument.bounds.size() * InterpolationSteps); + + // The field of views are originating from the space craft, so the location of the + // space craft has to be repeated for each vertex, hence the * 2. On the other hand, + // the field of view does **not** need to be interpolated + _fieldOfViewBounds.data.resize(2 * _instrument.bounds.size()); + + // Field of view boundaries + glGenVertexArrays(1, &_fieldOfViewBounds.vao); + glBindVertexArray(_fieldOfViewBounds.vao); + glGenBuffers(1, &_fieldOfViewBounds.vbo); + glBindBuffer(GL_ARRAY_BUFFER, _fieldOfViewBounds.vbo); + glBufferData( + GL_ARRAY_BUFFER, + _fieldOfViewBounds.data.size() * sizeof(RenderInformation::VBOData), + NULL, + GL_STREAM_DRAW + ); + glEnableVertexAttribArray(0); + glVertexAttribPointer( + 0, + 3, + GL_FLOAT, + GL_FALSE, + sizeof(RenderInformation::VBOData), + reinterpret_cast(offsetof(RenderInformation::VBOData, position)) + ); + glEnableVertexAttribArray(1); + glVertexAttribIPointer( + 1, + 1, + GL_INT, + sizeof(RenderInformation::VBOData), + reinterpret_cast(offsetof(RenderInformation::VBOData, color)) + ); + + // Orthogonal Plane + glGenVertexArrays(1, &_orthogonalPlane.vao); + glGenBuffers(1, &_orthogonalPlane.vbo); + glBindVertexArray(_orthogonalPlane.vao); + glBindBuffer(GL_ARRAY_BUFFER, _orthogonalPlane.vbo); + glBufferData( + GL_ARRAY_BUFFER, + _orthogonalPlane.data.size() * sizeof(RenderInformation::VBOData), + NULL, + GL_STREAM_DRAW + ); + + glEnableVertexAttribArray(0); + glVertexAttribPointer( + 0, + 3, + GL_FLOAT, + GL_FALSE, + sizeof(RenderInformation::VBOData), + reinterpret_cast(offsetof(RenderInformation::VBOData, position)) + ); + glEnableVertexAttribArray(1); + glVertexAttribIPointer( + 1, + 1, + GL_INT, + sizeof(RenderInformation::VBOData), + reinterpret_cast(offsetof(RenderInformation::VBOData, color)) + ); + + glBindVertexArray(0); + + return true; } bool RenderableFov::deinitialize() { - RenderEngine& renderEngine = OsEng.renderEngine(); - if (_programObject) { - renderEngine.removeRenderProgram(_programObject); - _programObject = nullptr; - } + OsEng.renderEngine().removeRenderProgram(_programObject); + _programObject = nullptr; + + glDeleteBuffers(1, &_orthogonalPlane.vbo); + glDeleteVertexArrays(1, &_orthogonalPlane.vao); + + glDeleteBuffers(1, &_fieldOfViewBounds.vbo); + glDeleteVertexArrays(1, &_fieldOfViewBounds.vao); return true; } bool RenderableFov::isReady() const { - return _programObject != nullptr && !_bounds.empty(); + return _programObject != nullptr && !_instrument.bounds.empty(); } -void RenderableFov::sendToGPU() { - // Initialize and upload to graphics card - - // FOV lines - glGenVertexArrays(1, &_fovBoundsVAO); - glGenBuffers(1, &_fovBoundsVBO); - glBindVertexArray(_fovBoundsVAO); - glBindBuffer(GL_ARRAY_BUFFER, _fovBoundsVBO); - glBufferData(GL_ARRAY_BUFFER, _vBoundsSize * sizeof(GLfloat), NULL, GL_STATIC_DRAW); // orphaning the buffer, sending NULL data. 
- glBufferSubData(GL_ARRAY_BUFFER, 0, _vBoundsSize * sizeof(GLfloat), _fovBounds.data()); - - GLsizei st = sizeof(GLfloat) * Stride; - - glEnableVertexAttribArray(0); - glEnableVertexAttribArray(1); - glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, st, (void*)0); - glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, st, (void*)(4 * sizeof(GLfloat))); - - glBindVertexArray(0); - - // Orthogonal Plane - glGenVertexArrays(1, &_fovPlaneVAO); - glGenBuffers(1, &_fovPlaneVBO); - - glBindVertexArray(_fovPlaneVAO); - glBindBuffer(GL_ARRAY_BUFFER, _fovPlaneVBO); - glBufferData(GL_ARRAY_BUFFER, _vPlaneSize * sizeof(GLfloat), NULL, GL_STATIC_DRAW); // orphaning the buffer, sending NULL data. - glBufferSubData(GL_ARRAY_BUFFER, 0, _vPlaneSize * sizeof(GLfloat), _fovPlane.data()); - - glEnableVertexAttribArray(0); - glEnableVertexAttribArray(1); - glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, st, (void*)0); - glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, st, (void*)(4 * sizeof(GLfloat))); - - glBindVertexArray(0); -} - -void RenderableFov::updateGPU() { - PerfMeasure("updateGPU"); - glBindBuffer(GL_ARRAY_BUFFER, _fovBoundsVBO); - glBufferSubData(GL_ARRAY_BUFFER, 0, _vBoundsSize * sizeof(GLfloat), _fovBounds.data()); - if (!_rebuild) { - // no new points - glBindBuffer(GL_ARRAY_BUFFER, _fovPlaneVBO); - glBufferSubData(GL_ARRAY_BUFFER, 0, _vPlaneSize * sizeof(GLfloat), _fovPlane.data()); - } - else { - // new points - memory change - glBindVertexArray(_fovPlaneVAO); - glBindBuffer(GL_ARRAY_BUFFER, _fovPlaneVBO); - glBufferData(GL_ARRAY_BUFFER, _vPlaneSize * sizeof(GLfloat), NULL, GL_STATIC_DRAW); // orphaning the buffer, sending NULL data. - glBufferSubData(GL_ARRAY_BUFFER, 0, _vPlaneSize * sizeof(GLfloat), _fovPlane.data()); - - GLsizei st = sizeof(GLfloat) * Stride; - glEnableVertexAttribArray(0); - glEnableVertexAttribArray(1); - glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, st, (void*)0); - glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, st, (void*)(4 * sizeof(GLfloat))); - } - - glBindVertexArray(0); -} - -// various helper methods -void RenderableFov::insertPoint(std::vector& arr, glm::vec4 p, glm::vec4 c) { - for (int i = 0; i < 4; i++){ - arr.push_back(p[i]); - } - for (int i = 0; i < 4; i++){ - arr.push_back(c[i]); - } - _nrInserted++; -} - -glm::dvec3 RenderableFov::interpolate(glm::dvec3 p0, glm::dvec3 p1, float t) { - assert(t >= 0 && t <= 1); - float t2 = (1.f - t); - return glm::dvec3(p0.x*t2 + p1.x*t, p0.y*t2 + p1.y*t, p0.z*t2 + p1.z*t); -} - - -// This method is the current bottleneck. 
-psc RenderableFov::checkForIntercept(glm::dvec3 ray) {
-    std::string bodyfixed = "IAU_";
-    bool convert = (_frame.find(bodyfixed) == std::string::npos);
-    if (convert)
-        bodyfixed = SpiceManager::ref().frameFromBody(_fovTarget);
-    else
-        bodyfixed = _frame;
-
-    SpiceManager::SurfaceInterceptResult result = SpiceManager::ref().surfaceIntercept(
-        _fovTarget, _spacecraft, _instrumentID, bodyfixed, _aberrationCorrection, _time, ray);
-
-    if (convert) {
-        result.surfaceVector = SpiceManager::ref().frameTransformationMatrix(bodyfixed, _frame, _time) * result.surfaceVector;
-    }
-
-    ipoint = result.surfaceIntercept;
-    ivec = result.surfaceVector;
-//    bool intercepted = result.interceptFound;
-
-    ivec *= 0.9999;// because fov lands exactly on top of surface we need to move it out slightly
-    _interceptVector = PowerScaledCoordinate::CreatePowerScaledCoordinate(ivec[0], ivec[1], ivec[2]);
-    _interceptVector[3] += 3;
-
-    return _interceptVector;
-}
 
 // Orthogonal projection next to planets surface
-psc RenderableFov::orthogonalProjection(glm::dvec3 vecFov) {
-    double lt;
-    glm::dvec3 vecToTarget =
-        SpiceManager::ref().targetPosition(_fovTarget, _spacecraft, _frame, _aberrationCorrection, _time, lt);
-    vecFov = SpiceManager::ref().frameTransformationMatrix(_instrumentID, _frame, _time) * vecFov;
-    glm::dvec3 p = glm::proj(vecToTarget, vecFov);
-
-    psc projection = PowerScaledCoordinate::CreatePowerScaledCoordinate(p[0], p[1], p[2]);
-    projection[3] += 3;
-
-    return projection;
+glm::dvec3 RenderableFov::orthogonalProjection(const glm::dvec3& vecFov, double time, const std::string& target) const {
+    glm::dvec3 vecToTarget = SpiceManager::ref().targetPosition(target, _instrument.spacecraft, _instrument.referenceFrame, _instrument.aberrationCorrection, time);
+    glm::dvec3 fov = SpiceManager::ref().frameTransformationMatrix(_instrument.name, _instrument.referenceFrame, time) * vecFov;
+    glm::dvec3 p = glm::proj(vecToTarget, fov);
+    return p * 1000.0; // km -> m
 }
 
-// Bisection method, simple recurtion
-glm::dvec3 RenderableFov::bisection(glm::dvec3 p1, glm::dvec3 p2) {
-    const double Tolerance = 0.000000001; // very low tolerance factor
-
-    //check if point is on surface
-    glm::dvec3 half = interpolate(p1, p2, 0.5f);
-
-    std::string bodyfixed = "IAU_";
-    bool convert = (_frame.find(bodyfixed) == std::string::npos);
-    if (convert)
-        bodyfixed = SpiceManager::ref().frameFromBody(_fovTarget);
-    else
-        bodyfixed = _frame;
-
-    SpiceManager::SurfaceInterceptResult result = SpiceManager::ref().surfaceIntercept(
-        _fovTarget, _spacecraft, _instrumentID, bodyfixed, _aberrationCorrection, _time, half);
-
-    if (convert) {
-        result.surfaceVector = SpiceManager::ref().frameTransformationMatrix(bodyfixed, _frame, _time) * result.surfaceVector;
+template <typename Func>
+double bisect(const glm::dvec3& p1, const glm::dvec3& p2, Func testFunction,
+    const glm::dvec3& previousHalf = glm::dvec3(std::numeric_limits<double>::max()))
+{
+    const double Tolerance = 0.00000001;
+    const glm::dvec3 half = glm::mix(p1, p2, 0.5);
+    if (glm::distance(previousHalf, half) < Tolerance) {
+        // The two points are so close to each other that we can stop
+        return 0.5;
     }
-
-    ipoint = result.surfaceIntercept;
-    ivec = result.surfaceVector;
-    bool intercepted = result.interceptFound;
-
-    if (glm::distance(_previousHalf, half) < Tolerance) {
-        _previousHalf = glm::dvec3(0);
-        return half;
-    }
-    _previousHalf = half;
-    //recursive search
-    if (!intercepted) {
-        return bisection(p1, half);
+    if (testFunction(half)) {
+        return 0.5 + 0.5 * bisect(half, p2, testFunction,
half); } else { - return bisection(half, p2); + return 0.5 * bisect(p1, half, testFunction, half); } } -void RenderableFov::fovSurfaceIntercept(bool H[], std::vector bounds) { - _nrInserted = 0; +void RenderableFov::computeIntercepts(const UpdateData& data, const std::string& target, bool isInFov) { + auto makeBodyFixedReferenceFrame = [&target](std::string ref) -> std::pair { + bool convert = (ref.find("IAU_") == std::string::npos); + if (convert) { + return { SpiceManager::ref().frameFromBody(target), true }; + } + else { + return { ref, false }; + } + }; + + //std::vector intersects(_instrument.bounds.size()); + + // First we fill the field-of-view bounds array by testing each bounds vector against + // the object. We need to test it against the object (rather than using a fixed + // distance) as the field of view rendering should stop at the surface and not + // continue + for (size_t i = 0; i < _instrument.bounds.size(); ++i) { + const glm::dvec3& bound = _instrument.bounds[i]; + + RenderInformation::VBOData& first = _fieldOfViewBounds.data[2 * i]; + RenderInformation::VBOData& second = _fieldOfViewBounds.data[2 * i + 1]; + + // Regardless of what happens next, the position of every second element is going + // to be the same. Only the color attribute might change + first = { + 0.f, 0.f, 0.f, + RenderInformation::VertexColorTypeDefaultStart + }; + + if (!isInFov) { + // If the target is not in the field of view, we don't need to perform any + // surface intercepts + glm::vec3 o = orthogonalProjection(bound, data.time, target); + + second = { + o.x, o.y, o.z, + RenderInformation::VertexColorTypeDefaultEnd // This had a different color (0.4) before ---abock + }; + } + else { + // The target is in the field of view, but not the entire field of view has to + // be filled by the target + auto ref = makeBodyFixedReferenceFrame(_instrument.referenceFrame); + SpiceManager::SurfaceInterceptResult r = SpiceManager::ref().surfaceIntercept( + target, + _instrument.spacecraft, + _instrument.name, + ref.first, + _instrument.aberrationCorrection, + data.time, + bound + ); + + //intersects[i] = r.interceptFound; + + + if (r.interceptFound) { + // This point intersected the target + first.color = RenderInformation::VertexColorTypeIntersectionStart; + + // If we had to convert the reference frame into a body-fixed frame, we + // need to apply this change here: + if (ref.second) { + r.surfaceVector = SpiceManager::ref().frameTransformationMatrix( + ref.first, + _instrument.referenceFrame, + data.time + ) * r.surfaceVector; + } + + // Convert the KM scale that SPICE uses to meter + glm::vec3 srfVec = r.surfaceVector * 1000.0; + + // Standoff distance, we would otherwise end up *exactly* on the surface + srfVec *= 0.999; + + second = { + srfVec.x, srfVec.y, srfVec.z, + RenderInformation::VertexColorTypeIntersectionEnd + }; + } + else { + // This point did not intersect the target though others did + glm::vec3 o = orthogonalProjection(bound, data.time, target); + second = { + o.x, o.y, o.z, + RenderInformation::VertexColorTypeInFieldOfView + }; + } + } + } + + // After finding the positions for the field of view boundaries, we can create the + // vertices for the orthogonal plane as well, reusing the computations we performed + // earlier + + // Each boundary in _instrument.bounds has 'InterpolationSteps' steps between + auto indexForBounds = [](size_t idx) -> size_t { + return idx * InterpolationSteps; + }; + + //auto boundsForIndex = [](size_t bnds) -> size_t { + // return bnds % InterpolationSteps; + 
//}; + + auto copyFieldOfViewValues = [&](size_t iBound, size_t begin, size_t end) -> void { + std::fill( + _orthogonalPlane.data.begin() + begin, + _orthogonalPlane.data.begin() + end, + _fieldOfViewBounds.data[2 * iBound + 1] + ); + }; + + // An early out for when the target is not in field of view + if (!isInFov) { + for (size_t i = 0; i < _instrument.bounds.size(); ++i) { + const glm::dvec3& bound = _instrument.bounds[i]; + // If none of the points are able to intersect with the target, we can just + // copy the values from the field-of-view boundary. So we take each second + // item (the first one is (0,0,0)) and replicate it 'InterpolationSteps' times + copyFieldOfViewValues(i, indexForBounds(i), indexForBounds(i + 1)); + } + + } + else { + // At least one point will intersect + for (size_t i = 0; i < _instrument.bounds.size(); ++i) { + // Wrap around the array index to 0 + const size_t j = (i == _instrument.bounds.size() - 1) ? 0 : i + 1; + + const glm::dvec3& iBound = _instrument.bounds[i]; + const glm::dvec3& jBound = _instrument.bounds[j]; + + auto intercepts = [&](const glm::dvec3& probe) -> bool { + return SpiceManager::ref().surfaceIntercept( + target, + _instrument.spacecraft, + _instrument.name, + makeBodyFixedReferenceFrame(_instrument.referenceFrame).first, + _instrument.aberrationCorrection, + data.time, + probe + ).interceptFound; + }; + + // Computes the intercept vector between the 'probe' and the target + // the intercept vector is in meter and contains a standoff distance offset + auto interceptVector = [&](const glm::dvec3& probe) -> glm::dvec3 { + auto ref = makeBodyFixedReferenceFrame(_instrument.referenceFrame); + SpiceManager::SurfaceInterceptResult r = SpiceManager::ref().surfaceIntercept( + target, + _instrument.spacecraft, + _instrument.name, + ref.first, + _instrument.aberrationCorrection, + data.time, + probe + ); + + if (ref.second) { + r.surfaceVector = SpiceManager::ref().frameTransformationMatrix( + ref.first, + _instrument.referenceFrame, + data.time + ) * r.surfaceVector; + } + + // Convert the KM scale that SPICE uses to meter + // Standoff distance, we would otherwise end up *exactly* on the surface + return r.surfaceVector * 1000.0 * 0.999; + }; + + for (size_t m = 0; m < InterpolationSteps; ++m) { + const double t = static_cast(m) / (InterpolationSteps); + + const glm::dvec3 tBound = glm::mix(iBound, jBound, t); + + if (intercepts(tBound)) { + const glm::vec3 icpt = interceptVector(tBound); + _orthogonalPlane.data[indexForBounds(i) + m] = { + icpt.x, icpt.y, icpt.z, + RenderInformation::VertexColorTypeSquare + }; + } + else { + const glm::vec3 o = orthogonalProjection(tBound, data.time, target); + + _orthogonalPlane.data[indexForBounds(i) + m] = { + o.x, o.y, o.z, + RenderInformation::VertexColorTypeSquare + }; + } + + } + + + } + } + + +#ifdef DEBUG_THIS + // At least one point will intersect + for (size_t i = 0; i < _instrument.bounds.size(); ++i) { + // Wrap around the array index to 0 + const size_t j = (i == _instrument.bounds.size() - 1) ? 
0 : i + 1; + + const glm::dvec3& iBound = _instrument.bounds[i]; + const glm::dvec3& jBound = _instrument.bounds[j]; + + auto intercepts = [&](const glm::dvec3& probe) -> bool { + return SpiceManager::ref().surfaceIntercept( + target, + _instrument.spacecraft, + _instrument.name, + makeBodyFixedReferenceFrame(_instrument.referenceFrame).first, + _instrument.aberrationCorrection, + data.time, + probe + ).interceptFound; + }; + + static const uint8_t NoIntersect = 0b00; + static const uint8_t ThisIntersect = 0b01; + static const uint8_t NextIntersect = 0b10; + static const uint8_t BothIntersect = 0b11; + + const uint8_t type = (intersects[i] ? 1 : 0) + (intersects[j] ? 2 : 0); + switch (type) { + case NoIntersect: + { + // If both points don't intercept, the target might still pass between + // them, so we need to check the intermediate point + + const glm::dvec3 half = glm::mix(iBound, jBound, 0.5); + if (intercepts(half)) { + // The two outer points do not intersect, but the middle point + // does; so we need to find the intersection points + const double t1 = bisect(half, iBound, intercepts); + const double t2 = 0.5 + bisect(half, jBound, intercepts); + + // + // The target is sticking out somewhere between i and j, so we + // have three regions here: + // The first (0,t1) and second (t2,1) are not intersecting + // The third between (t1,t2) is intersecting + // + // i p1 p2 j + // ***** + // x-------* *-------x + // 0 t1 t2 1 + + // OBS: i and j are in bounds-space, p1, p2 are in + // _orthogonalPlane-space + const size_t p1 = static_cast(indexForBounds(i) + t1 * InterpolationSteps); + const size_t p2 = static_cast(indexForBounds(i) + t2 * InterpolationSteps); + + // We can copy the non-intersecting parts + copyFieldOfViewValues(i, indexForBounds(i), p1); + copyFieldOfViewValues(i, p2, indexForBounds(j)); + + // Are recompute the intersecting ones + for (size_t k = 0; k <= (p2 - p1); ++k) { + const double t = t1 + k * (t2 - t1); + const glm::dvec3 interpolated = glm::mix(iBound, jBound, t); + const glm::vec3 icpt = interceptVector(interpolated); + _orthogonalPlane.data[p1 + k] = { + icpt.x, icpt.y, icpt.z, + RenderInformation::VertexColorTypeSquare + }; + } + } + else { + copyFieldOfViewValues(i, indexForBounds(i), indexForBounds(i + 1)); + } + break; + } + case ThisIntersect: + { + break; + } + case NextIntersect: + { + break; + } + case BothIntersect: + { + break; + } + default: + ghoul_assert(false, "Missing case label"); + } + } + } + //size_t k = (i + 1 > _instrument.bounds.size() - 1) ? 0 : i + 1; + + //glm::dvec3 mid; + //glm::dvec3 interpolated; + + //const glm::dvec3& current = _instrument.bounds[i]; + //const glm::dvec3& next = _instrument.bounds[k]; + + //if (intercepts[i] == false) { // If point is non-interceptive, project it. 
+ + + // insertPoint(_fovPlane, glm::vec4(orthogonalProjection(current, data.time, target), 0.0), tmp); + // _rebuild = true; + // if (intercepts[i + 1] == false) { + // // IFF incident point is also non-interceptive BUT something is within FOV + // // we need then to check if this segment makes contact with surface + // glm::dvec3 half = interpolate(current, next, 0.5f); + + // std::string bodyfixed = "IAU_"; + // bool convert = (_instrument.referenceFrame.find(bodyfixed) == std::string::npos); + // if (convert) { + // bodyfixed = SpiceManager::ref().frameFromBody(target); + // } + // else { + // bodyfixed = _instrument.referenceFrame; + // } + + // SpiceManager::SurfaceInterceptResult res = + // SpiceManager::ref().surfaceIntercept(target, _instrument.spacecraft, + // _instrument.name, bodyfixed, _instrument.aberrationCorrection, data.time, half); + + // if (convert) { + // res.surfaceVector = SpiceManager::ref().frameTransformationMatrix(bodyfixed, _instrument.referenceFrame, data.time) * res.surfaceVector; + // } + + // bool intercepted = res.interceptFound; + + // if (intercepted) { + // // find the two outer most points of intersection + // glm::dvec3 root1 = bisection(half, current, data.time, target); + // glm::dvec3 root2 = bisection(half, next, data.time, target); + + // insertPoint(_fovPlane, glm::vec4(orthogonalProjection(root1, data.time, target), 0.0), squareColor(diffTime)); + // for (int j = 1; j < InterpolationSteps; ++j) { + // float t = (static_cast(j) / InterpolationSteps); + // interpolated = interpolate(root1, root2, t); + // glm::dvec3 ivec = checkForIntercept(interpolated, data.time, target); + // insertPoint(_fovPlane, glm::vec4(ivec, 0.0), squareColor(diffTime)); + // } + // insertPoint(_fovPlane, glm::vec4(orthogonalProjection(root2, data.time, target), 0.0), squareColor(diffTime)); + // } + // } + //} + //if (interceptTag[i] == true && interceptTag[i + 1] == false) { // current point is interceptive, next is not + // // find outer most point for interpolation + // mid = bisection(current, next, data.time, target); + // for (int j = 1; j <= InterpolationSteps; ++j) { + // float t = (static_cast(j) / InterpolationSteps); + // interpolated = interpolate(current, mid, t); + // glm::dvec3 ivec = (j < InterpolationSteps) ? checkForIntercept(interpolated, data.time, target) : orthogonalProjection(interpolated, data.time, target); + // insertPoint(_fovPlane, glm::vec4(ivec, 0.0), squareColor(diffTime)); + // _rebuild = true; + // } + //} + //if (interceptTag[i] == false && interceptTag[i + 1] == true) { // current point is non-interceptive, next is + // mid = bisection(next, current, data.time, target); + // for (int j = 1; j <= InterpolationSteps; ++j) { + // float t = (static_cast(j) / InterpolationSteps); + // interpolated = interpolate(mid, next, t); + // glm::dvec3 ivec = (j > 1) ? 
checkForIntercept(interpolated, data.time, target) : orthogonalProjection(interpolated, data.time, target); + // insertPoint(_fovPlane, glm::vec4(ivec, 0.0), squareColor(diffTime)); + // _rebuild = true; + // } + //} + //if (interceptTag[i] == true && interceptTag[i + 1] == true) { // both points intercept + // for (int j = 0; j <= InterpolationSteps; ++j) { + // float t = (static_cast(j) / InterpolationSteps); + // interpolated = interpolate(current, next, t); + // glm::dvec3 ivec = checkForIntercept(interpolated, data.time, target); + // insertPoint(_fovPlane, glm::vec4(ivec, 0.0), squareColor(diffTime)); + // _rebuild = true; + // } + //} + //// @CLEANUP-END + //} + //} +#endif + +} + +#if 0 +void RenderableFov::computeIntercepts(const UpdateData& data, const std::string& target, bool inFOV) { + double t2 = (openspace::ImageSequencer::ref().getNextCaptureTime()); + double diff = (t2 - data.time); + float diffTime = 0.0; + float interpolationStart = 7.0; //seconds before + if (diff <= interpolationStart) + diffTime = static_cast(1.0 - (diff / interpolationStart)); + + if (diff < 0.0) + diffTime = 0.f; + + //PerfMeasure("computeIntercepts"); + // for each FOV vector + bool interceptTag[35]; + + _fovBounds.clear(); + for (int i = 0; i <= _instrument.bounds.size(); ++i) { + int r = (i == _instrument.bounds.size()) ? 0 : i; + std::string bodyfixed = "IAU_"; + bool convert = (_instrument.referenceFrame.find(bodyfixed) == std::string::npos); + if (convert) { + bodyfixed = SpiceManager::ref().frameFromBody(target); + } + else { + bodyfixed = _instrument.referenceFrame; + } + + SpiceManager::SurfaceInterceptResult res = + SpiceManager::ref().surfaceIntercept(target, _instrument.spacecraft, + _instrument.name, bodyfixed, _instrument.aberrationCorrection, data.time, _instrument.bounds[r]); + + if (convert) { + res.surfaceVector = SpiceManager::ref().frameTransformationMatrix(bodyfixed, _instrument.referenceFrame, data.time) * res.surfaceVector; + } + + interceptTag[r] = res.interceptFound; + + // if not found, use the orthogonal projected point + glm::dvec3 b; + if (!interceptTag[r]) { + b = orthogonalProjection(_instrument.bounds[r], data.time, target); + } + + glm::vec4 fovOrigin = glm::vec4(0); //This will have to be fixed once spacecraft is 1:1! 
+ + if (interceptTag[r]) { + // INTERCEPTIONS + insertPoint(_fovBounds, fovOrigin, _colors.intersectionStart); + insertPoint(_fovBounds, glm::vec4(res.surfaceVector, 0.0), endColor(diffTime)); + } + else if (inFOV) { + // OBJECT IN FOV, NO INTERCEPT FOR THIS FOV-RAY + insertPoint(_fovBounds, fovOrigin, glm::vec4(0, 0, 1, 1)); + insertPoint(_fovBounds, glm::vec4(b, 0.0), _colors.targetInFieldOfView); + } + else { + //glm::vec4 corner(_bounds[r][0], _bounds[r][1], _bounds[r][2], 8); + ////glm::vec4 corner = _projectionBounds[r].vec4(); + //corner = _spacecraftRotation*corner; + //// NONE OF THE FOV-RAYS INTERCEPT AND NO OBJECT IN FOV + //insertPoint(_fovBounds, fovOrigin, col_gray); + //insertPoint(_fovBounds, corner, glm::vec4(0)); + insertPoint(_fovBounds, fovOrigin, _colors.default); + insertPoint(_fovBounds, glm::vec4(b, 0.0), glm::vec4(0.4)); + } + } + interceptTag[_instrument.bounds.size()] = interceptTag[0]; + //fovSurfaceIntercept(_interceptTag, _bounds, data.time); + + // FOV SURFACE INTERCEPT + auto bounds = _instrument.bounds; + _rebuild = false; _fovPlane.clear(); // empty the array glm::dvec3 mid; @@ -338,219 +844,100 @@ void RenderableFov::fovSurfaceIntercept(bool H[], std::vector bounds current = bounds[i]; next = bounds[k]; - if (H[i] == false) { // If point is non-interceptive, project it. - insertPoint(_fovPlane, orthogonalProjection(current).vec4(), tmp); - if (H[i + 1] == false && _withinFOV) { + if (interceptTag[i] == false) { // If point is non-interceptive, project it. + insertPoint(_fovPlane, glm::vec4(orthogonalProjection(current, data.time, target), 0.0), tmp); + _rebuild = true; + if (interceptTag[i + 1] == false && inFOV) { // IFF incident point is also non-interceptive BUT something is within FOV // we need then to check if this segment makes contact with surface glm::dvec3 half = interpolate(current, next, 0.5f); - + std::string bodyfixed = "IAU_"; - bool convert = (_frame.find(bodyfixed) == std::string::npos); + bool convert = (_instrument.referenceFrame.find(bodyfixed) == std::string::npos); if (convert) { - bodyfixed = SpiceManager::ref().frameFromBody(_fovTarget); + bodyfixed = SpiceManager::ref().frameFromBody(target); } else { - bodyfixed = _frame; - } - - SpiceManager::SurfaceInterceptResult res = - SpiceManager::ref().surfaceIntercept(_fovTarget, _spacecraft, - _instrumentID, bodyfixed, _aberrationCorrection, _time, half); - - if (convert) { - res.surfaceVector = SpiceManager::ref().frameTransformationMatrix(bodyfixed, _frame, _time) * res.surfaceVector; + bodyfixed = _instrument.referenceFrame; + } + + SpiceManager::SurfaceInterceptResult res = + SpiceManager::ref().surfaceIntercept(target, _instrument.spacecraft, + _instrument.name, bodyfixed, _instrument.aberrationCorrection, data.time, half); + + if (convert) { + res.surfaceVector = SpiceManager::ref().frameTransformationMatrix(bodyfixed, _instrument.referenceFrame, data.time) * res.surfaceVector; } - ipoint = res.surfaceIntercept; - ivec = res.surfaceVector; bool intercepted = res.interceptFound; if (intercepted) { // find the two outer most points of intersection - glm::dvec3 root1 = bisection(half, current); - glm::dvec3 root2 = bisection(half, next); + glm::dvec3 root1 = bisection(half, current, data.time, target); + glm::dvec3 root2 = bisection(half, next, data.time, target); - insertPoint(_fovPlane, orthogonalProjection(root1).vec4(), col_sq); + insertPoint(_fovPlane, glm::vec4(orthogonalProjection(root1, data.time, target), 0.0), squareColor(diffTime)); for (int j = 1; j < InterpolationSteps; 
++j) { float t = (static_cast(j) / InterpolationSteps); interpolated = interpolate(root1, root2, t); - _interceptVector = checkForIntercept(interpolated); - insertPoint(_fovPlane, _interceptVector.vec4(), col_sq); + glm::dvec3 ivec = checkForIntercept(interpolated, data.time, target); + insertPoint(_fovPlane, glm::vec4(ivec,0.0) , squareColor(diffTime)); } - insertPoint(_fovPlane, orthogonalProjection(root2).vec4(), col_sq); + insertPoint(_fovPlane, glm::vec4(orthogonalProjection(root2, data.time, target), 0.0), squareColor(diffTime)); } } } - if (H[i] == true && H[i + 1] == false) { // current point is interceptive, next is not - // find outer most point for interpolation - mid = bisection(current, next); + if (interceptTag[i] == true && interceptTag[i + 1] == false) { // current point is interceptive, next is not + // find outer most point for interpolation + mid = bisection(current, next, data.time, target); for (int j = 1; j <= InterpolationSteps; ++j) { float t = (static_cast(j) / InterpolationSteps); interpolated = interpolate(current, mid, t); - _interceptVector = (j < InterpolationSteps) ? checkForIntercept(interpolated) : orthogonalProjection(interpolated); - insertPoint(_fovPlane, _interceptVector.vec4(), col_sq); + glm::dvec3 ivec = (j < InterpolationSteps) ? checkForIntercept(interpolated, data.time, target) : orthogonalProjection(interpolated, data.time, target); + insertPoint(_fovPlane, glm::vec4(ivec, 0.0), squareColor(diffTime)); + _rebuild = true; } } - if (H[i] == false && H[i + 1] == true){ // current point is non-interceptive, next is - mid = bisection(next, current); + if (interceptTag[i] == false && interceptTag[i + 1] == true) { // current point is non-interceptive, next is + mid = bisection(next, current, data.time, target); for (int j = 1; j <= InterpolationSteps; ++j) { float t = (static_cast(j) / InterpolationSteps); interpolated = interpolate(mid, next, t); - _interceptVector = (j > 1) ? checkForIntercept(interpolated) : orthogonalProjection(interpolated); - insertPoint(_fovPlane, _interceptVector.vec4(), col_sq); + glm::dvec3 ivec = (j > 1) ? checkForIntercept(interpolated, data.time, target) : orthogonalProjection(interpolated, data.time, target); + insertPoint(_fovPlane, glm::vec4(ivec, 0.0), squareColor(diffTime)); + _rebuild = true; } } - if (H[i] == true && H[i + 1] == true){ // both points intercept + if (interceptTag[i] == true && interceptTag[i + 1] == true) { // both points intercept for (int j = 0; j <= InterpolationSteps; ++j) { float t = (static_cast(j) / InterpolationSteps); interpolated = interpolate(current, next, t); - _interceptVector = checkForIntercept(interpolated); - insertPoint(_fovPlane, _interceptVector.vec4(), col_sq); + glm::dvec3 ivec = checkForIntercept(interpolated, data.time, target); + insertPoint(_fovPlane, glm::vec4(ivec, 0.0), squareColor(diffTime)); + _rebuild = true; } } } - } - if (_nrInserted == 0) { - _rebuild = false; - } - else { - _rebuild = true; + } + if (_rebuild) { //update size etc; - _vPlaneSize = static_cast(_fovPlane.size()); + _orthogonalPlane.size = static_cast(_fovPlane.size()); } -} + // -// This method is purely cosmetics, can very well be removed -// but be sure to set colors somewhere. 
-void RenderableFov::computeColors() { - double t2 = (openspace::ImageSequencer::ref().getNextCaptureTime()); - double diff = (t2 - _time); - float t = 0.0; - float interpolationStart = 7.0; //seconds before - if (diff <= interpolationStart) - t = static_cast(1.0 - (diff / interpolationStart)); + glm::mat4 spacecraftRotation = glm::mat4( + SpiceManager::ref().positionTransformMatrix(_instrument.name, _instrument.referenceFrame, data.time) + ); - if (diff < 0.0) - t = 0.f; - - // This is a bit hardcoded - either we go for color tables - // or make these properties. - col_gray = glm::vec4(0.7); - col_project = glm::vec4(0.0, 1.0, 0.00, 1); - col_start = glm::vec4(1.00, 0.89, 0.00, 1); - col_end = glm::vec4(1.00, 0.29, 0.00, 1); - col_blue = glm::vec4(0, 0.5, 0.7, 1); - col_sq = glm::vec4(1.00, 0.29, 0.00, 1); - - col_end = col_project*t + col_end*(1 - t); - col_blue = col_project*t + col_blue*(1 - t); - col_sq = col_project*t + col_sq*(1 - t); - - float alpha; - alpha = _drawSolid ? 0.5f : 0.8f; - - col_blue.w = alpha; - col_project.w = alpha; - col_end.w = alpha; -} - -void RenderableFov::determineTarget() { - PerfMeasure("determineTarget"); - _fovTarget = _potentialTargets[0]; //default; - for (int i = 0; i < _potentialTargets.size(); ++i) { - try - { - _withinFOV = openspace::SpiceManager::ref().isTargetInFieldOfView( - _potentialTargets[i], - _spacecraft, - _instrumentID, - SpiceManager::FieldOfViewMethod::Ellipsoid, - _aberrationCorrection, - _time - ); - } - catch (const openspace::SpiceManager::SpiceException e) - { - _withinFOV = false; - } - - if (_withinFOV) { - _fovTarget = _potentialTargets[i]; - break; - } - } -} - -void RenderableFov::computeIntercepts(const RenderData& data) { - //PerfMeasure("computeIntercepts"); - // for each FOV vector - _fovBounds.clear(); - for (int i = 0; i <= _bounds.size(); ++i) { - int r = (i == _bounds.size()) ? 0 : i; - std::string bodyfixed = "IAU_"; - bool convert = (_frame.find(bodyfixed) == std::string::npos); - if (convert) { - bodyfixed = SpiceManager::ref().frameFromBody(_fovTarget); - } - else { - bodyfixed = _frame; - } - - SpiceManager::SurfaceInterceptResult res = - SpiceManager::ref().surfaceIntercept(_fovTarget, _spacecraft, - _instrumentID, bodyfixed, _aberrationCorrection, _time, _bounds[r]); - - if (convert) { - res.surfaceVector = SpiceManager::ref().frameTransformationMatrix(bodyfixed, _frame, _time) * res.surfaceVector; - } - - ipoint = res.surfaceIntercept; - ivec = res.surfaceVector; - _interceptTag[r] = res.interceptFound; - - // if not found, use the orthogonal projected point - if (!_interceptTag[r]) { - _projectionBounds[r] = orthogonalProjection(_bounds[r]); - } - - glm::vec4 fovOrigin = glm::vec4(0); //This will have to be fixed once spacecraft is 1:1! 
- - if (_interceptTag[r]) { - _interceptVector = PowerScaledCoordinate::CreatePowerScaledCoordinate(ivec[0], ivec[1], ivec[2]); - _interceptVector[3] += 3; - // INTERCEPTIONS - insertPoint(_fovBounds, fovOrigin, col_start); - insertPoint(_fovBounds, _interceptVector.vec4(), col_end); - } - else if (_withinFOV) { - // OBJECT IN FOV, NO INTERCEPT FOR THIS FOV-RAY - insertPoint(_fovBounds, fovOrigin, glm::vec4(0, 0, 1, 1)); - insertPoint(_fovBounds, _projectionBounds[r].vec4(), col_blue); - } - else { - //glm::vec4 corner(_bounds[r][0], _bounds[r][1], _bounds[r][2], 8); - ////glm::vec4 corner = _projectionBounds[r].vec4(); - //corner = _spacecraftRotation*corner; - //// NONE OF THE FOV-RAYS INTERCEPT AND NO OBJECT IN FOV - //insertPoint(_fovBounds, fovOrigin, col_gray); - //insertPoint(_fovBounds, corner, glm::vec4(0)); - insertPoint(_fovBounds, fovOrigin, col_gray); - insertPoint(_fovBounds, _projectionBounds[r].vec4(), glm::vec4(0.4)); - } - } - _interceptTag[_bounds.size()] = _interceptTag[0]; - fovSurfaceIntercept(_interceptTag, _bounds); - - glm::vec3 aim = (_spacecraftRotation * glm::vec4(_boresight, 1)); + glm::vec3 aim = (spacecraftRotation * glm::vec4(_instrument.boresight, 1)); double lt; glm::dvec3 position = SpiceManager::ref().targetPosition( - _fovTarget, - _spacecraft, - _frame, - _aberrationCorrection, - _time, + target, + _instrument.spacecraft, + _instrument.referenceFrame, + _instrument.aberrationCorrection, + data.time, lt ); psc p = PowerScaledCoordinate::CreatePowerScaledCoordinate(position.x, position.y, position.z); @@ -564,62 +951,173 @@ void RenderableFov::computeIntercepts(const RenderData& data) { _drawFOV = false; } } +#endif void RenderableFov::render(const RenderData& data) { - assert(_programObject); - _programObject->activate(); - - _drawFOV = false; - // setup the data to the shader - // Model transform and view transform needs to be in double precision - glm::dmat4 modelTransform = - glm::translate(glm::dmat4(1.0), data.modelTransform.translation) * // Translation - glm::dmat4(data.modelTransform.rotation) * - glm::scale(glm::dmat4(1.0), glm::dvec3(data.modelTransform.scale)); - - glm::mat4 modelViewProjectionTransform = - data.camera.projectionMatrix() * - glm::mat4(data.camera.combinedViewMatrix() * - modelTransform); - - _programObject->setUniform("modelViewProjectionTransform", modelViewProjectionTransform); - - if (openspace::ImageSequencer::ref().isReady()) { - _drawFOV = ImageSequencer::ref().instrumentActive(_instrumentID); - } - if (_drawFOV) { - // update only when time progresses. - if (_oldTime != _time) { - //PerfMeasure("Total"); - determineTarget(); - computeColors(); - computeIntercepts(data); - updateGPU(); - } - _oldTime = _time; - _mode = _drawSolid ? 
GL_TRIANGLE_STRIP : GL_LINES; + _programObject->activate(); + + // Model transform and view transform needs to be in double precision + glm::dmat4 modelTransform = + glm::translate(glm::dmat4(1.0), data.modelTransform.translation) * // Translation + glm::dmat4(data.modelTransform.rotation) * + glm::scale(glm::dmat4(1.0), glm::dvec3(data.modelTransform.scale)); + + glm::mat4 modelViewProjectionTransform = + data.camera.projectionMatrix() * + glm::mat4(data.camera.combinedViewMatrix() * + modelTransform); + + _programObject->setUniform("modelViewProjectionTransform", modelViewProjectionTransform); + + _programObject->setUniform("defaultColorStart", _colors.defaultStart); + _programObject->setUniform("defaultColorEnd", _colors.defaultEnd); + _programObject->setUniform("activeColor", _colors.active); + _programObject->setUniform("targetInFieldOfViewColor", _colors.targetInFieldOfView); + _programObject->setUniform("intersectionStartColor", _colors.intersectionStart); + _programObject->setUniform("intersectionEndColor", _colors.intersectionEnd); + _programObject->setUniform("squareColor", _colors.square); + _programObject->setUniform("interpolation", _interpolationTime); + + GLenum mode = _drawSolid ? GL_TRIANGLE_STRIP : GL_LINES; glLineWidth(_lineWidth); - glBindVertexArray(_fovBoundsVAO); - glDrawArrays(_mode, 0, static_cast(_vBoundsSize / Stride)); - glBindVertexArray(0); + glBindVertexArray(_fieldOfViewBounds.vao); + glDrawArrays(mode, 0, static_cast(_fieldOfViewBounds.data.size())); - if (_drawFOV) { - glLineWidth(2.f); - glBindVertexArray(_fovPlaneVAO); - glDrawArrays(GL_LINE_LOOP, 0, static_cast(_vPlaneSize / Stride)); - glBindVertexArray(0); - } + glLineWidth(2.f); + glBindVertexArray(_orthogonalPlane.vao); + glDrawArrays(GL_LINE_LOOP, 0, static_cast(_orthogonalPlane.data.size())); + glBindVertexArray(0); glLineWidth(1.f); + + _programObject->deactivate(); } - _programObject->deactivate(); } void RenderableFov::update(const UpdateData& data) { - _time = data.time; - _stateMatrix = SpiceManager::ref().positionTransformMatrix(_instrumentID, _frame, data.time); - _spacecraftRotation = glm::mat4(_stateMatrix); + _drawFOV = false; + if (openspace::ImageSequencer::ref().isReady()) { + _drawFOV = ImageSequencer::ref().instrumentActive(_instrument.name); + } + + if (_drawFOV && !data.timePaused) { + auto t = determineTarget(data.time); + std::string target = t.first; + bool inFOV = t.second; + + computeIntercepts(data, target, inFOV); + updateGPU(); + + double t2 = (ImageSequencer::ref().getNextCaptureTime()); + double diff = (t2 - data.time); + _interpolationTime = 0.0; + float interpolationStart = 7.0; //seconds before + if (diff <= interpolationStart) { + _interpolationTime = static_cast(1.0 - (diff / interpolationStart)); + } + + if (diff < 0.0) { + _interpolationTime = 0.f; + } + } +} + +std::pair RenderableFov::determineTarget(double time) { + // First, for all potential targets, check whether they are in the field of view + for (const std::string& pt : _instrument.potentialTargets) { + try { + bool inFOV = SpiceManager::ref().isTargetInFieldOfView( + pt, + _instrument.spacecraft, + _instrument.name, + SpiceManager::FieldOfViewMethod::Ellipsoid, + _instrument.aberrationCorrection, + time + ); + + if (inFOV) { + _previousTarget = pt; + return { pt, true }; + } + } + catch (const openspace::SpiceManager::SpiceException&) {} + } + + // If none of the targets is in field of view, either use the last target or if there + // hasn't been one, find the closest target + if 
(_previousTarget.empty()) {
+        // If we reached this, we haven't found a target in field of view and we don't
+        // have a previously selected target, so the next best heuristic for a target is
+        // the closest one
+        std::vector<double> distances(_instrument.potentialTargets.size());
+        std::transform(
+            _instrument.potentialTargets.begin(),
+            _instrument.potentialTargets.end(),
+            distances.begin(),
+            [&o = _instrument.spacecraft, &f = _instrument.referenceFrame, &t = time](const std::string& pt) {
+                double lt;
+                glm::dvec3 p = SpiceManager::ref().targetPosition(pt, o, f, {}, t, lt);
+                return glm::length(p);
+            }
+        );
+
+        // The iterator points to the item with the minimal distance
+        auto iterator = std::min_element(distances.begin(), distances.end());
+
+        // Since the two vectors are ordered the same, we can use the distance as offset
+        _previousTarget = _instrument.potentialTargets[
+            std::distance(distances.begin(), iterator)
+        ];
+    }
+
+    return { _previousTarget, false };
+}
+
+void RenderableFov::updateGPU() {
+    // @SPEEDUP: Only upload the part of the data that has changed ---abock
+    glBindBuffer(GL_ARRAY_BUFFER, _fieldOfViewBounds.vbo);
+    glBufferData(
+        GL_ARRAY_BUFFER,
+        _fieldOfViewBounds.data.size() * sizeof(RenderInformation::VBOData),
+        _fieldOfViewBounds.data.data(),
+        GL_STREAM_DRAW
+    );
+
+    glBindBuffer(GL_ARRAY_BUFFER, _orthogonalPlane.vbo);
+    glBufferData(
+        GL_ARRAY_BUFFER,
+        _orthogonalPlane.data.size() * sizeof(RenderInformation::VBOData),
+        _orthogonalPlane.data.data(),
+        GL_STREAM_DRAW
+    );
+
+
+    //glBindBuffer(GL_ARRAY_BUFFER, _bounds.vbo);
+    //glBufferSubData(GL_ARRAY_BUFFER, 0, _bounds.size * sizeof(GLfloat), _fovBounds.data());
+
+    ////LINFOC(_instrument, _boundsV.size);
+
+    //if (!_rebuild) {
+    //    // no new points
+    //    glBindBuffer(GL_ARRAY_BUFFER, _orthogonalPlane.vbo);
+    //    glBufferSubData(GL_ARRAY_BUFFER, 0, _orthogonalPlane.size * sizeof(GLfloat), _fovPlane.data());
+    //}
+    //else {
+    //    // new points - memory change
+    //    glBindVertexArray(_orthogonalPlane.vao);
+    //    glBindBuffer(GL_ARRAY_BUFFER, _orthogonalPlane.vbo);
+    //    glBufferData(GL_ARRAY_BUFFER, _orthogonalPlane.size * sizeof(GLfloat), NULL, GL_STATIC_DRAW); // orphaning the buffer, sending NULL data.
+ // glBufferSubData(GL_ARRAY_BUFFER, 0, _orthogonalPlane.size * sizeof(GLfloat), _fovPlane.data()); + + // GLsizei st = sizeof(GLfloat) * Stride; + // glEnableVertexAttribArray(0); + // glEnableVertexAttribArray(1); + // glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, st, (void*)0); + // glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, st, (void*)(4 * sizeof(GLfloat))); + //} + + //glBindVertexArray(0); } } // namespace openspace diff --git a/modules/newhorizons/rendering/renderablefov.h b/modules/newhorizons/rendering/renderablefov.h index c57091cd09..b1f4ec1da5 100644 --- a/modules/newhorizons/rendering/renderablefov.h +++ b/modules/newhorizons/rendering/renderablefov.h @@ -29,6 +29,7 @@ #include #include +#include #include #include @@ -44,6 +45,8 @@ class Texture; namespace openspace { +namespace documentation { struct Documentation; } + class RenderableFov : public Renderable { public: RenderableFov(const ghoul::Dictionary& dictionary); @@ -55,79 +58,100 @@ public: void render(const RenderData& data) override; void update(const UpdateData& data) override; + + static documentation::Documentation Documentation(); + +private: + // Checks the field of view of the instrument for the current \p time against all of + // the potential targets are returns the first name of the target that is in field of + // view, the previous target, or the closest target to the space craft. The second + // return value is whether the target is currently in the field of view + std::pair determineTarget(double time); - private: - void loadTexture(); - void allocateData(); - void insertPoint(std::vector& arr, glm::vec4 p, glm::vec4 c); - void fovSurfaceIntercept(bool H[], std::vector bounds); - void determineTarget(); void updateGPU(); - void sendToGPU(); + void insertPoint(std::vector& arr, glm::vec4 p, glm::vec4 c); + glm::vec4 squareColor(float t) const { + return _colors.active.value() * t + _colors.square.value() * (1 - t); + } - void computeColors(); - void computeIntercepts(const RenderData& data); - psc orthogonalProjection(glm::dvec3 camvec); - psc checkForIntercept(glm::dvec3 ray); - psc pscInterpolate(psc p0, psc p1, float t); - glm::dvec3 interpolate(glm::dvec3 p0, glm::dvec3 p1, float t); - glm::dvec3 bisection(glm::dvec3 p1, glm::dvec3 p2); + glm::vec4 endColor(float t) const { + return _colors.active.value() * t + _colors.intersectionEnd.value() * (1 - t); + } + + glm::vec4 fovColor(float t) const { + return _colors.active.value() * t + _colors.targetInFieldOfView.value() * (1 - t); + } + + void computeIntercepts(const UpdateData& data, const std::string& target , bool inFOV); + glm::dvec3 orthogonalProjection(const glm::dvec3& camvec, double time, const std::string& target) const; + glm::dvec3 checkForIntercept(const glm::dvec3& ray, double time, const std::string& target) const; + //glm::dvec3 bisection(const glm::dvec3& p1, const glm::dvec3& p2, double time, const std::string& target, const glm::dvec3& previousHalf = glm::dvec3(0.0)) const; // properties properties::FloatProperty _lineWidth; properties::BoolProperty _drawSolid; std::unique_ptr _programObject; - ghoul::opengl::Texture* _texture; - // instance variables - int _nrInserted = 0; bool _rebuild = false; - bool _interceptTag[35]; - bool _withinFOV; - std::vector _projectionBounds; - psc _interceptVector; - std::vector _fovBounds; - std::vector _fovPlane; - // spice - std::string _spacecraft; - std::string _observer; - std::string _frame; - std::string _instrumentID; - SpiceManager::AberrationCorrection _aberrationCorrection; - 
std::string _fovTarget; - glm::dvec3 ipoint, ivec; - glm::dvec3 _previousHalf; - glm::dvec3 _boresight; - glm::dmat3 _stateMatrix; - glm::mat4 _spacecraftRotation; - std::vector _bounds; - std::vector _potentialTargets; + //std::vector _fovBounds; + //std::vector _fovPlane; + + std::string _previousTarget; bool _drawFOV; - // GPU - GLuint _fovBoundsVAO; - GLuint _fovBoundsVBO; - unsigned int _vBoundsSize; - GLuint _fovPlaneVAO; - GLuint _fovPlaneVBO; - unsigned int _vPlaneSize; - GLenum _mode; + struct { + std::string spacecraft; + std::string name; + std::string referenceFrame; + SpiceManager::AberrationCorrection aberrationCorrection; - // time - double _time = 0; - double _oldTime = 0; + std::vector bounds; + glm::dvec3 boresight; + std::vector potentialTargets; + } _instrument; - // colors - glm::vec4 col_sq; // orthogonal white square - glm::vec4 col_project; // color when projections occur - glm::vec4 col_start; // intersection start color - glm::vec4 col_end; // intersection end color - glm::vec4 col_blue; // withinFov color - glm::vec4 col_gray; // no intersection color + float _interpolationTime; + + struct RenderInformation { + // Differentiating different vertex types + using VertexColorType = int32_t; + // This needs to be synced with the fov_vs.glsl shader + static const VertexColorType VertexColorTypeDefaultStart = 0; + static const VertexColorType VertexColorTypeDefaultEnd = 1; + static const VertexColorType VertexColorTypeInFieldOfView = 2; + static const VertexColorType VertexColorTypeActive = 3; + static const VertexColorType VertexColorTypeIntersectionStart = 4; + static const VertexColorType VertexColorTypeIntersectionEnd = 5; + static const VertexColorType VertexColorTypeSquare = 6; + + struct VBOData { + GLfloat position[3]; + VertexColorType color; + }; + + GLuint vao = 0; + GLuint vbo = 0; + // @SPEEDUP: Add an ibo to reduce the number of vertices drawn + std::vector data; + bool isDirty = true; + }; + + RenderInformation _orthogonalPlane; + RenderInformation _fieldOfViewBounds; + + struct { + properties::Vec4Property defaultStart; // Start color for uninteresting times + properties::Vec4Property defaultEnd; // End color for uninteresting times + properties::Vec4Property active; // Color use when a field-of-view is projecting + properties::Vec4Property targetInFieldOfView; // Color to use for target in fov + properties::Vec4Property intersectionStart; // Color at the start of intersection + properties::Vec4Property intersectionEnd; // Color at the end of intersection + properties::Vec4Property square; // Color for the orthogonal square + } _colors; }; } // namespace openspace diff --git a/modules/newhorizons/shaders/crawlingline_fs.glsl b/modules/newhorizons/shaders/crawlingline_fs.glsl index 9f0eb3b8ad..7203d95209 100644 --- a/modules/newhorizons/shaders/crawlingline_fs.glsl +++ b/modules/newhorizons/shaders/crawlingline_fs.glsl @@ -27,20 +27,15 @@ uniform vec4 objpos; uniform vec3 color; uniform float _alpha; -in vec4 vs_position; +in vec4 vs_positionScreenSpace; in vec4 vs_color; #include "PowerScaling/powerScaling_fs.hglsl" #include "fragment.glsl" Fragment getFragment() { - vec4 position = vs_position; - vec4 diffuse = vs_color; - float depth = pscDepth(position); - diffuse.a = _alpha; - Fragment frag; - frag.color = diffuse; - frag.depth = depth; + frag.color = vec4(vs_color.rgb, vs_color.a * _alpha); + frag.depth = vs_positionScreenSpace.w; return frag; } diff --git a/modules/newhorizons/shaders/crawlingline_vs.glsl 
b/modules/newhorizons/shaders/crawlingline_vs.glsl index 8145739af4..a5a101b921 100644 --- a/modules/newhorizons/shaders/crawlingline_vs.glsl +++ b/modules/newhorizons/shaders/crawlingline_vs.glsl @@ -24,33 +24,24 @@ #version __CONTEXT__ -uniform mat4 ViewProjection; -uniform mat4 ModelTransform; +layout(location = 0) in vec3 in_position; +layout(location = 1) in vec4 in_color; -uniform vec3 color; - -layout(location = 0) in vec4 in_position; +uniform mat4 modelViewProjection; +// uniform vec3 color; out vec4 vs_color; -out vec4 vs_position; +out vec4 vs_positionScreenSpace; +// out vec4 vs_positionCameraSpace; + const int targetId = 1; #include "PowerScaling/powerScaling_vs.hglsl" void main() { - vs_position = in_position; - vec4 tmp = in_position; - int id = gl_VertexID; - - vec3 black = vec3(0.0); + vec4 positionClipSpace = modelViewProjection * vec4(in_position, 1.0); + vs_positionScreenSpace = z_normalization(positionClipSpace); + gl_Position = vs_positionScreenSpace; - if(id == targetId) - vs_color.xyz = black; - else - vs_color.xyz = color; - - vec4 position = pscTransform(tmp, ModelTransform); - vs_position = tmp; - position = ViewProjection * position; - gl_Position = z_normalization(position); -} \ No newline at end of file + vs_color = in_color; +} diff --git a/modules/newhorizons/shaders/fov_fs.glsl b/modules/newhorizons/shaders/fov_fs.glsl index 0b29147ea5..7e5440729a 100644 --- a/modules/newhorizons/shaders/fov_fs.glsl +++ b/modules/newhorizons/shaders/fov_fs.glsl @@ -22,28 +22,14 @@ * OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * ****************************************************************************************/ -/* -uniform mat4 ViewProjection; -uniform mat4 ModelTransform; - -in vec4 vs_point_position; -in vec4 vs_point_velocity; -*/ - -//out vec4 vs_point_position; -in vec4 vs_point_velocity; -in vec4 vs_positionScreenSpace; - - -//out vec4 diffuse; - -#include "PowerScaling/powerScaling_fs.hglsl" #include "fragment.glsl" +in vec4 vs_color; +in vec4 vs_positionScreenSpace; + Fragment getFragment() { Fragment frag; - frag.color = vs_point_velocity; + frag.color = vs_color; frag.depth = vs_positionScreenSpace.w; - return frag; } diff --git a/modules/newhorizons/shaders/fov_vs.glsl b/modules/newhorizons/shaders/fov_vs.glsl index be0a93f3a5..b6fccd732f 100644 --- a/modules/newhorizons/shaders/fov_vs.glsl +++ b/modules/newhorizons/shaders/fov_vs.glsl @@ -24,63 +24,64 @@ #version __CONTEXT__ -//uniform mat4 ViewProjection; -//uniform mat4 ModelTransform; -//uniform vec4 etColor; -//uniform vec4 objectVelocity; - -layout(location = 0) in vec4 in_point_position; -layout(location = 1) in vec4 in_point_velocity; -layout(location = 2) in vec2 in_point_timeindex; - - -//out vec4 vs_point_position; -out vec4 vs_point_velocity; - -// Uniforms -uniform mat4 modelViewProjectionTransform; - -// Outputs -out vec4 vs_positionScreenSpace; - #include "PowerScaling/powerScaling_vs.hglsl" -void main() -{ - vec4 position = vec4(in_point_position.xyz * pow(10, in_point_position.w), 1); +// This needs to be synced with the RenderableFov header +const int VertexColorTypeDefaultStart = 0; +const int VertexColorTypeDefaultEnd = 1; +const int VertexColorTypeInFieldOfView = 2; +const int VertexColorTypeActive = 3; +const int VertexColorTypeIntersectionStart = 4; +const int VertexColorTypeIntersectionEnd = 5; +const int VertexColorTypeSquare = 6; + +layout(location = 0) in vec3 in_point_position; +layout (location = 1) in int colorInformation; + +out vec4 vs_color; +out vec4 
vs_positionScreenSpace; + +uniform mat4 modelViewProjectionTransform; + +uniform vec4 defaultColorStart; +uniform vec4 defaultColorEnd; +uniform vec4 activeColor; +uniform vec4 targetInFieldOfViewColor; +uniform vec4 intersectionStartColor; +uniform vec4 intersectionEndColor; +uniform vec4 squareColor; +uniform float interpolation; + +void main() { + vec4 position = vec4(in_point_position, 1); vec4 positionClipSpace = modelViewProjectionTransform * position; - // Write output vs_positionScreenSpace = z_normalization(positionClipSpace); gl_Position = vs_positionScreenSpace; - vs_point_velocity = in_point_velocity; - -/* - //vs_point_position = objpos; - - // rotate and scale vertex with model transform and add the translation - vec3 local_vertex_pos = mat3(ModelTransform) * in_point_position.xyz; - //vec4 lvp = ModelTransform * in_point_position; - - // PSC addition; local vertex position and the object power scaled world position - vs_point_position = psc_addition(vec4(local_vertex_pos,in_point_position.w),objpos); - //vs_point_position = psc_addition(lvp,objpos); - - // PSC addition; rotated and viewscaled vertex and the cmaeras negative position - vs_point_position = psc_addition(vs_point_position,vec4(-campos.xyz,campos.w)); - - // rotate the camera - local_vertex_pos = mat3(camrot) * vs_point_position.xyz; - vs_point_position = vec4(local_vertex_pos, vs_point_position.w); - //vs_point_position = camrot* vs_point_position; - - // project using the rescaled coordinates, - //vec4 vs_point_position_rescaled = psc_scaling(vs_point_position, scaling); - vec4 vs_point_position_rescaled = psc_to_meter(vs_point_position, scaling); - //vs_point_position = vs_point_position_rescaled; - - // project the position to view space - gl_Position = ViewProjection * vs_point_position_rescaled; - */ -} \ No newline at end of file + switch (colorInformation) { + case VertexColorTypeDefaultStart: + vs_color = defaultColorStart; + break; + case VertexColorTypeDefaultEnd: + vs_color = defaultColorEnd; + break; + case VertexColorTypeInFieldOfView: + vs_color = activeColor * interpolation + targetInFieldOfViewColor * (1 - interpolation); + break; + case VertexColorTypeActive: + vs_color = activeColor; + break; + case VertexColorTypeIntersectionStart: + vs_color = intersectionStartColor; + break; + case VertexColorTypeIntersectionEnd: + vs_color = activeColor * interpolation + intersectionEndColor * (1 - interpolation); + break; + case VertexColorTypeSquare: + vs_color = activeColor * interpolation + squareColor * (1 - interpolation); + break; + default: + vs_color = vec4(1.0, 0.0, 1.0, 1.0); + } +} diff --git a/src/util/spicemanager.cpp b/src/util/spicemanager.cpp index 21405c71ed..3dd60fb51b 100644 --- a/src/util/spicemanager.cpp +++ b/src/util/spicemanager.cpp @@ -565,6 +565,21 @@ glm::dvec3 SpiceManager::targetPosition(const std::string& target, } } +glm::dvec3 SpiceManager::targetPosition(const std::string& target, + const std::string& observer, const std::string& referenceFrame, + AberrationCorrection aberrationCorrection, double ephemerisTime) const +{ + double unused = 0.0; + return targetPosition( + target, + observer, + referenceFrame, + aberrationCorrection, + ephemerisTime, + unused + ); +} + glm::dmat3 SpiceManager::frameTransformationMatrix(const std::string& from, const std::string& to, double ephemerisTime) const
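The new targetPosition overload added at the end of this patch forwards to the existing six-parameter version and simply discards the light time, which suits callers such as determineTarget that do not need it. A usage sketch; the target, observer, frame, and ephemeris time below are placeholder values and assume the required SPICE kernels have already been loaded:

    // Assumes <openspace/util/spicemanager.h> is included and kernels are loaded.
    using openspace::SpiceManager;

    const double et = 0.0; // placeholder ephemeris time
    glm::dvec3 p = SpiceManager::ref().targetPosition(
        "PLUTO",          // target          (placeholder)
        "NEW HORIZONS",   // observer        (placeholder)
        "GALACTIC",       // reference frame (placeholder)
        {},               // default-constructed AberrationCorrection, as in determineTarget
        et
    );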