mirror of https://github.com/OpenSpace/OpenSpace.git
Merge
@@ -25,7 +25,7 @@
#include "fragment.glsl"

flat in vec4 gs_colorMap;
-in float vs_screenSpaceDepth;
+flat in float vs_screenSpaceDepth;
in vec2 texCoord;
in float ta;

@@ -55,7 +55,7 @@ flat in float dvarScaling[];
flat out vec4 gs_colorMap;

out vec2 texCoord;
-out float vs_screenSpaceDepth;
+flat out float vs_screenSpaceDepth;
out float ta;

const double PARSEC = 0.308567756e17LF;
@@ -171,17 +171,22 @@ void main() {


    // Build primitive
    texCoord = corners[3];
    gl_Position = thirdPosition;
    EmitVertex();

    texCoord = corners[0];
    gl_Position = initialPosition;
    EmitVertex();
    texCoord = corners[2];
    gl_Position = crossCorner;
    EmitVertex();

    texCoord = corners[1];
    gl_Position = secondPosition;
    EmitVertex();

    texCoord = corners[3];
    gl_Position = thirdPosition;
    EmitVertex();

    texCoord = corners[2];
    gl_Position = crossCorner;
    EmitVertex();

    EndPrimitive();
}
@@ -308,7 +308,6 @@ RenderableSatellites::RenderableSatellites(const ghoul::Dictionary& dictionary)
    : Renderable(dictionary)
    , _path(PathInfo)
    , _nSegments(SegmentsInfo, 120, 4, 1024)

{
    documentation::testSpecificationAndThrow(
        Documentation(),
@@ -322,6 +321,14 @@ RenderableSatellites::RenderableSatellites(const ghoul::Dictionary& dictionary)
    if (dictionary.hasKeyAndValue<glm::vec3>(LineColorInfo.identifier)) {
        _appearance.lineColor = dictionary.value<glm::vec3>(LineColorInfo.identifier);
    }
+    if (dictionary.hasKeyAndValue<double>("FadeInfo")) {
+        _appearance.lineFade = static_cast<float>(
+            dictionary.value<double>("FadeInfo")
+        );
+    }
+    else {
+        _appearance.lineFade = 20;
+    }

    auto reinitializeTrailBuffers = [this]() {
        initializeGL();
@@ -333,6 +340,9 @@ RenderableSatellites::RenderableSatellites(const ghoul::Dictionary& dictionary)
    addPropertySubOwner(_appearance);
    addProperty(_path);
    addProperty(_nSegments);
+    addProperty(_opacity);
+
+    setRenderBin(Renderable::RenderBin::Overlay);
}

@@ -475,7 +485,6 @@ void RenderableSatellites::initializeGL() {
    _uniformCache.opacity = _programObject->uniformLocation("opacity");

    updateBuffers();
-    setRenderBin(Renderable::RenderBin::Overlay);
}

void RenderableSatellites::deinitializeGL() {
@@ -514,9 +523,12 @@ void RenderableSatellites::render(const RenderData& data, RendererTasks&) {
        data.camera.combinedViewMatrix() * modelTransform
    );

+    // Because we want the property to work similar to the planet trails
+    float fade = static_cast<float>(pow(_appearance.lineFade.maxValue() - _appearance.lineFade, 2.0));
+
    _programObject->setUniform(_uniformCache.projection, data.camera.projectionMatrix());
    _programObject->setUniform(_uniformCache.color, _appearance.lineColor);
-    _programObject->setUniform(_uniformCache.lineFade, _appearance.lineFade);
+    _programObject->setUniform(_uniformCache.lineFade, fade);

    glLineWidth(_appearance.lineWidth);

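The two lines added above remap the lineFade property before it is uploaded: the shader receives (lineFade.maxValue() - lineFade)^2 as its fade exponent, so a larger property value produces a smaller exponent and the trail stays visible over more of its length. A standalone sketch of that remapping, with an assumed property maximum of 30 (the real bound comes from _appearance.lineFade.maxValue()):

#include <cmath>
#include <cstdio>

// Sketch only: mirrors the remapping done before the uniform upload above.
float shaderExponent(float lineFade, float maxValue) {
    // Larger property value -> smaller exponent -> slower fade along the trail.
    return static_cast<float>(std::pow(maxValue - lineFade, 2.0));
}

int main() {
    const float maxValue = 30.0f;   // assumption for illustration
    const float values[] = { 0.0f, 10.0f, 20.0f, 29.0f };
    for (float v : values) {
        std::printf("property = %4.1f  ->  uniform = %6.1f\n", v, shaderExponent(v, maxValue));
    }
}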
@@ -595,10 +607,10 @@ void RenderableSatellites::updateBuffers() {
    );

    glEnableVertexAttribArray(0);
-    glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, sizeof(TrailVBOLayout), (GLvoid*)0); // stride : 4*sizeof(GL_FLOAT) + 2*sizeof(GL_DOUBLE)
+    glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, sizeof(TrailVBOLayout), nullptr);

    glEnableVertexAttribArray(1);
-    glVertexAttribPointer(1, 2, GL_DOUBLE, GL_FALSE, sizeof(TrailVBOLayout), (GLvoid*)(4*sizeof(GL_FLOAT)) );
+    glVertexAttribPointer(1, 2, GL_DOUBLE, GL_FALSE, sizeof(TrailVBOLayout), (GLvoid*)(4 * sizeof(GL_FLOAT)));

    glBindVertexArray(0);

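The stride comment deleted in this hunk documents the interleaved vertex format: four floats followed by two doubles per vertex, a 32-byte stride. The sketch below shows an equivalent attribute setup against an assumed TrailVBOLayout (the member names are placeholders, not the ones in the real header) and uses offsetof instead of the 4 * sizeof(GL_FLOAT) literal, which only works because the GL_FLOAT macro happens to be a 4-byte integer constant:

#include <ghoul/opengl/ghoul_gl.h>   // OpenGL types and entry points, as used in OpenSpace
#include <cstddef>                   // offsetof

// Assumed layout matching the removed stride comment; placeholder member names.
struct TrailVBOLayout {
    float x, y, z, time;
    double epoch, period;
};
static_assert(sizeof(TrailVBOLayout) == 32, "expected a tightly packed 32-byte vertex");

void setTrailVertexAttribs() {
    // Attribute 0: the four leading floats, offset 0, 32-byte stride.
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, sizeof(TrailVBOLayout), nullptr);

    // Attribute 1: the two trailing doubles, starting right after the floats.
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(
        1, 2, GL_DOUBLE, GL_FALSE, sizeof(TrailVBOLayout),
        reinterpret_cast<GLvoid*>(offsetof(TrailVBOLayout, epoch))
    );
}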
@@ -94,10 +94,6 @@ private:
    /// trail.
    std::vector<TrailVBOLayout> _vertexBufferData;

    /// The index array that is potentially used in the draw call. If this is empty, no
    /// element draw call is used.
    std::vector<unsigned int> _indexBufferData;

    GLuint _vertexArray;
    GLuint _vertexBuffer;
    GLuint _indexBuffer;

@@ -63,15 +63,25 @@ Fragment getFragment() {
        vertexDistance_f += 1.0;
    }

-    float invert = 1.0 - vertexDistance_f;
-    float fade = clamp(invert * lineFade, 0.0, 1.0);
+    float invert = pow((1.0 - vertexDistance_f), lineFade);
+    float fade = clamp(invert, 0.0, 1.0);

+    // Currently even fully transparent lines can occlude other lines, thus we discard
+    // these fragments since debris and satellites are rendered so close to each other
+    if (fade < 0.05) {
+        discard;
+    }
    Fragment frag;

+    // Use additive blending for some values to make the discarding less abrupt
+    if (fade < 0.15) {
+        frag.blend = BLEND_MODE_ADDITIVE;
+    }

    frag.color = vec4(color, fade * opacity);
    frag.depth = vs_position_w;
    frag.gPosition = viewSpacePosition;
    frag.gNormal = vec4(1, 1, 1, 0);
    // frag.blend = BLEND_MODE_ADDITIVE;


    // to debug using colors use this if-statement.

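The fade term changes here from a linear ramp scaled by lineFade to an exponential falloff with lineFade as the exponent, and very faint fragments are discarded (below 0.05) or switched to additive blending (below 0.15) so that nearly invisible trail segments do not occlude one another. A standalone sketch comparing the two curves and the thresholds, in plain C++ rather than GLSL; the exponent 4.0 is just an example value:

#include <algorithm>
#include <cmath>
#include <cstdio>

// d is the normalized distance along the trail in [0, 1]; lineFade is the
// uniform set in the render() hunk above.
float oldFade(float d, float lineFade) {
    return std::clamp((1.0f - d) * lineFade, 0.0f, 1.0f);        // previous: linear ramp
}

float newFade(float d, float lineFade) {
    return std::clamp(std::pow(1.0f - d, lineFade), 0.0f, 1.0f); // now: exponential falloff
}

int main() {
    const float lineFade = 4.0f;   // example exponent only
    const float samples[] = { 0.0f, 0.25f, 0.5f, 0.75f, 0.9f };
    for (float d : samples) {
        const float f = newFade(d, lineFade);
        // Mirrors the shader thresholds: below 0.05 discard, below 0.15 additive blending.
        const char* mode = f < 0.05f ? "discard" : (f < 0.15f ? "additive" : "normal");
        std::printf("d = %.2f  old = %.3f  new = %.3f  (%s)\n", d, oldFade(d, lineFade), f, mode);
    }
}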
@@ -82,14 +82,6 @@ void main() {
    vs_position = gl_in[0].gl_Position; // in object space
    dvec4 dpos = modelMatrix * dvec4(vs_position);

-    dvec4 clipTestPos = cameraViewProjectionMatrix * dpos;
-    clipTestPos /= clipTestPos.w;
-    if ((clipTestPos.x < -1.0 || clipTestPos.x > 1.0) ||
-        (clipTestPos.y < -1.0 || clipTestPos.y > 1.0))
-    {
-        return;
-    }
-
    ge_bvLumAbsMagAppMag = vs_bvLumAbsMagAppMag[0];
    ge_velocity = vs_velocity[0];
    ge_speed = vs_speed[0];
@@ -156,96 +148,43 @@ void main() {
    dvec3 scaledUp = dvec3(0.0);
    vec4 bottomLeftVertex, bottomRightVertex, topLeftVertex, topRightVertex;

-    // if (distanceToStarInParsecs > 1800.0) {
-    // scaledRight = scaleMultiply * invariantRight * 0.5f;
-    // scaledUp = scaleMultiply * invariantUp * 0.5f;
-    // } else {
-        dvec3 normal = normalize(eyePosition - dpos.xyz);
-        dvec3 newRight = normalize(cross(cameraUp, normal));
-        dvec3 newUp = cross(normal, newRight);
-        scaledRight = scaleMultiply * newRight;
-        scaledUp = scaleMultiply * newUp;
-    //}

+    dvec3 normal = normalize(eyePosition - dpos.xyz);
+    dvec3 newRight = normalize(cross(cameraUp, normal));
+    dvec3 newUp = cross(normal, newRight);
+    scaledRight = scaleMultiply * newRight;
+    scaledUp = scaleMultiply * newUp;

    bottomLeftVertex = z_normalization(vec4(cameraViewProjectionMatrix *
        dvec4(dpos.xyz - scaledRight - scaledUp, dpos.w)));
    gs_screenSpaceDepth = bottomLeftVertex.w;

-    topRightVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-        dvec4(dpos.xyz + scaledUp + scaledRight, dpos.w)));
+    topRightVertex = z_normalization(vec4(cameraViewProjectionMatrix *
+        dvec4(dpos.xyz + scaledUp + scaledRight, dpos.w)));

-    // Testing size:
-    // vec3 tmpPos = vec3(eyePositionDelta);
-    // vec4 falseBottomLeftVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-    // dvec4(tmpPos - scaledRight - scaledUp, dpos.w)));
+    bottomRightVertex = z_normalization(vec4(cameraViewProjectionMatrix *
+        dvec4(dpos.xyz + scaledRight - scaledUp, dpos.w)));

-    // vec4 falseTopRightVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-    // dvec4(tmpPos + scaledUp + scaledRight, dpos.w)));
-    // vec2 halfViewSize = vec2(screenSize.x, screenSize.y) * 0.5f;
-    // vec2 topRight = falseTopRightVertex.xy/falseTopRightVertex.w;
-    // vec2 bottomLeft = falseBottomLeftVertex.xy/falseBottomLeftVertex.w;

-    // Complete algebra
-    // topRight = ((topRight + vec2(1.0)) * halfViewSize) - vec2(0.5);
-    // bottomLeft = ((bottomLeft + vec2(1.0)) * halfViewSize) - vec2(0.5);
-    //vec2 sizes = abs(topRight - bottomLeft);

-    // Optimized version
-    // vec2 sizes = abs(halfViewSize * (topRight - bottomLeft));

-    // float height = sizes.y;
-    // float width = sizes.x;

-    // if ((height > billboardSize) ||
-    // (width > billboardSize)) {
-    // float correctionScale = height > billboardSize ? billboardSize / height :
-    // billboardSize / width;

-    // scaledRight *= correctionScale;
-    // scaledUp *= correctionScale;
-    // bottomLeftVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-    // dvec4(dpos.xyz - scaledRight - scaledUp, dpos.w)));
-    // gs_screenSpaceDepth = bottomLeftVertex.w;
-    // topRightVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-    // dvec4(dpos.xyz + scaledUp + scaledRight, dpos.w)));


-    // bottomRightVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-    // dvec4(dpos.xyz + scaledRight - scaledUp, dpos.w)));

-    // topLeftVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-    // dvec4(dpos.xyz + scaledUp - scaledRight, dpos.w)));

-    // } else {
-    // if (width < 2.0f) {
-    // float maxVar = 2.0f;
-    // float minVar = 1.0f;
-    // float var = (height + width);
-    // float ta = ( (var - minVar)/(maxVar - minVar) );
-    // if (ta == 0.0f)
-    // return;
-    // }
-    // float minSize = 30.f;
-    // if ((width < minSize) || (height < minSize))
-    // return;
-    bottomRightVertex = z_normalization(vec4(cameraViewProjectionMatrix *
-        dvec4(dpos.xyz + scaledRight - scaledUp, dpos.w)));
-    topLeftVertex = z_normalization(vec4(cameraViewProjectionMatrix *
+    topLeftVertex = z_normalization(vec4(cameraViewProjectionMatrix *
        dvec4(dpos.xyz + scaledUp - scaledRight, dpos.w)));
-    // }


    // Build primitive
    gl_Position = topLeftVertex;
    psfCoords = vec2(-1.0, 1.0);
    EmitVertex();

    gl_Position = bottomLeftVertex;
    psfCoords = vec2(-1.0, -1.0);
    EmitVertex();
    gl_Position = topRightVertex;
    psfCoords = vec2(1.0, 1.0);
    EmitVertex();

    gl_Position = bottomRightVertex;
    psfCoords = vec2(1.0, -1.0);
    EmitVertex();

    gl_Position = topLeftVertex;
    psfCoords = vec2(-1.0, 1.0);
    EmitVertex();

    gl_Position = topRightVertex;
    psfCoords = vec2(1.0, 1.0);
    EmitVertex();

    EndPrimitive();

}
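The code that remains in this hunk builds a camera-facing basis for each billboard: the normal points from the rendered position toward the eye, the right vector is the cross product of the camera up vector with that normal, and the up vector re-orthogonalizes the frame; the quad corners are then offsets along the scaled right and up vectors. A minimal C++ sketch of the same construction using glm, with made-up input values standing in for eyePosition, cameraUp, dpos.xyz, and scaleMultiply:

#include <glm/glm.hpp>
#include <cstdio>

int main() {
    // Made-up inputs for illustration only.
    const glm::dvec3 eyePosition(0.0, 0.0, 10.0);
    const glm::dvec3 cameraUp(0.0, 1.0, 0.0);
    const glm::dvec3 position(3.0, 1.0, -2.0);
    const double scaleMultiply = 0.5;

    // Same construction as the geometry shader: an orthogonal, camera-facing basis.
    const glm::dvec3 normal = glm::normalize(eyePosition - position);
    const glm::dvec3 newRight = glm::normalize(glm::cross(cameraUp, normal));
    const glm::dvec3 newUp = glm::cross(normal, newRight);

    const glm::dvec3 scaledRight = scaleMultiply * newRight;
    const glm::dvec3 scaledUp = scaleMultiply * newUp;

    // The four corners the shader expands into a camera-facing quad.
    const glm::dvec3 corners[4] = {
        position - scaledRight - scaledUp,   // bottom left
        position + scaledRight - scaledUp,   // bottom right
        position + scaledUp - scaledRight,   // top left
        position + scaledUp + scaledRight    // top right
    };

    for (const glm::dvec3& c : corners) {
        std::printf("(%+.3f, %+.3f, %+.3f)\n", c.x, c.y, c.z);
    }
}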