Feature/sun position atmosphere (#1673)

* General atmosphere cleanup
* Make sun projection originate from Sun position instead of SSB (closes #1695)
This commit is contained in:
Alexander Bock
2021-07-01 15:27:09 +02:00
committed by GitHub
parent 92c4837c6c
commit 20aa307ece
3 changed files with 85 additions and 90 deletions
@@ -74,8 +74,8 @@ namespace {
"cullAtmosphere", "Rg", "Rt", "groundRadianceEmission", "HR", "betaRayleigh",
"HM", "betaMieExtinction", "mieG", "sunRadiance", "ozoneLayerEnabled", "HO",
"betaOzoneExtinction", "SAMPLES_R", "SAMPLES_MU", "SAMPLES_MU_S", "SAMPLES_NU",
"dInverseModelTransformMatrix", "dModelTransformMatrix",
"dSgctProjectionToModelTransformMatrix", "dSGCTViewToWorldMatrix", "dCamPosObj",
"inverseModelTransformMatrix", "modelTransformMatrix",
"projectionToModelTransformMatrix", "viewToWorldMatrix", "camPosObj",
"sunDirectionObj", "hardShadows", "transmittanceTexture", "irradianceTexture",
"inscatterTexture"
};
@@ -309,53 +309,38 @@ void AtmosphereDeferredcaster::preRaycast(const RenderData& renderData,
// Object Space
glm::dmat4 inverseModelMatrix = glm::inverse(_modelTransform);
program.setUniform(
_uniformCache.dInverseModelTransformMatrix,
inverseModelMatrix
_uniformCache.inverseModelTransformMatrix, inverseModelMatrix
);
program.setUniform(_uniformCache.dModelTransformMatrix, _modelTransform);
program.setUniform(_uniformCache.modelTransformMatrix, _modelTransform);
// Eye Space in SGCT to Eye Space in OS (SGCT View to OS Camera Rig)
// glm::dmat4 dSgctEye2OSEye = glm::inverse(
// glm::dmat4(renderData.camera.viewMatrix()));
glm::dmat4 dSGCTViewToWorldMatrix = glm::inverse(
glm::dmat4 viewToWorldMatrix = glm::inverse(
renderData.camera.combinedViewMatrix()
);
// Eye Space in SGCT to OS World Space
program.setUniform(
_uniformCache.dSGCTViewToWorldMatrix,
dSGCTViewToWorldMatrix
);
// Eye Space to World Space
program.setUniform(_uniformCache.viewToWorldMatrix, viewToWorldMatrix);
// SGCT Projection to SGCT Eye Space
// Projection to Eye Space
glm::dmat4 dInverseProjection = glm::inverse(
glm::dmat4(renderData.camera.projectionMatrix())
);
glm::dmat4 inverseWholeMatrixPipeline =
inverseModelMatrix * dSGCTViewToWorldMatrix * dInverseProjection;
inverseModelMatrix * viewToWorldMatrix * dInverseProjection;
program.setUniform(
_uniformCache.dSgctProjectionToModelTransformMatrix,
_uniformCache.projectionToModelTransformMatrix,
inverseWholeMatrixPipeline
);
glm::dvec4 camPosObjCoords =
inverseModelMatrix * glm::dvec4(renderData.camera.eyePositionVec3(), 1.0);
program.setUniform(_uniformCache.dCamPosObj, camPosObjCoords);
program.setUniform(_uniformCache.camPosObj, glm::dvec3(camPosObjCoords));
SceneGraphNode* node = sceneGraph()->sceneGraphNode("Sun");
glm::dvec3 sunPosWorld = node ? node->worldPosition() : glm::dvec3(0.0);
double lt;
glm::dvec3 sunPosWorld = SpiceManager::ref().targetPosition(
"SUN",
"SSB",
"GALACTIC",
{},
_time,
lt
);
glm::dvec4 sunPosObj;
// Sun following camera position
if (_sunFollowingCameraEnabled) {
sunPosObj = inverseModelMatrix * glm::dvec4(
@@ -365,7 +350,10 @@ void AtmosphereDeferredcaster::preRaycast(const RenderData& renderData,
}
else {
sunPosObj = inverseModelMatrix *
glm::dvec4(sunPosWorld - renderData.modelTransform.translation, 1.0);
glm::dvec4(
(sunPosWorld - renderData.modelTransform.translation) * 1000.0,
1.0
);
}
// Sun Position in Object Space
@@ -374,6 +362,8 @@ void AtmosphereDeferredcaster::preRaycast(const RenderData& renderData,
glm::normalize(glm::dvec3(sunPosObj))
);
ghoul::opengl::updateUniformLocations(program, _uniformCache, UniformNames);
// Shadow calculations..
if (!_shadowConfArray.empty()) {
ZoneScopedN("Shadow Configuration")
@@ -384,6 +374,7 @@ void AtmosphereDeferredcaster::preRaycast(const RenderData& renderData,
// TO REMEMBER: all distances and lengths in world coordinates are in
// meters!!! We need to move this to view space...
// Getting source and caster:
double lt;
glm::dvec3 sourcePos = SpiceManager::ref().targetPosition(
shadowConf.source.first,
"SSB",
@@ -126,9 +126,9 @@ private:
UniformCache(cullAtmosphere, Rg, Rt, groundRadianceEmission, HR, betaRayleigh, HM,
betaMieExtinction, mieG, sunRadiance, ozoneLayerEnabled, HO, betaOzoneExtinction,
SAMPLES_R, SAMPLES_MU, SAMPLES_MU_S, SAMPLES_NU, dInverseModelTransformMatrix,
dModelTransformMatrix, dSgctProjectionToModelTransformMatrix,
dSGCTViewToWorldMatrix, dCamPosObj, sunDirectionObj, hardShadows,
SAMPLES_R, SAMPLES_MU, SAMPLES_MU_S, SAMPLES_NU, inverseModelTransformMatrix,
modelTransformMatrix, projectionToModelTransformMatrix,
viewToWorldMatrix, camPosObj, sunDirectionObj, hardShadows,
transmittanceTexture, irradianceTexture, inscatterTexture) _uniformCache;
GLuint _transmittanceTableTexture = 0;
@@ -62,7 +62,6 @@
in vec2 texCoord;
out vec4 renderTarget;
uniform int nAaSamples;
uniform int cullAtmosphere;
uniform sampler2D irradianceTexture;
@@ -71,17 +70,21 @@ uniform sampler2D mainPositionTexture;
uniform sampler2D mainNormalTexture;
uniform sampler2D mainColorTexture;
uniform dmat4 dInverseModelTransformMatrix;
uniform dmat4 dModelTransformMatrix;
uniform dmat4 dSGCTViewToWorldMatrix;
uniform dmat4 dSgctProjectionToModelTransformMatrix;
uniform dmat4 inverseModelTransformMatrix;
uniform dmat4 modelTransformMatrix;
uniform dmat4 viewToWorldMatrix;
uniform dmat4 projectionToModelTransformMatrix;
uniform vec4 viewport;
uniform vec2 resolution;
uniform dvec4 dCamPosObj;
uniform dvec3 camPosObj;
uniform dvec3 sunDirectionObj;
uniform dvec3 sunWorld;
uniform dvec3 viewDirWorld;
uniform dvec3 sunModel;
/*******************************************************************************
***** ALL CALCULATIONS FOR ECLIPSE ARE IN METERS AND IN WORLD SPACE SYSTEM ****
*******************************************************************************/
@@ -215,13 +218,13 @@ Ray calculateRayRenderableGlobe(vec2 st) {
dvec4 clipCoords = dvec4(interpolatedNDCPos, 1.0, 1.0);
// Clip to Object Coords
dvec4 objectCoords = dSgctProjectionToModelTransformMatrix * clipCoords;
objectCoords /= objectCoords.w;
dvec4 objectCoords = projectionToModelTransformMatrix * clipCoords;
objectCoords.xyz /= objectCoords.w;
// Building Ray
// Ray in object space (in KM)
Ray ray;
ray.origin = dvec3(dCamPosObj * dvec4(0.001, 0.001, 0.001, 1.0));
ray.origin = camPosObj * 0.001;
ray.direction = normalize(objectCoords.xyz * dvec3(0.001) - ray.origin);
return ray;
}
@@ -245,15 +248,14 @@ Ray calculateRayRenderableGlobe(vec2 st) {
* the reflectance R[L]
*/
vec3 inscatterRadiance(vec3 x, inout float t, inout float irradianceFactor, vec3 v, vec3 s,
out float r, out float mu, out vec3 attenuation, vec3 fragPosObj,
float r, out float mu, out vec3 attenuation, vec3 fragPosObj,
out bool groundHit, double maxLength, double pixelDepth,
vec4 spaceColor, float sunIntensity)
vec3 spaceColor, float sunIntensity)
{
const float INTERPOLATION_EPS = 0.004; // precision const from Bruneton
vec3 radiance;
r = length(x);
mu = dot(x, v) / r;
float r2 = r * r;
@@ -303,16 +305,15 @@ vec3 inscatterRadiance(vec3 x, inout float t, inout float irradianceFactor, vec3
}
// cos(PI-thetaH) = dist/r
// cos(thetaH) = - dist/r
// cos(thetaH) = -dist/r
// muHorizon = -sqrt(r^2-Rg^2)/r = -sqrt(1-(Rg/r)^2)
float muHorizon = -sqrt(1.0 - Rg2 / r2);
// In order to avoid imprecision problems near horizon, we interpolate between two
// In order to avoid precision problems near horizon, we interpolate between two
// points: above and below horizon
if (abs(mu - muHorizon) < INTERPOLATION_EPS) {
// We want an interpolation value close to 1/2, so the contribution of each radiance
// value is almost the same or it has a heavy weight if from above or
// below horizon
// value is almost the same or it has a heavy weight if from above or below horizon
float interpolationValue = (mu - muHorizon + INTERPOLATION_EPS) / (2.0 * INTERPOLATION_EPS);
// Above Horizon
@@ -321,9 +322,9 @@ vec3 inscatterRadiance(vec3 x, inout float t, inout float irradianceFactor, vec3
// From cosine law where t = distance between x and x0
// r0^2 = r^2 + t^2 - 2 * r * t * cos(PI-theta)
// r0 = sqrt(r2 + t2 + 2.0f * r * t * mu);
float halfCossineLaw1 = r2 + (t * t);
float halfCossineLaw2 = 2.0 * r * t;
r0 = sqrt(halfCossineLaw1 + halfCossineLaw2 * mu);
float halfCosineLaw1 = r2 + (t * t);
float halfCosineLaw2 = 2.0 * r * t;
r0 = sqrt(halfCosineLaw1 + halfCosineLaw2 * mu);
// From the dot product: cos(theta0) = (x0 dot v)/(||ro||*||v||)
// mu0 = ((x + t) dot v) / r0
@@ -339,7 +340,7 @@ vec3 inscatterRadiance(vec3 x, inout float t, inout float irradianceFactor, vec3
// Below Horizon
mu = muHorizon + INTERPOLATION_EPS;
//r0 = sqrt(r2 + t2 + 2.0f * r * t * mu);
r0 = sqrt(halfCossineLaw1 + halfCossineLaw2 * mu);
r0 = sqrt(halfCosineLaw1 + halfCosineLaw2 * mu);
mu0 = (r * mu + t) * (1.0 / r0);
@@ -374,7 +375,7 @@ vec3 inscatterRadiance(vec3 x, inout float t, inout float irradianceFactor, vec3
return finalScatteringRadiance;
}
else {
return spaceColor.rgb + finalScatteringRadiance;
return spaceColor + finalScatteringRadiance;
}
}
@@ -457,16 +458,23 @@ vec3 groundColor(vec3 x, float t, vec3 v, vec3 s, vec3 attenuationXtoX0, vec3 gr
* attenuation := transmittance T(x,x0)
*/
vec3 sunColor(vec3 v, vec3 s, float r, float mu, float irradianceFactor) {
vec3 tm = vec3(1.0);
if (r <= Rt) {
tm = mu < -sqrt(1.0 - Rg2 / (r * r)) ? vec3(0.0) : transmittance(r, mu);
}
// JCC: Change this function to an impostor texture with Gaussian decay color weighted
// by the sunRadiance, transmittance and irradianceColor (11/03/2017)
float sunFinalColor = smoothstep(cos(M_PI / 500.0), cos(M_PI / 900.0), dot(v, s)) *
sunRadiance * (1.0 - irradianceFactor);
// v = normalize(vec3(inverseModelTransformMatrix * dvec4(sunWorld, 1.0)));
float angle = dot(v, s);
return tm * sunFinalColor;
// JCC: Change this function to an impostor texture with Gaussian decay color weighted
// by the sunRadiance, transmittance and irradianceColor (11/03/2017)
// @TODO (abock, 2021-07-01) This value is hard-coded to our sun right now
// Convert 0.3 degrees -> radians
const float SunAngularSize = (0.3 * M_PI / 180.0);
const float FuzzyFactor = 0.5; // How fuzzy should the edges be
const float p1 = cos(SunAngularSize);
const float p2 = cos(SunAngularSize * FuzzyFactor);
float t = (angle - p1) / (p2 - p1);
float scale = clamp(t, 0.0, 1.0);
return scale * transmittance(r, mu) * sunRadiance * (1.0 - irradianceFactor);
}
void main() {
@@ -481,17 +489,13 @@ void main() {
st.x = st.x / (resolution.x / viewport[2]) + (viewport[0] / resolution.x);
st.y = st.y / (resolution.y / viewport[3]) + (viewport[1] / resolution.y);
// Color from G-Buffer
vec3 color = texture(mainColorTexture, st).rgb;
if (cullAtmosphere == 1) {
renderTarget = texture(mainColorTexture, st);
renderTarget.rgb = color;
return;
}
vec4 atmosphereFinalColor = vec4(0.0);
int nSamples = 1;
// Color from G-Buffer
vec4 color = texture(mainColorTexture, st);
// Get the ray from camera to atm in object space
Ray ray = calculateRayRenderableGlobe(texCoord);
@@ -499,7 +503,7 @@ void main() {
double maxLength = 0.0; // in KM
bool intersect = atmosphereIntersection(ray, Rt - (ATM_EPSILON * 0.001), offset, maxLength);
if (!intersect) {
renderTarget = color;
renderTarget.rgb = color;
return;
}
@@ -509,14 +513,11 @@ void main() {
// Space (View plus Camera Rig Coords) when using their positions later, one must
// convert them to the planet's coords
//
// Get data from G-Buffer
// Normal is stored in SGCT View Space and transformed to the current object space
// Normal is stored in view space and transformed to the current object space
vec4 normalViewSpaceAndWaterReflectance = texture(mainNormalTexture, st);
dvec4 normalViewSpace = vec4(normalViewSpaceAndWaterReflectance.xyz, 0.0);
dvec4 normalWorldSpace = dSGCTViewToWorldMatrix * normalViewSpace;
vec4 normal = vec4(dInverseModelTransformMatrix * normalWorldSpace);
dvec4 normalWorldSpace = viewToWorldMatrix * normalViewSpace;
vec4 normal = vec4(inverseModelTransformMatrix * normalWorldSpace);
normal.xyz = normalize(normal.xyz);
normal.w = normalViewSpaceAndWaterReflectance.w;
@@ -524,19 +525,20 @@ void main() {
vec4 position = texture(mainPositionTexture, st);
// OS Eye to World coords
dvec4 positionWorldCoords = dSGCTViewToWorldMatrix * position;
dvec4 positionWorldCoords = viewToWorldMatrix * position;
// World to Object (Normal and Position in meters)
dvec4 positionObjectsCoords = dInverseModelTransformMatrix * positionWorldCoords;
dvec3 positionObjectsCoords = (inverseModelTransformMatrix * positionWorldCoords).xyz;
// Distance of the pixel in the gBuffer to the observer
// JCC (12/12/2017): AMD distance function is buggy.
//double pixelDepth = distance(cameraPositionInObject.xyz, positionObjectsCoords.xyz);
double pixelDepth = length(dCamPosObj.xyz - positionObjectsCoords.xyz);
double pixelDepth = length(camPosObj - positionObjectsCoords);
// JCC (12/13/2017): Trick to remove floating error in texture.
// We see a squared noise on planet's surface when seeing the planet from far away
float dC = float(length(dCamPosObj.xyz));
// @TODO (abock, 2021-07-01) I don't think this does anything. Remove?
float dC = float(length(camPosObj));
const float x1 = 1e8;
if (dC > x1) {
pixelDepth += 1000.0;
@@ -552,22 +554,21 @@ void main() {
// All calculations are done in KM:
pixelDepth *= 0.001;
positionObjectsCoords.xyz *= 0.001;
positionObjectsCoords *= 0.001;
if (pixelDepth < offset) {
// ATM Occluded - Something in front of ATM
renderTarget = color;
renderTarget.rgb = color;
return;
}
// Following paper nomenclature
double t = offset;
vec3 attenuation;
// Moving observer from camera location to top atmosphere. If the observer is already
// inside the atm, offset = 0.0 and no changes at all
vec3 x = vec3(ray.origin + t * ray.direction);
float r = 0.0; // length(x);
float r = length(x);
vec3 v = vec3(ray.direction);
float mu = 0.0; // dot(x, v) / r;
vec3 s = vec3(sunDirectionObj);
@@ -578,27 +579,30 @@ void main() {
// comparison with the planet's ground make sense:
pixelDepth -= offset;
dvec3 onATMPos = (dModelTransformMatrix * dvec4(x * 1000.0, 1.0)).xyz;
dvec3 onATMPos = (modelTransformMatrix * dvec4(x * 1000.0, 1.0)).xyz;
float eclipseShadowATM = calcShadow(shadowDataArray, onATMPos, false);
float sunIntensityInscatter = sunRadiance * eclipseShadowATM;
float irradianceFactor = 0.0;
bool groundHit = false;
vec3 attenuation;
vec3 inscatterColor = inscatterRadiance(x, tF, irradianceFactor, v, s, r, mu,
attenuation, vec3(positionObjectsCoords.xyz), groundHit, maxLength, pixelDepth,
color, sunIntensityInscatter);
attenuation, vec3(positionObjectsCoords), groundHit, maxLength, pixelDepth,
color, sunIntensityInscatter);
vec3 atmColor = vec3(0.0);
if (groundHit) {
float eclipseShadowPlanet = calcShadow(shadowDataArray, positionWorldCoords.xyz, true);
float sunIntensityGround = sunRadiance * eclipseShadowPlanet;
atmColor = groundColor(x, tF, v, s, attenuation, color.rgb, normal.xyz,
atmColor = groundColor(x, tF, v, s, attenuation, color, normal.xyz,
irradianceFactor, normal.w, sunIntensityGround);
}
else {
// In order to get better performance, we are not tracing multiple rays per pixel
// when the ray doesn't intersect the ground
atmColor = sunColor(v, s, r, mu, irradianceFactor);
atmColor = sunColor(v, s, r, mu, irradianceFactor);
}
// Final Color of ATM plus terrain: