Improved ATM performance (still working on it). Debugging billboards alignment (in progress).

This commit is contained in:
Jonathas Costa
2017-11-03 17:10:03 -04:00
parent 45df1d498a
commit 089cfe727f
7 changed files with 351 additions and 329 deletions
+2 -2
View File
@@ -13,7 +13,7 @@
</PlanarProjection>
</Viewport>
</Window>
<Window fullScreen="false" numberOfSamples="8" border="false">
<Window fullScreen="false" numberOfSamples="8" border="true">
<Pos x="340" y="100" />
<Size x="1280" y="720" />
<Viewport>
@@ -21,7 +21,7 @@
<Size x="1.0" y="1.0" />
<PlanarProjection>
<FOV down="16.875" left="30.0" right="30.0" up="16.875" />
<Orientation heading="0.0" pitch="0.0" roll="0.0" />
<Orientation heading="45.0" pitch="0.0" roll="0.0" />
</PlanarProjection>
</Viewport>
</Window>
+3 -3
View File
@@ -129,8 +129,8 @@ return {
--"mercury",
--"venus",
"earth",
--"moon",
--"mars",
"moon",
"mars",
--"jupiter",
--"saturn",
--"uranus",
@@ -138,7 +138,7 @@ return {
-- "satellites",
"grids",
--"digitaluniverse",
"digitaluniverse",
"stars/digitaluniverse",
"milkyway/digitaluniverse"
}
@@ -168,8 +168,8 @@ vec4 calcShadow(const ShadowRenderingStruct shadowInfoArray[numberOfShadows], co
*******************************************************************************/
struct dRay {
dvec4 origin;
dvec4 direction;
dvec4 origin;
dvec4 direction;
};
/* Function to calculate the initial intersection of the eye (camera) ray
@@ -189,47 +189,47 @@ struct dRay {
* is inside atmosphere.
*/
bool dAtmosphereIntersection(const dvec3 planetPosition, const dRay ray, const double atmRadius,
out bool inside, out double offset, out double maxLength ) {
dvec3 l = planetPosition - ray.origin.xyz;
double s = dot(l, ray.direction.xyz);
double l2 = dot(l, l);
double r2 = atmRadius * atmRadius; // avoiding surface acne
out bool inside, out double offset, out double maxLength ) {
dvec3 l = planetPosition - ray.origin.xyz;
double s = dot(l, ray.direction.xyz);
double l2 = dot(l, l);
double r2 = atmRadius * atmRadius; // avoiding surface acne
// Ray origin (eye position) is behind sphere
if ((s < 0.0) && (l2 > r2)) {
inside = false;
offset = 0.0;
maxLength = 0.0;
return false;
}
// Ray origin (eye position) is behind sphere
if ((s < 0.0) && (l2 > r2)) {
inside = false;
offset = 0.0;
maxLength = 0.0;
return false;
}
double m2 = l2 - s*s;
double m2 = l2 - s*s;
// Ray misses atmospere
if (m2 > r2) {
inside = false;
offset = 0.0;
maxLength = 0.0;
return false;
}
// Ray misses atmospere
if (m2 > r2) {
inside = false;
offset = 0.0;
maxLength = 0.0;
return false;
}
// We already now the ray hits the atmosphere
// We already now the ray hits the atmosphere
// If q = 0.0f, there is only one intersection
double q = sqrt(r2 - m2);
// If q = 0.0f, there is only one intersection
double q = sqrt(r2 - m2);
// If l2 < r2, the ray origin is inside the sphere
if (l2 > r2) {
inside = false;
offset = s - q;
maxLength = s + q;
} else {
inside = true;
offset = 0.0;
maxLength = s + q;
}
return true;
// If l2 < r2, the ray origin is inside the sphere
if (l2 > r2) {
inside = false;
offset = s - q;
maxLength = s + q;
} else {
inside = true;
offset = 0.0;
maxLength = s + q;
}
return true;
}
/*
@@ -245,45 +245,44 @@ bool dAtmosphereIntersection(const dvec3 planetPosition, const dRay ray, const d
void dCalculateRayRenderableGlobe(in int mssaSample, out dRay ray,
out dvec4 planetPositionObjectCoords,
out dvec4 cameraPositionInObject) {
// ======================================
// ======= Avoiding Some Matrices =======
// ======================================
// ======= Avoiding Some Matrices =======
// NDC to clip coordinates (gl_FragCoord.w = 1.0/w_clip)
// Using the interpolated coords:
// Assuming Red Book is right: z_ndc e [0, 1] and not [-1, 1]
dvec2 samplePos = dvec2(msaaSamplePatter[mssaSample],
msaaSamplePatter[mssaSample+1]);
dvec4 clipCoords = dvec4(interpolatedNDCPos.xy + samplePos, interpolatedNDCPos.z, 1.0) / gl_FragCoord.w;
// NDC to clip coordinates (gl_FragCoord.w = 1.0/w_clip)
// Using the interpolated coords:
// Assuming Red Book is right: z_ndc e [0, 1] and not [-1, 1]
dvec2 samplePos = dvec2(msaaSamplePatter[mssaSample],
msaaSamplePatter[mssaSample+1]);
dvec4 clipCoords = dvec4(interpolatedNDCPos.xy + samplePos, interpolatedNDCPos.z, 1.0) / gl_FragCoord.w;
// Clip to SGCT Eye
dvec4 sgctEyeCoords = dInverseSgctProjectionMatrix * clipCoords;
//sgctEyeCoords /= sgctEyeCoords.w;
sgctEyeCoords.w = 1.0;
// SGCT Eye to OS Eye
dvec4 tOSEyeCoordsInv = dSgctEyeToOSEyeTranform * sgctEyeCoords;
// OS Eye to World coords
dvec4 tmpRInv = dInverseCamRotTransform * tOSEyeCoordsInv;
dvec4 worldCoords = dvec4(dvec3(tmpRInv) + dCampos, 1.0);
// World to Object
dvec4 objectCoords = dInverseModelTransformMatrix * worldCoords;
// Planet Position in Object Space
// JCC: Applying the inverse of the model transformation on the object postion in World
// space results in imprecision.
planetPositionObjectCoords = dvec4(0.0,0.0,0.0,1.0);//dInverseModelTransformMatrix * dvec4(dObjpos.xyz, 1.0);
//planetPositionObjectCoords = dInverseModelTransformMatrix * dvec4(dObjpos.xyz, 1.0);
// Camera Position in Object Space
cameraPositionInObject = dInverseModelTransformMatrix * dvec4(dCampos, 1.0);
// Clip to SGCT Eye
dvec4 sgctEyeCoords = dInverseSgctProjectionMatrix * clipCoords;
sgctEyeCoords.w = 1.0;
// ============================
// ====== Building Ray ========
// Ray in object space (in KM)
ray.origin = cameraPositionInObject * dvec4(0.001, 0.001, 0.001, 1.0);
ray.direction = dvec4(normalize(objectCoords.xyz - cameraPositionInObject.xyz), 0.0);
// SGCT Eye to OS Eye
dvec4 tOSEyeCoordsInv = dSgctEyeToOSEyeTranform * sgctEyeCoords;
// OS Eye to World coords
dvec4 tmpRInv = dInverseCamRotTransform * tOSEyeCoordsInv;
dvec4 worldCoords = dvec4(dvec3(tmpRInv) + dCampos, 1.0);
// World to Object
dvec4 objectCoords = dInverseModelTransformMatrix * worldCoords;
// Planet Position in Object Space
// JCC: Applying the inverse of the model transformation on the object postion in World
// space results in imprecision.
planetPositionObjectCoords = dvec4(0.0, 0.0, 0.0, 1.0);
//planetPositionObjectCoords = dInverseModelTransformMatrix * dvec4(dObjpos.xyz, 1.0);
// Camera Position in Object Space
cameraPositionInObject = dInverseModelTransformMatrix * dvec4(dCampos, 1.0);
// ============================
// ====== Building Ray ========
// Ray in object space (in KM)
ray.origin = cameraPositionInObject * dvec4(0.001, 0.001, 0.001, 1.0);
ray.direction = dvec4(normalize(objectCoords.xyz - cameraPositionInObject.xyz), 0.0);
}
/*
@@ -309,130 +308,130 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, inout float irradianceFactor
out vec3 attenuation, const vec3 fragPosObj,
const double maxLength, const double pixelDepth,
const vec4 spaceColor, const float sunIntensity) {
vec3 radiance;
r = length(x);
mu = dot(x, v) / r;
float mu2 = mu * mu;
float r2 = r * r;
float Rt2 = Rt * Rt;
float Rg2 = Rg * Rg;
float nu = dot(v, s);
float muSun = dot(x, s) / r;
float rayleighPhase = rayleighPhaseFunction(nu);
float miePhase = miePhaseFunction(nu);
vec3 radiance;
// S[L](x,s,v)
// I.e. the next line has the scattering light for the "infinite" ray passing
// through the atmosphere. If this ray hits something inside the atmosphere,
// we will subtract the attenuated scattering light from that path in the
// current path.
vec4 inscatterRadiance = max(texture4D(inscatterTexture, r, mu, muSun, nu), 0.0);
r = length(x);
mu = dot(x, v) / r;
float mu2 = mu * mu;
float r2 = r * r;
float Rt2 = Rt * Rt;
float Rg2 = Rg * Rg;
float nu = dot(v, s);
float muSun = dot(x, s) / r;
float rayleighPhase = rayleighPhaseFunction(nu);
float miePhase = miePhaseFunction(nu);
// S[L](x,s,v)
// I.e. the next line has the scattering light for the "infinite" ray passing
// through the atmosphere. If this ray hits something inside the atmosphere,
// we will subtract the attenuated scattering light from that path in the
// current path.
vec4 inscatterRadiance = max(texture4D(inscatterTexture, r, mu, muSun, nu), 0.0);
// After removing the initial path from camera pos to top of atmosphere (for an
// observer in the space) we test if the light ray is hitting the atmosphere
vec3 x0 = fragPosObj;
float r0 = length(fragPosObj);
float invr0 = 1.0/r0;
float muSun0 = dot(fragPosObj, s) * invr0;
//vec3 x0 = x + float(pixelDepth) * v;
float mu0 = dot(x0, v) * invr0;
bool groundHit = false;
if ((pixelDepth > 0.0) && (pixelDepth < maxLength)) {
t = float(pixelDepth);
groundHit = true;
// Transmittance from point r, direction mu, distance t
// By Analytical calculation
//attenuation = analyticTransmittance(r, mu, t);
// JCC: change from analytical to LUT transmittance to avoid
// acme on planet surface when looking from far away. (11/02/2017)
attenuation = transmittance(r, mu, t);
// Here we use the idea of S[L](a->b) = S[L](b->a), and get the S[L](x0, v, s)
// Then we calculate S[L] = S[L]|x - T(x, x0)*S[L]|x0
// The "infinite" ray hist something inside the atmosphere, so we need to remove
// the unsused contribution to the final radiance.
vec4 inscatterFromSurface = texture4D(inscatterTexture, r0, mu0, muSun0, nu);
inscatterRadiance = max(inscatterRadiance - attenuation.rgbr * inscatterFromSurface, 0.0);
// We set the irradianceFactor to 1.0 so the reflected irradiance will be considered
// when calculating the reflected light on the ground.
irradianceFactor = 1.0;
} else {
attenuation = analyticTransmittance(r, mu, t);
}
// cos(PI-thetaH) = dist/r
// cos(thetaH) = - dist/r
// muHorizon = -sqrt(r^2-Rg^2)/r = -sqrt(1-(Rg/r)^2)
float muHorizon = -sqrt(1.0f - (Rg2 / r2));
// In order to avoid imprecision problems near horizon,
// we interpolate between two points: above and below horizon
const float INTERPOLATION_EPS = 0.004f; // precision const from Brunetton
if (abs(mu - muHorizon) < INTERPOLATION_EPS) {
// We want an interpolation value close to 1/2, so the
// contribution of each radiance value is almost the same
// or it has a havey weight if from above or below horizon
float interpolationValue = ((mu - muHorizon) + INTERPOLATION_EPS) / (2.0f * INTERPOLATION_EPS);
float t2 = t * t;
// Above Horizon
mu = muHorizon - INTERPOLATION_EPS;
//r0 = sqrt(r * r + t * t + 2.0f * r * t * mu);
// From cosine law where t = distance between x and x0
// r0^2 = r^2 + t^2 - 2 * r * t * cos(PI-theta)
r0 = sqrt(r2 + t2 + 2.0f * r * t * mu);
// From the dot product: cos(theta0) = (x0 dot v)/(||ro||*||v||)
// mu0 = ((x + t) dot v) / r0
// mu0 = (x dot v + t dot v) / r0
// mu0 = (r*mu + t) / r0
mu0 = (r * mu + t) / r0;
vec4 inScatterAboveX = texture4D(inscatterTexture, r, mu, muSun, nu);
vec4 inScatterAboveXs = texture4D(inscatterTexture, r0, mu0, muSun0, nu);
// Attention for the attenuation.r value applied to the S_Mie
vec4 inScatterAbove = max(inScatterAboveX - attenuation.rgbr * inScatterAboveXs, 0.0f);
// Below Horizon
mu = muHorizon + INTERPOLATION_EPS;
r0 = sqrt(r2 + t2 + 2.0f * r * t * mu);
mu0 = (r * mu + t) / r0;
vec4 inScatterBelowX = texture4D(inscatterTexture, r, mu, muSun, nu);
vec4 inScatterBelowXs = texture4D(inscatterTexture, r0, mu0, muSun0, nu);
// Attention for the attenuation.r value applied to the S_Mie
vec4 inScatterBelow = max(inScatterBelowX - attenuation.rgbr * inScatterBelowXs, 0.0);
// Interpolate between above and below inScattering radiance
inscatterRadiance = mix(inScatterAbove, inScatterBelow, interpolationValue);
}
// The w component of inscatterRadiance has stored the Cm,r value (Cm = Sm[L0])
// So, we must reintroduce the Mie inscatter by the proximity rule as described in the
// paper by Bruneton and Neyret in "Angular precision" paragraph:
// Hermite interpolation between two values
// This step is done because imprecision problems happen when the Sun is slightly below
// the horizon. When this happen, we avoid the Mie scattering contribution.
inscatterRadiance.w *= smoothstep(0.0f, 0.02f, muSun);
vec3 inscatterMie = inscatterRadiance.rgb * inscatterRadiance.a / max(inscatterRadiance.r, 1e-4) *
(betaRayleigh.r / betaRayleigh);
radiance = max(inscatterRadiance.rgb * rayleighPhase + inscatterMie * miePhase, 0.0f);
// After removing the initial path from camera pos to top of atmosphere (for an
// observer in the space) we test if the light ray is hitting the atmosphere
vec3 x0 = fragPosObj;
float r0 = length(fragPosObj);
float invr0 = 1.0/r0;
float muSun0 = dot(fragPosObj, s) * invr0;
//vec3 x0 = x + float(pixelDepth) * v;
float mu0 = dot(x0, v) * invr0;
bool groundHit = false;
if ((pixelDepth > 0.0) && (pixelDepth < maxLength)) {
t = float(pixelDepth);
groundHit = true;
// Finally we add the Lsun (all calculations are done with no Lsun so
// we can change it on the fly with no precomputations)
// return radiance * sunRadiance;
vec3 finalScatteringRadiance = radiance * sunIntensity;
if (groundHit) {
return finalScatteringRadiance;
} else {
return ((r-Rg)/(Rt-Rg))*spaceColor.rgb * backgroundExposure + finalScatteringRadiance;
}
// Transmittance from point r, direction mu, distance t
// By Analytical calculation
//attenuation = analyticTransmittance(r, mu, t);
// JCC: change from analytical to LUT transmittance to avoid
// acme on planet surface when looking from far away. (11/02/2017)
attenuation = transmittance(r, mu, t);
// Here we use the idea of S[L](a->b) = S[L](b->a), and get the S[L](x0, v, s)
// Then we calculate S[L] = S[L]|x - T(x, x0)*S[L]|x0
// The "infinite" ray hist something inside the atmosphere, so we need to remove
// the unsused contribution to the final radiance.
vec4 inscatterFromSurface = texture4D(inscatterTexture, r0, mu0, muSun0, nu);
inscatterRadiance = max(inscatterRadiance - attenuation.rgbr * inscatterFromSurface, 0.0);
// We set the irradianceFactor to 1.0 so the reflected irradiance will be considered
// when calculating the reflected light on the ground.
irradianceFactor = 1.0;
} else {
attenuation = analyticTransmittance(r, mu, t);
}
// cos(PI-thetaH) = dist/r
// cos(thetaH) = - dist/r
// muHorizon = -sqrt(r^2-Rg^2)/r = -sqrt(1-(Rg/r)^2)
float muHorizon = -sqrt(1.0f - (Rg2 / r2));
// In order to avoid imprecision problems near horizon,
// we interpolate between two points: above and below horizon
const float INTERPOLATION_EPS = 0.004f; // precision const from Brunetton
if (abs(mu - muHorizon) < INTERPOLATION_EPS) {
// We want an interpolation value close to 1/2, so the
// contribution of each radiance value is almost the same
// or it has a havey weight if from above or below horizon
float interpolationValue = ((mu - muHorizon) + INTERPOLATION_EPS) / (2.0f * INTERPOLATION_EPS);
float t2 = t * t;
// Above Horizon
mu = muHorizon - INTERPOLATION_EPS;
//r0 = sqrt(r * r + t * t + 2.0f * r * t * mu);
// From cosine law where t = distance between x and x0
// r0^2 = r^2 + t^2 - 2 * r * t * cos(PI-theta)
r0 = sqrt(r2 + t2 + 2.0f * r * t * mu);
// From the dot product: cos(theta0) = (x0 dot v)/(||ro||*||v||)
// mu0 = ((x + t) dot v) / r0
// mu0 = (x dot v + t dot v) / r0
// mu0 = (r*mu + t) / r0
mu0 = (r * mu + t) / r0;
vec4 inScatterAboveX = texture4D(inscatterTexture, r, mu, muSun, nu);
vec4 inScatterAboveXs = texture4D(inscatterTexture, r0, mu0, muSun0, nu);
// Attention for the attenuation.r value applied to the S_Mie
vec4 inScatterAbove = max(inScatterAboveX - attenuation.rgbr * inScatterAboveXs, 0.0f);
// Below Horizon
mu = muHorizon + INTERPOLATION_EPS;
r0 = sqrt(r2 + t2 + 2.0f * r * t * mu);
mu0 = (r * mu + t) / r0;
vec4 inScatterBelowX = texture4D(inscatterTexture, r, mu, muSun, nu);
vec4 inScatterBelowXs = texture4D(inscatterTexture, r0, mu0, muSun0, nu);
// Attention for the attenuation.r value applied to the S_Mie
vec4 inScatterBelow = max(inScatterBelowX - attenuation.rgbr * inScatterBelowXs, 0.0);
// Interpolate between above and below inScattering radiance
inscatterRadiance = mix(inScatterAbove, inScatterBelow, interpolationValue);
}
// The w component of inscatterRadiance has stored the Cm,r value (Cm = Sm[L0])
// So, we must reintroduce the Mie inscatter by the proximity rule as described in the
// paper by Bruneton and Neyret in "Angular precision" paragraph:
// Hermite interpolation between two values
// This step is done because imprecision problems happen when the Sun is slightly below
// the horizon. When this happen, we avoid the Mie scattering contribution.
inscatterRadiance.w *= smoothstep(0.0f, 0.02f, muSun);
vec3 inscatterMie = inscatterRadiance.rgb * inscatterRadiance.a / max(inscatterRadiance.r, 1e-4) *
(betaRayleigh.r / betaRayleigh);
radiance = max(inscatterRadiance.rgb * rayleighPhase + inscatterMie * miePhase, 0.0f);
// Finally we add the Lsun (all calculations are done with no Lsun so
// we can change it on the fly with no precomputations)
// return radiance * sunRadiance;
vec3 finalScatteringRadiance = radiance * sunIntensity;
if (groundHit) {
return finalScatteringRadiance;
} else {
return ((r-Rg)/(Rt-Rg))*spaceColor.rgb * backgroundExposure + finalScatteringRadiance;
}
}
/*
@@ -459,50 +458,50 @@ vec3 groundColor(const vec3 x, const float t, const vec3 v, const vec3 s, const
const vec3 normal, const float irradianceFactor,
const float waterReflectance, const float sunIntensity)
{
vec3 reflectedRadiance = vec3(0.0f);
vec3 reflectedRadiance = vec3(0.0f);
// First we obtain the ray's end point on the surface
vec3 x0 = x + t * v;
float r0 = length(x0);
// Normal of intersection point.
vec3 n = normalize(normal);
//vec4 groundReflectance = groundColor * vec4(.37);
vec4 groundReflectance = groundColor *
vec4(groundRadianceEmittion, groundRadianceEmittion, groundRadianceEmittion, 1.0f);
// First we obtain the ray's end point on the surface
vec3 x0 = x + t * v;
float r0 = length(x0);
// Normal of intersection point.
vec3 n = normalize(normal);
//vec4 groundReflectance = groundColor * vec4(.37);
vec4 groundReflectance = groundColor *
vec4(groundRadianceEmittion, groundRadianceEmittion, groundRadianceEmittion, 1.0f);
// L0 is not included in the irradiance texture.
// We first calculate the light attenuation from the top of the atmosphere
// to x0.
float dotNS = dot(n, s);
float muSun = max(dotNS, 0.0f);
// L0 is not included in the irradiance texture.
// We first calculate the light attenuation from the top of the atmosphere
// to x0.
float dotNS = dot(n, s);
float muSun = max(dotNS, 0.0f);
// Is direct Sun light arriving at x0? If not, there is no direct light from Sun (shadowed)
vec3 transmittanceL0 = muSun < -sqrt(1.0f - ((Rg * Rg) / (r0 * r0))) ? vec3(0.0f) : transmittanceLUT(r0, muSun);
// E[L*] at x0
vec3 irradianceReflected = irradiance(irradianceTexture, r0, muSun) * irradianceFactor;
// Is direct Sun light arriving at x0? If not, there is no direct light from Sun (shadowed)
vec3 transmittanceL0 = muSun < -sqrt(1.0f - ((Rg * Rg) / (r0 * r0))) ?
vec3(0.0f) : transmittanceLUT(r0, muSun);
// E[L*] at x0
vec3 irradianceReflected = irradiance(irradianceTexture, r0, muSun) * irradianceFactor;
// R[L0] + R[L*]
vec3 groundRadiance = (dotNS < -0.2f ? groundReflectance.rgb * 15 : groundReflectance.rgb) *
(muSun * transmittanceL0 + irradianceReflected) *
sunIntensity / M_PI;
// R[L0] + R[L*]
vec3 groundRadiance = (dotNS < -0.2f ? groundReflectance.rgb * 15 : groundReflectance.rgb) *
(muSun * transmittanceL0 + irradianceReflected) * sunIntensity / M_PI;
// Specular reflection from sun on oceans and rivers
if ((waterReflectance > 0.1) && (muSun > 0.0)) {
vec3 h = normalize(s - v);
// Fresnell Schlick's approximation
float fresnel = 0.02f + 0.98f * pow(1.0f - dot(-v, h), 5.0f);
// Walter BRDF approximation
float waterBrdf = fresnel * pow(max(dot(h, n), 0.0f), 150.0f);
// Adding Fresnell and Water BRDFs approximation to the final surface color
// (After adding the sunRadiance and the attenuation of the Sun through atmosphere)
groundRadiance += waterReflectance * max(waterBrdf, 0.0) * transmittanceL0 * sunIntensity;
}
//return groundRadiance;
// Finally, we attenuate the surface Radiance from the the point x0 to the camera location.
reflectedRadiance = attenuationXtoX0 * groundRadiance;
// Returns reflectedRadiance = 0.0 if the ray doesn't hit the ground.
return reflectedRadiance;
// Specular reflection from sun on oceans and rivers
if ((waterReflectance > 0.1) && (muSun > 0.0)) {
vec3 h = normalize(s - v);
// Fresnell Schlick's approximation
float fresnel = 0.02f + 0.98f * pow(1.0f - dot(-v, h), 5.0f);
// Walter BRDF approximation
float waterBrdf = fresnel * pow(max(dot(h, n), 0.0f), 150.0f);
// Adding Fresnell and Water BRDFs approximation to the final surface color
// (After adding the sunRadiance and the attenuation of the Sun through atmosphere)
groundRadiance += waterReflectance * max(waterBrdf, 0.0) * transmittanceL0 * sunIntensity;
}
//return groundRadiance;
// Finally, we attenuate the surface Radiance from the the point x0 to the camera location.
reflectedRadiance = attenuationXtoX0 * groundRadiance;
// Returns reflectedRadiance = 0.0 if the ray doesn't hit the ground.
return reflectedRadiance;
}
/*
@@ -524,10 +523,13 @@ vec3 groundColor(const vec3 x, const float t, const vec3 v, const vec3 s, const
*/
vec3 sunColor(const vec3 x, const float t, const vec3 v, const vec3 s, const float r,
const float mu, const float irradianceFactor) {
vec3 transmittance = (r <= Rt) ? ( mu < -sqrt(1.0f - (Rg*Rg)/(r*r)) ? vec3(0.0f) : transmittanceLUT(r, mu)) : vec3(1.0f);
float sunFinalColor = step(cos(M_PI / 650.0), dot(v, s)) * sunRadiance * (1.0 - irradianceFactor);
vec3 transmittance = (r <= Rt) ? ( mu < -sqrt(1.0f - (Rg*Rg)/(r*r)) ?
vec3(0.0f) : transmittanceLUT(r, mu)) : vec3(1.0f);
// JCC: Change this function to a impostor texture with gaussian decay color weighted
// by tge sunRadiance, transmittance and irradianceColor (11/03/2017)
float sunFinalColor = step(cos(M_PI / 650.0), dot(v, s)) * sunRadiance * (1.0 - irradianceFactor);
return transmittance * sunFinalColor;
return transmittance * sunFinalColor;
}
void main() {
@@ -550,7 +552,8 @@ void main() {
int nSamples = 1;
if (complex) {
nSamples = nAaSamples;
//nSamples = nAaSamples;
nSamples = nAaSamples > 1 ? nAaSamples/2 : nAaSamples;
}
for (int i = 0; i < nSamples; i++) {
@@ -612,52 +615,52 @@ void main() {
pixelDepth *= 0.001;
fragObjectCoords.xyz *= 0.001;
if (position.xyz != vec3(0.0) && (pixelDepth < offset)) {
atmosphereFinalColor += vec4(HDR(color.xyz * backgroundExposure), color.a);
//discard;
if (position.xyz != vec3(0.0) && (pixelDepth < offset)) {
atmosphereFinalColor += vec4(HDR(color.xyz * backgroundExposure), color.a);
//discard;
} else {
// Following paper nomenclature
double t = offset;
vec3 attenuation;
// Following paper nomenclature
double t = offset;
vec3 attenuation;
// Moving observer from camera location to top atmosphere
// If the observer is already inside the atm, offset = 0.0
// and no changes at all.
vec3 x = vec3(ray.origin.xyz + t*ray.direction.xyz);
float r = 0.0;//length(x);
vec3 v = vec3(ray.direction.xyz);
float mu = 0.0;//dot(x, v) / r;
vec3 s = vec3(sunDirectionObj);
float tF = float(maxLength - t);
// Moving observer from camera location to top atmosphere
// If the observer is already inside the atm, offset = 0.0
// and no changes at all.
vec3 x = vec3(ray.origin.xyz + t*ray.direction.xyz);
float r = 0.0;//length(x);
vec3 v = vec3(ray.direction.xyz);
float mu = 0.0;//dot(x, v) / r;
vec3 s = vec3(sunDirectionObj);
float tF = float(maxLength - t);
// Because we may move the camera origin to the top of atmosphere
// we also need to adjust the pixelDepth for tdCalculateRayRenderableGlobehis offset so the
// next comparison with the planet's ground make sense:
pixelDepth -= offset;
dvec4 onATMPos = dModelTransformMatrix * dvec4(x*1000.0, 1.0);
vec4 eclipseShadowATM = calcShadow(shadowDataArray, onATMPos.xyz, false);
vec4 eclipseShadowPlanet = calcShadow(shadowDataArray, fragWorldCoords.xyz, true);
// Because we may move the camera origin to the top of atmosphere
// we also need to adjust the pixelDepth for tdCalculateRayRenderableGlobehis offset so the
// next comparison with the planet's ground make sense:
pixelDepth -= offset;
float sunIntensityInscatter = sunRadiance * eclipseShadowATM.x;
float sunIntensityGround = sunRadiance * eclipseShadowPlanet.x;
dvec4 onATMPos = dModelTransformMatrix * dvec4(x*1000.0, 1.0);
vec4 eclipseShadowATM = calcShadow(shadowDataArray, onATMPos.xyz, false);
vec4 eclipseShadowPlanet = calcShadow(shadowDataArray, fragWorldCoords.xyz, true);
float sunIntensityInscatter = sunRadiance * eclipseShadowATM.x;
float sunIntensityGround = sunRadiance * eclipseShadowPlanet.x;
float irradianceFactor = 0.0;
float irradianceFactor = 0.0;
vec3 inscatterColor = inscatterRadiance(x, tF, irradianceFactor, v,
s, r, mu, attenuation,
vec3(fragObjectCoords.xyz),
maxLength, pixelDepth,
color, sunIntensityInscatter);
vec3 groundColor = groundColor(x, tF, v, s, r, mu, attenuation,
color, normal.xyz, irradianceFactor,
otherData.r, sunIntensityGround);
vec3 sunColor = sunColor(x, tF, v, s, r, mu, irradianceFactor);
// Final Color of ATM plus terrain:
vec4 finalRadiance = vec4(HDR(inscatterColor + groundColor + sunColor), 1.0);
atmosphereFinalColor += finalRadiance;
vec3 inscatterColor = inscatterRadiance(x, tF, irradianceFactor, v,
s, r, mu, attenuation,
vec3(fragObjectCoords.xyz),
maxLength, pixelDepth,
color, sunIntensityInscatter);
vec3 groundColor = groundColor(x, tF, v, s, r, mu, attenuation,
color, normal.xyz, irradianceFactor,
otherData.r, sunIntensityGround);
vec3 sunColor = sunColor(x, tF, v, s, r, mu, irradianceFactor);
// Final Color of ATM plus terrain:
vec4 finalRadiance = vec4(HDR(inscatterColor + groundColor + sunColor), 1.0);
atmosphereFinalColor += finalRadiance;
}
}
else { // no intersection
@@ -669,13 +672,13 @@ void main() {
renderTarget = atmosphereFinalColor / float(nSamples);
}
else { // culling
//discard;
vec4 color = vec4(0.0f);
for (int i = 0; i < nAaSamples; i++) {
color += texelFetch(mainColorTexture, ivec2(gl_FragCoord), i);
}
color /= float(nAaSamples);
renderTarget = vec4(HDR(color.xyz * backgroundExposure), color.a);
discard;
// vec4 color = vec4(0.0f);
// for (int i = 0; i < nAaSamples; i++) {
// color += texelFetch(mainColorTexture, ivec2(gl_FragCoord), i);
// }
// color /= float(nAaSamples);
// renderTarget = vec4(HDR(color.xyz * backgroundExposure), color.a);
}
}
@@ -582,8 +582,17 @@ void RenderableBillboardsCloud::renderBillboards(const RenderData& data, const g
_program->setUniform("modelViewTransform", modelViewMatrix);
_program->setUniform("modelViewProjectionTransform", glm::dmat4(projectionMatrix) * modelViewMatrix);
_program->setUniform("cameraPosition", data.camera.positionVec3());
_program->setUniform("cameraLookUp", data.camera.lookUpVectorWorldSpace());
glm::dmat4 modelMatrix =
glm::translate(glm::dmat4(1.0), data.modelTransform.translation) * // Translation
glm::dmat4(data.modelTransform.rotation) * // Spice rotation
glm::scale(glm::dmat4(1.0), glm::dvec3(data.modelTransform.scale));
glm::dmat4 worldToModelTransform = glm::inverse(modelMatrix);
_program->setUniform("cameraPosition", glm::dvec3(worldToModelTransform * glm::dvec4(data.camera.positionVec3(), 1.0)));
_program->setUniform("cameraLookUp", glm::dvec3(worldToModelTransform * glm::dvec4(data.camera.lookUpVectorWorldSpace(),1.0)));
//_program->setUniform("cameraPosition", data.camera.positionVec3());
//_program->setUniform("cameraLookUp", data.camera.lookUpVectorWorldSpace());
_program->setUniform("renderOption", _renderOption.value());
glm::dvec4 centerScreenWorld = glm::inverse(data.camera.combinedViewMatrix()) * glm::dvec4(0.0, 0.0, 0.0, 1.0);
_program->setUniform("centerScreenInWorldPosition", centerScreenWorld);
@@ -702,9 +711,13 @@ void RenderableBillboardsCloud::render(const RenderData& data, RendererTasks&) {
glm::dmat4 modelViewMatrix = data.camera.combinedViewMatrix() * modelMatrix;
glm::mat4 viewMatrix = data.camera.viewMatrix();
glm::mat4 projectionMatrix = data.camera.projectionMatrix();
glm::dmat4 modelViewProjectionMatrix = glm::dmat4(projectionMatrix) * modelViewMatrix;
glm::dmat4 modelViewProjectionMatrix = glm::dmat4(projectionMatrix) *
modelViewMatrix;
glm::vec3 lookup = data.camera.lookUpVectorWorldSpace();
//glm::vec3 viewDirection = glm::vec3(glm::dmat4(OsEng.renderEngine().getSGCTModelMatrix()) * glm::dvec4(data.camera.viewDirectionWorldSpace(), 1.0));
glm::vec3 viewDirection = data.camera.viewDirectionWorldSpace();
glm::vec3 right = glm::cross(viewDirection, lookup);
glm::vec3 up = glm::cross(right, viewDirection);
@@ -57,27 +57,28 @@ const vec2 corners[4] = vec2[4](
void main() {
vec4 pos = gl_in[0].gl_Position;
vec4 pos = gl_in[0].gl_Position;
gs_colorMap = colorMap[0];
double scaleMultiply = exp(scaleFactor/10);
dvec3 scaledRight = dvec3(0.0);
dvec3 scaledUp = dvec3(0.0);
dvec3 scaledRight = dvec3(0.0);
dvec3 scaledUp = dvec3(0.0);
if (renderOption == 0) {
scaledRight = scaleMultiply * right/2.0f;
scaledUp = scaleMultiply * up/2.0f;
scaledUp = scaleMultiply * up/2.0f;
} else if (renderOption == 1) {
dvec3 normal = normalize(cameraPosition - dvec3(pos.xyz));
dvec3 normal = normalize(cameraPosition - dvec3(pos.xyz));
dvec3 newRight = normalize(cross(cameraLookUp, normal));
dvec3 newUp = cross(normal, newRight);
scaledRight = scaleMultiply * newRight/2.0f;
scaledUp = scaleMultiply * newUp/2.0f;
dvec3 newUp = cross(normal, newRight);
scaledRight = scaleMultiply * newRight/2.0f;
scaledUp = scaleMultiply * newUp/2.0f;
} else if (renderOption == 2) {
dvec3 normal = normalize(centerScreenInWorldPosition.xyz - dvec3(pos.xyz));
dvec3 normal = normalize(centerScreenInWorldPosition.xyz - dvec3(pos.xyz));
dvec3 newRight = normalize(cross(cameraLookUp, normal));
dvec3 newUp = cross(normal, newRight);
scaledRight = scaleMultiply * newRight/2.0f;
scaledUp = scaleMultiply * newUp/2.0f;
dvec3 newUp = cross(normal, newRight);
scaledRight = scaleMultiply * newRight/2.0f;
scaledUp = scaleMultiply * newUp/2.0f;
}
double unit = PARSEC;
@@ -101,17 +102,20 @@ void main() {
dvec4 dpos = dvec4(dvec3(pos.xyz) * unit, 1.0);
texCoord = corners[0];
vec4 initialPosition = z_normalization(vec4(modelViewProjectionTransform * dvec4(dpos.xyz - scaledRight - scaledUp, dpos.w)));
vs_screenSpaceDepth = initialPosition.w;
vec4 initialPosition = z_normalization(vec4(modelViewProjectionTransform *
dvec4(dpos.xyz - scaledRight - scaledUp, dpos.w)));
vs_screenSpaceDepth = initialPosition.w;
gl_Position = initialPosition;
EmitVertex();
texCoord = corners[1];
gl_Position = z_normalization(vec4(modelViewProjectionTransform * dvec4(dpos.xyz + scaledRight - scaledUp, dpos.w)));
texCoord = corners[1];
gl_Position = z_normalization(vec4(modelViewProjectionTransform *
dvec4(dpos.xyz + scaledRight - scaledUp, dpos.w)));
EmitVertex();
texCoord = corners[2];
vec4 crossCorner = z_normalization(vec4(modelViewProjectionTransform * dvec4(dpos.xyz + scaledUp + scaledRight, dpos.w)));
vec4 crossCorner = z_normalization(vec4(modelViewProjectionTransform *
dvec4(dpos.xyz + scaledUp + scaledRight, dpos.w)));
gl_Position = crossCorner;
EmitVertex();
EndPrimitive(); // First Triangle
@@ -125,7 +129,8 @@ void main() {
EmitVertex();
texCoord = corners[3];
gl_Position = z_normalization(vec4(modelViewProjectionTransform * dvec4(dpos.xyz + scaledUp - scaledRight, dpos.w)));
gl_Position = z_normalization(vec4(modelViewProjectionTransform *
dvec4(dpos.xyz + scaledUp - scaledRight, dpos.w)));
EmitVertex();
EndPrimitive(); // Second Triangle
}
+2 -1
View File
@@ -7,7 +7,8 @@ return {
-- occurs in a single window, a fisheye projection, or a dome cluster system
-- A regular 1280x720 window
SGCTConfig = sgct.config.single{1280, 720},
--SGCTConfig = sgct.config.single{1280, 720, scene={orientation = { yaw = 120, pitch = 15, roll = 0.0 }}},
SGCTConfig = "${CONFIG}/single_two_win.xml",
-- A regular 1920x1080 window
-- SGCTConfig = sgct.config.single{1920, 1080},
+18 -18
View File
@@ -1019,30 +1019,30 @@ void FramebufferRenderer::render(float blackoutFactor, bool doPerformanceMeasure
glDrawBuffers(1, dBuffer);
glClear(GL_COLOR_BUFFER_BIT);
//// HDR Image Control and Resolve
//_hdrBackGroundProgram->activate();
// HDR Image Control and Resolve
_hdrBackGroundProgram->activate();
//ghoul::opengl::TextureUnit mainColorTextureUnit;
//mainColorTextureUnit.activate();
//glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, _mainColorTexture);
ghoul::opengl::TextureUnit mainColorTextureUnit;
mainColorTextureUnit.activate();
glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, _mainColorTexture);
//_hdrBackGroundProgram->setUniform("mainColorTexture", mainColorTextureUnit);
//_hdrBackGroundProgram->setUniform("nAaSamples", _nAaSamples);
//_hdrBackGroundProgram->setUniform("exposure", _hdrExposure);
//_hdrBackGroundProgram->setUniform("backgroundExposure", _hdrBackground);
//_hdrBackGroundProgram->setUniform("gamma", _gamma);
_hdrBackGroundProgram->setUniform("mainColorTexture", mainColorTextureUnit);
_hdrBackGroundProgram->setUniform("nAaSamples", _nAaSamples);
_hdrBackGroundProgram->setUniform("exposure", _hdrExposure);
_hdrBackGroundProgram->setUniform("backgroundExposure", _hdrBackground);
_hdrBackGroundProgram->setUniform("gamma", _gamma);
//glDisable(GL_DEPTH_TEST);
//glDepthMask(false);
glDisable(GL_DEPTH_TEST);
glDepthMask(false);
//glBindVertexArray(_screenQuad);
//glDrawArrays(GL_TRIANGLES, 0, 6);
//glBindVertexArray(0);
glBindVertexArray(_screenQuad);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
//glDepthMask(true);
//glEnable(GL_DEPTH_TEST);
glDepthMask(true);
glEnable(GL_DEPTH_TEST);
//_hdrBackGroundProgram->deactivate();
_hdrBackGroundProgram->deactivate();
for (const DeferredcasterTask& deferredcasterTask : tasks.deferredcasterTasks) {