Mirror of https://github.com/OpenSpace/OpenSpace.git (synced 2026-01-25 21:48:41 -06:00)
Added HDR exposure user selection. Fixed bugs in the atmosphere shaders. Changed calculations to double precision for better accuracy. Removed unused code.
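For orientation, the hunks below wire a new user-facing HDR exposure property into the renderable and feed it to the shaders as the "exposure" uniform. A minimal sketch of that pattern, condensed from the diff (the helper name setupHdrProperty is hypothetical; everything else follows the code shown below):

    // Sketch of the property-to-uniform wiring added by this commit.
    properties::FloatProperty _hdrExpositionP{ "hdrExposition", "HDR", 0.0f, 0.05f, 1.0f };
    float _hdrConstant = 0.4f;

    void RenderablePlanetAtmosphere::setupHdrProperty() {   // hypothetical helper
        _hdrExpositionP.set(_hdrConstant);
        _hdrExpositionP.onChange(
            std::bind(&RenderablePlanetAtmosphere::updateAtmosphereParameters, this));
        addProperty(_hdrExpositionP);
    }

    void RenderablePlanetAtmosphere::updateAtmosphereParameters() {
        _hdrConstant = _hdrExpositionP;   // copy the UI value back into the render state
        // per frame: _programObject->setUniform("exposure", _hdrConstant);
    }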
@@ -133,6 +133,7 @@ namespace openspace {
, _rayleighScatteringCoeff(glm::vec3(0.f))
, _mieScatteringCoeff(glm::vec3(0.f))
, _sunRadianceIntensity(50.0f)
, _hdrConstant(0.4f)
, _hasNightTexture(false)
, _hasHeightTexture(false)
, _hasReflectanceTexture(false)
@@ -147,6 +148,7 @@ namespace openspace {
"Mie Scattering/Extinction Proportion Coefficient (%)", 0.9f, 0.1f, 1.0f)
, _mieAsymmetricFactorGP("mieAsymmetricFactorG", "Mie Asymmetric Factor G", 1.0f, -1.0f, 1.0f)
, _sunIntensityP("sunIntensity", "Sun Intensity", 50.0f, 0.1f, 100.0f)
, _hdrExpositionP("hdrExposition", "HDR", 0.0f, 0.05f, 1.0f)
{
std::string name;
bool success = dictionary.getValue(SceneGraphNode::KeyName, name);
@@ -453,6 +455,10 @@ namespace openspace {
_sunIntensityP.set(_sunRadianceIntensity);
_sunIntensityP.onChange(std::bind(&RenderablePlanetAtmosphere::updateAtmosphereParameters, this));
addProperty(_sunIntensityP);

_hdrExpositionP.set(_hdrConstant);
_hdrExpositionP.onChange(std::bind(&RenderablePlanetAtmosphere::updateAtmosphereParameters, this));
addProperty(_hdrExpositionP);
}

@@ -889,7 +895,7 @@ namespace openspace {
}

// HDR
_programObject->setUniform("exposure", 0.4f);
_programObject->setUniform("exposure", _hdrConstant);

}
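The hdr.glsl include that consumes this "exposure" uniform is not part of the diff. As an assumption about what the constant controls, a typical exposure mapping of the computed radiance L to display color is

    C_out = 1 - e^{-exposure \cdot L}

so raising the new hdrExposition property brightens the tone-mapped atmosphere; the default fed to it is _hdrConstant = 0.4.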
@@ -931,7 +937,7 @@ namespace openspace {
//std::cout << "\n\nCam Position in View: " << camPosView.x << ", " << camPosView.y << ", " << camPosView.z << std::endl;
//std::cout << "\n\nCam Position from Earth in View: " << camDirView.x << ", " << camDirView.y << ", " << camDirView.z << std::endl;

std::cout << std::endl;
//std::cout << std::endl;

GLint defaultFBO;
glGetIntegerv(GL_FRAMEBUFFER_BINDING, &defaultFBO);
@@ -989,9 +995,10 @@ namespace openspace {

// Object ModelTransform
std::cout << "\n transform: " << glm::to_string(transform) << std::endl;
//std::cout << "\n transform: " << glm::to_string(transform) << std::endl;

_deferredAtmosphereProgramObject->setUniform("inverseTransformMatrix", glm::inverse(transform));
_deferredAtmosphereProgramObject->setUniform("dInverseTransformMatrix", glm::inverse(glm::dmat4(transform)));

// The following scale comes from PSC transformations.
float fScaleFactor = data.camera.scaling().x * pow(10.0, data.camera.scaling().y);
@@ -999,25 +1006,34 @@ namespace openspace {
glm::mat4 fScaleCamTransf = glm::scale(glm::vec3(fScaleFactor));
glm::dmat4 dfScaleCamTransf = glm::scale(glm::dvec3(fScaleFactor));
_deferredAtmosphereProgramObject->setUniform("scaleTransformMatrix", fScaleCamTransf);
_deferredAtmosphereProgramObject->setUniform("dScaleTransformMatrix", dfScaleCamTransf);
_deferredAtmosphereProgramObject->setUniform("inverseScaleTransformMatrix", glm::inverse(fScaleCamTransf));
_deferredAtmosphereProgramObject->setUniform("dInverseScaleTransformMatrix", glm::inverse(dfScaleCamTransf));
//std::cout << "\n fScaleCamTransf: " << glm::to_string(fScaleCamTransf) << std::endl;

// Object Space to World Space (in meters)
glm::mat4 obj2World = glm::translate(glm::mat4(1.0), data.position.vec3()) * transform;
glm::dmat4 dObj2World = glm::translate(data.position.dvec3()) * glm::dmat4(transform);
_deferredAtmosphereProgramObject->setUniform("objToWorldTransform", obj2World);
glm::mat4 world2Obj = glm::inverse(obj2World);
glm::dmat4 dWorld2Obj = glm::inverse(dObj2World);
_deferredAtmosphereProgramObject->setUniform("objToWorldTransform", obj2World);
_deferredAtmosphereProgramObject->setUniform("worldToObjectTransform", world2Obj);
_deferredAtmosphereProgramObject->setUniform("dObjToWorldTransform", dObj2World);
_deferredAtmosphereProgramObject->setUniform("dWorldToObjectTransform", dWorld2Obj);

// World to Eye Space in OS
glm::mat4 world2Eye = fScaleCamTransf * glm::mat4(data.camera.viewRotationMatrix()) *
glm::translate(-data.camera.position().vec3());
glm::dmat4 dWorld2Eye = dfScaleCamTransf * data.camera.viewRotationMatrix() *
glm::translate(-data.camera.position().dvec3());
_deferredAtmosphereProgramObject->setUniform("worldToEyeTransform", world2Eye);
glm::mat4 eye2World = glm::inverse(world2Eye);
glm::dmat4 dEye2World = glm::inverse(dWorld2Eye);
_deferredAtmosphereProgramObject->setUniform("eyeToWorldTransform", eye2World);
_deferredAtmosphereProgramObject->setUniform("inverseCamRotTransform", glm::inverse(glm::mat4(data.camera.viewRotationMatrix())));
_deferredAtmosphereProgramObject->setUniform("dInverseCamRotTransform", glm::inverse(data.camera.viewRotationMatrix()));
_deferredAtmosphereProgramObject->setUniform("worldToOsEyeTransform", world2Eye);
_deferredAtmosphereProgramObject->setUniform("osEyeToWorldTransform", eye2World);
_deferredAtmosphereProgramObject->setUniform("dWorldToOsEyeTransform", dWorld2Eye);
_deferredAtmosphereProgramObject->setUniform("dOsEyeToWorldTransform", dEye2World);

// Eye Space in OS to Eye Space in SGCT
glm::mat4 osEye2SGCTEye = data.camera.viewMatrix();
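The d-prefixed uniforms above carry each transform in double precision alongside the float version. An illustrative, self-contained sketch of why that matters at planetary scales (the positions are hypothetical values, not taken from the code):

    #include <glm/glm.hpp>

    // Illustration only: world-space positions of the order of 1e11 m lose
    // roughly 1e4 m of resolution when rounded to single precision.
    glm::dvec3 camPosWorld(1.496e11, 2.0e7, -3.0e6);       // hypothetical, ~1 AU from origin
    glm::dvec3 objPosWorld(1.496e11 + 6.371e6, 0.0, 0.0);  // hypothetical planet centre

    // float path: both operands are rounded before the subtraction
    glm::vec3 fltDelta = glm::vec3(objPosWorld) - glm::vec3(camPosWorld);
    // double path: the subtraction is done in double, then narrowed once
    glm::vec3 dblDelta = glm::vec3(objPosWorld - camPosWorld);
    // fltDelta can be off by kilometres; dblDelta preserves the ~6.4e6 m offset.

Composing and inverting the object-to-eye chain in dmat4/dvec3 and narrowing only at the end follows the same reasoning.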
@@ -1025,18 +1041,21 @@ namespace openspace {
|
||||
glm::mat4 sgctEye2OSEye = glm::inverse(osEye2SGCTEye);
|
||||
glm::dmat4 dSgctEye2OSEye = glm::inverse(dOsEye2SGCTEye);
|
||||
|
||||
_deferredAtmosphereProgramObject->setUniform("osEye2SGCTEyeTranform", osEye2SGCTEye);
|
||||
_deferredAtmosphereProgramObject->setUniform("sgctEye2OSEyeTranform", sgctEye2OSEye);
|
||||
_deferredAtmosphereProgramObject->setUniform("osEyeToSGCTEyeTranform", osEye2SGCTEye);
|
||||
_deferredAtmosphereProgramObject->setUniform("sgctEyeToOSEyeTranform", sgctEye2OSEye);
|
||||
_deferredAtmosphereProgramObject->setUniform("dOsEyeToSGCTEyeTranform", dOsEye2SGCTEye);
|
||||
_deferredAtmosphereProgramObject->setUniform("dSgctEyeToOSEyeTranform", dSgctEye2OSEye);
|
||||
|
||||
// Eye Space in SGCT to Projection (Clip) Space in SGCT
|
||||
glm::mat4 eye2View = data.camera.projectionMatrix();
|
||||
glm::dmat4 dEye2View = glm::dmat4(data.camera.projectionMatrix());
|
||||
_deferredAtmosphereProgramObject->setUniform("eyeToViewTranform", eye2View);
|
||||
_deferredAtmosphereProgramObject->setUniform("viewToEyeTranform", glm::inverse(eye2View));
|
||||
glm::mat4 sgctEye2Clip = data.camera.projectionMatrix();
|
||||
glm::dmat4 dSgctEye2Clip = glm::dmat4(data.camera.projectionMatrix());
|
||||
glm::mat4 inverseProjection = glm::inverse(sgctEye2Clip);
|
||||
glm::dmat4 dInverseProjection = glm::inverse(dSgctEye2Clip);
|
||||
|
||||
glm::mat4 inverseProjection = glm::inverse(eye2View);
|
||||
glm::dmat4 dInverseProjection = glm::inverse(dEye2View);
|
||||
_deferredAtmosphereProgramObject->setUniform("sgctEyeToClipTranform", sgctEye2Clip);
|
||||
_deferredAtmosphereProgramObject->setUniform("inverseSgctProjectionMatrix", inverseProjection);
|
||||
_deferredAtmosphereProgramObject->setUniform("dSgctEyeToClipTranform", dSgctEye2Clip);
|
||||
_deferredAtmosphereProgramObject->setUniform("dInverseSgctProjectionMatrix", dInverseProjection);
|
||||
/*std::cout << "\nProjection: " << glm::to_string(data.camera.projectionMatrix()) << std::endl;
|
||||
std::cout << "\nInverse Projection: " << glm::to_string(inverseProjection) << std::endl;*/
|
||||
|
||||
@@ -1045,61 +1064,78 @@ namespace openspace {
|
||||
glm::translate(glm::mat4(1.0), -data.camera.position().vec3()) *
|
||||
glm::translate(glm::mat4(1.0), data.position.vec3())
|
||||
* transform;
|
||||
|
||||
glm::mat4 inverseCompleteVertexTransformations = glm::inverse(completeVertexTransformations);
|
||||
|
||||
glm::dmat4 dCompleteVertexTransformations = glm::dmat4(data.camera.viewProjectionMatrix()) *
|
||||
data.camera.viewRotationMatrix() *
|
||||
glm::translate(glm::dmat4(1.0), -data.camera.position().dvec3()) *
|
||||
glm::translate(glm::dmat4(1.0), data.position.dvec3())
|
||||
* glm::dmat4(transform);
|
||||
glm::dmat4 dCompleteVertexTransformationsInverse = glm::inverse(dCompleteVertexTransformations);
|
||||
glm::dmat4 dInverseCompleteVertexTransformations = glm::inverse(dCompleteVertexTransformations);
|
||||
|
||||
_deferredAtmosphereProgramObject->setUniform("completeVertexTransform", completeVertexTransformations);
|
||||
glm::mat4 inverseCompleteVertexTransformations = glm::inverse(completeVertexTransformations);
|
||||
_deferredAtmosphereProgramObject->setUniform("inverseCompleteVertexTransform", inverseCompleteVertexTransformations);
|
||||
_deferredAtmosphereProgramObject->setUniform("dCompleteVertexTransform", dCompleteVertexTransformations);
|
||||
_deferredAtmosphereProgramObject->setUniform("dInverseCompleteVertexTransform", dInverseCompleteVertexTransformations);
|
||||
|
||||
_deferredAtmosphereProgramObject->setUniform("sgctProjectionMatrix", data.camera.projectionMatrix());
|
||||
_deferredAtmosphereProgramObject->setUniform("inverseSgctProjectionMatrix", inverseProjection);
|
||||
_deferredAtmosphereProgramObject->setUniform("dSgctProjectionMatrix", glm::dmat4(data.camera.projectionMatrix()));
|
||||
_deferredAtmosphereProgramObject->setUniform("dInverseSgctProjectionMatrix", dInverseProjection);
|
||||
/*std::cout << "\nProjection: " << glm::to_string(data.camera.projectionMatrix()) << std::endl;
|
||||
std::cout << "\nInverse Projection: " << glm::to_string(inverseProjection) << std::endl;*/
|
||||
|
||||
|
||||
|
||||
|
||||
//===========================
|
||||
// Testing Transformations:
|
||||
glm::vec4 planetCenterOrigin = glm::vec4(1000.0, 1000.0, 1000.0, 1.0);
|
||||
//===========================
|
||||
// Origin
|
||||
glm::vec4 planetCenterOrigin = glm::vec4(1000.0, 1000.0, 1000.0, 1.0);
|
||||
glm::dvec4 dPlanetCenterOrigin = glm::vec4(1000.0, 1000.0, 1000.0, 1.0);
|
||||
//std::cout << "Planet Position in OS Object Space: " << glm::to_string(planetCenterOrigin) << std::endl;
|
||||
glm::vec4 planetCenterTmp = transform * planetCenterOrigin;
|
||||
|
||||
// Object Coords to World Coords
|
||||
glm::vec4 planetCenterTmp = transform * planetCenterOrigin;
|
||||
glm::dvec4 dPlanetCenterTmp = glm::dmat4(transform) * dPlanetCenterOrigin;
|
||||
std::cout << "Planet Position in OS World Space After Transf: " << glm::to_string(dPlanetCenterTmp) << std::endl;
|
||||
glm::vec4 planetCenterTmpWorld = planetCenterTmp + glm::vec4(data.position.vec3(), 0.0);
|
||||
//std::cout << "Planet Position in OS World Space After Transf: " << glm::to_string(dPlanetCenterTmp) << std::endl;
|
||||
glm::vec4 planetCenterTmpWorld = planetCenterTmp + glm::vec4(data.position.vec3(), 0.0);
|
||||
glm::dvec4 dPlanetCenterTmpWorld = dPlanetCenterTmp + glm::dvec4(data.position.dvec3(), 0.0);
|
||||
std::cout << "Planet Position in OS World Space After Transl: " << glm::to_string(dPlanetCenterTmpWorld) << std::endl;
|
||||
std::cout << "Object Translation Vector: " << glm::to_string(data.position.dvec3()) << std::endl;
|
||||
//std::cout << "Planet Position in OS World Space: " << glm::to_string(planetCenterTmpWorld) << std::endl;
|
||||
planetCenterTmp = planetCenterTmpWorld + glm::vec4(-data.camera.positionVec3(), 0.0);
|
||||
dPlanetCenterTmp = dPlanetCenterTmpWorld + glm::dvec4(-data.camera.positionVec3(), 0.0);
|
||||
glm::vec3 tt = glm::mat3(data.camera.viewRotationMatrix()) * glm::vec3(planetCenterTmp);
|
||||
glm::dvec3 dtt = glm::dmat3(data.camera.viewRotationMatrix()) * glm::dvec3(dPlanetCenterTmp);
|
||||
glm::vec4 planetCenterTmpOSEye = glm::vec4(0.0);
|
||||
glm::dvec4 dPlanetCenterTmpOSEye = glm::dvec4(0.0);
|
||||
//std::cout << "Planet Position in OS World Space After Transl: " << glm::to_string(dPlanetCenterTmpWorld) << std::endl;
|
||||
//std::cout << "Object Translation Vector: " << glm::to_string(data.position.dvec3()) << std::endl;
|
||||
//std::cout << "Planet Position in OS World Space (f): " << glm::to_string(planetCenterTmpWorld) << std::endl;
|
||||
//std::cout << "Planet Position in OS World Space (d): " << glm::to_string(dPlanetCenterTmpWorld) << std::endl;
|
||||
|
||||
// World Coords to Camera Rig (OS Eye) Coords
|
||||
glm::vec4 planetCenterTmpOSEye = planetCenterTmpWorld + glm::vec4(-data.camera.positionVec3(), 0.0);
|
||||
glm::dvec4 dPlanetCenterTmpOSEye = dPlanetCenterTmpWorld + glm::dvec4(-data.camera.positionVec3(), 0.0);
|
||||
glm::vec3 tt = glm::mat3(data.camera.viewRotationMatrix()) * glm::vec3(planetCenterTmpOSEye);
|
||||
glm::dvec3 dtt = glm::dmat3(data.camera.viewRotationMatrix()) * glm::dvec3(dPlanetCenterTmpOSEye);
|
||||
planetCenterTmpOSEye.x = tt.x; planetCenterTmpOSEye.y = tt.y; planetCenterTmpOSEye.z = tt.z; planetCenterTmpOSEye.w = 1.0;
|
||||
dPlanetCenterTmpOSEye.x = dtt.x; dPlanetCenterTmpOSEye.y = dtt.y; dPlanetCenterTmpOSEye.z = dtt.z; dPlanetCenterTmpOSEye.w = 1.0;
|
||||
float scaleF = data.camera.scaling().x * powf(10.0, data.camera.scaling().y);
|
||||
float scaleF = data.camera.scaling().x * powf(10.0, data.camera.scaling().y);
|
||||
double dScaleF = static_cast<double>(data.camera.scaling().x) * pow(10.0, static_cast<double>(data.camera.scaling().y));
|
||||
glm::mat4 scaleM = glm::scale(glm::vec3(scaleF));
|
||||
glm::mat4 scaleM = glm::scale(glm::vec3(scaleF));
|
||||
glm::dmat4 dScaleM = glm::scale(glm::dvec3(dScaleF));
|
||||
planetCenterTmpOSEye = scaleM * planetCenterTmpOSEye;
|
||||
planetCenterTmpOSEye = scaleM * planetCenterTmpOSEye;
|
||||
dPlanetCenterTmpOSEye = dScaleM * dPlanetCenterTmpOSEye;
|
||||
//std::cout << "Planet Position in OS Eye Space: " << glm::to_string(planetCenterTmp) << std::endl;
|
||||
glm::vec4 planetCenterTmpSGCTEye = data.camera.viewMatrix() * planetCenterTmpOSEye;
|
||||
//std::cout << "Planet Position in OS Eye Space (f): " << glm::to_string(planetCenterTmpOSEye) << std::endl;
|
||||
//std::cout << "Planet Position in OS Eye Space (d): " << glm::to_string(dPlanetCenterTmpOSEye) << std::endl;
|
||||
|
||||
// Camera Rig (OS Eye) to SGCT Eye Coords
|
||||
glm::vec4 planetCenterTmpSGCTEye = data.camera.viewMatrix() * planetCenterTmpOSEye;
|
||||
glm::dvec4 dPlanetCenterTmpSGCTEye = glm::dmat4(data.camera.viewMatrix()) * dPlanetCenterTmpOSEye;
|
||||
//std::cout << "Planet Position in SGCT Eye Space: " << glm::to_string(planetCenterTmpSGCTEye) << std::endl;
|
||||
glm::vec4 planetCenterTmpSGCTView = data.camera.projectionMatrix() * planetCenterTmpSGCTEye;
|
||||
//std::cout << "Planet Position in SGCT Eye Space (f): " << glm::to_string(planetCenterTmpSGCTEye) << std::endl;
|
||||
//std::cout << "Planet Position in SGCT Eye Space (d): " << glm::to_string(dPlanetCenterTmpSGCTEye) << std::endl;
|
||||
|
||||
// SGCT Eye Coords to SGCT Clip Coords
|
||||
glm::vec4 planetCenterTmpSGCTView = data.camera.projectionMatrix() * planetCenterTmpSGCTEye;
|
||||
glm::dvec4 dPlanetCenterTmpSGCTView = glm::dmat4(data.camera.projectionMatrix()) * dPlanetCenterTmpSGCTEye;
|
||||
//std::cout << "Planet Position in SGCT View Space: " << glm::to_string(planetCenterTmpSGCTView) << std::endl;
|
||||
|
||||
//std::cout << "Planet Position in SGCT Clip Space (f): " << glm::to_string(planetCenterTmpSGCTView) << std::endl;
|
||||
//std::cout << "Planet Position in SGCT Clip Space (d): " << glm::to_string(dPlanetCenterTmpSGCTView) << std::endl;
|
||||
|
||||
/*
|
||||
/////////////////////////
|
||||
// Inverse Path:
|
||||
std::cout << "------ Inverse Path ------" << std::endl;
|
||||
//planetCenterTmpSGCTView /= planetCenterTmpSGCTView.w;
|
||||
@@ -1135,7 +1171,7 @@ namespace openspace {
|
||||
glm::vec4 ttmp3 = glm::vec4(data.camera.position().vec3() + ttmp2, 1.0);
|
||||
glm::dvec4 dttmp = glm::inverse(dScaleM) * dInversePlanetCenterTmpOSEye;
|
||||
glm::dvec3 dttmp2 = glm::inverse(glm::dmat3(data.camera.viewRotationMatrix())) * glm::dvec3(dttmp);
|
||||
glm::dvec4 dttmp3 = glm::dvec4(data.camera.position().dvec3() + dttmp2, 1.0);
|
||||
glm::dvec4 dttmp3 = glm::dvec4(data.camera.positionVec3() + dttmp2, 1.0);
|
||||
std::cout << "Planet Position in OS World Space (hand) : " << glm::to_string(ttmp3) << std::endl;
|
||||
std::cout << "Planet Position in OS World Space (Orig) : " << glm::to_string(planetCenterTmpWorld) << std::endl;
|
||||
std::cout << "Planet Position in OS World Space (hand D) : " << glm::to_string(dttmp3) << std::endl;
|
||||
@@ -1161,43 +1197,12 @@ namespace openspace {
|
||||
std::cout << "Planet Position in OS Object Space (OD) : " << glm::to_string(dPlanetCenterOrigin) << std::endl;
|
||||
|
||||
std::cout << "Planet Position in OS Object Space (comp) : " << glm::to_string(inverseCompleteVertexTransformations * planetCenterTmpSGCTView) << std::endl;
|
||||
std::cout << "Planet Position in OS Object Space (dcomp): " << glm::to_string(dCompleteVertexTransformationsInverse * glm::dvec4(planetCenterTmpSGCTView)) << std::endl;
|
||||
std::cout << "Planet Position in OS Object Space (dcomp): " << glm::to_string(dInverseCompleteVertexTransformations * dPlanetCenterTmpSGCTView) << std::endl;
|
||||
std::cout << "Planet Position in OS Object Space (Orig) : " << glm::to_string(planetCenterOrigin) << std::endl;
|
||||
|
||||
std::cout << "Planet Position in OS Object Space (No Transf): " << glm::to_string(glm::dvec4(glm::dvec3(dttmp3) - data.position.dvec3(), 1.0)) << std::endl;
|
||||
std::cout << "Planet Position in OS Object Space (no glm): " << dttmp3.x - data.position.dvec3().x << ", " << dttmp3.y - data.position.dvec3().y << ", " << dttmp3.z - data.position.dvec3().z << std::endl;
|
||||
std::cout << "Object Translation Vector: " << glm::to_string(data.position.dvec3()) << std::endl;
|
||||
*/
|
||||
|
||||
|
||||
psc pscPlanetPosObjCoords = PowerScaledCoordinate::CreatePowerScaledCoordinate(ttmp3.x, ttmp3.y, ttmp3.z);
|
||||
pscPlanetPosObjCoords = pscPlanetPosObjCoords - data.position;
|
||||
psc psctmp = glm::inverse(glm::mat3(transform)) * glm::vec3(pscPlanetPosObjCoords.vec4());
|
||||
pscPlanetPosObjCoords[0] = psctmp[0]; pscPlanetPosObjCoords[1] = psctmp[1]; pscPlanetPosObjCoords[2] = psctmp[2];
|
||||
std::cout << "Planet Position in OS Object Space (psc): " << glm::to_string(glm::vec4(pscPlanetPosObjCoords.vec3(),1.0)) << std::endl;
|
||||
std::cout << "Planet Position in OS Object Space (Orig): " << glm::to_string(planetCenterOrigin) << std::endl;
|
||||
|
||||
|
||||
|
||||
//glm::mat4 tmpTrans = glm::translate(transform, data.position.vec3());
|
||||
glm::mat4 tmpTrans = glm::translate(data.position.vec3());
|
||||
std::cout << "\n******* tmpTrans: " << glm::to_string(tmpTrans) << " *******" << std::endl;
|
||||
float divisor = 1.0;
|
||||
for (int i = 0; i < 4; i++) for (int j = 0; j < 4; j++) {
|
||||
if (abs(tmpTrans[i][j] > divisor)) divisor = tmpTrans[i][j];
|
||||
}
|
||||
std::cout << "\n******* Divisor: " << divisor << " *******" << std::endl;
|
||||
glm::mat4 scaledModelTransform = tmpTrans / divisor;
|
||||
std::cout << "\n******* scaledModelTrans: " << glm::to_string(scaledModelTransform) << " *******" << std::endl;
|
||||
glm::mat4 inverScaledModelTransformation = glm::translate(glm::vec3(-scaledModelTransform[3][0], -scaledModelTransform[3][1], -scaledModelTransform[3][2]));
|
||||
std::cout << "\n******* scaledModelTrans Inverse: " << glm::to_string(glm::inverse(scaledModelTransform)) << " *******" << std::endl;
|
||||
std::cout << "\n******* scaledModelTrans Inverse2: " << glm::to_string(inverScaledModelTransformation) << " *******" << std::endl;
|
||||
std::cout << "\n******* scaledModelTrans Back: " << glm::to_string(glm::inverse(scaledModelTransform) / divisor ) << " *******" << std::endl;
|
||||
std::cout << "\n******* scaledModelTrans Back2: " << glm::to_string(inverScaledModelTransformation / divisor) << " *******" << std::endl;
|
||||
glm::vec4 modelPos = (glm::inverse(scaledModelTransform) / divisor) * ttmp3;
|
||||
glm::vec4 modelPos2 = (inverScaledModelTransformation / divisor) * ttmp3;
|
||||
std::cout << "Planet Position in OS Object Space (Emil) : " << glm::to_string(modelPos) << std::endl;
|
||||
std::cout << "Planet Position in OS Object Space (Emil2): " << glm::to_string(modelPos2) << std::endl;
|
||||
std::cout << "Planet Position in OS Object Space (Orig) : " << glm::to_string(planetCenterOrigin) << std::endl;
|
||||
|
||||
/*glm::mat4 invRot = glm::mat4(glm::inverse(glm::mat3(data.camera.viewRotationMatrix())));
|
||||
invRot[3][3] = 1.0;
|
||||
@@ -1237,7 +1242,7 @@ namespace openspace {
|
||||
//std::cout << "\nInverse transform matrix: " << glm::to_string(glm::inverse(transform)) << std::endl;
|
||||
|
||||
|
||||
std::cout << std::endl;
|
||||
//std::cout << std::endl;
|
||||
|
||||
// Camera Position in Object Space in Meters
|
||||
//glm::vec4 cameraPosObjecCoords = glm::vec4(0.0, 0.0, 0.0, 1.0);
|
||||
@@ -1245,37 +1250,52 @@ namespace openspace {
|
||||
// by hand:
|
||||
glm::vec4 cameraPosObjecCoords = glm::inverse(transform) * glm::vec4(-data.position.vec3() + glm::vec3(data.camera.positionVec3()), 1.0);
|
||||
_deferredAtmosphereProgramObject->setUniform("cameraPositionObjectCoords", cameraPosObjecCoords);
|
||||
std::cout << "\n== Camera position Object Space : " << glm::to_string(cameraPosObjecCoords) << std::endl;
|
||||
std::cout << "== Camera position Object Space (other): " << glm::to_string(glm::transpose(glm::mat3(transform)) * (-data.position.vec3() + glm::vec3(data.camera.positionVec3()))) << std::endl;
|
||||
std::cout << "== Camera position World Space : " << glm::to_string(data.camera.positionVec3()) << std::endl;
|
||||
//std::cout << "\n== Camera position Object Space : " << glm::to_string(cameraPosObjecCoords) << std::endl;
|
||||
//std::cout << "== Camera position Object Space (other): " << glm::to_string(glm::transpose(glm::mat3(transform)) * (-data.position.vec3() + glm::vec3(data.camera.positionVec3()))) << std::endl;
|
||||
//std::cout << "== Camera position World Space : " << glm::to_string(data.camera.positionVec3()) << std::endl;
|
||||
|
||||
std::cout << "-- Object position World Space : " << glm::to_string(data.position.vec3()) << std::endl;
|
||||
//std::cout << "-- Object position World Space : " << glm::to_string(data.position.vec3()) << std::endl;
|
||||
//std::cout << "\n-- Object position Obj Space: " << glm::to_string(world2Obj * glm::vec4(data.position.vec3(), 1.0)) << std::endl;
|
||||
|
||||
|
||||
std::cout << "*** Distance Camera Planet (World) : " << glm::distance(glm::vec3(data.camera.positionVec3()), data.position.vec3()) << std::endl;
|
||||
std::cout << "*** Distance Camera Planet (Object) : " << glm::distance(cameraPosObjecCoords, glm::vec4(0.0, 0.0, 0.0, 1.0)) << std::endl;
|
||||
//std::cout << "*** Distance Camera Planet (World) : " << glm::distance(glm::vec3(data.camera.positionVec3()), data.position.vec3()) << std::endl;
|
||||
//std::cout << "*** Distance Camera Planet (Object) : " << glm::distance(cameraPosObjecCoords, glm::vec4(0.0, 0.0, 0.0, 1.0)) << std::endl;
|
||||
|
||||
float depthParams[2] = {0};
|
||||
glGetFloatv(GL_DEPTH_RANGE, depthParams);
|
||||
_deferredAtmosphereProgramObject->setUniform("depthrange", glm::vec2(depthParams[0], depthParams[1]));
|
||||
std::cout << "~~~~~ Depth Params: " << depthParams[0] << ", " << depthParams[1] << std::endl;
|
||||
//std::cout << "~~~~~ Depth Params: " << depthParams[0] << ", " << depthParams[1] << std::endl;
|
||||
|
||||
_deferredAtmosphereProgramObject->setUniform("objpos", glm::vec4(data.position.vec3(),1.0));
|
||||
_deferredAtmosphereProgramObject->setUniform("campos", data.camera.position().vec3());
|
||||
_deferredAtmosphereProgramObject->setUniform("camrot", glm::mat3(data.camera.viewRotationMatrix()));
|
||||
_deferredAtmosphereProgramObject->setUniform("dObjpos", glm::dvec4(data.position.dvec3(),1.0));
|
||||
_deferredAtmosphereProgramObject->setUniform("dCampos", data.camera.positionVec3());
|
||||
_deferredAtmosphereProgramObject->setUniform("dCamrot", glm::dmat3(data.camera.viewRotationMatrix()));
|
||||
// I know it is (0,0,0). It is here just for sake of sanity. :-p
|
||||
glm::dvec3 sunPosWorld =
|
||||
SpiceManager::ref().targetPosition("SUN", "SUN", "GALACTIC", {}, _time, lt);
|
||||
glm::dvec4 sunPosObj = glm::inverse(transform) * glm::dvec4(sunPosWorld - data.position.dvec3(), 1.0);
|
||||
//std::cout << "Sun Obj Coord by : " << glm::to_string(sunPosObj) << std::endl;
|
||||
//std::cout << "Sun Direction in Obj Coord Norm : " << glm::to_string(glm::normalize(glm::dvec3(sunPosObj))) << std::endl;
|
||||
|
||||
_deferredAtmosphereProgramObject->setUniform("sunPositionObj", sunPosObj);
|
||||
_deferredAtmosphereProgramObject->setUniform("sunDirectionObj", glm::normalize(glm::dvec3(sunPosObj)));
|
||||
_deferredAtmosphereProgramObject->setUniform("_performShading", _performShading);
|
||||
|
||||
ghoul::opengl::TextureUnit transmittanceTableTextureUnit;
|
||||
transmittanceTableTextureUnit.activate();
|
||||
glBindTexture(GL_TEXTURE_2D, _transmittanceTableTexture);
|
||||
_deferredAtmosphereProgramObject->setUniform("transmittanceTexture", transmittanceTableTextureUnit);
|
||||
|
||||
ghoul::opengl::TextureUnit irradianceTableTextureUnit;
|
||||
irradianceTableTextureUnit.activate();
|
||||
glBindTexture(GL_TEXTURE_2D, _irradianceTableTexture);
|
||||
_deferredAtmosphereProgramObject->setUniform("irradianceTexture", irradianceTableTextureUnit);
|
||||
|
||||
ghoul::opengl::TextureUnit inScatteringTableTextureUnit;
|
||||
inScatteringTableTextureUnit.activate();
|
||||
glBindTexture(GL_TEXTURE_3D, _inScatteringTableTexture);
|
||||
_deferredAtmosphereProgramObject->setUniform("inscatterTexture", inScatteringTableTextureUnit);
|
||||
|
||||
_deferredAtmosphereProgramObject->setUniform("screenX", (float)m_viewport[0]);
|
||||
@@ -1311,7 +1331,7 @@ namespace openspace {
|
||||
}
|
||||
|
||||
// HDR
|
||||
_deferredAtmosphereProgramObject->setUniform("exposure", 0.4f);
|
||||
_deferredAtmosphereProgramObject->setUniform("exposure", _hdrConstant);
|
||||
|
||||
renderQuadForCalc(_atmosphereRenderVAO, 6);
|
||||
|
||||
@@ -1967,7 +1987,7 @@ namespace openspace {
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
// Stopped using a buffer object for GL_PIXEL_UNPACK_BUFFER
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16F, TRANSMITTANCE_TABLE_WIDTH,
|
||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB32F, TRANSMITTANCE_TABLE_WIDTH,
|
||||
TRANSMITTANCE_TABLE_HEIGHT, 0, GL_RGB, GL_FLOAT, nullptr);
|
||||
while ((err = glGetError()) != GL_NO_ERROR) {
|
||||
const GLubyte * errString = gluErrorString(err);
|
||||
@@ -1985,7 +2005,7 @@ namespace openspace {
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16F, IRRADIANCE_TABLE_WIDTH,
|
||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB32F, IRRADIANCE_TABLE_WIDTH,
|
||||
IRRADIANCE_TABLE_HEIGHT, 0, GL_RGB, GL_FLOAT, nullptr);
|
||||
|
||||
while ((err = glGetError()) != GL_NO_ERROR) {
|
||||
@@ -2005,7 +2025,7 @@ namespace openspace {
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGBA16F_ARB, MU_S_SAMPLES * NU_SAMPLES,
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGBA32F_ARB, MU_S_SAMPLES * NU_SAMPLES,
|
||||
MU_SAMPLES, R_SAMPLES, 0, GL_RGB, GL_FLOAT, nullptr);
|
||||
|
||||
while ((err = glGetError()) != GL_NO_ERROR) {
|
||||
@@ -2025,7 +2045,7 @@ namespace openspace {
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB16F, DELTA_E_TABLE_WIDTH,
|
||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB32F, DELTA_E_TABLE_WIDTH,
|
||||
DELTA_E_TABLE_HEIGHT, 0, GL_RGB, GL_FLOAT, nullptr);
|
||||
|
||||
while ((err = glGetError()) != GL_NO_ERROR) {
|
||||
@@ -2045,7 +2065,7 @@ namespace openspace {
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB16F, MU_S_SAMPLES * NU_SAMPLES,
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB32F, MU_S_SAMPLES * NU_SAMPLES,
|
||||
MU_SAMPLES, R_SAMPLES, 0, GL_RGB, GL_FLOAT, nullptr);
|
||||
|
||||
while ((err = glGetError()) != GL_NO_ERROR) {
|
||||
@@ -2064,7 +2084,7 @@ namespace openspace {
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB16F, MU_S_SAMPLES * NU_SAMPLES,
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB32F, MU_S_SAMPLES * NU_SAMPLES,
|
||||
MU_SAMPLES, R_SAMPLES, 0, GL_RGB, GL_FLOAT, nullptr);
|
||||
|
||||
while ((err = glGetError()) != GL_NO_ERROR) {
|
||||
@@ -2084,7 +2104,7 @@ namespace openspace {
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
||||
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB16F, MU_S_SAMPLES * NU_SAMPLES,
|
||||
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB32F, MU_S_SAMPLES * NU_SAMPLES,
|
||||
MU_SAMPLES, R_SAMPLES, 0, GL_RGB, GL_FLOAT, nullptr);
|
||||
|
||||
while ((err = glGetError()) != GL_NO_ERROR) {
|
||||
@@ -2121,6 +2141,7 @@ namespace openspace {
|
||||
_mieExtinctionCoeff = _mieScatteringCoeff * (1.0f / static_cast<float>(_mieScatteringExtinctionPropCoefficientP));
|
||||
_miePhaseConstant = _mieAsymmetricFactorGP;
|
||||
_sunRadianceIntensity = _sunIntensityP;
|
||||
_hdrConstant = _hdrExpositionP;
|
||||
|
||||
preCalculateAtmosphereParam();
|
||||
|
||||
|
||||
@@ -26,8 +26,6 @@
|
||||
#define __RENDERABLEPLANETATMOSPHERE_H__
|
||||
|
||||
// open space includes
|
||||
//#include <modules/atmosphere/atmosphere.h>
|
||||
|
||||
#include <openspace/rendering/renderable.h>
|
||||
|
||||
#include <openspace/properties/scalar/boolproperty.h>
|
||||
@@ -54,7 +52,6 @@ namespace openspace {
|
||||
class PlanetGeometry;
|
||||
}
|
||||
|
||||
//class RenderablePlanetAtmosphere : public Atmosphere, public Renderable {
|
||||
class RenderablePlanetAtmosphere : public Renderable {
|
||||
public:
|
||||
// Shadow structure
|
||||
@@ -137,6 +134,7 @@ namespace openspace {
|
||||
const int width, const int height) const;
|
||||
void checkFrameBufferState(const std::string & codePosition) const;
|
||||
|
||||
|
||||
private:
|
||||
properties::StringProperty _colorTexturePath;
|
||||
properties::StringProperty _nightTexturePath;
|
||||
@@ -198,6 +196,7 @@ namespace openspace {
|
||||
properties::FloatProperty _mieScatteringExtinctionPropCoefficientP;
|
||||
properties::FloatProperty _mieAsymmetricFactorGP;
|
||||
properties::FloatProperty _sunIntensityP;
|
||||
properties::FloatProperty _hdrExpositionP;
|
||||
|
||||
|
||||
// DEBUG Properties:
|
||||
@@ -228,6 +227,7 @@ namespace openspace {
|
||||
float _mieHeightScale;
|
||||
float _miePhaseConstant;
|
||||
float _sunRadianceIntensity;
|
||||
float _hdrConstant;
|
||||
glm::vec3 _mieExtinctionCoeff;
|
||||
glm::vec3 _rayleighScatteringCoeff;
|
||||
glm::vec3 _mieScatteringCoeff;
|
||||
|
||||
@@ -284,8 +284,8 @@ float miePhaseFunction(const float mu) {
// ( ( (1.0f - (mieG * mieG) ) * (1.0f + mu * mu) ) /
// ( (2.0f + mieG * mieG) *
// pow(1.0f + mieG * mieG - 2.0f * mieG * mu, 3.0f/2.0f) ) );
return 1.5f * 1.0f / (4.0f * M_PI) * (1.0f - mieG*mieG) *
pow(1.0f + (mieG*mieG) - 2.0f*mieG*mu, -3.0f/2.0f) * (1.0f + mu * mu) / (2.0f + mieG*mieG);
return 1.5f * 1.0f / (4.0f * M_PI) * (1.0f - mieG * mieG) *
pow(1.0f + (mieG * mieG) - 2.0f * mieG * mu, -3.0f/2.0f) * (1.0f + mu * mu) / (2.0f + mieG*mieG);
}
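Written out, the return statement above (old and reformatted versions compute the same value) is the Cornette-Shanks Mie phase function with asymmetry g = mieG:

    P_M(\mu) = \frac{3}{8\pi} \, \frac{(1 - g^2)\,(1 + \mu^2)}{(2 + g^2)\,(1 + g^2 - 2 g \mu)^{3/2}}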
// -- Given the height rm view-zenith angle (cosine) mu,
@@ -300,12 +300,13 @@ float miePhaseFunction(const float mu) {
vec4 texture4D(sampler3D table, const float r, const float mu,
const float muSun, const float nu)
{
float Rg2 = Rg * Rg;
float Rt2 = Rt * Rt;
float Rg2 = Rg * Rg;
float Rt2 = Rt * Rt;
float r2 = r * r;
float H = sqrt(Rt2 - Rg2);
float rho = sqrt(r * r - Rg2);
float rho = sqrt(r2 - Rg2);
float rmu = r * mu;
float delta = rmu * rmu - r * r + Rg2;
float delta = rmu * rmu - r2 + Rg2;
vec4 cst = rmu < 0.0f && delta > 0.0f ?
vec4(1.0f, 0.0f, 0.0f, 0.5f - 0.5f / float(SAMPLES_MU)) :
vec4(-1.0f, H * H, H, 0.5f + 0.5f / float(SAMPLES_MU));
@@ -314,10 +315,10 @@ vec4 texture4D(sampler3D table, const float r, const float mu,
float u_mu_s = 0.5f / float(SAMPLES_MU_S) +
(atan(max(muSun, -0.1975) * tan(1.26f * 1.1f)) / 1.1f + (1.0f - 0.26f)) * 0.5f * (1.0f - 1.0f / float(SAMPLES_MU_S));
float lerp = (nu + 1.0f) / 2.0f * (float(SAMPLES_NU) - 1.0f);
float uNu = floor(lerp);
lerp = lerp - uNu;
return texture(table, vec3((uNu + u_mu_s) / float(SAMPLES_NU), u_mu, u_r)) * (1.0f - lerp) +
texture(table, vec3((uNu + u_mu_s + 1.0f) / float(SAMPLES_NU), u_mu, u_r)) * lerp;
float u_nu = floor(lerp);
lerp = lerp - u_nu;
return texture(table, vec3((u_nu + u_mu_s) / float(SAMPLES_NU), u_mu, u_r)) * (1.0f - lerp) +
texture(table, vec3((u_nu + u_mu_s + 1.0f) / float(SAMPLES_NU), u_mu, u_r)) * lerp;
}
|
||||
|
||||
|
||||
|
||||
@@ -26,34 +26,30 @@
|
||||
|
||||
#define EPSILON 0.0001f
|
||||
|
||||
// Sun Irradiance
|
||||
const float ISun = 40.0;
|
||||
// Double Precision Versions:
|
||||
uniform dmat4 dSgctProjectionMatrix;
|
||||
uniform dmat4 dInverseTransformMatrix;
|
||||
uniform dmat4 dScaleTransformMatrix;
|
||||
uniform dmat4 dInverseScaleTransformMatrix;
|
||||
uniform dmat4 dObjToWorldTransform;
|
||||
uniform dmat4 dWorldToObjectTransform;
|
||||
uniform dmat4 dWorldToOsEyeTransform;
|
||||
uniform dmat4 dOsEyeToWorldTransform; // OS Eye to World
|
||||
uniform dmat4 dOsEyeToSGCTEyeTranform; // OS Eye to SGCT Eye
|
||||
uniform dmat4 dSgctEyeToOSEyeTranform; // SGCT Eye to OS Eye
|
||||
uniform dmat4 dSgctEyeToClipTranform; // SGCT Eye to SGCT Project Clip
|
||||
uniform dmat4 dInverseSgctProjectionMatrix; // Clip to SGCT Eye
|
||||
uniform dmat4 dInverseCamRotTransform;
|
||||
|
||||
uniform mat4 sgctProjectionMatrix;
|
||||
uniform mat4 inverseTransformMatrix;
|
||||
uniform mat4 scaleTransformMatrix;
|
||||
uniform mat4 objToWorldTransform;
|
||||
uniform mat4 worldToObjectTransform;
|
||||
uniform mat4 worldToEyeTransform;
|
||||
uniform mat4 eyeToWorldTransform; // OS Eye to World
|
||||
uniform mat4 osEye2SGCTEyeTranform; // OS Eye to SGCT Eye
|
||||
uniform mat4 sgctEye2OSEyeTranform; // SGCT Eye to OS Eye
|
||||
uniform mat4 eyeToViewTranform; // SGCT Eye to SGCT Project Clip
|
||||
uniform mat4 viewToEyeTranform; // SGCT Project Clip to SGCT Eye
|
||||
uniform mat4 inverseSgctProjectionMatrix;
|
||||
// Double Precision Versions:
|
||||
uniform dvec4 dObjpos;
|
||||
uniform dvec3 dCampos;
|
||||
uniform dmat3 dCamrot;
|
||||
|
||||
uniform mat4 completeVertexTransform;
|
||||
uniform mat4 inverseCompleteVertexTransform;
|
||||
|
||||
uniform vec4 cameraPositionObjectCoords;
|
||||
|
||||
//uniform vec4 campos;
|
||||
uniform vec4 objpos;
|
||||
uniform vec3 campos;
|
||||
uniform mat3 camrot;
|
||||
//uniform vec3 sun_pos;
|
||||
uniform dvec3 sunDirectionObj;
|
||||
|
||||
uniform bool _performShading = true;
|
||||
/*
|
||||
uniform float transparency;
|
||||
uniform int shadows;
|
||||
|
||||
@@ -65,23 +61,21 @@ uniform float screenHEIGHT;
|
||||
uniform vec2 depthrange;
|
||||
|
||||
uniform float time;
|
||||
*/
|
||||
|
||||
|
||||
uniform sampler2D reflectanceTexture;
|
||||
//uniform sampler2D transmittanceTexture;
|
||||
uniform sampler2D irradianceTexture;
|
||||
uniform sampler3D inscatterTexture;
|
||||
|
||||
#include "hdr.glsl"
|
||||
//#include "PowerScaling/powerScaling_fs.hglsl"
|
||||
//#include "fragment.glsl"
|
||||
#include "atmosphere_common.glsl"
|
||||
|
||||
|
||||
layout(location = 0) out vec4 renderTarget;
|
||||
|
||||
in vec3 interpolatedNDCPos;
|
||||
in vec4 vertexPosObjVS;
|
||||
in vec3 interpolatedRayDirection;
|
||||
//in vec4 vertexPosObjVS;
|
||||
|
||||
|
||||
/*******************************************************************************
|
||||
****** ALL CALCULATIONS FOR ATMOSPHERE ARE KM AND IN OBJECT SPACE SYSTEM ******
|
||||
@@ -92,76 +86,18 @@ in vec3 interpolatedRayDirection;
|
||||
* and the second intersection: maxLength
|
||||
*/
|
||||
|
||||
struct Ray {
|
||||
vec4 origin;
|
||||
vec4 direction;
|
||||
struct dRay {
|
||||
dvec4 origin;
|
||||
dvec4 direction;
|
||||
};
|
||||
|
||||
struct Ellipsoid {
|
||||
vec4 center;
|
||||
vec4 size;
|
||||
dvec4 center;
|
||||
dvec4 size;
|
||||
};
|
||||
|
||||
bool algebraicIntersecSphere(const Ray ray, const float SphereRadius, const vec4 SphereCenter,
|
||||
out float offset, out float maxLength)
|
||||
{
|
||||
vec3 L = ray.origin.xyz - SphereCenter.xyz;
|
||||
float B = 2 * dot(ray.direction.xyz, L);
|
||||
float C = dot(L, L) - (SphereRadius*SphereRadius);
|
||||
float delta = B*B - 4*C;
|
||||
|
||||
if ( delta < 0.0 ) { // no intersection
|
||||
return false;
|
||||
}
|
||||
else if ( delta == 0.0 ) { // one intersection;
|
||||
offset = maxLength = -B/2.0;
|
||||
} else {
|
||||
float tmpB = -B * 0.5;
|
||||
float root = sqrt(delta) * 0.5;
|
||||
float t0 = tmpB - root;
|
||||
float t1 = tmpB + root;
|
||||
|
||||
if ( t0 < t1 ) {
|
||||
offset = t0;
|
||||
maxLength = t1;
|
||||
} else {
|
||||
offset = t1;
|
||||
maxLength = t0;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool geometricIntersecSphere(const Ray ray, const float SphereRadius, const dvec4 SphereCenter,
out double offset, out double maxLength) {
// Ray's direction must be normalized.
dvec4 OC = SphereCenter - ray.origin;
double L2 = dot(OC.xyz, OC.xyz);
double Sr2 = SphereRadius * SphereRadius;

if ( L2 < Sr2 ) // Ray origin inside sphere.
return false; // TODO: Bust be handled later

double t_ca = dot(OC.xyz, ray.direction.xyz);

if ( t_ca < 0.0 ) // Sphere's center lies behind the rays origin.
return false; // TODO: Handle inside sphere.

double t_2hc = Sr2 - L2 + (t_ca * t_ca);

if ( t_2hc < 0.0 ) // Ray misses the sphere
return false;

double t_hc = sqrt(t_2hc);

offset = t_ca - t_hc;
maxLength = t_ca + t_hc;

return true;
}
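For reference, the quantities in geometricIntersecSphere above are the standard geometric ray-sphere test. With ray origin O, unit direction d, sphere centre C and radius R:

    \vec{L} = C - O, \qquad t_{ca} = \vec{L} \cdot \vec{d}, \qquad t_{hc}^2 = R^2 - \big(\lVert\vec{L}\rVert^2 - t_{ca}^2\big), \qquad t_{0,1} = t_{ca} \mp t_{hc}

offset and maxLength are t_0 and t_1; rays that start inside the sphere or whose direction points away from the centre are rejected early, as the TODO comments note.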
|
||||
|
||||
bool intersectEllipsoid(const Ray ray, const Ellipsoid ellipsoid, out double offset, out double maxLength) {
|
||||
dvec4 O_C = ray.origin-ellipsoid.center;
|
||||
bool dIntersectEllipsoid(const dRay ray, const Ellipsoid ellipsoid, out double offset, out double maxLength) {
|
||||
dvec4 O_C = ray.origin - ellipsoid.center;
|
||||
dvec4 dir = normalize(ray.direction);
|
||||
|
||||
offset = 0.0f;
|
||||
@@ -181,83 +117,46 @@ bool intersectEllipsoid(const Ray ray, const Ellipsoid ellipsoid, out double off
|
||||
+ ((O_C.z*O_C.z)/(ellipsoid.size.z*ellipsoid.size.z))
|
||||
- 1.f;
|
||||
|
||||
double d = ((b*b)-(4.f*a*c));
|
||||
if ( d<0.f || a==0.f || b==0.f || c==0.f )
|
||||
double d = ((b * b)-(4.0 * a * c));
|
||||
if ( d < 0.f || a == 0.f || b == 0.f || c == 0.f )
|
||||
return false;
|
||||
|
||||
d = sqrt(d);
|
||||
|
||||
double t1 = (-b+d)/(2.f*a);
|
||||
double t2 = (-b-d)/(2.f*a);
|
||||
double t1 = (-b+d) / (2.0 * a);
|
||||
double t2 = (-b-d) / (2.0 * a);
|
||||
|
||||
if( t1<=EPSILON && t2<=EPSILON )
|
||||
if ( t1 <= EPSILON && t2 <= EPSILON )
|
||||
return false; // both intersections are behind the ray origin
|
||||
|
||||
bool back = (t1<=EPSILON || t2<=EPSILON); // If only one intersection (t>0) then we are inside the ellipsoid and the intersection is at the back of the ellipsoid
|
||||
double t=0.f;
|
||||
if( t1<=EPSILON )
|
||||
// If only one intersection (t>0) then we are inside the ellipsoid and the intersection is at the back of the ellipsoid
|
||||
bool back = (t1 <= EPSILON || t2 <= EPSILON);
|
||||
double t = 0.0;
|
||||
if ( t1 <= EPSILON ) {
|
||||
t = t2;
|
||||
else
|
||||
if( t2<=EPSILON )
|
||||
} else {
|
||||
if( t2 <= EPSILON )
|
||||
t = t1;
|
||||
else
|
||||
t=(t1<t2) ? t1 : t2;
|
||||
|
||||
if( t<EPSILON ) return false; // Too close to intersection
|
||||
t = (t1 < t2) ? t1 : t2;
|
||||
}
|
||||
|
||||
dvec4 intersection = ray.origin + t*dir;
|
||||
dvec4 normal = intersection-ellipsoid.center;
|
||||
normal.x = 2.f*normal.x/(ellipsoid.size.x*ellipsoid.size.x);
|
||||
normal.y = 2.f*normal.y/(ellipsoid.size.y*ellipsoid.size.y);
|
||||
normal.z = 2.f*normal.z/(ellipsoid.size.z*ellipsoid.size.z);
|
||||
|
||||
normal.w = 0.f;
|
||||
normal *= (back) ? -1.f : 1.f;
|
||||
normal = normalize(normal);
|
||||
if ( t<EPSILON )
|
||||
return false; // Too close to intersection
|
||||
|
||||
dvec4 intersection = ray.origin + t * dir;
|
||||
dvec4 normal = intersection - ellipsoid.center;
|
||||
normal.x = 2.0 * normal.x / (ellipsoid.size.x * ellipsoid.size.x);
|
||||
normal.y = 2.0 * normal.y / (ellipsoid.size.y * ellipsoid.size.y);
|
||||
normal.z = 2.0 * normal.z / (ellipsoid.size.z * ellipsoid.size.z);
|
||||
|
||||
normal.w = 0.0;
|
||||
normal *= (back) ? -1.0 : 1.0;
|
||||
normal = normalize(normal);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool intersectAtmosphere(const dvec4 planetPos, const dvec3 rayDirection, const double sphereRadius,
|
||||
out double offset, out double maxLength) {
|
||||
offset = 0.0f;
|
||||
maxLength = 0.0f;
|
||||
|
||||
// REVIEW
|
||||
//dvec3 l = planetPos.xyz - cameraPositionObject.xyz;
|
||||
dvec3 l = planetPos.xyz;
|
||||
double s = dot(l, rayDirection);
|
||||
double l2 = dot(l, l);
|
||||
|
||||
// sphereRadius in Km
|
||||
double r = sphereRadius - EPSILON; // EPSILON to avoid surface acne
|
||||
double r2 = r * r;
|
||||
|
||||
if (l2 <= r2) {
|
||||
// ray origin inside sphere
|
||||
double m2 = l2 - (s*s);
|
||||
double q = sqrt(r2 - m2);
|
||||
maxLength = s + q;
|
||||
|
||||
return true;
|
||||
}
|
||||
else if (s >= 0.0) {
|
||||
// ray outside sphere
|
||||
double m2 = l2 - (s*s);
|
||||
if (m2 <= r2) {
|
||||
// ray hits atmosphere
|
||||
double q = sqrt(r2 - m2);
|
||||
offset = s-q;
|
||||
maxLength = (s+q)-offset;
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Function to calculate the initial intersection of the eye (camera) ray
|
||||
* with the atmosphere.
|
||||
* In (all parameters in the same coordinate system and same units):
|
||||
@@ -270,39 +169,39 @@ bool intersectAtmosphere(const dvec4 planetPos, const dvec3 rayDirection, const
|
||||
* - offset: the initial intersection distance from eye position when
|
||||
* the eye is outside the atmosphere
|
||||
* - maxLength : the second intersection distance from eye position when the
|
||||
* eye is inside the atmosphere or the initial (and only)
|
||||
* eye is outside the atmosphere or the initial (and only)
|
||||
* intersection of the ray with atmosphere when the eye position
|
||||
* is inside atmosphere.
|
||||
*/
|
||||
bool atmosphereIntersection(const vec3 planetPosition, const Ray ray, const float atmRadius,
|
||||
out bool inside, out float offset, out float maxLength ) {
|
||||
vec3 l = planetPosition - ray.origin.xyz;
|
||||
float s = dot(l, ray.direction.xyz);
|
||||
float l2 = dot(l, l);
|
||||
float r2 = (atmRadius - EPSILON) * (atmRadius - EPSILON); // avoiding surface acne
|
||||
bool dAtmosphereIntersection(const dvec3 planetPosition, const dRay ray, const double atmRadius,
|
||||
out bool inside, out double offset, out double maxLength ) {
|
||||
dvec3 l = planetPosition - ray.origin.xyz;
|
||||
double s = dot(l, ray.direction.xyz);
|
||||
double l2 = dot(l, l);
|
||||
double r2 = (atmRadius - EPSILON) * (atmRadius - EPSILON); // avoiding surface acne
|
||||
|
||||
// Ray origin (eye position) is behind sphere
|
||||
if ((s < 0.0f) && (l2 > r2)) {
|
||||
if ((s < 0.0) && (l2 > r2)) {
|
||||
inside = false;
|
||||
offset = 0.0f;
|
||||
maxLength = 0.0f;
|
||||
offset = 0.0;
|
||||
maxLength = 0.0;
|
||||
return false;
|
||||
}
|
||||
|
||||
float m2 = l2 - s*s;
|
||||
double m2 = l2 - s*s;
|
||||
|
||||
// Ray misses atmospere
|
||||
if (m2 > r2) {
|
||||
inside = false;
|
||||
offset = 0.0f;
|
||||
maxLength = 0.0f;
|
||||
offset = 0.0;
|
||||
maxLength = 0.0;
|
||||
return false;
|
||||
}
|
||||
|
||||
// We already now the ray hits the atmosphere
|
||||
|
||||
// If q = 0.0f, there is only one intersection
|
||||
float q = sqrt(r2 - m2);
|
||||
double q = sqrt(r2 - m2);
|
||||
|
||||
// If l2 < r2, the ray origin is inside the sphere
|
||||
if (l2 > r2) {
|
||||
@@ -311,13 +210,14 @@ bool atmosphereIntersection(const vec3 planetPosition, const Ray ray, const floa
|
||||
maxLength = s + q;
|
||||
} else {
|
||||
inside = true;
|
||||
offset = 0.0f;
|
||||
offset = -1.0;
|
||||
maxLength = s + q;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
float opticalDepth(float H, float r, float mu, float d) {
float a = sqrt((0.5/H)*r);
vec2 a01 = a*vec2(mu, mu + d / r);
@@ -333,15 +233,16 @@ vec3 analyticTransmittance(float r, float mu, float d) {
betaMieExtinction * opticalDepth(HM, r, mu, d));
}
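Only the tail of analyticTransmittance is visible in this hunk. Reconstructed from the visible lines and the opticalDepth helper above (the elided leading term is an assumption), the transmittance it evaluates is

    T(r, \mu, d) \approx \exp\!\big( -\beta^{R}\, D(H_R, r, \mu, d) \;-\; \beta^{M}_{ext}\, D(H_M, r, \mu, d) \big)

where D is opticalDepth and H_R, H_M are the Rayleigh and Mie scale heights.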
vec2 getIrradianceUV(float r, float muSun) {
float uR = (r - Rg) / (Rt - Rg);
float uMuS = (muSun + 0.2) / (1.0 + 0.2);
return vec2(uMuS, uR);
}
// vec2 getIrradianceUV(float r, float muSun) {
// float uR = (r - Rg) / (Rt - Rg);
// float uMuS = (muSun + 0.2) / (1.0 + 0.2);
// return vec2(uMuS, uR);
// }

vec3 irradiance(sampler2D sampler, float r, float muSun) {
vec2 uv = getIrradianceUV(r, muSun);
return texture(sampler, uv).rgb;
vec3 irradiance(sampler2D sampler, const float r, const float muSun) {
float u_r = (r - Rg) / (Rt - Rg);
float u_muSun = (muSun + 0.2) / (1.0 + 0.2);
return texture(sampler, vec2(u_muSun, u_r)).rgb;
}
|
||||
|
||||
/*
|
||||
@@ -374,13 +275,16 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, const vec3 v, const vec3 s,
float Rt2 = Rt * Rt;
float Rg2 = Rg * Rg;

// Dist stores the distance from the camera position
// to the first (the only one in some cases) intersection of the
// light ray and the top of atmosphere.

// From the cosine law for x0 at top of atmosphere:
// Rt^2 = r^2 + dist^2 - 2*r*dist*cos(PI - theta)
// Pay attentation to the -sqrt, it means we are
// considering the distance from observer to the
// first intersection with the atmosphere.
float dist = -r * mu - sqrt(r2 * (mu2 - 1.0f) + Rt2);

// Dist stores the distance from the camera position
// to the first (the only in some cases) intersection of the
// light ray and the top of atmosphere.

// Are we at space?
if (dist > 0.0f) {
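Spelling out the law-of-cosines comment above: with mu = cos(theta), cos(pi - theta) = -mu, so

    R_t^2 = r^2 + dist^2 + 2\,r\,\mu\,dist
    \quad\Rightarrow\quad
    dist^2 + 2\,r\,\mu\,dist + (r^2 - R_t^2) = 0
    \quad\Rightarrow\quad
    dist = -r\mu - \sqrt{r^2(\mu^2 - 1) + R_t^2}

where the negative root selects the first (nearer) intersection with the top of the atmosphere, matching the -sqrt in the code.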
@@ -391,26 +295,26 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, const vec3 v, const vec3 s,
|
||||
// to the point on the atmosphere. So, because we have a new x,
|
||||
// we must also calculate the new cosine between x and v. s is the
|
||||
// same because we consider the Sun as a parallel ray light source.
|
||||
x += dist * v;
|
||||
t -= dist;
|
||||
x += dist * v;
|
||||
// mu(x0 and v)
|
||||
// cos(theta') = (x0 dot v)/(||x0||*||v||) = ((x + dist*v) dot v)/(Rt * 1)
|
||||
// cos(theta') = mu' = (r*mu + dist)/Rt
|
||||
mu = (r * mu + dist) / Rt;
|
||||
mu = (r * mu + dist) / Rt;
|
||||
mu2 = mu * mu;
|
||||
r = Rt;
|
||||
r2 = r * r;
|
||||
}
|
||||
|
||||
r = Rt;
|
||||
r2 = r * r;
|
||||
}
|
||||
|
||||
// Intersects atmosphere?
|
||||
if (r <= Rt) {
|
||||
if (r <= Rt + EPSILON) {
|
||||
float nu = dot(v, s);
|
||||
float muSun = dot(x, s) / r;
|
||||
float rayleighPhase = rayleighPhaseFunction(nu);
|
||||
float miePhase = miePhaseFunction(nu);
|
||||
|
||||
|
||||
// S[L](x,s,v)
|
||||
vec4 inscatterRadiance = max(texture4D(inscatterTexture, r, mu, muSun, nu), 0.0);
|
||||
vec4 inscatterRadiance = max(texture4D(inscatterTexture, r, mu, muSun, nu), 0.0);
|
||||
|
||||
// After removing the initial path from camera pos to top of atmosphere or the
|
||||
// current camera position if inside atmosphere, t > 0
|
||||
@@ -422,20 +326,24 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, const vec3 v, const vec3 s,
|
||||
float muSun0 = dot(x0, s) / r0;
|
||||
|
||||
// Transmittance from point r, direction mu, distance t
|
||||
// By Analytical calculation
|
||||
attenuation = analyticTransmittance(r, mu, t);
|
||||
//attenuation = transmittance(r, mu, v, x+t*v);
|
||||
|
||||
// By Texture Access
|
||||
//attenuation = transmittance(r, mu, v, x0);
|
||||
|
||||
//The following Code is generating surface acne on atmosphere. JCC
|
||||
// We need a better acne avoidance constant (0.01). Done!! Adaptive from distance to x
|
||||
if (r0 > Rg + (0.1f * r)) {
|
||||
//if (r0 > Rg + (0.1f * r)) {
|
||||
// It r0 > Rg it means the ray hits something inside the atmosphere. So we need to
|
||||
// remove the inScattering contribution from the main ray from the hitting point
|
||||
// to the end of the ray.
|
||||
//if (r0 > Rg + (0.01f)) {
|
||||
|
||||
if (r0 > Rg + (0.01f)) {
|
||||
// Here we use the idea of S[L](a->b) = S[L](b->a), and get the S[L](x0, v, s)
|
||||
// Then we calculate S[L] = S[L]|x - T(x, x0)*S[L]|x0
|
||||
inscatterRadiance = max(inscatterRadiance - attenuation.rgbr * texture4D(inscatterTexture, r0, mu0, muSun0, nu), 0.0);
|
||||
|
||||
|
||||
// cos(PI-thetaH) = dist/r
|
||||
// cos(thetaH) = - dist/r
|
||||
// muHorizon = -sqrt(r^2-Rg^2)/r = -sqrt(1-(Rg/r)^2)
|
||||
@@ -443,7 +351,7 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, const vec3 v, const vec3 s,
|
||||
|
||||
// In order to avoid imprecision problems near horizon,
|
||||
// we interpolate between two points: above and below horizon
|
||||
const float INTERPOLATION_EPS = 0.004f; // precision const
|
||||
const float INTERPOLATION_EPS = 0.004f; // precision const from Brunetton
|
||||
if (abs(mu - muHorizon) < INTERPOLATION_EPS) {
|
||||
// We want an interpolation value close to 1/2, so the
|
||||
// contribution of each radiance value is almost the same
|
||||
@@ -470,7 +378,6 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, const vec3 v, const vec3 s,
|
||||
|
||||
// Below Horizon
|
||||
mu = muHorizon + INTERPOLATION_EPS;
|
||||
//r0 = sqrt(r * r + t * t + 2.0f * r * t * mu);
|
||||
r0 = sqrt(r2 + t2 + 2.0f * r * t * mu);
|
||||
mu0 = (r * mu + t) / r0;
|
||||
vec4 inScatterBelowX = texture4D(inscatterTexture, r, mu, muSun, nu);
|
||||
@@ -483,7 +390,7 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, const vec3 v, const vec3 s,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// The w component of inscatterRadiance has stored the Cm,r value (Cm = Sm[L0])
|
||||
// So, we must reintroduce the Mie inscatter by the proximity rule as described in the
|
||||
// paper by Bruneton and Neyret in "Angular precision" paragraph:
|
||||
@@ -496,15 +403,18 @@ vec3 inscatterRadiance(inout vec3 x, inout float t, const vec3 v, const vec3 s,
|
||||
(betaRayleigh.r / betaRayleigh);
|
||||
|
||||
radiance = max(inscatterRadiance.rgb * rayleighPhase + inscatterMie * miePhase, 0.0f);
|
||||
|
||||
} else {
|
||||
// No intersection with atmosphere
|
||||
// The ray is traveling on space
|
||||
radiance = vec3(0.0f);
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Finally we add the Lsun (all calculations are done with no Lsun so
|
||||
// we can change it on the fly with no precomputations)
|
||||
return radiance * sunRadiance;
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -530,18 +440,24 @@ vec3 groundColor(const vec3 x, const float t, const vec3 v, const vec3 s, const
|
||||
const float mu, const vec3 attenuationXtoX0)
|
||||
{
|
||||
vec3 reflectedRadiance = vec3(0.0f);
|
||||
|
||||
float d = length(x + t*v);
|
||||
float x_0 = sqrt(r*r + d*d - 2*r*d*mu);
|
||||
|
||||
// Ray hits planet's surface
|
||||
if (t > 0.0f) {
|
||||
//if (t > 0.0f) {
|
||||
if (x_0 >= Rg) {
|
||||
// First we obtain the ray's end point on the surface
|
||||
vec3 x0 = x + t * v;
|
||||
float r0 = length(x0);
|
||||
// Normal of intersection point.
|
||||
// TODO: Change it to globebrowser
|
||||
vec3 n = x0 / r0;
|
||||
//vec3 n = -x0 / r0;
|
||||
|
||||
// Old deferred:
|
||||
// Old deferred:
|
||||
vec2 coords = vec2(atan(n.y, n.x), acos(n.z)) * vec2(0.5, 1.0) / M_PI + vec2(0.5, 0.0);
|
||||
//vec2 coords = vec2(0.5 + (atan(n.z, n.x))/(2*M_PI), 0.5 - asin(n.y)/(M_PI));
|
||||
vec4 reflectance = texture2D(reflectanceTexture, coords) * vec4(0.2, 0.2, 0.2, 1.0);
|
||||
|
||||
// Initial ground radiance (the surface color)
|
||||
@@ -561,10 +477,10 @@ vec3 groundColor(const vec3 x, const float t, const vec3 v, const vec3 s, const
|
||||
float muSun = dot(n, s);
|
||||
// Is direct Sun light arriving at x0? If not, there is no direct light from Sun (shadowed)
|
||||
vec3 transmittanceL0 = muSun < -sqrt(1.0f - ((Rg * Rg) / (r0 * r0))) ? vec3(0.0f) : transmittanceLUT(r0, muSun);
|
||||
|
||||
//return transmittanceL0;
|
||||
// E[L*] at x0
|
||||
vec3 irradianceReflected = irradiance(irradianceTexture, r0, muSun);
|
||||
|
||||
return irradianceReflected;
|
||||
// Adding clouds texture
|
||||
//vec4 clouds = vec4(0.85)*texture(cloudsTexture, vs_st);
|
||||
|
||||
@@ -589,6 +505,8 @@ vec3 groundColor(const vec3 x, const float t, const vec3 v, const vec3 s, const
|
||||
|
||||
// Finally, we attenuate the surface Radiance from the the point x0 to the camera location.
|
||||
reflectedRadiance = attenuationXtoX0 * groundRadiance;
|
||||
} else { // ray looking at the sky
|
||||
reflectedRadiance = vec3(0.0f);
|
||||
}
|
||||
|
||||
// Returns reflectedRadiance = 0.0 if the ray doesn't hit the ground.
|
||||
@@ -624,61 +542,6 @@ vec3 sunColor(const vec3 x, const float t, const vec3 v, const vec3 s, const flo
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Calculates Intersection Ray by walking through
|
||||
* all the graphic pipile transformations in the
|
||||
* opposite direction.
|
||||
*/
|
||||
void calculateRay(out Ray ray, out vec4 planetPositionObjectCoords) {
|
||||
|
||||
// Fragment to window coordinates
|
||||
vec4 windowCoords = vec4(0.0);
|
||||
|
||||
windowCoords.x = gl_FragCoord.x + 0.5; // +0.5 because the fragment has non-integer coords by default
|
||||
windowCoords.y = screenHEIGHT - gl_FragCoord.y - 0.5; // +0.5 because the fragment has non-integer coords by default
|
||||
windowCoords.z = gl_FragCoord.z; // z can be 0.0 or 1.0. We chose 1.0 to avoid math problems.
|
||||
windowCoords.w = gl_FragCoord.w; // remember: gl_FragCoord.w = 1.0/w_clip
|
||||
|
||||
// Window to NDC coordinates
|
||||
vec4 viewPort = vec4(screenX, screenY, screenWIDTH, screenHEIGHT);
|
||||
vec4 ndcCoords = vec4(0.0, 0.0, 0.0, 1.0);
|
||||
ndcCoords.xy = (2.0 * (windowCoords.xy - viewPort.xy) / viewPort.zw) - vec2(1.0);
|
||||
|
||||
// The ndcCoords for z are only need if we want something inside the
|
||||
// view frustum. In this case we just want the position in the
|
||||
// near plane, that is z = -1.0
|
||||
float f_plus_n = gl_DepthRange.far + gl_DepthRange.near;
|
||||
float f_minus_n = gl_DepthRange.far - gl_DepthRange.near;
|
||||
ndcCoords.z = (2.0 * windowCoords.z - f_plus_n) / f_minus_n;
|
||||
|
||||
// NDC to clip coordinates (gl_FragCoord.w = 1.0/w_clip)
|
||||
vec4 clipCoords = ndcCoords / gl_FragCoord.w;

// Clip to SGCT Eye
vec4 sgctEyeCoords = inverseSgctProjectionMatrix * clipCoords;

// SGCT Eye to OS Eye (This is SGCT eye to OS eye)
vec4 osEyeCoords = viewToEyeTranform * sgctEyeCoords;

// OS Eye to World
vec4 worldCoords = eyeToWorldTransform * osEyeCoords;

// World to Object
vec4 objectCoords = worldToObjectTransform * worldCoords;

// Planet Position in Object Space
planetPositionObjectCoords = worldToObjectTransform * vec4(objpos.xyz, 1.0);

// Camera Position in Object Space
vec4 cameraPositionObject = worldToObjectTransform * vec4(campos.xyz, 1.0);

// ============================
// ====== Building Ray ========
// Ray in object space
ray.origin = cameraPositionObject;
ray.direction = vec4(normalize(objectCoords.xyz - cameraPositionObject.xyz), 0.0);
}

/*
* Calculates Intersection Ray by walking through
* all the graphics pipeline transformations in the
@@ -686,255 +549,120 @@ void calculateRay(out Ray ray, out vec4 planetPositionObjectCoords) {
* Instead of passing through all the pipeline,
* it starts at NDC from the interpolated
* positions from the screen quad.
*/
void calculateInterpolatedRay(out Ray ray, out vec4 planetPositionObjectCoords) {
// NDC to Clip coords
vec4 clipCoords = vec4(interpolatedNDCPos, 1.0) / gl_FragCoord.w;

// Clip to SGCT Eye
vec4 sgctEyeCoords = inverseSgctProjectionMatrix * clipCoords;

// SGCT Eye to OS Eye (This is SGCT eye to OS eye)
vec4 osEyeCoords = viewToEyeTranform * sgctEyeCoords;

// OS Eye to World coords
// Now we execute the transformations with no matrices:
vec4 ttmp = inverse(scaleTransformMatrix) * osEyeCoords;
vec3 ttmp2 = inverse(camrot) * vec3(ttmp);
vec4 ttmp3 = vec4(campos + ttmp2, 1.0);

vec4 worldCoords = ttmp3;

// World to Object coords
vec4 objectCoords = inverseTransformMatrix * vec4(-objpos.xyz + worldCoords.xyz, 1.0);

// Planet Position in Object Space
planetPositionObjectCoords = inverseTransformMatrix * vec4(-objpos.xyz + objpos.xyz, 1.0);

// Camera Position in Object Space
vec4 cameraPositionInObject = inverseTransformMatrix * vec4(-objpos.xyz + campos.xyz, 1.0);

// ============================
// ====== Building Ray ========
// Ray in object space
ray.origin = cameraPositionInObject;
ray.direction = vec4(normalize(objectCoords.xyz - cameraPositionInObject.xyz), 0.0);
}


/*
* Calculates Intersection Ray by walking through
* all the graphics pipeline transformations in the
* opposite direction.
* This method avoids matrix multiplications
* wherever possible.
*/
void calculateRay2(out Ray ray, out vec4 planetPositionObjectCoords) {
void dCalculateRay2(out dRay ray, out dvec4 planetPositionObjectCoords) {
// ======================================
// ======= Avoiding Some Matrices =======

// NDC to clip coordinates (gl_FragCoord.w = 1.0/w_clip)
// Using the interpolated coords:
// Assuming Red Book is right: z_ndc in [0, 1] and not [-1, 1]
vec4 clipCoords = vec4(interpolatedNDCPos, 1.0) / gl_FragCoord.w;
dvec4 clipCoords = dvec4(interpolatedNDCPos, 1.0) / gl_FragCoord.w;
// This next line is needed because OS or SGCT is not inverting Y axis from
// window space.
clipCoords.y = (-interpolatedNDCPos.y) / gl_FragCoord.w;

// Clip to SGCT Eye
vec4 sgctEyeCoords = inverseSgctProjectionMatrix * clipCoords;
dvec4 sgctEyeCoords = dInverseSgctProjectionMatrix * clipCoords;
//sgctEyeCoords /= sgctEyeCoords.w;
sgctEyeCoords.w = 1.0;

// SGCT Eye to OS Eye (This is SGCT eye to OS eye)
vec4 osEyeCoords = sgctEye2OSEyeTranform * sgctEyeCoords;
dvec4 osEyeCoords = dSgctEyeToOSEyeTranform * sgctEyeCoords;

// OS Eye to World coords
// Now we execute the transformations with no matrices:
vec4 ttmp = inverse(scaleTransformMatrix) * osEyeCoords;
vec3 ttmp2 = inverse(camrot) * vec3(ttmp);
vec4 ttmp3 = vec4(campos + ttmp2, 1.0);

vec4 worldCoords = ttmp3;
dvec4 ttmp = dInverseScaleTransformMatrix * osEyeCoords;
dvec3 ttmp2 = dmat3(dInverseCamRotTransform) * dvec3(ttmp);
dvec4 worldCoords = dvec4(dCampos + ttmp2, 1.0);
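// World position rebuilt in double precision: undo the PSC scaling, undo the
// camera rotation, then translate by the camera position.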

// World to Object
vec4 objectCoords = inverseTransformMatrix * vec4(-objpos.xyz + worldCoords.xyz, 1.0);
dvec4 objectCoords = dInverseTransformMatrix * dvec4(-dObjpos.xyz + worldCoords.xyz, 1.0);

// Planet Position in Object Space
planetPositionObjectCoords = inverseTransformMatrix * vec4(-objpos.xyz + objpos.xyz, 1.0);
planetPositionObjectCoords = dInverseTransformMatrix * dvec4(-dObjpos.xyz + dObjpos.xyz, 1.0);

// Camera Position in Object Space
vec4 cameraPositionInObject = inverseTransformMatrix * vec4(-objpos.xyz + campos, 1.0);
dvec4 cameraPositionInObject = dInverseTransformMatrix * dvec4(-dObjpos.xyz + dCampos, 1.0);

// ============================
// ====== Building Ray ========
// Ray in object space
ray.origin = cameraPositionInObject;
ray.direction = vec4(normalize(objectCoords.xyz - cameraPositionInObject.xyz), 0.0);

//renderTarget = vec4(0.5 * interpolatedNDCPos.xyz + vec3(0.5), 1.0);
// Ray in object space (in KM)
ray.origin = cameraPositionInObject / dvec4(1000.0, 1000.0, 1000.0, 1.0);
ray.direction = dvec4(normalize(objectCoords.xyz - cameraPositionInObject.xyz), 0.0);
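// The origin is divided by 1000 so the ray is expressed in kilometers, matching
// the atmosphere radii Rg and Rt, which are given in km.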
}

/*
* Calculates Intersection Ray by walking through
* all the graphics pipeline transformations in the
* opposite direction.
* Khronos way.
*/
void calculateRay3(out Ray ray, out vec4 planetPositionObjectCoords) {

vec4 viewPort = vec4(screenX, screenY, screenWIDTH, screenHEIGHT);
vec4 ndcPos;
ndcPos.xy = ((2.0 * gl_FragCoord.xy) - (2.0 * viewPort.xy)) / (viewPort.zw) - 1;
ndcPos.z = (2.0 * gl_FragCoord.z - gl_DepthRange.near - gl_DepthRange.far) /
(gl_DepthRange.far - gl_DepthRange.near);
ndcPos.w = 1.0;

vec4 clipPos = ndcPos / gl_FragCoord.w;

// Clip to SGCT Eye
vec4 sgctEyeCoords = inverseSgctProjectionMatrix * clipPos;

// SGCT Eye to OS Eye (This is SGCT eye to OS eye)
vec4 osEyeCoords = viewToEyeTranform * sgctEyeCoords;

// OS Eye to World
vec4 worldCoords = eyeToWorldTransform * osEyeCoords;

// World to Object
vec4 objectCoords = worldToObjectTransform * worldCoords;

// Planet Position in Object Space
planetPositionObjectCoords = worldToObjectTransform * vec4(objpos.xyz, 1.0);

// Camera Position in Object Space
vec4 cameraOriginObjectCoords = worldToObjectTransform * vec4(campos.xyz, 1.0);

// ============================
// ====== Building Ray ========
// Ray in object space
ray.origin = cameraOriginObjectCoords;
ray.direction = vec4(normalize(objectCoords.xyz - cameraOriginObjectCoords.xyz), 0.0);
}

/*
* Calculates Intersection Ray by walking through
* all the graphics pipeline transformations in the
* opposite direction.
* Optimized Khronos way.
*/
void calculateRay4(out Ray ray, out vec4 planetPositionObjectCoords) {

// ================================
// ======== From Khronos ==========
vec4 viewPort = vec4(screenX, screenY, screenWIDTH, screenHEIGHT);
vec3 ndcPos;
ndcPos.xy = ((2.0 * gl_FragCoord.xy) - (2.0 * viewPort.xy)) / (viewPort.zw) - 1;
ndcPos.z = (2.0 * gl_FragCoord.z - depthrange.x - depthrange.y) /
(depthrange.y - depthrange.x);

vec4 clipPos;
clipPos.w = sgctProjectionMatrix[3][2] / (ndcPos.z - (sgctProjectionMatrix[2][2] / sgctProjectionMatrix[2][3]));
clipPos.xyz = ndcPos * clipPos.w;
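// w_clip is reconstructed from the projection matrix and the NDC depth (the
// standard Khronos unproject derivation), then the NDC position is scaled by
// w_clip to obtain clip coordinates without a full matrix multiply.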

// Clip to SGCT Eye
vec4 sgctEyeCoords = inverseSgctProjectionMatrix * clipPos;

// SGCT Eye to OS Eye (This is SGCT eye to OS eye)
vec4 osEyeCoords = viewToEyeTranform * sgctEyeCoords;

// OS Eye to World coords
// Now we execute the transformations with no matrices:
vec4 ttmp = inverse(scaleTransformMatrix) * osEyeCoords;
vec3 ttmp2 = inverse(camrot) * vec3(ttmp);
vec4 ttmp3 = vec4(campos + ttmp2, 1.0);

vec4 worldCoords = ttmp3;

// World to Object coords
vec4 objectCoords = inverseTransformMatrix * vec4(-objpos.xyz + worldCoords.xyz, 1.0);

// Planet Position in Object Space
planetPositionObjectCoords = inverseTransformMatrix * vec4(-objpos.xyz + objpos.xyz, 1.0);

// Camera Position in Object Space
vec4 cameraOriginObjectCoords = inverseTransformMatrix * vec4(-objpos.xyz + campos.xyz, 1.0);

// ============================
// ====== Building Ray ========
// Ray in object space
ray.origin = cameraOriginObjectCoords;
ray.direction = vec4(normalize(objectCoords.xyz - cameraOriginObjectCoords.xyz), 0.0);
}


// Double Version
void main() {
//vec4 position = vs_position;
float depth = 0.0;
// vec4 diffuse = texture(texture1, vs_st);
// vec4 diffuse2 = texture(nightTex, vs_st);
// vec4 clouds = texture(cloudsTexture, vs_st);

double depth = 0.0;
if (_performShading) {

// Ray in object space
Ray ray;
vec4 planetPositionObjectCoords = vec4(0.0);
//calculateRay(ray, planetPositionObjectCoords);
calculateRay2(ray, planetPositionObjectCoords);
//calculateInterpolatedRay(ray, planetPositionObjectCoords);
//calculateRay3(ray, planetPositionObjectCoords);
//calculateRay4(ray, planetPositionObjectCoords);
dRay ray;
dvec4 planetPositionObjectCoords = dvec4(0.0);
dCalculateRay2(ray, planetPositionObjectCoords);
//dCalculateInterpolatedRay(ray, planetPositionObjectCoords);

bool insideATM = false;
float offset = 0.0f;
float maxLength = 0.0f;
//bool intersectATM = atmosphereIntersection(planetPositionObjectCoords.xyz, ray, Rt*1000.0,
// insideATM, offset, maxLength );
bool intersectATM = algebraicIntersecSphere(ray, Rt*1000.0, planetPositionObjectCoords, offset, maxLength);
double offset = 0.0f;
double maxLength = 0.0f;
bool intersectATM = dAtmosphereIntersection(planetPositionObjectCoords.xyz, ray, Rt,
insideATM, offset, maxLength );
if ( intersectATM ) {
//renderTarget = vec4(1.0, 0.0, 0.0, 1.0);
//renderTarget = vec4(offset/maxLength, offset/maxLength, offset/maxLength, 1.0);
//return;

// Following paper nomenclature
double t = 0.0;
if ( offset != -1.0 ) {
// Camera is inside Atmosphere
t = offset;
}
// Moving camera to top of Atmosphere if needed
vec3 x = vec3(ray.origin.xyz);
float r = length(x);
vec3 v = vec3(ray.direction.xyz);
float mu = dot(x, v) / r;
vec3 s = vec3(sunDirectionObj);
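// Paper nomenclature: x is the camera position in object space, r its distance
// from the planet center, v the view-ray direction, mu the cosine of the
// view-zenith angle, and s the Sun direction.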

float tF = float(maxLength);
vec3 attenuation;

// if ( intersectATM ) {
// renderTarget = vec4(1.0, 0.0, 0.0, 1.0);
// } else {
// renderTarget = vec4(0.0, 0.0, 0.0, 1.0);
// }
//renderTarget = vec4(analyticTransmittance(r, mu, tF).xyz, 1.0);
//renderTarget = vec4(s, 1.0);
//renderTarget = HDR(vec4(abs(mu*mu), abs(mu*mu), abs(mu*mu), 1.0));
//renderTarget = HDR(vec4(abs(Rt*Rt), abs(Rt*Rt), abs(Rt*Rt), 1.0));
//renderTarget = HDR(vec4(abs(Rg*Rg), abs(Rg*Rg), abs(Rg*Rg), 1.0));
//renderTarget = HDR(vec4(normalize(vec3(abs(r), abs(r), abs(r))), 1.0));
//renderTarget = HDR(vec4(normalize(ray.origin.xyz + t * ray.direction.xyz), 1.0));
//renderTarget = HDR(vec4(vec3(length(ray.origin.xyz + t * ray.direction.xyz)), 1.0));
//float nu = dot(v, s);//float(dot(vec3(ray.direction.xyz), s));
//float muSun = dot(x, s) / r;
//renderTarget = vec4(nu, nu, nu, 1.0);
//renderTarget = HDR(vec4(muSun, muSun, muSun, 1.0));
//renderTarget = HDR(vec4(abs(nu), abs(nu), abs(nu), 1.0));
//renderTarget = vec4(abs(muSun), abs(muSun), abs(muSun), 1.0);
//renderTarget = vec4(vec3(max(texture4D(inscatterTexture, r, mu, muSun, nu), 0.0)), 1.0);

// Debugging:
//renderTarget = vec4(interpolatedNDCPos.xy*0.5 + vec2(0.5), 0.0, 1.0);
//renderTarget = vec4(ndcCoords.xy*0.5 + vec2(0.5), 0.0, 1.0);
//renderTarget = vec4(normalize(sgctEyeCoords.xyz) * 0.5 + vec3(0.5), 1.0);
//renderTarget = vec4(osEyeCoords.xyz * 0.5 + 0.5, 1.0);
//vec2 temp = farPlaneObjectPos.xy;
//vec2 temp = sgctEyeCoords.xy;
//vec2 temp = osEyeCoords.xy;
//vec2 temp = worldCoords.xy;
// if (temp.x > temp.y)
// temp /= temp.x;
// else
// temp /= temp.y;
//renderTarget = vec4(temp, 0.0, 1.0);

//renderTarget = vec4(ray.direction.xyz, 1.0);
// renderTarget = vec4(normalize(sgctEyeCoords).xyz, 1.0);
//renderTarget = vec4(inverseSgctProjectionMatrix[2][1], 0.0, 0.0, 1.0);

//renderTarget = vec4(0.5*normalize(worldCoords.xyz) + vec3(0.5), 1.0);
vec3 inscatterColor = inscatterRadiance(x, tF, v, s, r, mu, attenuation);
vec3 groundColor = groundColor(x, tF, v, s, r, mu, attenuation);
vec3 sunColor = sunColor(x, tF, v, s, r, mu);

//renderTarget = vec4(HDR(inscatterColor), 1.0);
//renderTarget = vec4(HDR(groundColor), 1.0);
//renderTarget = vec4(groundColor, 1.0);
//renderTarget = vec4(HDR(sunColor), 1.0);
renderTarget = vec4(HDR(sunColor + groundColor + inscatterColor), 1.0);

} else {
renderTarget = vec4(0.0, 0.0, 0.0, 1.0);
}

} else {
renderTarget = vec4(0.5, 0.5, 0.5, 1.0);
}

// Testing Uniforms:
//renderTarget.xyz = vec3(1.0f);
//renderTarget.xyz = vec3(Rg/6378.1366);
//renderTarget.xyz = vec3(Rt/6420.0);
//renderTarget.xyz = vec3(AverageGroundReflectance/0.1f);
//renderTarget.xyz = vec3(HR/8.0f);
//renderTarget.xyz = vec3(HM/1.2f);
//renderTarget.xyz = vec3(mieG/1.0f);
//renderTarget.xyz = vec3(sunRadiance/50.0f);
//renderTarget.xyz = vec3(betaRayleigh.x/5.8e-3, betaRayleigh.y/1.35e-2, betaRayleigh.z/3.31e-2);
//renderTarget.xyz = vec3(betaMieScattering.x/4e-3, betaMieScattering.y/4e-3, betaMieScattering.z/4e-3);
//renderTarget.xyz = vec3(betaMieExtinction.x/(betaMieScattering.x/0.9), betaMieExtinction.y/(betaMieScattering.y/0.9),
// betaMieExtinction.z/(betaMieScattering.z/0.9));
//renderTarget.xyz = vec3(mieG);

//renderTarget = vec4(interpolatedRayDirection * 0.5 + 0.5, 1.0);
}


@@ -537,7 +537,7 @@ vec3 groundColor(const vec3 x, const float t, const vec3 v, const vec3 s, const

// E[L*] at x0
vec3 irradianceReflected = irradiance(irradianceTexture, r0, muSun);

// Adding clouds texture
vec4 clouds = vec4(0.85)*texture(cloudsTexture, vs_st);

@@ -679,12 +679,14 @@ Fragment getFragment() {

//diffuse = HDR(vec4(sunColor + groundColor + inscatterColor, 1.0));
//diffuse = HDR(vec4(sunColor, 1.0));
//diffuse = HDR(vec4(groundColor, 1.0));
//diffuse = vec4(HDR(groundColor), 1.0);
//diffuse = HDR(vec4(inscatterColor, 1.0));

//diffuse = HDR(vec4(sunColor + groundColor + inscatterColor, 1.0) + diffuse2);
diffuse = HDR((vec4(sunColor + groundColor + inscatterColor, 1.0) + diffuse2) *
calcShadow(shadowDataArray, vs_posWorld.xyz) );
vec4 finalRadiance = calcShadow(shadowDataArray, vs_posWorld.xyz) *
(vec4(sunColor + groundColor + inscatterColor, 1.0) + diffuse2);

diffuse = vec4(HDR(finalRadiance.xyz), finalRadiance.w);
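// HDR tone mapping is applied to the RGB channels only; the alpha of the
// shadowed radiance is carried through unchanged.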
}
// else
// diffuse = HDR(diffuse);

@@ -24,7 +24,7 @@

uniform float exposure;

vec4 HDR(vec4 color) {
vec3 HDR(vec3 color) {
color *= exposure;

color.r = color.r < 1.413 ? pow(color.r * 0.38317, 1.0 / 2.2) : 1.0 - exp(-color.r);
@@ -32,4 +32,4 @@ vec4 HDR(vec4 color) {
color.b = color.b < 1.413 ? pow(color.b * 0.38317, 1.0 / 2.2) : 1.0 - exp(-color.b);

return color;
}
}
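
For reference, a minimal standalone sketch of the exposure-driven tone-mapping curve used by the HDR() function above. The helper names (toneMapChannel, toneMap) are illustrative and not part of the commit; only the exposure uniform and the curve itself come from the shader:

uniform float exposure; // user-selected HDR exposure

// Per-channel curve: gamma-corrected linear segment below the knee at 1.413,
// exponential roll-off toward 1.0 above it.
float toneMapChannel(float c) {
    c *= exposure;
    return c < 1.413 ? pow(c * 0.38317, 1.0 / 2.2) : 1.0 - exp(-c);
}

vec3 toneMap(vec3 color) {
    return vec3(toneMapChannel(color.r),
                toneMapChannel(color.g),
                toneMapChannel(color.b));
}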