Merge pull request #1690 from OpenSpace/issue/1669

Issue/1669
This commit is contained in:
Gene Payne
2021-07-30 13:49:57 -06:00
committed by GitHub
8 changed files with 112 additions and 37 deletions

View File

@@ -145,6 +145,19 @@ public:
*/
std::chrono::steady_clock::time_point currentPlaybackInterpolationTime() const;
/**
* Returns the simulated application time. This simulated application time is only
* used when playback is set to be in the mode where a screenshot is captured with
* every rendered frame (enableTakeScreenShotDuringPlayback() is used to enable this
* mode). At the start of playback, this timer is set to the value of the current
* applicationTime function provided by the window delegate (used during normal
* mode or playback). However, during playback it is incremented by the fixed
* framerate of the playback rather than the actual clock value.
*
* \returns application time in seconds, for use in playback-with-frames mode
*/
double currentApplicationInterpolationTime() const;
/**
* Starts a recording session, which will save data to the provided filename
* according to the data format specified, and will continue until recording is
@@ -719,6 +732,7 @@ protected:
double _saveRenderingCurrentRecordedTime;
std::chrono::steady_clock::duration _saveRenderingDeltaTime_interpolation_usec;
std::chrono::steady_clock::time_point _saveRenderingCurrentRecordedTime_interpolation;
double _saveRenderingCurrentApplicationTime_interpolation;
long long _saveRenderingClockInterpolation_countsPerSec;
bool _saveRendering_isFirstFrame = true;

View File

@@ -238,7 +238,7 @@ private:
* Update dependencies.
*/
void updateNodeRegistry();
std::chrono::steady_clock::time_point currentTimeForInterpolation();
void sortTopologically();
std::unique_ptr<Camera> _camera;

View File

@@ -32,16 +32,16 @@
namespace openspace {
/**
* Base class for keyframes
*/
* Base class for keyframes
*/
struct KeyframeBase {
// Identifier of this keyframe (assigned when the keyframe is created)
size_t id;
// Time stamp of this keyframe in seconds; compared against application time
// by the compareKeyframe* helper functions declared below
double timestamp;
};
/**
* Templated class for keyframes containing data
*/
* Templated class for keyframes containing data
*/
template <typename T>
struct Keyframe : public KeyframeBase {
Keyframe(size_t i, double t, T d);
@@ -54,8 +54,8 @@ struct Keyframe : public KeyframeBase {
};
/**
* Templated class for timelines
*/
* Templated class for timelines
*/
template <typename T>
class Timeline {
public:
@@ -81,20 +81,30 @@ private:
};
/**
* Return true if the timestamp of a is smaller the timestamp of b.
*/
* Return true if the timestamp of a is smaller the timestamp of b.
*/
bool compareKeyframeTimes(const KeyframeBase& a, const KeyframeBase& b);
/**
* Return true if a is smaller than the timestamp of b.
*/
* Return true if a is smaller than the timestamp of b.
*/
bool compareTimeWithKeyframeTime(double a, const KeyframeBase& b);
/**
* Return true if the timestamp of a is smaller than b.
*/
* Return true if the timestamp of a is smaller than b.
*/
bool compareKeyframeTimeWithTime(const KeyframeBase& a, double b);
/**
* Return true if the timestamp of a is smaller than or equal to b.
* This is used only in the mode of saving render frames during session recording
* playback. This was necessary to correct a small timing issue caused by fixing
* the application time according to the playback framerate. In normal operation,
* the application time at the instant the keyframes are evaluated is always a
* little bit newer than the first keyframe in the timeline.
*/
bool compareKeyframeTimeWithTime_playbackWithFrames(const KeyframeBase& a, double b);
} // namespace openspace
#include "timeline.inl"

View File

@@ -121,12 +121,14 @@ public:
private:
void progressTime(double dt);
void applyKeyframeData(const TimeKeyframeData& keyframe);
void applyKeyframeData(const TimeKeyframeData& keyframe, double dt);
TimeKeyframeData interpolate(const Keyframe<TimeKeyframeData>& past,
const Keyframe<TimeKeyframeData>& future, double time);
void addDeltaTimesKeybindings();
void clearDeltaTimesKeybindings();
double currentApplicationTimeForInterpolation() const;
double previousApplicationTimeForInterpolation() const;
Timeline<TimeKeyframeData> _timeline;
SyncData<Time> _currentTime;
@@ -139,6 +141,7 @@ private:
bool _lastTimePaused = false;
double _lastDeltaTime = 0.0;
double _lastTargetDeltaTime = 0.0;
double _previousApplicationTime = 0.0;
bool _deltaTimeStepsChanged = false;
std::vector<double> _deltaTimeSteps;

View File

@@ -472,6 +472,8 @@ void SessionRecording::initializePlayback_time(double now) {
_timestampPlaybackStarted_simulation = global::timeManager->time().j2000Seconds();
_timestampApplicationStarted_simulation = _timestampPlaybackStarted_simulation - now;
_saveRenderingCurrentRecordedTime_interpolation = steady_clock::now();
_saveRenderingCurrentApplicationTime_interpolation =
global::windowDelegate->applicationTime();
_saveRenderingClockInterpolation_countsPerSec =
system_clock::duration::period::den / system_clock::duration::period::num;
_playbackPauseOffset = 0.0;
@@ -1176,11 +1178,14 @@ double SessionRecording::fixedDeltaTimeDuringFrameOutput() const {
}
std::chrono::steady_clock::time_point
SessionRecording::currentPlaybackInterpolationTime() const
{
SessionRecording::currentPlaybackInterpolationTime() const {
return _saveRenderingCurrentRecordedTime_interpolation;
}
// Returns the simulated application time (in seconds) used when playback is
// capturing a screenshot for every rendered frame. Per the header docs, this
// value starts at the window delegate's applicationTime() when playback begins
// and is then advanced by the fixed playback delta time in moveAheadInTime(),
// rather than being read from the real clock.
double SessionRecording::currentApplicationInterpolationTime() const {
return _saveRenderingCurrentApplicationTime_interpolation;
}
bool SessionRecording::playbackCamera() {
Timestamps times;
datamessagestructures::CameraKeyframe kf;
@@ -1798,6 +1803,8 @@ void SessionRecording::moveAheadInTime() {
_saveRenderingCurrentRecordedTime_interpolation +=
_saveRenderingDeltaTime_interpolation_usec;
_saveRenderingCurrentRecordedTime += _saveRenderingDeltaTime;
_saveRenderingCurrentApplicationTime_interpolation +=
_saveRenderingDeltaTime;
global::renderEngine->takeScreenshot();
}
}

View File

@@ -474,6 +474,15 @@ SceneGraphNode* Scene::loadNode(const ghoul::Dictionary& nodeDictionary) {
return rawNodePointer;
}
std::chrono::steady_clock::time_point Scene::currentTimeForInterpolation() {
    // While session-recording playback is saving every rendered frame, property
    // interpolations must advance on the fixed playback clock instead of the
    // real wall clock; otherwise use the current steady-clock time
    const bool usePlaybackClock =
        global::sessionRecording->isSavingFramesDuringPlayback();
    return usePlaybackClock ?
        global::sessionRecording->currentPlaybackInterpolationTime() :
        std::chrono::steady_clock::now();
}
void Scene::addPropertyInterpolation(properties::Property* prop, float durationSeconds,
ghoul::EasingFunction easingFunction)
{
@@ -496,11 +505,7 @@ void Scene::addPropertyInterpolation(properties::Property* prop, float durationS
ghoul::easingFunction<float>(easingFunction);
// First check if the current property already has an interpolation information
std::chrono::steady_clock::time_point now = (
global::sessionRecording->isSavingFramesDuringPlayback() ?
global::sessionRecording->currentPlaybackInterpolationTime() :
std::chrono::steady_clock::now()
);
std::chrono::steady_clock::time_point now = currentTimeForInterpolation();
for (PropertyInterpolationInfo& info : _propertyInterpolationInfos) {
if (info.prop == prop) {
info.beginTime = now;
@@ -550,13 +555,7 @@ void Scene::updateInterpolations() {
using namespace std::chrono;
steady_clock::time_point now;
if (global::sessionRecording->isSavingFramesDuringPlayback()) {
now = global::sessionRecording->currentPlaybackInterpolationTime();
}
else {
now = steady_clock::now();
}
steady_clock::time_point now = currentTimeForInterpolation();
// First, let's update the properties
for (PropertyInterpolationInfo& i : _propertyInterpolationInfos) {
long long usPassed = duration_cast<std::chrono::microseconds>(

View File

@@ -38,4 +38,8 @@ bool compareKeyframeTimeWithTime(const KeyframeBase& a, double b) {
return a.timestamp < b;
}
bool compareKeyframeTimeWithTime_playbackWithFrames(const KeyframeBase& a, double b) {
    // Inclusive variant of compareKeyframeTimeWithTime: a keyframe stamped at
    // exactly the current time still compares as "not after" it, which is
    // needed when application time is quantized to a fixed playback framerate
    const double keyframeTime = a.timestamp;
    return keyframeTime < b || keyframeTime == b;
}
} // namespace

View File

@@ -27,6 +27,7 @@
#include <openspace/engine/globals.h>
#include <openspace/engine/windowdelegate.h>
#include <openspace/interaction/keybindingmanager.h>
#include <openspace/interaction/sessionrecording.h>
#include <openspace/network/parallelpeer.h>
#include <openspace/util/keys.h>
#include <openspace/util/timeline.h>
@@ -112,7 +113,7 @@ TimeManager::TimeManager()
void TimeManager::interpolateTime(double targetTime, double durationSeconds) {
ghoul_precondition(durationSeconds > 0.f, "durationSeconds must be positive");
const double now = global::windowDelegate->applicationTime();
const double now = currentApplicationTimeForInterpolation();
const bool pause = isPaused();
const TimeKeyframeData current = { time(), deltaTime(), false, false };
@@ -129,7 +130,7 @@ void TimeManager::interpolateTimeRelative(double delta, double durationSeconds)
const float duration = global::timeManager->defaultTimeInterpolationDuration();
const TimeKeyframeData predictedTime = interpolate(
global::windowDelegate->applicationTime() + duration
currentApplicationTimeForInterpolation() + duration
);
const double targetTime = predictedTime.time.j2000Seconds() + delta;
interpolateTime(targetTime, durationSeconds);
@@ -188,6 +189,7 @@ void TimeManager::preSynchronization(double dt) {
_lastTimePaused = _timePaused;
_deltaTimeStepsChanged = false;
_timelineChanged = false;
_previousApplicationTime = currentApplicationTimeForInterpolation();
}
TimeKeyframeData TimeManager::interpolate(double applicationTime) {
@@ -262,14 +264,17 @@ void TimeManager::progressTime(double dt) {
return;
}
const double now = global::windowDelegate->applicationTime();
const double now = currentApplicationTimeForInterpolation();
const std::deque<Keyframe<TimeKeyframeData>>& keyframes = _timeline.keyframes();
std::function<bool(const KeyframeBase&, double)> comparisonFunc =
(global::sessionRecording->isPlayingBack()) ?
&compareKeyframeTimeWithTime_playbackWithFrames : &compareKeyframeTimeWithTime;
auto firstFutureKeyframe = std::lower_bound(
keyframes.begin(),
keyframes.end(),
now,
&compareKeyframeTimeWithTime
comparisonFunc
);
const bool hasFutureKeyframes = firstFutureKeyframe != keyframes.end();
@@ -296,7 +301,7 @@ void TimeManager::progressTime(double dt) {
_deltaTime = interpolated.delta;
}
else if (!hasConsumedLastPastKeyframe) {
applyKeyframeData(lastPastKeyframe->data);
applyKeyframeData(lastPastKeyframe->data, dt);
}
else if (!isPaused()) {
// If there are no keyframes to consider
@@ -352,9 +357,15 @@ TimeKeyframeData TimeManager::interpolate(const Keyframe<TimeKeyframeData>& past
return data;
}
void TimeManager::applyKeyframeData(const TimeKeyframeData& keyframeData) {
void TimeManager::applyKeyframeData(const TimeKeyframeData& keyframeData, double dt) {
const Time& currentTime = keyframeData.time;
_currentTime.data().setTime(currentTime.j2000Seconds());
_deltaTime = _timePaused ? 0.0 : _targetDeltaTime;
if (global::sessionRecording->isPlayingBack()) {
_currentTime.data().advanceTime(dt * _deltaTime);
}
else {
_currentTime.data().setTime(currentTime.j2000Seconds());
}
_timePaused = keyframeData.pause;
_targetDeltaTime = keyframeData.delta;
_deltaTime = _timePaused ? 0.0 : _targetDeltaTime;
@@ -713,7 +724,7 @@ void TimeManager::interpolateDeltaTime(double newDeltaTime, double interpolation
return;
}
const double now = global::windowDelegate->applicationTime();
double now = currentApplicationTimeForInterpolation();
Time newTime(
time().j2000Seconds() + (_deltaTime + newDeltaTime) * 0.5 * interpolationDuration
);
@@ -723,6 +734,9 @@ void TimeManager::interpolateDeltaTime(double newDeltaTime, double interpolation
_targetDeltaTime = newDeltaTime;
if (global::sessionRecording->isPlayingBack()) {
now = previousApplicationTimeForInterpolation();
}
addKeyframe(now, currentKeyframe);
addKeyframe(now + interpolationDuration, futureKeyframe);
}
@@ -809,7 +823,7 @@ void TimeManager::interpolatePause(bool pause, double interpolationDuration) {
return;
}
const double now = global::windowDelegate->applicationTime();
double now = currentApplicationTimeForInterpolation();
double targetDelta = pause ? 0.0 : _targetDeltaTime;
Time newTime(
time().j2000Seconds() + (_deltaTime + targetDelta) * 0.5 * interpolationDuration
@@ -819,6 +833,9 @@ void TimeManager::interpolatePause(bool pause, double interpolationDuration) {
TimeKeyframeData futureKeyframe = { newTime, _targetDeltaTime, pause, false };
_timePaused = pause;
if (global::sessionRecording->isPlayingBack()) {
now = previousApplicationTimeForInterpolation();
}
clearKeyframes();
if (interpolationDuration > 0) {
addKeyframe(now, currentKeyframe);
@@ -826,4 +843,25 @@ void TimeManager::interpolatePause(bool pause, double interpolationDuration) {
addKeyframe(now + interpolationDuration, futureKeyframe);
}
double TimeManager::currentApplicationTimeForInterpolation() const {
    // During playback that renders a screenshot per frame, application time is
    // driven by the session recording's simulated clock (advanced at the fixed
    // playback framerate); otherwise the window delegate's real time is used
    const bool savingFramesDuringPlayback =
        global::sessionRecording->isSavingFramesDuringPlayback();
    return savingFramesDuringPlayback ?
        global::sessionRecording->currentApplicationInterpolationTime() :
        global::windowDelegate->applicationTime();
}
double TimeManager::previousApplicationTimeForInterpolation() const {
    // When playing back with frame output, this function must be used for the
    // timestamps of the keyframes that begin and end a time-rate interpolation
    // (a speed change or a pause). If the application time stamped on such a
    // keyframe when it is added to the timeline were exactly equal to the
    // application time at which it is later evaluated, the interpolation math
    // would fail and two identical frames would be generated at the beginning
    // and end of the interpolation. That coincidence only occurs when the
    // application time is forced to discrete intervals for a fixed rendering
    // framerate, so using the application time recorded on the previous frame
    // (_previousApplicationTime, updated in preSynchronization) avoids it and
    // does not adversely affect playback without frame output.
    return _previousApplicationTime;
}
} // namespace openspace