Split videotileprovider and videoplayer into two classes

This commit is contained in:
Ylva Selling
2023-02-15 10:55:23 -05:00
parent a61d98c268
commit c01487458c
4 changed files with 106 additions and 816 deletions

View File

@@ -53,15 +53,17 @@ public:
void stepFrameBackward();
void seekToTime(double time);
const std::unique_ptr<ghoul::opengl::Texture>& frameTexture() const;
bool isPaused() const;
bool isInitialized() const;
double videoDuration() const;
double currentPlaybackTime() const;
void reset();
void destroy();
documentation::Documentation Documentation();
private:
properties::TriggerProperty _play;
properties::TriggerProperty _pause;
properties::TriggerProperty _goToStart;
// libmpv property keys
enum class LibmpvPropertyKey : uint64_t {
Duration = 1,
@@ -76,17 +78,8 @@ private:
Pause
};
enum class PlaybackMode {
MapToSimulationTime = 0,
RealTimeLoop
};
void createFBO(int width, int height);
void resizeFBO(int width, int height);
// Map to simulation time functions
double correctVideoPlaybackTime() const;
bool isWithingStartEndTime() const;
// Libmpv
static void on_mpv_render_update(void*); // Has to be static because of C api
@@ -95,7 +88,6 @@ private:
void handleMpvEvents();
void handleMpvProperties(mpv_event* event);
void swapBuffersMpv(); // Called in postDraw
void cleanUpMpv(); // Called in internalDeinitialze
void observePropertyMpv(std::string name, mpv_format format, LibmpvPropertyKey key);
void setPropertyStringMpv(std::string name, std::string value);
void getPropertyAsyncMpv(std::string name, mpv_format format, LibmpvPropertyKey key);
@@ -108,8 +100,8 @@ private:
double _currentVideoTime = 0.0;
double _fps = 24.0; // If when we read it it is 0, use 24 fps
double _videoDuration = 0.0;
glm::ivec2 _videoResolution = glm::ivec2(4096, 2048); // Used for the fbos
glm::ivec2 _videoResolution = glm::ivec2(2048, 1024); // Used for the fbos
bool _isPaused = false;
// Libmpv
mpv_handle* _mpvHandle = nullptr;
mpv_render_context* _mpvRenderContext = nullptr;
@@ -120,8 +112,7 @@ private:
bool _isInitialized = false; // If libmpv has been inititalized
bool _isDestroying = false; // If libmpv has been inititalized
bool _isSeeking = false; // Prevent seeking while already seeking
double _seekThreshold = 1.0; // Threshold to ensure we seek to a
double _seekThreshold = 1.0; // Threshold to ensure we seek to a different time
};
} // namespace video::globebrowsing

View File

@@ -26,6 +26,7 @@
#define __OPENSPACE_MODULE_GLOBEBROWSING___TILEPROVIDER__VIDEOTILEPROVIDER___H__
#include <modules/globebrowsing/src/tileprovider/tileprovider.h>
#include <modules/video/include/videoplayer.h>
#include <openspace/properties/triggerproperty.h>
#include <openspace/properties/scalar/doubleproperty.h>
@@ -55,63 +56,24 @@ public:
Tile tile(const TileIndex& tileIndex) override final;
Tile::Status tileStatus(const TileIndex& tileIndex) override final;
TileDepthTransform depthTransform() override final;
// Video interaction
void pause();
void play();
void goToStart();
void stepFrameForward();
void stepFrameBackward();
void seekToTime(double time);
void syncToSimulationTime();
static documentation::Documentation Documentation();
private:
properties::TriggerProperty _play;
properties::TriggerProperty _pause;
properties::TriggerProperty _goToStart;
// libmpv property keys
enum class LibmpvPropertyKey : uint64_t {
Duration = 1,
Height,
Width,
Meta,
Params,
Time,
Command,
Seek,
Fps,
Pause
};
enum class PlaybackMode {
MapToSimulationTime = 0,
RealTimeLoop
};
void createFBO(int width, int height);
void resizeFBO(int width, int height);
// Map to simulation time functions
double correctVideoPlaybackTime() const;
bool isWithingStartEndTime() const;
// Libmpv
static void on_mpv_render_update(void*); // Has to be static because of C api
void initializeMpv(); // Called first time in postSyncPreDraw
void renderMpv(); // Called in postSyncPreDraw
void handleMpvEvents();
void handleMpvProperties(mpv_event* event);
void swapBuffersMpv(); // Called in postDraw
void cleanUpMpv(); // Called in internalDeinitialze
void observePropertyMpv(std::string name, mpv_format format, LibmpvPropertyKey key);
void setPropertyStringMpv(std::string name, std::string value);
void getPropertyAsyncMpv(std::string name, mpv_format format, LibmpvPropertyKey key);
void commandAsyncMpv(const char* cmd[],
LibmpvPropertyKey key = LibmpvPropertyKey::Command);
void internalInitialize() override final;
void internalDeinitialize() override final;
@@ -124,27 +86,12 @@ private:
double _timeAtLastRender = 0.0;
double _frameDuration = 0.0;
// Video properties. Try to read all these values from the video
double _currentVideoTime = 0.0;
double _fps = 24.0; // If when we read it it is 0, use 24 fps
double _videoDuration = 0.0;
glm::ivec2 _videoResolution = glm::ivec2(4096, 2048); // Used for the fbos
// Libmpv
mpv_handle* _mpvHandle = nullptr;
mpv_render_context* _mpvRenderContext = nullptr;
std::unique_ptr<ghoul::opengl::Texture> _frameTexture = nullptr;
GLuint _fbo = 0; // Our opengl framebuffer where mpv renders to
int _wakeup = 0; // Signals when libmpv has a new frame ready
bool _didRender = false; // To know when to swap buffers
bool _isInitialized = false; // If libmpv has been inititalized
bool _isDestroying = false; // If libmpv has been inititalized
bool _isSeeking = false; // Prevent seeking while already seeking
double _seekThreshold = 1.0; // Threshold where we are officially out of sync
// Tile handling
std::map<TileIndex::TileHashKey, Tile> _tileCache; // Cache for rendering 1 frame
bool _tileIsReady = false;
double _seekThreshold = 1.0; // Threshold to ensure we seek to a different time
VideoPlayer _videoPlayer;
};
} // namespace video::globebrowsing

View File

@@ -42,24 +42,6 @@ namespace {
"video that is then loaded and used for all tiles"
};
constexpr openspace::properties::Property::PropertyInfo PlayInfo = {
"Play",
"Play",
"Play video"
};
constexpr openspace::properties::Property::PropertyInfo PauseInfo = {
"Pause",
"Pause",
"Pause video"
};
constexpr openspace::properties::Property::PropertyInfo GoToStartInfo = {
"GoToStart",
"Go To Start",
"Go to start in video"
};
struct [[codegen::Dictionary(VideoPlayer)]] Parameters {
// [[codegen::verbatim(FileInfo.description)]]
std::filesystem::path file;
@@ -140,9 +122,6 @@ documentation::Documentation VideoPlayer::Documentation() {
VideoPlayer::VideoPlayer(const ghoul::Dictionary& dictionary)
: PropertyOwner({ "VideoPlayer" })
, _play(PlayInfo)
, _pause(PauseInfo)
, _goToStart(GoToStartInfo)
{
ZoneScoped
@@ -167,6 +146,9 @@ VideoPlayer::VideoPlayer(const ghoul::Dictionary& dictionary)
}
void VideoPlayer::pause() {
if (!_isInitialized) {
return;
}
bool isPaused = true;
int result = mpv_set_property_async(
_mpvHandle,
@@ -181,12 +163,16 @@ void VideoPlayer::pause() {
}
void VideoPlayer::play() {
if (!_isInitialized) {
return;
}
bool isPaused = false;
int result = mpv_set_property_async(
_mpvHandle,
static_cast<uint64_t>(LibmpvPropertyKey::Pause),
"pause",
MPV_FORMAT_FLAG,
nullptr
&isPaused
);
if (!checkMpvError(result)) {
LWARNING("Error when playing video");
@@ -198,11 +184,17 @@ void VideoPlayer::goToStart() {
}
void VideoPlayer::stepFrameForward() {
if (!_isInitialized) {
return;
}
const char* cmd[] = { "frame-step", nullptr };
commandAsyncMpv(cmd);
}
void VideoPlayer::stepFrameBackward() {
if (!_isInitialized) {
return;
}
const char* cmd[] = { "frame-back-step", nullptr };
commandAsyncMpv(cmd);
}
@@ -248,7 +240,7 @@ void VideoPlayer::initializeMpv() {
//mpv_set_property_string(_mpvHandle, "script-opts", "autoload-disabled=yes");
// Verbose mode
//mpv_set_property_string(_mpvHandle, "msg-level", "all=v");
mpv_set_property_string(_mpvHandle, "msg-level", "all=v");
//mpv_request_log_messages(_mpvHandle, "debug");
if (mpv_initialize(_mpvHandle) < 0) {
@@ -286,7 +278,8 @@ void VideoPlayer::initializeMpv() {
);
// Load file
const char* cmd[] = { "loadfile", _videoFile.string().c_str(), nullptr };
std::string file = _videoFile.string();
const char* cmd[] = { "loadfile", file.c_str(), nullptr };
int result = mpv_command(_mpvHandle, cmd);
if (!checkMpvError(result)) {
LERROR("Could not open video file");
@@ -310,6 +303,9 @@ void VideoPlayer::initializeMpv() {
}
void VideoPlayer::seekToTime(double time) {
if (!_isInitialized) {
return;
}
// Prevent from seeking to the same time multiple times in a row
bool seekIsDifferent = abs(time - _currentVideoTime) > _seekThreshold;
if (seekIsDifferent && !_isSeeking) {
@@ -323,9 +319,14 @@ void VideoPlayer::seekToTime(double time) {
}
}
// Returns the pause state most recently reported by libmpv's "pause"
// property observer (updated asynchronously via handleMpvProperties).
bool VideoPlayer::isPaused() const {
return _isPaused;
}
void VideoPlayer::renderMpv() {
handleMpvEvents();
std::string isPaused = _isPaused ? "Is Paused" : "Not Paused";
//LINFO(isPaused);
if (_wakeup) {
if ((mpv_render_context_update(_mpvRenderContext) & MPV_RENDER_UPDATE_FRAME)) {
// See render_gl.h on what OpenGL environment mpv expects, and other API
@@ -666,6 +667,8 @@ void VideoPlayer::handleMpvProperties(mpv_event* event) {
LERROR("Could not find pause property");
break;
}
bool* videoIsPaused = static_cast<bool*>(event->data);
_isPaused = *videoIsPaused;
break;
}
default: {
@@ -684,7 +687,7 @@ void VideoPlayer::swapBuffersMpv() {
}
}
void VideoPlayer::cleanUpMpv() {
void VideoPlayer::destroy() {
_isDestroying = true;
// Destroy the GL renderer and all of the GL objects it allocated. If video
// is still running, the video track will be deselected.
@@ -695,14 +698,30 @@ void VideoPlayer::cleanUpMpv() {
glDeleteFramebuffers(1, &_fbo);
}
// Read-only access to the texture that libmpv renders each video frame
// into; ownership stays with the VideoPlayer.
const std::unique_ptr<ghoul::opengl::Texture>& VideoPlayer::frameTexture() const {
return _frameTexture;
}
void VideoPlayer::reset() {
if (_videoFile.empty()) {
return;
}
cleanUpMpv();
destroy();
initializeMpv();
}
// True once initializeMpv() has completed and playback calls are safe.
bool VideoPlayer::isInitialized() const {
return _isInitialized;
}
// Duration of the loaded video in seconds, as reported by libmpv's
// "duration" property (0.0 until the property reply has arrived).
double VideoPlayer::videoDuration() const {
return _videoDuration;
}
// Current playback position in seconds, mirrored from libmpv's
// "time-pos" property (updated asynchronously).
double VideoPlayer::currentPlaybackTime() const {
return _currentVideoTime;
}
void VideoPlayer::createFBO(int width, int height) {
LINFO(fmt::format("Creating new FBO with width: {} and height: {}", width, height));
@@ -746,11 +765,10 @@ void VideoPlayer::createFBO(int width, int height) {
}
void VideoPlayer::resizeFBO(int width, int height) {
LINFO(fmt::format("Resizing FBO with width: {} and height: {}", width, height));
if (width == _videoResolution.x && height == _videoResolution.y) {
return;
}
LINFO(fmt::format("Resizing FBO with width: {} and height: {}", width, height));
// Update resolution of video
_videoResolution = glm::ivec2(width, height);

View File

@@ -109,86 +109,22 @@ namespace {
namespace openspace {
// Returns true when an mpv API status code indicates success; otherwise
// logs the libmpv error string and returns false.
bool checkMpvError(int status) {
    if (status >= 0) {
        return true;
    }
    LERROR(fmt::format("Libmpv API error: {}", mpv_error_string(status)));
    return false;
}
// Resolver handed to libmpv so it can look up OpenGL functions through the
// engine's window delegate. The unused void* is the mpv user-context slot.
void* getOpenGLProcAddress(void*, const char* name) {
return reinterpret_cast<void*>(
global::windowDelegate->openGLProcedureAddress(name)
);
}
// libmpv render-update callback; must be a plain (static) function because
// libmpv is a C API. `ctx` is the VideoTileProvider instance registered via
// mpv_render_context_set_update_callback.
void VideoTileProvider::on_mpv_render_update(void* ctx) {
// The wakeup flag is set here to enable the mpv_render_context_render
// path in the main loop.
// The pattern here with a static function and a void pointer to the class
// instance is a common pattern where C++ integrates a C library
static_cast<VideoTileProvider*>(ctx)->_wakeup = 1;
}
// Registers an mpv property for change notifications; updates arrive later
// as MPV_EVENT_PROPERTY_CHANGE events tagged with `key`.
// name:   libmpv property name (e.g. "duration")
// format: format in which the property should be delivered
// key:    reply_userdata value used to identify the property in the event loop
void VideoTileProvider::observePropertyMpv(std::string name, mpv_format format,
                                           LibmpvPropertyKey key)
{
    // mpv_observe_property returns an error code; check it like the other
    // mpv wrappers in this file do instead of silently dropping it
    int result = mpv_observe_property(
        _mpvHandle,
        static_cast<uint64_t>(key),
        name.c_str(),
        format
    );
    if (!checkMpvError(result)) {
        LWARNING(fmt::format("Error observing property {}", name));
    }
}
// Sets a string-valued libmpv option/property, logging a warning on failure.
void VideoTileProvider::setPropertyStringMpv(std::string name, std::string value) {
    const int status = mpv_set_property_string(_mpvHandle, name.c_str(), value.c_str());
    if (checkMpvError(status)) {
        return;
    }
    LWARNING(fmt::format("Error setting property {}", name));
}
// Requests a property value asynchronously; the reply arrives as an
// MPV_EVENT_GET_PROPERTY_REPLY event tagged with `key`.
void VideoTileProvider::getPropertyAsyncMpv(std::string name, mpv_format format,
                                            LibmpvPropertyKey key)
{
    const int status = mpv_get_property_async(
        _mpvHandle,
        static_cast<uint64_t>(key),
        name.c_str(),
        format
    );
    if (!checkMpvError(status)) {
        LWARNING("Could not find property " + name);
    }
}
// Issues an asynchronous libmpv command (NULL-terminated argv-style array);
// the reply arrives as an MPV_EVENT_COMMAND_REPLY event tagged with `key`.
void VideoTileProvider::commandAsyncMpv(const char* cmd[], LibmpvPropertyKey key) {
    const int status = mpv_command_async(
        _mpvHandle,
        static_cast<uint64_t>(key),
        cmd
    );
    if (!checkMpvError(status)) {
        LERROR(fmt::format("Could not execute command {}", cmd[0]));
    }
}
// Returns the codegen-generated documentation for this tile provider's
// dictionary parameters.
documentation::Documentation VideoTileProvider::Documentation() {
return codegen::doc<Parameters>("video_videotileprovider");
}
VideoTileProvider::VideoTileProvider(const ghoul::Dictionary& dictionary)
: _play(PlayInfo)
: _videoPlayer(dictionary)
, _play(PlayInfo)
, _pause(PauseInfo)
, _goToStart(GoToStartInfo)
, _goToStart(GoToStartInfo)
{
ZoneScoped
const Parameters p = codegen::bake<Parameters>(dictionary);
_videoFile = p.file;
if (p.playbackMode.has_value()) {
switch (*p.playbackMode) {
case Parameters::PlaybackMode::RealTimeLoop:
@@ -198,17 +134,18 @@ VideoTileProvider::VideoTileProvider(const ghoul::Dictionary& dictionary)
_playbackMode = PlaybackMode::MapToSimulationTime;
break;
default:
LERROR("Missing playback mode in VideoTileProvider");
throw ghoul::MissingCaseException();
}
}
if (_playbackMode == PlaybackMode::RealTimeLoop) {
// Video interaction. Only valid for real time looping
_play.onChange([this]() { play(); });
_play.onChange([this]() { _videoPlayer.play(); });
addProperty(_play);
_pause.onChange([this]() { pause(); });
_pause.onChange([this]() { _videoPlayer.pause(); });
addProperty(_pause);
_goToStart.onChange([this]() { goToStart(); });
_goToStart.onChange([this]() { _videoPlayer.goToStart(); });
addProperty(_goToStart);
}
else if (_playbackMode == PlaybackMode::MapToSimulationTime) {
@@ -217,36 +154,27 @@ VideoTileProvider::VideoTileProvider(const ghoul::Dictionary& dictionary)
" end time");
return;
}
//_videoDuration = *p.duration;
_startJ200Time = Time::convertTime(*p.startTime);
_endJ200Time = Time::convertTime(*p.endTime);
ghoul_assert(_endJ200Time > _startJ200Time, "Invalid times for video");
// Change the video time if OpenSpace time changes
global::timeManager->addTimeJumpCallback([this]() {
seekToTime(correctVideoPlaybackTime());
_videoPlayer.seekToTime(correctVideoPlaybackTime());
});
// Ensure we are synchronized to OpenSpace time in presync step
global::callback::preSync->emplace_back([this]() {
// This mode should always be paused as we're stepping through the frames
_videoPlayer.pause();
syncToSimulationTime();
});
}
global::callback::postSyncPreDraw->emplace_back([this]() {
// Initialize mpv here to ensure that the opengl context is the same as in for
// the rendering
if (!_isInitialized) {
initializeMpv();
}
else if(!_isDestroying) {
renderMpv();
}
});
global::callback::postDraw->emplace_back([this]() {
swapBuffersMpv();
});
}
Tile VideoTileProvider::tile(const TileIndex& tileIndex) {
ZoneScoped
if (!_isInitialized) {
if (!_videoPlayer.isInitialized()) {
return Tile();
}
@@ -254,15 +182,25 @@ Tile VideoTileProvider::tile(const TileIndex& tileIndex) {
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
LINFO("Framebuffer is not complete");
}
return Tile{ _videoPlayer.frameTexture().get(), std::nullopt, Tile::Status::OK };
/*
// For now, don't use the cache as we're trying to debug the problem w playback
uint64_t hash = tileIndex.hashKey();
if (_tileCache.find(hash) == _tileCache.end()) {
auto foundTile = _tileCache.find(hash);
bool textureChanged = foundTile != _tileCache.end() &&
foundTile->second.texture != _videoPlayer.frameTexture().get();
if (foundTile == _tileCache.end() || textureChanged) {
_tileCache[hash] = Tile{
_frameTexture.get(),
_videoPlayer.frameTexture().get(),
std::nullopt,
Tile::Status::OK
};
}
return _tileCache[hash];
return _tileCache[hash];*/
}
Tile::Status VideoTileProvider::tileStatus(const TileIndex& tileIndex) {
@@ -283,6 +221,10 @@ TileDepthTransform VideoTileProvider::depthTransform() {
void VideoTileProvider::update() {}
void VideoTileProvider::reset() {
_videoPlayer.reset();
}
ChunkTile VideoTileProvider::chunkTile(TileIndex tileIndex, int parents, int maxParents) {
ZoneScoped
@@ -303,154 +245,6 @@ ChunkTile VideoTileProvider::chunkTile(TileIndex tileIndex, int parents, int max
return traverseTree(tileIndex, parents, maxParents, ascendToParent, uvTransform);
}
// Asks libmpv to pause playback by setting the "pause" flag to true.
// The request is asynchronous; the resulting state is reported back
// through the "pause" property observer.
void VideoTileProvider::pause() {
    bool isPaused = true;
    const int status = mpv_set_property_async(
        _mpvHandle,
        static_cast<uint64_t>(LibmpvPropertyKey::Pause),
        "pause",
        MPV_FORMAT_FLAG,
        &isPaused
    );
    if (!checkMpvError(status)) {
        LWARNING("Error when pausing video");
    }
}
// Asks libmpv to resume playback by clearing the "pause" flag.
void VideoTileProvider::play() {
    // Bug fix: mpv_set_property_async with MPV_FORMAT_FLAG requires a
    // pointer to the flag value; the previous nullptr data argument was
    // invalid and could not unpause the video.
    // NOTE(review): libmpv documents MPV_FORMAT_FLAG as int-backed; this
    // codebase passes bool* throughout — confirm against client.h.
    bool isPaused = false;
    int result = mpv_set_property_async(
        _mpvHandle,
        static_cast<uint64_t>(LibmpvPropertyKey::Pause),
        "pause",
        MPV_FORMAT_FLAG,
        &isPaused
    );
    if (!checkMpvError(result)) {
        LWARNING("Error when playing video");
    }
}
// Rewinds the video by seeking to playback time 0.
void VideoTileProvider::goToStart() {
seekToTime(0.0);
}
// Advances playback by exactly one frame using libmpv's "frame-step"
// command (which, per the mpv manual, leaves playback paused).
void VideoTileProvider::stepFrameForward() {
const char* cmd[] = { "frame-step", nullptr };
commandAsyncMpv(cmd);
}
// Steps playback back one frame using libmpv's "frame-back-step" command.
void VideoTileProvider::stepFrameBackward() {
const char* cmd[] = { "frame-back-step", nullptr };
commandAsyncMpv(cmd);
}
void VideoTileProvider::initializeMpv() {
_mpvHandle = mpv_create();
if (!_mpvHandle) {
LINFO("LibMpv: mpv context init failed");
}
// Set libmpv flags before initializing
// See order at https://github.com/mpv-player/mpv/blob/master/libmpv/client.h#L420
// Avoiding async calls in uninitialized state
if (_playbackMode == PlaybackMode::RealTimeLoop) {
// Loop video
// https://mpv.io/manual/master/#options-loop
setPropertyStringMpv("loop", "");
}
// Allow only OpenGL (requires OpenGL 2.1+ or GLES 2.0+)
// https://mpv.io/manual/master/#options-gpu-api
setPropertyStringMpv("gpu-api", "opengl");
// Enable hardware decoding
// https://mpv.io/manual/master/#options-hwdec
setPropertyStringMpv("hwdec", "auto");
// Enable direct rendering (default: auto). If this is set to yes, the video will be
// decoded directly to GPU video memory (or staging buffers).
// https://mpv.io/manual/master/#options-vd-lavc-dr
setPropertyStringMpv("vd-lavc-dr", "yes");
// Print libmpv couts to the terminal
// https://mpv.io/manual/master/#options-terminal
setPropertyStringMpv("terminal", "yes");
// Control how long before video display target time the frame should be rendered
// https://mpv.io/manual/master/#options-video-timing-offset
setPropertyStringMpv("video-timing-offset", "0");
//setPropertyStringMpv("load-stats-overlay", "");
//mpv_set_property_string(_mpvHandle, "script-opts", "autoload-disabled=yes");
// Verbose mode
//mpv_set_property_string(_mpvHandle, "msg-level", "all=v");
//mpv_request_log_messages(_mpvHandle, "debug");
if (mpv_initialize(_mpvHandle) < 0) {
LINFO("mpv init failed");
}
mpv_opengl_init_params gl_init_params{ getOpenGLProcAddress, nullptr };
int adv = 1; // Use libmpv advanced mode since we will use the update callback
// Decouple mpv from waiting to get the correct fps. Use with flag video-timing-offset
// set to 0
int blockTime = 0;
mpv_render_param params[]{
{MPV_RENDER_PARAM_API_TYPE, const_cast<char*>(MPV_RENDER_API_TYPE_OPENGL)},
{MPV_RENDER_PARAM_OPENGL_INIT_PARAMS, &gl_init_params},
{MPV_RENDER_PARAM_ADVANCED_CONTROL, &adv},
{MPV_RENDER_PARAM_BLOCK_FOR_TARGET_TIME, &blockTime},
{MPV_RENDER_PARAM_INVALID, nullptr}
};
// This makes mpv use the currently set GL context. It will use the callback
// (passed via params) to resolve GL builtin functions, as well as extensions.
if (mpv_render_context_create(&_mpvRenderContext, _mpvHandle, params) < 0) {
LINFO("Failed to initialize libmpv OpenGL context");
}
// When there is a need to call mpv_render_context_update(), which can
// request a new frame to be rendered.
// (Separate from the normal event handling mechanism for the sake of
// users which run OpenGL on a different thread.)
mpv_render_context_set_update_callback(
_mpvRenderContext,
on_mpv_render_update,
this
);
// Load file
const char* cmd[] = { "loadfile", _videoFile.string().c_str(), nullptr };
int result = mpv_command(_mpvHandle, cmd);
if (!checkMpvError(result)) {
LERROR("Could not open video file");
return;
}
//Create FBO to render video into
createFBO(_videoResolution.x, _videoResolution.y);
//Observe video parameters
observePropertyMpv("video-params", MPV_FORMAT_NODE, LibmpvPropertyKey::Params);
observePropertyMpv("pause", MPV_FORMAT_FLAG, LibmpvPropertyKey::Pause);
observePropertyMpv("time-pos", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Time);
observePropertyMpv("duration", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Duration);
observePropertyMpv("height", MPV_FORMAT_INT64, LibmpvPropertyKey::Height);
observePropertyMpv("width", MPV_FORMAT_INT64, LibmpvPropertyKey::Width);
observePropertyMpv("metadata", MPV_FORMAT_NODE, LibmpvPropertyKey::Meta);
observePropertyMpv("container-fps", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Fps);
if (_playbackMode == PlaybackMode::MapToSimulationTime) {
pause();
}
_isInitialized = true;
}
bool VideoTileProvider::isWithingStartEndTime() const {
const double now = global::timeManager->time().j2000Seconds();
return now <= _endJ200Time && now >= _startJ200Time;
@@ -468,24 +262,10 @@ double VideoTileProvider::correctVideoPlaybackTime() const {
else {
percentage = (now - _startJ200Time) / (_endJ200Time - _startJ200Time);
}
return percentage * _videoDuration;
return percentage * _videoPlayer.videoDuration();
}
void VideoTileProvider::seekToTime(double time) {
// Prevent from seeking to the same time multiple times in a row
bool seekIsDifferent = abs(time - _currentVideoTime) > _seekThreshold;
if (seekIsDifferent && !_isSeeking) {
// Pause while seeking
pause();
std::string timeString = std::to_string(time);
const char* params = timeString.c_str();
const char* cmd[] = { "seek", params, "absolute", NULL };
commandAsyncMpv(cmd, LibmpvPropertyKey::Seek);
_isSeeking = true;
}
}
void VideoTileProvider::renderMpv() {
void VideoTileProvider::syncToSimulationTime() {
if (_playbackMode == PlaybackMode::MapToSimulationTime) {
// If we are in valid times, step frames accordingly
if (isWithingStartEndTime()) {
@@ -493,402 +273,25 @@ void VideoTileProvider::renderMpv() {
double deltaTime = now - _timeAtLastRender;
if (deltaTime > _frameDuration) {
// Stepping forwards
stepFrameForward();
_videoPlayer.stepFrameForward();
_timeAtLastRender = now;
}
else if (deltaTime < -_frameDuration) {
// Stepping backwards
stepFrameBackward();
_videoPlayer.stepFrameBackward();
_timeAtLastRender = now;
}
}
else if (!_videoPlayer.isPaused()) {
_videoPlayer.pause();
}
// Make sure we are at the correct time
double time = correctVideoPlaybackTime();
bool shouldSeek = abs(time - _currentVideoTime) > _seekThreshold;
bool shouldSeek = abs(time - _videoPlayer.currentPlaybackTime()) > _seekThreshold;
if (shouldSeek) {
seekToTime(time); // We end up two frames too late
_videoPlayer.seekToTime(time);
}
}
handleMpvEvents();
if (_wakeup) {
if ((mpv_render_context_update(_mpvRenderContext) & MPV_RENDER_UPDATE_FRAME)) {
// See render_gl.h on what OpenGL environment mpv expects, and other API
// details. This function fills the fbo and texture with data, after it
// we can get the data on the GPU, not the CPU
int fboInt = static_cast<int>(_fbo);
mpv_opengl_fbo mpfbo{
fboInt ,
_videoResolution.x,
_videoResolution.y, 0
};
int flip_y{ 1 };
mpv_render_param params[] = {
{MPV_RENDER_PARAM_OPENGL_FBO, &mpfbo},
{MPV_RENDER_PARAM_FLIP_Y, &flip_y},
{MPV_RENDER_PARAM_INVALID, nullptr}
};
// This "renders" to the video_framebuffer "linked by ID" in the
// params_fbo
mpv_render_context_render(_mpvRenderContext, params);
/* TODO: remove this comment in case we never encounter this issue again */
// We have to set the Viewport on every cycle because
// mpv_render_context_render internally rescales the fb of the context(?!)...
//glm::ivec2 window = global::windowDelegate->currentDrawBufferResolution();
//glViewport(0, 0, window.x, window.y);
_didRender = true;
// Since all tiles use the same texture, all tiles are ready now
_tileIsReady = true;
}
}
}
void VideoTileProvider::handleMpvEvents() {
while (_mpvHandle) {
mpv_event* event = mpv_wait_event(_mpvHandle, 0.0);
if (event->event_id == MPV_EVENT_NONE) {
return;
}
switch (event->event_id) {
case MPV_EVENT_VIDEO_RECONFIG: {
// Retrieve the new video size
// Get width
getPropertyAsyncMpv("width", MPV_FORMAT_INT64, LibmpvPropertyKey::Width);
getPropertyAsyncMpv("height", MPV_FORMAT_INT64, LibmpvPropertyKey::Height);
break;
}
case MPV_EVENT_PROPERTY_CHANGE: {
mpv_event_property* prop = (mpv_event_property*)event->data;
if (strcmp(prop->name, "video-params") == 0 &&
prop->format == MPV_FORMAT_NODE)
{
getPropertyAsyncMpv("video-params", MPV_FORMAT_NODE, LibmpvPropertyKey::Params);
}
if (strcmp(prop->name, "time-pos") == 0 &&
prop->format == MPV_FORMAT_DOUBLE)
{
getPropertyAsyncMpv("time-pos", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Time);
}
if (strcmp(prop->name, "duration") == 0 &&
prop->format == MPV_FORMAT_DOUBLE)
{
getPropertyAsyncMpv("duration", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Duration);
}
if (strcmp(prop->name, "container-fps") == 0 &&
prop->format == MPV_FORMAT_DOUBLE)
{
getPropertyAsyncMpv("container-fps", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Fps);
}
if (strcmp(prop->name, "pause") == 0 &&
prop->format == MPV_FORMAT_FLAG)
{
getPropertyAsyncMpv("pause", MPV_FORMAT_FLAG, LibmpvPropertyKey::Pause);
}
if (strcmp(prop->name, "height") == 0 &&
prop->format == MPV_FORMAT_INT64)
{
getPropertyAsyncMpv("height", MPV_FORMAT_INT64, LibmpvPropertyKey::Height);
}
if (strcmp(prop->name, "width") == 0 &&
prop->format == MPV_FORMAT_INT64)
{
getPropertyAsyncMpv("width", MPV_FORMAT_INT64, LibmpvPropertyKey::Width);
}
if (strcmp(prop->name, "metadata") == 0 &&
prop->format == MPV_FORMAT_NODE)
{
getPropertyAsyncMpv("metadata", MPV_FORMAT_NODE, LibmpvPropertyKey::Meta);
}
break;
}
case MPV_EVENT_LOG_MESSAGE: {
struct mpv_event_log_message* msg =
(struct mpv_event_log_message*)event->data;
std::stringstream ss;
ss << "[" << msg->prefix << "] " << msg->level << ": " << msg->text;
LINFO(ss.str());
break;
}
case MPV_EVENT_COMMAND_REPLY: {
switch (event->reply_userdata) {
case static_cast<uint64_t>(LibmpvPropertyKey::Command): {
int result = event->error;
if (!checkMpvError) {
LINFO("Command Error");
}
break;
}
case static_cast<uint64_t>(LibmpvPropertyKey::Seek): {
int result = event->error;
if (!checkMpvError) {
LINFO("Seek Error");
}
_isSeeking = false;
break;
}
default: {
break;
}
}
break;
}
case MPV_EVENT_GET_PROPERTY_REPLY: {
int result = event->error;
if (!checkMpvError(result)) {
LWARNING(fmt::format(
"Error while gettting property of type: {}", event->reply_userdata
));
break;
}
handleMpvProperties(event);
break;
}
default: {
// Ignore uninteresting or unknown events.
break;
}
}
}
}
// Handles MPV_EVENT_GET_PROPERTY_REPLY events: copies the delivered value
// into the corresponding member and reacts to changes (FBO resize, seek,
// frame-duration update). `event->reply_userdata` identifies the property.
void VideoTileProvider::handleMpvProperties(mpv_event* event) {
    switch (static_cast<LibmpvPropertyKey>(event->reply_userdata)) {
        case LibmpvPropertyKey::Duration: {
            if (!event->data) {
                LERROR("Could not find duration property");
                break;
            }
            mpv_event_property* property =
                static_cast<mpv_event_property*>(event->data);
            double* duration = static_cast<double*>(property->data);
            if (!duration) {
                LERROR("Could not find duration property");
                break;
            }
            _videoDuration = *duration;
            // One video frame expressed in simulation seconds: frame length
            // scaled by the mapped simulation interval
            _frameDuration = (1.0 / _fps) * ((_endJ200Time - _startJ200Time) / _videoDuration);
            if (_playbackMode == PlaybackMode::MapToSimulationTime) {
                seekToTime(correctVideoPlaybackTime());
            }
            LINFO(fmt::format("Duration: {}", *duration));
            break;
        }
        case LibmpvPropertyKey::Height: {
            if (!event->data) {
                LERROR("Could not find height property");
                break;
            }
            mpv_event_property* property =
                static_cast<mpv_event_property*>(event->data);
            // NOTE(review): MPV_FORMAT_INT64 delivers int64_t*; casting to
            // int* relies on little-endian truncation — confirm
            int* height = static_cast<int*>(property->data);
            if (!height) {
                LERROR("Could not find height property");
                break;
            }
            if (*height == _videoResolution.y) {
                break; // unchanged
            }
            LINFO(fmt::format("New height: {}", *height));
            if (*height > 0 && _videoResolution.x > 0 && _fbo > 0) {
                resizeFBO(_videoResolution.x, *height);
            }
            break;
        }
        case LibmpvPropertyKey::Width: {
            if (!event->data) {
                // Bug fix: message previously said "height"
                LERROR("Could not find width property");
                break;
            }
            mpv_event_property* property =
                static_cast<mpv_event_property*>(event->data);
            int* width = static_cast<int*>(property->data);
            if (!width) {
                LERROR("Could not find width property");
                break;
            }
            // Bug fix: the new width was compared against _videoResolution.y
            if (*width == _videoResolution.x) {
                break; // unchanged
            }
            LINFO(fmt::format("New width: {}", *width));
            if (*width > 0 && _videoResolution.y > 0 && _fbo > 0) {
                resizeFBO(*width, _videoResolution.y);
            }
            break;
        }
        case LibmpvPropertyKey::Meta: {
            if (!event->data) {
                LERROR("Could not find video parameters");
                break;
            }
            mpv_node node;
            int result = mpv_event_to_node(&node, event);
            if (!checkMpvError(result)) {
                // Node was not filled in; nothing to read or free
                LWARNING("Could not find video parameters of video");
                break;
            }
            LINFO("Printing meta data reply");
            if (node.format == MPV_FORMAT_NODE_MAP) {
                for (int n = 0; n < node.u.list->num; n++) {
                    if (node.u.list->values[n].format == MPV_FORMAT_STRING) {
                        LINFO(node.u.list->values[n].u.string);
                    }
                }
            }
            else {
                LWARNING("No meta data could be read");
            }
            // mpv_event_to_node allocates a copy the caller must release
            mpv_free_node_contents(&node);
            break;
        }
        case LibmpvPropertyKey::Params: {
            if (!event->data) {
                LINFO("Could not find video parameters");
                break;
            }
            mpv_node videoParams;
            int result = mpv_event_to_node(&videoParams, event);
            if (!checkMpvError(result)) {
                LWARNING("Could not find video parameters of video");
                break;
            }
            if (videoParams.format == MPV_FORMAT_NODE_ARRAY ||
                videoParams.format == MPV_FORMAT_NODE_MAP)
            {
                mpv_node_list* list = videoParams.u.list;
                mpv_node width, height;
                bool foundWidth = false;
                bool foundHeight = false;
                for (int i = 0; i < list->num; ++i) {
                    if (foundWidth && foundHeight) {
                        break;
                    }
                    // Bug fix: `list->keys[i] == "w"` compared pointers, not
                    // string contents, so the keys never matched
                    if (strcmp(list->keys[i], "w") == 0) {
                        width = list->values[i];
                        foundWidth = true;
                    }
                    else if (strcmp(list->keys[i], "h") == 0) {
                        height = list->values[i];
                        foundHeight = true;
                    }
                }
                if (!foundWidth || !foundHeight) {
                    LERROR("Could not find width or height params");
                    mpv_free_node_contents(&videoParams);
                    break;
                }
                int w = -1;
                int h = -1;
                if (width.format == MPV_FORMAT_INT64) {
                    w = static_cast<int>(width.u.int64);
                }
                if (height.format == MPV_FORMAT_INT64) {
                    h = static_cast<int>(height.u.int64);
                }
                if (w == -1 || h == -1) {
                    LERROR("Invalid width or height params");
                    mpv_free_node_contents(&videoParams);
                    break;
                }
                resizeFBO(w, h);
            }
            // Release the node copy allocated by mpv_event_to_node
            mpv_free_node_contents(&videoParams);
            break;
        }
        case LibmpvPropertyKey::Time: {
            if (!event->data) {
                LERROR("Could not find playback time property");
                break;
            }
            mpv_event_property* property =
                static_cast<mpv_event_property*>(event->data);
            double* time = static_cast<double*>(property->data);
            if (!time) {
                LERROR("Could not find playback time property");
                break;
            }
            _currentVideoTime = *time;
            // Time has changed - we don't have a texture yet
            _tileIsReady = false;
            break;
        }
        case LibmpvPropertyKey::Fps: {
            if (!event->data) {
                LERROR("Could not find fps property");
                break;
            }
            mpv_event_property* property =
                static_cast<mpv_event_property*>(event->data);
            double* fps = static_cast<double*>(property->data);
            // Bug fix: the null check must come before the dereference below
            if (!fps) {
                LERROR("Could not find fps property");
                break;
            }
            if (*fps < glm::epsilon<double>()) {
                LWARNING("Detected fps was 0. Falling back on 24 fps");
                break;
            }
            _fps = *fps;
            LINFO(fmt::format("Detected fps: {}", *fps));
            _frameDuration = (1.0 / _fps) * ((_endJ200Time - _startJ200Time) / _videoDuration);
            // Two frames of slack before a seek counts as "different"
            _seekThreshold = 2.0 * (1.0 / _fps);
            break;
        }
        case LibmpvPropertyKey::Pause: {
            if (!event->data) {
                LERROR("Could not find pause property");
                break;
            }
            break;
        }
        default: {
            throw ghoul::MissingCaseException();
        }
    }
}
// Called in postDraw. Reports a buffer swap to libmpv, but only when a
// frame was both requested (_wakeup) and actually rendered (_didRender).
void VideoTileProvider::swapBuffersMpv() {
    // Only swap buffers if there was a frame rendered and there is a new frame waiting
    if (_wakeup && _didRender) {
        mpv_render_context_report_swap(_mpvRenderContext);
        _wakeup = 0;
        _didRender = false; // was `= 0`; _didRender is a bool
    }
}
// Tears down all libmpv and GL state. Per the libmpv render API, the render
// context must be freed before the mpv handle is destroyed — keep this order.
void VideoTileProvider::cleanUpMpv() {
// Guard flag read by the render callbacks so they stop touching mpv state
_isDestroying = true;
// Destroy the GL renderer and all of the GL objects it allocated. If video
// is still running, the video track will be deselected.
mpv_render_context_free(_mpvRenderContext);
mpv_destroy(_mpvHandle);
glDeleteFramebuffers(1, &_fbo);
}
int VideoTileProvider::minLevel() {
@@ -901,85 +304,16 @@ int VideoTileProvider::maxLevel() {
return 1337;
}
// Restarts the video pipeline by destroying and re-creating all libmpv
// state. No-op when no video file was configured.
void VideoTileProvider::reset() {
if (_videoFile.empty()) {
return;
}
cleanUpMpv();
initializeMpv();
}
// Sentinel value reported for "no data".
// NOTE(review): std::numeric_limits<float>::min() is the smallest POSITIVE
// normalized float, not the most negative value; if a "lowest possible"
// sentinel was intended, lowest() may have been meant — confirm against how
// callers compare this value.
float VideoTileProvider::noDataValueAsFloat() {
return std::numeric_limits<float>::min();
}
// Intentionally empty: mpv initialization is deferred to the first
// postSyncPreDraw callback so the correct OpenGL context is current.
void VideoTileProvider::internalInitialize() {}
// Creates the framebuffer and frame texture that libmpv renders into.
// width/height: video resolution in pixels; empty sizes are rejected.
// NOTE(review): no glCheckFramebufferStatus after attachment — completeness
// is only checked later at tile() time.
void VideoTileProvider::createFBO(int width, int height) {
LINFO(fmt::format("Creating new FBO with width: {} and height: {}", width, height));
if (width <= 0 || height <= 0) {
LERROR("Cannot create empty fbo");
return;
}
// Update resolution of video
_videoResolution = glm::ivec2(width, height);
glGenFramebuffers(1, &_fbo);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
_frameTexture = std::make_unique<ghoul::opengl::Texture>(
glm::uvec3(width, height, 1),
GL_TEXTURE_2D
);
_frameTexture->uploadTexture();
// Configure
_frameTexture->bind();
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Disable mipmaps
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
// Bind texture to framebuffer
glFramebufferTexture2D(
GL_FRAMEBUFFER,
GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D,
*_frameTexture,
0
);
// Unbind FBO
glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
// Recreates the FBO and frame texture at a new video resolution and
// invalidates cached tiles. No-op when the size is unchanged.
void VideoTileProvider::resizeFBO(int width, int height) {
    if (width == _videoResolution.x && height == _videoResolution.y) {
        return;
    }
    // Log only when an actual resize happens (previously this logged on
    // every call, including no-ops)
    LINFO(fmt::format("Resizing FBO with width: {} and height: {}", width, height));

    // Update resolution of video
    _videoResolution = glm::ivec2(width, height);

    // Delete old FBO and texture
    glDeleteFramebuffers(1, &_fbo);
    _frameTexture.reset(nullptr);

    // Cached tiles reference the old texture; drop them
    _tileCache.clear();
    _tileIsReady = false;

    createFBO(width, height);
}
// Empty destructor; libmpv/GL teardown happens in internalDeinitialize()
VideoTileProvider::~VideoTileProvider() {}
void VideoTileProvider::internalDeinitialize() {
cleanUpMpv();
_videoPlayer.destroy();
}
} // namespace openspace::video