Move videotileprovider to video module

This commit is contained in:
Ylva Selling
2023-02-14 15:38:47 -05:00
parent eccb98dd24
commit 8cb5bbf49a
13 changed files with 135 additions and 64 deletions
+26
View File
@@ -26,15 +26,41 @@ include(${PROJECT_SOURCE_DIR}/support/cmake/module_definition.cmake)
# Header files that make up the public interface of this module
set(HEADER_FILES
videomodule.h
include/videotileprovider.h
)
source_group("Header Files" FILES ${HEADER_FILES})

# Implementation files for the module
set(SOURCE_FILES
videomodule.cpp
videomodule_lua.inl
src/videotileprovider.cpp
)
source_group("Source Files" FILES ${SOURCE_FILES})

# Libmpv
# On Windows the prebuilt libmpv binaries shipped in ext/ are imported directly;
# on other platforms libmpv is located through find_package
# NOTE(review): this appears to be the Video module's CMakeLists (it lists
# videomodule.h above), but the link/include commands below target
# openspace-module-globebrowsing — confirm they should not reference the
# video module target after the move.
if(WIN32)
add_library(libmpv SHARED IMPORTED)
target_include_directories(libmpv SYSTEM INTERFACE ${CMAKE_CURRENT_SOURCE_DIR}/ext/libmpv/include)
set_target_properties(libmpv PROPERTIES
IMPORTED_LOCATION "${CMAKE_CURRENT_SOURCE_DIR}/ext/libmpv/bin/mpv-2.dll"
IMPORTED_IMPLIB "${CMAKE_CURRENT_SOURCE_DIR}/ext/libmpv/lib/mpv.lib"
)
# NOTE(review): libopenh264's IMPORTED_IMPLIB points at mpv.lib — verify this
# is intentional and not a copy-paste of the libmpv settings above
add_library(libopenh264 SHARED IMPORTED)
set_target_properties(libopenh264 PROPERTIES
IMPORTED_LOCATION "${CMAKE_CURRENT_SOURCE_DIR}/ext/libmpv/bin/libopenh264.dll"
IMPORTED_IMPLIB "${CMAKE_CURRENT_SOURCE_DIR}/ext/libmpv/lib/mpv.lib"
)
target_link_libraries(openspace-module-globebrowsing PUBLIC libmpv libopenh264)
else(WIN32)
find_package(LIBMPV REQUIRED)
target_include_directories(openspace-module-globebrowsing SYSTEM PRIVATE ${LIBMPV_INCLUDE_DIR})
target_link_libraries(openspace-module-globebrowsing PRIVATE ${LIBMPV_LIBRARY})
mark_as_advanced(LIBMPV_CONFIG LIBMPV_INCLUDE_DIR LIBMPV_LIBRARY)
endif() # WIN32
create_new_module(
"Video"
video_module
+152
View File
@@ -0,0 +1,152 @@
/*****************************************************************************************
* *
* OpenSpace *
* *
* Copyright (c) 2014-2022 *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy of this *
* software and associated documentation files (the "Software"), to deal in the Software *
* without restriction, including without limitation the rights to use, copy, modify, *
* merge, publish, distribute, sublicense, and/or sell copies of the Software, and to *
* permit persons to whom the Software is furnished to do so, subject to the following *
* conditions: *
* *
* The above copyright notice and this permission notice shall be included in all copies *
* or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A *
* PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT *
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF *
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE *
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
****************************************************************************************/
#ifndef __OPENSPACE_MODULE_GLOBEBROWSING___TILEPROVIDER__VIDEOTILEPROVIDER___H__
#define __OPENSPACE_MODULE_GLOBEBROWSING___TILEPROVIDER__VIDEOTILEPROVIDER___H__
#include <modules/globebrowsing/src/tileprovider/tileprovider.h>
#include <openspace/properties/triggerproperty.h>
#include <openspace/properties/scalar/doubleproperty.h>
#include <openspace/properties/vector/ivec2property.h>
#include <ghoul/glm.h>
// libmpv
#include <client.h>
#include <render_gl.h>
namespace openspace { struct Documentation; }
namespace openspace {

using namespace globebrowsing;

/**
 * Tile provider that decodes a video through libmpv and renders each frame into a
 * single texture that is shared by every tile. Playback can either loop in real time
 * or be mapped onto a fixed simulation-time interval (see PlaybackMode).
 */
class VideoTileProvider : public TileProvider {
public:
    VideoTileProvider(const ghoul::Dictionary& dictionary);
    ~VideoTileProvider();

    void update() override final;
    void reset() override final;
    int minLevel() override final;
    int maxLevel() override final;
    float noDataValueAsFloat() override final;
    ChunkTile chunkTile(TileIndex tileIndex, int parents, int maxParents = 1337) override;
    Tile tile(const TileIndex& tileIndex) override final;
    Tile::Status tileStatus(const TileIndex& tileIndex) override final;
    TileDepthTransform depthTransform() override final;

    // Video interaction
    void pause();
    void play();
    void goToStart();
    void stepFrameForward();
    void stepFrameBackward();
    void seekToTime(double time);

    static documentation::Documentation Documentation();

private:
    // Trigger properties exposed to the UI (added only in RealTimeLoop mode)
    properties::TriggerProperty _play;
    properties::TriggerProperty _pause;
    properties::TriggerProperty _goToStart;

    // libmpv property keys; passed as user data so asynchronous replies can be
    // matched back to the request that triggered them
    enum class LibmpvPropertyKey : uint64_t {
        Duration = 1,
        Height,
        Width,
        Meta,
        Params,
        Time,
        Command,
        Seek,
        Fps,
        Pause
    };
    enum class PlaybackMode {
        MapToSimulationTime = 0,
        RealTimeLoop
    };

    void createFBO(int width, int height);
    void resizeFBO(int width, int height);

    // Map to simulation time functions
    double correctVideoPlaybackTime() const;
    // NOTE(review): name is misspelled ("Withing"); kept to match the definition
    bool isWithingStartEndTime() const;

    // Libmpv
    static void on_mpv_render_update(void*); // Has to be static because of C api
    void initializeMpv(); // Called first time in postSyncPreDraw
    void renderMpv(); // Called in postSyncPreDraw
    void handleMpvEvents();
    void handleMpvProperties(mpv_event* event);
    void swapBuffersMpv(); // Called in postDraw
    void cleanUpMpv(); // Called in internalDeinitialze
    void observePropertyMpv(std::string name, mpv_format format, LibmpvPropertyKey key);
    void setPropertyStringMpv(std::string name, std::string value);
    void getPropertyAsyncMpv(std::string name, mpv_format format, LibmpvPropertyKey key);
    void commandAsyncMpv(const char* cmd[],
        LibmpvPropertyKey key = LibmpvPropertyKey::Command);
    void internalInitialize() override final;
    void internalDeinitialize() override final;

    PlaybackMode _playbackMode = PlaybackMode::RealTimeLoop; // Default is to loop
    std::filesystem::path _videoFile;

    // Video stretching: map to simulation time animation mode
    double _startJ200Time = 0.0;
    double _endJ200Time = 0.0;
    double _timeAtLastRender = 0.0;
    double _frameDuration = 0.0;

    // Video properties. Try to read all these values from the video
    double _currentVideoTime = 0.0;
    double _fps = 24.0; // If, when we read it, it is 0, use 24 fps
    double _videoDuration = 0.0;
    glm::ivec2 _videoResolution = glm::ivec2(4096, 2048); // Used for the fbos

    // Libmpv
    mpv_handle* _mpvHandle = nullptr;
    mpv_render_context* _mpvRenderContext = nullptr;
    std::unique_ptr<ghoul::opengl::Texture> _frameTexture = nullptr;
    GLuint _fbo = 0; // Our opengl framebuffer where mpv renders to
    int _wakeup = 0; // Signals when libmpv has a new frame ready
    bool _didRender = false; // To know when to swap buffers
    bool _isInitialized = false; // If libmpv has been initialized
    bool _isDestroying = false; // True while libmpv is being torn down; blocks rendering
    bool _isSeeking = false; // Prevent seeking while already seeking
    double _seekThreshold = 1.0; // Threshold where we are officially out of sync

    // Tile handling
    std::map<TileIndex::TileHashKey, Tile> _tileCache; // Cache for rendering 1 frame
    bool _tileIsReady = false;
};

} // namespace openspace
#endif // __OPENSPACE_MODULE_GLOBEBROWSING___TILEPROVIDER__VIDEOTILEPROVIDER___H__
+985
View File
@@ -0,0 +1,985 @@
/*****************************************************************************************
* *
* OpenSpace *
* *
* Copyright (c) 2014-2022 *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy of this *
* software and associated documentation files (the "Software"), to deal in the Software *
* without restriction, including without limitation the rights to use, copy, modify, *
* merge, publish, distribute, sublicense, and/or sell copies of the Software, and to *
* permit persons to whom the Software is furnished to do so, subject to the following *
* conditions: *
* *
* The above copyright notice and this permission notice shall be included in all copies *
* or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A *
* PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT *
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF *
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE *
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
****************************************************************************************/
#include <modules/video/include/videotileprovider.h>

#include <modules/globebrowsing/globebrowsingmodule.h>
#include <modules/globebrowsing/src/memoryawaretilecache.h>
#include <openspace/documentation/documentation.h>
#include <openspace/engine/globals.h>
#include <openspace/engine/globalscallbacks.h>
#include <openspace/engine/moduleengine.h>
#include <openspace/engine/windowdelegate.h>
#include <openspace/util/time.h>
#include <openspace/util/timemanager.h>
#include <ghoul/filesystem/filesystem.h>
#include <algorithm>
#include <cmath>
#include <cstring>
#include <sstream>
namespace {
    // Log category used by LERROR/LWARNING/LINFO in this file
    constexpr std::string_view _loggerCat = "VideoTileProvider";

    constexpr openspace::properties::Property::PropertyInfo FileInfo = {
        "File",
        "File",
        "The file path that is used for this video provider. The file must point to a "
        "video that is then loaded and used for all tiles"
    };

    constexpr openspace::properties::Property::PropertyInfo StartTimeInfo = {
        "StartTime",
        "Start Time",
        "The date and time that the video should start in the format "
        "'YYYY MM DD hh:mm:ss'."
    };

    constexpr openspace::properties::Property::PropertyInfo EndTimeInfo = {
        "EndTime",
        "End Time",
        "The date and time that the video should end in the format "
        "'YYYY MM DD hh:mm:ss'."
    };

    constexpr openspace::properties::Property::PropertyInfo PlaybackModeInfo = {
        "PlaybackMode",
        "Playback Mode",
        "Determines the way the video should be played. The start and end time of the "
        "video can be set, or the video can be played as a loop in real time."
    };

    constexpr openspace::properties::Property::PropertyInfo PlayInfo = {
        "Play",
        "Play",
        "Play video"
    };

    constexpr openspace::properties::Property::PropertyInfo PauseInfo = {
        "Pause",
        "Pause",
        "Pause video"
    };

    constexpr openspace::properties::Property::PropertyInfo GoToStartInfo = {
        "GoToStart",
        "Go To Start",
        "Go to start in video"
    };

    // Asset dictionary schema; the codegen include below generates the
    // corresponding bake<Parameters>() and doc<Parameters>() functions
    struct [[codegen::Dictionary(VideoTileProvider)]] Parameters {
        // [[codegen::verbatim(FileInfo.description)]]
        std::filesystem::path file;

        // [[codegen::verbatim(StartTimeInfo.description)]]
        std::optional<std::string> startTime [[codegen::datetime()]];

        // [[codegen::verbatim(EndTimeInfo.description)]]
        std::optional<std::string> endTime [[codegen::datetime()]];

        enum class PlaybackMode {
            MapToSimulationTime = 0,
            RealTimeLoop
        };

        // The mode of how the video should be played back.
        // Default is video is played back according to the set start and end times.
        std::optional<PlaybackMode> playbackMode;
    };
#include "videotileprovider_codegen.cpp"
} // namespace
namespace openspace {
// Logs the libmpv error string for a failing status code.
// Returns true when `status` indicates success (>= 0).
bool checkMpvError(int status) {
    const bool success = (status >= 0);
    if (!success) {
        LERROR(fmt::format("Libmpv API error: {}", mpv_error_string(status)));
    }
    return success;
}
// Resolves an OpenGL entry point via the window delegate. Handed to libmpv so
// its renderer can load GL functions from our context.
void* getOpenGLProcAddress(void*, const char* name) {
    auto address = global::windowDelegate->openGLProcedureAddress(name);
    return reinterpret_cast<void*>(address);
}
void VideoTileProvider::on_mpv_render_update(void* ctx) {
    // The wakeup flag is set here to enable the mpv_render_context_render
    // path in the main loop (see renderMpv).
    // The pattern here with a static function and a void pointer to the class
    // instance is a common pattern where C++ integrates a C library
    static_cast<VideoTileProvider*>(ctx)->_wakeup = 1;
}
// Registers interest in a libmpv property. Changes arrive later as
// MPV_EVENT_PROPERTY_CHANGE events tagged with `key` as user data.
void VideoTileProvider::observePropertyMpv(std::string name, mpv_format format,
                                           LibmpvPropertyKey key)
{
    int result = mpv_observe_property(
        _mpvHandle,
        static_cast<uint64_t>(key),
        name.c_str(),
        format
    );
    // Consistency fix: the return value was previously ignored, unlike the
    // other libmpv wrappers in this file which log failures
    if (!checkMpvError(result)) {
        LWARNING(fmt::format("Error observing property {}", name));
    }
}
// Thin wrapper around mpv_set_property_string that logs a warning on failure
void VideoTileProvider::setPropertyStringMpv(std::string name, std::string value) {
    const int status = mpv_set_property_string(_mpvHandle, name.c_str(), value.c_str());
    if (!checkMpvError(status)) {
        LWARNING(fmt::format("Error setting property {}", name));
    }
}
// Requests a property value asynchronously; the reply is delivered later as an
// MPV_EVENT_GET_PROPERTY_REPLY event carrying `key` as user data
void VideoTileProvider::getPropertyAsyncMpv(std::string name, mpv_format format,
                                            LibmpvPropertyKey key)
{
    const int status = mpv_get_property_async(
        _mpvHandle,
        static_cast<uint64_t>(key),
        name.c_str(),
        format
    );
    if (!checkMpvError(status)) {
        LWARNING("Could not find property " + name);
    }
}
// Issues an mpv command asynchronously; the completion is reported as an
// MPV_EVENT_COMMAND_REPLY event carrying `key` as user data
void VideoTileProvider::commandAsyncMpv(const char* cmd[], LibmpvPropertyKey key) {
    const int status = mpv_command_async(
        _mpvHandle,
        static_cast<uint64_t>(key),
        cmd
    );
    if (!checkMpvError(status)) {
        LERROR(fmt::format("Could not execute command {}", cmd[0]));
    }
}
documentation::Documentation VideoTileProvider::Documentation() {
    // Generated from the Parameters struct by the codegen pass
    return codegen::doc<Parameters>("video_videotileprovider");
}
// Bakes the asset dictionary, configures the chosen playback mode and hooks the
// provider into the engine's frame callbacks. The actual libmpv initialization
// is deferred to the first postSyncPreDraw so it happens on the render thread.
VideoTileProvider::VideoTileProvider(const ghoul::Dictionary& dictionary)
    : _play(PlayInfo)
    , _pause(PauseInfo)
    , _goToStart(GoToStartInfo)
{
    ZoneScoped

    const Parameters p = codegen::bake<Parameters>(dictionary);
    _videoFile = p.file;

    if (p.playbackMode.has_value()) {
        switch (*p.playbackMode) {
            case Parameters::PlaybackMode::RealTimeLoop:
                _playbackMode = PlaybackMode::RealTimeLoop;
                break;
            case Parameters::PlaybackMode::MapToSimulationTime:
                _playbackMode = PlaybackMode::MapToSimulationTime;
                break;
            default:
                throw ghoul::MissingCaseException();
        }
    }

    if (_playbackMode == PlaybackMode::RealTimeLoop) {
        // Video interaction. Only valid for real time looping
        _play.onChange([this]() { play(); });
        addProperty(_play);
        _pause.onChange([this]() { pause(); });
        addProperty(_pause);
        _goToStart.onChange([this]() { goToStart(); });
        addProperty(_goToStart);
    }
    else if (_playbackMode == PlaybackMode::MapToSimulationTime) {
        if (!p.startTime.has_value() || !p.endTime.has_value()) {
            LERROR("Video tile layer tried to map to simulation time but lacked start or"
                " end time");
            return;
        }
        //_videoDuration = *p.duration;
        _startJ200Time = Time::convertTime(*p.startTime);
        _endJ200Time = Time::convertTime(*p.endTime);
        ghoul_assert(_endJ200Time > _startJ200Time, "Invalid times for video");
        // Re-sync the video position whenever the user jumps in simulation time
        global::timeManager->addTimeJumpCallback([this]() {
            seekToTime(correctVideoPlaybackTime());
        });
    }

    // NOTE(review): these callbacks capture `this` and are appended to global
    // lists; confirm they are unregistered on destruction, otherwise they
    // dangle once this provider is deleted
    global::callback::postSyncPreDraw->emplace_back([this]() {
        // Initialize mpv here to ensure that the opengl context is the same as in for
        // the rendering
        if (!_isInitialized) {
            initializeMpv();
        }
        else if (!_isDestroying) {
            renderMpv();
        }
    });
    global::callback::postDraw->emplace_back([this]() {
        swapBuffersMpv();
    });
}
// Returns the tile for `tileIndex`. All tiles share the single frame texture
// that libmpv renders into, so a tile is just a cached wrapper around it.
Tile VideoTileProvider::tile(const TileIndex& tileIndex) {
    ZoneScoped

    if (!_isInitialized) {
        return Tile();
    }
    // Always check that our framebuffer is ok
    // NOTE(review): this queries the *currently bound* framebuffer, which is
    // not necessarily _fbo — confirm the intended target
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        LINFO("Framebuffer is not complete");
    }
    const uint64_t hash = tileIndex.hashKey();
    // Perf fix: a single lookup instead of find() followed by operator[],
    // which hashed the key twice
    auto it = _tileCache.find(hash);
    if (it == _tileCache.end()) {
        it = _tileCache.emplace(
            hash,
            Tile{ _frameTexture.get(), std::nullopt, Tile::Status::OK }
        ).first;
    }
    return it->second;
}
// Reports availability for a tile: out of range above maxLevel, otherwise
// available exactly when a frame has been rendered into the shared texture
Tile::Status VideoTileProvider::tileStatus(const TileIndex& tileIndex) {
    if (tileIndex.level > maxLevel()) {
        return Tile::Status::OutOfRange;
    }
    return _tileIsReady ? Tile::Status::OK : Tile::Status::Unavailable;
}
TileDepthTransform VideoTileProvider::depthTransform() {
    // Identity transform: video frames carry no depth information
    return { 0.f, 1.f };
}

// Per-frame work happens in the postSyncPreDraw callback instead (see ctor)
void VideoTileProvider::update() {}
// Computes the uv transform that maps this tile's region of the globe into the
// single full-frame video texture, then walks the tile tree with it.
ChunkTile VideoTileProvider::chunkTile(TileIndex tileIndex, int parents, int maxParents) {
    ZoneScoped

    ghoul_assert(_isInitialized, "VideoTileProvider was not initialized");

    // Ascending only decreases the level; the uv transform is computed directly
    // below, so the per-step callback leaves it untouched
    lambda ascendToParent = [](TileIndex& ti, TileUvTransform& uv) {
        ti.level--;
    };

    // At level L the globe is split into 2^L columns and 2^(L-1) rows
    glm::vec2 noOfTiles = { pow(2, tileIndex.level), pow(2, tileIndex.level - 1) };
    glm::vec2 ratios = { 1.f / noOfTiles.x, 1.f / noOfTiles.y };
    float offsetX = ratios.x * static_cast<float>(tileIndex.x);
    // The tiles on the y-axis should be traversed backwards
    float offsetY = ratios.y * (noOfTiles.y - static_cast<float>(tileIndex.y) - 1.f);
    TileUvTransform uvTransform = { glm::vec2(offsetX, offsetY), ratios };

    return traverseTree(tileIndex, parents, maxParents, ascendToParent, uvTransform);
}
// Pauses playback by asynchronously setting mpv's "pause" flag to 1
void VideoTileProvider::pause() {
    // Bug fix: MPV_FORMAT_FLAG expects the data pointer to address an int
    // (0/1); the previous code passed the address of a 1-byte bool, which makes
    // libmpv read 4 bytes from 1 byte of storage
    int isPaused = 1;
    int result = mpv_set_property_async(
        _mpvHandle,
        static_cast<uint64_t>(LibmpvPropertyKey::Pause),
        "pause",
        MPV_FORMAT_FLAG,
        &isPaused
    );
    if (!checkMpvError(result)) {
        LWARNING("Error when pausing video");
    }
}
// Resumes playback by asynchronously clearing mpv's "pause" flag
void VideoTileProvider::play() {
    // Bug fix: mpv_set_property_async requires a non-null data pointer; for
    // MPV_FORMAT_FLAG it must point at an int. Passing nullptr (as before) is
    // invalid and would fail instead of resuming playback.
    int isPaused = 0;
    int result = mpv_set_property_async(
        _mpvHandle,
        static_cast<uint64_t>(LibmpvPropertyKey::Pause),
        "pause",
        MPV_FORMAT_FLAG,
        &isPaused
    );
    if (!checkMpvError(result)) {
        LWARNING("Error when playing video");
    }
}
// Rewinds the video to its first frame
void VideoTileProvider::goToStart() {
    seekToTime(0.0);
}

// Advances exactly one frame (mpv "frame-step" command)
void VideoTileProvider::stepFrameForward() {
    const char* cmd[] = { "frame-step", nullptr };
    commandAsyncMpv(cmd);
}

// Steps back exactly one frame (mpv "frame-back-step" command)
void VideoTileProvider::stepFrameBackward() {
    const char* cmd[] = { "frame-back-step", nullptr };
    commandAsyncMpv(cmd);
}
void VideoTileProvider::initializeMpv() {
_mpvHandle = mpv_create();
if (!_mpvHandle) {
LINFO("LibMpv: mpv context init failed");
}
// Set libmpv flags before initializing
// See order at https://github.com/mpv-player/mpv/blob/master/libmpv/client.h#L420
// Avoiding async calls in uninitialized state
if (_playbackMode == PlaybackMode::RealTimeLoop) {
// Loop video
// https://mpv.io/manual/master/#options-loop
setPropertyStringMpv("loop", "");
}
// Allow only OpenGL (requires OpenGL 2.1+ or GLES 2.0+)
// https://mpv.io/manual/master/#options-gpu-api
setPropertyStringMpv("gpu-api", "opengl");
// Enable hardware decoding
// https://mpv.io/manual/master/#options-hwdec
setPropertyStringMpv("hwdec", "auto");
// Enable direct rendering (default: auto). If this is set to yes, the video will be
// decoded directly to GPU video memory (or staging buffers).
// https://mpv.io/manual/master/#options-vd-lavc-dr
setPropertyStringMpv("vd-lavc-dr", "yes");
// Print libmpv couts to the terminal
// https://mpv.io/manual/master/#options-terminal
setPropertyStringMpv("terminal", "yes");
// Control how long before video display target time the frame should be rendered
// https://mpv.io/manual/master/#options-video-timing-offset
setPropertyStringMpv("video-timing-offset", "0");
//setPropertyStringMpv("load-stats-overlay", "");
//mpv_set_property_string(_mpvHandle, "script-opts", "autoload-disabled=yes");
// Verbose mode
//mpv_set_property_string(_mpvHandle, "msg-level", "all=v");
//mpv_request_log_messages(_mpvHandle, "debug");
if (mpv_initialize(_mpvHandle) < 0) {
LINFO("mpv init failed");
}
mpv_opengl_init_params gl_init_params{ getOpenGLProcAddress, nullptr };
int adv = 1; // Use libmpv advanced mode since we will use the update callback
// Decouple mpv from waiting to get the correct fps. Use with flag video-timing-offset
// set to 0
int blockTime = 0;
mpv_render_param params[]{
{MPV_RENDER_PARAM_API_TYPE, const_cast<char*>(MPV_RENDER_API_TYPE_OPENGL)},
{MPV_RENDER_PARAM_OPENGL_INIT_PARAMS, &gl_init_params},
{MPV_RENDER_PARAM_ADVANCED_CONTROL, &adv},
{MPV_RENDER_PARAM_BLOCK_FOR_TARGET_TIME, &blockTime},
{MPV_RENDER_PARAM_INVALID, nullptr}
};
// This makes mpv use the currently set GL context. It will use the callback
// (passed via params) to resolve GL builtin functions, as well as extensions.
if (mpv_render_context_create(&_mpvRenderContext, _mpvHandle, params) < 0) {
LINFO("Failed to initialize libmpv OpenGL context");
}
// When there is a need to call mpv_render_context_update(), which can
// request a new frame to be rendered.
// (Separate from the normal event handling mechanism for the sake of
// users which run OpenGL on a different thread.)
mpv_render_context_set_update_callback(
_mpvRenderContext,
on_mpv_render_update,
this
);
// Load file
const char* cmd[] = { "loadfile", _videoFile.string().c_str(), nullptr };
int result = mpv_command(_mpvHandle, cmd);
if (!checkMpvError(result)) {
LERROR("Could not open video file");
return;
}
//Create FBO to render video into
createFBO(_videoResolution.x, _videoResolution.y);
//Observe video parameters
observePropertyMpv("video-params", MPV_FORMAT_NODE, LibmpvPropertyKey::Params);
observePropertyMpv("pause", MPV_FORMAT_FLAG, LibmpvPropertyKey::Pause);
observePropertyMpv("time-pos", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Time);
observePropertyMpv("duration", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Duration);
observePropertyMpv("height", MPV_FORMAT_INT64, LibmpvPropertyKey::Height);
observePropertyMpv("width", MPV_FORMAT_INT64, LibmpvPropertyKey::Width);
observePropertyMpv("metadata", MPV_FORMAT_NODE, LibmpvPropertyKey::Meta);
observePropertyMpv("container-fps", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Fps);
if (_playbackMode == PlaybackMode::MapToSimulationTime) {
pause();
}
_isInitialized = true;
}
// True when the current simulation time lies inside [start, end].
// (The misspelled name matches the declaration in the header.)
bool VideoTileProvider::isWithingStartEndTime() const {
    const double now = global::timeManager->time().j2000Seconds();
    return _startJ200Time <= now && now <= _endJ200Time;
}
// Maps the current simulation time onto the video's local timeline, clamping to
// the first/last frame outside the configured [start, end] interval.
// Returns the target video time in seconds.
double VideoTileProvider::correctVideoPlaybackTime() const {
    const double now = global::timeManager->time().j2000Seconds();
    // Idiom: std::clamp replaces the manual three-branch saturation
    // (the ctor asserts _endJ200Time > _startJ200Time, so no division by zero)
    const double percentage = std::clamp(
        (now - _startJ200Time) / (_endJ200Time - _startJ200Time),
        0.0,
        1.0
    );
    return percentage * _videoDuration;
}
// Seeks the video to `time` (seconds), unless we are already close enough to it
// or a previous seek is still in flight.
void VideoTileProvider::seekToTime(double time) {
    // Prevent from seeking to the same time multiple times in a row
    // Fix: qualify std::abs so the double overload is guaranteed; unqualified
    // abs can resolve to the C int overload and truncate the difference
    bool seekIsDifferent = std::abs(time - _currentVideoTime) > _seekThreshold;
    if (seekIsDifferent && !_isSeeking) {
        // Pause while seeking
        // NOTE(review): playback is not resumed after the seek completes —
        // confirm this is intentional (frame stepping drives playback in
        // map-to-simulation-time mode)
        pause();
        std::string timeString = std::to_string(time);
        const char* params = timeString.c_str();
        const char* cmd[] = { "seek", params, "absolute", nullptr };
        commandAsyncMpv(cmd, LibmpvPropertyKey::Seek);
        _isSeeking = true;
    }
}
// Drives libmpv once per frame: in map-to-simulation-time mode the video is
// stepped/seeked to follow the simulation clock; then pending mpv events are
// drained and, if libmpv signaled a new frame, it is rendered into our FBO.
void VideoTileProvider::renderMpv() {
    if (_playbackMode == PlaybackMode::MapToSimulationTime) {
        // If we are in valid times, step frames accordingly
        if (isWithingStartEndTime()) {
            double now = global::timeManager->time().j2000Seconds();
            double deltaTime = now - _timeAtLastRender;
            if (deltaTime > _frameDuration) {
                // Stepping forwards
                stepFrameForward();
                _timeAtLastRender = now;
            }
            else if (deltaTime < -_frameDuration) {
                // Stepping backwards
                stepFrameBackward();
                _timeAtLastRender = now;
            }
        }
        // Make sure we are at the correct time
        // Fix: std::abs guarantees the double overload; unqualified abs may
        // pick the C int overload and truncate
        double time = correctVideoPlaybackTime();
        bool shouldSeek = std::abs(time - _currentVideoTime) > _seekThreshold;
        if (shouldSeek) {
            seekToTime(time); // We end up two frames too late
        }
    }

    handleMpvEvents();

    if (_wakeup) {
        if ((mpv_render_context_update(_mpvRenderContext) & MPV_RENDER_UPDATE_FRAME)) {
            // See render_gl.h on what OpenGL environment mpv expects, and other API
            // details. This function fills the fbo and texture with data, after it
            // we can get the data on the GPU, not the CPU
            int fboInt = static_cast<int>(_fbo);
            mpv_opengl_fbo mpfbo{
                fboInt,
                _videoResolution.x,
                _videoResolution.y, 0
            };
            int flip_y{ 1 };

            mpv_render_param params[] = {
                {MPV_RENDER_PARAM_OPENGL_FBO, &mpfbo},
                {MPV_RENDER_PARAM_FLIP_Y, &flip_y},
                {MPV_RENDER_PARAM_INVALID, nullptr}
            };
            // This "renders" to the video_framebuffer "linked by ID" in the
            // params_fbo
            mpv_render_context_render(_mpvRenderContext, params);

            /* TODO: remove this comment in case we never encounter this issue again */
            // We have to set the Viewport on every cycle because
            // mpv_render_context_render internally rescales the fb of the context(?!)...
            //glm::ivec2 window = global::windowDelegate->currentDrawBufferResolution();
            //glViewport(0, 0, window.x, window.y);
            _didRender = true;
            // Since all tiles use the same texture, all tiles are ready now
            _tileIsReady = true;
        }
    }
}
// Drains libmpv's event queue without blocking (timeout 0) and reacts to the
// events this provider cares about. Property-change notifications trigger an
// asynchronous read whose reply is handled in handleMpvProperties().
void VideoTileProvider::handleMpvEvents() {
    while (_mpvHandle) {
        mpv_event* event = mpv_wait_event(_mpvHandle, 0.0);
        if (event->event_id == MPV_EVENT_NONE) {
            return;
        }
        switch (event->event_id) {
            case MPV_EVENT_VIDEO_RECONFIG: {
                // Retrieve the new video size
                getPropertyAsyncMpv("width", MPV_FORMAT_INT64, LibmpvPropertyKey::Width);
                getPropertyAsyncMpv("height", MPV_FORMAT_INT64, LibmpvPropertyKey::Height);
                break;
            }
            case MPV_EVENT_PROPERTY_CHANGE: {
                mpv_event_property* prop = (mpv_event_property*)event->data;
                // For every observed property that changed, request its value
                // asynchronously; the reply arrives as GET_PROPERTY_REPLY below
                if (strcmp(prop->name, "video-params") == 0 &&
                    prop->format == MPV_FORMAT_NODE)
                {
                    getPropertyAsyncMpv("video-params", MPV_FORMAT_NODE, LibmpvPropertyKey::Params);
                }
                if (strcmp(prop->name, "time-pos") == 0 &&
                    prop->format == MPV_FORMAT_DOUBLE)
                {
                    getPropertyAsyncMpv("time-pos", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Time);
                }
                if (strcmp(prop->name, "duration") == 0 &&
                    prop->format == MPV_FORMAT_DOUBLE)
                {
                    getPropertyAsyncMpv("duration", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Duration);
                }
                if (strcmp(prop->name, "container-fps") == 0 &&
                    prop->format == MPV_FORMAT_DOUBLE)
                {
                    getPropertyAsyncMpv("container-fps", MPV_FORMAT_DOUBLE, LibmpvPropertyKey::Fps);
                }
                if (strcmp(prop->name, "pause") == 0 &&
                    prop->format == MPV_FORMAT_FLAG)
                {
                    getPropertyAsyncMpv("pause", MPV_FORMAT_FLAG, LibmpvPropertyKey::Pause);
                }
                if (strcmp(prop->name, "height") == 0 &&
                    prop->format == MPV_FORMAT_INT64)
                {
                    getPropertyAsyncMpv("height", MPV_FORMAT_INT64, LibmpvPropertyKey::Height);
                }
                if (strcmp(prop->name, "width") == 0 &&
                    prop->format == MPV_FORMAT_INT64)
                {
                    getPropertyAsyncMpv("width", MPV_FORMAT_INT64, LibmpvPropertyKey::Width);
                }
                if (strcmp(prop->name, "metadata") == 0 &&
                    prop->format == MPV_FORMAT_NODE)
                {
                    getPropertyAsyncMpv("metadata", MPV_FORMAT_NODE, LibmpvPropertyKey::Meta);
                }
                break;
            }
            case MPV_EVENT_LOG_MESSAGE: {
                struct mpv_event_log_message* msg =
                    (struct mpv_event_log_message*)event->data;
                std::stringstream ss;
                ss << "[" << msg->prefix << "] " << msg->level << ": " << msg->text;
                LINFO(ss.str());
                break;
            }
            case MPV_EVENT_COMMAND_REPLY: {
                switch (event->reply_userdata) {
                    case static_cast<uint64_t>(LibmpvPropertyKey::Command): {
                        // Bug fix: previously tested the address of the
                        // checkMpvError function (always truthy) instead of
                        // calling it with the reply's error code
                        if (!checkMpvError(event->error)) {
                            LINFO("Command Error");
                        }
                        break;
                    }
                    case static_cast<uint64_t>(LibmpvPropertyKey::Seek): {
                        // Same bug fix as above; the seek guard is cleared
                        // regardless of the outcome
                        if (!checkMpvError(event->error)) {
                            LINFO("Seek Error");
                        }
                        _isSeeking = false;
                        break;
                    }
                    default: {
                        break;
                    }
                }
                break;
            }
            case MPV_EVENT_GET_PROPERTY_REPLY: {
                int result = event->error;
                if (!checkMpvError(result)) {
                    LWARNING(fmt::format(
                        "Error while getting property of type: {}", event->reply_userdata
                    ));
                    break;
                }
                handleMpvProperties(event);
                break;
            }
            default: {
                // Ignore uninteresting or unknown events.
                break;
            }
        }
    }
}
// Dispatches an MPV_EVENT_GET_PROPERTY_REPLY based on the LibmpvPropertyKey we
// attached when the property value was requested.
void VideoTileProvider::handleMpvProperties(mpv_event* event) {
    switch (static_cast<LibmpvPropertyKey>(event->reply_userdata)) {
        case LibmpvPropertyKey::Duration: {
            if (!event->data) {
                LERROR("Could not find duration property");
                break;
            }
            struct mpv_event_property* property = (struct mpv_event_property*)event->data;
            double* duration = static_cast<double*>(property->data);
            if (!duration) {
                LERROR("Could not find duration property");
                break;
            }
            _videoDuration = *duration;
            // NOTE(review): in RealTimeLoop mode both J2000 bounds are 0, which
            // makes _frameDuration 0 — confirm this is intended for that mode
            _frameDuration = (1.0 / _fps) * ((_endJ200Time - _startJ200Time) / _videoDuration);
            if (_playbackMode == PlaybackMode::MapToSimulationTime) {
                seekToTime(correctVideoPlaybackTime());
            }
            LINFO(fmt::format("Duration: {}", *duration));
            break;
        }
        case LibmpvPropertyKey::Height: {
            if (!event->data) {
                LERROR("Could not find height property");
                break;
            }
            struct mpv_event_property* property = (struct mpv_event_property*)event->data;
            int* height = static_cast<int*>(property->data);
            if (!height) {
                LERROR("Could not find height property");
                break;
            }
            if (*height == _videoResolution.y) {
                break;
            }
            LINFO(fmt::format("New height: {}", *height));
            if (*height > 0 && _videoResolution.x > 0 && _fbo > 0) {
                resizeFBO(_videoResolution.x, *height);
            }
            break;
        }
        case LibmpvPropertyKey::Width: {
            if (!event->data) {
                // Bug fix: the message previously said "height"
                LERROR("Could not find width property");
                break;
            }
            struct mpv_event_property* property = (struct mpv_event_property*)event->data;
            int* width = static_cast<int*>(property->data);
            if (!width) {
                LERROR("Could not find width property");
                break;
            }
            // Bug fix: the new width was compared against _videoResolution.y
            // (the height), so an unchanged width was never detected
            if (*width == _videoResolution.x) {
                break;
            }
            LINFO(fmt::format("New width: {}", *width));
            if (*width > 0 && _videoResolution.y > 0 && _fbo > 0) {
                resizeFBO(*width, _videoResolution.y);
            }
            break;
        }
        case LibmpvPropertyKey::Meta: {
            if (!event->data) {
                LERROR("Could not find video parameters");
                break;
            }
            mpv_node node;
            int result = mpv_event_to_node(&node, event);
            if (!checkMpvError(result)) {
                LWARNING("Could not find video parameters of video");
            }
            LINFO("Printing meta data reply");
            if (node.format == MPV_FORMAT_NODE_MAP) {
                for (int n = 0; n < node.u.list->num; n++) {
                    if (node.u.list->values[n].format == MPV_FORMAT_STRING) {
                        LINFO(node.u.list->values[n].u.string);
                    }
                }
            }
            else {
                LWARNING("No meta data could be read");
            }
            // Leak fix: mpv_event_to_node allocates; release the node contents
            mpv_free_node_contents(&node);
            break;
        }
        case LibmpvPropertyKey::Params: {
            if (!event->data) {
                LINFO("Could not find video parameters");
                break;
            }
            mpv_node videoParams;
            int result = mpv_event_to_node(&videoParams, event);
            if (!checkMpvError(result)) {
                LWARNING("Could not find video parameters of video");
            }
            if (videoParams.format == MPV_FORMAT_NODE_ARRAY ||
                videoParams.format == MPV_FORMAT_NODE_MAP)
            {
                mpv_node_list* list = videoParams.u.list;
                mpv_node width, height;
                bool foundWidth = false;
                bool foundHeight = false;
                for (int i = 0; i < list->num; ++i) {
                    if (foundWidth && foundHeight) {
                        break;
                    }
                    // Bug fix: the keys were previously compared with ==, which
                    // compares char pointers, not string contents, so "w"/"h"
                    // were essentially never matched
                    if (strcmp(list->keys[i], "w") == 0) {
                        width = list->values[i];
                        foundWidth = true;
                    }
                    else if (strcmp(list->keys[i], "h") == 0) {
                        height = list->values[i];
                        foundHeight = true;
                    }
                }

                if (!foundWidth || !foundHeight) {
                    LERROR("Could not find width or height params");
                    mpv_free_node_contents(&videoParams);
                    return;
                }

                int w = -1;
                int h = -1;
                if (width.format == MPV_FORMAT_INT64) {
                    w = static_cast<int>(width.u.int64);
                }
                if (height.format == MPV_FORMAT_INT64) {
                    h = static_cast<int>(height.u.int64);
                }
                if (w == -1 || h == -1) {
                    LERROR("Invalid width or height params");
                    mpv_free_node_contents(&videoParams);
                    return;
                }
                resizeFBO(w, h);
            }
            // Leak fix: release the node contents allocated by mpv_event_to_node
            mpv_free_node_contents(&videoParams);
            break;
        }
        case LibmpvPropertyKey::Time: {
            if (!event->data) {
                LERROR("Could not find playback time property");
                break;
            }
            struct mpv_event_property* property = (struct mpv_event_property*)event->data;
            double* time = static_cast<double*>(property->data);
            if (!time) {
                LERROR("Could not find playback time property");
                break;
            }
            _currentVideoTime = *time;
            // Time has changed - we don't have a texture yet
            _tileIsReady = false;
            break;
        }
        case LibmpvPropertyKey::Fps: {
            if (!event->data) {
                LERROR("Could not find fps property");
                break;
            }
            struct mpv_event_property* property = (struct mpv_event_property*)event->data;
            double* fps = static_cast<double*>(property->data);
            // Bug fix: the null check must come before the dereference; the
            // previous order read *fps first
            if (!fps) {
                LERROR("Could not find fps property");
                break;
            }
            if (*fps < glm::epsilon<double>()) {
                LWARNING("Detected fps was 0. Falling back on 24 fps");
                break;
            }
            _fps = *fps;
            LINFO(fmt::format("Detected fps: {}", *fps));
            _frameDuration = (1.0 / _fps) * ((_endJ200Time - _startJ200Time) / _videoDuration);
            _seekThreshold = 2.0 * (1.0 / _fps);
            break;
        }
        case LibmpvPropertyKey::Pause: {
            // Observed for bookkeeping only; no state is read from the reply
            if (!event->data) {
                LERROR("Could not find pause property");
                break;
            }
            break;
        }
        default: {
            throw ghoul::MissingCaseException();
        }
    }
}
// Reports the buffer swap back to libmpv (required in advanced control mode)
void VideoTileProvider::swapBuffersMpv() {
    // Only swap buffers if there was a frame rendered and there is a new frame waiting
    if (_wakeup && _didRender) {
        mpv_render_context_report_swap(_mpvRenderContext);
        _wakeup = 0;
        // Idiom fix: _didRender is a bool; assign false rather than 0
        _didRender = false;
    }
}
// Releases the libmpv render context, the mpv handle and the GL framebuffer
void VideoTileProvider::cleanUpMpv() {
    _isDestroying = true;
    // Destroy the GL renderer and all of the GL objects it allocated. If video
    // is still running, the video track will be deselected.
    mpv_render_context_free(_mpvRenderContext);
    mpv_destroy(_mpvHandle);
    glDeleteFramebuffers(1, &_fbo);
    // Defensive: clear the stale handles so a subsequent initializeMpv() (see
    // reset()) starts from a clean state and a double free is impossible
    _mpvRenderContext = nullptr;
    _mpvHandle = nullptr;
    _fbo = 0;
}
// The video is a single full-frame texture, so only one level of data exists
int VideoTileProvider::minLevel() {
    return 1;
}

int VideoTileProvider::maxLevel() {
    // This is the level where above the tile is marked as unavailable and is no longer
    // displayed. Since we want to display the tiles at all times we set the max level
    return 1337;
}
void VideoTileProvider::reset() {
if (_videoFile.empty()) {
return;
}
cleanUpMpv();
initializeMpv();
}
float VideoTileProvider::noDataValueAsFloat() {
    // NOTE(review): std::numeric_limits<float>::min() is the smallest positive
    // normal value, not the most negative float — confirm lowest() was not
    // intended here
    return std::numeric_limits<float>::min();
}

// All initialization is deferred to the first postSyncPreDraw (see ctor)
void VideoTileProvider::internalInitialize() {}
void VideoTileProvider::createFBO(int width, int height) {
LINFO(fmt::format("Creating new FBO with width: {} and height: {}", width, height));
if (width <= 0 || height <= 0) {
LERROR("Cannot create empty fbo");
return;
}
// Update resolution of video
_videoResolution = glm::ivec2(width, height);
glGenFramebuffers(1, &_fbo);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
_frameTexture = std::make_unique<ghoul::opengl::Texture>(
glm::uvec3(width, height, 1),
GL_TEXTURE_2D
);
_frameTexture->uploadTexture();
// Configure
_frameTexture->bind();
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Disable mipmaps
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
// Bind texture to framebuffer
glFramebufferTexture2D(
GL_FRAMEBUFFER,
GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D,
*_frameTexture,
0
);
// Unbind FBO
glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
void VideoTileProvider::resizeFBO(int width, int height) {
    // Recreates the FBO and frame texture at a new video resolution.
    // Check for a no-op first so we do not log a resize that never happens
    if (width == _videoResolution.x && height == _videoResolution.y) {
        return;
    }
    LINFO(fmt::format("Resizing FBO with width: {} and height: {}", width, height));

    // Update resolution of video
    _videoResolution = glm::ivec2(width, height);

    // Delete old FBO and texture
    glDeleteFramebuffers(1, &_fbo);
    _frameTexture.reset(nullptr);

    // Cached tiles reference the old texture; invalidate them
    _tileCache.clear();
    _tileIsReady = false;

    createFBO(width, height);
}
// Intentionally empty; mpv/GL teardown happens in internalDeinitialize()
VideoTileProvider::~VideoTileProvider() {}
void VideoTileProvider::internalDeinitialize() {
    // Release the mpv handles and GL resources owned by this provider
    cleanUpMpv();
}
} // namespace openspace::video
+30
View File
@@ -0,0 +1,30 @@
-- Test asset: a video layer on Earth's color layers, mapped to simulation time
-- NOTE(review): File is a developer-local path; replace before sharing the asset
local layer = {
  Identifier = "TestVideo",
  File = "C:/Users/ylvaselling/Documents/Work/Testmovies/result.mp4",
  StartTime = '2023 01 29 20:00:00',
  EndTime = '2023 01 29 21:00:00',
  Name = "Test Video",
  PlaybackMode = "MapToSimulationTime",
  Enabled = asset.enabled,
  Type = "VideoTileLayer",
  Description = [[Testing video]]
}

asset.onInitialize(function()
  openspace.globebrowsing.addLayer("Earth", "ColorLayers", layer)
end)

asset.onDeinitialize(function()
  openspace.globebrowsing.deleteLayer("Earth", "ColorLayers", layer)
end)

asset.export("layer", layer)

asset.meta = {
  -- Fixed: Name/Description previously copied verbatim from a GIBS Aqua Modis asset
  Name = "Test Video",
  Version = "1.1",
  Description = "Testing video",
  -- TODO(review): Author/URL/License were copied from the GIBS template; update
  -- them to describe this test video
  Author = "NASA EOSDIS Global Imagery Browse Services",
  URL = "https://earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/gibs",
  License = "NASA"
}
+27
View File
@@ -0,0 +1,27 @@
-- Test asset: a looping video layer on Earth's color layers
-- NOTE(review): File is a developer-local path; replace before sharing the asset
local layer = {
  Identifier = "TestVideoLoop",
  File = "C:/Users/ylvaselling/Documents/Work/Testmovies/black_carbon_sulfate.mp4",
  Name = "Test Video Loop",
  Enabled = asset.enabled,
  Type = "VideoTileLayer",
  Description = [[Testing video]]
}

asset.onInitialize(function()
  openspace.globebrowsing.addLayer("Earth", "ColorLayers", layer)
end)

asset.onDeinitialize(function()
  openspace.globebrowsing.deleteLayer("Earth", "ColorLayers", layer)
end)

asset.export("layer", layer)

asset.meta = {
  -- Fixed: Name/Description previously copied verbatim from a GIBS Aqua Modis asset
  Name = "Test Video Loop",
  Version = "1.1",
  Description = "Testing video",
  -- TODO(review): Author/URL/License were copied from the GIBS template; update
  -- them to describe this test video
  Author = "NASA EOSDIS Global Imagery Browse Services",
  URL = "https://earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/gibs",
  License = "NASA"
}
+27
View File
@@ -0,0 +1,27 @@
-- Test asset: a looping Science On A Sphere video layer on Earth's color layers
-- NOTE(review): File is a developer-local path; replace before sharing the asset
local layer = {
  Identifier = "SosLoopTest",
  File = "C:/Users/ylvaselling/Documents/Work/Testmovies/chlorophyll_model_2048.mp4",
  Name = "Science On A Sphere Loop Video",
  Enabled = asset.enabled,
  Type = "VideoTileLayer",
  Description = [[Testing video]]
}

asset.onInitialize(function()
  openspace.globebrowsing.addLayer("Earth", "ColorLayers", layer)
end)

asset.onDeinitialize(function()
  openspace.globebrowsing.deleteLayer("Earth", "ColorLayers", layer)
end)

asset.export("layer", layer)

asset.meta = {
  -- Fixed: Name/Description previously copied verbatim from a GIBS Aqua Modis asset
  Name = "Science On A Sphere Loop Video",
  Version = "1.1",
  Description = "Testing video",
  -- TODO(review): Author/URL/License were copied from the GIBS template; update
  -- them to describe this test video
  Author = "NASA EOSDIS Global Imagery Browse Services",
  URL = "https://earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/gibs",
  License = "NASA"
}
@@ -0,0 +1,27 @@
-- Test asset: a looping Science On A Sphere video layer on Earth's color layers
-- NOTE(review): File is a developer-local path; replace before sharing the asset
local layer = {
  Identifier = "SosLoop3",
  File = "C:/Users/ylvaselling/Documents/Work/Testmovies/air_traffic_2048.mp4",
  Name = "Science On A Sphere Loop Video",
  Enabled = asset.enabled,
  Type = "VideoTileLayer",
  Description = [[Testing video]]
}

asset.onInitialize(function()
  openspace.globebrowsing.addLayer("Earth", "ColorLayers", layer)
end)

asset.onDeinitialize(function()
  openspace.globebrowsing.deleteLayer("Earth", "ColorLayers", layer)
end)

asset.export("layer", layer)

asset.meta = {
  -- Fixed: Name/Description previously copied verbatim from a GIBS Aqua Modis asset
  Name = "Science On A Sphere Loop Video",
  Version = "1.1",
  Description = "Testing video",
  -- TODO(review): Author/URL/License were copied from the GIBS template; update
  -- them to describe this test video
  Author = "NASA EOSDIS Global Imagery Browse Services",
  URL = "https://earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/gibs",
  License = "NASA"
}
+27
View File
@@ -0,0 +1,27 @@
-- Test asset: a looping Science On A Sphere video layer on Earth's color layers
-- NOTE(review): File is a developer-local path; replace before sharing the asset
local layer = {
  Identifier = "SosLoop2",
  File = "C:/Users/ylvaselling/Documents/Work/Testmovies/2048_jpg-2048x1024.mp4",
  Name = "Science On A Sphere Loop Video",
  Enabled = asset.enabled,
  Type = "VideoTileLayer",
  Description = [[Testing video]]
}

asset.onInitialize(function()
  openspace.globebrowsing.addLayer("Earth", "ColorLayers", layer)
end)

asset.onDeinitialize(function()
  openspace.globebrowsing.deleteLayer("Earth", "ColorLayers", layer)
end)

asset.export("layer", layer)

asset.meta = {
  -- Fixed: Name/Description previously copied verbatim from a GIBS Aqua Modis asset
  Name = "Science On A Sphere Loop Video",
  Version = "1.1",
  Description = "Testing video",
  -- TODO(review): Author/URL/License were copied from the GIBS template; update
  -- them to describe this test video
  Author = "NASA EOSDIS Global Imagery Browse Services",
  URL = "https://earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/gibs",
  License = "NASA"
}
+30
View File
@@ -0,0 +1,30 @@
-- Test asset: a Science On A Sphere video layer mapped to simulation time
-- NOTE(review): File is a developer-local path; replace before sharing the asset
local layer = {
  Identifier = "ScienceOnASphere",
  File = "C:/Users/ylvaselling/Documents/Work/Testmovies/4096-4.mp4",
  StartTime = '2004 01 01 00:00:00',
  EndTime = '2004 06 05 20:00:00',
  Name = "Science On A Sphere Video",
  PlaybackMode = "MapToSimulationTime",
  Enabled = asset.enabled,
  Type = "VideoTileLayer",
  Description = [[Testing video]]
}

asset.onInitialize(function()
  openspace.globebrowsing.addLayer("Earth", "ColorLayers", layer)
end)

asset.onDeinitialize(function()
  openspace.globebrowsing.deleteLayer("Earth", "ColorLayers", layer)
end)

asset.export("layer", layer)

asset.meta = {
  -- Fixed: Name/Description previously copied verbatim from a GIBS Aqua Modis asset
  Name = "Science On A Sphere Video",
  Version = "1.1",
  Description = "Testing video",
  -- TODO(review): Author/URL/License were copied from the GIBS template; update
  -- them to describe this test video
  Author = "NASA EOSDIS Global Imagery Browse Services",
  URL = "https://earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/gibs",
  License = "NASA"
}
+26
View File
@@ -0,0 +1,26 @@
-- Test asset: a static image layer (capitals overlay) on Earth's color layers
-- NOTE(review): FilePath is a developer-local path; replace before sharing the asset
local layer = {
  Identifier = "Capitals",
  FilePath = "C:/Users/ylvaselling/Documents/Work/Testmovies/capitals/4096.png",
  Name = "Capitals",
  Enabled = asset.enabled,
  Description = [[Testing video]]
}

asset.onInitialize(function()
  openspace.globebrowsing.addLayer("Earth", "ColorLayers", layer)
end)

asset.onDeinitialize(function()
  openspace.globebrowsing.deleteLayer("Earth", "ColorLayers", layer)
end)

asset.export("layer", layer)

asset.meta = {
  -- Fixed: Name/Description previously copied verbatim from a GIBS Aqua Modis asset
  Name = "Capitals",
  Version = "1.1",
  Description = "Testing video",
  -- TODO(review): Author/URL/License were copied from the GIBS template; update
  -- them to describe this test layer
  Author = "NASA EOSDIS Global Imagery Browse Services",
  URL = "https://earthdata.nasa.gov/eosdis/science-system-description/eosdis-components/gibs",
  License = "NASA"
}
+10 -17
View File
@@ -23,6 +23,8 @@
****************************************************************************************/
#include <modules/video/videomodule.h>
#include <modules/video/include/videotileprovider.h>
#include <modules/globebrowsing/src/tileprovider/tileprovider.h>
#include <openspace/util/factorymanager.h>
#include <openspace/documentation/documentation.h>
#include <openspace/scripting/lualibrary.h>
@@ -55,30 +57,21 @@ VideoModule::VideoModule()
void VideoModule::internalInitialize(const ghoul::Dictionary& dict) {
    const Parameters p = codegen::bake<Parameters>(dict);
    using namespace globebrowsing;

    _enabled = p.enabled.value_or(_enabled);

    // Register the video tile provider with the globebrowsing TileProvider
    // factory so layers of Type "VideoTileLayer" can be instantiated.
    // (Previously this registration sat inside a commented-out block copied
    // from the skybrowser module, together with dead skybrowser code.)
    ghoul::TemplateFactory<TileProvider>* fTileProvider =
        FactoryManager::ref().factory<TileProvider>();
    ghoul_assert(fTileProvider, "TileProvider factory was not created");
    fTileProvider->registerClass<VideoTileProvider>("VideoTileLayer");
}
std::vector<documentation::Documentation> VideoModule::documentations() const {
    // Only the video tile provider contributes documentation for this module.
    // (Removed: dead commented-out skybrowser returns and an unreachable second
    // return statement left over from a merge.)
    return {
        VideoTileProvider::Documentation()
    };
}