Try to sync video

This commit is contained in:
rock88 2020-06-25 22:43:41 +03:00
parent 698d138b9f
commit 3b5254aad8
10 changed files with 135 additions and 76 deletions

View file

@ -287,6 +287,7 @@
367CD959245DE25F00A95738 /* StreamWindow.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = StreamWindow.hpp; sourceTree = "<group>"; };
367D2D7224829A0800A946F4 /* LogsWindow.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = LogsWindow.cpp; sourceTree = "<group>"; };
367D2D7324829A0800A946F4 /* LogsWindow.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = LogsWindow.hpp; sourceTree = "<group>"; };
367F117624A3EEB30084C632 /* IAVFrameSyncDrawer.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = IAVFrameSyncDrawer.hpp; sourceTree = "<group>"; };
3689D6D0248EBEFA0008CB75 /* build.yml */ = {isa = PBXFileReference; lastKnownFileType = text.yaml; name = build.yml; path = .github/workflows/build.yml; sourceTree = "<group>"; };
3689D6DB249154F90008CB75 /* GamepadMapper.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = GamepadMapper.cpp; sourceTree = "<group>"; };
3689D6DC249154F90008CB75 /* GamepadMapper.hpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.h; path = GamepadMapper.hpp; sourceTree = "<group>"; };
@ -691,6 +692,7 @@
3661D2F72469D1940060EE24 /* FFmpegVideoDecoder.cpp */,
3661D2F82469D1940060EE24 /* FFmpegVideoDecoder.hpp */,
3661D2FA2469D1E50060EE24 /* IFFmpegVideoDecoder.hpp */,
367F117624A3EEB30084C632 /* IAVFrameSyncDrawer.hpp */,
);
path = ffmpeg;
sourceTree = "<group>";

View file

@ -244,9 +244,7 @@ void MoonlightSession::stop(int terminate_app) {
void MoonlightSession::draw() {
if (m_video_decoder && m_video_renderer) {
if (auto frame = m_video_decoder->frame()) {
m_video_renderer->draw(m_config.width, m_config.height, frame);
}
m_video_renderer->draw();
m_session_stats.video_decode_stats = *m_video_decoder->video_decode_stats();
m_session_stats.video_render_stats = *m_video_renderer->video_render_stats();

View file

@ -9,17 +9,12 @@
#define DECODER_BUFFER_SIZE 92 * 1024 * 2
FFmpegVideoDecoder::FFmpegVideoDecoder() {
pthread_mutex_init(&m_mutex, NULL);
FFmpegVideoDecoder::FFmpegVideoDecoder(IAVFrameSyncDrawer* drawer): IFFmpegVideoDecoder(drawer) {
}
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
pthread_mutex_destroy(&m_mutex);
cleanup();
if (m_hardware_video_decoder) {
delete m_hardware_video_decoder;
}
}
int FFmpegVideoDecoder::setup(int video_format, int width, int height, int redraw_rate, void *context, int dr_flags) {
@ -103,13 +98,15 @@ int FFmpegVideoDecoder::setup(int video_format, int width, int height, int redra
return -1;
}
if (m_hardware_video_decoder) {
m_hardware_video_decoder->prepare_decoder_context(m_decoder_context, nullptr);
}
return DR_OK;
}
void FFmpegVideoDecoder::stop() {
if (m_semaphore.is_waiting()) {
m_semaphore.notify();
}
}
void FFmpegVideoDecoder::cleanup() {
if (m_decoder_context) {
avcodec_close(m_decoder_context);
@ -178,11 +175,14 @@ int FFmpegVideoDecoder::submit_decode_unit(PDECODE_UNIT decode_unit) {
m_video_decode_stats.total_decode_time += (m_frames_in - m_frames_out) * (1000 / m_stream_fps);
m_video_decode_stats.decoded_frames++;
if (pthread_mutex_lock(&m_mutex) == 0) {
m_frame = get_frame(true);
// Push event!!
pthread_mutex_unlock(&m_mutex);
AVFrame *frame = get_frame(true);
if (frame) {
m_drawer->sync_draw([this, frame](auto func) {
func(frame);
m_semaphore.notify();
});
m_semaphore.wait();
}
}
}
@ -190,7 +190,7 @@ int FFmpegVideoDecoder::submit_decode_unit(PDECODE_UNIT decode_unit) {
}
int FFmpegVideoDecoder::capabilities() const {
return CAPABILITY_SLICES_PER_FRAME(4) | CAPABILITY_DIRECT_SUBMIT;
return CAPABILITY_SLICES_PER_FRAME(4);
}
int FFmpegVideoDecoder::decode(char* indata, int inlen) {
@ -223,10 +223,6 @@ AVFrame* FFmpegVideoDecoder::get_frame(bool native_frame) {
return NULL;
}
AVFrame* FFmpegVideoDecoder::frame() const {
return m_frame;
}
VideoDecodeStats* FFmpegVideoDecoder::video_decode_stats() {
uint64_t now = LiGetMillis();
m_video_decode_stats.total_fps = (float)m_video_decode_stats.total_frames / ((float)(now - m_video_decode_stats.measurement_start_timestamp) / 1000);

View file

@ -1,35 +1,49 @@
#include "IFFmpegVideoDecoder.hpp"
#include <pthread.h>
#include <condition_variable>
#pragma once
class IFFmpegHardwareVideoDecoder {
class semaphore {
private:
std::mutex m_mutex;
std::condition_variable m_condition;
unsigned long m_count = 0; // Initialized as locked.
public:
virtual ~IFFmpegHardwareVideoDecoder() {};
virtual bool prepare_decoder_context(AVCodecContext* context, AVDictionary** options) = 0;
void notify() {
std::lock_guard<std::mutex> lock(m_mutex);
++m_count;
m_condition.notify_one();
}
void wait() {
std::unique_lock<std::mutex> lock(m_mutex);
while (!m_count) { // Handle spurious wake-ups.
m_condition.wait(lock);
}
--m_count;
}
bool is_waiting() const {
return m_count == 0;
}
};
class FFmpegVideoDecoder: public IFFmpegVideoDecoder {
public:
FFmpegVideoDecoder();
FFmpegVideoDecoder(IAVFrameSyncDrawer* drawer);
~FFmpegVideoDecoder();
void set_hardware_video_decoder(IFFmpegHardwareVideoDecoder* hardware_video_decoder) {
m_hardware_video_decoder = hardware_video_decoder;
}
int setup(int video_format, int width, int height, int redraw_rate, void *context, int dr_flags) override;
void stop() override;
void cleanup() override;
int submit_decode_unit(PDECODE_UNIT decode_unit) override;
int capabilities() const override;
AVFrame* frame() const override;
VideoDecodeStats* video_decode_stats() override;
private:
int decode(char* indata, int inlen);
AVFrame* get_frame(bool native_frame);
IFFmpegHardwareVideoDecoder* m_hardware_video_decoder = nullptr;
AVPacket m_packet;
AVCodec* m_decoder = nullptr;
AVCodecContext* m_decoder_context = nullptr;
@ -45,6 +59,6 @@ private:
VideoDecodeStats m_video_decode_stats = {};
char* m_ffmpeg_buffer = nullptr;
AVFrame* m_frame = nullptr;
pthread_mutex_t m_mutex;
semaphore m_semaphore;
bool m_is_started = false;
};

View file

@ -0,0 +1,11 @@
#include <functional>
#pragma once

struct AVFrame;

// Callback handed to a drawer: it receives a draw function and is expected to
// invoke it (on the drawer's rendering context) with the AVFrame to present.
using SyncDrawFunc = std::function<void(std::function<void(AVFrame* frame)>)>;

// Interface for an object that accepts a frame-drawing request from the video
// decoder and performs it in sync with rendering (the decoder hands over a
// frame via sync_draw and, in this codebase, waits on a semaphore until the
// supplied callback signals that the frame was consumed).
class IAVFrameSyncDrawer {
public:
    // Virtual destructor: implementations (e.g. video renderers) may be
    // owned and destroyed through an IAVFrameSyncDrawer pointer; without
    // this, such a delete would be undefined behavior.
    virtual ~IAVFrameSyncDrawer() {};
    virtual void sync_draw(SyncDrawFunc func) = 0;
};

View file

@ -1,4 +1,5 @@
#include <Limelight.h>
#include "IAVFrameSyncDrawer.hpp"
#pragma once
extern "C" {
@ -20,6 +21,7 @@ struct VideoDecodeStats {
class IFFmpegVideoDecoder {
public:
IFFmpegVideoDecoder(IAVFrameSyncDrawer* drawer): m_drawer(drawer) {};
virtual ~IFFmpegVideoDecoder() {};
virtual int setup(int video_format, int width, int height, int redraw_rate, void* context, int dr_flags) = 0;
virtual void start() {};
@ -27,6 +29,8 @@ public:
virtual void cleanup() = 0;
virtual int submit_decode_unit(PDECODE_UNIT decode_unit) = 0;
virtual int capabilities() const = 0;
virtual AVFrame* frame() const = 0;
virtual VideoDecodeStats* video_decode_stats() = 0;
protected:
IAVFrameSyncDrawer* m_drawer;
};

View file

@ -39,39 +39,39 @@ static const float vertices[] = {
static const char* texture_mappings[] = { "ymap", "umap", "vmap" };
static const float* gl_color_offset(bool color_full) {
static const float limitedOffsets[] = { 16.0f / 255.0f, 128.0f / 255.0f, 128.0f / 255.0f };
static const float fullOffsets[] = { 0.0f, 128.0f / 255.0f, 128.0f / 255.0f };
static const std::vector<float> gl_color_offset(bool color_full) {
static const std::vector<float> limitedOffsets = { 16.0f / 255.0f, 128.0f / 255.0f, 128.0f / 255.0f };
static const std::vector<float> fullOffsets = { 0.0f, 128.0f / 255.0f, 128.0f / 255.0f };
return color_full ? fullOffsets : limitedOffsets;
}
static const float* gl_color_matrix(enum AVColorSpace color_space, bool color_full) {
static const float bt601Lim[] = {
static const std::vector<float> gl_color_matrix(enum AVColorSpace color_space, bool color_full) {
static const std::vector<float> bt601Lim = {
1.1644f, 1.1644f, 1.1644f,
0.0f, -0.3917f, 2.0172f,
1.5960f, -0.8129f, 0.0f
};
static const float bt601Full[] = {
static const std::vector<float> bt601Full = {
1.0f, 1.0f, 1.0f,
0.0f, -0.3441f, 1.7720f,
1.4020f, -0.7141f, 0.0f
};
static const float bt709Lim[] = {
static const std::vector<float> bt709Lim = {
1.1644f, 1.1644f, 1.1644f,
0.0f, -0.2132f, 2.1124f,
1.7927f, -0.5329f, 0.0f
};
static const float bt709Full[] = {
static const std::vector<float> bt709Full = {
1.0f, 1.0f, 1.0f,
0.0f, -0.1873f, 1.8556f,
1.5748f, -0.4681f, 0.0f
};
static const float bt2020Lim[] = {
static const std::vector<float> bt2020Lim = {
1.1644f, 1.1644f, 1.1644f,
0.0f, -0.1874f, 2.1418f,
1.6781f, -0.6505f, 0.0f
};
static const float bt2020Full[] = {
static const std::vector<float> bt2020Full = {
1.0f, 1.0f, 1.0f,
0.0f, -0.1646f, 1.8814f,
1.4746f, -0.5714f, 0.0f
@ -91,6 +91,11 @@ static const float* gl_color_matrix(enum AVColorSpace color_space, bool color_fu
};
}
GLVideoRenderer::GLVideoRenderer() {
m_gl_color_offset.resize(3);
m_gl_color_matrix.resize(9);
}
GLVideoRenderer::~GLVideoRenderer() {
if (m_shader_program) {
glDeleteProgram(m_shader_program);
@ -149,7 +154,9 @@ void GLVideoRenderer::initialize() {
m_offset_location = glGetUniformLocation(m_shader_program, "offset");
}
void GLVideoRenderer::draw(int width, int height, AVFrame *frame) {
void GLVideoRenderer::draw() {
//std::lock_guard<std::mutex> guard(m_mutex);
if (!m_video_render_stats.rendered_frames) {
m_video_render_stats.measurement_start_timestamp = LiGetMillis();
}
@ -161,24 +168,38 @@ void GLVideoRenderer::draw(int width, int height, AVFrame *frame) {
m_is_initialized = true;
}
if (m_width != frame->width || m_height != frame->height) {
m_width = frame->width;
m_height = frame->height;
for (int i = 0; i < 3; i++) {
if (m_texture_id[i]) {
glDeleteTextures(1, &m_texture_id[i]);
if (m_sync_draw_func) {
m_sync_draw_func([this](auto frame) {
if (m_width != frame->width || m_height != frame->height) {
m_width = frame->width;
m_height = frame->height;
for (int i = 0; i < 3; i++) {
if (m_texture_id[i]) {
glDeleteTextures(1, &m_texture_id[i]);
}
}
glGenTextures(3, m_texture_id);
for (int i = 0; i < 3; i++) {
glBindTexture(GL_TEXTURE_2D, m_texture_id[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, i > 0 ? m_width / 2 : m_width, i > 0 ? m_height / 2 : m_height, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
}
}
}
m_gl_color_offset = gl_color_offset(frame->color_range == AVCOL_RANGE_JPEG);
m_gl_color_matrix = gl_color_matrix(frame->colorspace, frame->color_range == AVCOL_RANGE_JPEG);
for (int i = 0; i < 3; i++) {
glBindTexture(GL_TEXTURE_2D, m_texture_id[i]);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, i > 0 ? m_width / 2 : m_width, i > 0 ? m_height / 2 : m_height, GL_RED, GL_UNSIGNED_BYTE, frame->data[i]);
}
});
glGenTextures(3, m_texture_id);
for (int i = 0; i < 3; i++) {
glBindTexture(GL_TEXTURE_2D, m_texture_id[i]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, i > 0 ? m_width / 2 : m_width, i > 0 ? m_height / 2 : m_height, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
}
m_sync_draw_func = nullptr;
}
glClearColor(0, 0, 0, 1);
@ -186,14 +207,12 @@ void GLVideoRenderer::draw(int width, int height, AVFrame *frame) {
glUseProgram(m_shader_program);
glUniform3fv(m_offset_location, 1, gl_color_offset(frame->color_range == AVCOL_RANGE_JPEG));
glUniformMatrix3fv(m_yuvmat_location, 1, GL_FALSE, gl_color_matrix(frame->colorspace, frame->color_range == AVCOL_RANGE_JPEG));
glUniform3fv(m_offset_location, 1, m_gl_color_offset.data());
glUniformMatrix3fv(m_yuvmat_location, 1, GL_FALSE, m_gl_color_matrix.data());
for (int i = 0; i < 3; i++) {
auto image = frame->data[i];
glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, m_texture_id[i]);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, i > 0 ? m_width / 2 : m_width, i > 0 ? m_height / 2 : m_height, GL_RED, GL_UNSIGNED_BYTE, image);
glUniform1i(m_texture_uniform[i], i);
glActiveTexture(GL_TEXTURE0);
}

View file

@ -1,13 +1,14 @@
#include "IVideoRenderer.hpp"
#include <glad/glad.h>
#include <vector>
#pragma once
class GLVideoRenderer: public IVideoRenderer {
public:
GLVideoRenderer() {};
GLVideoRenderer();
~GLVideoRenderer();
void draw(int width, int height, AVFrame *frame) override;
void draw() override;
VideoRenderStats* video_render_stats() override;
@ -20,5 +21,7 @@ private:
GLuint m_vbo, m_vao;
int m_width = 0, m_height = 0;
int m_yuvmat_location, m_offset_location;
std::vector<float> m_gl_color_offset;
std::vector<float> m_gl_color_matrix;
VideoRenderStats m_video_render_stats = {};
};

View file

@ -1,4 +1,6 @@
#include <Limelight.h>
#include "IAVFrameSyncDrawer.hpp"
#include <mutex>
#pragma once
extern "C" {
@ -12,9 +14,18 @@ struct VideoRenderStats {
double measurement_start_timestamp;
};
class IVideoRenderer {
class IVideoRenderer: public IAVFrameSyncDrawer {
public:
virtual ~IVideoRenderer() {};
virtual void draw(int width, int height, AVFrame* frame) = 0;
virtual void draw() = 0;
virtual VideoRenderStats* video_render_stats() = 0;
void sync_draw(SyncDrawFunc func) {
//std::lock_guard<std::mutex> guard(m_mutex);
m_sync_draw_func = func;
}
protected:
SyncDrawFunc m_sync_draw_func = nullptr;
std::mutex m_mutex;
};

View file

@ -18,8 +18,9 @@ StreamWindow::StreamWindow(Widget *parent, const std::string &address, int app_i
m_size = parent->size();
m_session = new MoonlightSession(address, app_id);
m_session->set_video_decoder(new FFmpegVideoDecoder());
m_session->set_video_renderer(new GLVideoRenderer());
auto renderer = new GLVideoRenderer();
m_session->set_video_decoder(new FFmpegVideoDecoder(renderer));
m_session->set_video_renderer(renderer);
#ifdef __SWITCH__
m_session->set_audio_renderer(new AudrenAudioRenderer());