mirror of https://github.com/moonlight-stream/moonlight-qt (synced 2025-01-25 09:05:00 +00:00)
Move hw->sw frame mapping into a separate class
parent 5639bcc0f5
commit ba507d8046
5 changed files with 181 additions and 152 deletions
@@ -212,12 +212,14 @@ ffmpeg {
     SOURCES += \
         streaming/video/ffmpeg.cpp \
         streaming/video/ffmpeg-renderers/sdlvid.cpp \
+        streaming/video/ffmpeg-renderers/swframemapper.cpp \
         streaming/video/ffmpeg-renderers/pacer/pacer.cpp

     HEADERS += \
         streaming/video/ffmpeg.h \
         streaming/video/ffmpeg-renderers/renderer.h \
         streaming/video/ffmpeg-renderers/sdlvid.h \
+        streaming/video/ffmpeg-renderers/swframemapper.h \
         streaming/video/ffmpeg-renderers/pacer/pacer.h
 }

 libva {
@@ -9,9 +9,8 @@ SdlRenderer::SdlRenderer()
     : m_VideoFormat(0),
       m_Renderer(nullptr),
       m_Texture(nullptr),
-      m_SwPixelFormat(AV_PIX_FMT_NONE),
       m_ColorSpace(-1),
-      m_MapFrame(false)
+      m_SwFrameMapper(this)
 {
     SDL_zero(m_OverlayTextures);
@@ -92,6 +91,7 @@ bool SdlRenderer::initialize(PDECODER_PARAMETERS params)
     Uint32 rendererFlags = SDL_RENDERER_ACCELERATED;

     m_VideoFormat = params->videoFormat;
+    m_SwFrameMapper.setVideoFormat(m_VideoFormat);

     if (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) {
         // SDL doesn't support rendering YUV 10-bit textures yet
@@ -211,137 +211,6 @@ void SdlRenderer::renderOverlay(Overlay::OverlayType type)
     }
 }

-bool SdlRenderer::initializeReadBackFormat(AVBufferRef* hwFrameCtxRef, AVFrame* testFrame)
-{
-    auto hwFrameCtx = (AVHWFramesContext*)hwFrameCtxRef->data;
-    int err;
-    enum AVPixelFormat *formats;
-    AVFrame* outputFrame;
-
-    // This function must only be called once per instance
-    SDL_assert(m_SwPixelFormat == AV_PIX_FMT_NONE);
-    SDL_assert(!m_MapFrame);
-
-    // Try direct mapping before resorting to copying the frame
-    outputFrame = av_frame_alloc();
-    if (outputFrame != nullptr) {
-        err = av_hwframe_map(outputFrame, testFrame, AV_HWFRAME_MAP_READ);
-        if (err == 0) {
-            if (isPixelFormatSupported(m_VideoFormat, (AVPixelFormat)outputFrame->format)) {
-                m_SwPixelFormat = (AVPixelFormat)outputFrame->format;
-                m_MapFrame = true;
-                goto Exit;
-            }
-            else {
-                SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
-                            "Skipping unsupported hwframe mapping format: %d",
-                            outputFrame->format);
-            }
-        }
-        else {
-            SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
-                        "av_hwframe_map() is unsupported (error: %d)",
-                        err);
-            SDL_assert(err == AVERROR(ENOSYS));
-        }
-    }
-
-    // Direct mapping didn't work, so let's see what transfer formats we have
-    err = av_hwframe_transfer_get_formats(hwFrameCtxRef, AV_HWFRAME_TRANSFER_DIRECTION_FROM, &formats, 0);
-    if (err < 0) {
-        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                     "av_hwframe_transfer_get_formats() failed: %d",
-                     err);
-        goto Exit;
-    }
-
-    // NB: In this algorithm, we prefer to get a preferred hardware readback format
-    // and non-preferred rendering format rather than the other way around. This is
-    // why we loop through the readback format list in order, rather than searching
-    // for the format from getPreferredPixelFormat() in the list.
-    for (int i = 0; formats[i] != AV_PIX_FMT_NONE; i++) {
-        SDL_assert(m_VideoFormat != 0);
-        if (!isPixelFormatSupported(m_VideoFormat, formats[i])) {
-            SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
-                        "Skipping unsupported hwframe transfer format %d",
-                        formats[i]);
-            continue;
-        }
-
-        m_SwPixelFormat = formats[i];
-        break;
-    }
-
-    av_freep(&formats);
-
-Exit:
-    av_frame_free(&outputFrame);
-
-    // If we didn't find any supported formats, try hwFrameCtx->sw_format.
-    if (m_SwPixelFormat == AV_PIX_FMT_NONE) {
-        if (isPixelFormatSupported(m_VideoFormat, hwFrameCtx->sw_format)) {
-            m_SwPixelFormat = hwFrameCtx->sw_format;
-        }
-        else {
-            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                         "Unable to find compatible hwframe transfer format (sw_format = %d)",
-                         hwFrameCtx->sw_format);
-            return false;
-        }
-    }
-
-    SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
-                "Selected hwframe->swframe format: %d (mapping: %s)",
-                m_SwPixelFormat,
-                m_MapFrame ? "yes" : "no");
-    return true;
-}
-
-AVFrame* SdlRenderer::getSwFrameFromHwFrame(AVFrame* hwFrame)
-{
-    int err;
-
-    SDL_assert(m_SwPixelFormat != AV_PIX_FMT_NONE);
-
-    AVFrame* swFrame = av_frame_alloc();
-    if (swFrame == nullptr) {
-        return nullptr;
-    }
-
-    swFrame->format = m_SwPixelFormat;
-
-    if (m_MapFrame) {
-        // We don't use AV_HWFRAME_MAP_DIRECT here because it can cause huge
-        // performance penalties on Intel hardware with VAAPI due to mappings
-        // being uncached memory.
-        err = av_hwframe_map(swFrame, hwFrame, AV_HWFRAME_MAP_READ);
-        if (err < 0) {
-            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                         "av_hwframe_map() failed: %d",
-                         err);
-            av_frame_free(&swFrame);
-            return nullptr;
-        }
-    }
-    else {
-        err = av_hwframe_transfer_data(swFrame, hwFrame, 0);
-        if (err < 0) {
-            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                         "av_hwframe_transfer_data() failed: %d",
-                         err);
-            av_frame_free(&swFrame);
-            return nullptr;
-        }
-
-        // av_hwframe_transfer_data() doesn't transfer metadata
-        // (and can even nuke existing metadata in dst), so we
-        // will propagate metadata manually afterwards.
-        av_frame_copy_props(swFrame, hwFrame);
-    }
-
-    return swFrame;
-}
-
 void SdlRenderer::renderFrame(AVFrame* frame)
 {
     int err;
@@ -355,17 +224,8 @@ ReadbackRetry:
         // accelerated decoder, we'll need to read the frame
         // back to render it.

-        // Find the native read-back format
-        if (m_SwPixelFormat == AV_PIX_FMT_NONE) {
-            initializeReadBackFormat(frame->hw_frames_ctx, frame);
-
-            // If we don't support any of the hw transfer formats, we should
-            // have failed inside testRenderFrame() and not made it here.
-            SDL_assert(m_SwPixelFormat != AV_PIX_FMT_NONE);
-        }
-
         // Map or copy this hwframe to a swframe that we can work with
-        frame = swFrame = getSwFrameFromHwFrame(frame);
+        frame = swFrame = m_SwFrameMapper.getSwFrameFromHwFrame(frame);
         if (swFrame == nullptr) {
             return;
         }
@@ -579,11 +439,7 @@ bool SdlRenderer::testRenderFrame(AVFrame* frame)
     }
 #endif

-    if (!initializeReadBackFormat(frame->hw_frames_ctx, frame)) {
-        return false;
-    }
-
-    AVFrame* swFrame = getSwFrameFromHwFrame(frame);
+    AVFrame* swFrame = m_SwFrameMapper.getSwFrameFromHwFrame(frame);
     if (swFrame == nullptr) {
         return false;
     }
@@ -1,6 +1,7 @@
 #pragma once

 #include "renderer.h"
+#include "swframemapper.h"

 #ifdef HAVE_CUDA
 #include "cuda.h"
@@ -19,18 +20,16 @@ public:

 private:
     void renderOverlay(Overlay::OverlayType type);
-    bool initializeReadBackFormat(AVBufferRef* hwFrameCtxRef, AVFrame* testFrame);
-    AVFrame* getSwFrameFromHwFrame(AVFrame* hwFrame);
-
     int m_VideoFormat;
     SDL_Renderer* m_Renderer;
     SDL_Texture* m_Texture;
-    enum AVPixelFormat m_SwPixelFormat;
     int m_ColorSpace;
-    bool m_MapFrame;
     SDL_Texture* m_OverlayTextures[Overlay::OverlayMax];
     SDL_Rect m_OverlayRects[Overlay::OverlayMax];

+    SwFrameMapper m_SwFrameMapper;

 #ifdef HAVE_CUDA
     CUDAGLInteropHelper* m_CudaGLHelper;
 #endif
app/streaming/video/ffmpeg-renderers/swframemapper.cpp (new file, 153 lines)
@@ -0,0 +1,153 @@
+#include "swframemapper.h"
+
+SwFrameMapper::SwFrameMapper(IFFmpegRenderer* renderer)
+    : m_Renderer(renderer),
+      m_VideoFormat(0),
+      m_SwPixelFormat(AV_PIX_FMT_NONE),
+      m_MapFrame(false)
+{
+}
+
+void SwFrameMapper::setVideoFormat(int videoFormat)
+{
+    m_VideoFormat = videoFormat;
+}
+
+bool SwFrameMapper::initializeReadBackFormat(AVBufferRef* hwFrameCtxRef, AVFrame* testFrame)
+{
+    auto hwFrameCtx = (AVHWFramesContext*)hwFrameCtxRef->data;
+    int err;
+    enum AVPixelFormat *formats;
+    AVFrame* outputFrame;
+
+    // This function must only be called once per instance
+    SDL_assert(m_SwPixelFormat == AV_PIX_FMT_NONE);
+    SDL_assert(!m_MapFrame);
+    SDL_assert(m_VideoFormat != 0);
+
+    // Try direct mapping before resorting to copying the frame
+    outputFrame = av_frame_alloc();
+    if (outputFrame != nullptr) {
+        err = av_hwframe_map(outputFrame, testFrame, AV_HWFRAME_MAP_READ);
+        if (err == 0) {
+            if (m_Renderer->isPixelFormatSupported(m_VideoFormat, (AVPixelFormat)outputFrame->format)) {
+                m_SwPixelFormat = (AVPixelFormat)outputFrame->format;
+                m_MapFrame = true;
+                goto Exit;
+            }
+            else {
+                SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
+                            "Skipping unsupported hwframe mapping format: %d",
+                            outputFrame->format);
+            }
+        }
+        else {
+            SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
+                        "av_hwframe_map() is unsupported (error: %d)",
+                        err);
+            SDL_assert(err == AVERROR(ENOSYS));
+        }
+    }
+
+    // Direct mapping didn't work, so let's see what transfer formats we have
+    err = av_hwframe_transfer_get_formats(hwFrameCtxRef, AV_HWFRAME_TRANSFER_DIRECTION_FROM, &formats, 0);
+    if (err < 0) {
+        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
+                     "av_hwframe_transfer_get_formats() failed: %d",
+                     err);
+        goto Exit;
+    }
+
+    // NB: In this algorithm, we prefer to get a preferred hardware readback format
+    // and non-preferred rendering format rather than the other way around. This is
+    // why we loop through the readback format list in order, rather than searching
+    // for the format from getPreferredPixelFormat() in the list.
+    for (int i = 0; formats[i] != AV_PIX_FMT_NONE; i++) {
+        if (!m_Renderer->isPixelFormatSupported(m_VideoFormat, formats[i])) {
+            SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
+                        "Skipping unsupported hwframe transfer format %d",
+                        formats[i]);
+            continue;
+        }
+
+        m_SwPixelFormat = formats[i];
+        break;
+    }
+
+    av_freep(&formats);
+
+Exit:
+    av_frame_free(&outputFrame);
+
+    // If we didn't find any supported formats, try hwFrameCtx->sw_format.
+    if (m_SwPixelFormat == AV_PIX_FMT_NONE) {
+        if (m_Renderer->isPixelFormatSupported(m_VideoFormat, hwFrameCtx->sw_format)) {
+            m_SwPixelFormat = hwFrameCtx->sw_format;
+        }
+        else {
+            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
+                         "Unable to find compatible hwframe transfer format (sw_format = %d)",
+                         hwFrameCtx->sw_format);
+            return false;
+        }
+    }
+
+    SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
+                "Selected hwframe->swframe format: %d (mapping: %s)",
+                m_SwPixelFormat,
+                m_MapFrame ? "yes" : "no");
+    return true;
+}
+
+AVFrame* SwFrameMapper::getSwFrameFromHwFrame(AVFrame* hwFrame)
+{
+    int err;
+
+    // setVideoFormat() must have been called before our first frame
+    SDL_assert(m_VideoFormat != 0);
+
+    if (m_SwPixelFormat == AV_PIX_FMT_NONE) {
+        SDL_assert(hwFrame->hw_frames_ctx != nullptr);
+        if (!initializeReadBackFormat(hwFrame->hw_frames_ctx, hwFrame)) {
+            return nullptr;
+        }
+    }
+
+    AVFrame* swFrame = av_frame_alloc();
+    if (swFrame == nullptr) {
+        return nullptr;
+    }
+
+    swFrame->format = m_SwPixelFormat;
+
+    if (m_MapFrame) {
+        // We don't use AV_HWFRAME_MAP_DIRECT here because it can cause huge
+        // performance penalties on Intel hardware with VAAPI due to mappings
+        // being uncached memory.
+        err = av_hwframe_map(swFrame, hwFrame, AV_HWFRAME_MAP_READ);
+        if (err < 0) {
+            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
+                         "av_hwframe_map() failed: %d",
+                         err);
+            av_frame_free(&swFrame);
+            return nullptr;
+        }
+    }
+    else {
+        err = av_hwframe_transfer_data(swFrame, hwFrame, 0);
+        if (err < 0) {
+            SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
+                         "av_hwframe_transfer_data() failed: %d",
+                         err);
+            av_frame_free(&swFrame);
+            return nullptr;
+        }
+
+        // av_hwframe_transfer_data() doesn't transfer metadata
+        // (and can even nuke existing metadata in dst), so we
+        // will propagate metadata manually afterwards.
+        av_frame_copy_props(swFrame, hwFrame);
+    }
+
+    return swFrame;
+}
app/streaming/video/ffmpeg-renderers/swframemapper.h (new file, 19 lines)
@@ -0,0 +1,19 @@
+#pragma once
+
+#include "renderer.h"
+
+class SwFrameMapper
+{
+public:
+    explicit SwFrameMapper(IFFmpegRenderer* renderer);
+    void setVideoFormat(int videoFormat);
+    AVFrame* getSwFrameFromHwFrame(AVFrame* hwFrame);
+
+private:
+    bool initializeReadBackFormat(AVBufferRef* hwFrameCtxRef, AVFrame* testFrame);
+
+    IFFmpegRenderer* m_Renderer;
+    int m_VideoFormat;
+    enum AVPixelFormat m_SwPixelFormat;
+    bool m_MapFrame;
+};
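
For orientation, here is a minimal sketch (not part of the commit) of how a renderer consumes the new class, modeled on the SdlRenderer changes above. The ExampleRenderer name is hypothetical, and the remaining IFFmpegRenderer overrides are omitted, including isPixelFormatSupported(), which SwFrameMapper queries when choosing a readback format. The frame returned by getSwFrameFromHwFrame() is freshly allocated, so the caller releases it when done.

#include "swframemapper.h"  // also pulls in renderer.h for IFFmpegRenderer

// Hypothetical consumer mirroring the SdlRenderer usage in this commit.
class ExampleRenderer : public IFFmpegRenderer {
public:
    ExampleRenderer() : m_SwFrameMapper(this) {}

    bool initialize(PDECODER_PARAMETERS params) override {
        // The mapper must know the video format before the first frame arrives.
        m_SwFrameMapper.setVideoFormat(params->videoFormat);
        return true;
    }

    void renderFrame(AVFrame* frame) override {
        AVFrame* swFrame = nullptr;

        if (frame->hw_frames_ctx != nullptr) {
            // Lazily probes a readback format, then maps or copies the hwframe.
            frame = swFrame = m_SwFrameMapper.getSwFrameFromHwFrame(frame);
            if (swFrame == nullptr) {
                return;
            }
        }

        // ... upload frame->data[] planes to a texture and draw ...

        av_frame_free(&swFrame);  // no-op if we rendered a software frame directly
    }

private:
    SwFrameMapper m_SwFrameMapper;
};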