mirror of
https://github.com/moonlight-stream/moonlight-qt
synced 2024-11-10 13:44:17 +00:00
DXVA2 Hardware Decoding (#3)
This commit is contained in:
parent 86f843464c
commit 1cae5f83e5
8 changed files with 810 additions and 87 deletions
16 app/app.pro
@@ -25,7 +25,7 @@ win32 {
         LIBS += -L$$PWD/../libs/windows/lib/x64
     }
 
-    LIBS += ws2_32.lib winmm.lib
+    LIBS += ws2_32.lib winmm.lib dxva2.lib ole32.lib
 }
 macx {
     INCLUDEPATH += $$PWD/../libs/mac/include

@@ -56,7 +56,12 @@ SOURCES += \
     streaming/audio.cpp \
     streaming/video.cpp \
     gui/computermodel.cpp \
-    gui/appmodel.cpp
+    gui/appmodel.cpp \
+    streaming/renderers/sdl.cpp
+
+win32 {
+    SOURCES += streaming/renderers/dxva2.cpp
+}
 
 HEADERS += \
     utils.h \

@@ -69,7 +74,12 @@ HEADERS += \
     streaming/input.hpp \
     streaming/session.hpp \
     gui/computermodel.h \
-    gui/appmodel.h
+    gui/appmodel.h \
+    streaming/renderers/renderer.h
+
+win32 {
+    HEADERS += streaming/renderers/dxva2.h
+}
 
 RESOURCES += \
     resources.qrc \
425 app/streaming/renderers/dxva2.cpp Normal file
@@ -0,0 +1,425 @@
#include <Initguid.h>
#include "dxva2.h"

#include <Limelight.h>

DEFINE_GUID(DXVADDI_Intel_ModeH264_E, 0x604F8E68,0x4951,0x4C54,0x88,0xFE,0xAB,0xD2,0x5C,0x15,0xB3,0xD6);

#define SAFE_COM_RELEASE(x) if (x) { (x)->Release(); }

DXVA2Renderer::DXVA2Renderer() :
    m_FrameIndex(0),
    m_SurfacesUsed(0),
    m_SdlRenderer(nullptr),
    m_DecService(nullptr),
    m_Decoder(nullptr),
    m_Pool(nullptr),
    m_Device(nullptr),
    m_RenderTarget(nullptr),
    m_ProcService(nullptr),
    m_Processor(nullptr)
{
    RtlZeroMemory(m_DecSurfaces, sizeof(m_DecSurfaces));
    RtlZeroMemory(&m_DXVAContext, sizeof(m_DXVAContext));
}

DXVA2Renderer::~DXVA2Renderer()
{
    SAFE_COM_RELEASE(m_DecService);
    SAFE_COM_RELEASE(m_Decoder);
    SAFE_COM_RELEASE(m_Device);
    SAFE_COM_RELEASE(m_RenderTarget);
    SAFE_COM_RELEASE(m_ProcService);
    SAFE_COM_RELEASE(m_Processor);

    for (int i = 0; i < ARRAYSIZE(m_DecSurfaces); i++) {
        SAFE_COM_RELEASE(m_DecSurfaces[i]);
    }

    if (m_Pool != nullptr) {
        av_buffer_pool_uninit(&m_Pool);
    }

    if (m_SdlRenderer != nullptr) {
        SDL_DestroyRenderer(m_SdlRenderer);
    }
}

void DXVA2Renderer::ffPoolDummyDelete(void*, uint8_t*)
{
    /* Do nothing */
}

AVBufferRef* DXVA2Renderer::ffPoolAlloc(void* opaque, int)
{
    DXVA2Renderer* me = reinterpret_cast<DXVA2Renderer*>(opaque);

    if (me->m_SurfacesUsed < ARRAYSIZE(me->m_DecSurfaces)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION, "Using buffer: %d", me->m_SurfacesUsed);
        return av_buffer_create((uint8_t*)me->m_DecSurfaces[me->m_SurfacesUsed++],
                                sizeof(*me->m_DecSurfaces), ffPoolDummyDelete, 0, 0);
    }

    return NULL;
}

bool DXVA2Renderer::prepareDecoderContext(AVCodecContext* context)
{
    // m_DXVAContext.workaround and report_id already initialized elsewhere
    m_DXVAContext.decoder = m_Decoder;
    m_DXVAContext.cfg = &m_Config;
    m_DXVAContext.surface = m_DecSurfaces;
    m_DXVAContext.surface_count = ARRAYSIZE(m_DecSurfaces);

    context->hwaccel_context = &m_DXVAContext;

    context->get_format = ffGetFormat;
    context->get_buffer2 = ffGetBuffer2;

    context->opaque = this;

    m_Pool = av_buffer_pool_init2(ARRAYSIZE(m_DecSurfaces), this, ffPoolAlloc, nullptr);
    if (!m_Pool) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Failed create buffer pool");
        return false;
    }

    return true;
}

int DXVA2Renderer::ffGetBuffer2(AVCodecContext* context, AVFrame* frame, int)
{
    DXVA2Renderer* me = reinterpret_cast<DXVA2Renderer*>(context->opaque);

    frame->buf[0] = av_buffer_pool_get(me->m_Pool);
    if (!frame->buf[0]) {
        return AVERROR(ENOMEM);
    }

    frame->data[3] = frame->buf[0]->data;
    frame->format = AV_PIX_FMT_DXVA2_VLD;
    frame->width = me->m_Width;
    frame->height = me->m_Height;

    return 0;
}

enum AVPixelFormat DXVA2Renderer::ffGetFormat(AVCodecContext*,
                                              const enum AVPixelFormat* pixFmts)
{
    const enum AVPixelFormat *p;

    for (p = pixFmts; *p != -1; p++) {
        if (*p == AV_PIX_FMT_DXVA2_VLD)
            return *p;
    }

    SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                 "Failed to find DXVA2 HW surface format");
    return AV_PIX_FMT_NONE;
}

bool DXVA2Renderer::initializeDecoder()
{
    HRESULT hr;

    hr = DXVA2CreateVideoService(m_Device, IID_IDirectXVideoDecoderService,
                                 reinterpret_cast<void**>(&m_DecService));
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "DXVA2CreateVideoService(IID_IDirectXVideoDecoderService) failed: %x",
                     hr);
        return false;
    }

    GUID* guids;
    GUID chosenDeviceGuid;
    UINT guidCount;
    hr = m_DecService->GetDecoderDeviceGuids(&guidCount, &guids);
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "GetDecoderDeviceGuids() failed: %x",
                     hr);
        return false;
    }

    UINT i;
    for (i = 0; i < guidCount; i++) {
        if (m_VideoFormat == VIDEO_FORMAT_H264) {
            if (IsEqualGUID(guids[i], DXVA2_ModeH264_E) ||
                    IsEqualGUID(guids[i], DXVA2_ModeH264_F)) {
                chosenDeviceGuid = guids[i];
                break;
            }
            else if (IsEqualGUID(guids[i], DXVADDI_Intel_ModeH264_E)) {
                chosenDeviceGuid = guids[i];
                m_DXVAContext.workaround |= FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO;
                break;
            }
        }
        else if (m_VideoFormat == VIDEO_FORMAT_H265) {
            if (IsEqualGUID(guids[i], DXVA2_ModeHEVC_VLD_Main)) {
                chosenDeviceGuid = guids[i];
                break;
            }
        }
        else if (m_VideoFormat == VIDEO_FORMAT_H265_MAIN10) {
            if (IsEqualGUID(guids[i], DXVA2_ModeHEVC_VLD_Main10)) {
                chosenDeviceGuid = guids[i];
                break;
            }
        }
    }

    CoTaskMemFree(guids);

    if (i == guidCount) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "No matching decoder device GUIDs");
        return false;
    }

    DXVA2_ConfigPictureDecode* configs;
    UINT configCount;
    hr = m_DecService->GetDecoderConfigurations(chosenDeviceGuid, &m_Desc, nullptr, &configCount, &configs);
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "GetDecoderConfigurations() failed: %x",
                     hr);
        return false;
    }

    for (i = 0; i < configCount; i++) {
        if ((configs[i].ConfigBitstreamRaw == 1 || configs[i].ConfigBitstreamRaw == 2) &&
                IsEqualGUID(configs[i].guidConfigBitstreamEncryption, DXVA2_NoEncrypt)) {
            m_Config = configs[i];
            break;
        }
    }

    CoTaskMemFree(configs);

    if (i == configCount) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "No matching decoder configurations");
        return false;
    }

    int alignment;

    // HEVC using DXVA requires 128B alignment
    if (m_VideoFormat & VIDEO_FORMAT_MASK_H265) {
        alignment = 128;
    }
    else {
        alignment = 16;
    }

    hr = m_DecService->CreateSurface(FFALIGN(m_Width, alignment),
                                     FFALIGN(m_Height, alignment),
                                     ARRAYSIZE(m_DecSurfaces) - 1,
                                     m_Desc.Format,
                                     D3DPOOL_DEFAULT,
                                     0,
                                     DXVA2_VideoDecoderRenderTarget,
                                     m_DecSurfaces,
                                     nullptr);
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "CreateSurface() failed: %x",
                     hr);
        return false;
    }

    hr = m_DecService->CreateVideoDecoder(chosenDeviceGuid, &m_Desc, &m_Config,
                                          m_DecSurfaces, ARRAYSIZE(m_DecSurfaces),
                                          &m_Decoder);
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "CreateVideoDecoder() failed: %x",
                     hr);
        return false;
    }

    return true;
}

bool DXVA2Renderer::initializeRenderer()
{
    HRESULT hr;

    hr = m_Device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &m_RenderTarget);
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "GetBackBuffer() failed: %x",
                     hr);
        return false;
    }

    D3DSURFACE_DESC renderTargetDesc;
    m_RenderTarget->GetDesc(&renderTargetDesc);

    hr = DXVA2CreateVideoService(m_Device, IID_IDirectXVideoProcessorService,
                                 reinterpret_cast<void**>(&m_ProcService));

    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "DXVA2CreateVideoService(IID_IDirectXVideoProcessorService) failed: %x",
                     hr);
        return false;
    }

    UINT guidCount;
    GUID* guids;
    hr = m_ProcService->GetVideoProcessorDeviceGuids(&m_Desc, &guidCount, &guids);
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "GetVideoProcessorDeviceGuids() failed: %x",
                     hr);
        return false;
    }

    UINT i;
    for (i = 0; i < guidCount; i++) {
        DXVA2_VideoProcessorCaps caps;
        hr = m_ProcService->GetVideoProcessorCaps(guids[i], &m_Desc, renderTargetDesc.Format, &caps);
        if (SUCCEEDED(hr)) {
            m_ProcService->GetProcAmpRange(guids[i], &m_Desc, renderTargetDesc.Format, DXVA2_ProcAmp_Brightness, &m_BrightnessRange);
            m_ProcService->GetProcAmpRange(guids[i], &m_Desc, renderTargetDesc.Format, DXVA2_ProcAmp_Contrast, &m_ContrastRange);
            m_ProcService->GetProcAmpRange(guids[i], &m_Desc, renderTargetDesc.Format, DXVA2_ProcAmp_Hue, &m_HueRange);
            m_ProcService->GetProcAmpRange(guids[i], &m_Desc, renderTargetDesc.Format, DXVA2_ProcAmp_Saturation, &m_SaturationRange);

            // TODO: Validate some caps?

            hr = m_ProcService->CreateVideoProcessor(guids[i], &m_Desc, renderTargetDesc.Format, 0, &m_Processor);
            if (FAILED(hr)) {
                SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                            "CreateVideoProcessor() failed for GUID %d: %x",
                            i,
                            hr);
                continue;
            }

            break;
        }
        else {
            SDL_LogWarn(SDL_LOG_CATEGORY_APPLICATION,
                        "GetVideoProcessorCaps() failed for GUID %d: %x",
                        i,
                        hr);
        }
    }

    CoTaskMemFree(guids);

    if (i == guidCount) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Unable to find a usable DXVA2 processor");
        return false;
    }

    return true;
}

bool DXVA2Renderer::initialize(SDL_Window* window, int videoFormat, int width, int height)
{
    m_VideoFormat = videoFormat;
    m_Width = width;
    m_Height = height;

    m_SdlRenderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
    if (!m_SdlRenderer) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "SDL_CreateRenderer() failed: %s",
                     SDL_GetError());
        return false;
    }

    m_Device = SDL_RenderGetD3D9Device(m_SdlRenderer);

    RtlZeroMemory(&m_Desc, sizeof(m_Desc));
    m_Desc.SampleWidth = m_Width;
    m_Desc.SampleHeight = m_Height;
    m_Desc.SampleFormat.VideoChromaSubsampling = DXVA2_VideoChromaSubsampling_ProgressiveChroma;
    m_Desc.SampleFormat.NominalRange = DXVA2_NominalRange_0_255;
    m_Desc.SampleFormat.VideoTransferMatrix = DXVA2_VideoTransferMatrix_BT709;
    m_Desc.SampleFormat.VideoLighting = DXVA2_VideoLighting_dim;
    m_Desc.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_BT709;
    m_Desc.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_709;
    m_Desc.SampleFormat.SampleFormat = DXVA2_SampleProgressiveFrame;
    m_Desc.Format = (D3DFORMAT)MAKEFOURCC('N','V','1','2');

    if (!initializeDecoder()) {
        return false;
    }

    if (!initializeRenderer()) {
        return false;
    }

    return true;
}

void DXVA2Renderer::renderFrame(AVFrame* frame)
{
    IDirect3DSurface9* surface = reinterpret_cast<IDirect3DSurface9*>(frame->data[3]);
    HRESULT hr;

    hr = m_Device->TestCooperativeLevel();
    switch (hr) {
    case D3D_OK:
        break;
    case D3DERR_DEVICELOST:
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "D3D device lost!");
        return;
    case D3DERR_DEVICENOTRESET:
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "D3D device not reset!");
        return;
    default:
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "Unknown D3D error: %x",
                     hr);
        return;
    }

    DXVA2_VideoSample sample = {};
    sample.Start = m_FrameIndex;
    sample.End = m_FrameIndex + 1;
    sample.SrcSurface = surface;
    sample.SrcRect.right = m_Desc.SampleWidth;
    sample.SrcRect.bottom = m_Desc.SampleHeight;
    sample.DstRect = sample.SrcRect;
    sample.SampleFormat = m_Desc.SampleFormat;
    sample.PlanarAlpha = DXVA2_Fixed32OpaqueAlpha();

    DXVA2_VideoProcessBltParams bltParams = {};

    bltParams.TargetFrame = m_FrameIndex++;
    bltParams.TargetRect.right = m_Desc.SampleWidth;
    bltParams.TargetRect.bottom = m_Desc.SampleHeight;
    bltParams.BackgroundColor.Y = 0x1000;
    bltParams.BackgroundColor.Cb = 0x8000;
    bltParams.BackgroundColor.Cr = 0x8000;
    bltParams.BackgroundColor.Alpha = 0xFFFF;

    bltParams.DestFormat.SampleFormat = DXVA2_SampleProgressiveFrame;

    bltParams.ProcAmpValues.Brightness = m_BrightnessRange.DefaultValue;
    bltParams.ProcAmpValues.Contrast = m_ContrastRange.DefaultValue;
    bltParams.ProcAmpValues.Hue = m_HueRange.DefaultValue;
    bltParams.ProcAmpValues.Saturation = m_SaturationRange.DefaultValue;

    bltParams.Alpha = DXVA2_Fixed32OpaqueAlpha();

    hr = m_Processor->VideoProcessBlt(m_RenderTarget, &bltParams, &sample, 1, nullptr);
    if (FAILED(hr)) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "VideoProcessBlt() failed: %x",
                     hr);
    }

    m_Device->Present(nullptr, nullptr, nullptr, nullptr);
}
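For orientation only (this note is not part of the commit): with the setup done in prepareDecoderContext() above, FFmpeg's DXVA2 hwaccel decodes straight into the IDirect3DSurface9 pool, and each output AVFrame carries its surface pointer in data[3], which renderFrame() casts back before calling VideoProcessBlt(). A minimal consumer-side sketch, where decoderCtx and renderer are hypothetical stand-ins for the session's decoder context and renderer:

    // Sketch: pull one decoded DXVA2 frame and hand it to the renderer.
    AVFrame* frame = av_frame_alloc();
    if (avcodec_receive_frame(decoderCtx, frame) == 0) {
        // frame->format == AV_PIX_FMT_DXVA2_VLD; frame->data[3] holds the
        // IDirect3DSurface9* that DXVA2Renderer::renderFrame() will blit.
        renderer->renderFrame(frame);
    }
    av_frame_free(&frame);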
65 app/streaming/renderers/dxva2.h Normal file
@@ -0,0 +1,65 @@
#pragma once

#include "renderer.h"

#include <d3d9.h>
#include <dxva2api.h>

extern "C" {
#include <libavcodec/dxva2.h>
}

class DXVA2Renderer : public IRenderer
{
public:
    DXVA2Renderer();
    virtual ~DXVA2Renderer();
    virtual bool initialize(SDL_Window* window,
                            int videoFormat,
                            int width,
                            int height);
    virtual bool prepareDecoderContext(AVCodecContext* context);
    virtual void renderFrame(AVFrame* frame);

private:
    bool initializeDecoder();
    bool initializeRenderer();

    static
    AVBufferRef* ffPoolAlloc(void* opaque, int size);

    static
    void ffPoolDummyDelete(void*, uint8_t*);

    static
    int ffGetBuffer2(AVCodecContext* context, AVFrame* frame, int flags);

    static
    enum AVPixelFormat ffGetFormat(AVCodecContext*,
                                   const enum AVPixelFormat* pixFmts);

    int m_VideoFormat;
    int m_Width;
    int m_Height;

    SDL_Renderer* m_SdlRenderer;

    struct dxva_context m_DXVAContext;
    IDirect3DSurface9* m_DecSurfaces[19];
    DXVA2_ConfigPictureDecode m_Config;
    IDirectXVideoDecoderService* m_DecService;
    IDirectXVideoDecoder* m_Decoder;
    int m_SurfacesUsed;
    AVBufferPool* m_Pool;

    IDirect3DDevice9* m_Device;
    IDirect3DSurface9* m_RenderTarget;
    IDirectXVideoProcessorService* m_ProcService;
    IDirectXVideoProcessor* m_Processor;
    DXVA2_ValueRange m_BrightnessRange;
    DXVA2_ValueRange m_ContrastRange;
    DXVA2_ValueRange m_HueRange;
    DXVA2_ValueRange m_SaturationRange;
    DXVA2_VideoDesc m_Desc;
    REFERENCE_TIME m_FrameIndex;
};
34 app/streaming/renderers/renderer.h Normal file
@@ -0,0 +1,34 @@
#pragma once

#include <SDL.h>

extern "C" {
#include <libavcodec/avcodec.h>
}

class IRenderer {
public:
    virtual ~IRenderer() {}
    virtual bool initialize(SDL_Window* window,
                            int videoFormat,
                            int width,
                            int height) = 0;
    virtual bool prepareDecoderContext(AVCodecContext* context) = 0;
    virtual void renderFrame(AVFrame* frame) = 0;
};

class SdlRenderer : public IRenderer {
public:
    SdlRenderer();
    virtual ~SdlRenderer();
    virtual bool initialize(SDL_Window* window,
                            int videoFormat,
                            int width,
                            int height);
    virtual bool prepareDecoderContext(AVCodecContext* context);
    virtual void renderFrame(AVFrame* frame);

private:
    SDL_Renderer* m_Renderer;
    SDL_Texture* m_Texture;
};
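As a usage sketch (not part of the diff): every backend is driven through the same three IRenderer calls, so the session code never has to know whether frames end up in an SDL texture or a D3D9 surface. The window, codecCtx, and frame variables below are assumed to exist already:

    // Sketch: generic renderer lifecycle defined by IRenderer above.
    IRenderer* renderer = new SdlRenderer();              // or DXVA2Renderer on Windows
    if (renderer->initialize(window, VIDEO_FORMAT_H264, 1280, 720) &&
            renderer->prepareDecoderContext(codecCtx)) {  // attach hwaccel state, if any
        renderer->renderFrame(frame);                     // called once per decoded AVFrame
    }
    delete renderer;                                      // releases backend resources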
67 app/streaming/renderers/sdl.cpp Normal file
@@ -0,0 +1,67 @@
#include "streaming/session.hpp"

SdlRenderer::SdlRenderer()
    : m_Renderer(nullptr),
      m_Texture(nullptr)
{

}

SdlRenderer::~SdlRenderer()
{
    if (m_Texture != nullptr) {
        SDL_DestroyTexture(m_Texture);
    }

    if (m_Renderer != nullptr) {
        SDL_DestroyRenderer(m_Renderer);
    }
}

bool SdlRenderer::prepareDecoderContext(AVCodecContext*)
{
    /* Nothing to do */
    return true;
}

bool SdlRenderer::initialize(SDL_Window* window,
                             int,
                             int width,
                             int height)
{
    m_Renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
    if (!m_Renderer) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "SDL_CreateRenderer() failed: %s",
                     SDL_GetError());
        return false;
    }

    m_Texture = SDL_CreateTexture(m_Renderer,
                                  SDL_PIXELFORMAT_YV12,
                                  SDL_TEXTUREACCESS_STREAMING,
                                  width,
                                  height);
    if (!m_Texture) {
        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                     "SDL_CreateRenderer() failed: %s",
                     SDL_GetError());
        return false;
    }

    return true;
}

void SdlRenderer::renderFrame(AVFrame* frame)
{
    SDL_UpdateYUVTexture(m_Texture, nullptr,
                         frame->data[0],
                         frame->linesize[0],
                         frame->data[1],
                         frame->linesize[1],
                         frame->data[2],
                         frame->linesize[2]);
    SDL_RenderClear(m_Renderer);
    SDL_RenderCopy(m_Renderer, m_Texture, nullptr, nullptr);
    SDL_RenderPresent(m_Renderer);
}
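One observation about renderFrame() above (not part of the diff): although the texture is created as SDL_PIXELFORMAT_YV12, passing FFmpeg's yuv420p planes in Y, U, V order is still correct, because SDL_UpdateYUVTexture() takes each plane as a separately named argument and handles the packing itself:

    // SDL2 declaration, for reference:
    // int SDL_UpdateYUVTexture(SDL_Texture* texture, const SDL_Rect* rect,
    //                          const Uint8* Yplane, int Ypitch,
    //                          const Uint8* Uplane, int Upitch,
    //                          const Uint8* Vplane, int Vpitch);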
app/streaming/session.cpp

@@ -79,9 +79,7 @@ void Session::clLogMessage(const char* format, ...)
 Session::Session(NvComputer* computer, NvApp& app)
     : m_Computer(computer),
       m_App(app),
-      m_Window(nullptr),
-      m_Renderer(nullptr),
-      m_Texture(nullptr)
+      m_Window(nullptr)
 {
     LiInitializeVideoCallbacks(&m_VideoCallbacks);
     m_VideoCallbacks.setup = drSetup;

@@ -113,15 +111,16 @@ Session::Session(NvComputer* computer, NvApp& app)
         m_StreamConfig.audioConfiguration = AUDIO_CONFIGURATION_51_SURROUND;
         break;
     }
+
     switch (m_Preferences.videoCodecConfig)
     {
     case StreamingPreferences::VCC_AUTO:
         // TODO: Determine if HEVC is better depending on the decoder
-        // NOTE: HEVC currently uses only 1 slice regardless of what
-        // we provide in CAPABILITY_SLICES_PER_FRAME(), so we should
-        // never use it for software decoding (unless common-c starts
-        // respecting it for HEVC).
-        m_StreamConfig.supportsHevc = false;
+        m_StreamConfig.supportsHevc =
+                isHardwareDecodeAvailable(m_Preferences.videoDecoderSelection,
+                                          VIDEO_FORMAT_H265,
+                                          m_StreamConfig.width,
+                                          m_StreamConfig.height);
         m_StreamConfig.enableHdr = false;
         break;
     case StreamingPreferences::VCC_FORCE_H264:

@@ -151,8 +150,16 @@ bool Session::validateLaunch()
                                       "A GeForce GTX 900-series (Maxwell) or later GPU is required for HEVC streaming.");
             }
         }
-
-        // TODO: Validate HEVC support based on decoder caps
+        else if (!isHardwareDecodeAvailable(m_Preferences.videoDecoderSelection,
+                                            VIDEO_FORMAT_H265,
+                                            m_StreamConfig.width,
+                                            m_StreamConfig.height)) {
+            // NOTE: HEVC currently uses only 1 slice regardless of what
+            // we provide in CAPABILITY_SLICES_PER_FRAME(), so we should
+            // never use it for software decoding (unless common-c starts
+            // respecting it for HEVC).
+            m_StreamConfig.supportsHevc = false;
+        }
     }
 
     if (m_StreamConfig.enableHdr) {

@@ -168,8 +175,14 @@ bool Session::validateLaunch()
             emit displayLaunchWarning("Your host PC GPU doesn't support HDR streaming. "
                                       "A GeForce GTX 1000-series (Pascal) or later GPU is required for HDR streaming.");
         }
+        else if (!isHardwareDecodeAvailable(m_Preferences.videoDecoderSelection,
+                                            VIDEO_FORMAT_H265_MAIN10,
+                                            m_StreamConfig.width,
+                                            m_StreamConfig.height)) {
+            emit displayLaunchWarning("Your client PC GPU doesn't support HEVC Main10 decoding for HDR streaming.");
+        }
         else {
-            // TODO: Also validate client decoder and display capabilites
+            // TODO: Also validate display capabilites
 
             // Validation successful so HDR is good to go
             m_StreamConfig.enableHdr = true;

@@ -206,12 +219,6 @@ class DeferredSessionCleanupTask : public QRunnable
     {
         // Finish cleanup of the connection state
        LiStopConnection();
-        if (Session::s_ActiveSession->m_Texture != nullptr) {
-            SDL_DestroyTexture(Session::s_ActiveSession->m_Texture);
-        }
-        if (Session::s_ActiveSession->m_Renderer != nullptr) {
-            SDL_DestroyRenderer(Session::s_ActiveSession->m_Renderer);
-        }
         if (Session::s_ActiveSession->m_Window != nullptr) {
             SDL_DestroyWindow(Session::s_ActiveSession->m_Window);
         }

@@ -265,6 +272,23 @@ void Session::exec()
         return;
     }
 
+    m_Window = SDL_CreateWindow("Moonlight",
+                                SDL_WINDOWPOS_UNDEFINED,
+                                SDL_WINDOWPOS_UNDEFINED,
+                                m_StreamConfig.width,
+                                m_StreamConfig.height,
+                                SDL_WINDOW_HIDDEN |
+                                (m_Preferences.fullScreen ?
+                                     SDL_WINDOW_FULLSCREEN :
+                                     0));
+    if (!m_Window) {
+        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
+                     "SDL_CreateWindow() failed: %s",
+                     SDL_GetError());
+        s_ActiveSessionSemaphore.release();
+        return;
+    }
+
     QByteArray hostnameStr = m_Computer->activeAddress.toLatin1();
     QByteArray siAppVersion = m_Computer->appVersion.toLatin1();

@@ -295,45 +319,15 @@ void Session::exec()
     emit connectionStarted();
     QCoreApplication::processEvents(QEventLoop::ExcludeUserInputEvents);
 
-    m_Window = SDL_CreateWindow("Moonlight",
-                                SDL_WINDOWPOS_UNDEFINED,
-                                SDL_WINDOWPOS_UNDEFINED,
-                                m_StreamConfig.width,
-                                m_StreamConfig.height,
-                                (m_Preferences.fullScreen ?
-                                     SDL_WINDOW_FULLSCREEN :
-                                     0));
-    if (!m_Window) {
-        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                     "SDL_CreateWindow() failed: %s",
-                     SDL_GetError());
-        goto DispatchDeferredCleanup;
-    }
-
-    m_Renderer = SDL_CreateRenderer(m_Window, -1,
-                                    SDL_RENDERER_ACCELERATED);
-    if (!m_Renderer) {
-        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                     "SDL_CreateRenderer() failed: %s",
-                     SDL_GetError());
-        goto DispatchDeferredCleanup;
-    }
-
-    m_Texture = SDL_CreateTexture(m_Renderer,
-                                  SDL_PIXELFORMAT_YV12,
-                                  SDL_TEXTUREACCESS_STREAMING,
-                                  m_StreamConfig.width,
-                                  m_StreamConfig.height);
-    if (!m_Texture) {
-        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                     "SDL_CreateRenderer() failed: %s",
-                     SDL_GetError());
-        goto DispatchDeferredCleanup;
-    }
+    // Show the streaming window
+    SDL_ShowWindow(m_Window);
 
     // Capture the mouse
     SDL_SetRelativeMouseMode(SDL_TRUE);
 
     // Disable the screen saver
     SDL_DisableScreenSaver();
 
     // Hijack this thread to be the SDL main thread. We have to do this
     // because we want to suspend all Qt processing until the stream is over.
     SDL_Event event;

@@ -398,6 +392,7 @@ DispatchDeferredCleanup:
     // Uncapture the mouse and hide the window immediately,
     // so we can return to the Qt GUI ASAP.
     SDL_SetRelativeMouseMode(SDL_FALSE);
+    SDL_EnableScreenSaver();
     if (m_Window != nullptr) {
         SDL_HideWindow(m_Window);
     }
app/streaming/session.hpp

@@ -7,6 +7,7 @@
 #include "backend/computermanager.h"
 #include "settings/streamingpreferences.h"
 #include "input.hpp"
+#include "renderers/renderer.h"
 
 extern "C" {
 #include <libavcodec/avcodec.h>

@@ -44,6 +45,23 @@ private:
 
     int sdlDetermineAudioConfiguration();
 
+    static
+    bool chooseDecoder(StreamingPreferences::VideoDecoderSelection vds,
+                       SDL_Window* window,
+                       int videoFormat,
+                       int width, int height,
+                       AVCodec*& chosenDecoder,
+                       const AVCodecHWConfig*& chosenHwConfig,
+                       IRenderer*& newRenderer);
+
+    static
+    enum AVPixelFormat getHwFormat(AVCodecContext*,
+                                   const enum AVPixelFormat* pixFmts);
+
+    static
+    bool isHardwareDecodeAvailable(StreamingPreferences::VideoDecoderSelection vds,
+                                   int videoFormat, int width, int height);
+
     void renderFrame(SDL_UserEvent* event);
 
     void dropFrame(SDL_UserEvent* event);

@@ -92,12 +110,13 @@ private:
     NvComputer* m_Computer;
     NvApp m_App;
     SDL_Window* m_Window;
-    SDL_Renderer* m_Renderer;
-    SDL_Texture* m_Texture;
 
     static AVPacket s_Pkt;
     static AVCodecContext* s_VideoDecoderCtx;
     static QByteArray s_DecodeBuffer;
+    static AVBufferRef* s_HwDeviceCtx;
+    static const AVCodecHWConfig* s_HwDecodeCfg;
+    static IRenderer* s_Renderer;
 
     static SDL_AudioDeviceID s_AudioDevice;
     static OpusMSDecoder* s_OpusDecoder;
app/streaming/video.cpp

@@ -1,9 +1,15 @@
 #include <Limelight.h>
 #include "session.hpp"
 
+#ifdef _WIN32
+#include "renderers/dxva2.h"
+#endif
+
 AVPacket Session::s_Pkt;
 AVCodecContext* Session::s_VideoDecoderCtx;
 QByteArray Session::s_DecodeBuffer;
+const AVCodecHWConfig* Session::s_HwDecodeCfg;
+IRenderer* Session::s_Renderer;
 
 #define MAX_SLICES 4

@@ -15,31 +21,122 @@ int Session::getDecoderCapabilities()
     caps |= CAPABILITY_DIRECT_SUBMIT;
 
     // Slice up to 4 times for parallel decode, once slice per core
-    caps |= CAPABILITY_SLICES_PER_FRAME(std::min(MAX_SLICES, SDL_GetCPUCount()));
+    caps |= CAPABILITY_SLICES_PER_FRAME(qMin(MAX_SLICES, SDL_GetCPUCount()));
 
     return caps;
 }
 
+bool Session::chooseDecoder(StreamingPreferences::VideoDecoderSelection vds,
+                            SDL_Window* window,
+                            int videoFormat,
+                            int width, int height,
+                            AVCodec*& chosenDecoder,
+                            const AVCodecHWConfig*& chosenHwConfig,
+                            IRenderer*& newRenderer)
+{
+    if (videoFormat & VIDEO_FORMAT_MASK_H264) {
+        chosenDecoder = avcodec_find_decoder(AV_CODEC_ID_H264);
+    }
+    else if (videoFormat & VIDEO_FORMAT_MASK_H265) {
+        chosenDecoder = avcodec_find_decoder(AV_CODEC_ID_HEVC);
+    }
+    else {
+        Q_ASSERT(false);
+        chosenDecoder = nullptr;
+    }
+
+    if (!chosenDecoder) {
+        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
+                     "Unable to find decoder for format: %x",
+                     videoFormat);
+        return false;
+    }
+
+    for (int i = 0;; i++) {
+        const AVCodecHWConfig *config = avcodec_get_hw_config(chosenDecoder, i);
+        if (!config || vds == StreamingPreferences::VDS_FORCE_SOFTWARE) {
+            // No matching hardware acceleration support.
+            // This is not an error.
+            chosenHwConfig = nullptr;
+            newRenderer = new SdlRenderer();
+            if (vds != StreamingPreferences::VDS_FORCE_HARDWARE &&
+                    newRenderer->initialize(window, videoFormat, width, height)) {
+                return true;
+            }
+            else {
+                delete newRenderer;
+                newRenderer = nullptr;
+                return false;
+            }
+        }
+
+        if (!(config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)) {
+            continue;
+        }
+
+        // Look for acceleration types we support
+        switch (config->device_type) {
+#ifdef _WIN32
+        case AV_HWDEVICE_TYPE_DXVA2:
+            newRenderer = new DXVA2Renderer();
+            break;
+#endif
+        default:
+            continue;
+        }
+
+        if (newRenderer->initialize(window, videoFormat, width, height)) {
+            chosenHwConfig = config;
+            return true;
+        }
+        else {
+            // Failed to initialize
+            delete newRenderer;
+            newRenderer = nullptr;
+        }
+    }
+}
+
+bool Session::isHardwareDecodeAvailable(
+        StreamingPreferences::VideoDecoderSelection vds,
+        int videoFormat, int width, int height)
+{
+    AVCodec* decoder;
+    const AVCodecHWConfig* hwConfig;
+    IRenderer* renderer;
+
+    // Create temporary window to instantiate the decoder
+    SDL_Window* window = SDL_CreateWindow("", 0, 0, width, height, SDL_WINDOW_HIDDEN);
+    if (!window) {
+        return false;
+    }
+
+    if (chooseDecoder(vds, window, videoFormat, width, height, decoder, hwConfig, renderer)) {
+        // The renderer may have referenced the window, so
+        // we must delete the renderer before the window.
+        delete renderer;
+
+        SDL_DestroyWindow(window);
+        return hwConfig != nullptr;
+    }
+    else {
+        SDL_DestroyWindow(window);
+        // Failed to find *any* decoder, including software
+        return false;
+    }
+}
+
 int Session::drSetup(int videoFormat, int width, int height, int /* frameRate */, void*, int)
 {
     AVCodec* decoder;
 
     av_init_packet(&s_Pkt);
 
-    switch (videoFormat) {
-    case VIDEO_FORMAT_H264:
-        decoder = avcodec_find_decoder(AV_CODEC_ID_H264);
-        break;
-    case VIDEO_FORMAT_H265:
-    case VIDEO_FORMAT_H265_MAIN10:
-        decoder = avcodec_find_decoder(AV_CODEC_ID_HEVC);
-        break;
-    }
-
-    if (!decoder) {
-        SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
-                     "Unable to find decoder for format: %x",
-                     videoFormat);
+    if (!chooseDecoder(s_ActiveSession->m_Preferences.videoDecoderSelection,
+                       s_ActiveSession->m_Window,
+                       videoFormat, width, height,
+                       decoder, s_HwDecodeCfg, s_Renderer)) {
+        // Error logged in chooseDecoder()
         return -1;
     }

@@ -47,24 +144,41 @@ int Session::drSetup(int videoFormat, int width, int height, int /* frameRate */
     if (!s_VideoDecoderCtx) {
         SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                      "Unable to allocate video decoder context");
+        delete s_Renderer;
         return -1;
     }
 
-    // Enable slice multi-threading for software decoding
+    // Always request low delay decoding
     s_VideoDecoderCtx->flags |= AV_CODEC_FLAG_LOW_DELAY;
-    s_VideoDecoderCtx->thread_type = FF_THREAD_SLICE;
-    s_VideoDecoderCtx->thread_count = std::min(MAX_SLICES, SDL_GetCPUCount());
+
+    // Enable slice multi-threading for software decoding
+    if (!s_HwDecodeCfg) {
+        s_VideoDecoderCtx->thread_type = FF_THREAD_SLICE;
+        s_VideoDecoderCtx->thread_count = qMin(MAX_SLICES, SDL_GetCPUCount());
+    }
+    else {
+        // No threading for HW decode
+        s_VideoDecoderCtx->thread_count = 1;
+    }
 
     // Setup decoding parameters
     s_VideoDecoderCtx->width = width;
     s_VideoDecoderCtx->height = height;
     s_VideoDecoderCtx->pix_fmt = AV_PIX_FMT_YUV420P; // FIXME: HDR
 
+    // Allow the renderer to attach data to this decoder
+    if (!s_Renderer->prepareDecoderContext(s_VideoDecoderCtx)) {
+        delete s_Renderer;
+        av_free(s_VideoDecoderCtx);
+        return -1;
+    }
+
     int err = avcodec_open2(s_VideoDecoderCtx, decoder, nullptr);
     if (err < 0) {
         SDL_LogError(SDL_LOG_CATEGORY_APPLICATION,
                      "Unable to open decoder for format: %x",
                      videoFormat);
+        delete s_Renderer;
         av_free(s_VideoDecoderCtx);
         return -1;
     }

@@ -80,6 +194,11 @@ void Session::drCleanup()
     avcodec_close(s_VideoDecoderCtx);
     av_free(s_VideoDecoderCtx);
     s_VideoDecoderCtx = nullptr;
+
+    s_HwDecodeCfg = nullptr;
+
+    delete s_Renderer;
+    s_Renderer = nullptr;
 }
 
 int Session::drSubmitDecodeUnit(PDECODE_UNIT du)

@@ -147,18 +266,7 @@ int Session::drSubmitDecodeUnit(PDECODE_UNIT du)
 void Session::renderFrame(SDL_UserEvent* event)
 {
     AVFrame* frame = reinterpret_cast<AVFrame*>(event->data1);
 
-    SDL_UpdateYUVTexture(m_Texture, nullptr,
-                         frame->data[0],
-                         frame->linesize[0],
-                         frame->data[1],
-                         frame->linesize[1],
-                         frame->data[2],
-                         frame->linesize[2]);
-    SDL_RenderClear(m_Renderer);
-    SDL_RenderCopy(m_Renderer, m_Texture, nullptr, nullptr);
-    SDL_RenderPresent(m_Renderer);
-
+    s_Renderer->renderFrame(frame);
     av_frame_free(&frame);
 }