Mirror of https://github.com/moonlight-stream/moonlight-qt
Synced 2024-12-14 21:32:27 +00:00
Avoid hardcoding VIDEO_FORMAT_H265_MAIN10 for HDR/10-bit color
parent 685136d98f
commit be2f4433db
11 changed files with 18 additions and 18 deletions
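The pattern is the same in every file touched: an equality test against VIDEO_FORMAT_H265_MAIN10 becomes a bitmask test against VIDEO_FORMAT_MASK_10BIT, so any 10-bit format takes the 10-bit path rather than only HEVC Main10. A minimal sketch of the idea follows; the flag values are illustrative placeholders, not the real moonlight-common-c definitions.

// Minimal sketch of the bitmask test this commit adopts.
// The constants below are hypothetical; the real values live in moonlight-common-c.
#include <cstdint>
#include <cstdio>

constexpr uint32_t VIDEO_FORMAT_H265_MAIN10 = 0x0200; // placeholder value
constexpr uint32_t VIDEO_FORMAT_OTHER_10BIT = 0x2000; // placeholder for any other 10-bit format
constexpr uint32_t VIDEO_FORMAT_MASK_10BIT  = VIDEO_FORMAT_H265_MAIN10 | VIDEO_FORMAT_OTHER_10BIT;

static bool is10Bit(uint32_t videoFormat) {
    // A mask test covers every 10-bit format; "== VIDEO_FORMAT_H265_MAIN10"
    // would silently treat any other 10-bit format as 8-bit.
    return (videoFormat & VIDEO_FORMAT_MASK_10BIT) != 0;
}

int main() {
    std::printf("%d %d %d\n",
                is10Bit(VIDEO_FORMAT_H265_MAIN10),  // 1
                is10Bit(VIDEO_FORMAT_OTHER_10BIT),  // 1
                is10Bit(0x0001));                   // 0 (an 8-bit format)
}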
@@ -322,7 +322,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params)
     m_DisplayWidth = swapChainDesc.Width;
     m_DisplayHeight = swapChainDesc.Height;
 
-    if (params->videoFormat == VIDEO_FORMAT_H265_MAIN10) {
+    if (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) {
         swapChainDesc.Format = DXGI_FORMAT_R10G10B10A2_UNORM;
     }
     else {
@@ -446,7 +446,7 @@ bool D3D11VARenderer::initialize(PDECODER_PARAMETERS params)
 
     // We require NV12 or P010 textures for our shader
     framesContext->format = AV_PIX_FMT_D3D11;
-    framesContext->sw_format = params->videoFormat == VIDEO_FORMAT_H265_MAIN10 ?
+    framesContext->sw_format = (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) ?
                                    AV_PIX_FMT_P010 : AV_PIX_FMT_NV12;
 
     framesContext->width = FFALIGN(params->width, m_TextureAlignment);
@@ -1325,7 +1325,7 @@ bool D3D11VARenderer::setupTexturePoolViews(AVD3D11VAFramesContext* frameContext
 
         srvDesc.Texture2DArray.FirstArraySlice = frameContext->texture_infos[i].index;
 
-        srvDesc.Format = m_DecoderParams.videoFormat == VIDEO_FORMAT_H265_MAIN10 ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM;
+        srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16_UNORM : DXGI_FORMAT_R8_UNORM;
         hr = m_Device->CreateShaderResourceView(frameContext->texture_infos[i].texture, &srvDesc, &m_VideoTextureResourceViews[i][0]);
         if (FAILED(hr)) {
             m_VideoTextureResourceViews[i][0] = nullptr;
@@ -1335,7 +1335,7 @@ bool D3D11VARenderer::setupTexturePoolViews(AVD3D11VAFramesContext* frameContext
             return false;
         }
 
-        srvDesc.Format = m_DecoderParams.videoFormat == VIDEO_FORMAT_H265_MAIN10 ? DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM;
+        srvDesc.Format = (m_DecoderParams.videoFormat & VIDEO_FORMAT_MASK_10BIT) ? DXGI_FORMAT_R16G16_UNORM : DXGI_FORMAT_R8G8_UNORM;
         hr = m_Device->CreateShaderResourceView(frameContext->texture_infos[i].texture, &srvDesc, &m_VideoTextureResourceViews[i][1]);
         if (FAILED(hr)) {
             m_VideoTextureResourceViews[i][1] = nullptr;
@@ -111,7 +111,7 @@ bool DrmRenderer::initialize(PDECODER_PARAMETERS params)
 {
     int i;
 
-    m_Main10Hdr = (params->videoFormat == VIDEO_FORMAT_H265_MAIN10);
+    m_Main10Hdr = (params->videoFormat & VIDEO_FORMAT_MASK_10BIT);
 
 #if SDL_VERSION_ATLEAST(2, 0, 15)
     SDL_SysWMinfo info;
@@ -559,7 +559,7 @@ bool DXVA2Renderer::initializeDevice(SDL_Window* window, bool enableVsync)
     d3dpp.hDeviceWindow = info.info.win.window;
     d3dpp.Flags = D3DPRESENTFLAG_VIDEO;
 
-    if (m_VideoFormat == VIDEO_FORMAT_H265_MAIN10) {
+    if (m_VideoFormat & VIDEO_FORMAT_MASK_10BIT) {
         // Verify 10-bit A2R10G10B10 color support. This is only available
         // as a display format in full-screen exclusive mode on DX9.
         hr = d3d9ex->CheckDeviceType(adapterIndex,
@@ -459,8 +459,8 @@ bool EGLRenderer::initialize(PDECODER_PARAMETERS params)
     // Don't retry if we've already failed to create a renderer for this
     // window *unless* the format has changed from 10-bit to 8-bit.
     if (m_Window == s_LastFailedWindow &&
-            (params->videoFormat & VIDEO_FORMAT_H265_MAIN10) ==
-            (s_LastFailedVideoFormat & VIDEO_FORMAT_H265_MAIN10)) {
+            !!(params->videoFormat & VIDEO_FORMAT_MASK_10BIT) ==
+            !!(s_LastFailedVideoFormat & VIDEO_FORMAT_MASK_10BIT)) {
         EGL_LOG(Error, "SDL_CreateRenderer() already failed on this window!");
         return false;
     }
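The EGL hunk above is the one place where a plain mask test is not enough: the old code compared the masked values of the current and last-failed formats directly, which only works while the mask is a single bit. With a multi-bit 10-bit mask, two different 10-bit formats would produce unequal masked values and the "already failed on this window" check would be skipped. Normalizing each side with !! compares only whether the formats are 10-bit. A small illustration, again with placeholder flag values:

// Why the comparison needs "!!" once the mask can contain more than one bit.
// Flag values are placeholders, not the real definitions.
#include <cassert>
#include <cstdint>

constexpr uint32_t MASK_10BIT = 0x0200 | 0x2000; // hypothetical two-bit mask

int main() {
    uint32_t lastFailedFormat = 0x0200; // e.g. one 10-bit format
    uint32_t currentFormat    = 0x2000; // a different 10-bit format
    // Raw masked values differ, so this comparison would claim the bit depth changed:
    assert((currentFormat & MASK_10BIT) != (lastFailedFormat & MASK_10BIT));
    // Normalized to bool, both sides are "10-bit", which is what the check means:
    assert(!!(currentFormat & MASK_10BIT) == !!(lastFailedFormat & MASK_10BIT));
    return 0;
}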
@@ -169,7 +169,7 @@ public:
     }
 
     virtual AVPixelFormat getPreferredPixelFormat(int videoFormat) {
-        if (videoFormat == VIDEO_FORMAT_H265_MAIN10) {
+        if (videoFormat & VIDEO_FORMAT_MASK_10BIT) {
             // 10-bit YUV 4:2:0
             return AV_PIX_FMT_P010;
         }
@@ -92,7 +92,7 @@ bool SdlRenderer::initialize(PDECODER_PARAMETERS params)
 
     m_VideoFormat = params->videoFormat;
 
-    if (params->videoFormat == VIDEO_FORMAT_H265_MAIN10) {
+    if (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) {
         // SDL doesn't support rendering YUV 10-bit textures yet
         return false;
     }
@@ -417,7 +417,7 @@ VAAPIRenderer::isDirectRenderingSupported()
                     "Using indirect rendering due to WM or blacklist");
         return false;
     }
-    else if (m_VideoFormat == VIDEO_FORMAT_H265_MAIN10) {
+    else if (m_VideoFormat & VIDEO_FORMAT_MASK_10BIT) {
         SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
                     "Using indirect rendering for 10-bit video");
         return false;
@@ -777,12 +777,12 @@ VAAPIRenderer::canExportSurfaceHandle(int layerTypeFlag) {
     attrs[attributeCount].type = VASurfaceAttribPixelFormat;
     attrs[attributeCount].flags = VA_SURFACE_ATTRIB_SETTABLE;
     attrs[attributeCount].value.type = VAGenericValueTypeInteger;
-    attrs[attributeCount].value.value.i = (m_VideoFormat == VIDEO_FORMAT_H265_MAIN10) ?
+    attrs[attributeCount].value.value.i = (m_VideoFormat & VIDEO_FORMAT_MASK_10BIT) ?
         VA_FOURCC_P010 : VA_FOURCC_NV12;
     attributeCount++;
 
     st = vaCreateSurfaces(vaDeviceContext->display,
-                          m_VideoFormat == VIDEO_FORMAT_H265_MAIN10 ?
+                          (m_VideoFormat & VIDEO_FORMAT_MASK_10BIT) ?
                               VA_RT_FORMAT_YUV420_10 : VA_RT_FORMAT_YUV420,
                           1280,
                           720,
@@ -831,7 +831,7 @@ VAAPIRenderer::canExportEGL() {
 }
 
 AVPixelFormat VAAPIRenderer::getEGLImagePixelFormat() {
-    return m_VideoFormat == VIDEO_FORMAT_H265_MAIN10 ?
+    return (m_VideoFormat & VIDEO_FORMAT_MASK_10BIT) ?
         AV_PIX_FMT_P010 : AV_PIX_FMT_NV12;
 }
 
@@ -216,7 +216,7 @@ bool VDPAURenderer::initialize(PDECODER_PARAMETERS params)
         VdpBool supported;
         uint32_t maxWidth, maxHeight;
         VdpRGBAFormat candidateFormat =
-            params->videoFormat == VIDEO_FORMAT_H265_MAIN10 ?
+            (params->videoFormat & VIDEO_FORMAT_MASK_10BIT) ?
                 k_OutputFormats10Bit[i] : k_OutputFormats8Bit[i];
 
         status = m_VdpOutputSurfaceQueryCapabilities(m_Device, candidateFormat,
@@ -364,7 +364,7 @@ public:
         //
         // https://github.com/moonlight-stream/moonlight-qt/issues/493
         // https://github.com/moonlight-stream/moonlight-qt/issues/722
-        if (params->videoFormat != VIDEO_FORMAT_H265_MAIN10) {
+        if (!(params->videoFormat & VIDEO_FORMAT_MASK_10BIT)) {
             int err;
             uint32_t cpuType;
             size_t size = sizeof(cpuType);
@@ -234,7 +234,7 @@ bool FFmpegVideoDecoder::createFrontendRenderer(PDECODER_PARAMETERS params, bool
     // rendering mode so it can set the HDR metadata on the display. EGL does
     // not currently support this (and even if it did, Mesa and Wayland don't
     // currently have protocols to actually get that metadata to the display).
-    if (params->videoFormat == VIDEO_FORMAT_H265_MAIN10 && m_BackendRenderer->canExportDrmPrime()) {
+    if ((params->videoFormat & VIDEO_FORMAT_MASK_10BIT) && m_BackendRenderer->canExportDrmPrime()) {
         m_FrontendRenderer = new DrmRenderer(m_BackendRenderer);
         if (m_FrontendRenderer->initialize(params)) {
             return true;
@@ -1 +1 @@
-Subproject commit e62dc56047b038e5f2a5404b023fec453bf1bf8a
+Subproject commit bf22101c7d11cd1fe36409fe5c12b38cbfa8dd06